diff --git a/cmake/FindLAPACKnames.cmake b/cmake/FindLAPACKnames.cmake index 8df0651aa..44175643a 100644 --- a/cmake/FindLAPACKnames.cmake +++ b/cmake/FindLAPACKnames.cmake @@ -9,8 +9,8 @@ # LAPACK_NAMES ... equals "UPPER", "LOWER", "UBACK", or "UFRONT". set(MANGLING_OPTIONS - "UPPER" "LOWER" + "UPPER" "UBACK" "UFRONT") diff --git a/cmake/lapack_mangling.cpp b/cmake/lapack_mangling.cpp index c213be682..77b0e21b5 100644 --- a/cmake/lapack_mangling.cpp +++ b/cmake/lapack_mangling.cpp @@ -3,21 +3,23 @@ // in the LAPACK library without actually using the fortran compiler. #if defined LAPACK_UPPER -# define func ILAVER +# define func DPOTRF #elif defined LAPACK_LOWER -# define func ilaver +# define func dpotrf #elif defined LAPACK_UBACK -# define func ilaver_ +# define func dpotrf_ #elif defined LAPACK_UFRONT -# define func _ilaver -#endif // if defined LAPACK_UP +# define func _dpotrf +#endif // if defined LAPACK_UPPER extern "C" { -void func(int * major, int * minor, int * patch); +void func(char * UPLO, int * N, double * A, int * LDA, int * INFO); } int main(int argc, char ** argv) { - int major = 0; - int minor = 0; - int patch = 0; - func(&major, &minor, &patch); + char UPLO = 'L'; + int N = 1; + double A[1]; + int LDA = 1; + int INFO = 0; + func(&UPLO, &N, A, &LDA, &INFO); return 0; } diff --git a/docs/changes.rst b/docs/changes.rst index 55c071654..d2bcb09d1 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -3,10 +3,28 @@ Change Log ------------------------- -2.3.9 (Unreleased) +2.3.9 (2020-10-15) ~~~~~~~~~~~~~~~~~~~~~~~~~ -* No changes yet +* Add stand-alone benchmarking tool (PR `#365`_). +* Update wheels to use latest OpenBLAS and SuiteSparse (PR `#368`_). +* Tweaks to atmosphere simulation based on calibration campaign (PR `#367`_). +* Add support for 2D polynomial filtering across focalplane (PR `#366`_). +* Ground scheduler support for elevation modulated scans (PR `#364`_). +* Add better dictionary interface to Cache class (PR `#363`_). +* Support simulating basic non-ideal HWP response (PR `#362`_). +* Ground scheduler support for fixed elevations and partial scans (PR `#361`_). +* Additional check for NULL plan returned from FFTW (PR `#360`_). + +.. _`#360`: https://github.com/hpc4cmb/toast/pull/360 +.. _`#361`: https://github.com/hpc4cmb/toast/pull/361 +.. _`#362`: https://github.com/hpc4cmb/toast/pull/362 +.. _`#363`: https://github.com/hpc4cmb/toast/pull/363 +.. _`#364`: https://github.com/hpc4cmb/toast/pull/364 +.. _`#365`: https://github.com/hpc4cmb/toast/pull/365 +.. _`#366`: https://github.com/hpc4cmb/toast/pull/366 +.. _`#367`: https://github.com/hpc4cmb/toast/pull/367 +.. _`#368`: https://github.com/hpc4cmb/toast/pull/368 2.3.8 (2020-06-27) ~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/pipelines/CMakeLists.txt b/pipelines/CMakeLists.txt index 27b1c4e9a..5fb1fc7f4 100644 --- a/pipelines/CMakeLists.txt +++ b/pipelines/CMakeLists.txt @@ -11,5 +11,6 @@ install(PROGRAMS toast_satellite_sim.py toast_ground_sim.py toast_ground_sim_simple.py + toast_benchmark.py DESTINATION bin ) diff --git a/pipelines/toast_benchmark.py b/pipelines/toast_benchmark.py new file mode 100644 index 000000000..2dd29d7f8 --- /dev/null +++ b/pipelines/toast_benchmark.py @@ -0,0 +1,1220 @@ +#!/usr/bin/env python3 + +# Copyright (c) 2015-2020 by the parties listed in the AUTHORS file. +# All rights reserved. Use of this source code is governed by +# a BSD-style license that can be found in the LICENSE file. + +""" +This script dynamically builds a workflow tailored to the size of the communicator. 
+""" + +# This is the maximum random time in seconds for every process to sleep before starting. +# This should be long enough to avoid filesystem contention when loading shared +# libraries and python modules, but short enough that the scheduler does not think that +# MPI has hung and kills the job. +startup_delay = 2.0 + +# These are builtin modules, hopefully fast to load. +import random +import time + +wait = random.uniform(0.0, startup_delay) +time.sleep(wait) + +from toast.mpi import MPI + +# Now import the remaining modules +import os +import sys +import re +import copy +import argparse +import traceback +import pickle + +from datetime import datetime + +import psutil + +import numpy as np + +import healpy as hp + +from toast.mpi import get_world, Comm + +from toast.dist import distribute_uniform, Data, distribute_discrete + +from toast.utils import Logger, Environment, memreport + +from toast.timing import function_timer, GlobalTimers, Timer, gather_timers +from toast.timing import dump as dump_timing + +from toast.todmap import TODGround + +from toast import pipeline_tools + +from toast import rng + +from toast.pipeline_tools import Focalplane + +from toast.tod.sim_focalplane import hex_pol_angles_qu, hex_layout, hex_nring + +from toast.schedule import run_scheduler + + +# This function is part of the package in TOAST 3.0. Remove this here when porting. + + +def fake_hexagon_focalplane( + n_pix=7, + width_deg=5.0, + samplerate=1.0, + epsilon=0.0, + net=1.0, + fmin=0.0, + alpha=1.0, + fknee=0.05, +): + pol_A = hex_pol_angles_qu(n_pix, offset=0.0) + pol_B = hex_pol_angles_qu(n_pix, offset=90.0) + quat_A = hex_layout(n_pix, width_deg, "D", "A", pol_A) + quat_B = hex_layout(n_pix, width_deg, "D", "B", pol_B) + + det_data = dict(quat_A) + det_data.update(quat_B) + + nrings = hex_nring(n_pix) + detfwhm = 0.5 * 60.0 * width_deg / (2 * nrings - 1) + + for det in det_data.keys(): + det_data[det]["pol_leakage"] = epsilon + det_data[det]["fmin"] = fmin + det_data[det]["fknee"] = fknee + det_data[det]["alpha"] = alpha + det_data[det]["NET"] = net + det_data[det]["fwhm_arcmin"] = detfwhm + det_data[det]["fsample"] = samplerate + + return Focalplane(detector_data=det_data, sample_rate=samplerate) + + +def get_node_mem(mpicomm, node_rank): + avail = 2 ** 62 + if node_rank == 0: + vmem = psutil.virtual_memory()._asdict() + avail = vmem["available"] + if mpicomm is not None: + avail = mpicomm.allreduce(avail, op=MPI.MIN) + return int(avail) + + +def sample_distribution( + rank, procs_per_node, bytes_per_node, total_samples, sample_rate +): + # For this benchmark, we start by ramping up to a realistic number of detectors for + # one day of data. Then we extend the timespan to achieve the desired number of + # samples. + + log = Logger.get() + + # Hex-packed 127 pixels (6 rings) times two dets per pixel. + # max_detector = 254 + + # Hex-packed 1027 pixels (18 rings) times two dets per pixel. + max_detector = 2054 + + # Minimum time span (one day) + min_time_samples = int(24 * 3600 * sample_rate) + + # For the minimum time span, scale up the number of detectors to reach the + # requested total sample size. 
+ + n_detector = 1 + test_samples = n_detector * min_time_samples + + while test_samples < total_samples and n_detector < max_detector: + n_detector += 1 + test_samples = n_detector * min_time_samples + + if rank == 0: + log.debug( + " Dist total = {}, using {} detectors at min time samples = {}".format( + total_samples, n_detector, min_time_samples + ) + ) + + # For this number of detectors, determine the group size needed to fit the + # minimum number of samples in memory. In practice, one day will actually be + # split up into multiple observations. However, sizing the groups this way ensures + # that each group will have multiple observations and improve the load balancing. + + det_bytes_per_sample = 2 * ( # At most 2 detector data copies. + 8 # 64 bit float / ints used + * (1 + 4) # detector timestream # pixel index and 3 IQU weights + + 1 # one byte per sample for flags + ) + + common_bytes_per_sample = ( + 8 * (4) # 64 bit floats # One quaternion per sample + + 1 # one byte per sample for common flag + ) + + group_nodes = 0 + group_mem = 0.0 + + # This just ensures we go through the loop once. + min_time_mem = 1.0 + + while group_mem < min_time_mem: + group_nodes += 1 + group_procs = group_nodes * procs_per_node + group_mem = group_nodes * bytes_per_node + + # NOTE: change this when moving to toast-3, since common data is in shared mem. + # So the prefactor should be nodes per group, not group_procs. + bytes_per_samp = ( + n_detector * det_bytes_per_sample + group_procs * common_bytes_per_sample + ) + # bytes_per_samp = ( + # n_detector * det_bytes_per_sample + group_nodes * common_bytes_per_sample + # ) + min_time_mem = min_time_samples * bytes_per_samp + if rank == 0: + log.verbose( + " Dist testing {} group nodes, {} proc/node, group mem = {}, comparing to minimum = {} ({} samp * {} bytes/samp)".format( + group_nodes, + procs_per_node, + group_mem, + min_time_mem, + min_time_samples, + bytes_per_samp, + ) + ) + + if rank == 0: + log.debug(" Dist selecting {} nodes per group".format(group_nodes)) + + # Now set the number of groups to get the target number of total samples. 
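+    # Continuing the example above: with 58 detectors each group holds
+    # 58 * 8,640,000 ~ 5.0e8 samples, so the "small" case needs only one group.
+    # Because n_group is rounded up, n_group * group_samples is always at least
+    # the requested total.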
+ + group_time_samples = min_time_samples + group_samples = n_detector * group_time_samples + + n_group = 1 + (total_samples // group_samples) + + time_samples = n_group * group_time_samples + + if rank == 0: + log.debug( + " Dist using {} groups, each with {} / {} (time / total) samples".format( + n_group, group_time_samples, group_samples + ) + ) + log.debug(" Dist using {} total samples".format(n_detector * time_samples)) + + return ( + n_detector, + time_samples, + group_procs, + group_nodes, + n_group, + group_time_samples, + ) + + +def job_size(mpicomm): + log = Logger.get() + + procs_per_node = 1 + node_rank = 0 + nodecomm = None + rank = 0 + procs = 1 + + if mpicomm is not None: + rank = mpicomm.rank + procs = mpicomm.size + nodecomm = mpicomm.Split_type(MPI.COMM_TYPE_SHARED, 0) + node_rank = nodecomm.rank + procs_per_node = nodecomm.size + min_per_node = mpicomm.allreduce(procs_per_node, op=MPI.MIN) + max_per_node = mpicomm.allreduce(procs_per_node, op=MPI.MAX) + if min_per_node != max_per_node: + raise RuntimeError("Nodes have inconsistent numbers of MPI ranks") + + # One process on each node gets available RAM and communicates it + avail = get_node_mem(mpicomm, node_rank) + + n_node = procs // procs_per_node + + if rank == 0: + log.info( + "Job running on {} nodes each with {} processes ({} total)".format( + n_node, procs_per_node, procs + ) + ) + return (procs_per_node, avail) + + +def job_config(mpicomm, cases): + env = Environment.get() + log = Logger.get() + + class args: + debug = False + # TOD Ground options + el_mod_step_deg = 0.0 + el_mod_rate_hz = 0.0 + el_mod_amplitude_deg = 1.0 + el_mod_sine = False + el_nod_deg = False + el_nod_every_scan = False + start_with_el_nod = False + end_with_el_nod = False + scan_rate = 1.0 + scan_rate_el = 0.0 + scan_accel = 1.0 + scan_accel_el = 0.0 + scan_cosecant_modulate = False + sun_angle_min = 30.0 + schedule = None # required + weather = "SIM" + timezone = 0 + sample_rate = 100.0 + coord = "C" + split_schedule = None + sort_schedule = False + hwp_rpm = 10.0 + hwp_step_deg = None + hwp_step_time_s = None + elevation_noise_a = 0.0 + elevation_noise_b = 0.0 + freq = "150" + do_daymaps = False + do_seasonmaps = False + # Pointing options + nside = 1024 + nside_submap = 16 + single_precision_pointing = False + common_flag_mask = 1 + # Polyfilter options + apply_polyfilter = False + poly_order = 0 + # Ground filter options + apply_groundfilter = False + ground_order = 0 + # Atmosphere options + simulate_atmosphere = False + simulate_coarse_atmosphere = False + focalplane_radius_deg = None + atm_verbosity = 0 + atm_lmin_center = 0.01 + atm_lmin_sigma = 0.001 + atm_lmax_center = 10.0 + atm_lmax_sigma = 10.0 + atm_gain = 2.0e-5 + atm_gain_coarse = 8.0e-5 + atm_zatm = 40000.0 + atm_zmax = 200.0 + atm_xstep = 10.0 + atm_ystep = 10.0 + atm_zstep = 10.0 + atm_nelem_sim_max = 10000 + atm_wind_dist = 3000.0 + atm_z0_center = 2000.0 + atm_z0_sigma = 0.0 + atm_T0_center = 280.0 + atm_T0_sigma = 10.0 + atm_cache = None + atm_apply_flags = False + # Noise simulation options + simulate_noise = False + # Gain scrambler + apply_gainscrambler = False + gain_sigma = 0.01 + # Map maker + mapmaker_prefix = "toast" + mapmaker_mask = None + mapmaker_weightmap = None + mapmaker_iter_max = 20 + mapmaker_precond_width = 100 + mapmaker_prefilter_order = None + mapmaker_baseline_length = 200.0 + mapmaker_noisefilter = False + mapmaker_fourier2D_order = None + mapmaker_fourier2D_subharmonics = None + write_hits = True + write_binmap = True + write_wcov = False + 
write_wcov_inv = False + zip_maps = False + # Monte Carlo + MC_start = 0 + MC_count = 1 + # Sky signal + input_map = None + simulate_sky = True + # Input dir + auxdir = "toast_inputs" + # Output + outdir = "toast" + tidas = None + spt3g = None + + parser = argparse.ArgumentParser( + description="Run a TOAST workflow scaled appropriately to the MPI communicator size and available memory.", + fromfile_prefix_chars="@", + ) + + parser.add_argument( + "--node_mem_gb", + required=False, + default=None, + type=float, + help="Use this much memory per node in GB", + ) + + parser.add_argument( + "--dry_run", + required=False, + default=None, + type=str, + help="Comma-separated total_procs,node_procs to simulate.", + ) + + parser.parse_args(namespace=args) + + procs = 1 + rank = 0 + if mpicomm is not None: + procs = mpicomm.size + rank = mpicomm.rank + + avail_node_bytes = None + procs_per_node = None + + if args.dry_run is not None: + dryrun_total, dryrun_node = args.dry_run.split(",") + dryrun_total = int(dryrun_total) + dryrun_node = int(dryrun_node) + if rank == 0: + log.info( + "DRY RUN simulating {} total processes with {} per node".format( + dryrun_total, dryrun_node + ) + ) + procs_per_node = dryrun_node + procs = dryrun_total + # We are simulating the distribution + avail_node_bytes = get_node_mem(mpicomm, 0) + + else: + # Get information about the actual job size + procs_per_node, avail_node_bytes = job_size(mpicomm) + + if rank == 0: + log.info( + "Minimum detected per-node memory available is {:0.2f} GB".format( + avail_node_bytes / (1024 ** 3) + ) + ) + + if args.node_mem_gb is not None: + avail_node_bytes = int((1024 ** 3) * args.node_mem_gb) + if rank == 0: + log.info( + "Setting per-node available memory to {:0.2f} GB as requested".format( + avail_node_bytes / (1024 ** 3) + ) + ) + + # Based on the total number of processes and count per node, choose the number of + # nodes in each observation and a focalplane such that every process has >= 4 + # detectors. 
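+    # The loop below sizes every case and keeps the one with the largest node
+    # requirement (n_group * group_nodes) that still fits within this job.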
+ + n_nodes = procs // procs_per_node + if rank == 0: + log.info("Job has {} total nodes".format(n_nodes)) + + if rank == 0: + log.info("Examining {} possible cases to run:".format(len(cases))) + + selected_case = None + selected_nodes = None + n_detector = None + time_samples = None + group_procs = None + group_nodes = None + n_group = None + group_time_samples = None + + for case_name, case_samples in cases.items(): + ( + case_n_detector, + case_time_samples, + case_group_procs, + case_group_nodes, + case_n_group, + case_group_time_samples, + ) = sample_distribution( + rank, procs_per_node, avail_node_bytes, case_samples, args.sample_rate + ) + + case_min_nodes = case_n_group * case_group_nodes + if rank == 0: + log.info( + " {:8s}: requires {:d} nodes for {} MPI ranks and {:0.1f}GB per node".format( + case_name, + case_min_nodes, + procs_per_node, + avail_node_bytes / (1024 ** 3), + ) + ) + + if selected_nodes is None: + if case_min_nodes <= n_nodes: + # First case that fits in our job + selected_case = case_name + selected_nodes = case_min_nodes + n_detector = case_n_detector + time_samples = case_time_samples + group_procs = case_group_procs + group_nodes = case_group_nodes + n_group = case_n_group + group_time_samples = case_group_time_samples + else: + if (case_min_nodes <= n_nodes) and (case_min_nodes >= selected_nodes): + # This case fits in our job and is larger than the current one + selected_case = case_name + selected_nodes = case_min_nodes + n_detector = case_n_detector + time_samples = case_time_samples + group_procs = case_group_procs + group_nodes = case_group_nodes + n_group = case_n_group + group_time_samples = case_group_time_samples + + if selected_case is None: + msg = ( + "None of the available cases fit into aggregate memory. Use a larger job." 
+ ) + if rank == 0: + log.error(msg) + raise RuntimeError(msg) + else: + if rank == 0: + log.info("Selected case '{}'".format(selected_case)) + + if rank == 0: + log.info("Using groups of {} nodes".format(group_nodes)) + + # Adjust number of groups + + if n_nodes % group_nodes != 0: + msg = "Current number of nodes ({}) is not divisible by the required group size ({})".format( + n_nodes, group_nodes + ) + if rank == 0: + log.error(msg) + raise RuntimeError(msg) + + n_group = n_nodes // group_nodes + group_time_samples = 1 + time_samples // n_group + + group_seconds = group_time_samples / args.sample_rate + + if args.simulate_atmosphere and args.weather is None: + raise RuntimeError("Cannot simulate atmosphere without a TOAST weather file") + + comm = None + if mpicomm is None or args.dry_run is not None: + comm = Comm(world=None) + else: + comm = Comm(world=mpicomm, groupsize=group_procs) + + jobdate = datetime.now().strftime("%Y%m%d-%H:%M:%S") + + args.outdir += "_{:06d}_grp-{:04d}p-{:02d}n_{}".format( + procs, group_procs, group_nodes, jobdate + ) + args.auxdir = os.path.join(args.outdir, "inputs") + + if rank == 0: + os.makedirs(args.outdir) + os.makedirs(args.auxdir, exist_ok=True) + + if rank == 0: + with open(os.path.join(args.outdir, "log"), "w") as f: + f.write("Running at {}\n".format(jobdate)) + f.write("TOAST version = {}\n".format(env.version())) + f.write("TOAST max threads = {}\n".format(env.max_threads())) + f.write("MPI Processes = {}\n".format(procs)) + f.write("MPI Processes per node = {}\n".format(procs_per_node)) + f.write( + "Memory per node = {:0.2f} GB\n".format(avail_node_bytes / (1024 ** 3)) + ) + f.write("Number of groups = {}\n".format(n_group)) + f.write("Group nodes = {}\n".format(group_nodes)) + f.write("Group MPI Processes = {}\n".format(group_procs)) + f.write("Case selected = {}\n".format(selected_case)) + f.write("Case number of detectors = {}\n".format(n_detector)) + f.write( + "Case total samples = {}\n".format( + n_group * group_time_samples * n_detector + ) + ) + f.write( + "Case samples per group = {}\n".format(group_time_samples * n_detector) + ) + f.write("Case data seconds per group = {}\n".format(group_seconds)) + f.write("Parameters:\n") + for k, v in vars(args).items(): + if re.match(r"_.*", k) is None: + f.write(" {} = {}\n".format(k, v)) + + args.schedule = os.path.join(args.auxdir, "schedule.txt") + args.input_map = os.path.join(args.auxdir, "cmb.fits") + + return args, comm, n_nodes, n_detector, selected_case, group_seconds, n_group + + +def create_schedules(args, max_ces_seconds, days): + opts = [ + "--site-lat", + str(-22.958064), + "--site-lon", + str(-67.786222), + "--site-alt", + str(5200.0), + "--site-name", + "ATACAMA", + "--telescope", + "atacama_telescope", + "--patch-coord", + "C", + "--el-min-deg", + str(30.0), + "--el-max-deg", + str(80.0), + "--sun-el-max-deg", + str(90.0), + "--sun-avoidance-angle-deg", + str(30.0), + "--moon-avoidance-angle-deg", + str(10.0), + "--start", + "2021-06-01 00:00:00", + "--gap-s", + str(600.0), + "--gap-small-s", + str(0.0), + "--fp-radius-deg", + str(0.0), + "--patch", + "BICEP,1,-10,-55,10,-58", + "--ces-max-time-s", + str(max_ces_seconds), + "--operational-days", + str(days), + ] + + if not os.path.isfile(args.schedule): + log = Logger.get() + log.info("Generating input schedule file {}:".format(args.schedule)) + + opts.extend(["--out", args.schedule]) + run_scheduler(opts=opts) + + +def create_input_maps(args): + if not os.path.isfile(args.input_map): + log = Logger.get() + log.info("Generating 
input map {}".format(args.input_map)) + + # This is *completely* fake- just to have something on the sky besides zeros. + + ell = np.arange(3 * args.nside - 1, dtype=np.float64) + + sig = 50.0 + numer = ell - 30.0 + tspec = (1.0 / (sig * np.sqrt(2.0 * np.pi))) * np.exp( + -0.5 * numer ** 2 / sig ** 2 + ) + tspec *= 2000.0 + + sig = 100.0 + numer = ell - 500.0 + espec = (1.0 / (sig * np.sqrt(2.0 * np.pi))) * np.exp( + -0.5 * numer ** 2 / sig ** 2 + ) + espec *= 1.0 + + cls = ( + tspec, + espec, + np.zeros(3 * args.nside - 1, dtype=np.float32), + np.zeros(3 * args.nside - 1, dtype=np.float32), + ) + maps = hp.synfast( + cls, + args.nside, + pol=True, + pixwin=False, + sigma=None, + new=True, + fwhm=np.radians(3.0 / 60.0), + verbose=False, + ) + # for m in maps: + # hp.reorder(m, inp="RING", out="NEST") + hp.write_map(args.input_map, maps, nest=True, fits_IDL=False, dtype=np.float32) + + import matplotlib.pyplot as plt + + hp.mollview(maps[0]) + plt.savefig("{}_fake-T.png".format(args.input_map)) + plt.close() + + hp.mollview(maps[1]) + plt.savefig("{}_fake-E.png".format(args.input_map)) + plt.close() + + +@function_timer +def create_focalplanes(args, comm, schedules, n_detector): + """Attach a focalplane to each of the schedules. + + Args: + schedules (list) : List of Schedule instances. + Each schedule has two members, telescope + and ceslist, a list of CES objects. + Returns: + detweights (dict) : Inverse variance noise weights for every + detector across all focal planes. In [K_CMB^-2]. + They can be used to bin the TOD. + """ + + # Load focalplane information + + focalplanes = [] + if comm.world_rank == 0: + # First create a fake hexagonal focalplane + n_pixel = 1 + ring = 1 + while 2 * n_pixel < n_detector: + n_pixel += 6 * ring + ring += 1 + + fake = fake_hexagon_focalplane( + n_pix=n_pixel, + width_deg=10.0, + samplerate=100.0, + epsilon=0.0, + net=10.0, + fmin=0.0, + alpha=1.0, + fknee=0.05, + ) + + # Now truncate the detectors to the desired count. + newdat = dict() + off = 0 + for k, v in fake.detector_data.items(): + newdat[k] = v + off += 1 + if off >= n_detector: + break + fake.detector_data = newdat + fake.reset_properties() + focalplanes.append(fake) + + if comm.comm_world is not None: + focalplanes = comm.comm_world.bcast(focalplanes) + + if len(focalplanes) == 1 and len(schedules) > 1: + focalplanes *= len(schedules) + + # Append a focal plane and telescope to each entry in the schedules + # list and assemble a detector weight dictionary that represents all + # detectors in all focalplanes + detweights = {} + for schedule, focalplane in zip(schedules, focalplanes): + schedule.telescope.focalplane = focalplane + detweights.update(schedule.telescope.focalplane.detweights) + + return detweights + + +@function_timer +def create_observation(args, comm, telescope, ces, verbose=True): + """Create a TOAST observation. 
+ + Create an observation for the CES scan + + Args: + args : argparse arguments + comm : TOAST communicator + ces (CES) : One constant elevation scan + + """ + focalplane = telescope.focalplane + site = telescope.site + weather = site.weather + noise = focalplane.noise + totsamples = int((ces.stop_time - ces.start_time) * args.sample_rate) + + # create the TOD for this observation + + if comm.comm_group is not None: + ndetrank = comm.comm_group.size + else: + ndetrank = 1 + + if args.el_nod_deg and (ces.subscan == 0 or args.el_nod_every_scan): + el_nod = args.el_nod_deg + else: + el_nod = None + + try: + tod = TODGround( + comm.comm_group, + focalplane.detquats, + totsamples, + detranks=ndetrank, + boresight_angle=ces.boresight_angle, + firsttime=ces.start_time, + rate=args.sample_rate, + site_lon=site.lon, + site_lat=site.lat, + site_alt=site.alt, + azmin=ces.azmin, + azmax=ces.azmax, + el=ces.el, + el_nod=el_nod, + start_with_elnod=args.start_with_el_nod, + end_with_elnod=args.end_with_el_nod, + el_mod_step=args.el_mod_step_deg, + el_mod_rate=args.el_mod_rate_hz, + el_mod_amplitude=args.el_mod_amplitude_deg, + el_mod_sine=args.el_mod_sine, + scanrate=args.scan_rate, + scanrate_el=args.scan_rate_el, + scan_accel=args.scan_accel, + scan_accel_el=args.scan_accel_el, + cosecant_modulation=args.scan_cosecant_modulate, + CES_start=None, + CES_stop=None, + sun_angle_min=args.sun_angle_min, + coord=args.coord, + sampsizes=None, + report_timing=args.debug, + hwprpm=args.hwp_rpm, + hwpstep=args.hwp_step_deg, + hwpsteptime=args.hwp_step_time_s, + ) + except RuntimeError as e: + raise RuntimeError( + 'Failed to create TOD for {}-{}-{}: "{}"' + "".format(ces.name, ces.scan, ces.subscan, e) + ) + + # Create the observation + + obs = {} + obs["name"] = "CES-{}-{}-{}-{}-{}".format( + site.name, telescope.name, ces.name, ces.scan, ces.subscan + ) + obs["tod"] = tod + obs["baselines"] = None + obs["noise"] = copy.deepcopy(noise) + obs["id"] = int(ces.mjdstart * 10000) + obs["intervals"] = tod.subscans + obs["site"] = site + obs["site_name"] = site.name + obs["site_id"] = site.id + obs["altitude"] = site.alt + obs["weather"] = site.weather + obs["telescope"] = telescope + obs["telescope_name"] = telescope.name + obs["telescope_id"] = telescope.id + obs["focalplane"] = telescope.focalplane.detector_data + obs["fpradius"] = telescope.focalplane.radius + obs["start_time"] = ces.start_time + obs["season"] = ces.season + obs["date"] = ces.start_date + obs["MJD"] = ces.mjdstart + obs["rising"] = ces.rising + obs["mindist_sun"] = ces.mindist_sun + obs["mindist_moon"] = ces.mindist_moon + obs["el_sun"] = ces.el_sun + return obs + + +@function_timer +def create_observations(args, comm, schedules): + """Create and distribute TOAST observations for every CES in + schedules. + + Args: + schedules (iterable) : a list of Schedule objects. + """ + log = Logger.get() + timer = Timer() + timer.start() + + data = Data(comm) + + # Loop over the schedules, distributing each schedule evenly across + # the process groups. For now, we'll assume that each schedule has + # the same number of operational days and the number of process groups + # matches the number of operational days. Relaxing these constraints + # will cause the season break to occur on different process groups + # for different schedules and prevent splitting the communicator. 
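+    # Each process group receives a contiguous block of CESes, weighted by CES
+    # duration; for example, 8 equal-length CESes split over 4 groups would give
+    # each group 2 consecutive CESes.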
+ + total_samples = 0 + group_samples = 0 + for schedule in schedules: + + telescope = schedule.telescope + all_ces = schedule.ceslist + nces = len(all_ces) + + breaks = pipeline_tools.get_breaks(comm, all_ces, args) + + ces_weights = [x.stop_time - x.start_time for x in all_ces] + groupdist = distribute_discrete(ces_weights, comm.ngroups, breaks=breaks) + + # groupdist = distribute_uniform(nces, comm.ngroups, breaks=breaks) + group_firstobs = groupdist[comm.group][0] + group_numobs = groupdist[comm.group][1] + + for ices in range(group_firstobs, group_firstobs + group_numobs): + obs = create_observation(args, comm, telescope, all_ces[ices]) + group_samples += obs["tod"].total_samples + data.obs.append(obs) + + if comm.comm_rank is not None: + if comm.comm_group.rank == 0: + total_samples = comm.comm_rank.allreduce(group_samples, op=MPI.SUM) + total_samples = comm.comm_group.bcast(total_samples, root=0) + if comm.comm_world is None or comm.comm_group.rank == 0: + log.info("Group # {:4} has {} observations.".format(comm.group, len(data.obs))) + + if len(data.obs) == 0: + raise RuntimeError( + "Too many tasks. Every MPI task must " + "be assigned to at least one observation." + ) + + if comm.comm_world is not None: + comm.comm_world.barrier() + timer.stop() + if comm.world_rank == 0: + timer.report("Simulated scans") + + # Split the data object for each telescope for separate mapmaking. + # We could also split by site. + + if len(schedules) > 1: + telescope_data = data.split("telescope_name") + if len(telescope_data) == 1: + # Only one telescope available + telescope_data = [] + else: + telescope_data = [] + telescope_data.insert(0, ("all", data)) + return data, telescope_data, total_samples + + +def setup_sigcopy(args): + """Determine if an extra copy of the atmospheric signal is needed. + + When we simulate multichroic focal planes, the frequency-independent + part of the atmospheric noise is simulated first and then the + frequency scaling is applied to a copy of the atmospheric noise. + """ + if len(args.freq.split(",")) == 1: + totalname = "total" + totalname_freq = "total" + else: + totalname = "total" + totalname_freq = "total_freq" + + return totalname, totalname_freq + + +@function_timer +def setup_output(args, comm, mc, freq): + outpath = "{}/{:03}/{:03}".format(args.outdir, mc, int(freq)) + if comm.world_rank == 0: + os.makedirs(outpath, exist_ok=True) + return outpath + + +def main(): + env = Environment.get() + env.enable_function_timers() + + log = Logger.get() + gt = GlobalTimers.get() + gt.start("toast_benchmark (total)") + + mpiworld, procs, rank = get_world() + + if rank == 0: + log.info("TOAST version = {}".format(env.version())) + log.info("Using a maximum of {} threads per process".format(env.max_threads())) + if mpiworld is None: + log.info("Running serially with one process at {}".format(str(datetime.now()))) + else: + if rank == 0: + log.info( + "Running with {} processes at {}".format(procs, str(datetime.now())) + ) + + cases = { + "tiny": 5000000, # O(1) GB RAM + "xsmall": 50000000, # O(10) GB RAM + "small": 500000000, # O(100) GB RAM + "medium": 5000000000, # O(1) TB RAM + "large": 50000000000, # O(10) TB RAM + "xlarge": 500000000000, # O(100) TB RAM + "heroic": 5000000000000, # O(1000) TB RAM + } + + args, comm, n_nodes, n_detector, case, group_seconds, n_group = job_config( + mpiworld, cases + ) + + # Note: The number of "days" here will just be an approximation of the desired + # data volume since we are doing a realistic schedule for a real observing site. 
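+    # The factor of 2 below requests roughly twice as many operational days as
+    # the nominal data volume implies, leaving slack for gaps in the realistic
+    # schedule.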
+ + n_days = int(2.0 * (group_seconds * n_group) / (24 * 3600)) + if n_days == 0: + n_days = 1 + + if rank == 0: + log.info( + "Using {} detectors for approximately {} days".format(n_detector, n_days) + ) + + # Create the schedule file and input maps on one process + if rank == 0: + create_schedules(args, group_seconds, n_days) + create_input_maps(args) + if mpiworld is not None: + mpiworld.barrier() + + if args.dry_run is not None: + if rank == 0: + log.info("Exit from dry run") + # We are done! + sys.exit(0) + + gt.start("toast_benchmark (science work)") + + # Load and broadcast the schedule file + + schedules = pipeline_tools.load_schedule(args, comm) + + # Load the weather and append to schedules + + pipeline_tools.load_weather(args, comm, schedules) + + # Simulate the focalplane + + detweights = create_focalplanes(args, comm, schedules, n_detector) + + # Create the TOAST data object to match the schedule. This will + # include simulating the boresight pointing. + + data, telescope_data, total_samples = create_observations(args, comm, schedules) + + # handle = None + # if comm.world_rank == 0: + # handle = open(os.path.join(args.outdir, "distdata.txt"), "w") + # data.info(handle) + # if comm.world_rank == 0: + # handle.close() + # if comm.comm_world is not None: + # comm.comm_world.barrier() + + # Split the communicator for day and season mapmaking + + time_comms = pipeline_tools.get_time_communicators(args, comm, data) + + # Expand boresight quaternions into detector pointing weights and + # pixel numbers + + pipeline_tools.expand_pointing(args, comm, data) + + # Optionally rewrite the noise PSD:s in each observation to include + # elevation-dependence + + pipeline_tools.get_elevation_noise(args, comm, data) + + # Purge the pointing if we are NOT going to export the + # data to a TIDAS volume + if (args.tidas is None) and (args.spt3g is None): + for ob in data.obs: + tod = ob["tod"] + tod.free_radec_quats() + + # Prepare auxiliary information for distributed map objects + + signalname = pipeline_tools.scan_sky_signal(args, comm, data, "signal") + + # Set up objects to take copies of the TOD at appropriate times + + totalname, totalname_freq = setup_sigcopy(args) + + # Loop over Monte Carlos + + firstmc = args.MC_start + nsimu = args.MC_count + + freqs = [float(freq) for freq in args.freq.split(",")] + nfreq = len(freqs) + + for mc in range(firstmc, firstmc + nsimu): + + pipeline_tools.simulate_atmosphere(args, comm, data, mc, totalname) + + # Loop over frequencies with identical focal planes and identical + # atmospheric noise. + + for ifreq, freq in enumerate(freqs): + + if comm.world_rank == 0: + log.info( + "Processing frequency {}GHz {} / {}, MC = {}".format( + freq, ifreq + 1, nfreq, mc + ) + ) + + # Make a copy of the atmosphere so we can scramble the gains and apply + # frequency-dependent scaling. + pipeline_tools.copy_signal(args, comm, data, totalname, totalname_freq) + + pipeline_tools.scale_atmosphere_by_frequency( + args, comm, data, freq=freq, mc=mc, cache_name=totalname_freq + ) + + pipeline_tools.update_atmospheric_noise_weights(args, comm, data, freq, mc) + + # Add previously simulated sky signal to the atmospheric noise. 
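+            # purge=(nsimu == 1): with a single Monte Carlo realization the
+            # cached sky signal can be released after it is added, since no
+            # later realization will reuse it.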
+ + pipeline_tools.add_signal( + args, comm, data, totalname_freq, signalname, purge=(nsimu == 1) + ) + + mcoffset = ifreq * 1000000 + + pipeline_tools.simulate_noise( + args, comm, data, mc + mcoffset, totalname_freq + ) + + pipeline_tools.scramble_gains( + args, comm, data, mc + mcoffset, totalname_freq + ) + + outpath = setup_output(args, comm, mc + mcoffset, freq) + + # Bin and destripe maps + + pipeline_tools.apply_mapmaker( + args, + comm, + data, + outpath, + totalname_freq, + time_comms=time_comms, + telescope_data=telescope_data, + first_call=(mc == firstmc), + ) + + if args.apply_polyfilter or args.apply_groundfilter: + + # Filter signal + + pipeline_tools.apply_polyfilter(args, comm, data, totalname_freq) + + pipeline_tools.apply_groundfilter(args, comm, data, totalname_freq) + + # Bin filtered maps + + pipeline_tools.apply_mapmaker( + args, + comm, + data, + outpath, + totalname_freq, + time_comms=time_comms, + telescope_data=telescope_data, + first_call=False, + extra_prefix="filtered", + bin_only=True, + ) + + gt.stop_all() + if mpiworld is not None: + mpiworld.barrier() + + runtime = gt.seconds("toast_benchmark (science work)") + prefactor = 1.0e-3 + kilo_samples = 1.0e-3 * total_samples + sample_factor = 1.2 + det_factor = 2.0 + metric = ( + prefactor + * n_detector ** det_factor + * kilo_samples ** sample_factor + / (n_nodes * runtime) + ) + if rank == 0: + msg = "Science Metric: {:0.1e} * ({:d}**{:0.2f}) * ({:0.3e}**{:0.3f}) / ({:0.1f} * {}) = {:0.2f}".format( + prefactor, + n_detector, + det_factor, + kilo_samples, + sample_factor, + runtime, + n_nodes, + metric, + ) + log.info("") + log.info(msg) + log.info("") + with open(os.path.join(args.outdir, "log"), "a") as f: + f.write(msg) + f.write("\n\n") + + timer = Timer() + timer.start() + alltimers = gather_timers(comm=mpiworld) + if comm.world_rank == 0: + out = os.path.join(args.outdir, "timing") + dump_timing(alltimers, out) + with open(os.path.join(args.outdir, "log"), "a") as f: + f.write("Copy of Global Timers:\n") + with open("{}.csv".format(out), "r") as t: + f.write(t.read()) + timer.stop() + timer.report("Gather and dump timing info") + return + + +if __name__ == "__main__": + try: + main() + except Exception: + # We have an unhandled exception on at least one process. Print a stack + # trace for this process and then abort so that all processes terminate. 
+ mpiworld, procs, rank = get_world() + if procs == 1: + raise + exc_type, exc_value, exc_traceback = sys.exc_info() + lines = traceback.format_exception(exc_type, exc_value, exc_traceback) + lines = ["Proc {}: {}".format(rank, x) for x in lines] + print("".join(lines), flush=True) + if mpiworld is not None: + mpiworld.Abort(6) diff --git a/pipelines/toast_ground_schedule.py b/pipelines/toast_ground_schedule.py index b1d02fb31..d26f6cf8e 100644 --- a/pipelines/toast_ground_schedule.py +++ b/pipelines/toast_ground_schedule.py @@ -9,2878 +9,15 @@ to toast_ground_sim.py """ -import argparse -from datetime import datetime, timezone, timedelta -import dateutil.parser -import os -import sys -import traceback - -import numpy as np -from scipy.constants import degree -from matplotlib import cm - -import ephem -import healpy as hp - -from toast.mpi import get_world -from toast.utils import Logger -import toast.qarray as qa -from toast.timing import function_timer, GlobalTimers - - -XAXIS, YAXIS, ZAXIS = np.eye(3) - - -class TooClose(Exception): - pass - - -class SunTooClose(TooClose): - pass - - -class MoonTooClose(TooClose): - pass - - -class Patch(object): - - hits = 0 - partial_hits = 0 - rising_hits = 0 - setting_hits = 0 - time = 0 - rising_time = 0 - setting_time = 0 - step = -1 - az_min = 0 - az_max = 2 * np.pi - _area = None - current_el_min = 0 - current_el_max = 0 - el_min0 = 0 - el_max0 = np.pi / 2 - el_min = el_min0 - el_max = el_max0 - el_step = 0 - alternate = False - ra_amplitude = None - ra_period = 10 - dec_amplitude = None - dec_period = 10 - corners = [] - preferred_el = None - - def __init__( - self, - name, - weight, - corners, - el_min=0, - el_max=np.pi / 2, - el_step=0, - alternate=False, - site_lat=0, - area=None, - ra_period=10, - ra_amplitude=None, - dec_period=10, - dec_amplitude=None, - elevations=None, - ): - self.name = name - self.weight = weight - self.corners = corners - self.el_min0 = el_min - self.el_min = el_min - self.el_max0 = el_max - self.el_step = np.abs(el_step) - self.alternate = alternate - self._area = area - self.site_lat = site_lat - self.ra_period = ra_period - self.ra_amplitude = np.radians(ra_amplitude) - self.dec_period = dec_period - self.dec_amplitude = np.radians(dec_amplitude) - # Use the site latitude to infer the lowest elevation that all - # corners cross. - site_el_max = np.pi / 2 - for corner in corners: - el_max = np.pi / 2 - np.abs(corner._dec - self.site_lat) - if el_max < site_el_max: - site_el_max = el_max - if elevations is None: - if site_el_max < self.el_max0: - self.el_max0 = site_el_max - self.elevations = None - else: - # Parse the allowed elevations - try: - # Try parsing as a string - self.elevations = [ - np.radians(float(el)) for el in elevations.split(",") - ] - except AttributeError: - # Try parsing as an iterable - self.elevations = [np.radians(el) for el in elevations] - self.elevations = np.sort(np.array(self.elevations)) - # Check if any of the allowed elevations is above the highest - # observable elevation - bad = self.elevations > site_el_max - if np.any(bad): - good = np.logical_not(bad) - if np.any(good): - print( - "WARNING: {} of the observing elevations are too high " - "for '{}': {} > {:.2f} deg".format( - np.sum(bad), - self.name, - np.degrees(self.elevations[bad]), - np.degrees(site_el_max), - ), - flush=True, - ) - self.elevations = self.elevations[good] - else: - print( - "ERROR: all of the observing elevations are too high for {}. 
" - "Maximum observing elevation is {} deg".format( - self.name, np.degrees(site_el_max) - ), - flush=True, - ) - sys.exit() - self.el_min0 = np.amin(self.elevations) - self.el_max0 = np.amax(self.elevations) - if el_step != 0: - self.nstep_el = int((self.el_max0 - self.el_min0 + 1e-3) // el_step) + 1 - self.elevations0 = self.elevations - self.el_max = self.el_max0 - self.el_lim = self.el_min0 - self.step_azel() - return - - def oscillate(self): - if self.ra_amplitude: - # Oscillate RA - halfperiod = self.ra_period // 2 - old_phase = np.fmod(self.hits - 1 + halfperiod, self.ra_period) - halfperiod - new_phase = np.fmod(self.hits + halfperiod, self.ra_period) - halfperiod - old_offset = old_phase / halfperiod * self.ra_amplitude - new_offset = new_phase / halfperiod * self.ra_amplitude - offset = new_offset - old_offset - for corner in self.corners: - corner._ra += offset - if self.dec_amplitude: - # Oscillate DEC - halfperiod = self.dec_period // 2 - old_phase = ( - np.fmod(self.hits - 1 + halfperiod, self.dec_period) - halfperiod - ) - new_phase = np.fmod(self.hits + halfperiod, self.dec_period) - halfperiod - old_offset = old_phase / halfperiod * self.dec_amplitude - new_offset = new_phase / halfperiod * self.dec_amplitude - offset = new_offset - old_offset - for corner in self.corners: - corner._dec += offset - return - - @function_timer - def get_area(self, observer, nside=32, equalize=False): - self.update(observer) - if self._area is None: - npix = 12 * nside ** 2 - hitmap = np.zeros(npix) - for corner in self.corners: - corner.compute(observer) - for pix in range(npix): - lon, lat = hp.pix2ang(nside, pix, lonlat=True) - center = ephem.FixedBody() - center._ra = np.radians(lon) - center._dec = np.radians(lat) - center.compute(observer) - hitmap[pix] = self.in_patch(center) - self._area = np.sum(hitmap) / hitmap.size - if self._area == 0: - raise RuntimeError("Patch has zero area!") - if equalize: - self.weight /= self._area - return self._area - - @function_timer - def corner_coordinates(self, observer=None, unwind=False): - """Return the corner coordinates in horizontal frame. - - PyEphem measures the azimuth East (clockwise) from North. - """ - azs = [] - els = [] - az0 = None - for corner in self.corners: - if observer is not None: - corner.compute(observer) - if unwind: - if az0 is None: - az0 = corner.az - azs.append(unwind_angle(az0, corner.az)) - else: - azs.append(corner.az) - els.append(corner.alt) - return np.array(azs), np.array(els) - - @function_timer - def in_patch(self, obj): - """ - Determine if the object (e.g. Sun or Moon) is inside the patch - by using a ray casting algorithm. The ray is cast along a - constant meridian to follow a great circle. 
- """ - az0 = obj.az - # Get corner coordinates, assuming they were already computed - azs, els = self.corner_coordinates() - els_cross = [] - for i in range(len(self.corners)): - az1 = azs[i] - el1 = els[i] - j = (i + 1) % len(self.corners) - az2 = unwind_angle(az1, azs[j]) - el2 = els[j] - azmean = 0.5 * (az1 + az2) - az0 = unwind_angle(azmean, np.float(obj.az), np.pi) - if (az1 - az0) * (az2 - az0) > 0: - # the constant meridian is not between the two corners - continue - el_cross = el1 + (az1 - az0) * (el2 - el1) / (az1 - az2) - if np.abs(obj.az - (az0 % (2 * np.pi))) < 1e-3: - els_cross.append(el_cross) - elif el_cross > 0: - els_cross.append(np.pi - el_cross) - else: - els_cross.append(-np.pi - el_cross) - - els_cross = np.array(els_cross) - if els_cross.size < 2: - return False - - # Unwind the crossing elevations to minimize the scatter - els_cross = np.sort(els_cross) - if els_cross.size > 1: - ptps = [] - for i in range(els_cross.size): - els_cross_alt = els_cross.copy() - els_cross_alt[:i] += 2 * np.pi - ptps.append(np.ptp(els_cross_alt)) - i = np.argmin(ptps) - if i > 0: - els_cross[:i] += 2 * np.pi - els_cross = np.sort(els_cross) - el_mean = np.mean(els_cross) - el0 = unwind_angle(el_mean, np.float(obj.alt)) - - ncross = np.sum(els_cross > el0) - - if ncross % 2 == 0: - # Even number of crossings means that the object is outside - # of the patch - return False - return True - - @function_timer - def step_azel(self): - self.step += 1 - if self.el_step != 0 and self.alternate: - # alternate between rising and setting scans - if self.rising_hits < self.setting_hits: - # Schedule a rising scan - istep = self.rising_hits % self.nstep_el - self.el_min = min(self.el_max0, self.el_min0 + istep * self.el_step) - self.el_max = self.el_max0 - self.az_min = 0 - self.az_max = np.pi - else: - # Schedule a setting scan - istep = self.setting_hits % self.nstep_el - self.el_min = self.el_min0 - self.el_max = max(self.el_min0, self.el_max0 - istep * self.el_step) - self.az_min = np.pi - self.az_max = 2 * np.pi - else: - if self.alternate: - self.az_min = (self.az_min + np.pi) % (2 * np.pi) - self.az_max = self.az_min + np.pi - else: - self.el_min += self.el_step - if self.el_min > self.el_max0: - self.el_min = self.el_min0 - if self.el_step != 0 and self.elevations is not None: - tol = np.radians(0.1) - self.elevations = np.array( - [ - el - for el in self.elevations0 - if (el + tol >= self.el_min and el - tol <= self.el_max) - ] - ) - return - - def reset(self): - self.step += 1 - self.el_min = self.el_min0 - self.el_max = self.el_max0 - self.elevations = self.elevations0 - self.az_min = 0 - if self.alternate: - self.az_max = np.pi - else: - self.az_max = 2 * np.pi - return - - def visible( - self, - el_min, - observer, - sun, - moon, - sun_avoidance_angle, - moon_avoidance_angle, - check_sso, - ): - self.update(observer) - patch_el_max = -1000 - patch_el_min = 1000 - in_view = False - for i, corner in enumerate(self.corners): - corner.compute(observer) - patch_el_min = min(patch_el_min, corner.alt) - patch_el_max = max(patch_el_max, corner.alt) - if corner.alt > el_min: - # At least one corner is visible - in_view = True - if check_sso: - if sun_avoidance_angle > 0: - angle = np.degrees(ephem.separation(sun, corner)) - if angle < sun_avoidance_angle: - # Patch is too close to the Sun - return False, "Too close to Sun {:.2f}".format(angle) - if moon_avoidance_angle > 0: - angle = np.degrees(ephem.separation(moon, corner)) - if angle < moon_avoidance_angle: - # Patch is too close to the Moon - 
return False, "Too close to Moon {:.2f}".format(angle) - if not in_view: - msg = "Below el_min = {:.2f} at el = {:.2f}..{:.2f}.".format( - np.degrees(el_min), np.degrees(patch_el_min), np.degrees(patch_el_max) - ) - else: - msg = "in view" - self.current_el_min = patch_el_min - self.current_el_max = patch_el_max - - return in_view, msg - - def update(self, *args, **kwargs): - """ - A virtual method that is implemented by moving targets - """ - pass - - -class SSOPatch(Patch): - def __init__(self, name, weight, radius): - self.name = name - self.weight = weight - self.radius = radius - try: - self.body = getattr(ephem, name)() - except: - raise RuntimeError("Failed to initialize {} from pyEphem".format(name)) - self.corners = None - return - - def update(self, observer): - """ - Calculate the relative position of the SSO at a given time - """ - self.body.compute(observer) - ra, dec = self.body.ra, self.body.dec - # Synthesize 8 corners around the center - phi = ra - theta = dec - r = self.radius - ncorner = 8 - angstep = 2 * np.pi / ncorner - self.corners = [] - for icorner in range(ncorner): - ang = angstep * icorner - delta_theta = np.cos(ang) * r - delta_phi = np.sin(ang) * r / np.cos(theta + delta_theta) - patch_corner = ephem.FixedBody() - patch_corner._ra = phi + delta_phi - patch_corner._dec = theta + delta_theta - self.corners.append(patch_corner) - return - - -class CoolerCyclePatch(Patch): - def __init__( - self, - weight, - power, - hold_time_min, - hold_time_max, - cycle_time, - az, - el, - last_cycle_end, - ): - # Standardized name for cooler cycles - self.name = "cooler_cycle" - self.hold_time_min = hold_time_min * 3600 - self.hold_time_max = hold_time_max * 3600 - self.cycle_time = cycle_time * 3600 - self.az = az - self.el = el - self.last_cycle_end = last_cycle_end - self.weight0 = weight - self.weight = weight - self.power = power - return - - def get_area(self, *args, **kwargs): - if self._area is None: - self._area = 0 - return self._area - - def corner_coordinates(self, *args, **kwargs): - return None - - def in_patch(self, *args, **kwargs): - return False - - def step_azel(self, *args, **kwargs): - return - - def reset(self, *args, **kwargs): - return - - def get_current_hold_time(self, observer): - tlast = to_DJD(self.last_cycle_end) - tnow = float(observer.date) # In Dublin Julian date - hold_time = (tnow - tlast) * 86400 # in seconds - return hold_time - - def visible( - self, - el_min, - observer, - sun, - moon, - sun_avoidance_angle, - moon_avoidance_angle, - check_sso, - ): - self.update(observer) - hold_time = self.get_current_hold_time(observer) - if hold_time > self.hold_time_min: - visible = True - msg = "minimum hold time exceeded" - else: - visible = False - msg = "minimum hold time not met" - return visible, msg - - def update(self, observer): - hold_time = self.get_current_hold_time(observer) - if hold_time < self.hold_time_min: - self.weight = np.inf - else: - weight = (self.hold_time_max - hold_time) / ( - self.hold_time_max - self.hold_time_min - ) - self.weight = self.weight0 * weight ** self.power - return - - -class HorizontalPatch(Patch): - def __init__(self, name, weight, azmin, azmax, el, scantime): - self.name = name - self.weight = weight - if azmin <= np.pi and azmax <= np.pi: - self.rising = True - elif azmin >= np.pi and azmax >= np.pi: - self.rising = False - else: - raise RuntimeError("Horizontal patch must either be rising or setting.") - self.az_min = azmin - self.az_max = azmax - self.el = el - # scan time is the maximum time spent on 
this scan before targeting again - self.scantime = scantime # in minutes. - self.scandrift = scantime / 60 * 15 * degree - - self.el_min0 = el - self.el_min = el - self.el_max0 = el - self.el_step = 0 - self.alternate = False - self._area = 0 - self.el_max = self.el_max0 - self.el_lim = self.el_min0 - return - - def get_area(self, observer, nside=32, equalize=False): - return 1 - - def corner_coordinates(self, observer=None, unwind=False): - azs = [self.az_min, self.az_max] - els = [self.el_min, self.el_max] - return np.array(azs), np.array(els) - - def in_patch(self, obj, angle=0): - azmin = obj.az - angle - azmax = obj.az + angle - elmin = obj.alt - angle - elmax = obj.alt + angle - if self.rising: - elmax += self.scandrift - else: - elmin -= self.scandrift - if ( - azmin > self.az_min - and azmax < self.az_max - and elmin > self.el_min - and elmax < self.el_max - ): - return True - return False - - def step_azel(self): - return - - def visible( - self, - el_min, - observer, - sun, - moon, - sun_avoidance_angle, - moon_avoidance_angle, - check_sso, - ): - - in_view = True - msg = "" - if check_sso: - for sso, angle, name in [ - (sun, sun_avoidance_angle, "Sun"), - (moon, moon_avoidance_angle, "Moon"), - ]: - if self.in_patch(sso, angle=angle): - in_view = False - msg += "{} too close;".format(name) - - if in_view: - msg = "in view" - self.current_el_min = self.el_min - self.current_el_max = self.el_max - return in_view, msg - - -def to_UTC(t): - # Convert UNIX time stamp to a date string - return datetime.fromtimestamp(t, timezone.utc).strftime("%Y-%m-%d %H:%M:%S") - - -def to_JD(t): - # Unix time stamp to Julian date - # (days since -4712-01-01 12:00:00 UTC) - return t / 86400.0 + 2440587.5 - - -def to_MJD(t): - # Convert Unix time stamp to modified Julian date - # (days since 1858-11-17 00:00:00 UTC) - return to_JD(t) - 2400000.5 - - -def to_DJD(t): - # Convert Unix time stamp to Dublin Julian date - # (days since 1899-12-31 12:00:00) - # This is the time format used by PyEphem - return to_JD(t) - 2415020 - - -def DJDtoUNIX(djd): - # Convert Dublin Julian date to a UNIX time stamp - return ((djd + 2415020) - 2440587.5) * 86400.0 - - -def patch_is_rising(patch): - try: - # Horizontal patch definition - rising = patch.rising - except: - rising = True - for corner in patch.corners: - if corner.alt > 0 and corner.az > np.pi: - # The patch is setting - rising = False - break - return rising - - -@function_timer -def prioritize(args, visible): - """Order visible targets by priority and number of scans.""" - log = Logger.get() - for i in range(len(visible)): - for j in range(len(visible) - i - 1): - # If either of the patches is a cooler cycle, we don't modulate - # the priorities with hit counts, observing time or elevation - if isinstance(visible[j], CoolerCyclePatch) or isinstance( - visible[j + 1], CoolerCyclePatch - ): - weight1 = visible[j].weight - weight2 = visible[j + 1].weight - else: - if patch_is_rising(visible[j]): - if args.equalize_time: - hits1 = visible[j].rising_time - else: - hits1 = visible[j].rising_hits - el1 = np.degrees(visible[j].current_el_max) - else: - if args.equalize_time: - hits1 = visible[j].setting_time - else: - hits1 = visible[j].setting_hits - el1 = np.degrees(visible[j].current_el_min) - if patch_is_rising(visible[j + 1]): - if args.equalize_time: - hits2 = visible[j + 1].rising_time - else: - hits2 = visible[j + 1].rising_hits - el2 = np.degrees(visible[j + 1].current_el_max) - else: - if args.equalize_time: - hits2 = visible[j + 1].setting_time - else: - 
hits2 = visible[j + 1].setting_hits - el2 = np.degrees(visible[j + 1].current_el_min) - # Patch with the lower weight goes first. Having more - # earlier observing time and lower observing elevation - # will increase the weight. - weight1 = (hits1 + 1) * visible[j].weight - weight2 = (hits2 + 1) * visible[j + 1].weight - # Optional elevation penalty - if args.elevation_penalty_limit > 0: - lim = args.elevation_penalty_limit - if el1 < lim: - weight1 *= (lim / el1) ** args.elevation_penalty_power - if el2 < lim: - weight2 *= (lim / el2) ** args.elevation_penalty_power - if weight1 > weight2: - visible[j], visible[j + 1] = visible[j + 1], visible[j] - names = [] - for patch in visible: - names.append(patch.name) - log.debug("Prioritized list of viewable patches: {}".format(names)) - return - - -@function_timer -def attempt_scan( - args, - observer, - visible, - not_visible, - t, - fp_radius, - stop_timestamp, - tstop_cooler, - sun, - moon, - sun_el_max, - fout, - fout_fmt, - ods, - boresight_angle, -): - """Attempt scanning the visible patches in order until success.""" - log = Logger.get() - success = False - # Always begin by attempting full scans. If none can be completed - # and user allowed partials scans, try them next. - for allow_partial_scans in False, True: - if allow_partial_scans and not args.allow_partial_scans: - break - for patch in visible: - if isinstance(patch, CoolerCyclePatch): - # Cycle the cooler - t = add_cooler_cycle( - args, - t, - stop_timestamp, - observer, - sun, - moon, - fout, - fout_fmt, - patch, - boresight_angle, - ) - success = True - break - # All on-sky targets - for rising in [True, False]: - observer.date = to_DJD(t) - el = get_constant_elevation( - args, - observer, - patch, - rising, - fp_radius, - not_visible, - partial_scan=allow_partial_scans, - ) - if el is None: - continue - success, azmins, azmaxs, aztimes, tstop = scan_patch( - args, - el, - patch, - t, - fp_radius, - observer, - sun, - not_visible, - tstop_cooler, - sun_el_max, - rising, - ) - if success: - try: - t, _ = add_scan( - args, - t, - tstop, - aztimes, - azmins, - azmaxs, - rising, - fp_radius, - observer, - sun, - moon, - fout, - fout_fmt, - patch, - el, - ods, - boresight_angle, - partial_scan=allow_partial_scans, - ) - patch.step_azel() - break - except TooClose: - success = False - break - if success: - break - if success: - break - return success, t - - -def from_angles(az, el): - elquat = qa.rotation(YAXIS, np.radians(90 - el)) - azquat = qa.rotation(ZAXIS, np.radians(az)) - return qa.mult(azquat, elquat) - - -def unwind_quat(quat1, quat2): - if np.sum(np.abs(quat1 - quat2)) > np.sum(np.abs(quat1 + quat2)): - return -quat2 - else: - return quat2 - - -@function_timer -def check_sso(observer, az1, az2, el, sso, angle, tstart, tstop): - """ - Check if a solar system object (SSO) enters within "angle" of - the constant elevation scan. 
- """ - if az2 < az1: - az2 += 360 - naz = max(3, np.int(0.25 * (az2 - az1) * np.cos(np.radians(el)))) - quats = [] - for az in np.linspace(az1, az2, naz): - quats.append(from_angles(az % 360, el)) - vecs = qa.rotate(quats, ZAXIS) - - tstart = to_DJD(tstart) - tstop = to_DJD(tstop) - t1 = tstart - # Test every ten minutes - tstep = 10 / 1440 - while t1 < tstop: - t2 = min(tstop, t1 + tstep) - observer.date = t1 - sso.compute(observer) - sun_az1, sun_el1 = np.degrees(sso.az), np.degrees(sso.alt) - observer.date = t2 - sso.compute(observer) - sun_az2, sun_el2 = np.degrees(sso.az), np.degrees(sso.alt) - sun_quat1 = from_angles(sun_az1, sun_el1) - sun_quat2 = from_angles(sun_az2, sun_el2) - sun_quat2 = unwind_quat(sun_quat1, sun_quat2) - t = np.linspace(0, 1, 10) - sun_quats = qa.slerp(t, [0, 1], [sun_quat1, sun_quat2]) - sun_vecs = qa.rotate(sun_quats, ZAXIS).T - dpmax = np.amax(np.dot(vecs, sun_vecs)) - min_dist = np.degrees(np.arccos(dpmax)) - if min_dist < angle: - return True, DJDtoUNIX(t1) - t1 = t2 - return False, DJDtoUNIX(t2) - - -@function_timer -def attempt_scan_pole( - args, - observer, - visible, - not_visible, - tstart, - fp_radius, - el_max, - el_min, - stop_timestamp, - tstop_cooler, - sun, - moon, - sun_el_max, - fout, - fout_fmt, - ods, - boresight_angle, -): - """Attempt scanning the visible patches in order until success.""" - if args.one_scan_per_day and stop_timestamp > tstop_cooler: - raise RuntimeError("one_scan_per_day is incompatible with cooler cycles") - success = False - for patch in visible: - observer.date = to_DJD(tstart) - if isinstance(patch, CoolerCyclePatch): - # Cycle the cooler - t = add_cooler_cycle( - args, - tstart, - stop_timestamp, - observer, - sun, - moon, - fout, - fout_fmt, - patch, - boresight_angle, - ) - success = True - break - # In pole scheduling, first elevation is just below the patch - el = get_constant_elevation_pole( - args, observer, patch, fp_radius, el_min, el_max, not_visible - ) - if el is None: - continue - pole_success = True - subscan = -1 - t = tstart - while pole_success: - (pole_success, azmins, azmaxs, aztimes, tstop) = scan_patch_pole( - args, - el, - patch, - t, - fp_radius, - observer, - sun, - not_visible, - tstop_cooler, - sun_el_max, - ) - if pole_success: - if success: - # Still the same scan - patch.hits -= 1 - try: - t, subscan = add_scan( - args, - t, - tstop, - aztimes, - azmins, - azmaxs, - False, - fp_radius, - observer, - sun, - moon, - fout, - fout_fmt, - patch, - el, - ods, - boresight_angle, - subscan=subscan, - ) - el += np.radians(args.pole_el_step_deg) - success = True - except TooClose: - success = False - pole_success = False - if success: - break - tstop = t - if args.one_scan_per_day: - day1 = int(to_MJD(tstart)) - while int(to_MJD(tstop)) == day1: - tstop += 60.0 - return success, tstop - - -@function_timer -def get_constant_elevation( - args, - observer, - patch, - rising, - fp_radius, - not_visible, - partial_scan=False, -): - """Determine the elevation at which to scan.""" - log = Logger.get() - - azs, els = patch.corner_coordinates(observer) - - ind_rising = azs < np.pi - ind_setting = azs > np.pi - - el = None - if rising: - if np.sum(ind_rising) == 0: - not_visible.append((patch.name, "No rising corners")) - else: - el = np.amax(els[ind_rising]) + fp_radius - else: - if np.sum(ind_setting) == 0: - not_visible.append((patch.name, "No setting corners")) - else: - el = np.amin(els[ind_setting]) - fp_radius - - if el is not None and patch.elevations is not None: - # Fixed elevation mode. 
Find the first allowed observing elevation. - if rising: - ind = patch.elevations >= el - if np.any(ind): - el = np.amin(patch.elevations[ind]) - elif partial_scan: - # None of the elevations allow a full rising scan, - # Observe at the highest allowed elevation - el = np.amax(patch.elevations) - if el < np.amin(els[ind_rising]) + fp_radius: - not_visible.append( - (patch.name, "Rising patch above maximum elevation") - ) - el = None - else: - not_visible.append((patch.name, "Only partial rising scans available")) - el = None - else: - ind = patch.elevations <= el - if np.any(ind): - el = np.amax(patch.elevations[ind]) - elif partial_scan: - # None of the elevations allow a full setting scan, - # Observe at the lowest allowed elevation - el = np.amin(patch.elevations) - if el > np.amax(els[ind_setting]) + fp_radius: - not_visible.append( - (patch.name, "Setting patch above below elevation") - ) - el = None - else: - not_visible.append((patch.name, "Only partial setting scans available")) - el = None - elif el is not None: - if el < patch.el_min: - if partial_scan and np.any(patch.el_min < els[ind_setting] - fp_radius): - # Partial setting scan - el = patch.el_min - else: - not_visible.append( - ( - patch.name, - "el < el_min ({:.2f} < {:.2f}) rising = {}, partial = {}".format( - el / degree, patch.el_min / degree, rising, partial_scan - ), - ) - ) - el = None - elif el > patch.el_max: - if partial_scan and np.any(patch.el_max > els[ind_rising] + fp_radius): - # Partial rising scan - el = patch.el_max - else: - not_visible.append( - ( - patch.name, - "el > el_max ({:.2f} > {:.2f}) rising = {}, partial = {}".format( - el / degree, patch.el_max / degree, rising, partial_scan - ), - ) - ) - el = None - if el is None: - log.debug("NO ELEVATION: {}".format(not_visible[-1])) - else: - log.debug( - "{} : ELEVATION = {}, rising = {}, partial = {}".format( - patch.name, el / degree, rising, partial_scan - ) - ) - return el - - -@function_timer -def get_constant_elevation_pole( - args, observer, patch, fp_radius, el_min, el_max, not_visible -): - """Determine the elevation at which to scan.""" - log = Logger.get() - _, els = patch.corner_coordinates(observer) - el = np.amin(els) - fp_radius - - if el < el_min: - not_visible.append( - ( - patch.name, - "el < el_min ({:.2f} < {:.2f})".format(el / degree, el_min / degree), - ) - ) - el = None - elif el > el_max: - not_visible.append( - ( - patch.name, - "el > el_max ({:.2f} > {:.2f})".format(el / degree, el_max / degree), - ) - ) - el = None - if el is None: - log.debug("NOT VISIBLE: {}".format(not_visible[-1])) - return el - - -def check_sun_el(t, observer, sun, sun_el_max, args, not_visible): - log = Logger.get() - observer.date = to_DJD(t) - if sun_el_max < np.pi / 2: - sun.compute(observer) - if sun.alt > sun_el_max: - not_visible.append( - ( - patch.name, - "Sun too high {:.2f} rising = {}" - "".format(np.degrees(sun.alt), rising), - ) - ) - log.debug("NOT VISIBLE: {}".format(not_visible[-1])) - return True - return False - - -@function_timer -def scan_patch( - args, - el, - patch, - t, - fp_radius, - observer, - sun, - not_visible, - stop_timestamp, - sun_el_max, - rising, -): - """Attempt scanning the patch specified by corners at elevation el.""" - log = Logger.get() - azmins, azmaxs, aztimes = [], [], [] - if isinstance(patch, HorizontalPatch): - # No corners. 
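For a rising scan this selection reduces to taking the lowest allowed elevation that still clears the top of the patch (the highest rising corner plus the focal-plane radius). A small numpy illustration with made-up numbers, mirroring the ind = patch.elevations >= el branch above:

    import numpy as np

    allowed = np.radians([35.0, 45.0, 55.0])   # patch.elevations
    el_required = np.radians(41.0)             # highest rising corner + fp_radius
    ind = allowed >= el_required
    el = np.amin(allowed[ind]) if np.any(ind) else None
    print(np.degrees(el))                      # 45.0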
Simply scan for the requested time - if rising and not patch.rising: - return False, azmins, azmaxs, aztimes, t - if check_sun_el(t, observer, sun, sun_el_max, args, not_visible): - return False, azmins, azmaxs, aztimes, t - azmins = [patch.az_min] - azmaxs = [patch.az_max] - aztimes = [t] - return True, azmins, azmaxs, aztimes, t + patch.scantime * 60 - # Traditional patch, track each corner - success = False - # and now track when all corners are past the elevation - tstop = t - tstep = 60 - to_cross = np.ones(len(patch.corners), dtype=np.bool) - scan_started = False - while True: - if tstop > stop_timestamp or tstop - t > 86400: - not_visible.append( - (patch.name, "Ran out of time rising = {}".format(rising)) - ) - log.debug("NOT VISIBLE: {}".format(not_visible[-1])) - break - if check_sun_el(tstop, observer, sun, sun_el_max, args, not_visible): - break - azs, els = patch.corner_coordinates(observer) - has_extent = current_extent( - azmins, - azmaxs, - aztimes, - patch.corners, - fp_radius, - el, - azs, - els, - rising, - tstop, - ) - if has_extent: - scan_started = True - - if rising: - good = azs <= np.pi - to_cross[np.logical_and(els > el + fp_radius, good)] = False - else: - good = azs >= np.pi - to_cross[np.logical_and(els < el - fp_radius, good)] = False - - # If we are alternating rising and setting scans, reject patches - # that appear on the wrong side of the sky. - if np.any((np.array(azmins) % (2 * np.pi)) < patch.az_min) or np.any( - (np.array(azmaxs) % (2 * np.pi)) > patch.az_max - ): - success = False - break - - if len(aztimes) > 0 and not np.any(to_cross): - # All corners made it across the CES line. - success = True - # Begin the scan before the patch is at the CES line - if aztimes[0] > t: - aztimes[0] -= tstep - break - - if scan_started and not has_extent: - # The patch went out of view before all corners - # could cross the elevation line. - success = False - break - tstop += tstep - - return success, azmins, azmaxs, aztimes, tstop - - -def unwind_angle(alpha, beta, multiple=2 * np.pi): - """Minimize absolute difference between alpha and beta. - - Minimize the absolute difference by adding a multiple of - 2*pi to beta to match alpha. - """ - while np.abs(alpha - beta - multiple) < np.abs(alpha - beta): - beta += multiple - while np.abs(alpha - beta + multiple) < np.abs(alpha - beta): - beta -= multiple - return beta - - -@function_timer -def scan_patch_pole( - args, - el, - patch, - t, - fp_radius, - observer, - sun, - not_visible, - stop_timestamp, - sun_el_max, -): - """Attempt scanning the patch specified by corners at elevation el. - - The pole scheduling mode will not wait for the patch to drift across. - It simply attempts to scan for the required time: args.pole_ces_time. 
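Much of the azimuth bookkeeping in these scan functions leans on unwind_angle(), defined just above, to keep related azimuths in one compact branch instead of straddling the 0/2*pi wrap. A standalone check of its behaviour (the definition is repeated only so the snippet runs on its own):

    import numpy as np

    def unwind_angle(alpha, beta, multiple=2 * np.pi):
        # Shift beta by whole multiples of 2*pi until it is as close to alpha as possible.
        while np.abs(alpha - beta - multiple) < np.abs(alpha - beta):
            beta += multiple
        while np.abs(alpha - beta + multiple) < np.abs(alpha - beta):
            beta -= multiple
        return beta

    # 350 deg and 10 deg are only 20 deg apart once 10 deg is unwound to 370 deg.
    print(np.degrees(unwind_angle(np.radians(350.0), np.radians(10.0))))  # 370.0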
- """ - log = Logger.get() - success = False - tstop = t - tstep = 60 - azmins, azmaxs, aztimes = [], [], [] - while True: - if tstop - t > args.pole_ces_time_s - 1: - # Succesfully scanned the maximum time - if len(azmins) > 0: - success = True - else: - not_visible.append( - (patch.name, "No overlap at {:.2f}".format(el / degree)) - ) - log.debug("NOT VISIBLE: {}".format(not_visible[-1])) - break - if tstop > stop_timestamp or tstop - t > 86400: - not_visible.append((patch.name, "Ran out of time")) - log.debug("NOT VISIBLE: {}".format(not_visible[-1])) - break - observer.date = to_DJD(tstop) - sun.compute(observer) - if sun.alt > sun_el_max: - not_visible.append( - (patch.name, "Sun too high {:.2f}".format(sun.alt / degree)) - ) - log.debug("NOT VISIBLE: {}".format(not_visible[-1])) - break - azs, els = patch.corner_coordinates(observer) - if np.amax(els) + fp_radius < el: - not_visible.append((patch.name, "Patch below {:.2f}".format(el / degree))) - log.debug("NOT VISIBLE: {}".format(not_visible[-1])) - break - radius = max(np.radians(1), fp_radius) - current_extent_pole( - azmins, azmaxs, aztimes, patch.corners, radius, el, azs, els, tstop - ) - tstop += tstep - return success, azmins, azmaxs, aztimes, tstop - - -@function_timer -def current_extent_pole( - azmins, azmaxs, aztimes, corners, fp_radius, el, azs, els, tstop -): - """Get the azimuthal extent of the patch along elevation el. - - Pole scheduling does not care if the patch is "rising" or "setting". - """ - azs_cross = [] - for i in range(len(corners)): - if np.abs(els[i] - el) < fp_radius: - azs_cross.append(azs[i]) - j = (i + 1) % len(corners) - if np.abs(els[j] - el) < fp_radius: - azs_cross.append(azs[j]) - if np.abs(els[i] - el) < fp_radius or np.abs(els[j] - el) < fp_radius: - continue - elif (els[i] - el) * (els[j] - el) < 0: - # Record the location where a line between the corners - # crosses el. - az1 = azs[i] - az2 = azs[j] - el1 = els[i] - el - el2 = els[j] - el - if az2 - az1 > np.pi: - az1 += 2 * np.pi - if az1 - az2 > np.pi: - az2 += 2 * np.pi - az_cross = (az1 + el1 * (az2 - az1) / (el1 - el2)) % (2 * np.pi) - azs_cross.append(az_cross) - - # Translate the azimuths at multiples of 2pi so they are in a - # compact cluster - - for i in range(1, len(azs_cross)): - azs_cross[i] = unwind_angle(azs_cross[0], azs_cross[i]) - - if len(azs_cross) > 0: - azs_cross = np.sort(azs_cross) - azmin = azs_cross[0] - azmax = azs_cross[-1] - azmax = unwind_angle(azmin, azmax) - if azmax - azmin > np.pi: - # Patch crosses the zero meridian - azmin, azmax = azmax, azmin - if len(azmins) > 0: - azmin = unwind_angle(azmins[-1], azmin) - azmax = unwind_angle(azmaxs[-1], azmax) - azmins.append(azmin) - azmaxs.append(azmax) - aztimes.append(tstop) - return - - -@function_timer -def current_extent( - azmins, azmaxs, aztimes, corners, fp_radius, el, azs, els, rising, t -): - """Get the azimuthal extent of the patch along elevation el. - - Find the pairs of corners that are on opposite sides - of the CES line. Record the crossing azimuth of a - line between the corners. 
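The crossing azimuth used by both current_extent_pole() and current_extent() is a straight linear interpolation in (az, el) between two corners that straddle the elevation line. A worked example with made-up corner positions:

    import numpy as np

    # corner 1 at (az=30 deg, el=42 deg), corner 2 at (az=50 deg, el=48 deg),
    # constant-elevation line at el0 = 45 deg
    az1, az2 = np.radians(30.0), np.radians(50.0)
    el1 = np.radians(42.0 - 45.0)   # corner elevations measured relative to el0
    el2 = np.radians(48.0 - 45.0)
    az_cross = (az1 + el1 * (az2 - az1) / (el1 - el2)) % (2 * np.pi)
    print(np.degrees(az_cross))     # 40.0 deg, halfway between the two corners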
- - """ - azs_cross = [] - for i in range(len(corners)): - j = (i + 1) % len(corners) - for el0 in [el - fp_radius, el, el + fp_radius]: - if (els[i] - el0) * (els[j] - el0) < 0: - # The corners are on opposite sides of the elevation line - az1 = azs[i] - az2 = azs[j] - el1 = els[i] - el0 - el2 = els[j] - el0 - az2 = unwind_angle(az1, az2) - az_cross = (az1 + el1 * (az2 - az1) / (el1 - el2)) % (2 * np.pi) - azs_cross.append(az_cross) - if fp_radius == 0: - break - if len(azs_cross) == 0: - return False - - azs_cross = np.array(azs_cross) - if rising: - good = azs_cross < np.pi - else: - good = azs_cross > np.pi - ngood = np.sum(good) - if ngood == 0: - return False - elif ngood > 1: - azs_cross = azs_cross[good] - - # Unwind the crossing azimuths to minimize the scatter - azs_cross = np.sort(azs_cross) - if azs_cross.size > 1: - ptp0 = azs_cross[-1] - azs_cross[0] - ptps = azs_cross[:-1] + 2 * np.pi - azs_cross[1:] - ptps = np.hstack([ptp0, ptps]) - i = np.argmin(ptps) - azs_cross[:i] += 2 * np.pi - np.roll(azs_cross, i) - - if len(azs_cross) > 1: - azmin = azs_cross[0] % (2 * np.pi) - azmax = azs_cross[-1] % (2 * np.pi) - if azmax - azmin > np.pi: - # Patch crosses the zero meridian - azmin, azmax = azmax, azmin - azmins.append(azmin) - azmaxs.append(azmax) - aztimes.append(t) - return True - return False - - -@function_timer -def add_scan( - args, - tstart, - tstop, - aztimes, - azmins, - azmaxs, - rising, - fp_radius, - observer, - sun, - moon, - fout, - fout_fmt, - patch, - el, - ods, - boresight_angle, - subscan=-1, - partial_scan=False, -): - """Make an entry for a CES in the schedule file.""" - log = Logger.get() - ces_time = tstop - tstart - if ces_time > args.ces_max_time_s: # and not args.pole_mode: - nsub = np.int(np.ceil(ces_time / args.ces_max_time_s)) - ces_time /= nsub - aztimes = np.array(aztimes) - azmins = np.array(azmins) - azmaxs = np.array(azmaxs) - azmaxs[0] = unwind_angle(azmins[0], azmaxs[0]) - for i in range(1, azmins.size): - azmins[i] = unwind_angle(azmins[0], azmins[i]) - azmaxs[i] = unwind_angle(azmaxs[0], azmaxs[i]) - azmaxs[i] = unwind_angle(azmins[i], azmaxs[i]) - # for i in range(azmins.size-1): - # if azmins[i+1] - azmins[i] > np.pi: - # azmins[i+1], azmaxs[i+1] = azmins[i+1]-2*np.pi, azmaxs[i+1]-2*np.pi - # if azmins[i+1] - azmins[i] < np.pi: - # azmins[i+1], azmaxs[i+1] = azmins[i+1]+2*np.pi, azmaxs[i+1]+2*np.pi - rising_string = "R" if rising else "S" - t1 = np.amin(aztimes) - entries = [] - while t1 < tstop - 1: - subscan += 1 - if args.operational_days: - # See if adding this scan would exceed the number of desired - # operational days - if subscan == 0: - tz = args.timezone / 24 - od = int(to_MJD(tstart) + tz) - ods.add(od) - if len(ods) > args.operational_days: - # Prevent adding further entries to the schedule once - # the number of operational days is full - break - t2 = min(t1 + ces_time, tstop) - if tstop - t2 < ces_time / 10: - # Append leftover scan to the last full subscan - t2 = tstop - ind = np.logical_and(aztimes >= t1, aztimes <= t2) - if np.all(aztimes > t2): - ind[0] = True - if np.all(aztimes < t1): - ind[-1] = True - if azmins[ind][0] < azmaxs[ind][0]: - azmin = np.amin(azmins[ind]) - azmax = np.amax(azmaxs[ind]) - else: - # we are, scan from the maximum to the minimum - azmin = np.amax(azmins[ind]) - azmax = np.amin(azmaxs[ind]) - if args.scan_margin > 0: - # Add a random error to the scan parameters to smooth out - # caustics in the hit map - delta_az = azmax - unwind_angle(azmax, azmin) - sub_az = delta_az * 
np.abs(np.random.randn()) * args.scan_margin * 0.5 - add_az = delta_az * np.abs(np.random.randn()) * args.scan_margin * 0.5 - azmin = (azmin - sub_az) % (2 * np.pi) - azmax = (azmax + add_az) % (2 * np.pi) - if t2 == tstop: - delta_t = t2 - t1 # tstop - tstart - add_t = delta_t * np.abs(np.random.randn()) * args.scan_margin - t2 += add_t - # Add the focal plane radius to the scan width - fp_radius_eff = fp_radius / np.cos(el) - azmin = (azmin - fp_radius_eff) % (2 * np.pi) / degree - azmax = (azmax + fp_radius_eff) % (2 * np.pi) / degree - # Get the Sun and Moon locations at the beginning and end - observer.date = to_DJD(t1) - sun.compute(observer) - moon.compute(observer) - sun_az1, sun_el1 = sun.az / degree, sun.alt / degree - moon_az1, moon_el1 = moon.az / degree, moon.alt / degree - moon_phase1 = moon.phase - # It is possible that the Sun or the Moon gets too close to the - # scan, even if they are far enough from the actual patch. - sun_too_close, sun_time = check_sso( - observer, - azmin, - azmax, - el / degree, - sun, - args.sun_avoidance_angle_deg, - t1, - t2, - ) - moon_too_close, moon_time = check_sso( - observer, - azmin, - azmax, - el / degree, - moon, - args.moon_avoidance_angle_deg, - t1, - t2, - ) - - if ( - (isinstance(patch, HorizontalPatch) or partial_scan) - and sun_time > tstart + 1 - and moon_time > tstart + 1 - ): - # Simply terminate the scan when the Sun or the Moon is too close - t2 = min(sun_time, moon_time) - if sun_too_close or moon_too_close: - tstop = t2 - if t1 == t2: - break - else: - # For regular patches, this is a failure condition - if sun_too_close: - log.debug("Sun too close") - raise SunTooClose - if moon_too_close: - log.debug("Moon too close") - raise MoonTooClose - - observer.date = to_DJD(t2) - sun.compute(observer) - moon.compute(observer) - sun_az2, sun_el2 = sun.az / degree, sun.alt / degree - moon_az2, moon_el2 = moon.az / degree, moon.alt / degree - moon_phase2 = moon.phase - # Create an entry in the schedule - entry = fout_fmt.format( - to_UTC(t1), - to_UTC(t2), - to_MJD(t1), - to_MJD(t2), - boresight_angle, - patch.name, - (azmin + args.boresight_offset_az_deg) % 360, - (azmax + args.boresight_offset_az_deg) % 360, - (el / degree + args.boresight_offset_el_deg), - rising_string, - sun_el1, - sun_az1, - sun_el2, - sun_az2, - moon_el1, - moon_az1, - moon_el2, - moon_az2, - 0.005 * (moon_phase1 + moon_phase2), - -1 - patch.partial_hits if partial_scan else patch.hits, - subscan, - ) - entries.append(entry) - if partial_scan: - # Never append more than one partial scan before - # checking if full scans are again available - tstop = t2 - break - t1 = t2 + args.gap_small_s - - # Write the entries - for entry in entries: - log.debug(entry) - fout.write(entry) - fout.flush() - - if not partial_scan: - # Only update the patch counters when performing full scans - patch.hits += 1 - patch.time += ces_time - if rising or args.pole_mode: - patch.rising_hits += 1 - patch.rising_time += ces_time - if not rising or args.pole_mode: - patch.setting_hits += 1 - patch.setting_time += ces_time - # The oscillate method will slightly shift the patch to - # blur the boundaries - patch.oscillate() - # Advance the time - tstop += args.gap_s - else: - patch.partial_hits += 1 - # Advance the time - tstop += args.gap_small_s - - return tstop, subscan - - -@function_timer -def add_cooler_cycle( - args, tstart, tstop, observer, sun, moon, fout, fout_fmt, patch, boresight_angle -): - """Make an entry for a cooler cycle in the schedule file.""" - log = Logger.get() - az = 
patch.az - el = patch.el - t1 = tstart - t2 = t1 + patch.cycle_time - - observer.date = to_DJD(t1) - sun.compute(observer) - moon.compute(observer) - sun_az1, sun_el1 = sun.az / degree, sun.alt / degree - moon_az1, moon_el1 = moon.az / degree, moon.alt / degree - moon_phase1 = moon.phase - - observer.date = to_DJD(t2) - sun.compute(observer) - moon.compute(observer) - sun_az2, sun_el2 = sun.az / degree, sun.alt / degree - moon_az2, moon_el2 = moon.az / degree, moon.alt / degree - moon_phase2 = moon.phase - - # Create an entry in the schedule - entry = fout_fmt.format( - to_UTC(t1), - to_UTC(t2), - to_MJD(t1), - to_MJD(t2), - boresight_angle, - patch.name, - az, - az, - el, - "R", - sun_el1, - sun_az1, - sun_el2, - sun_az2, - moon_el1, - moon_az1, - moon_el2, - moon_az2, - 0.005 * (moon_phase1 + moon_phase2), - patch.hits, - 0, - ) - - # Write the entry - log.debug(entry) - fout.write(entry) - fout.flush() - - patch.last_cycle_end = t2 - patch.hits += 1 - patch.time += t2 - t1 - patch.rising_hits += 1 - patch.rising_time += t2 - t1 - patch.setting_hits += 1 - patch.setting_time += t2 - t1 - - return t2 - - -@function_timer -def get_visible(args, observer, sun, moon, patches, el_min): - """Determine which patches are visible.""" - log = Logger.get() - visible = [] - not_visible = [] - for patch in patches: - # Reject all patches that have even one corner too close - # to the Sun or the Moon and patches that are completely - # below the horizon - in_view, msg = patch.visible( - el_min, - observer, - sun, - moon, - args.sun_avoidance_angle_deg, - args.moon_avoidance_angle_deg, - not (args.allow_partial_scans or args.delay_sso_check), - ) - if not in_view: - not_visible.append((patch.name, msg)) - - if in_view: - if not (args.allow_partial_scans or args.delay_sso_check): - # Finally, check that the Sun or the Moon are not - # inside the patch - if args.moon_avoidance_angle_deg >= 0 and patch.in_patch(moon): - not_visible.append((patch.name, "Moon in patch")) - in_view = False - if args.sun_avoidance_angle_deg >= 0 and patch.in_patch(sun): - not_visible.append((patch.name, "Sun in patch")) - in_view = False - if in_view: - visible.append(patch) - log.debug( - "In view: {}. el = {:.2f}..{:.2f}".format( - patch.name, np.degrees(patch.el_min), np.degrees(patch.el_max) - ) - ) - else: - log.debug("NOT VISIBLE: {}".format(not_visible[-1])) - return visible, not_visible - - -@function_timer -def get_boresight_angle(args, t, t0=0): - """Return the scheduled boresight angle at time t.""" - if args.boresight_angle_step_deg == 0 or args.boresight_angle_time_min == 0: - return 0 - - istep = int((t - t0) / 60 / args.boresight_angle_time_min) - return (args.boresight_angle_step_deg * istep) % 360 - - -@function_timer -def apply_blockouts(args, t_in): - """Check if `t` is inside a blockout period. - If so, advance it to the next unblocked time. - - Returns: The (new) time and a boolean flag indicating if - the time was blocked and subsequently advanced. 
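A self-contained sketch of how one such block-out range is interpreted, assuming the year-free MM/DD-MM/DD form and that the current year applies (the full implementation that follows also handles explicit years and a block-out that began late in the previous year):

    from datetime import datetime, timezone, timedelta

    block_out = "12/20-01/05"       # one --block-out value, no explicit years
    t = datetime(2020, 12, 25, tzinfo=timezone.utc).timestamp()

    current = datetime.fromtimestamp(t, timezone.utc)
    start_s, stop_s = block_out.split("-")
    m1, d1 = (int(x) for x in start_s.split("/"))
    m2, d2 = (int(x) for x in stop_s.split("/"))
    start = datetime(current.year, m1, d1, tzinfo=timezone.utc)
    stop = datetime(current.year, m2, d2, tzinfo=timezone.utc)
    if stop < start:                # the block out wraps over the new year
        stop = stop.replace(year=start.year + 1)
    stop += timedelta(days=1)       # the end day is inclusive
    if start < current < stop:
        t = stop.timestamp()        # advance past the block out (2021-01-06 here)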
- """ - if not args.block_out: - return t_in, False - log = Logger.get() - t = t_in - blocked = False - for block_out in args.block_out: - current = datetime.fromtimestamp(t, timezone.utc) - start, stop = block_out.split("-") - try: - # If the block out specifies the year then no extra logic is needed - start_year, start_month, start_day = start.split("/") - start = datetime( - int(start_year), - int(start_month), - int(start_day), - 0, - 0, - 0, - 0, - timezone.utc, - ) - except ValueError: - # No year given so must figure out which year is the right one - start_month, start_day = start.split("/") - start = datetime( - current.year, int(start_month), int(start_day), 0, 0, 0, 0, timezone.utc - ) - if start > current: - # This year's block out is still in the future but the past - # year's blockout may still be active - start = start.replace(year=start.year - 1) - try: - # If the block out specifies the year then no extra logic is needed - stop_year, stop_month, stop_day = stop.split("/") - stop = datetime( - int(stop_year), int(stop_month), int(stop_day), 0, 0, 0, 0, timezone.utc - ) - except ValueError: - # No year given so must figure out which year is the right one - stop_month, stop_day = stop.split("/") - stop = datetime( - start.year, int(stop_month), int(stop_day), 0, 0, 0, 0, timezone.utc - ) - if stop < start: - # The block out ends on a different year than it starts - stop = stop.replace(year=start.year + 1) - # advance the stop time by one day to make the definition inclusive - stop += timedelta(days=1) - if start < current and current < stop: - # `t` is inside the block out. - # Advance to the end of the block out. - log.info( - "{} is inside block out {}, advancing to {}".format( - current, block_out, stop - ) - ) - t = stop.timestamp() - blocked = True - return t, blocked - - -def advance_time(t, time_step, offset=0): - """Advance the time ensuring that the sampling falls - over same discrete times (multiples of time_step) - regardless of the current value of t. 
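With offset = 0 this snaps t forward to the next multiple of time_step, so repeated failed acquisitions always retry on the same time grid rather than drifting. A quick check of the arithmetic (the body is reproduced here so the snippet runs standalone):

    def advance_time(t, time_step, offset=0):
        return offset + ((t - offset) // time_step + 1) * time_step

    print(advance_time(1234.0, 600.0))   # 1800.0
    print(advance_time(1800.0, 600.0))   # 2400.0 -- the result always moves strictly forward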
- """ - return offset + ((t - offset) // time_step + 1) * time_step - - -@function_timer -def build_schedule(args, start_timestamp, stop_timestamp, patches, observer, sun, moon): - log = Logger.get() - - sun_el_max = args.sun_el_max_deg * degree - el_min = args.el_min_deg - el_max = args.el_max_deg - if args.elevations_deg is None: - el_min = args.el_min_deg - el_max = args.el_max_deg - else: - # Override the elevation limits - el_min = 90 - el_max = 0 - for el in args.elevations_deg.split(","): - el = np.float(el) - el_min = min(el * 0.9, el_min) - el_max = max(el * 1.1, el_max) - el_min *= degree - el_max *= degree - fp_radius = args.fp_radius_deg * degree - - fname_out = args.out - dir_out = os.path.dirname(fname_out) - if dir_out: - log.info("Creating '{}'".format(dir_out)) - os.makedirs(dir_out, exist_ok=True) - fout = open(fname_out, "w") - - fout.write( - "#{:15} {:15} {:>15} {:>15} {:>15}\n".format( - "Site", "Telescope", "Latitude [deg]", "Longitude [deg]", "Elevation [m]" - ) - ) - fout.write( - " {:15} {:15} {:15.3f} {:15.3f} {:15.1f}\n".format( - args.site_name, - args.telescope, - np.degrees(observer.lat), - np.degrees(observer.lon), - observer.elevation, - ) - ) - - fout_fmt0 = ( - "#{:>20} {:>20} {:>14} {:>14} {:>8} " - "{:35} {:>8} {:>8} {:>8} {:>5} " - "{:>8} {:>8} {:>8} {:>8} " - "{:>8} {:>8} {:>8} {:>8} {:>5} " - "{:>5} {:>3}\n" - ) - - fout_fmt = ( - " {:20} {:20} {:14.6f} {:14.6f} {:8.2f} " - "{:35} {:8.2f} {:8.2f} {:8.2f} {:5} " - "{:8.2f} {:8.2f} {:8.2f} {:8.2f} " - "{:8.2f} {:8.2f} {:8.2f} {:8.2f} {:5.2f} " - "{:5} {:3}\n" - ) - - fout.write( - fout_fmt0.format( - "Start time UTC", - "Stop time UTC", - "Start MJD", - "Stop MJD", - "Rotation", - "Patch name", - "Az min", - "Az max", - "El", - "R/S", - "Sun el1", - "Sun az1", - "Sun el2", - "Sun az2", - "Moon el1", - "Moon az1", - "Moon el2", - "Moon az2", - "Phase", - "Pass", - "Sub", - ) - ) - - # Operational days - ods = set() - - t = start_timestamp - last_successful = t - while True: - t, blocked = apply_blockouts(args, t) - boresight_angle = get_boresight_angle(args, t) - if t > stop_timestamp: - break - if t - last_successful > 86400 or blocked: - # A long time has passed since the last successfully - # scheduled scan. - # Reset the individual patch az and el limits - for patch in patches: - patch.reset() - if blocked: - last_successful = t - else: - # Only try this once for every day. Swapping - # `t` <-> `last_successful` means that we will not trigger - # this branch again without scheduling a succesful scan - log.debug( - "Resetting patches and returning to the last successful " - "scan: {}".format(to_UTC(last_successful)) - ) - t, last_successful = last_successful, t - - # Determine which patches are observable at time t. - - log.debug("t = {}".format(to_UTC(t))) - # Determine which patches are visible - observer.date = to_DJD(t) - sun.compute(observer) - if sun.alt > sun_el_max: - log.debug( - "Sun elevation is {:.2f} > {:.2f}. 
Moving on.".format( - sun.alt / degree, sun_el_max / degree - ) - ) - t = advance_time(t, args.time_step_s) - continue - moon.compute(observer) - - visible, not_visible = get_visible(args, observer, sun, moon, patches, el_min) - - if len(visible) == 0: - log.debug("No patches visible at {}: {}".format(to_UTC(t), not_visible)) - t = advance_time(t, args.time_step_s) - continue - - # Determine if a cooler cycle sets a limit for observing - tstop_cooler = stop_timestamp - for patch in patches: - if isinstance(patch, CoolerCyclePatch): - ttest = patch.last_cycle_end + patch.hold_time_max - if ttest < tstop_cooler: - tstop_cooler = ttest - - # Order the targets by priority and attempt to observe with both - # a rising and setting scans until we find one that can be - # succesfully scanned. - # If the criteria are not met, advance the time by a step - # and try again - - prioritize(args, visible) - - if args.pole_mode: - success, t = attempt_scan_pole( - args, - observer, - visible, - not_visible, - t, - fp_radius, - el_max, - el_min, - stop_timestamp, - tstop_cooler, - sun, - moon, - sun_el_max, - fout, - fout_fmt, - ods, - boresight_angle, - ) - else: - success, t = attempt_scan( - args, - observer, - visible, - not_visible, - t, - fp_radius, - stop_timestamp, - tstop_cooler, - sun, - moon, - sun_el_max, - fout, - fout_fmt, - ods, - boresight_angle, - ) - - if args.operational_days and len(ods) > args.operational_days: - break - - if not success: - log.debug( - "No patches could be scanned at {}: {}".format(to_UTC(t), not_visible) - ) - t = advance_time(t, args.time_step_s) - else: - last_successful = t - - fout.close() - return - - -def parse_args(): - parser = argparse.ArgumentParser( - description="Generate ground observation schedule.", fromfile_prefix_chars="@" - ) - - parser.add_argument( - "--site-name", required=False, default="LBL", help="Observing site name" - ) - parser.add_argument( - "--telescope", - required=False, - default="Telescope", - help="Observing telescope name", - ) - parser.add_argument( - "--site-lon", - required=False, - default="-122.247", - help="Observing site longitude [PyEphem string]", - ) - parser.add_argument( - "--site-lat", - required=False, - default="37.876", - help="Observing site latitude [PyEphem string]", - ) - parser.add_argument( - "--site-alt", - required=False, - default=100, - type=np.float, - help="Observing site altitude [meters]", - ) - parser.add_argument( - "--scan-margin", - required=False, - default=0, - type=np.float, - help="Random fractional margin [0..1] added to the " - "scans to smooth out edge effects", - ) - parser.add_argument( - "--ra-period", - required=False, - default=10, - type=np.int, - help="Period of patch position oscillations in RA [visits]", - ) - parser.add_argument( - "--ra-amplitude-deg", - required=False, - default=0, - type=np.float, - help="Amplitude of patch position oscillations in RA [deg]", - ) - parser.add_argument( - "--dec-period", - required=False, - default=10, - type=np.int, - help="Period of patch position oscillations in DEC [visits]", - ) - parser.add_argument( - "--dec-amplitude-deg", - required=False, - default=0, - type=np.float, - help="Amplitude of patch position oscillations in DEC [deg]", - ) - parser.add_argument( - "--elevation-penalty-limit", - required=False, - default=0, - type=np.float, - help="Assign a penalty to observing elevations below this limit [degrees]", - ) - parser.add_argument( - "--elevation-penalty-power", - required=False, - default=2, - type=np.float, - help="Power in the 
elevation penalty function [> 0] ", - ) - parser.add_argument( - "--equalize-area", - required=False, - default=False, - action="store_true", - help="Adjust priorities to account for patch area", - ) - parser.add_argument( - "--equalize-time", - required=False, - action="store_true", - dest="equalize_time", - help="Modulate priority by integration time.", - ) - parser.add_argument( - "--equalize-scans", - required=False, - action="store_false", - dest="equalize_time", - help="Modulate priority by number of scans.", - ) - parser.set_defaults(equalize_time=False) - parser.add_argument( - "--patch", - required=True, - action="append", - help="Patch definition: " - "name,weight,lon1,lat1,lon2,lat2 ... " - "OR name,weight,lon,lat,width", - ) - parser.add_argument( - "--patch-coord", - required=False, - default="C", - help="Sky patch coordinate system [C,E,G]", - ) - parser.add_argument( - "--el-min-deg", - required=False, - default=30, - type=np.float, - help="Minimum elevation for a CES", - ) - parser.add_argument( - "--el-max-deg", - required=False, - default=80, - type=np.float, - help="Maximum elevation for a CES", - ) - parser.add_argument( - "--el-step-deg", - required=False, - default=0, - type=np.float, - help="Optional step to apply to minimum elevation", - ) - parser.add_argument( - "--alternate", - required=False, - default=False, - action="store_true", - help="Alternate between rising and setting scans", - ) - parser.add_argument( - "--fp-radius-deg", - required=False, - default=0, - type=np.float, - help="Focal plane radius [deg]", - ) - parser.add_argument( - "--sun-avoidance-angle-deg", - required=False, - default=30, - type=np.float, - help="Minimum distance between the Sun and the bore sight [deg]", - ) - parser.add_argument( - "--moon-avoidance-angle-deg", - required=False, - default=20, - type=np.float, - help="Minimum distance between the Moon and the bore sight [deg]", - ) - parser.add_argument( - "--sun-el-max-deg", - required=False, - default=90, - type=np.float, - help="Maximum allowed sun elevation [deg]", - ) - parser.add_argument( - "--boresight-angle-step-deg", - required=False, - default=0, - type=np.float, - help="Boresight rotation step size [deg]", - ) - parser.add_argument( - "--boresight-angle-time-min", - required=False, - default=0, - type=np.float, - help="Boresight rotation step interval [minutes]", - ) - parser.add_argument( - "--start", - required=False, - default="2000-01-01 00:00:00", - help="UTC start time of the schedule", - ) - parser.add_argument("--stop", required=False, help="UTC stop time of the schedule") - parser.add_argument( - "--block-out", - required=False, - action="append", - help="Range of UTC calendar days to omit from scheduling in format " - "START_MONTH/START_DAY-END_MONTH/END_DAY or " - "START_YEAR/START_MONTH/START_DAY-END_YEAR/END_MONTH/END_DAY " - "where YEAR, MONTH and DAY are integers. 
END days are inclusive", - ) - parser.add_argument( - "--operational-days", - required=False, - type=np.int, - help="Number of operational days to schedule (empty days do not count)", - ) - parser.add_argument( - "--timezone", - required=False, - type=np.int, - default=0, - help="Offset to apply to MJD to separate operational days [hours]", - ) - parser.add_argument( - "--gap-s", - required=False, - default=100, - type=np.float, - help="Gap between CES:es [seconds]", - ) - parser.add_argument( - "--gap-small-s", - required=False, - default=10, - type=np.float, - help="Gap between split CES:es [seconds]", - ) - parser.add_argument( - "--time-step-s", - required=False, - default=600, - type=np.float, - help="Time step after failed target acquisition [seconds]", - ) - parser.add_argument( - "--one-scan-per-day", - required=False, - default=False, - action="store_true", - help="Pad each operational day to have only one CES", - ) - parser.add_argument( - "--ces-max-time-s", - required=False, - default=900, - type=np.float, - help="Maximum length of a CES [seconds]", - ) - parser.add_argument( - "--debug", - required=False, - default=False, - action="store_true", - help="Write diagnostics, including patch plots.", - ) - parser.add_argument( - "--polmap", - required=False, - help="Include polarization from map in the plotted patches when --debug", - ) - parser.add_argument( - "--pol-min", - required=False, - type=np.float, - help="Lower plotting range for polarization map", - ) - parser.add_argument( - "--pol-max", - required=False, - type=np.float, - help="Upper plotting range for polarization map", - ) - parser.add_argument( - "--delay-sso-check", - required=False, - default=False, - action="store_true", - help="Only apply SSO check during simulated scan.", - ) - parser.add_argument( - "--pole-mode", - required=False, - default=False, - action="store_true", - help="Pole scheduling mode (no drift scan)", - ) - parser.add_argument( - "--pole-el-step-deg", - required=False, - default=0.25, - type=np.float, - help="Elevation step in pole scheduling mode [deg]", - ) - parser.add_argument( - "--pole-ces-time-s", - required=False, - default=3000, - type=np.float, - help="Time to scan at constant elevation in pole mode", - ) - parser.add_argument( - "--out", required=False, default="schedule.txt", help="Output filename" - ) - parser.add_argument( - "--boresight-offset-el-deg", - required=False, - default=0, - type=np.float, - help="Optional offset added to every observing elevation", - ) - parser.add_argument( - "--boresight-offset-az-deg", - required=False, - default=0, - type=np.float, - help="Optional offset added to every observing azimuth", - ) - parser.add_argument( - "--elevations-deg", - required=False, - help="Fixed observing elevations in a comma-separated list.", - ) - parser.add_argument( - "--partial-scans", - required=False, - action="store_true", - dest="allow_partial_scans", - help="Allow partials scans when full scans are not available.", - ) - parser.add_argument( - "--no-partial-scans", - required=False, - action="store_false", - dest="allow_partial_scans", - help="Allow partials scans when full scans are not available.", - ) - parser.set_defaults(allow_partial_scans=False) - - try: - args = parser.parse_args() - except SystemExit: - sys.exit(0) - - if args.operational_days is None and args.stop is None: - raise RuntimeError("You must provide --stop or --operational-days") - - stop_time = None - if args.start.endswith("Z"): - start_time = dateutil.parser.parse(args.start) - if 
args.stop is not None: - if not args.stop.endswith("Z"): - raise RuntimeError("Either both or neither times must be given in UTC") - stop_time = dateutil.parser.parse(args.stop) - else: - if args.timezone < 0: - tz = "-{:02}00".format(-args.timezone) - else: - tz = "+{:02}00".format(args.timezone) - start_time = dateutil.parser.parse(args.start + tz) - if args.stop is not None: - if args.stop.endswith("Z"): - raise RuntimeError("Either both or neither times must be given in UTC") - stop_time = dateutil.parser.parse(args.stop + tz) - - start_timestamp = start_time.timestamp() - if stop_time is None: - # Keep scheduling until the desired number of operational days is full. - stop_timestamp = 2 ** 60 - else: - stop_timestamp = stop_time.timestamp() - return args, start_timestamp, stop_timestamp - - -@function_timer -def parse_patch_sso(args, parts): - log = Logger.get() - log.info("SSO format") - name = parts[0] - weight = float(parts[2]) - radius = float(parts[3]) * degree - patch = SSOPatch(name, weight, radius, elevations=args.elevations_deg) - return patch - - -@function_timer -def parse_patch_cooler(args, parts, last_cycle_end): - log = Logger.get() - log.info("Cooler cycle format") - weight = float(parts[2]) - power = float(parts[3]) - hold_time_min = float(parts[4]) # in hours - hold_time_max = float(parts[5]) # in hours - cycle_time = float(parts[6]) # in hours - az = float(parts[7]) - el = float(parts[8]) - patch = CoolerCyclePatch( - weight, power, hold_time_min, hold_time_max, cycle_time, az, el, last_cycle_end - ) - return patch - - -@function_timer -def parse_patch_horizontal(args, parts): - """Parse an explicit patch definition line""" - log = Logger.get() - corners = [] - log.info("Horizontal format") - name = parts[0] - weight = float(parts[2]) - azmin = float(parts[3]) * degree - azmax = float(parts[4]) * degree - el = float(parts[5]) * degree - scantime = float(parts[6]) # minutes - patch = HorizontalPatch(name, weight, azmin, azmax, el, scantime) - return patch - - -@function_timer -def parse_patch_explicit(args, parts): - """Parse an explicit patch definition line""" - log = Logger.get() - corners = [] - log.info("Explicit-corners format: ") - name = parts[0] - i = 2 - definition = "" - while i + 1 < len(parts): - definition += " ({}, {})".format(parts[i], parts[i + 1]) - try: - # Assume coordinates in degrees - lon = float(parts[i]) * degree - lat = float(parts[i + 1]) * degree - except ValueError: - # Failed simple interpreration, assume pyEphem strings - lon = parts[i] - lat = parts[i + 1] - i += 2 - if args.patch_coord == "C": - corner = ephem.Equatorial(lon, lat, epoch="2000") - elif args.patch_coord == "E": - corner = ephem.Ecliptic(lon, lat, epoch="2000") - elif args.patch_coord == "G": - corner = ephem.Galactic(lon, lat, epoch="2000") - else: - raise RuntimeError("Unknown coordinate system: {}".format(args.patch_coord)) - corner = ephem.Equatorial(corner) - if corner.dec > 80 * degree or corner.dec < -80 * degree: - raise RuntimeError( - "{} has at least one circumpolar corner. 
" - "Circumpolar targeting not yet implemented".format(name) - ) - patch_corner = ephem.FixedBody() - patch_corner._ra = corner.ra - patch_corner._dec = corner.dec - corners.append(patch_corner) - log.info(definition) - return corners - - -@function_timer -def parse_patch_rectangular(args, parts): - """Parse a rectangular patch definition line""" - log = Logger.get() - corners = [] - log.info("Rectangular format") - name = parts[0] - try: - # Assume coordinates in degrees - lon_min = float(parts[2]) * degree - lat_max = float(parts[3]) * degree - lon_max = float(parts[4]) * degree - lat_min = float(parts[5]) * degree - except ValueError: - # Failed simple interpreration, assume pyEphem strings - lon_min = parts[2] - lat_max = parts[3] - lon_max = parts[4] - lat_min = parts[5] - if args.patch_coord == "C": - coordconv = ephem.Equatorial - elif args.patch_coord == "E": - coordconv = ephem.Ecliptic - elif args.patch_coord == "G": - coordconv = ephem.Galactic - else: - raise RuntimeError("Unknown coordinate system: {}".format(args.patch_coord)) - - nw_corner = coordconv(lon_min, lat_max, epoch="2000") - ne_corner = coordconv(lon_max, lat_max, epoch="2000") - se_corner = coordconv(lon_max, lat_min, epoch="2000") - sw_corner = coordconv(lon_min, lat_min, epoch="2000") - - lon_max = unwind_angle(lon_min, lon_max) - if lon_min < lon_max: - delta_lon = lon_max - lon_min - else: - delta_lon = lon_min - lon_max - area = (np.cos(np.pi / 2 - lat_max) - np.cos(np.pi / 2 - lat_min)) * delta_lon - - corners_temp = [] - add_side(nw_corner, ne_corner, corners_temp, coordconv) - add_side(ne_corner, se_corner, corners_temp, coordconv) - add_side(se_corner, sw_corner, corners_temp, coordconv) - add_side(sw_corner, nw_corner, corners_temp, coordconv) - - for corner in corners_temp: - if corner.dec > 80 * degree or corner.dec < -80 * degree: - raise RuntimeError( - "{} has at least one circumpolar corner. " - "Circumpolar targeting not yet implemented".format(name) - ) - patch_corner = ephem.FixedBody() - patch_corner._ra = corner.ra - patch_corner._dec = corner.dec - corners.append(patch_corner) - return corners, area - - -@function_timer -def add_side(corner1, corner2, corners_temp, coordconv): - """Add one side of a rectangle. - - Add one side of a rectangle with enough interpolation points. 
- """ - step = np.radians(1) - corners_temp.append(ephem.Equatorial(corner1)) - lon1 = corner1.ra - lon2 = corner2.ra - lat1 = corner1.dec - lat2 = corner2.dec - if lon1 == lon2: - lon = lon1 - if lat1 < lat2: - lat_step = step - else: - lat_step = -step - for lat in np.arange(lat1, lat2, lat_step): - corners_temp.append(ephem.Equatorial(coordconv(lon, lat, epoch="2000"))) - elif lat1 == lat2: - lat = lat1 - if lon1 < lon2: - lon_step = step / np.cos(lat) - else: - lon_step = -step / np.cos(lat) - for lon in np.arange(lon1, lon2, lon_step): - corners_temp.append(ephem.Equatorial(coordconv(lon, lat, epoch="2000"))) - else: - raise RuntimeError("add_side: both latitude and longitude change") - return - - -@function_timer -def parse_patch_center_and_width(args, parts): - """Parse center-and-width patch definition""" - log = Logger.get() - corners = [] - log.info("Center-and-width format") - try: - # Assume coordinates in degrees - lon = float(parts[2]) * degree - lat = float(parts[3]) * degree - except ValueError: - # Failed simple interpreration, assume pyEphem strings - lon = parts[2] - lat = parts[3] - width = float(parts[4]) * degree - if args.patch_coord == "C": - center = ephem.Equatorial(lon, lat, epoch="2000") - elif args.patch_coord == "E": - center = ephem.Ecliptic(lon, lat, epoch="2000") - elif args.patch_coord == "G": - center = ephem.Galactic(lon, lat, epoch="2000") - else: - raise RuntimeError("Unknown coordinate system: {}".format(args.patch_coord)) - center = ephem.Equatorial(center) - # Synthesize 8 corners around the center - phi = center.ra - theta = center.dec - r = width / 2 - ncorner = 8 - angstep = 2 * np.pi / ncorner - for icorner in range(ncorner): - ang = angstep * icorner - delta_theta = np.cos(ang) * r - delta_phi = np.sin(ang) * r / np.cos(theta + delta_theta) - patch_corner = ephem.FixedBody() - patch_corner._ra = phi + delta_phi - patch_corner._dec = theta + delta_theta - corners.append(patch_corner) - return corners - - -@function_timer -def parse_patches(args, observer, sun, moon, start_timestamp, stop_timestamp): - # Parse the patch definitions - log = Logger.get() - patches = [] - total_weight = 0 - for patch_def in args.patch: - parts = patch_def.split(",") - name = parts[0] - log.info('Adding patch "{}"'.format(name)) - if parts[1].upper() == "HORIZONTAL": - patch = parse_patch_horizontal(args, parts) - elif parts[1].upper() == "SSO": - patch = parse_patch_sso(args, parts) - elif parts[1].upper() == "COOLER": - patch = parse_patch_cooler(args, parts, start_timestamp) - else: - weight = float(parts[1]) - if np.isnan(weight): - raise RuntimeError("Patch has NaN priority: {}".format(patch_def)) - if weight == 0: - raise RuntimeError("Patch has zero priority: {}".format(patch_def)) - if len(parts[2:]) == 3: - corners = parse_patch_center_and_width(args, parts) - area = None - elif len(parts[2:]) == 4: - corners, area = parse_patch_rectangular(args, parts) - else: - corners = parse_patch_explicit(args, parts) - area = None - patch = Patch( - name, - weight, - corners, - el_min=args.el_min_deg * degree, - el_max=args.el_max_deg * degree, - el_step=args.el_step_deg * degree, - alternate=args.alternate, - site_lat=observer.lat, - area=area, - ra_period=args.ra_period, - ra_amplitude=args.ra_amplitude_deg, - dec_period=args.dec_period, - dec_amplitude=args.dec_amplitude_deg, - elevations=args.elevations_deg, - ) - if args.equalize_area or args.debug: - area = patch.get_area(observer, nside=32, equalize=args.equalize_area) - total_weight += patch.weight - 
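parse_patch_rectangular() above records the patch area with the exact solid-angle formula for a lon/lat rectangle, (cos(pi/2 - lat_max) - cos(pi/2 - lat_min)) * delta_lon. A numeric example with assumed corners; note that this value is in steradians, while the hit-map estimate in Patch.get_area() is a sky fraction, so the two normalizations differ:

    import numpy as np

    lon_min, lon_max = np.radians(40.0), np.radians(60.0)
    lat_min, lat_max = np.radians(-10.0), np.radians(10.0)
    delta_lon = lon_max - lon_min
    area = (np.cos(np.pi / 2 - lat_max) - np.cos(np.pi / 2 - lat_min)) * delta_lon
    print(area)                # ~0.121 sr
    print(area / (4 * np.pi))  # ~0.0096, roughly one percent of the sky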
patches.append(patch) - - log.debug( - "Highest possible observing elevation: {:.2f} degrees." - " Sky fraction = {:.4f}".format(patches[-1].el_max0 / degree, patch._area) - ) - - if args.debug: - import matplotlib.pyplot as plt - - polmap = None - if args.polmap: - polmap = hp.read_map(args.polmap, [1, 2]) - bad = polmap[0] == hp.UNSEEN - polmap = np.sqrt(polmap[0] ** 2 + polmap[1] ** 2) * 1e6 - polmap[bad] = hp.UNSEEN - plt.style.use("default") - cmap = cm.inferno - cmap.set_under("w") - plt.figure(figsize=[20, 4]) - plt.subplots_adjust(left=0.1, right=0.9) - patch_color = "black" - sun_color = "black" - sun_lw = 8 - sun_avoidance_color = "gray" - moon_color = "black" - moon_lw = 2 - moon_avoidance_color = "gray" - alpha = 0.5 - avoidance_alpha = 0.01 - sun_step = np.int(86400 * 1) - moon_step = np.int(86400 * 0.1) - for iplot, coord in enumerate("CEG"): - scoord = {"C": "Equatorial", "E": "Ecliptic", "G": "Galactic"}[coord] - title = scoord # + ' patch locations' - if polmap is None: - nside = 256 - avoidance_map = np.zeros(12 * nside ** 2) - # hp.mollview(np.zeros(12) + hp.UNSEEN, coord=coord, cbar=False, - # title='', sub=[1, 3, 1 + iplot], cmap=cmap) - else: - hp.mollview( - polmap, - coord="G" + coord, - cbar=True, - unit="$\mu$K", - min=args.polmin, - max=args.polmax, - norm="log", - cmap=cmap, - title=title, - sub=[1, 3, 1 + iplot], - notext=True, - format="%.1f", - xsize=1600, - ) - # Plot sun and moon avoidance circle - sunlon, sunlat = [], [] - moonlon, moonlat = [], [] - sun_avoidance_angle = args.sun_avoidance_angle_deg * degree - moon_avoidance_angle = args.moon_avoidance_angle_deg * degree - for lon, lat, sso, angle_min, color, step, lw in [ - ( - sunlon, - sunlat, - sun, - sun_avoidance_angle, - sun_avoidance_color, - sun_step, - sun_lw, - ), - ( - moonlon, - moonlat, - moon, - moon_avoidance_angle, - moon_avoidance_color, - moon_step, - moon_lw, - ), - ]: - for t in range(np.int(start_timestamp), np.int(stop_timestamp), step): - observer.date = to_DJD(t) - sso.compute(observer) - lon.append(sso.a_ra / degree) - lat.append(sso.a_dec / degree) - if angle_min <= 0: - continue - if polmap is None: - # accumulate avoidance map - vec = hp.dir2vec(lon[-1], lat[-1], lonlat=True) - pix = hp.query_disc(nside, vec, angle_min) - for p in pix: - avoidance_map[p] += 1 - else: - # plot a circle around the location - clon, clat = [], [] - phi = sso.a_ra - theta = sso.a_dec - r = angle_min - for ang in np.linspace(0, 2 * np.pi, 36): - dtheta = np.cos(ang) * r - dphi = np.sin(ang) * r / np.cos(theta + dtheta) - clon.append((phi + dphi) / degree) - clat.append((theta + dtheta) / degree) - hp.projplot( - clon, - clat, - "-", - color=color, - alpha=avoidance_alpha, - lw=lw, - threshold=1, - lonlat=True, - coord="C", - ) - if polmap is None: - avoidance_map[avoidance_map == 0] = hp.UNSEEN - hp.mollview( - avoidance_map, - coord="C" + coord, - cbar=False, - title="", - sub=[1, 3, 1 + iplot], - cmap=cmap, - ) - hp.graticule(30, verbose=False) - - # Plot patches - for patch in patches: - lon = [corner._ra / degree for corner in patch.corners] - lat = [corner._dec / degree for corner in patch.corners] - if len(lon) == 0: - # Special patch without sky coordinates - continue - lon.append(lon[0]) - lat.append(lat[0]) - log.info( - "{} corners:\n lon = {}\n lat= {}".format(patch.name, lon, lat) - ) - hp.projplot( - lon, - lat, - "-", - threshold=1, - lonlat=True, - coord="C", - color=patch_color, - lw=2, - alpha=alpha, - ) - if len(patches) > 10: - continue - # label the patch - it = np.argmax(lat) - 
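patch.get_area(), used just below for the debug label and above for weight equalization, estimates the area by testing the center of every coarse HEALPix pixel for membership and dividing the hit count by the total number of pixels. A toy standalone version of that estimate, with a 10-degree circular cap standing in for the in_patch() test (names and numbers are illustrative only):

    import healpy as hp
    import numpy as np

    nside = 32
    npix = hp.nside2npix(nside)
    lon, lat = hp.pix2ang(nside, np.arange(npix), lonlat=True)
    vec = np.array(hp.ang2vec(lon, lat, lonlat=True))   # (npix, 3) pixel-center vectors
    vec0 = hp.ang2vec(40.0, -45.0, lonlat=True)          # cap center
    sep = np.degrees(np.arccos(np.clip(vec @ vec0, -1.0, 1.0)))
    sky_fraction = (sep < 10.0).sum() / npix
    print(sky_fraction)   # close to (1 - cos(10 deg)) / 2 ~ 0.0076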
area = patch.get_area(observer) - title = "{} {:.2f}%".format(patch.name, 100 * area) - hp.projtext( - lon[it], - lat[it], - title, - lonlat=True, - coord="C", - color=patch_color, - fontsize=14, - alpha=alpha, - ) - if polmap is not None: - # Plot Sun and Moon trajectory - hp.projplot( - sunlon, - sunlat, - "-", - color=sun_color, - alpha=alpha, - threshold=1, - lonlat=True, - coord="C", - lw=sun_lw, - ) - hp.projplot( - moonlon, - moonlat, - "-", - color=moon_color, - alpha=alpha, - threshold=1, - lonlat=True, - coord="C", - lw=moon_lw, - ) - hp.projtext( - sunlon[0], - sunlat[0], - "Sun", - color=sun_color, - lonlat=True, - coord="C", - fontsize=14, - alpha=alpha, - ) - hp.projtext( - moonlon[0], - moonlat[0], - "Moon", - color=moon_color, - lonlat=True, - coord="C", - fontsize=14, - alpha=alpha, - ) - - plt.savefig("patches.png") - plt.close() - - # Normalize the weights - for i in range(len(patches)): - patches[i].weight /= total_weight - return patches +from toast.timing import GlobalTimers +from toast.scheduler import run_scheduler def main(): gt = GlobalTimers.get() gt.start("toast_ground_schedule") - args, start_timestamp, stop_timestamp = parse_args() - - observer = ephem.Observer() - observer.lon = args.site_lon - observer.lat = args.site_lat - observer.elevation = args.site_alt # In meters - observer.epoch = "2000" - observer.temp = 0 # in Celcius - observer.compute_pressure() - - sun = ephem.Sun() - moon = ephem.Moon() - - patches = parse_patches(args, observer, sun, moon, start_timestamp, stop_timestamp) - - build_schedule(args, start_timestamp, stop_timestamp, patches, observer, sun, moon) + run_scheduler() gt.stop_all() gt.report() diff --git a/src/libtoast/include/toast/sys_environment.hpp b/src/libtoast/include/toast/sys_environment.hpp index b44145cee..af37bc94c 100644 --- a/src/libtoast/include/toast/sys_environment.hpp +++ b/src/libtoast/include/toast/sys_environment.hpp @@ -29,6 +29,8 @@ class Environment { std::vector info() const; void print() const; bool function_timers() const; + void enable_function_timers(); + void disable_function_timers(); int max_threads() const; int current_threads() const; void set_threads(int nthread); diff --git a/src/libtoast/src/toast_atm_sim.cpp b/src/libtoast/src/toast_atm_sim.cpp index a67c05952..59d7b9b4d 100644 --- a/src/libtoast/src/toast_atm_sim.cpp +++ b/src/libtoast/src/toast_atm_sim.cpp @@ -591,11 +591,12 @@ cholmod_sparse * toast::atm_sim_build_sparse_covariance( // If the covariance exceeds the threshold, add it to the // sparse matrix - //if (val * val > 1e-6 * diagonal[icol] * diagonal[irow]) { + // if (val * val > 1e-6 * diagonal[icol] * diagonal[irow]) { myrows.push_back(irow); mycols.push_back(icol); myvals.push_back(val); - //} + + // } } } # pragma omp critical diff --git a/src/libtoast/src/toast_fod_psd.cpp b/src/libtoast/src/toast_fod_psd.cpp index c989aa323..82b8b6fef 100644 --- a/src/libtoast/src/toast_fod_psd.cpp +++ b/src/libtoast/src/toast_fod_psd.cpp @@ -71,6 +71,7 @@ void toast::fod_crosssums(int64_t n, double const * x, double const * y, lagsum += xgood[i] * ygood[j]; hitsum += gd[i] * gd[j]; } + // Use symmetry to double the statistics for (i = 0, j = lag; i < (n - lag); ++i, ++j) { lagsum += xgood[j] * ygood[i]; diff --git a/src/libtoast/src/toast_sys_environment.cpp b/src/libtoast/src/toast_sys_environment.cpp index 2bfee807b..72f8c75da 100644 --- a/src/libtoast/src/toast_sys_environment.cpp +++ b/src/libtoast/src/toast_sys_environment.cpp @@ -199,6 +199,14 @@ bool toast::Environment::function_timers() 
const { return func_timers_; } +void toast::Environment::enable_function_timers() { + func_timers_ = true; +} + +void toast::Environment::disable_function_timers() { + func_timers_ = false; +} + int64_t toast::Environment::tod_buffer_length() const { return tod_buffer_length_; } diff --git a/src/toast/CMakeLists.txt b/src/toast/CMakeLists.txt index 23db974ca..7c088d3dd 100644 --- a/src/toast/CMakeLists.txt +++ b/src/toast/CMakeLists.txt @@ -91,6 +91,7 @@ install(FILES fft.py healpix.py weather.py + schedule.py "RELEASE" DESTINATION ${PYTHON_SITE}/toast ) diff --git a/src/toast/_libtoast_pixels.cpp b/src/toast/_libtoast_pixels.cpp index 7f04e0213..98a25cd13 100644 --- a/src/toast/_libtoast_pixels.cpp +++ b/src/toast/_libtoast_pixels.cpp @@ -60,6 +60,6 @@ void init_pixels(py::module & m) { (tuple): The (local submap, pixel within submap) for each global pixel. )"); - m.def("global_to_local", &global_to_local ); - m.def("global_to_local", &global_to_local ); + m.def("global_to_local", &global_to_local ); + m.def("global_to_local", &global_to_local ); } diff --git a/src/toast/_libtoast_sys.cpp b/src/toast/_libtoast_sys.cpp index 9c5952ebb..aa5dbced3 100644 --- a/src/toast/_libtoast_sys.cpp +++ b/src/toast/_libtoast_sys.cpp @@ -54,6 +54,14 @@ void init_sys(py::module & m) { R"( Return True if function timing has been enabled. )") + .def("enable_function_timers", &toast::Environment::enable_function_timers, + R"( + Explicitly enable function timers. + )") + .def("disable_function_timers", &toast::Environment::disable_function_timers, + R"( + Explicitly disable function timers. + )") .def("tod_buffer_length", &toast::Environment::tod_buffer_length, R"( Returns the number of samples to buffer for TOD operations. diff --git a/src/toast/pipeline_tools/todground.py b/src/toast/pipeline_tools/todground.py index fe8d8de79..391707503 100644 --- a/src/toast/pipeline_tools/todground.py +++ b/src/toast/pipeline_tools/todground.py @@ -713,16 +713,20 @@ def load_weather(args, comm, schedules, verbose=False): if comm.world_rank == 0: weathers = [] weatherdict = {} - ftimer = Timer() - for fname in args.weather.split(","): - if fname not in weatherdict: - if not os.path.isfile(fname): - raise RuntimeError("No such weather file: {}".format(fname)) - ftimer.start() - weatherdict[fname] = Weather(fname) - ftimer.stop() - ftimer.report_clear("Load {}".format(fname)) - weathers.append(weatherdict[fname]) + if args.weather == "SIM": + weatherdict["SIM"] = Weather(None) + weathers.append(weatherdict["SIM"]) + else: + ftimer = Timer() + for fname in args.weather.split(","): + if fname not in weatherdict: + if not os.path.isfile(fname): + raise RuntimeError("No such weather file: {}".format(fname)) + ftimer.start() + weatherdict[fname] = Weather(fname) + ftimer.stop() + ftimer.report_clear("Load {}".format(fname)) + weathers.append(weatherdict[fname]) else: weathers = None diff --git a/src/toast/schedule.py b/src/toast/schedule.py new file mode 100644 index 000000000..23a5e7894 --- /dev/null +++ b/src/toast/schedule.py @@ -0,0 +1,2881 @@ +#!/usr/bin/env python3 + +# Copyright (c) 2015-2020 by the parties listed in the AUTHORS file. +# All rights reserved. Use of this source code is governed by +# a BSD-style license that can be found in the LICENSE file. 
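The enable_function_timers()/disable_function_timers() bindings added above make the C++ flag switchable at run time from Python. A minimal sketch, assuming the usual Environment.get() singleton accessor exposed through toast.utils; whether functions already wrapped by the function_timer decorator pick the change up at call time depends on toast.timing and is not asserted here:

    from toast.utils import Environment

    env = Environment.get()
    print(env.function_timers())   # current state of the flag
    env.enable_function_timers()
    # ... run the section to be profiled ...
    env.disable_function_timers()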
+ +""" +This script creates a CES schedule file that can be used as input +to toast_ground_sim.py +""" + +import argparse +from datetime import datetime, timezone, timedelta +import dateutil.parser +import os +import sys +import traceback + +import numpy as np +from scipy.constants import degree +from matplotlib import cm + +import ephem +import healpy as hp + +from .utils import Logger +from . import qarray as qa +from .timing import function_timer + + +XAXIS, YAXIS, ZAXIS = np.eye(3) + + +class TooClose(Exception): + pass + + +class SunTooClose(TooClose): + pass + + +class MoonTooClose(TooClose): + pass + + +class Patch(object): + + hits = 0 + partial_hits = 0 + rising_hits = 0 + setting_hits = 0 + time = 0 + rising_time = 0 + setting_time = 0 + step = -1 + az_min = 0 + az_max = 2 * np.pi + _area = None + current_el_min = 0 + current_el_max = 0 + el_min0 = 0 + el_max0 = np.pi / 2 + el_min = el_min0 + el_max = el_max0 + el_step = 0 + alternate = False + ra_amplitude = None + ra_period = 10 + dec_amplitude = None + dec_period = 10 + corners = [] + preferred_el = None + + def __init__( + self, + name, + weight, + corners, + el_min=0, + el_max=np.pi / 2, + el_step=0, + alternate=False, + site_lat=0, + area=None, + ra_period=10, + ra_amplitude=None, + dec_period=10, + dec_amplitude=None, + elevations=None, + ): + self.name = name + self.weight = weight + self.corners = corners + self.el_min0 = el_min + self.el_min = el_min + self.el_max0 = el_max + self.el_step = np.abs(el_step) + self.alternate = alternate + self._area = area + self.site_lat = site_lat + self.ra_period = ra_period + self.ra_amplitude = np.radians(ra_amplitude) + self.dec_period = dec_period + self.dec_amplitude = np.radians(dec_amplitude) + # Use the site latitude to infer the lowest elevation that all + # corners cross. + site_el_max = np.pi / 2 + for corner in corners: + el_max = np.pi / 2 - np.abs(corner._dec - self.site_lat) + if el_max < site_el_max: + site_el_max = el_max + if elevations is None: + if site_el_max < self.el_max0: + self.el_max0 = site_el_max + self.elevations = None + else: + # Parse the allowed elevations + try: + # Try parsing as a string + self.elevations = [ + np.radians(float(el)) for el in elevations.split(",") + ] + except AttributeError: + # Try parsing as an iterable + self.elevations = [np.radians(el) for el in elevations] + self.elevations = np.sort(np.array(self.elevations)) + # Check if any of the allowed elevations is above the highest + # observable elevation + bad = self.elevations > site_el_max + if np.any(bad): + good = np.logical_not(bad) + if np.any(good): + print( + "WARNING: {} of the observing elevations are too high " + "for '{}': {} > {:.2f} deg".format( + np.sum(bad), + self.name, + np.degrees(self.elevations[bad]), + np.degrees(site_el_max), + ), + flush=True, + ) + self.elevations = self.elevations[good] + else: + print( + "ERROR: all of the observing elevations are too high for {}. 
" + "Maximum observing elevation is {} deg".format( + self.name, np.degrees(site_el_max) + ), + flush=True, + ) + sys.exit() + self.el_min0 = np.amin(self.elevations) + self.el_max0 = np.amax(self.elevations) + if el_step != 0: + self.nstep_el = int((self.el_max0 - self.el_min0 + 1e-3) // el_step) + 1 + self.elevations0 = self.elevations + self.el_max = self.el_max0 + self.el_lim = self.el_min0 + self.step_azel() + return + + def oscillate(self): + if self.ra_amplitude: + # Oscillate RA + halfperiod = self.ra_period // 2 + old_phase = np.fmod(self.hits - 1 + halfperiod, self.ra_period) - halfperiod + new_phase = np.fmod(self.hits + halfperiod, self.ra_period) - halfperiod + old_offset = old_phase / halfperiod * self.ra_amplitude + new_offset = new_phase / halfperiod * self.ra_amplitude + offset = new_offset - old_offset + for corner in self.corners: + corner._ra += offset + if self.dec_amplitude: + # Oscillate DEC + halfperiod = self.dec_period // 2 + old_phase = ( + np.fmod(self.hits - 1 + halfperiod, self.dec_period) - halfperiod + ) + new_phase = np.fmod(self.hits + halfperiod, self.dec_period) - halfperiod + old_offset = old_phase / halfperiod * self.dec_amplitude + new_offset = new_phase / halfperiod * self.dec_amplitude + offset = new_offset - old_offset + for corner in self.corners: + corner._dec += offset + return + + @function_timer + def get_area(self, observer, nside=32, equalize=False): + self.update(observer) + if self._area is None: + npix = 12 * nside ** 2 + hitmap = np.zeros(npix) + for corner in self.corners: + corner.compute(observer) + for pix in range(npix): + lon, lat = hp.pix2ang(nside, pix, lonlat=True) + center = ephem.FixedBody() + center._ra = np.radians(lon) + center._dec = np.radians(lat) + center.compute(observer) + hitmap[pix] = self.in_patch(center) + self._area = np.sum(hitmap) / hitmap.size + if self._area == 0: + raise RuntimeError("Patch has zero area!") + if equalize: + self.weight /= self._area + return self._area + + @function_timer + def corner_coordinates(self, observer=None, unwind=False): + """Return the corner coordinates in horizontal frame. + + PyEphem measures the azimuth East (clockwise) from North. + """ + azs = [] + els = [] + az0 = None + for corner in self.corners: + if observer is not None: + corner.compute(observer) + if unwind: + if az0 is None: + az0 = corner.az + azs.append(unwind_angle(az0, corner.az)) + else: + azs.append(corner.az) + els.append(corner.alt) + return np.array(azs), np.array(els) + + @function_timer + def in_patch(self, obj): + """ + Determine if the object (e.g. Sun or Moon) is inside the patch + by using a ray casting algorithm. The ray is cast along a + constant meridian to follow a great circle. 
+ """ + az0 = obj.az + # Get corner coordinates, assuming they were already computed + azs, els = self.corner_coordinates() + els_cross = [] + for i in range(len(self.corners)): + az1 = azs[i] + el1 = els[i] + j = (i + 1) % len(self.corners) + az2 = unwind_angle(az1, azs[j]) + el2 = els[j] + azmean = 0.5 * (az1 + az2) + az0 = unwind_angle(azmean, np.float(obj.az), np.pi) + if (az1 - az0) * (az2 - az0) > 0: + # the constant meridian is not between the two corners + continue + el_cross = el1 + (az1 - az0) * (el2 - el1) / (az1 - az2) + if np.abs(obj.az - (az0 % (2 * np.pi))) < 1e-3: + els_cross.append(el_cross) + elif el_cross > 0: + els_cross.append(np.pi - el_cross) + else: + els_cross.append(-np.pi - el_cross) + + els_cross = np.array(els_cross) + if els_cross.size < 2: + return False + + # Unwind the crossing elevations to minimize the scatter + els_cross = np.sort(els_cross) + if els_cross.size > 1: + ptps = [] + for i in range(els_cross.size): + els_cross_alt = els_cross.copy() + els_cross_alt[:i] += 2 * np.pi + ptps.append(np.ptp(els_cross_alt)) + i = np.argmin(ptps) + if i > 0: + els_cross[:i] += 2 * np.pi + els_cross = np.sort(els_cross) + el_mean = np.mean(els_cross) + el0 = unwind_angle(el_mean, np.float(obj.alt)) + + ncross = np.sum(els_cross > el0) + + if ncross % 2 == 0: + # Even number of crossings means that the object is outside + # of the patch + return False + return True + + @function_timer + def step_azel(self): + self.step += 1 + if self.el_step != 0 and self.alternate: + # alternate between rising and setting scans + if self.rising_hits < self.setting_hits: + # Schedule a rising scan + istep = self.rising_hits % self.nstep_el + self.el_min = min(self.el_max0, self.el_min0 + istep * self.el_step) + self.el_max = self.el_max0 + self.az_min = 0 + self.az_max = np.pi + else: + # Schedule a setting scan + istep = self.setting_hits % self.nstep_el + self.el_min = self.el_min0 + self.el_max = max(self.el_min0, self.el_max0 - istep * self.el_step) + self.az_min = np.pi + self.az_max = 2 * np.pi + else: + if self.alternate: + self.az_min = (self.az_min + np.pi) % (2 * np.pi) + self.az_max = self.az_min + np.pi + else: + self.el_min += self.el_step + if self.el_min > self.el_max0: + self.el_min = self.el_min0 + if self.el_step != 0 and self.elevations is not None: + tol = np.radians(0.1) + self.elevations = np.array( + [ + el + for el in self.elevations0 + if (el + tol >= self.el_min and el - tol <= self.el_max) + ] + ) + return + + def reset(self): + self.step += 1 + self.el_min = self.el_min0 + self.el_max = self.el_max0 + self.elevations = self.elevations0 + self.az_min = 0 + if self.alternate: + self.az_max = np.pi + else: + self.az_max = 2 * np.pi + return + + def visible( + self, + el_min, + observer, + sun, + moon, + sun_avoidance_angle, + moon_avoidance_angle, + check_sso, + ): + self.update(observer) + patch_el_max = -1000 + patch_el_min = 1000 + in_view = False + for i, corner in enumerate(self.corners): + corner.compute(observer) + patch_el_min = min(patch_el_min, corner.alt) + patch_el_max = max(patch_el_max, corner.alt) + if corner.alt > el_min: + # At least one corner is visible + in_view = True + if check_sso: + if sun_avoidance_angle > 0: + angle = np.degrees(ephem.separation(sun, corner)) + if angle < sun_avoidance_angle: + # Patch is too close to the Sun + return False, "Too close to Sun {:.2f}".format(angle) + if moon_avoidance_angle > 0: + angle = np.degrees(ephem.separation(moon, corner)) + if angle < moon_avoidance_angle: + # Patch is too close to the Moon + 
return False, "Too close to Moon {:.2f}".format(angle) + if not in_view: + msg = "Below el_min = {:.2f} at el = {:.2f}..{:.2f}.".format( + np.degrees(el_min), np.degrees(patch_el_min), np.degrees(patch_el_max) + ) + else: + msg = "in view" + self.current_el_min = patch_el_min + self.current_el_max = patch_el_max + + return in_view, msg + + def update(self, *args, **kwargs): + """ + A virtual method that is implemented by moving targets + """ + pass + + +class SSOPatch(Patch): + def __init__(self, name, weight, radius): + self.name = name + self.weight = weight + self.radius = radius + try: + self.body = getattr(ephem, name)() + except: + raise RuntimeError("Failed to initialize {} from pyEphem".format(name)) + self.corners = None + return + + def update(self, observer): + """ + Calculate the relative position of the SSO at a given time + """ + self.body.compute(observer) + ra, dec = self.body.ra, self.body.dec + # Synthesize 8 corners around the center + phi = ra + theta = dec + r = self.radius + ncorner = 8 + angstep = 2 * np.pi / ncorner + self.corners = [] + for icorner in range(ncorner): + ang = angstep * icorner + delta_theta = np.cos(ang) * r + delta_phi = np.sin(ang) * r / np.cos(theta + delta_theta) + patch_corner = ephem.FixedBody() + patch_corner._ra = phi + delta_phi + patch_corner._dec = theta + delta_theta + self.corners.append(patch_corner) + return + + +class CoolerCyclePatch(Patch): + def __init__( + self, + weight, + power, + hold_time_min, + hold_time_max, + cycle_time, + az, + el, + last_cycle_end, + ): + # Standardized name for cooler cycles + self.name = "cooler_cycle" + self.hold_time_min = hold_time_min * 3600 + self.hold_time_max = hold_time_max * 3600 + self.cycle_time = cycle_time * 3600 + self.az = az + self.el = el + self.last_cycle_end = last_cycle_end + self.weight0 = weight + self.weight = weight + self.power = power + return + + def get_area(self, *args, **kwargs): + if self._area is None: + self._area = 0 + return self._area + + def corner_coordinates(self, *args, **kwargs): + return None + + def in_patch(self, *args, **kwargs): + return False + + def step_azel(self, *args, **kwargs): + return + + def reset(self, *args, **kwargs): + return + + def get_current_hold_time(self, observer): + tlast = to_DJD(self.last_cycle_end) + tnow = float(observer.date) # In Dublin Julian date + hold_time = (tnow - tlast) * 86400 # in seconds + return hold_time + + def visible( + self, + el_min, + observer, + sun, + moon, + sun_avoidance_angle, + moon_avoidance_angle, + check_sso, + ): + self.update(observer) + hold_time = self.get_current_hold_time(observer) + if hold_time > self.hold_time_min: + visible = True + msg = "minimum hold time exceeded" + else: + visible = False + msg = "minimum hold time not met" + return visible, msg + + def update(self, observer): + hold_time = self.get_current_hold_time(observer) + if hold_time < self.hold_time_min: + self.weight = np.inf + else: + weight = (self.hold_time_max - hold_time) / ( + self.hold_time_max - self.hold_time_min + ) + self.weight = self.weight0 * weight ** self.power + return + + +class HorizontalPatch(Patch): + def __init__(self, name, weight, azmin, azmax, el, scantime): + self.name = name + self.weight = weight + if azmin <= np.pi and azmax <= np.pi: + self.rising = True + elif azmin >= np.pi and azmax >= np.pi: + self.rising = False + else: + raise RuntimeError("Horizontal patch must either be rising or setting.") + self.az_min = azmin + self.az_max = azmax + self.el = el + # scan time is the maximum time spent on 
this scan before targeting again + self.scantime = scantime # in minutes. + self.scandrift = scantime / 60 * 15 * degree + + self.el_min0 = el + self.el_min = el + self.el_max0 = el + self.el_step = 0 + self.alternate = False + self._area = 0 + self.el_max = self.el_max0 + self.el_lim = self.el_min0 + return + + def get_area(self, observer, nside=32, equalize=False): + return 1 + + def corner_coordinates(self, observer=None, unwind=False): + azs = [self.az_min, self.az_max] + els = [self.el_min, self.el_max] + return np.array(azs), np.array(els) + + def in_patch(self, obj, angle=0): + azmin = obj.az - angle + azmax = obj.az + angle + elmin = obj.alt - angle + elmax = obj.alt + angle + if self.rising: + elmax += self.scandrift + else: + elmin -= self.scandrift + if ( + azmin > self.az_min + and azmax < self.az_max + and elmin > self.el_min + and elmax < self.el_max + ): + return True + return False + + def step_azel(self): + return + + def visible( + self, + el_min, + observer, + sun, + moon, + sun_avoidance_angle, + moon_avoidance_angle, + check_sso, + ): + + in_view = True + msg = "" + if check_sso: + for sso, angle, name in [ + (sun, sun_avoidance_angle, "Sun"), + (moon, moon_avoidance_angle, "Moon"), + ]: + if self.in_patch(sso, angle=angle): + in_view = False + msg += "{} too close;".format(name) + + if in_view: + msg = "in view" + self.current_el_min = self.el_min + self.current_el_max = self.el_max + return in_view, msg + + +def to_UTC(t): + # Convert UNIX time stamp to a date string + return datetime.fromtimestamp(t, timezone.utc).strftime("%Y-%m-%d %H:%M:%S") + + +def to_JD(t): + # Unix time stamp to Julian date + # (days since -4712-01-01 12:00:00 UTC) + return t / 86400.0 + 2440587.5 + + +def to_MJD(t): + # Convert Unix time stamp to modified Julian date + # (days since 1858-11-17 00:00:00 UTC) + return to_JD(t) - 2400000.5 + + +def to_DJD(t): + # Convert Unix time stamp to Dublin Julian date + # (days since 1899-12-31 12:00:00) + # This is the time format used by PyEphem + return to_JD(t) - 2415020 + + +def DJDtoUNIX(djd): + # Convert Dublin Julian date to a UNIX time stamp + return ((djd + 2415020) - 2440587.5) * 86400.0 + + +def patch_is_rising(patch): + try: + # Horizontal patch definition + rising = patch.rising + except: + rising = True + for corner in patch.corners: + if corner.alt > 0 and corner.az > np.pi: + # The patch is setting + rising = False + break + return rising + + +@function_timer +def prioritize(args, visible): + """Order visible targets by priority and number of scans.""" + log = Logger.get() + for i in range(len(visible)): + for j in range(len(visible) - i - 1): + # If either of the patches is a cooler cycle, we don't modulate + # the priorities with hit counts, observing time or elevation + if isinstance(visible[j], CoolerCyclePatch) or isinstance( + visible[j + 1], CoolerCyclePatch + ): + weight1 = visible[j].weight + weight2 = visible[j + 1].weight + else: + if patch_is_rising(visible[j]): + if args.equalize_time: + hits1 = visible[j].rising_time + else: + hits1 = visible[j].rising_hits + el1 = np.degrees(visible[j].current_el_max) + else: + if args.equalize_time: + hits1 = visible[j].setting_time + else: + hits1 = visible[j].setting_hits + el1 = np.degrees(visible[j].current_el_min) + if patch_is_rising(visible[j + 1]): + if args.equalize_time: + hits2 = visible[j + 1].rising_time + else: + hits2 = visible[j + 1].rising_hits + el2 = np.degrees(visible[j + 1].current_el_max) + else: + if args.equalize_time: + hits2 = visible[j + 1].setting_time + else: + 
hits2 = visible[j + 1].setting_hits + el2 = np.degrees(visible[j + 1].current_el_min) + # Patch with the lower weight goes first. Having more + # earlier observing time and lower observing elevation + # will increase the weight. + weight1 = (hits1 + 1) * visible[j].weight + weight2 = (hits2 + 1) * visible[j + 1].weight + # Optional elevation penalty + if args.elevation_penalty_limit > 0: + lim = args.elevation_penalty_limit + if el1 < lim: + weight1 *= (lim / el1) ** args.elevation_penalty_power + if el2 < lim: + weight2 *= (lim / el2) ** args.elevation_penalty_power + if weight1 > weight2: + visible[j], visible[j + 1] = visible[j + 1], visible[j] + names = [] + for patch in visible: + names.append(patch.name) + log.debug("Prioritized list of viewable patches: {}".format(names)) + return + + +@function_timer +def attempt_scan( + args, + observer, + visible, + not_visible, + t, + fp_radius, + stop_timestamp, + tstop_cooler, + sun, + moon, + sun_el_max, + fout, + fout_fmt, + ods, + boresight_angle, +): + """Attempt scanning the visible patches in order until success.""" + log = Logger.get() + success = False + # Always begin by attempting full scans. If none can be completed + # and user allowed partials scans, try them next. + for allow_partial_scans in False, True: + if allow_partial_scans and not args.allow_partial_scans: + break + for patch in visible: + if isinstance(patch, CoolerCyclePatch): + # Cycle the cooler + t = add_cooler_cycle( + args, + t, + stop_timestamp, + observer, + sun, + moon, + fout, + fout_fmt, + patch, + boresight_angle, + ) + success = True + break + # All on-sky targets + for rising in [True, False]: + observer.date = to_DJD(t) + el = get_constant_elevation( + args, + observer, + patch, + rising, + fp_radius, + not_visible, + partial_scan=allow_partial_scans, + ) + if el is None: + continue + success, azmins, azmaxs, aztimes, tstop = scan_patch( + args, + el, + patch, + t, + fp_radius, + observer, + sun, + not_visible, + tstop_cooler, + sun_el_max, + rising, + ) + if success: + try: + t, _ = add_scan( + args, + t, + tstop, + aztimes, + azmins, + azmaxs, + rising, + fp_radius, + observer, + sun, + moon, + fout, + fout_fmt, + patch, + el, + ods, + boresight_angle, + partial_scan=allow_partial_scans, + ) + patch.step_azel() + break + except TooClose: + success = False + break + if success: + break + if success: + break + return success, t + + +def from_angles(az, el): + elquat = qa.rotation(YAXIS, np.radians(90 - el)) + azquat = qa.rotation(ZAXIS, np.radians(az)) + return qa.mult(azquat, elquat) + + +def unwind_quat(quat1, quat2): + if np.sum(np.abs(quat1 - quat2)) > np.sum(np.abs(quat1 + quat2)): + return -quat2 + else: + return quat2 + + +@function_timer +def check_sso(observer, az1, az2, el, sso, angle, tstart, tstop): + """ + Check if a solar system object (SSO) enters within "angle" of + the constant elevation scan. 
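+
+    Returns a (hit, time) pair: hit is True when the SSO comes closer
+    than "angle" degrees to the scan, and time is the UNIX time stamp of
+    the start of the ten-minute test interval where the limit is first
+    violated (or the end of the scan when it never is).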
+ """ + if az2 < az1: + az2 += 360 + naz = max(3, np.int(0.25 * (az2 - az1) * np.cos(np.radians(el)))) + quats = [] + for az in np.linspace(az1, az2, naz): + quats.append(from_angles(az % 360, el)) + vecs = qa.rotate(quats, ZAXIS) + + tstart = to_DJD(tstart) + tstop = to_DJD(tstop) + t1 = tstart + # Test every ten minutes + tstep = 10 / 1440 + while t1 < tstop: + t2 = min(tstop, t1 + tstep) + observer.date = t1 + sso.compute(observer) + sun_az1, sun_el1 = np.degrees(sso.az), np.degrees(sso.alt) + observer.date = t2 + sso.compute(observer) + sun_az2, sun_el2 = np.degrees(sso.az), np.degrees(sso.alt) + sun_quat1 = from_angles(sun_az1, sun_el1) + sun_quat2 = from_angles(sun_az2, sun_el2) + sun_quat2 = unwind_quat(sun_quat1, sun_quat2) + t = np.linspace(0, 1, 10) + sun_quats = qa.slerp(t, [0, 1], [sun_quat1, sun_quat2]) + sun_vecs = qa.rotate(sun_quats, ZAXIS).T + dpmax = np.amax(np.dot(vecs, sun_vecs)) + min_dist = np.degrees(np.arccos(dpmax)) + if min_dist < angle: + return True, DJDtoUNIX(t1) + t1 = t2 + return False, DJDtoUNIX(t2) + + +@function_timer +def attempt_scan_pole( + args, + observer, + visible, + not_visible, + tstart, + fp_radius, + el_max, + el_min, + stop_timestamp, + tstop_cooler, + sun, + moon, + sun_el_max, + fout, + fout_fmt, + ods, + boresight_angle, +): + """Attempt scanning the visible patches in order until success.""" + if args.one_scan_per_day and stop_timestamp > tstop_cooler: + raise RuntimeError("one_scan_per_day is incompatible with cooler cycles") + success = False + for patch in visible: + observer.date = to_DJD(tstart) + if isinstance(patch, CoolerCyclePatch): + # Cycle the cooler + t = add_cooler_cycle( + args, + tstart, + stop_timestamp, + observer, + sun, + moon, + fout, + fout_fmt, + patch, + boresight_angle, + ) + success = True + break + # In pole scheduling, first elevation is just below the patch + el = get_constant_elevation_pole( + args, observer, patch, fp_radius, el_min, el_max, not_visible + ) + if el is None: + continue + pole_success = True + subscan = -1 + t = tstart + while pole_success: + (pole_success, azmins, azmaxs, aztimes, tstop) = scan_patch_pole( + args, + el, + patch, + t, + fp_radius, + observer, + sun, + not_visible, + tstop_cooler, + sun_el_max, + ) + if pole_success: + if success: + # Still the same scan + patch.hits -= 1 + try: + t, subscan = add_scan( + args, + t, + tstop, + aztimes, + azmins, + azmaxs, + False, + fp_radius, + observer, + sun, + moon, + fout, + fout_fmt, + patch, + el, + ods, + boresight_angle, + subscan=subscan, + ) + el += np.radians(args.pole_el_step_deg) + success = True + except TooClose: + success = False + pole_success = False + if success: + break + tstop = t + if args.one_scan_per_day: + day1 = int(to_MJD(tstart)) + while int(to_MJD(tstop)) == day1: + tstop += 60.0 + return success, tstop + + +@function_timer +def get_constant_elevation( + args, observer, patch, rising, fp_radius, not_visible, partial_scan=False +): + """Determine the elevation at which to scan.""" + log = Logger.get() + + azs, els = patch.corner_coordinates(observer) + + ind_rising = azs < np.pi + ind_setting = azs > np.pi + + el = None + if rising: + if np.sum(ind_rising) == 0: + not_visible.append((patch.name, "No rising corners")) + else: + el = np.amax(els[ind_rising]) + fp_radius + else: + if np.sum(ind_setting) == 0: + not_visible.append((patch.name, "No setting corners")) + else: + el = np.amin(els[ind_setting]) - fp_radius + + if el is not None and patch.elevations is not None: + # Fixed elevation mode. 
Find the first allowed observing elevation. + if rising: + ind = patch.elevations >= el + if np.any(ind): + el = np.amin(patch.elevations[ind]) + elif partial_scan: + # None of the elevations allow a full rising scan, + # Observe at the highest allowed elevation + el = np.amax(patch.elevations) + if el < np.amin(els[ind_rising]) + fp_radius: + not_visible.append( + (patch.name, "Rising patch above maximum elevation") + ) + el = None + else: + not_visible.append((patch.name, "Only partial rising scans available")) + el = None + else: + ind = patch.elevations <= el + if np.any(ind): + el = np.amax(patch.elevations[ind]) + elif partial_scan: + # None of the elevations allow a full setting scan, + # Observe at the lowest allowed elevation + el = np.amin(patch.elevations) + if el > np.amax(els[ind_setting]) + fp_radius: + not_visible.append( + (patch.name, "Setting patch above below elevation") + ) + el = None + else: + not_visible.append((patch.name, "Only partial setting scans available")) + el = None + elif el is not None: + if el < patch.el_min: + if partial_scan and np.any(patch.el_min < els[ind_setting] - fp_radius): + # Partial setting scan + el = patch.el_min + else: + not_visible.append( + ( + patch.name, + "el < el_min ({:.2f} < {:.2f}) rising = {}, partial = {}".format( + el / degree, patch.el_min / degree, rising, partial_scan + ), + ) + ) + el = None + elif el > patch.el_max: + if partial_scan and np.any(patch.el_max > els[ind_rising] + fp_radius): + # Partial rising scan + el = patch.el_max + else: + not_visible.append( + ( + patch.name, + "el > el_max ({:.2f} > {:.2f}) rising = {}, partial = {}".format( + el / degree, patch.el_max / degree, rising, partial_scan + ), + ) + ) + el = None + if el is None: + log.debug("NO ELEVATION: {}".format(not_visible[-1])) + else: + log.debug( + "{} : ELEVATION = {}, rising = {}, partial = {}".format( + patch.name, el / degree, rising, partial_scan + ) + ) + return el + + +@function_timer +def get_constant_elevation_pole( + args, observer, patch, fp_radius, el_min, el_max, not_visible +): + """Determine the elevation at which to scan.""" + log = Logger.get() + _, els = patch.corner_coordinates(observer) + el = np.amin(els) - fp_radius + + if el < el_min: + not_visible.append( + ( + patch.name, + "el < el_min ({:.2f} < {:.2f})".format(el / degree, el_min / degree), + ) + ) + el = None + elif el > el_max: + not_visible.append( + ( + patch.name, + "el > el_max ({:.2f} > {:.2f})".format(el / degree, el_max / degree), + ) + ) + el = None + if el is None: + log.debug("NOT VISIBLE: {}".format(not_visible[-1])) + return el + + +def check_sun_el(t, observer, sun, sun_el_max, args, not_visible): + log = Logger.get() + observer.date = to_DJD(t) + if sun_el_max < np.pi / 2: + sun.compute(observer) + if sun.alt > sun_el_max: + not_visible.append( + ( + patch.name, + "Sun too high {:.2f} rising = {}" + "".format(np.degrees(sun.alt), rising), + ) + ) + log.debug("NOT VISIBLE: {}".format(not_visible[-1])) + return True + return False + + +@function_timer +def scan_patch( + args, + el, + patch, + t, + fp_radius, + observer, + sun, + not_visible, + stop_timestamp, + sun_el_max, + rising, +): + """Attempt scanning the patch specified by corners at elevation el.""" + log = Logger.get() + azmins, azmaxs, aztimes = [], [], [] + if isinstance(patch, HorizontalPatch): + # No corners. 
Simply scan for the requested time + if rising and not patch.rising: + return False, azmins, azmaxs, aztimes, t + if check_sun_el(t, observer, sun, sun_el_max, args, not_visible): + return False, azmins, azmaxs, aztimes, t + azmins = [patch.az_min] + azmaxs = [patch.az_max] + aztimes = [t] + return True, azmins, azmaxs, aztimes, t + patch.scantime * 60 + # Traditional patch, track each corner + success = False + # and now track when all corners are past the elevation + tstop = t + tstep = 60 + to_cross = np.ones(len(patch.corners), dtype=np.bool) + scan_started = False + while True: + if tstop > stop_timestamp or tstop - t > 86400: + not_visible.append( + (patch.name, "Ran out of time rising = {}".format(rising)) + ) + log.debug("NOT VISIBLE: {}".format(not_visible[-1])) + break + if check_sun_el(tstop, observer, sun, sun_el_max, args, not_visible): + break + azs, els = patch.corner_coordinates(observer) + has_extent = current_extent( + azmins, + azmaxs, + aztimes, + patch.corners, + fp_radius, + el, + azs, + els, + rising, + tstop, + ) + if has_extent: + scan_started = True + + if rising: + good = azs <= np.pi + to_cross[np.logical_and(els > el + fp_radius, good)] = False + else: + good = azs >= np.pi + to_cross[np.logical_and(els < el - fp_radius, good)] = False + + # If we are alternating rising and setting scans, reject patches + # that appear on the wrong side of the sky. + if np.any((np.array(azmins) % (2 * np.pi)) < patch.az_min) or np.any( + (np.array(azmaxs) % (2 * np.pi)) > patch.az_max + ): + success = False + break + + if len(aztimes) > 0 and not np.any(to_cross): + # All corners made it across the CES line. + success = True + # Begin the scan before the patch is at the CES line + if aztimes[0] > t: + aztimes[0] -= tstep + break + + if scan_started and not has_extent: + # The patch went out of view before all corners + # could cross the elevation line. + success = False + break + tstop += tstep + + return success, azmins, azmaxs, aztimes, tstop + + +def unwind_angle(alpha, beta, multiple=2 * np.pi): + """Minimize absolute difference between alpha and beta. + + Minimize the absolute difference by adding a multiple of + 2*pi to beta to match alpha. + """ + while np.abs(alpha - beta - multiple) < np.abs(alpha - beta): + beta += multiple + while np.abs(alpha - beta + multiple) < np.abs(alpha - beta): + beta -= multiple + return beta + + +@function_timer +def scan_patch_pole( + args, + el, + patch, + t, + fp_radius, + observer, + sun, + not_visible, + stop_timestamp, + sun_el_max, +): + """Attempt scanning the patch specified by corners at elevation el. + + The pole scheduling mode will not wait for the patch to drift across. + It simply attempts to scan for the required time: args.pole_ces_time. 
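+
+    Returns a (success, azmins, azmaxs, aztimes, tstop) tuple recording
+    the azimuthal extent of the patch at each time step of the scan.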
+ """ + log = Logger.get() + success = False + tstop = t + tstep = 60 + azmins, azmaxs, aztimes = [], [], [] + while True: + if tstop - t > args.pole_ces_time_s - 1: + # Succesfully scanned the maximum time + if len(azmins) > 0: + success = True + else: + not_visible.append( + (patch.name, "No overlap at {:.2f}".format(el / degree)) + ) + log.debug("NOT VISIBLE: {}".format(not_visible[-1])) + break + if tstop > stop_timestamp or tstop - t > 86400: + not_visible.append((patch.name, "Ran out of time")) + log.debug("NOT VISIBLE: {}".format(not_visible[-1])) + break + observer.date = to_DJD(tstop) + sun.compute(observer) + if sun.alt > sun_el_max: + not_visible.append( + (patch.name, "Sun too high {:.2f}".format(sun.alt / degree)) + ) + log.debug("NOT VISIBLE: {}".format(not_visible[-1])) + break + azs, els = patch.corner_coordinates(observer) + if np.amax(els) + fp_radius < el: + not_visible.append((patch.name, "Patch below {:.2f}".format(el / degree))) + log.debug("NOT VISIBLE: {}".format(not_visible[-1])) + break + radius = max(np.radians(1), fp_radius) + current_extent_pole( + azmins, azmaxs, aztimes, patch.corners, radius, el, azs, els, tstop + ) + tstop += tstep + return success, azmins, azmaxs, aztimes, tstop + + +@function_timer +def current_extent_pole( + azmins, azmaxs, aztimes, corners, fp_radius, el, azs, els, tstop +): + """Get the azimuthal extent of the patch along elevation el. + + Pole scheduling does not care if the patch is "rising" or "setting". + """ + azs_cross = [] + for i in range(len(corners)): + if np.abs(els[i] - el) < fp_radius: + azs_cross.append(azs[i]) + j = (i + 1) % len(corners) + if np.abs(els[j] - el) < fp_radius: + azs_cross.append(azs[j]) + if np.abs(els[i] - el) < fp_radius or np.abs(els[j] - el) < fp_radius: + continue + elif (els[i] - el) * (els[j] - el) < 0: + # Record the location where a line between the corners + # crosses el. + az1 = azs[i] + az2 = azs[j] + el1 = els[i] - el + el2 = els[j] - el + if az2 - az1 > np.pi: + az1 += 2 * np.pi + if az1 - az2 > np.pi: + az2 += 2 * np.pi + az_cross = (az1 + el1 * (az2 - az1) / (el1 - el2)) % (2 * np.pi) + azs_cross.append(az_cross) + + # Translate the azimuths at multiples of 2pi so they are in a + # compact cluster + + for i in range(1, len(azs_cross)): + azs_cross[i] = unwind_angle(azs_cross[0], azs_cross[i]) + + if len(azs_cross) > 0: + azs_cross = np.sort(azs_cross) + azmin = azs_cross[0] + azmax = azs_cross[-1] + azmax = unwind_angle(azmin, azmax) + if azmax - azmin > np.pi: + # Patch crosses the zero meridian + azmin, azmax = azmax, azmin + if len(azmins) > 0: + azmin = unwind_angle(azmins[-1], azmin) + azmax = unwind_angle(azmaxs[-1], azmax) + azmins.append(azmin) + azmaxs.append(azmax) + aztimes.append(tstop) + return + + +@function_timer +def current_extent( + azmins, azmaxs, aztimes, corners, fp_radius, el, azs, els, rising, t +): + """Get the azimuthal extent of the patch along elevation el. + + Find the pairs of corners that are on opposite sides + of the CES line. Record the crossing azimuth of a + line between the corners. 
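+    Appends the azimuth range and time stamp to azmins, azmaxs and
+    aztimes and returns True when the patch has extent at this elevation,
+    False otherwise.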
+ + """ + azs_cross = [] + for i in range(len(corners)): + j = (i + 1) % len(corners) + for el0 in [el - fp_radius, el, el + fp_radius]: + if (els[i] - el0) * (els[j] - el0) < 0: + # The corners are on opposite sides of the elevation line + az1 = azs[i] + az2 = azs[j] + el1 = els[i] - el0 + el2 = els[j] - el0 + az2 = unwind_angle(az1, az2) + az_cross = (az1 + el1 * (az2 - az1) / (el1 - el2)) % (2 * np.pi) + azs_cross.append(az_cross) + if fp_radius == 0: + break + if len(azs_cross) == 0: + return False + + azs_cross = np.array(azs_cross) + if rising: + good = azs_cross < np.pi + else: + good = azs_cross > np.pi + ngood = np.sum(good) + if ngood == 0: + return False + elif ngood > 1: + azs_cross = azs_cross[good] + + # Unwind the crossing azimuths to minimize the scatter + azs_cross = np.sort(azs_cross) + if azs_cross.size > 1: + ptp0 = azs_cross[-1] - azs_cross[0] + ptps = azs_cross[:-1] + 2 * np.pi - azs_cross[1:] + ptps = np.hstack([ptp0, ptps]) + i = np.argmin(ptps) + azs_cross[:i] += 2 * np.pi + np.roll(azs_cross, i) + + if len(azs_cross) > 1: + azmin = azs_cross[0] % (2 * np.pi) + azmax = azs_cross[-1] % (2 * np.pi) + if azmax - azmin > np.pi: + # Patch crosses the zero meridian + azmin, azmax = azmax, azmin + azmins.append(azmin) + azmaxs.append(azmax) + aztimes.append(t) + return True + return False + + +@function_timer +def add_scan( + args, + tstart, + tstop, + aztimes, + azmins, + azmaxs, + rising, + fp_radius, + observer, + sun, + moon, + fout, + fout_fmt, + patch, + el, + ods, + boresight_angle, + subscan=-1, + partial_scan=False, +): + """Make an entry for a CES in the schedule file.""" + log = Logger.get() + ces_time = tstop - tstart + if ces_time > args.ces_max_time_s: # and not args.pole_mode: + nsub = np.int(np.ceil(ces_time / args.ces_max_time_s)) + ces_time /= nsub + aztimes = np.array(aztimes) + azmins = np.array(azmins) + azmaxs = np.array(azmaxs) + azmaxs[0] = unwind_angle(azmins[0], azmaxs[0]) + for i in range(1, azmins.size): + azmins[i] = unwind_angle(azmins[0], azmins[i]) + azmaxs[i] = unwind_angle(azmaxs[0], azmaxs[i]) + azmaxs[i] = unwind_angle(azmins[i], azmaxs[i]) + # for i in range(azmins.size-1): + # if azmins[i+1] - azmins[i] > np.pi: + # azmins[i+1], azmaxs[i+1] = azmins[i+1]-2*np.pi, azmaxs[i+1]-2*np.pi + # if azmins[i+1] - azmins[i] < np.pi: + # azmins[i+1], azmaxs[i+1] = azmins[i+1]+2*np.pi, azmaxs[i+1]+2*np.pi + rising_string = "R" if rising else "S" + t1 = np.amin(aztimes) + entries = [] + while t1 < tstop - 1: + subscan += 1 + if args.operational_days: + # See if adding this scan would exceed the number of desired + # operational days + if subscan == 0: + tz = args.timezone / 24 + od = int(to_MJD(tstart) + tz) + ods.add(od) + if len(ods) > args.operational_days: + # Prevent adding further entries to the schedule once + # the number of operational days is full + break + t2 = min(t1 + ces_time, tstop) + if tstop - t2 < ces_time / 10: + # Append leftover scan to the last full subscan + t2 = tstop + ind = np.logical_and(aztimes >= t1, aztimes <= t2) + if np.all(aztimes > t2): + ind[0] = True + if np.all(aztimes < t1): + ind[-1] = True + if azmins[ind][0] < azmaxs[ind][0]: + azmin = np.amin(azmins[ind]) + azmax = np.amax(azmaxs[ind]) + else: + # we are, scan from the maximum to the minimum + azmin = np.amax(azmins[ind]) + azmax = np.amin(azmaxs[ind]) + if args.scan_margin > 0: + # Add a random error to the scan parameters to smooth out + # caustics in the hit map + delta_az = azmax - unwind_angle(azmax, azmin) + sub_az = delta_az * 
np.abs(np.random.randn()) * args.scan_margin * 0.5 + add_az = delta_az * np.abs(np.random.randn()) * args.scan_margin * 0.5 + azmin = (azmin - sub_az) % (2 * np.pi) + azmax = (azmax + add_az) % (2 * np.pi) + if t2 == tstop: + delta_t = t2 - t1 # tstop - tstart + add_t = delta_t * np.abs(np.random.randn()) * args.scan_margin + t2 += add_t + # Add the focal plane radius to the scan width + fp_radius_eff = fp_radius / np.cos(el) + azmin = (azmin - fp_radius_eff) % (2 * np.pi) / degree + azmax = (azmax + fp_radius_eff) % (2 * np.pi) / degree + # Get the Sun and Moon locations at the beginning and end + observer.date = to_DJD(t1) + sun.compute(observer) + moon.compute(observer) + sun_az1, sun_el1 = sun.az / degree, sun.alt / degree + moon_az1, moon_el1 = moon.az / degree, moon.alt / degree + moon_phase1 = moon.phase + # It is possible that the Sun or the Moon gets too close to the + # scan, even if they are far enough from the actual patch. + sun_too_close, sun_time = check_sso( + observer, + azmin, + azmax, + el / degree, + sun, + args.sun_avoidance_angle_deg, + t1, + t2, + ) + moon_too_close, moon_time = check_sso( + observer, + azmin, + azmax, + el / degree, + moon, + args.moon_avoidance_angle_deg, + t1, + t2, + ) + + if ( + (isinstance(patch, HorizontalPatch) or partial_scan) + and sun_time > tstart + 1 + and moon_time > tstart + 1 + ): + # Simply terminate the scan when the Sun or the Moon is too close + t2 = min(sun_time, moon_time) + if sun_too_close or moon_too_close: + tstop = t2 + if t1 == t2: + break + else: + # For regular patches, this is a failure condition + if sun_too_close: + log.debug("Sun too close") + raise SunTooClose + if moon_too_close: + log.debug("Moon too close") + raise MoonTooClose + + observer.date = to_DJD(t2) + sun.compute(observer) + moon.compute(observer) + sun_az2, sun_el2 = sun.az / degree, sun.alt / degree + moon_az2, moon_el2 = moon.az / degree, moon.alt / degree + moon_phase2 = moon.phase + # Create an entry in the schedule + entry = fout_fmt.format( + to_UTC(t1), + to_UTC(t2), + to_MJD(t1), + to_MJD(t2), + boresight_angle, + patch.name, + (azmin + args.boresight_offset_az_deg) % 360, + (azmax + args.boresight_offset_az_deg) % 360, + (el / degree + args.boresight_offset_el_deg), + rising_string, + sun_el1, + sun_az1, + sun_el2, + sun_az2, + moon_el1, + moon_az1, + moon_el2, + moon_az2, + 0.005 * (moon_phase1 + moon_phase2), + -1 - patch.partial_hits if partial_scan else patch.hits, + subscan, + ) + entries.append(entry) + if partial_scan: + # Never append more than one partial scan before + # checking if full scans are again available + tstop = t2 + break + t1 = t2 + args.gap_small_s + + # Write the entries + for entry in entries: + log.debug(entry) + fout.write(entry) + fout.flush() + + if not partial_scan: + # Only update the patch counters when performing full scans + patch.hits += 1 + patch.time += ces_time + if rising or args.pole_mode: + patch.rising_hits += 1 + patch.rising_time += ces_time + if not rising or args.pole_mode: + patch.setting_hits += 1 + patch.setting_time += ces_time + # The oscillate method will slightly shift the patch to + # blur the boundaries + patch.oscillate() + # Advance the time + tstop += args.gap_s + else: + patch.partial_hits += 1 + # Advance the time + tstop += args.gap_small_s + + return tstop, subscan + + +@function_timer +def add_cooler_cycle( + args, tstart, tstop, observer, sun, moon, fout, fout_fmt, patch, boresight_angle +): + """Make an entry for a cooler cycle in the schedule file.""" + log = Logger.get() + az = 
patch.az + el = patch.el + t1 = tstart + t2 = t1 + patch.cycle_time + + observer.date = to_DJD(t1) + sun.compute(observer) + moon.compute(observer) + sun_az1, sun_el1 = sun.az / degree, sun.alt / degree + moon_az1, moon_el1 = moon.az / degree, moon.alt / degree + moon_phase1 = moon.phase + + observer.date = to_DJD(t2) + sun.compute(observer) + moon.compute(observer) + sun_az2, sun_el2 = sun.az / degree, sun.alt / degree + moon_az2, moon_el2 = moon.az / degree, moon.alt / degree + moon_phase2 = moon.phase + + # Create an entry in the schedule + entry = fout_fmt.format( + to_UTC(t1), + to_UTC(t2), + to_MJD(t1), + to_MJD(t2), + boresight_angle, + patch.name, + az, + az, + el, + "R", + sun_el1, + sun_az1, + sun_el2, + sun_az2, + moon_el1, + moon_az1, + moon_el2, + moon_az2, + 0.005 * (moon_phase1 + moon_phase2), + patch.hits, + 0, + ) + + # Write the entry + log.debug(entry) + fout.write(entry) + fout.flush() + + patch.last_cycle_end = t2 + patch.hits += 1 + patch.time += t2 - t1 + patch.rising_hits += 1 + patch.rising_time += t2 - t1 + patch.setting_hits += 1 + patch.setting_time += t2 - t1 + + return t2 + + +@function_timer +def get_visible(args, observer, sun, moon, patches, el_min): + """Determine which patches are visible.""" + log = Logger.get() + visible = [] + not_visible = [] + for patch in patches: + # Reject all patches that have even one corner too close + # to the Sun or the Moon and patches that are completely + # below the horizon + in_view, msg = patch.visible( + el_min, + observer, + sun, + moon, + args.sun_avoidance_angle_deg, + args.moon_avoidance_angle_deg, + not (args.allow_partial_scans or args.delay_sso_check), + ) + if not in_view: + not_visible.append((patch.name, msg)) + + if in_view: + if not (args.allow_partial_scans or args.delay_sso_check): + # Finally, check that the Sun or the Moon are not + # inside the patch + if args.moon_avoidance_angle_deg >= 0 and patch.in_patch(moon): + not_visible.append((patch.name, "Moon in patch")) + in_view = False + if args.sun_avoidance_angle_deg >= 0 and patch.in_patch(sun): + not_visible.append((patch.name, "Sun in patch")) + in_view = False + if in_view: + visible.append(patch) + log.debug( + "In view: {}. el = {:.2f}..{:.2f}".format( + patch.name, np.degrees(patch.el_min), np.degrees(patch.el_max) + ) + ) + else: + log.debug("NOT VISIBLE: {}".format(not_visible[-1])) + return visible, not_visible + + +@function_timer +def get_boresight_angle(args, t, t0=0): + """Return the scheduled boresight angle at time t.""" + if args.boresight_angle_step_deg == 0 or args.boresight_angle_time_min == 0: + return 0 + + istep = int((t - t0) / 60 / args.boresight_angle_time_min) + return (args.boresight_angle_step_deg * istep) % 360 + + +@function_timer +def apply_blockouts(args, t_in): + """Check if `t` is inside a blockout period. + If so, advance it to the next unblocked time. + + Returns: The (new) time and a boolean flag indicating if + the time was blocked and subsequently advanced. 
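+
+    Block-out ranges are taken from args.block_out and have the form
+    MONTH/DAY-MONTH/DAY or YEAR/MONTH/DAY-YEAR/MONTH/DAY, with the end
+    day treated as inclusive.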
+ """ + if not args.block_out: + return t_in, False + log = Logger.get() + t = t_in + blocked = False + for block_out in args.block_out: + current = datetime.fromtimestamp(t, timezone.utc) + start, stop = block_out.split("-") + try: + # If the block out specifies the year then no extra logic is needed + start_year, start_month, start_day = start.split("/") + start = datetime( + int(start_year), + int(start_month), + int(start_day), + 0, + 0, + 0, + 0, + timezone.utc, + ) + except ValueError: + # No year given so must figure out which year is the right one + start_month, start_day = start.split("/") + start = datetime( + current.year, int(start_month), int(start_day), 0, 0, 0, 0, timezone.utc + ) + if start > current: + # This year's block out is still in the future but the past + # year's blockout may still be active + start = start.replace(year=start.year - 1) + try: + # If the block out specifies the year then no extra logic is needed + stop_year, stop_month, stop_day = stop.split("/") + stop = datetime( + int(stop_year), int(stop_month), int(stop_day), 0, 0, 0, 0, timezone.utc + ) + except ValueError: + # No year given so must figure out which year is the right one + stop_month, stop_day = stop.split("/") + stop = datetime( + start.year, int(stop_month), int(stop_day), 0, 0, 0, 0, timezone.utc + ) + if stop < start: + # The block out ends on a different year than it starts + stop = stop.replace(year=start.year + 1) + # advance the stop time by one day to make the definition inclusive + stop += timedelta(days=1) + if start < current and current < stop: + # `t` is inside the block out. + # Advance to the end of the block out. + log.info( + "{} is inside block out {}, advancing to {}".format( + current, block_out, stop + ) + ) + t = stop.timestamp() + blocked = True + return t, blocked + + +def advance_time(t, time_step, offset=0): + """Advance the time ensuring that the sampling falls + over same discrete times (multiples of time_step) + regardless of the current value of t. 
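+
+    For example, with the default offset of zero:
+
+        >>> advance_time(1234.0, 600.0)
+        1800.0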
+ """ + return offset + ((t - offset) // time_step + 1) * time_step + + +@function_timer +def build_schedule(args, start_timestamp, stop_timestamp, patches, observer, sun, moon): + log = Logger.get() + + sun_el_max = args.sun_el_max_deg * degree + el_min = args.el_min_deg + el_max = args.el_max_deg + if args.elevations_deg is None: + el_min = args.el_min_deg + el_max = args.el_max_deg + else: + # Override the elevation limits + el_min = 90 + el_max = 0 + for el in args.elevations_deg.split(","): + el = np.float(el) + el_min = min(el * 0.9, el_min) + el_max = max(el * 1.1, el_max) + el_min *= degree + el_max *= degree + fp_radius = args.fp_radius_deg * degree + + fname_out = args.out + dir_out = os.path.dirname(fname_out) + if dir_out: + log.info("Creating '{}'".format(dir_out)) + os.makedirs(dir_out, exist_ok=True) + fout = open(fname_out, "w") + + fout.write( + "#{:15} {:15} {:>15} {:>15} {:>15}\n".format( + "Site", "Telescope", "Latitude [deg]", "Longitude [deg]", "Elevation [m]" + ) + ) + fout.write( + " {:15} {:15} {:15.3f} {:15.3f} {:15.1f}\n".format( + args.site_name, + args.telescope, + np.degrees(observer.lat), + np.degrees(observer.lon), + observer.elevation, + ) + ) + + fout_fmt0 = ( + "#{:>20} {:>20} {:>14} {:>14} {:>8} " + "{:35} {:>8} {:>8} {:>8} {:>5} " + "{:>8} {:>8} {:>8} {:>8} " + "{:>8} {:>8} {:>8} {:>8} {:>5} " + "{:>5} {:>3}\n" + ) + + fout_fmt = ( + " {:20} {:20} {:14.6f} {:14.6f} {:8.2f} " + "{:35} {:8.2f} {:8.2f} {:8.2f} {:5} " + "{:8.2f} {:8.2f} {:8.2f} {:8.2f} " + "{:8.2f} {:8.2f} {:8.2f} {:8.2f} {:5.2f} " + "{:5} {:3}\n" + ) + + fout.write( + fout_fmt0.format( + "Start time UTC", + "Stop time UTC", + "Start MJD", + "Stop MJD", + "Rotation", + "Patch name", + "Az min", + "Az max", + "El", + "R/S", + "Sun el1", + "Sun az1", + "Sun el2", + "Sun az2", + "Moon el1", + "Moon az1", + "Moon el2", + "Moon az2", + "Phase", + "Pass", + "Sub", + ) + ) + + # Operational days + ods = set() + + t = start_timestamp + last_successful = t + while True: + t, blocked = apply_blockouts(args, t) + boresight_angle = get_boresight_angle(args, t) + if t > stop_timestamp: + break + if t - last_successful > 86400 or blocked: + # A long time has passed since the last successfully + # scheduled scan. + # Reset the individual patch az and el limits + for patch in patches: + patch.reset() + if blocked: + last_successful = t + else: + # Only try this once for every day. Swapping + # `t` <-> `last_successful` means that we will not trigger + # this branch again without scheduling a succesful scan + log.debug( + "Resetting patches and returning to the last successful " + "scan: {}".format(to_UTC(last_successful)) + ) + t, last_successful = last_successful, t + + # Determine which patches are observable at time t. + + log.debug("t = {}".format(to_UTC(t))) + # Determine which patches are visible + observer.date = to_DJD(t) + sun.compute(observer) + if sun.alt > sun_el_max: + log.debug( + "Sun elevation is {:.2f} > {:.2f}. 
Moving on.".format( + sun.alt / degree, sun_el_max / degree + ) + ) + t = advance_time(t, args.time_step_s) + continue + moon.compute(observer) + + visible, not_visible = get_visible(args, observer, sun, moon, patches, el_min) + + if len(visible) == 0: + log.debug("No patches visible at {}: {}".format(to_UTC(t), not_visible)) + t = advance_time(t, args.time_step_s) + continue + + # Determine if a cooler cycle sets a limit for observing + tstop_cooler = stop_timestamp + for patch in patches: + if isinstance(patch, CoolerCyclePatch): + ttest = patch.last_cycle_end + patch.hold_time_max + if ttest < tstop_cooler: + tstop_cooler = ttest + + # Order the targets by priority and attempt to observe with both + # a rising and setting scans until we find one that can be + # succesfully scanned. + # If the criteria are not met, advance the time by a step + # and try again + + prioritize(args, visible) + + if args.pole_mode: + success, t = attempt_scan_pole( + args, + observer, + visible, + not_visible, + t, + fp_radius, + el_max, + el_min, + stop_timestamp, + tstop_cooler, + sun, + moon, + sun_el_max, + fout, + fout_fmt, + ods, + boresight_angle, + ) + else: + success, t = attempt_scan( + args, + observer, + visible, + not_visible, + t, + fp_radius, + stop_timestamp, + tstop_cooler, + sun, + moon, + sun_el_max, + fout, + fout_fmt, + ods, + boresight_angle, + ) + + if args.operational_days and len(ods) > args.operational_days: + break + + if not success: + log.debug( + "No patches could be scanned at {}: {}".format(to_UTC(t), not_visible) + ) + t = advance_time(t, args.time_step_s) + else: + last_successful = t + + fout.close() + return + + +def parse_args(opts=None): + parser = argparse.ArgumentParser( + description="Generate ground observation schedule.", fromfile_prefix_chars="@" + ) + + parser.add_argument( + "--site-name", required=False, default="LBL", help="Observing site name" + ) + parser.add_argument( + "--telescope", + required=False, + default="Telescope", + help="Observing telescope name", + ) + parser.add_argument( + "--site-lon", + required=False, + default="-122.247", + help="Observing site longitude [PyEphem string]", + ) + parser.add_argument( + "--site-lat", + required=False, + default="37.876", + help="Observing site latitude [PyEphem string]", + ) + parser.add_argument( + "--site-alt", + required=False, + default=100, + type=np.float, + help="Observing site altitude [meters]", + ) + parser.add_argument( + "--scan-margin", + required=False, + default=0, + type=np.float, + help="Random fractional margin [0..1] added to the " + "scans to smooth out edge effects", + ) + parser.add_argument( + "--ra-period", + required=False, + default=10, + type=np.int, + help="Period of patch position oscillations in RA [visits]", + ) + parser.add_argument( + "--ra-amplitude-deg", + required=False, + default=0, + type=np.float, + help="Amplitude of patch position oscillations in RA [deg]", + ) + parser.add_argument( + "--dec-period", + required=False, + default=10, + type=np.int, + help="Period of patch position oscillations in DEC [visits]", + ) + parser.add_argument( + "--dec-amplitude-deg", + required=False, + default=0, + type=np.float, + help="Amplitude of patch position oscillations in DEC [deg]", + ) + parser.add_argument( + "--elevation-penalty-limit", + required=False, + default=0, + type=np.float, + help="Assign a penalty to observing elevations below this limit [degrees]", + ) + parser.add_argument( + "--elevation-penalty-power", + required=False, + default=2, + type=np.float, + 
help="Power in the elevation penalty function [> 0] ", + ) + parser.add_argument( + "--equalize-area", + required=False, + default=False, + action="store_true", + help="Adjust priorities to account for patch area", + ) + parser.add_argument( + "--equalize-time", + required=False, + action="store_true", + dest="equalize_time", + help="Modulate priority by integration time.", + ) + parser.add_argument( + "--equalize-scans", + required=False, + action="store_false", + dest="equalize_time", + help="Modulate priority by number of scans.", + ) + parser.set_defaults(equalize_time=False) + parser.add_argument( + "--patch", + required=True, + action="append", + help="Patch definition: " + "name,weight,lon1,lat1,lon2,lat2 ... " + "OR name,weight,lon,lat,width", + ) + parser.add_argument( + "--patch-coord", + required=False, + default="C", + help="Sky patch coordinate system [C,E,G]", + ) + parser.add_argument( + "--el-min-deg", + required=False, + default=30, + type=np.float, + help="Minimum elevation for a CES", + ) + parser.add_argument( + "--el-max-deg", + required=False, + default=80, + type=np.float, + help="Maximum elevation for a CES", + ) + parser.add_argument( + "--el-step-deg", + required=False, + default=0, + type=np.float, + help="Optional step to apply to minimum elevation", + ) + parser.add_argument( + "--alternate", + required=False, + default=False, + action="store_true", + help="Alternate between rising and setting scans", + ) + parser.add_argument( + "--fp-radius-deg", + required=False, + default=0, + type=np.float, + help="Focal plane radius [deg]", + ) + parser.add_argument( + "--sun-avoidance-angle-deg", + required=False, + default=30, + type=np.float, + help="Minimum distance between the Sun and the bore sight [deg]", + ) + parser.add_argument( + "--moon-avoidance-angle-deg", + required=False, + default=20, + type=np.float, + help="Minimum distance between the Moon and the bore sight [deg]", + ) + parser.add_argument( + "--sun-el-max-deg", + required=False, + default=90, + type=np.float, + help="Maximum allowed sun elevation [deg]", + ) + parser.add_argument( + "--boresight-angle-step-deg", + required=False, + default=0, + type=np.float, + help="Boresight rotation step size [deg]", + ) + parser.add_argument( + "--boresight-angle-time-min", + required=False, + default=0, + type=np.float, + help="Boresight rotation step interval [minutes]", + ) + parser.add_argument( + "--start", + required=False, + default="2000-01-01 00:00:00", + help="UTC start time of the schedule", + ) + parser.add_argument("--stop", required=False, help="UTC stop time of the schedule") + parser.add_argument( + "--block-out", + required=False, + action="append", + help="Range of UTC calendar days to omit from scheduling in format " + "START_MONTH/START_DAY-END_MONTH/END_DAY or " + "START_YEAR/START_MONTH/START_DAY-END_YEAR/END_MONTH/END_DAY " + "where YEAR, MONTH and DAY are integers. 
END days are inclusive", + ) + parser.add_argument( + "--operational-days", + required=False, + type=np.int, + help="Number of operational days to schedule (empty days do not count)", + ) + parser.add_argument( + "--timezone", + required=False, + type=np.int, + default=0, + help="Offset to apply to MJD to separate operational days [hours]", + ) + parser.add_argument( + "--gap-s", + required=False, + default=100, + type=np.float, + help="Gap between CES:es [seconds]", + ) + parser.add_argument( + "--gap-small-s", + required=False, + default=10, + type=np.float, + help="Gap between split CES:es [seconds]", + ) + parser.add_argument( + "--time-step-s", + required=False, + default=600, + type=np.float, + help="Time step after failed target acquisition [seconds]", + ) + parser.add_argument( + "--one-scan-per-day", + required=False, + default=False, + action="store_true", + help="Pad each operational day to have only one CES", + ) + parser.add_argument( + "--ces-max-time-s", + required=False, + default=900, + type=np.float, + help="Maximum length of a CES [seconds]", + ) + parser.add_argument( + "--debug", + required=False, + default=False, + action="store_true", + help="Write diagnostics, including patch plots.", + ) + parser.add_argument( + "--polmap", + required=False, + help="Include polarization from map in the plotted patches when --debug", + ) + parser.add_argument( + "--pol-min", + required=False, + type=np.float, + help="Lower plotting range for polarization map", + ) + parser.add_argument( + "--pol-max", + required=False, + type=np.float, + help="Upper plotting range for polarization map", + ) + parser.add_argument( + "--delay-sso-check", + required=False, + default=False, + action="store_true", + help="Only apply SSO check during simulated scan.", + ) + parser.add_argument( + "--pole-mode", + required=False, + default=False, + action="store_true", + help="Pole scheduling mode (no drift scan)", + ) + parser.add_argument( + "--pole-el-step-deg", + required=False, + default=0.25, + type=np.float, + help="Elevation step in pole scheduling mode [deg]", + ) + parser.add_argument( + "--pole-ces-time-s", + required=False, + default=3000, + type=np.float, + help="Time to scan at constant elevation in pole mode", + ) + parser.add_argument( + "--out", required=False, default="schedule.txt", help="Output filename" + ) + parser.add_argument( + "--boresight-offset-el-deg", + required=False, + default=0, + type=np.float, + help="Optional offset added to every observing elevation", + ) + parser.add_argument( + "--boresight-offset-az-deg", + required=False, + default=0, + type=np.float, + help="Optional offset added to every observing azimuth", + ) + parser.add_argument( + "--elevations-deg", + required=False, + help="Fixed observing elevations in a comma-separated list.", + ) + parser.add_argument( + "--partial-scans", + required=False, + action="store_true", + dest="allow_partial_scans", + help="Allow partials scans when full scans are not available.", + ) + parser.add_argument( + "--no-partial-scans", + required=False, + action="store_false", + dest="allow_partial_scans", + help="Allow partials scans when full scans are not available.", + ) + parser.set_defaults(allow_partial_scans=False) + + args = None + if opts is None: + try: + args = parser.parse_args() + except SystemExit: + sys.exit(0) + else: + try: + args = parser.parse_args(opts) + except SystemExit: + sys.exit(0) + + if args.operational_days is None and args.stop is None: + raise RuntimeError("You must provide --stop or 
--operational-days") + + stop_time = None + if args.start.endswith("Z"): + start_time = dateutil.parser.parse(args.start) + if args.stop is not None: + if not args.stop.endswith("Z"): + raise RuntimeError("Either both or neither times must be given in UTC") + stop_time = dateutil.parser.parse(args.stop) + else: + if args.timezone < 0: + tz = "-{:02}00".format(-args.timezone) + else: + tz = "+{:02}00".format(args.timezone) + start_time = dateutil.parser.parse(args.start + tz) + if args.stop is not None: + if args.stop.endswith("Z"): + raise RuntimeError("Either both or neither times must be given in UTC") + stop_time = dateutil.parser.parse(args.stop + tz) + + start_timestamp = start_time.timestamp() + if stop_time is None: + # Keep scheduling until the desired number of operational days is full. + stop_timestamp = 2 ** 60 + else: + stop_timestamp = stop_time.timestamp() + return args, start_timestamp, stop_timestamp + + +@function_timer +def parse_patch_sso(args, parts): + log = Logger.get() + log.info("SSO format") + name = parts[0] + weight = float(parts[2]) + radius = float(parts[3]) * degree + patch = SSOPatch(name, weight, radius, elevations=args.elevations_deg) + return patch + + +@function_timer +def parse_patch_cooler(args, parts, last_cycle_end): + log = Logger.get() + log.info("Cooler cycle format") + weight = float(parts[2]) + power = float(parts[3]) + hold_time_min = float(parts[4]) # in hours + hold_time_max = float(parts[5]) # in hours + cycle_time = float(parts[6]) # in hours + az = float(parts[7]) + el = float(parts[8]) + patch = CoolerCyclePatch( + weight, power, hold_time_min, hold_time_max, cycle_time, az, el, last_cycle_end + ) + return patch + + +@function_timer +def parse_patch_horizontal(args, parts): + """Parse an explicit patch definition line""" + log = Logger.get() + corners = [] + log.info("Horizontal format") + name = parts[0] + weight = float(parts[2]) + azmin = float(parts[3]) * degree + azmax = float(parts[4]) * degree + el = float(parts[5]) * degree + scantime = float(parts[6]) # minutes + patch = HorizontalPatch(name, weight, azmin, azmax, el, scantime) + return patch + + +@function_timer +def parse_patch_explicit(args, parts): + """Parse an explicit patch definition line""" + log = Logger.get() + corners = [] + log.info("Explicit-corners format: ") + name = parts[0] + i = 2 + definition = "" + while i + 1 < len(parts): + definition += " ({}, {})".format(parts[i], parts[i + 1]) + try: + # Assume coordinates in degrees + lon = float(parts[i]) * degree + lat = float(parts[i + 1]) * degree + except ValueError: + # Failed simple interpreration, assume pyEphem strings + lon = parts[i] + lat = parts[i + 1] + i += 2 + if args.patch_coord == "C": + corner = ephem.Equatorial(lon, lat, epoch="2000") + elif args.patch_coord == "E": + corner = ephem.Ecliptic(lon, lat, epoch="2000") + elif args.patch_coord == "G": + corner = ephem.Galactic(lon, lat, epoch="2000") + else: + raise RuntimeError("Unknown coordinate system: {}".format(args.patch_coord)) + corner = ephem.Equatorial(corner) + if corner.dec > 80 * degree or corner.dec < -80 * degree: + raise RuntimeError( + "{} has at least one circumpolar corner. 
" + "Circumpolar targeting not yet implemented".format(name) + ) + patch_corner = ephem.FixedBody() + patch_corner._ra = corner.ra + patch_corner._dec = corner.dec + corners.append(patch_corner) + log.info(definition) + return corners + + +@function_timer +def parse_patch_rectangular(args, parts): + """Parse a rectangular patch definition line""" + log = Logger.get() + corners = [] + log.info("Rectangular format") + name = parts[0] + try: + # Assume coordinates in degrees + lon_min = float(parts[2]) * degree + lat_max = float(parts[3]) * degree + lon_max = float(parts[4]) * degree + lat_min = float(parts[5]) * degree + except ValueError: + # Failed simple interpreration, assume pyEphem strings + lon_min = parts[2] + lat_max = parts[3] + lon_max = parts[4] + lat_min = parts[5] + if args.patch_coord == "C": + coordconv = ephem.Equatorial + elif args.patch_coord == "E": + coordconv = ephem.Ecliptic + elif args.patch_coord == "G": + coordconv = ephem.Galactic + else: + raise RuntimeError("Unknown coordinate system: {}".format(args.patch_coord)) + + nw_corner = coordconv(lon_min, lat_max, epoch="2000") + ne_corner = coordconv(lon_max, lat_max, epoch="2000") + se_corner = coordconv(lon_max, lat_min, epoch="2000") + sw_corner = coordconv(lon_min, lat_min, epoch="2000") + + lon_max = unwind_angle(lon_min, lon_max) + if lon_min < lon_max: + delta_lon = lon_max - lon_min + else: + delta_lon = lon_min - lon_max + area = (np.cos(np.pi / 2 - lat_max) - np.cos(np.pi / 2 - lat_min)) * delta_lon + + corners_temp = [] + add_side(nw_corner, ne_corner, corners_temp, coordconv) + add_side(ne_corner, se_corner, corners_temp, coordconv) + add_side(se_corner, sw_corner, corners_temp, coordconv) + add_side(sw_corner, nw_corner, corners_temp, coordconv) + + for corner in corners_temp: + if corner.dec > 80 * degree or corner.dec < -80 * degree: + raise RuntimeError( + "{} has at least one circumpolar corner. " + "Circumpolar targeting not yet implemented".format(name) + ) + patch_corner = ephem.FixedBody() + patch_corner._ra = corner.ra + patch_corner._dec = corner.dec + corners.append(patch_corner) + return corners, area + + +@function_timer +def add_side(corner1, corner2, corners_temp, coordconv): + """Add one side of a rectangle. + + Add one side of a rectangle with enough interpolation points. 
+ """ + step = np.radians(1) + corners_temp.append(ephem.Equatorial(corner1)) + lon1 = corner1.ra + lon2 = corner2.ra + lat1 = corner1.dec + lat2 = corner2.dec + if lon1 == lon2: + lon = lon1 + if lat1 < lat2: + lat_step = step + else: + lat_step = -step + for lat in np.arange(lat1, lat2, lat_step): + corners_temp.append(ephem.Equatorial(coordconv(lon, lat, epoch="2000"))) + elif lat1 == lat2: + lat = lat1 + if lon1 < lon2: + lon_step = step / np.cos(lat) + else: + lon_step = -step / np.cos(lat) + for lon in np.arange(lon1, lon2, lon_step): + corners_temp.append(ephem.Equatorial(coordconv(lon, lat, epoch="2000"))) + else: + raise RuntimeError("add_side: both latitude and longitude change") + return + + +@function_timer +def parse_patch_center_and_width(args, parts): + """Parse center-and-width patch definition""" + log = Logger.get() + corners = [] + log.info("Center-and-width format") + try: + # Assume coordinates in degrees + lon = float(parts[2]) * degree + lat = float(parts[3]) * degree + except ValueError: + # Failed simple interpreration, assume pyEphem strings + lon = parts[2] + lat = parts[3] + width = float(parts[4]) * degree + if args.patch_coord == "C": + center = ephem.Equatorial(lon, lat, epoch="2000") + elif args.patch_coord == "E": + center = ephem.Ecliptic(lon, lat, epoch="2000") + elif args.patch_coord == "G": + center = ephem.Galactic(lon, lat, epoch="2000") + else: + raise RuntimeError("Unknown coordinate system: {}".format(args.patch_coord)) + center = ephem.Equatorial(center) + # Synthesize 8 corners around the center + phi = center.ra + theta = center.dec + r = width / 2 + ncorner = 8 + angstep = 2 * np.pi / ncorner + for icorner in range(ncorner): + ang = angstep * icorner + delta_theta = np.cos(ang) * r + delta_phi = np.sin(ang) * r / np.cos(theta + delta_theta) + patch_corner = ephem.FixedBody() + patch_corner._ra = phi + delta_phi + patch_corner._dec = theta + delta_theta + corners.append(patch_corner) + return corners + + +@function_timer +def parse_patches(args, observer, sun, moon, start_timestamp, stop_timestamp): + # Parse the patch definitions + log = Logger.get() + patches = [] + total_weight = 0 + for patch_def in args.patch: + parts = patch_def.split(",") + name = parts[0] + log.info('Adding patch "{}"'.format(name)) + if parts[1].upper() == "HORIZONTAL": + patch = parse_patch_horizontal(args, parts) + elif parts[1].upper() == "SSO": + patch = parse_patch_sso(args, parts) + elif parts[1].upper() == "COOLER": + patch = parse_patch_cooler(args, parts, start_timestamp) + else: + weight = float(parts[1]) + if np.isnan(weight): + raise RuntimeError("Patch has NaN priority: {}".format(patch_def)) + if weight == 0: + raise RuntimeError("Patch has zero priority: {}".format(patch_def)) + if len(parts[2:]) == 3: + corners = parse_patch_center_and_width(args, parts) + area = None + elif len(parts[2:]) == 4: + corners, area = parse_patch_rectangular(args, parts) + else: + corners = parse_patch_explicit(args, parts) + area = None + patch = Patch( + name, + weight, + corners, + el_min=args.el_min_deg * degree, + el_max=args.el_max_deg * degree, + el_step=args.el_step_deg * degree, + alternate=args.alternate, + site_lat=observer.lat, + area=area, + ra_period=args.ra_period, + ra_amplitude=args.ra_amplitude_deg, + dec_period=args.dec_period, + dec_amplitude=args.dec_amplitude_deg, + elevations=args.elevations_deg, + ) + if args.equalize_area or args.debug: + area = patch.get_area(observer, nside=32, equalize=args.equalize_area) + total_weight += patch.weight + 
patches.append(patch) + + log.debug( + "Highest possible observing elevation: {:.2f} degrees." + " Sky fraction = {:.4f}".format(patches[-1].el_max0 / degree, patch._area) + ) + + if args.debug: + import matplotlib.pyplot as plt + + polmap = None + if args.polmap: + polmap = hp.read_map(args.polmap, [1, 2]) + bad = polmap[0] == hp.UNSEEN + polmap = np.sqrt(polmap[0] ** 2 + polmap[1] ** 2) * 1e6 + polmap[bad] = hp.UNSEEN + plt.style.use("default") + cmap = cm.inferno + cmap.set_under("w") + plt.figure(figsize=[20, 4]) + plt.subplots_adjust(left=0.1, right=0.9) + patch_color = "black" + sun_color = "black" + sun_lw = 8 + sun_avoidance_color = "gray" + moon_color = "black" + moon_lw = 2 + moon_avoidance_color = "gray" + alpha = 0.5 + avoidance_alpha = 0.01 + sun_step = np.int(86400 * 1) + moon_step = np.int(86400 * 0.1) + for iplot, coord in enumerate("CEG"): + scoord = {"C": "Equatorial", "E": "Ecliptic", "G": "Galactic"}[coord] + title = scoord # + ' patch locations' + if polmap is None: + nside = 256 + avoidance_map = np.zeros(12 * nside ** 2) + # hp.mollview(np.zeros(12) + hp.UNSEEN, coord=coord, cbar=False, + # title='', sub=[1, 3, 1 + iplot], cmap=cmap) + else: + hp.mollview( + polmap, + coord="G" + coord, + cbar=True, + unit="$\mu$K", + min=args.polmin, + max=args.polmax, + norm="log", + cmap=cmap, + title=title, + sub=[1, 3, 1 + iplot], + notext=True, + format="%.1f", + xsize=1600, + ) + # Plot sun and moon avoidance circle + sunlon, sunlat = [], [] + moonlon, moonlat = [], [] + sun_avoidance_angle = args.sun_avoidance_angle_deg * degree + moon_avoidance_angle = args.moon_avoidance_angle_deg * degree + for lon, lat, sso, angle_min, color, step, lw in [ + ( + sunlon, + sunlat, + sun, + sun_avoidance_angle, + sun_avoidance_color, + sun_step, + sun_lw, + ), + ( + moonlon, + moonlat, + moon, + moon_avoidance_angle, + moon_avoidance_color, + moon_step, + moon_lw, + ), + ]: + for t in range(np.int(start_timestamp), np.int(stop_timestamp), step): + observer.date = to_DJD(t) + sso.compute(observer) + lon.append(sso.a_ra / degree) + lat.append(sso.a_dec / degree) + if angle_min <= 0: + continue + if polmap is None: + # accumulate avoidance map + vec = hp.dir2vec(lon[-1], lat[-1], lonlat=True) + pix = hp.query_disc(nside, vec, angle_min) + for p in pix: + avoidance_map[p] += 1 + else: + # plot a circle around the location + clon, clat = [], [] + phi = sso.a_ra + theta = sso.a_dec + r = angle_min + for ang in np.linspace(0, 2 * np.pi, 36): + dtheta = np.cos(ang) * r + dphi = np.sin(ang) * r / np.cos(theta + dtheta) + clon.append((phi + dphi) / degree) + clat.append((theta + dtheta) / degree) + hp.projplot( + clon, + clat, + "-", + color=color, + alpha=avoidance_alpha, + lw=lw, + threshold=1, + lonlat=True, + coord="C", + ) + if polmap is None: + avoidance_map[avoidance_map == 0] = hp.UNSEEN + hp.mollview( + avoidance_map, + coord="C" + coord, + cbar=False, + title="", + sub=[1, 3, 1 + iplot], + cmap=cmap, + ) + hp.graticule(30, verbose=False) + + # Plot patches + for patch in patches: + lon = [corner._ra / degree for corner in patch.corners] + lat = [corner._dec / degree for corner in patch.corners] + if len(lon) == 0: + # Special patch without sky coordinates + continue + lon.append(lon[0]) + lat.append(lat[0]) + log.info( + "{} corners:\n lon = {}\n lat= {}".format(patch.name, lon, lat) + ) + hp.projplot( + lon, + lat, + "-", + threshold=1, + lonlat=True, + coord="C", + color=patch_color, + lw=2, + alpha=alpha, + ) + if len(patches) > 10: + continue + # label the patch + it = np.argmax(lat) + 
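+            # Annotate the patch at its highest-declination corner with the
+            # sky fraction reported by get_area().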
area = patch.get_area(observer) + title = "{} {:.2f}%".format(patch.name, 100 * area) + hp.projtext( + lon[it], + lat[it], + title, + lonlat=True, + coord="C", + color=patch_color, + fontsize=14, + alpha=alpha, + ) + if polmap is not None: + # Plot Sun and Moon trajectory + hp.projplot( + sunlon, + sunlat, + "-", + color=sun_color, + alpha=alpha, + threshold=1, + lonlat=True, + coord="C", + lw=sun_lw, + ) + hp.projplot( + moonlon, + moonlat, + "-", + color=moon_color, + alpha=alpha, + threshold=1, + lonlat=True, + coord="C", + lw=moon_lw, + ) + hp.projtext( + sunlon[0], + sunlat[0], + "Sun", + color=sun_color, + lonlat=True, + coord="C", + fontsize=14, + alpha=alpha, + ) + hp.projtext( + moonlon[0], + moonlat[0], + "Moon", + color=moon_color, + lonlat=True, + coord="C", + fontsize=14, + alpha=alpha, + ) + + plt.savefig("patches.png") + plt.close() + + # Normalize the weights + for i in range(len(patches)): + patches[i].weight /= total_weight + return patches + + +def run_scheduler(opts=None): + args, start_timestamp, stop_timestamp = parse_args(opts=opts) + + observer = ephem.Observer() + observer.lon = args.site_lon + observer.lat = args.site_lat + observer.elevation = args.site_alt # In meters + observer.epoch = "2000" + observer.temp = 0 # in Celcius + observer.compute_pressure() + + sun = ephem.Sun() + moon = ephem.Moon() + + patches = parse_patches(args, observer, sun, moon, start_timestamp, stop_timestamp) + + build_schedule(args, start_timestamp, stop_timestamp, patches, observer, sun, moon) + return diff --git a/src/toast/todmap/atm.py b/src/toast/todmap/atm.py index 1f203f496..a86756a33 100644 --- a/src/toast/todmap/atm.py +++ b/src/toast/todmap/atm.py @@ -625,8 +625,8 @@ def draw(self): # Wind is parallel to surface. Rotate to a frame where the scan # is across the X-axis. - #self._w *= 0.5 # DEBUG - #self._wdir += np.pi # DEBUG + # self._w *= 0.5 # DEBUG + # self._wdir += np.pi # DEBUG eastward_wind = self._w * np.cos(self._wdir) northward_wind = self._w * np.sin(self._wdir) diff --git a/src/toast/todmap/mapmaker.py b/src/toast/todmap/mapmaker.py index 4b60c0010..2b8224a83 100644 --- a/src/toast/todmap/mapmaker.py +++ b/src/toast/todmap/mapmaker.py @@ -1339,7 +1339,7 @@ def solve(self): beta *= sqsum proposal *= beta proposal += precond_residual - log.info("{} : Solution: {}".format(self.rank, guess)) # DEBUG + # log.info("{} : Solution: {}".format(self.rank, guess)) # DEBUG return guess @@ -1585,12 +1585,13 @@ def get_templatematrix(self, data): timer.start() log = Logger.get() templatelist = [] - if self.baseline_length: - log.info( - "Initializing offset template, step_length = {}".format( - self.baseline_length + if self.baseline_length is not None: + if self.rank == 0: + log.info( + "Initializing offset template, step_length = {}".format( + self.baseline_length + ) ) - ) templatelist.append( OffsetTemplate( data, @@ -1604,11 +1605,12 @@ def get_templatematrix(self, data): ) ) if self.subharmonic_order is not None: - log.info( - "Initializing subharmonic template, order = {}".format( - self.subharmonic_order + if self.rank == 0: + log.info( + "Initializing subharmonic template, order = {}".format( + self.subharmonic_order + ) ) - ) templatelist.append( SubharmonicTemplate( data, diff --git a/src/toast/todmap/pointing.py b/src/toast/todmap/pointing.py index 2ac6d2d9e..b6764477c 100644 --- a/src/toast/todmap/pointing.py +++ b/src/toast/todmap/pointing.py @@ -389,26 +389,22 @@ def __init__( @property def nside(self): - """(int): the HEALPix NSIDE value used. 
- """ + """(int): the HEALPix NSIDE value used.""" return self._nside @property def nest(self): - """(bool): if True, the pointing is NESTED ordering. - """ + """(bool): if True, the pointing is NESTED ordering.""" return self._nest @property def mode(self): - """(str): the pointing mode "I", "IQU", etc. - """ + """(str): the pointing mode "I", "IQU", etc.""" return self._mode @property def local_submaps(self): - """(list): Indices of locally hit submaps - """ + """(list): Indices of locally hit submaps""" if self._single_precision: dtype = np.int32 else: diff --git a/src/toast/todmap/sim_tod.py b/src/toast/todmap/sim_tod.py index 0931b7c12..4fbb525ff 100644 --- a/src/toast/todmap/sim_tod.py +++ b/src/toast/todmap/sim_tod.py @@ -953,6 +953,7 @@ def __init__( self._az = np.array([]) self._el = np.array([]) + nsample_elnod = 0 if start_with_elnod: # Begin with an el-nod nsample_elnod = self.simulate_elnod( diff --git a/src/toast/weather.py b/src/toast/weather.py index f91f92ff3..56edeb926 100644 --- a/src/toast/weather.py +++ b/src/toast/weather.py @@ -43,44 +43,107 @@ def __init__(self, fname, site=0, realization=0, time=None): self.set_time(time) self._varindex = {} - hdulist = pf.open(self._fname, "readonly") - - # Build the probability axis of the cumulative distribution - # function. The CDF:s stored for every month, hour and variable - # all assume the same probability axis. - prob_start = hdulist[1].header["probstrt"] - prob_stop = hdulist[1].header["probstop"] - nstep = hdulist[1].header["nstep"] - self._prob = np.linspace(prob_start, prob_stop, nstep) - - # Load the CDF:s. One entry per month. - self._monthly_cdf = [] - ivar = 0 - for month in range(12): - self._monthly_cdf.append([]) - hdu = hdulist[1 + month] - # One entry for every hour - for hour in range(24): - self._monthly_cdf[month].append({}) - # and one entry for every weather variable: - # TQI : ice water - # TQL : liquid water - # TQV : water vapor - # QV10M : specific humidity - # PS : surface pressure - # TS : surface temperature - # T10M : air temperature at 10m - # U10M : eastward wind at 10m - # V10M : northward wind at 10m - for col in hdu.columns: - name = col.name - if name not in self._varindex: - self._varindex[name] = ivar - ivar += 1 + if self._fname is None: + # We will simulate fake weather information. 
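+            # Rather than drawing from measured distributions, assign one fixed
+            # value per variable to every month and hour.  The constants below
+            # are roughly representative of a high, dry observing site.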
+ prob_start = 0.0 + prob_stop = 1.0 + nstep = 101 + self._prob = np.linspace(prob_start, prob_stop, nstep) + + self._monthly_cdf = [] + ivar = 0 + for month in range(12): + self._monthly_cdf.append([]) + # One entry for every hour for hour in range(24): - self._monthly_cdf[month][hour][name] = hdu.data.field(name)[hour] - - hdulist.close() + self._monthly_cdf[month].append({}) + # and one entry for every weather variable: + # TQI : ice water + # TQL : liquid water + # TQV : water vapor + # QV10M : specific humidity + # PS : surface pressure + # TS : surface temperature + # T10M : air temperature at 10m + # U10M : eastward wind at 10m + # V10M : northward wind at 10m + name = "TQI" + self._varindex[name] = 0 + for hour in range(24): + self._monthly_cdf[month][hour][name] = 0.0 + name = "TQL" + self._varindex[name] = 1 + for hour in range(24): + self._monthly_cdf[month][hour][name] = 0.0 + name = "TQV" + self._varindex[name] = 2 + for hour in range(24): + self._monthly_cdf[month][hour][name] = 0.2 + name = "QV10M" + self._varindex[name] = 3 + for hour in range(24): + self._monthly_cdf[month][hour][name] = 0.00015 + name = "PS" + self._varindex[name] = 4 + for hour in range(24): + self._monthly_cdf[month][hour][name] = 58200.0 + name = "TS" + self._varindex[name] = 5 + for hour in range(24): + self._monthly_cdf[month][hour][name] = 263.0 + name = "T10M" + self._varindex[name] = 6 + for hour in range(24): + self._monthly_cdf[month][hour][name] = 263.0 + name = "U10M" + self._varindex[name] = 7 + for hour in range(24): + self._monthly_cdf[month][hour][name] = -3.0 + name = "V10M" + self._varindex[name] = 8 + for hour in range(24): + self._monthly_cdf[month][hour][name] = -7.0 + else: + hdulist = pf.open(self._fname, "readonly") + + # Build the probability axis of the cumulative distribution + # function. The CDF:s stored for every month, hour and variable + # all assume the same probability axis. + prob_start = hdulist[1].header["probstrt"] + prob_stop = hdulist[1].header["probstop"] + nstep = hdulist[1].header["nstep"] + self._prob = np.linspace(prob_start, prob_stop, nstep) + + # Load the CDF:s. One entry per month. + self._monthly_cdf = [] + ivar = 0 + for month in range(12): + self._monthly_cdf.append([]) + hdu = hdulist[1 + month] + # One entry for every hour + for hour in range(24): + self._monthly_cdf[month].append({}) + # and one entry for every weather variable: + # TQI : ice water + # TQL : liquid water + # TQV : water vapor + # QV10M : specific humidity + # PS : surface pressure + # TS : surface temperature + # T10M : air temperature at 10m + # U10M : eastward wind at 10m + # V10M : northward wind at 10m + for col in hdu.columns: + name = col.name + if name not in self._varindex: + self._varindex[name] = ivar + ivar += 1 + for hour in range(24): + self._monthly_cdf[month][hour][name] = hdu.data.field(name)[ + hour + ] + + hdulist.close() self._reset_vars()
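Note: the simulated-weather branch above fills the same constant into every
(month, hour) slot for each variable.  The stand-alone sketch below builds
equivalent tables from a single dictionary and loop; it is an illustration
only, using the same constants as the code above, and the helper name
fake_weather_tables is not part of TOAST.

def fake_weather_tables():
    # Constant stand-in values for each weather variable, matching the
    # simulated-weather branch above.
    fake_values = {
        "TQI": 0.0,  # ice water
        "TQL": 0.0,  # liquid water
        "TQV": 0.2,  # water vapor
        "QV10M": 0.00015,  # specific humidity
        "PS": 58200.0,  # surface pressure
        "TS": 263.0,  # surface temperature
        "T10M": 263.0,  # air temperature at 10m
        "U10M": -3.0,  # eastward wind at 10m
        "V10M": -7.0,  # northward wind at 10m
    }
    # Map each variable name to its index, in the same order as above.
    varindex = {name: ivar for ivar, name in enumerate(fake_values)}
    # One list per month, one dictionary per hour, one constant per variable.
    monthly_cdf = [[dict(fake_values) for hour in range(24)] for month in range(12)]
    return varindex, monthly_cdf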