From fef0a542bb289d1bb9dd395c9f5a3a91ba07f470 Mon Sep 17 00:00:00 2001
From: John Halley Gotway
Date: Fri, 31 Mar 2023 08:45:19 -0600
Subject: [PATCH] Update develop-ref after #2495 (#2499)

Co-authored-by: johnhg
Co-authored-by: Julie Prestopnik
Co-authored-by: Seth Linden
Co-authored-by: John Halley Gotway
Co-authored-by: Dave Albo
Co-authored-by: MET Tools Test Account
Co-authored-by: Howard Soh
Co-authored-by: Jonathan Vigh
Co-authored-by: jprestop
Co-authored-by: Seth Linden
Co-authored-by: hsoh-u
Co-authored-by: davidalbo
Co-authored-by: lisagoodrich <33230218+lisagoodrich@users.noreply.github.com>
Co-authored-by: George McCabe <23407799+georgemccabe@users.noreply.github.com>
Co-authored-by: Daniel Adriaansen
Co-authored-by: Lisa Goodrich

fix #2309 develop tcmpr (#2310)
fix #2306 ascii2nc airnow hourly (#2314)
fix_spread_md (#2335)
fix #2389 develop flowchart (#2392)
Fix Python environment issue (#2407)
fix definitions of G172 and G220 based on comments in NOAA-EMC/NCEPLIBS-w3emc#157. (#2406)
fix #2380 develop override (#2382)
fix #2408 develop empty config (#2410)
fix #2390 develop compile zlib (#2404)
fix #2412 develop climo (#2422)
fix #2437 develop convert (#2439)
fix for develop, for #2437, forgot one reference to the search_parent for a dictionary lookup.
fix #2452 develop airnow (#2454)
fix #2449 develop pdf (#2464)
fix #2402 develop sonarqube (#2468)
fix #2426 develop buoy (#2475)
---
 .github/workflows/documentation.yml           |    3 +-
 data/table_files/ndbc_stations.xml            | 3981 +++++++++++------
 docs/Users_Guide/appendixC.rst                |    2 +-
 docs/Users_Guide/appendixF.rst                |    2 +
 docs/Users_Guide/config_options.rst           |   27 +
 docs/Users_Guide/release-notes.rst            |   50 +-
 docs/conf.py                                  |   11 +-
 docs/requirements.txt                         |    1 -
 internal/scripts/docker/Dockerfile            |    2 +-
 internal/scripts/docker/Dockerfile.copy       |    2 +-
 .../config/install_met_env.acorn_py3.10       |    4 +-
 .../config/install_met_env.generic            |    2 +-
 .../config/install_met_env.wcoss2_py3.10      |    3 +-
 .../sonarqube/python.sonar-project.properties |    6 +-
 .../sonarqube/sonar-project.properties        |    5 +-
 scripts/utility/Makefile.am                   |    3 +-
 scripts/utility/Makefile.in                   |    3 +-
 .../utility/build_ndbc_stations_from_web.py   |  634 +++
 .../var_info_nc_pinterp.cc                    |    8 +-
 src/libcode/vx_data2d_nccf/var_info_nccf.cc   |    8 +-
 src/tools/other/ascii2nc/aeronet_handler.cc   |  103 +-
 src/tools/other/ascii2nc/ndbc_handler.cc      |  103 +-
 22 files changed, 3485 insertions(+), 1478 deletions(-)
 create mode 100755 scripts/utility/build_ndbc_stations_from_web.py

diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml
index f30b90d0f6..b039177ff7 100644
--- a/.github/workflows/documentation.yml
+++ b/.github/workflows/documentation.yml
@@ -8,7 +8,7 @@ on:
       - main_*
       - bugfix_*
     paths:
-      - met/docs/**
+      - docs/**
   pull_request:
     types: [opened, reopened, synchronize]
   workflow_dispatch:
@@ -26,6 +26,7 @@ jobs:
         run: |
           python -m pip install --upgrade python-dateutil requests sphinx \
             sphinx-gallery Pillow sphinx_rtd_theme sphinx-panels
+          python -m pip install -r docs/requirements.txt
       - name: Build docs
         run: ./.github/jobs/build_documentation.sh
       - uses: actions/upload-artifact@v3
diff --git a/data/table_files/ndbc_stations.xml b/data/table_files/ndbc_stations.xml
index 7e73ef73a3..114957cd5a 100644
--- a/data/table_files/ndbc_stations.xml
+++ b/data/table_files/ndbc_stations.xml
@@ -1,1368 +1,2613 @@
[The body of this diff is not recoverable: it consisted of roughly 1,368 removed
and 2,613 added <station id="..." lat="..." lon="..."/> entries whose XML tags
were stripped during extraction.]
diff --git a/docs/Users_Guide/appendixC.rst b/docs/Users_Guide/appendixC.rst
index 0b5aa69346..2a1e358e68 100644
--- a/docs/Users_Guide/appendixC.rst
+++ b/docs/Users_Guide/appendixC.rst
@@ -1101,7 +1101,7 @@ Called "IGN" in ECNT output :numref:`table_ES_header_info_es_out_ECNT`
 
 The ignorance score (IGN) is the negative logarithm of a predictive probability density function (:ref:`Gneiting et al., 2004`). In MET, the IGN is calculated based on a normal approximation to the forecast distribution (i.e. a normal pdf is fit to the forecast values). This approximation may not be valid, especially for discontinuous forecasts like precipitation, and also for very skewed forecasts. For a single normal distribution **N** with parameters :math:`\mu \text{ and } \sigma`, the ignorance score is
 
-.. math:: \text{ign} (N( \mu, \sigma),y) = \frac{1}{2} \ln (2 \pi \sigma^2 ) + \frac{(y - \mu)^2}{\sigma^2}.
+.. math:: \text{ign} (N( \mu, \sigma),y) = \frac{1}{2} \ln (2 \pi \sigma^2 ) + \frac{(y - \mu)^2}{2\sigma^2}.
 
 Accumulation of the ignorance score for many forecasts is via the average of individual ignorance scores. This average ignorance score is the value output by the MET software. Like many error statistics, the IGN is negatively oriented, so smaller numbers indicate better forecasts.
diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst
index 371773d519..c490cc07e3 100644
--- a/docs/Users_Guide/appendixF.rst
+++ b/docs/Users_Guide/appendixF.rst
@@ -9,6 +9,8 @@ Introduction
 
 MET includes the ability to embed Python to a limited degree. Users may use Python scripts and whatever associated Python packages they wish in order to prepare 2D gridded data fields, point observations, and matched pairs as input to the MET tools. We fully expect that this degree of embedding will increase in the future. In addition, plans are in place to extend Python with MET in upcoming releases, allowing users to invoke MET tools directly from their Python script. While MET version 8.0 was built on Python 2.x, MET versions 9.0 and beyond are built on Python 3.6+.
 
+.. _compiling_python_support:
+
 Compiling Python Support
 ========================
 
diff --git a/docs/Users_Guide/config_options.rst b/docs/Users_Guide/config_options.rst
index 025c633d29..93a7a96229 100644
--- a/docs/Users_Guide/config_options.rst
+++ b/docs/Users_Guide/config_options.rst
@@ -272,6 +272,33 @@ The default table can be found in the installed
 XML content for all stations that allows lookups of latitude, longitude,
 and, in some cases, elevation for all stations based on stationId.
 
+This set of stations comes from two online sources: the
+`active stations website <https://www.ndbc.noaa.gov/activestations.xml>`_
+and the `complete stations website <https://www.ndbc.noaa.gov/to_station.shtml>`_.
+As these lists can change as a function of time, a script can be run to pull
+down the contents of both websites and merge any changes with the existing stations
+file content, creating an updated stations file locally.
+The MET_NDBC_STATIONS environment variable can then be set to refer to this newer
+stations file. Also, the MET development team will periodically
+run this script and update *share/met/table_files/ndbc_stations.xml*.
+
+To run this utility:
+
+.. code-block:: none
+
+  build_ndbc_stations_from_web.py <-d> <-p> <-o OUTPUT_FILE>
+
+  Usage: build_ndbc_stations_from_web.py [options]
+  Options:
+    -h, --help            show this help message and exit
+    -d, --diagnostic      Rerun using downloaded files, skipping the download step
+                          (optional, default: False)
+    -p, --prune           Prune files that are no longer online (optional, default: False)
+    -o OUT_FILE, --out=OUT_FILE
+                          Save the text into the named file (optional, default: merged.txt)
+
+NOTE: The downloaded files are written to a subdirectory ndbc_temp_data which
+can be deleted once the final output file is created.
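+
+For example, to build an updated stations file and point MET at it (a sketch;
+the output file name here is arbitrary):
+
+.. code-block:: none
+
+  build_ndbc_stations_from_web.py -p -o ndbc_stations.xml
+  export MET_NDBC_STATIONS=`pwd`/ndbc_stations.xml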
 
 MET_BASE
 ^^^^^^^^
 
diff --git a/docs/Users_Guide/release-notes.rst b/docs/Users_Guide/release-notes.rst
index bcad89b962..840af44bf6 100644
--- a/docs/Users_Guide/release-notes.rst
+++ b/docs/Users_Guide/release-notes.rst
@@ -9,6 +9,44 @@ When applicable, release notes are followed by the GitHub issue number which des
 enhancement, or new feature (`MET GitHub issues <https://github.com/dtcenter/MET/issues>`_).
 Important issues are listed **in bold** for emphasis.
 
+MET Version 11.1.0-beta1 release notes (20230228)
+-------------------------------------------------
+
+  .. dropdown:: Repository, build, and test
+
+     * Add modulefiles for supported systems to the repository (`#2415 <https://github.com/dtcenter/MET/issues/2415>`_).
+     * Add LICENSE.md to the repository (`#2461 <https://github.com/dtcenter/MET/issues/2461>`_).
+     * Update the copyright year to 2023 and increase the version number to 11.1.0 (`#2469 <https://github.com/dtcenter/MET/issues/2469>`_).
+
+  .. dropdown:: Documentation
+
+     * Enhance the Release Notes by adding dropdown menus (`#2146 <https://github.com/dtcenter/MET/issues/2146>`_).
+
+  .. dropdown:: Enhancements
+
+     * Convert the python list to the numpy array for the python embedding at the base class (`#2386 <https://github.com/dtcenter/MET/issues/2386>`_).
+     * Refine Python runtime environment (`#2388 <https://github.com/dtcenter/MET/issues/2388>`_).
+     * Upgrade to using Python 3.10.4 (`#2421 <https://github.com/dtcenter/MET/issues/2421>`_).
+     * **Enhance TC-Pairs to disable the output of consensus track members** (`#2429 <https://github.com/dtcenter/MET/issues/2429>`_).
+
+  .. dropdown:: Bugfixes
+
+     * Bugfix: Fix the MET CF-Compliant NetCDF library code to Polar Stereographic data from NSIDC Sea Ice Edge NetCDF files (`#2218 <https://github.com/dtcenter/MET/issues/2218>`_).
+     * Bugfix: Remove override keyword to avoid C++11 dependency (`#2380 <https://github.com/dtcenter/MET/issues/2380>`_).
+     * Bugfix: Fix ASCII2NC to not compute AOD 550 if other inputs are negative values (`#2383 <https://github.com/dtcenter/MET/issues/2383>`_).
+     * Bugfix: Fix PB2NC to report accurate total observation counts in log messages (`#2387 <https://github.com/dtcenter/MET/issues/2387>`_).
+     * Bugfix: Update the MET flowchart for version 11.0.0 (`#2389 <https://github.com/dtcenter/MET/issues/2389>`_).
+     * Bugfix: Fix issues with the met_compile_all.sh script and associated tar files (`#2390 <https://github.com/dtcenter/MET/issues/2390>`_).
+     * Bugfix: Correct definitions of NCEP grid numbers 172 and 220 (`#2399 <https://github.com/dtcenter/MET/issues/2399>`_).
+     * Bugfix: Address MET-11.0.0 SonarQube Blocker Bugs (`#2402 <https://github.com/dtcenter/MET/issues/2402>`_).
+     * Bugfix: Refine fix for handling empty configuration files (`#2408 <https://github.com/dtcenter/MET/issues/2408>`_).
+     * Bugfix: Fix time interpolation of monthly climatology data between December 15 and January 15 (`#2412 <https://github.com/dtcenter/MET/issues/2412>`_).
+     * Bugfix: Fix ASCII2NC to handle missing NDBC buoy location information (`#2426 <https://github.com/dtcenter/MET/issues/2426>`_).
+     * Bugfix: Fix the MET vx_pointdata_python library to handle MET_PYTHON_EXE for python embedding of point observations (`#2428 <https://github.com/dtcenter/MET/issues/2428>`_).
+     * Bugfix: Refine the regrid dictionary's data conversion and censoring operations and fix climo time matching logic for a single monthly climo file (`#2437 <https://github.com/dtcenter/MET/issues/2437>`_).
+     * Bugfix: Fix the creation of the MET User's Guide PDF (`#2449 <https://github.com/dtcenter/MET/issues/2449>`_).
+     * Bugfix: Fix inconsistent ASCII2NC AIRNOW location lookup logic (`#2452 <https://github.com/dtcenter/MET/issues/2452>`_).
+
 MET Version 11.0.0 release notes (20221209)
 -------------------------------------------
@@ -26,8 +64,6 @@ MET Version 11.0.0 release notes (20221209)
     * Fix GHA documentation workflow (`#2282 <https://github.com/dtcenter/MET/issues/2282>`_).
     * Fix GHA warnings and update the version of actions (i.e. actions/checkout@v3) (`#2297 <https://github.com/dtcenter/MET/issues/2297>`_).
 
-
-
   .. dropdown:: Documentation
 
     * Create outline for the MET Contributor's Guide (`#1774 <https://github.com/dtcenter/MET/issues/1774>`_).
@@ -143,4 +179,14 @@ MET Version 11.0.0 release notes (20221209)
 MET Upgrade Instructions
 ========================
 
+MET Version 11.1.0 upgrade instructions
+---------------------------------------
+
+* If compiling support for PYTHON (:numref:`compiling_python_support`), in addition to $MET_PYTHON_CC and $MET_PYTHON_LD, set **$MET_PYTHON_BIN_EXE** to specify the desired python executable to be used, as sketched below (`#2428 <https://github.com/dtcenter/MET/issues/2428>`_).
+
+* If running TC-Pairs to generate consensus tracks, update your TC-Pairs configuration file to include the new **write_members** option (`#2429 <https://github.com/dtcenter/MET/issues/2429>`_).
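+
+For example, a typical setting might look like the following (a sketch only;
+the installation prefix is hypothetical and system-specific):
+
+.. code-block:: none
+
+  export MET_PYTHON=/usr/local/python3
+  export MET_PYTHON_BIN_EXE=${MET_PYTHON}/bin/python3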
+
+MET Version 11.0.0 upgrade instructions
+---------------------------------------
+
 * Ensemble post-processing has been fully removed from Ensemble-Stat in version 11.0.0. It can be performed using the Gen-Ens-Prod tool.
diff --git a/docs/conf.py b/docs/conf.py
index f24f32c6c0..fb7f5b4268 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -19,12 +19,12 @@
 project = 'MET'
 author = 'UCAR/NCAR, NOAA, CSU/CIRA, and CU/CIRES'
-author_list = 'Opatz, J., T. Jensen, J. Prestopnik, H. Soh, L. Goodrich, B. Brown, R. Bullock, J. Halley Gotway, K. Newman'
-version = '11.1.0'
+author_list = 'Jensen, T., J. Prestopnik, H. Soh, L. Goodrich, B. Brown, R. Bullock, J. Halley Gotway, K. Newman, J. Opatz'
+version = '11.1.0-beta1'
 verinfo = version
 release = f'{version}'
-release_year = '2022'
-release_date = f'{release_year}-12-09'
+release_year = '2023'
+release_date = f'{release_year}-02-28'
 copyright = f'{release_year}, {author}'
 
 # -- General configuration ---------------------------------------------------
@@ -32,8 +32,7 @@
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
 # ones.
-# Adding 'sphinx_panels' to use drop-down menus in appendixA.
-extensions = ['sphinx.ext.autodoc','sphinx.ext.intersphinx','sphinx_panels','sphinx_design',]
+extensions = ['sphinx.ext.autodoc','sphinx.ext.intersphinx','sphinx_design',]
 
 # settings for ReadTheDocs PDF creation
 latex_engine = 'pdflatex'
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 80ac5cb61d..b0b0957e2a 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,4 +1,3 @@
 sphinx-gallery
 sphinxcontrib-bibtex
-sphinx-panels
 sphinx-design
diff --git a/internal/scripts/docker/Dockerfile b/internal/scripts/docker/Dockerfile
index 8ec46e33db..e7ba6fa10d 100644
--- a/internal/scripts/docker/Dockerfile
+++ b/internal/scripts/docker/Dockerfile
@@ -1,5 +1,5 @@
 ARG MET_BASE_REPO=met-base
-ARG MET_BASE_TAG=v1.1
+ARG MET_BASE_TAG=v2.0_debian10
 
 FROM dtcenter/${MET_BASE_REPO}:${MET_BASE_TAG}
 MAINTAINER John Halley Gotway
diff --git a/internal/scripts/docker/Dockerfile.copy b/internal/scripts/docker/Dockerfile.copy
index 7d0fc6ae6f..d4456f87ff 100644
--- a/internal/scripts/docker/Dockerfile.copy
+++ b/internal/scripts/docker/Dockerfile.copy
@@ -1,5 +1,5 @@
 ARG MET_BASE_REPO=met-base-unit-test
-ARG MET_BASE_TAG=v1.1
+ARG MET_BASE_TAG=v2.0_debian10
 
 FROM dtcenter/${MET_BASE_REPO}:${MET_BASE_TAG}
 MAINTAINER John Halley Gotway
diff --git a/internal/scripts/installation/config/install_met_env.acorn_py3.10 b/internal/scripts/installation/config/install_met_env.acorn_py3.10
index b97699a187..9e725f6fac 100644
--- a/internal/scripts/installation/config/install_met_env.acorn_py3.10
+++ b/internal/scripts/installation/config/install_met_env.acorn_py3.10
@@ -12,7 +12,6 @@ module load g2c/1.6.4
 export TEST_BASE=/apps/sw_review/emc/MET/11.0.1
 export LIB_DIR=${TEST_BASE}/external_libs
-export BIN_DIR_PATH=${TEST_BASE}/exec
 export COMPILER=intel_19.1.3.304
 export MET_SUBDIR=${TEST_BASE}
 export MET_TARBALL=v11.0.1.tar.gz
@@ -20,8 +19,9 @@ export USE_MODULES=TRUE
 export ADDTL_DIR=/apps/spack/gettext/0.21/intel/19.1.3.304/at2kdo4edvuhyzrt5g6zhwrdb7bdui4s/lib64
 export PYTHON_MODULE=python_3.10.4
 export MET_PYTHON=/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp
+export MET_PYTHON_LIB=/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/lib64
 export MET_PYTHON_CC=-I/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/include/python3.10
-export MET_PYTHON_LD=-L/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/lib/python3.10/config-3.10-x86_64-linux-gnu/\ -L/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/lib64\ -lpython3.10\ -lintl\ -lcrypt\ -ldl\ -lutil\ -lm\ -lm
+export MET_PYTHON_LD=-L/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/lib64\ -lpython3.10\ -lintl\ -lcrypt\ -ldl\ -lutil\ -lm\ -lm
 export MET_NETCDF=/apps/prod/hpc-stack/intel-19.1.3.304/netcdf/4.7.4
 export MET_HDF5=/apps/prod/hpc-stack/intel-19.1.3.304/hdf5/1.12.2
 export MET_BUFRLIB=/apps/ops/prod/libs/intel/19.1.3.304/bufr/11.5.0/lib64
diff --git a/internal/scripts/installation/config/install_met_env.generic b/internal/scripts/installation/config/install_met_env.generic
index 5b73adbb09..f2ef8ccc3e 100644
--- a/internal/scripts/installation/config/install_met_env.generic
+++ b/internal/scripts/installation/config/install_met_env.generic
@@ -28,7 +28,7 @@ export USE_MODULES=FALSE
 export PYTHON_LOC="$(python3-config --prefix)"
 
 #Directory of your python executable
-export MET_PYTHON=${PYTHON_LOC}/bin
+export MET_PYTHON=${PYTHON_LOC}
 
 #Python ldflags created using python3-config
 export MET_PYTHON_LD="$(python3-config --ldflags)"
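# A quick sanity check of the two python3-config calls used in the generic
# config above (illustrative; the printed paths vary by system) is to run them
# directly from a shell before starting the installation:
#   python3-config --prefix
#   python3-config --ldflags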
MET_PYTHON_LD="$(python3-config --ldflags)" diff --git a/internal/scripts/installation/config/install_met_env.wcoss2_py3.10 b/internal/scripts/installation/config/install_met_env.wcoss2_py3.10 index c6d550c3a3..e8c83f6ca5 100644 --- a/internal/scripts/installation/config/install_met_env.wcoss2_py3.10 +++ b/internal/scripts/installation/config/install_met_env.wcoss2_py3.10 @@ -25,8 +25,9 @@ export USE_MODULES=TRUE export ADDTL_DIR=/apps/spack/gettext/0.21/intel/19.1.3.304/at2kdo4edvuhyzrt5g6zhwrdb7bdui4s/lib64 export PYTHON_MODULE=python_3.10.4 export MET_PYTHON=/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp +export MET_PYTHON_LIB=/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/lib64 export MET_PYTHON_CC=-I/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/include/python3.10 -export MET_PYTHON_LD=-L/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/lib\ -lpython3.10\ -lpthread\ -ldl\ -lutil\ -lm\ -Xlinker\ -export-dynamic +export MET_PYTHON_LD=-L/apps/spack/python/3.10.4/intel/19.1.3.304/xqft4d45h4dp4xnbz2ue3nbxv65i6bgp/lib64\ -lpython3.10\ -lcrypt\ -lintl\ -ldl\ -lutil\ -lm\ -lm export MET_NETCDF=/apps/prod/hpc-stack/intel-19.1.3.304/netcdf/4.7.4 # JY export MET_HDF5=/apps/prod/hpc-stack/intel-19.1.3.304/hdf5/1.12.2 export MET_HDF5=${HDF5_ROOT} diff --git a/internal/scripts/sonarqube/python.sonar-project.properties b/internal/scripts/sonarqube/python.sonar-project.properties index 497cf3f623..7f25756321 100644 --- a/internal/scripts/sonarqube/python.sonar-project.properties +++ b/internal/scripts/sonarqube/python.sonar-project.properties @@ -1,4 +1,4 @@ -sonar.projectKey=org.sonarqube:MET_python_NB +sonar.projectKey=MET_python_NB sonar.projectName=MET python Nightly Build sonar.projectVersion=1.0 @@ -13,5 +13,5 @@ sonar.sourceEncoding=UTF-8 #sonar.host.url=http://localhost:9000 sonar.host.url=http://mandan:9000 -sonar.login=met -sonar.password=met@sonar.ucar +sonar.login=squ_e965ad7d3c3abed1326573f25262212dc969798b +sonar.branch.name=develop diff --git a/internal/scripts/sonarqube/sonar-project.properties b/internal/scripts/sonarqube/sonar-project.properties index 1cc9db2476..1760c20f68 100644 --- a/internal/scripts/sonarqube/sonar-project.properties +++ b/internal/scripts/sonarqube/sonar-project.properties @@ -1,4 +1,4 @@ -sonar.projectKey=org.sonarqube:MET_develop_NB +sonar.projectKey=MET_develop_NB sonar.projectName=MET Nightly Build sonar.projectVersion=1.0 @@ -14,6 +14,5 @@ sonar.sourceEncoding=UTF-8 #sonar.host.url=http://localhost:9000 sonar.host.url=http://mandan:9000 -sonar.login=met -sonar.password=met@sonar.ucar +sonar.login=squ_e965ad7d3c3abed1326573f25262212dc969798b sonar.branch.name=develop diff --git a/scripts/utility/Makefile.am b/scripts/utility/Makefile.am index 6e22a33e2b..d807a69977 100644 --- a/scripts/utility/Makefile.am +++ b/scripts/utility/Makefile.am @@ -26,7 +26,8 @@ pythonutilitydir = $(pkgdatadir)/utility pythonutility_DATA = \ - print_pointnc2ascii.py + print_pointnc2ascii.py \ + build_ndbc_stations_from_web.py EXTRA_DIST = ${pythonutility_DATA} diff --git a/scripts/utility/Makefile.in b/scripts/utility/Makefile.in index a515a31201..bdaec7b3f9 100644 --- a/scripts/utility/Makefile.in +++ b/scripts/utility/Makefile.in @@ -298,7 +298,8 @@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ pythonutilitydir = $(pkgdatadir)/utility pythonutility_DATA = \ - print_pointnc2ascii.py + print_pointnc2ascii.py \ + build_ndbc_stations_from_web.py EXTRA_DIST = 
diff --git a/scripts/utility/build_ndbc_stations_from_web.py b/scripts/utility/build_ndbc_stations_from_web.py
new file mode 100755
index 0000000000..cc20b6d02d
--- /dev/null
+++ b/scripts/utility/build_ndbc_stations_from_web.py
@@ -0,0 +1,634 @@
+#!/usr/bin/env python3
+
+'''
+Created on February 17, 2023
+
+@author: davealbo
+
+The script reads NDBC station information from two NOAA websites and merges the contents into one local list.
+The list contains latitude, longitude and elevation data for all known stations.
+The local list can be read by ascii2nc for processing of NDBC data inputs.
+Algorithm:
+   Read the current default ndbc_stations.xml file and create a list of default station information objects.
+   Pull down the active station xml file from the web and create a list of active station information objects.
+   Write the list to an active stations text file.
+   Pull down the complete index list from the web.
+   For each file referred to in the complete index list contents:
+      pull down that station's web page data and append to a list of complete station information objects.
+   Write the list of complete station info objects to a text file.
+   Save all the individual web page data that was pulled down into a subdirectory.
+
+   Compare the complete station information to the default station information objects.
+   If a station is on the complete list but not on the default list, add it to the default list.
+   If a station is on both lists but has different location info, change the location info to that of the complete list
+   (unless the complete list has no meaningful lat/lon information, typically 0,0).
+
+   Compare the augmented default list to the active stations list.
+   If a station is on the active list but not on the default list, add it to the default list.
+   If a station is on both lists but has different location info, keep the default list values
+   (unless the default has no meaningful lat/lon information, typically 0,0; then change to the active).
+
+   Log warnings about discrepancies.
+   Keep counts of everything.
+
+   Write the final default list as a new output.
+
+   Optionally prune the default list, removing all stations that are not active or complete.
+
+'''
+
+from optparse import OptionParser
+import os
+import shutil
+import shlex
+import sys
+import errno
+from subprocess import Popen, PIPE
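+
+# A concrete illustration of the merge rules described above (station values
+# are hypothetical): if the default file lists buoy 45001 at (0,0) and the
+# complete web list has it at (48.061,-87.793), the final list keeps the web
+# location, since (0,0) is treated as "no meaningful location".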
+# this needs to change!
+# hardwired location of current default stations file
+DEFAULT_STATIONS_FILE = "../../data/table_files/ndbc_stations.xml"
+
+# hardwired NOAA top webpage
+TOP_WEBSITE = "https://www.ndbc.noaa.gov"
+
+# hardwired website with active station xml
+ACTIVE_WEBSITE = "https://www.ndbc.noaa.gov/activestations.xml"
+
+# hardwired data subdirectory
+DATA_SUBDIR = "/ndbc_temp_data"
+
+# hardwired complete stations subdirectory
+STATIONS_SUBDIR = "/ndbc_temp_data/stations"
+
+# hardwired result of a wget of ACTIVE_WEBSITE
+ACTIVE_STATIONS_XML = "./ndbc_temp_data/activestations.xml"
+
+# hardwired website with index to a complete list of stations
+COMPLETE_INDEX_WEBSITE = "https://www.ndbc.noaa.gov/to_station.shtml"
+
+# hardwired result of a wget of COMPLETE_INDEX_WEBSITE
+COMPLETE_STATIONS_INDEX_INFO = "./ndbc_temp_data/to_station.shtml"
+
+# hardwired name of optionally saved active stations
+ACTIVE_TEXT_FILE = "./ndbc_temp_data/active.txt"
+
+# hardwired name of optionally saved complete stations
+COMPLETE_TEXT_FILE = "./ndbc_temp_data/complete.txt"
+
+# default output file name
+DEFAULT_OUTPUT_FILE = "merged.txt"
+
+MISSING = -99.9
+
+def usage():
+  print(f'Usage: BuildNdbcStationsFromWeb.py <--diagnostic> <--out=out_filename> <--prune>')
+  print(f'  -d/--diagnostic: special mode to rerun using already downloaded files, skips all downloading if True (downloaded files are in .{DATA_SUBDIR})')
+  print(f'  -o/--out=out_filename: save final text into the named file (default: file name is {DEFAULT_OUTPUT_FILE})')
+  print(f'  -p/--prune: delete all stations from the local ndbc_stations file that are no longer online')
+  print(f'  Note: <> indicates optional arguments')
+
+#----------------------------------------------
+def create_parser_options(parser):
+  parser.add_option("-d", "--diagnostic", dest="diagnostic", action="store_true", default=False, help="Rerun using downloaded files, skipping download step (optional, default: False)")
+  parser.add_option("-p", "--prune", dest="prune", action="store_true", default=False, help="Prune files that are no longer online (optional, default: False)")
+  parser.add_option("-o", "--out", dest="out_file",
+                    default=DEFAULT_OUTPUT_FILE, help="Save the text into the named file (default: " + DEFAULT_OUTPUT_FILE + ")")
+  parser.add_option("-H", "--Help", dest="full_usage", action="store_true", default=False, help="show more usage information (optional, default: False)")
+  return parser.parse_args()
+
+#----------------------------------------------
+class Station:
+  def __init__(self, name = "", idvalue="", lat=MISSING, lon=MISSING, elev=MISSING):
+    self._name = name
+    self._id = idvalue
+    self._lat = lat
+    self._lon = lon
+    self._elev = elev
+
+  def empty(self):
+    return self._id == ""
+
+  def textForLookups(self):
+    # render this station as one <station .../> element, the format used in
+    # ndbc_stations.xml (the element template literals here were stripped in
+    # extraction; this reconstruction follows the stations file format)
+    if self._elev == MISSING:
+      txt = '<station id="{a}" lat="{b}" lon="{c}"/>'.format(a=self._id,b=self._lat,c=self._lon)
+    else:
+      txt = '<station id="{a}" lat="{b}" lon="{c}" elev="{d}"/>'.format(a=self._id,b=self._lat,c=self._lon,d=self._elev)
+    return txt
+
+  def location_match(self, other):
+    if self.empty() or other.empty():
+      # this method is used to print mismatches, so don't print mismatches to empty stations
+      return True
+    return self._lat == other._lat and self._lon == other._lon and self._elev == other._elev
+
+  def location_string(self):
+    if self._elev == MISSING:
+      txt = '{a}({b},{c})'.format(a=self._name,b=self._lat,c=self._lon)
+    else:
+      txt = '{a}({b},{c},{d})'.format(a=self._name,b=self._lat,c=self._lon,d=self._elev)
+    return txt
+
+  def equals(self, other):
+    return self._id == other._id and self._lat == other._lat and self._lon == other._lon and self._elev == other._elev
+
+  def setName(self, name):
+    self._name = name
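+
+# Example (hypothetical values) of how a Station renders for the lookup file:
+#   Station("Final", "45001", 48.061, -87.793).textForLookups()
+#   returns '<station id="45001" lat="48.061" lon="-87.793"/>'
+# (elevation is included as elev="..." only when it is not MISSING)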
+
+#----------------------------------------------
+def replaceLatLonIfGood(header, name, stations, station):
+  if station._lat == 0 and station._lon == 0:
+    #print(header, ",No replacement using:", station.textForLookups())
+    return False
+  for n in range(len(stations)):
+    if stations[n]._id == station._id:
+      print(header, "Replacing: ", stations[n].textForLookups(), " with ", station.textForLookups())
+      s = station
+      s.setName(name)
+      stations[n] = station
+      return True
+  print("Warning:", header, "No match for replacement of station ", station._id)
+  return False
+
+#----------------------------------------------
+def replaceLatLonIfListIsBad(header, name, stations, station):
+  if station._lat == 0 and station._lon == 0:
+    #print(header, ",No replacement using:", station.textForLookups())
+    return False
+  for n in range(len(stations)):
+    if stations[n]._id == station._id:
+      if stations[n]._lat == 0 and stations[n]._lon == 0:
+        print(header, "Replacing: ", stations[n].textForLookups(), " with ", station.textForLookups())
+        s = station
+        s.setName(name)
+        stations[n] = station
+        return True
+      else:
+        return False
+
+  print("Warning:", header, "No match for replacement of station ", station._id)
+  return False
+
+#----------------------------------------------
+def matchingId(id, stations):
+  for station in stations:
+    if station._id == id:
+      return station
+  return Station()
+
+#----------------------------------------------
+def doCmd(cmd, debug=False):
+
+  #print(cmd)
+  my_env = os.environ.copy()
+  args = shlex.split(cmd)
+  proc = Popen(args, stdout=PIPE, stderr=PIPE, env=my_env)
+  out, err = proc.communicate()
+  exitcode = proc.returncode
+  if exitcode == 0:
+    return str(out)
+  else:
+    if debug:
+      print("Command failed ", cmd)
+    return ""
+
+#----------------------------------------------------------------------------
+def makeOrScrub(path, debug=False):
+  if (debug):
+    print("Recreating path " + path)
+  if (os.path.exists(path)):
+    try:
+      shutil.rmtree(path)
+      os.makedirs(path)
+    except:
+      print('WARNING: ' + path + ' not completely cleaned out.')
+  else:
+    os.makedirs(path)
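+
+# doCmd() runs all of the shell work below, e.g. (illustrative):
+#   doCmd('wget https://www.ndbc.noaa.gov/activestations.xml', True)
+# It returns the command's stdout as a string, or "" on failure.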
+ print("BUILT ACTIVE STATION FILES: num=", len(active_stations)) + + # read the complete stations html, find all the individual stations web links, + # pull each stations data, parse that downloaded station content to create a list + if diagnostic: + complete_stations = parse("Complete", COMPLETE_TEXT_FILE) + print("PARSED COMPLETE STATIONS FILES: num=", len(complete_stations)) + else: + complete_stations = processComplete("Complete") + print("BUILT COMPLETE STATIONS FILES: num=", len(complete_stations)) + + # see which ids are not in complete from active, and which have different lat/lons + # note the one used if that happens is always the active one at this point + numNew = 0 + numNewComplete = 0 + numNewActive = 0 + numConflict = 0 + numConflictChanged = 0 + numComplete = 0 + numActive = 0 + numCompleteNotActive = 0 + numActiveNotComplete = 0 + + # compare complete stations to default stations + for complete in complete_stations: + numComplete = numComplete + 1 + id = complete._id + default = matchingId(id, default_stations) + active = matchingId(id, active_stations) + if active.empty(): + numCompleteNotActive = numCompleteNotActive + 1 + if default.empty(): + # station is on the complete list but not on the default list, add it + f = complete + f.setName("Final") + final_stations.append(f) + numNew = numNew+1 + numNewComplete = numNewComplete + 1 + else: + # compare complete and default + if not complete.location_match(default): + numConflict = numConflict + 1 + if replaceLatLonIfGood("Complete to Final", "Final", final_stations, complete): + numConflictChanged = numConflictChanged + 1 + + # compare active stations to final stations + for active in active_stations: + numActive = numActive + 1 + id = active._id + final = matchingId(id, final_stations) + complete = matchingId(id, complete_stations) + if complete.empty(): + numActiveNotComplete = numActiveNotComplete +1 + if final.empty(): + # station is on the active list but not on the final list, add it + a = active + a.setName("Final") + final_stations.append(a) + numNew = numNew+1 + numNewActive = numNewActive + 1 + else: + # compare complete and default + if not final.location_match(active): + numConflict = numConflict + 1 + if replaceLatLonIfListIsBad("Active to Final", "Final", final_stations, active): + numConflictChanged = numConflictChanged + 1 + + # see which id's have vanished from the current default list, to be used when prune is true + numVanished = 0 + purgeIds = [] + print("Comparing current default stations to final list") + for default in default_stations: + id = default._id + active = matchingId(id, active_stations) + complete = matchingId(id, complete_stations) + if active.empty() and complete.empty(): + #print("Station in the local table file but no longer on the webpages:", id) + numVanished = numVanished+1 + purgeIds.append(id) + + for f in final_stations: + id = f._id + default = matchingId(id, default_stations) + if default.empty(): + #print("New station on web not in local table file:", id) + numNew = numNew+1 + + #now write out the full meal deal by creating a string list + nout = 0 + nprune = 0 + txtAll = [] + for f in final_stations: + if prune and f.IdOnList(purgeIds): + print("Pruning station: ", f._id, " No longer on line") + nprune = nprune + 1 + else: + txt = f.textForLookups() + txtAll.append(txt) + nout = nout + 1 + + # sort for ease of use + txtAll.sort() + fout = open(out_file, "w") + for txt in txtAll: + fout.write(txt+"\n") + fout.close() + + print("Num complete: ", numComplete) + print("Num 
active: ", numActive) + print("Num default: ", numDefault) + print("Num final: ", nout) + print("Num pruned: ", nprune) + print("Num vanished: ", numVanished) + print("Num new complete: ", numNewComplete) + print("Num new active: ", numNewActive) + print("Num new total: ", numNew) + print("Num conflict no change: ", numConflict) + print("Num conflict with change:", numConflictChanged) + print("Num active not complete: ", numActiveNotComplete) + print("Num complete not active: ", numCompleteNotActive) + + return 0 + +#---------------------------------------------------- +def processComplete(name): + ''' + read the complete stations html, find all the individual stations web links, + pull each stations data, parse that downloaded station content to create a list + ''' + + # initialize return to empty + stations = [] + + # create the output location, which should be ./ndbc_temp_data/stations + cwd = os.getcwd() + outLoc = cwd + STATIONS_SUBDIR + if not makeDirIfNeeded(outLoc): + print("ERROR creating storage for individual station files ", outLoc) + return stations + + + # Open the file with the list of php pages online (or local files pulled down) + with open(COMPLETE_STATIONS_INDEX_INFO, 'r') as file: + data = file.read().replace('\n', '') + file.close() + + # start at the beginning + index = 0 + txtAll = [] + while index < len(data): + # pull down another stations info if you can, and parse it + [index, station] = createNextStationInfo(name, data, index) + if index == -1: + break + if not station.empty(): + # form a string and append that plus all the individual stuff to lists + txt = station.textForLookups() + txtAll.append(txt) + stations.append(station) + + # keep the subdirectory of individual stations information + # sort the list for ease of use, then write it + txtAll.sort() + fout = open(COMPLETE_TEXT_FILE, "w") + for txt in txtAll: + fout.write(txt+"\n") + fout.close() + return stations + +#---------------------------------------------- +def createNextStationInfo(name, data, i): + + s = Station() + + #data has entries like this: 45001 + #on entry i points to the starting location within data to start looking + index = data.find('href="station_page.php?', i) + if index == -1: + return [-1, s] + + # the stuff beyond 'href="' is the file name that you get via wget, followed by another '"' + index2 = index + 6 # point to 'station_page' + index3 = data.find('">', index2) # point to " at end (which is followed by >) + + index = index3 + 3 # set index for return to beyond this + + # what to go for online: + ref = TOP_WEBSITE + '/' + data[index2:index3] + + # name of returned file + filename = data[index2:index3] + + # temporarily change to the correct subdirectory + cwd = os.getcwd() + os.chdir(cwd + STATIONS_SUBDIR) + # go get it + cmd = 'wget "' + ref + '"' + print(cmd) + s = doCmd(cmd, True) + # move back + os.chdir(cwd) + if not s: + # note try to keep going forward as index has been updated + print("ERROR data not online: ", ref) + return [index, s] + + # parse the file and return the information, including the next index + return parseStationInfo(name, cwd + STATIONS_SUBDIR + "/" + filename, index) + +#---------------------------------------------------------------------------- +def makeDirIfNeeded(path, debug=False): + if (debug): + print("Making directory if needed " + path) + + try: + os.makedirs(path) + return True + except OSError as exception: + if exception.errno != errno.EEXIST: + print("ERROR creating", path) + return False + else: + return True + 
+
+#----------------------------------------------------------------------------
+def parseStationInfo(name, fname, index):
+
+  s = Station()
+
+  # the file is assumed already downloaded
+  # initialize station values
+  station = setStationId(fname)
+  if not station:
+    return [index, s]
+  elev = setElev(fname)
+  lat = setLat(fname)
+  lon = setLon(fname)
+  s = Station(name, station, lat, lon, elev)
+  return [index, s]
+
+#----------------------------------------------
+def setStationId(fname):
+  stationId = ""
+  cmd = 'grep "var currentstnid" ' + fname
+  s = doCmd(cmd, True)
+  if s:
+    index6 = s.find("'", 0)
+    index7 = s.find("'", index6+1)
+    stationId = s[index6+1:index7]
+  return stationId
+
+#----------------------------------------------
+def setElev(fname):
+  elev = MISSING
+  cmd = 'grep "Site elev" ' + fname
+  #print(cmd)
+  s = doCmd(cmd)
+  if s:
+    if "m above mean sea level" in s:
+      # scan to <b>  (the '<b>' literals here were stripped in extraction;
+      # this reconstruction follows the surviving offsets)
+      index6 = s.find("<b>")
+      index7 = s.find("m above")
+      elev = float(s[index6+4:index7])
+    elif "</b> sea level" in s:
+      [The remainder of setElev(), and the setLat(), setLon(), parse(), and
+       processActive() helpers that main() calls, are not recoverable: their
+       angle-bracketed string literals (e.g. '<b>', '</b>', '<station') were
+       stripped during extraction, taking the surrounding lines with them.
+       Surviving fragments show parse() scanning the saved text for
+       '<station ... />' entries and printing "UNexpected lack of />" when an
+       entry is malformed.]

[The same stripped span also swallowed the small diffs to
src/libcode/vx_data2d_nc_pinterp/var_info_nc_pinterp.cc and
src/libcode/vx_data2d_nccf/var_info_nccf.cc (8 changed lines each, per the
diffstat above), plus the header and opening hunk of the following diff, whose
surviving fragment ends inside "while (ascii_file >> data_line) {".]

diff --git a/src/tools/other/ascii2nc/aeronet_handler.cc b/src/tools/other/ascii2nc/aeronet_handler.cc
@@ -293,6 +330,8 @@ bool AeronetHandler::_readObservations(LineDataFile &ascii_file)
   // Make sure that the line contains the correct number of tokens
   //
 
+  if (!ready_to_process) break;
+
   if (data_line.n_items() != column_cnt) {
 
     bad_line_count++;
@@ -324,51 +363,47 @@ bool AeronetHandler::_readObservations(LineDataFile &ascii_file)
 
     if (first_line) {
       if (format_version == 3) {
-        // Get the stationId
-        if (elv_idx < 0) {
-          mlog << Warning << "\n" << method_name << "Can not find header column \""
-               << elv_col2 << "\". from " << ascii_file.filename() << "\".\n\n";
-          break;
-        }
-        else if ((lat_idx < 0) || (lon_idx < 0)) {
-          string field_name = (lat_idx < 0) ? lat_col2 : lon_col2;
-          mlog << Error << "\n" << method_name << "Can not find header column \""
-               << field_name << "\". Skip the input \"" << ascii_file.filename()
+        // Check the stationId
+        if (sid_idx >= 0 && _stationId != data_line[sid_idx] && _stationId != SITE_MISSING) {
+          mlog << Error << "\n" << method_name
+               << "The header and data columns don't match."
+               << " The station ID from data column (" << data_line[sid_idx] << ") at " << sid_idx
+               << " is different from " << _stationId
+               << ". Skip this input \"" << ascii_file.filename()
                << "\"\n\n";
           break;
         }
-        else {
-          if (sid_idx < 0) {
-            mlog << Warning << "\n" << method_name << "Can not find header column \""
-                 << site_name_col << "\" from the input \"" << ascii_file.filename()
-                 << "\"\n\n";
-          }
-          else if (_stationId != data_line[sid_idx] && _stationId != SITE_MISSING) {
-            mlog << Error << "\n" << method_name << "The header and data columns don't match."
-                 << " The station ID from data column (" << data_line[sid_idx] << ") at " << sid_idx
-                 << " is different from " << _stationId
-                 << ". Skip this input \"" << ascii_file.filename()
-                 << "\"\n\n";
-            break;
-          }
-        }
+      }
+      first_line = false;
+    }
+
+    // Get the stationId for version 3
+    cur_sid = (sid_idx < 0) ? _stationId : data_line[sid_idx];
+    if (cur_sid.compare(prev_sid) != 0) {
+      if (has_lat_lon_idx) {
         // Get the stationLat
         _stationLat = atof(data_line[lat_idx]);
 
         // Get the stationLon
         _stationLon = atof(data_line[lon_idx]);
 
         // Get the stationAlt
-        if (elv_idx >= 0) _stationAlt = atof(data_line[elv_idx]);
-        else _stationAlt = bad_data_float;
-
-        mlog << Debug(7) << "\n" << method_name << "stationID: "
-             << ((sid_idx < 0) ? _stationId : data_line[sid_idx]) << " from index " << sid_idx
+        _stationAlt = (elv_idx >= 0)
+                      ? atof(data_line[elv_idx]) : bad_data_float;
+        mlog << Debug(7) << "\n" << method_name
+             << "stationID: " << cur_sid << " from index " << sid_idx
              << " lat: " << _stationLat << " lon: " << _stationLon
              << " elv: " << _stationAlt << " from index " << elv_idx << "\n";
       }
-      first_line = false;
+      else {
+        mlog << Warning << "\n" << method_name
+             << "stationID is changed (" << prev_sid << " to " << cur_sid
+             << "). But lat/lon/elv are not available."
+             << " Stop processing \"" << ascii_file.filename() << "\".\n\n";
+        break;
+      }
+      prev_sid = cur_sid;
     }
 
     //
     // Pull the valid time from the data line
     //
@@ -421,7 +456,7 @@ bool AeronetHandler::_readObservations(LineDataFile &ascii_file)
       }
 
       _addObservations(Observation(header_type,
-                                   (sid_idx<0 ? _stationId : data_line[sid_idx]),
+                                   cur_sid,
                                    valid_time, _stationLat, _stationLon, _stationAlt,
@@ -439,7 +474,7 @@ bool AeronetHandler::_readObservations(LineDataFile &ascii_file)
       double aod_at_550 = angstrom_power_interplation(aod_at_675,aod_at_440,675.,440.,dheight);
       if (!is_eq(aod_at_550, bad_data_double)) {
         _addObservations(Observation(header_type,
-                                     (sid_idx<0 ? _stationId : data_line[sid_idx]),
+                                     cur_sid,
                                      valid_time, _stationLat, _stationLon, _stationAlt,
                                      na_str, var_id, bad_data_double, dheight, aod_at_550,
diff --git a/src/tools/other/ascii2nc/ndbc_handler.cc b/src/tools/other/ascii2nc/ndbc_handler.cc
index 6c4291cf92..9ec04255a2 100644
--- a/src/tools/other/ascii2nc/ndbc_handler.cc
+++ b/src/tools/other/ascii2nc/ndbc_handler.cc
@@ -87,11 +87,11 @@ NdbcHandler::NdbcHandler(const string &program_name) :
   // read in and parse the locations file
   if (!locations.initialize(locationsFileName)) {
-    mlog << Error << "\ncould not initialize station loations file\n\n";
+    mlog << Error << "\nCannot initialize NDBC station locations file: "
+         << locationsFileName << "\n\n";
     exit(1);
   }
-  //locations.print();
-
+
   //
   // store column info for all the data columns (column names)
   // NOTE these will be used as index values in the observations
@@ -215,13 +215,14 @@ bool NdbcHandler::_readObservations(LineDataFile &ascii_file)
 ////////////////////////////////////////////////////////////////////////
 
 bool NdbcHandler::_parseObservationLineStandard(DataLine &data_line,
-                                                const string &filename)
+                                                const string &filename)
 {
   string method_name = "NdbcHandler::_parseObservationLineStandard() ";
 
   if (format_version != NDBC_FORMAT_VERSION_STANDARD) {
-    mlog << Error << "\n" << method_name << "->"
-         << "Standard NDBC format is the only supported format\n\n";
+    mlog << Warning << "\n" << method_name << "->"
+         << "Standard NDBC format is the only supported format: "
+         << filename << "\n\n";
     return false;
   }
@@ -229,11 +230,11 @@ bool NdbcHandler::_parseObservationLineStandard(DataLine &data_line,
   // Make sure that the line contains the correct number of tokens
   //
   if (data_line.n_items() != NUM_COLS_STANDARD) {
-    mlog << Error << "\n" << method_name << "-> "
-         << "line number " << data_line.line_number()
-         << " does not have the correct number of columns " << data_line.n_items()
-         << " (" << NUM_COLS_STANDARD << "). Skipping this line in \""
-         << filename << "\".\n\n";
+    mlog << Warning << "\n" << method_name << "-> "
+         << "Skipping line number " << data_line.line_number()
+         << " with an unexpected number of columns ("
+         << data_line.n_items() << " != " << NUM_COLS_STANDARD << "): "
+         << filename << "\n\n";
     return false;
   }
 
@@ -242,10 +243,10 @@ bool NdbcHandler::_parseObservationLineStandard(DataLine &data_line,
   //
   time_t valid_time = _getValidTime(data_line);
   if (valid_time == 0) {
-    mlog << Error << "\n" << method_name << "-> "
-         << "line number " << data_line.line_number()
-         << " time could not be parsed, skipping this line in \""
-         << filename << "\".\n\n";
+    mlog << Warning << "\n" << method_name << "-> "
+         << "Skipping line number " << data_line.line_number()
+         << " whose valid time cannot be parsed: "
+         << filename << "\n\n";
     return false;
   }
 
@@ -263,9 +264,9 @@ bool NdbcHandler::_parseObservationLineStandard(DataLine &data_line,
       name = column[i].name;
       grib_code = i; // it's not actually grib code, its obs_vid, according to howard
       _addObservations(Observation(header_type, stationId, valid_time,
-				   stationLat, stationLon, stationAlt,
-				   quality_flag, grib_code, pressure_level_hpa,
-				   height_m, value, name));
+                                   stationLat, stationLon, stationAlt,
+                                   quality_flag, grib_code, pressure_level_hpa,
+                                   height_m, value, name));
   }
   return true;
 }
@@ -287,15 +288,15 @@ bool NdbcHandler::_setStationInfo(const string &filename)
   // expect <stationId>.txt as the name
   i0 = fname.find(".txt");
   if (i0 == string::npos) {
-    mlog << Error << "\n" << "expect file name of format '<stationId>.txt'\n"
-         << "Got " << fname << "\n\n";
+    mlog << Warning << "\n" << "NDBC file name does not follow the "
+         << "expected '<stationId>.txt' format: " << fname << "\n\n";
     return false;
   }
   stationId = fname.substr(0, i0);
   if (!locations.lookupLatLonElev(stationId, stationLat, stationLon,
-				  stationAlt)) {
-    mlog << Error << "\n" << "No location information found for station "
-         << stationId << " do not process file " << filename << "\n\n";
+                                  stationAlt)) {
+    mlog << Warning << "\n" << "NDBC station " << stationId
+         << " location information not found: " << filename << "\n\n";
     return false;
   }
   return true;
@@ -323,8 +324,8 @@ bool NdbcHandler::_determineFileType(LineDataFile &ascii_file)
     }
   }
   format_version = NDBC_FORMAT_VERSION_UNKNOWN;
-  mlog << Error << "\nNdbcHandler::_determineFileType -> "
-       << "Unknown file type\n\n";
+  mlog << Warning << "\nNdbcHandler::_determineFileType -> "
+       << "Unknown file type: " << ascii_file.filename() << "\n\n";
   return false;
 }
 
@@ -335,10 +336,10 @@ time_t NdbcHandler::_getValidTime(const DataLine &data_line) const
   //
   // Pull out the date information
   //
-  if (column_pointer_year < 0 || column_pointer_month < 0 || column_pointer_day < 0 ||
+  if (column_pointer_year < 0 || column_pointer_month < 0 || column_pointer_day < 0 ||
       column_pointer_hour < 0 || column_pointer_minute < 0) {
-    mlog << Error << "\nNdbcHandler::_getValidTime -> "
-         << "Not all time related column pointers are set\n\n";
+    mlog << Warning << "\nNdbcHandler::_getValidTime -> "
+         << "Not all time related column pointers are set.\n\n";
     return 0;
   }
   string year = _extractColumn(data_line, column_pointer_year);
"\nNdbcHandler::_readHeaderInfo() -> " + << "Problem reading header line from input ASCII file: " + << ascii_file.filename() << "\n\n"; return false; } @@ -401,9 +402,10 @@ bool NdbcHandler::_readHeaderInfo(LineDataFile &ascii_file) // Check for the correct number of columns in the header line // if (data_line.n_items() != NUM_COLS_STANDARD) { - mlog << Error << "\nNdbcHandler::_readHeaderInfo() -> " - << "NDBC file has incorrect number of columns (" - << data_line.n_items() << ") in header line\n\n"; + mlog << Warning << "\nNdbcHandler::_readHeaderInfo() -> " + << "Unexpected number of header columns (" << data_line.n_items() + << " != " << NUM_COLS_STANDARD << "): " + << ascii_file.filename() << "\n\n"; return false; } @@ -426,30 +428,33 @@ bool NdbcHandler::_readHeaderInfo(LineDataFile &ascii_file) } else { bool found = false; for (size_t j=0; j " - << "NDBC file has unknown header item " << s << "\n\n"; - status = false; + mlog << Warning << "\nNdbcHandler::_readHeaderInfo() -> " + << "Unexpected header column (" << s << "): " + << ascii_file.filename() << "\n\n"; + status = false; } } } - if (column_pointer_year == -1 || column_pointer_month == -1 || - column_pointer_day == -1 || column_pointer_hour == -1 || + if (column_pointer_year == -1 || column_pointer_month == -1 || + column_pointer_day == -1 || column_pointer_hour == -1 || column_pointer_minute == -1) { - mlog << Error << "\nNdbcHandler::_readHeaderInfo() -> " - << "NDBC file did not have all time fields in header \n\n"; - status = false; + mlog << Warning << "\nNdbcHandler::_readHeaderInfo() -> " + << "NDBC file did not have all time fields in header: " + << ascii_file.filename() << "\n\n"; + status = false; } for (size_t j=0; j " - << "NDBC file did not have all expected fields in header \n\n"; + mlog << Warning << "\nNdbcHandler::_readHeaderInfo() -> " + << "NDBC file did not have all expected fields in header: " + << ascii_file.filename() << "\n\n"; status = false; break; }