feature dtcenter#2253 add tests for series_analysis
John-Sharples committed Aug 1, 2023
1 parent f6a86d2 commit 7e525f4
Showing 3 changed files with 135 additions and 53 deletions.
43 changes: 43 additions & 0 deletions internal/tests/pytests/conftest.py
@@ -6,6 +6,7 @@
import getpass
import shutil
from pathlib import Path
from netCDF4 import Dataset

# add METplus directory to path so the wrappers and utilities can be found
metplus_dir = str(Path(__file__).parents[3])
@@ -113,3 +114,45 @@ def read_configs(extra_configs):
return config

return read_configs

@pytest.fixture(scope="module")
def make_dummy_nc():
return make_nc


def make_nc(tmp_path, lon, lat, z, data, variable='Temp', file_name='fake.nc'):
"""! Make a dummy netCDF file for use in tests. Populates a generic single
variable netcdf is dimension, lat, lon, z.
@param tmp_path directory to write this netCDF to.
@param lon list of longitude values.
@param lat list of latitude values.
@param z list of pressure levels.
@param data array of values with dimensions (lon, lat, z)
@param variable (optional) string name of the variable, default 'Temp'
@param file_name (optional) string name of the file, default 'fake.nc'
@returns path to netCDF file
"""
file_name = tmp_path / file_name
with Dataset(file_name, "w", format="NETCDF4") as rootgrp:
# Some tools (e.g. diff_util) can't handle groups,
# so attach dimensions and variables to the root group.
rootgrp.createDimension("lon", len(lon))
rootgrp.createDimension("lat", len(lat))
rootgrp.createDimension("z", len(z))
rootgrp.createDimension("time", None)

# create variables
longitude = rootgrp.createVariable("Longitude", "f4", "lon")
latitude = rootgrp.createVariable("Latitude", "f4", "lat")
levels = rootgrp.createVariable("Levels", "i4", "z")
temp = rootgrp.createVariable(variable, "f4", ("time", "lon", "lat", "z"))
time = rootgrp.createVariable("Time", "i4", "time")

longitude[:] = lon
latitude[:] = lat
levels[:] = z
temp[0, :, :, :] = data

return file_name
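
For context, a minimal sketch of how a test could request and call the new fixture (the test name and values below are illustrative only, not part of this commit):

def test_uses_dummy_nc(tmp_path, make_dummy_nc):
    # Build a 2 (lon) x 3 (lat) x 1 (z) dummy file and check it was written.
    path = make_dummy_nc(
        tmp_path,
        lon=[0, 1],
        lat=[10, 20, 30],
        z=[1000],
        data=[[[1.0], [2.0], [3.0]], [[4.0], [5.0], [6.0]]],
    )
    assert path.exists()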
49 changes: 11 additions & 38 deletions internal/tests/pytests/util/diff_util/test_diff_util.py
@@ -35,16 +35,20 @@
]


@pytest.fixture(scope="module")
def dummy_nc1(tmp_path_factory):
@pytest.fixture()
def dummy_nc1(tmp_path_factory, make_dummy_nc):
# Construct a temporary netCDF file
return make_nc(
return make_dummy_nc(
tmp_path_factory.mktemp("data1"),
DEFAULT_NC[0],
DEFAULT_NC[1],
DEFAULT_NC[2],
DEFAULT_NC[3],
DEFAULT_NC[4],
# Note: "nc5" is not included in NETCDF_EXTENSIONS, hence
# we use it here to specifically trigger the call to
# netCDF4.Dataset in get_file_type.
file_name="fake.nc5"
)
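
The fallback that the "nc5" extension is meant to exercise can be sketched in isolation; this is only an assumed illustration of the general pattern (probe an unrecognised extension by trying to open it with netCDF4), not the actual diff_util implementation:

# Assumed illustration only, not diff_util code.
from netCDF4 import Dataset

def _looks_like_netcdf(path):
    try:
        with Dataset(path):
            return True
    except OSError:
        return False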


@@ -55,37 +59,6 @@ def _statment_in_capfd(capfd, check_print):
assert statement in out


def make_nc(tmp_path, lon, lat, z, data, variable="Temp"):
# Make a dummy netCDF file. We can do this with a lot less
# code if xarray is available.

# Note: 'nc5' is not included in NETCDF_EXTENSIONS, hence
# we use it here to specifically trigger the call to
# netCDF.Dataset in get_file_type.
file_name = tmp_path / 'fake.nc5'
with Dataset(file_name, "w", format="NETCDF4") as rootgrp:
# diff_util can't deal with groups, so attach dimensions
# and variables to the root group.
rootgrp.createDimension("lon", len(lon))
rootgrp.createDimension("lat", len(lat))
rootgrp.createDimension("z", len(z))
rootgrp.createDimension("time", None)

# create variables
longitude = rootgrp.createVariable("Longitude", "f4", "lon")
latitude = rootgrp.createVariable("Latitude", "f4", "lat")
levels = rootgrp.createVariable("Levels", "i4", "z")
temp = rootgrp.createVariable(variable, "f4", ("time", "lon", "lat", "z"))
time = rootgrp.createVariable("Time", "i4", "time")

longitude[:] = lon
latitude[:] = lat
levels[:] = z
temp[0, :, :, :] = data

return file_name


def create_diff_files(tmp_path_factory, files_a, files_b):
dir_a = tmp_path_factory.mktemp('dir_a')
dir_b = tmp_path_factory.mktemp('dir_b')
@@ -372,10 +345,10 @@ def test_get_file_type_extensions():
)
@pytest.mark.util
def test_nc_is_equal(
capfd, tmp_path_factory, dummy_nc1, nc_data, fields, expected, check_print
capfd, tmp_path_factory, make_dummy_nc, dummy_nc1, nc_data, fields, expected, check_print
):
# make a dummy second file to compare to dummy_nc1
dummy_nc2 = make_nc(tmp_path_factory.mktemp("data2"), *nc_data)
dummy_nc2 = make_dummy_nc(tmp_path_factory.mktemp("data2"), *nc_data)
assert du.nc_is_equal(dummy_nc1, dummy_nc2, fields=fields, debug=True) == expected

if check_print:
Expand Down Expand Up @@ -404,9 +377,9 @@ def test_nc_is_equal(
)
@pytest.mark.util
def test_nc_is_equal_both_nan(
capfd, tmp_path_factory, nc_data, fields, expected, check_print
capfd, tmp_path_factory, make_dummy_nc, nc_data, fields, expected, check_print
):
dummy_nc = make_nc(tmp_path_factory.mktemp("data2"), *nc_data)
dummy_nc = make_dummy_nc(tmp_path_factory.mktemp("data2"), *nc_data)
assert du.nc_is_equal(dummy_nc, dummy_nc, fields=fields, debug=True) == expected

if check_print:
@@ -356,7 +356,28 @@ def set_minimum_config_settings(config):
'SERIES_ANALYSIS_MASK_POLY': 'MET_BASE/poly/EAST.poly',
},
{'METPLUS_MASK_DICT': 'mask = {grid = "FULL";poly = "MET_BASE/poly/EAST.poly";}'}),
# check tags are resolved and animation config works
({
'FCST_VAR1_LEVELS': 'A0{init?fmt=3}',
'SERIES_ANALYSIS_GENERATE_PLOTS': 'True',
'SERIES_ANALYSIS_GENERATE_ANIMATIONS': 'True',
'CONVERT_EXE': 'animation_exe'
},
{},),
# check 'BOTH_*' and '*INPUT_FILE_LIST' config
({'SERIES_ANALYSIS_REGRID_TO_GRID': 'FCST',
'BOTH_SERIES_ANALYSIS_INPUT_TEMPLATE': 'True',
},
{'METPLUS_REGRID_DICT': 'regrid = {to_grid = FCST;}'}),
({'SERIES_ANALYSIS_REGRID_TO_GRID': 'FCST',
'BOTH_SERIES_ANALYSIS_INPUT_FILE_LIST': 'True',
},
{'METPLUS_REGRID_DICT': 'regrid = {to_grid = FCST;}'}),
({'SERIES_ANALYSIS_REGRID_TO_GRID': 'FCST',
'FCST_SERIES_ANALYSIS_INPUT_FILE_LIST': 'True',
'OBS_SERIES_ANALYSIS_INPUT_FILE_LIST': 'True',
},
{'METPLUS_REGRID_DICT': 'regrid = {to_grid = FCST;}'}),
]
)
@pytest.mark.wrapper_a
@@ -374,6 +395,8 @@ def test_series_analysis_single_field(metplus_config, config_overrides,
wrapper = SeriesAnalysisWrapper(config)
assert wrapper.isOK

is_both = wrapper.c_dict.get('USING_BOTH')

app_path = os.path.join(config.getdir('MET_BIN_DIR'), wrapper.app_name)
verbosity = f"-v {wrapper.c_dict['VERBOSITY']}"

@@ -383,14 +406,21 @@
suffix = '_init_20050807000000_valid_ALL_lead_ALL.txt'
fcst_file = f'{prefix}fcst{suffix}'
obs_file = f'{prefix}obs{suffix}'
expected_cmds = [(f"{app_path} "

if is_both:
expected_cmds = [(f"{app_path} "
f"-both {out_dir}/{fcst_file} "
f"-out {out_dir}/2005080700 "
f"-config {config_file} {verbosity}"),
]
else:
expected_cmds = [(f"{app_path} "
f"-fcst {out_dir}/{fcst_file} "
f"-obs {out_dir}/{obs_file} "
f"-out {out_dir}/2005080700 "
f"-config {config_file} {verbosity}"),
]


all_cmds = wrapper.run_all_times()
print(f"ALL COMMANDS: {all_cmds}")

@@ -899,20 +929,56 @@ def test_get_output_dir(metplus_config, template, storm_id, label, expected_result
assert(actual_result == os.path.join(output_dir, expected_result))


@pytest.mark.parametrize(
'data,expected_min,expected_max,variable_name', [
( [
[[1, 2], [3, 4], [5, 6]],
[[2, 3], [4, 5], [6, 7]],
[[30, 31], [33, 32], [34, 39]],
],
1,
39,
'Temp'
),
(
[
[[1, 1], [1, 1], [1, 1]]
],
1,
1,
'Temp'
),
(
[
[[1, 1], [1, 1], [1, 1]]
],
None,
None,
'Foo'
),
]
)
@pytest.mark.wrapper_a
def test_get_netcdf_min_max(metplus_config):
pytest.skip('Rewrite this test to write a NetCDF file and check vals instead of using file in met install dir')
expected_min = 0.0
expected_max = 8.0

def test_get_netcdf_min_max(tmp_path_factory,
metplus_config,
make_dummy_nc,
data,
expected_min,
expected_max,
variable_name):

filepath = make_dummy_nc(
tmp_path_factory.mktemp("data1"),
[359, 0, 1],
[-1, 0, 1],
[0, 1],
data,
"Temp"
)

wrapper = series_analysis_wrapper(metplus_config)
met_install_dir = wrapper.config.getdir('MET_INSTALL_DIR')
filepath = os.path.join(met_install_dir,
'share',
'met',
'tc_data',
'basin_global_tenth_degree.nc')
variable_name = 'basin'

min, max = wrapper._get_netcdf_min_max(filepath, variable_name)
assert min == expected_min
assert max == expected_max
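
For reference, the behaviour this parametrisation assumes from the wrapper's _get_netcdf_min_max can be sketched roughly as follows (an assumed illustration, not the wrapper's actual code): open the file, return the variable's min and max if it exists, otherwise (None, None).

# Assumed behaviour, sketched for illustration only.
import numpy as np
from netCDF4 import Dataset

def get_netcdf_min_max(filepath, variable_name):
    with Dataset(filepath) as nc:
        if variable_name not in nc.variables:
            return None, None
        values = np.ma.masked_invalid(nc.variables[variable_name][:])
        return float(values.min()), float(values.max())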
