diff --git a/docs/Users_Guide/appendixF.rst b/docs/Users_Guide/appendixF.rst
index 4a8fe86cae..88f926664e 100644
--- a/docs/Users_Guide/appendixF.rst
+++ b/docs/Users_Guide/appendixF.rst
@@ -110,55 +110,88 @@ Python Script Requirements for 2D Gridded Dataplanes

 3. The data inside the **met_data** variable must be **double precision floating point** type

-4. A Python dictionary named **attrs** must be defined in the user's script and contain the :ref:`required attributes <pyembed-2d-attrs>`
+4. A Python dictionary named **attrs** must be defined in the user's script and contain the :ref:`required attributes <pyembed-2d-attrs>` and
+   any :ref:`optional attributes <pyembed-2d-attrs>`

 .. _pyembed-2d-attrs:

-Required Attributes for 2D Gridded Dataplanes
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-The **attrs** dictionary must contain the following information:
+Attributes for 2D Gridded Dataplanes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

 .. list-table:: 2D Dataplane Attributes
-  :widths: 5 5 10
+  :widths: 5 5 10 5
   :header-rows: 1

   * - key
     - description
     - data type/format
+    - required/optional
   * - valid
     - valid time
     - string (YYYYMMDD_HHMMSS)
+    - required
   * - init
     - initialization time
     - string (YYYYMMDD_HHMMSS)
+    - required
   * - lead
     - forecast lead
     - string (HHMMSS)
+    - required
   * - accum
     - accumulation interval
     - string (HHMMSS)
+    - required
   * - name
     - variable name
     - string
+    - required
   * - long_name
     - variable long name
     - string
+    - required
   * - level
     - variable level
     - string
+    - required
   * - units
     - variable units
     - string
+    - required
   * - grid
-    - grid information
+    - :ref:`grid information <pyembed-grid-attrs>`
     - string or dict
+    - required
+  * - fill_value
+    - :ref:`missing data value <pyembed-fillvalue-attrs>`
+    - int or float
+    - optional

 .. note::

    Oftentimes Xarray DataArray objects come with their own set of attributes available as a property. To avoid conflict with the required attributes for MET, it is advised to strip these attributes and rely on the **attrs** dictionary defined in your script.

+.. _pyembed-fillvalue-attrs:
+
+Python embedding for 2D gridded dataplanes provides support for a user-defined missing data value (or fill value). By default, the MET tools will respect (and ignore) the following special values in a user's **met_data** variable:
+
+1. NaN
+2. Inf
+3. -9999
+4. -9999.
+
+If a user's 2D dataplane contains another value that MET should treat as missing, then the user must set the **fill_value** attribute in the **attrs** dictionary. For example, if missing data are indicated by -99, adding the following entry to the **attrs** dictionary tells MET to ignore those values:
+
+.. code-block:: none
+  :caption: User Fill Value for 2D Dataplane
+
+  'fill_value': -99
+
+Alternatively, the user can replace the special values in the data with one of the four supported values instead of setting the **fill_value** attribute. Note that only a single user-defined fill value is supported at this time.
+
+.. _pyembed-grid-attrs:
+
 The grid entry in the **attrs** dictionary must contain the grid size and projection information in the same format that is used in the netCDF files written out by the MET tools. The value of this item in the dictionary can either be a string, or another dictionary.
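For the dictionary form, the read_ascii_numpy.py example later in this patch builds a Lambert Conformal grid dictionary; the sketch below condenses it for reference. This is an editor's illustration only, and the grid name and numeric values are simply the example's, not required defaults.

.. code-block:: python

   grid_lambert_conformal = {
      'type':        'Lambert Conformal',
      'hemisphere':  'N',
      'name':        'FooGrid',
      'scale_lat_1': 25.0,
      'scale_lat_2': 25.0,
      'lat_pin':     12.19,
      'lon_pin':     -135.459,
      'x_pin':       0.0,
      'y_pin':       0.0,
      'lon_orient':  -95.0,
      'd_km':        40.635,
      'r_km':        6371.2,
      'nx':          185,
      'ny':          129,
   }

   # the dictionary is then assigned to the 'grid' key of the attrs dictionary
   attrs['grid'] = grid_lambert_conformal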
 Examples of the **grid** entry defined as a string are:

 * Using a named grid supported by MET:
diff --git a/scripts/python/examples/read_ascii_numpy.py b/scripts/python/examples/read_ascii_numpy.py
index a15fe17031..3c6310cec2 100644
--- a/scripts/python/examples/read_ascii_numpy.py
+++ b/scripts/python/examples/read_ascii_numpy.py
@@ -4,82 +4,96 @@

 ###########################################

-print("Python Script:\t" + repr(sys.argv[0]))
+def log(msg):
+    dataplane.log_msg(msg)
+
+def set_dataplane_attrs():
+    # attrs is a dictionary which contains attributes describing the dataplane.
+    # attrs should have 9 items, each of data type string:
+    #    'name': data name
+    #    'long_name': descriptive name
+    #    'valid': valid time (format = 'yyyymmdd_hhmmss')
+    #    'init': init time (format = 'yyyymmdd_hhmmss')
+    #    'lead': lead time (format = 'hhmmss')
+    #    'accum': accumulation time (format = 'hhmmss')
+    #    'level': vertical level
+    #    'units': units of the data
+    #    'grid': contains the grid information
+    #       - a grid name (G212)
+    #       - a gridded data file name
+    #       - MET specific grid string, "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N"
+    #       - a dictionary for the grid information
+
+    valid_time = '20050807_120000'
+    init_time = '20050807_000000'
+    lead_time = '120000'
+    accum_time = '120000'
+    v_level = 'Surface'
+    units = 'None'
+
+    grid_lambert_conformal = {
+        'type': 'Lambert Conformal',
+        'hemisphere': 'N',
+
+        'name': 'FooGrid',
+
+        'scale_lat_1': 25.0,
+        'scale_lat_2': 25.0,
+
+        'lat_pin': 12.19,
+        'lon_pin': -135.459,
+
+        'x_pin': 0.0,
+        'y_pin': 0.0,
+
+        'lon_orient': -95.0,
+
+        'd_km': 40.635,
+        'r_km': 6371.2,
+
+        'nx': 185,
+        'ny': 129,
+    }
+
+    long_name = data_name + "_word"
+    return dataplane.set_dataplane_attrs(data_name, valid_time, init_time,
+                                         lead_time, accum_time, v_level, units,
+                                         grid_lambert_conformal, long_name)
+
+log("Python Script:\t" + repr(sys.argv[0]))

-  ##
-  ##  input file specified on the command line
-  ##  load the data into the numpy array
-  ##
+##
+##  input file specified on the command line
+##  load the data into the numpy array
+##

 if len(sys.argv) != 3:
-    print("ERROR: read_ascii_numpy.py -> Must specify exactly one input file and a name for the data.")
-    sys.exit(1)
+    dataplane.quit("read_ascii_numpy.py -> Must specify exactly one input file and a name for the data.")

 # Read the input file as the first argument
 input_file = os.path.expandvars(sys.argv[1])
 data_name = sys.argv[2]
 try:
-    print("Input File:\t" + repr(input_file))
-    print("Data Name:\t" + repr(data_name))
-    # read_2d_text_input() reads n by m text data and returns 2D numpy array
-    met_data = dataplane.read_2d_text_input(input_file)
-    print("Data Shape:\t" + repr(met_data.shape))
-    print("Data Type:\t" + repr(met_data.dtype))
-except NameError:
-    met_data = None
-    print("Can't find the input file")
-
-# attrs is a dictionary which contains attributes describing the dataplane.
-# attrs should have 9 items, each of data type string:
-#    'name': data name
-#    'long_name': descriptive name
-#    'valid': valid time (format = 'yyyymmdd_hhmmss')
-#    'init': init time (format = 'yyyymmdd_hhmmss')
-#    'lead': lead time (format = 'hhmmss')
-#    'accum': accumulation time (format = 'hhmmss')
-#    'level': vertilcal level
-#    'units': units of the data
-#    'grid': contains the grid information
-#       - a grid name (G212)
-#       - a gridded data file name
-#       - MET specific grid string, "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N"
-#       - a dictionary for the grid information
-
-valid_time = '20050807_120000'
-init_time = '20050807_000000'
-lead_time = '120000'
-accum_time = '120000'
-v_level = 'Surface'
-units = 'None'
-
-grid_lambert_conformal = {
-    'type': 'Lambert Conformal',
-    'hemisphere': 'N',
-
-    'name': 'FooGrid',
-
-    'scale_lat_1': 25.0,
-    'scale_lat_2': 25.0,
-
-    'lat_pin': 12.19,
-    'lon_pin': -135.459,
-
-    'x_pin': 0.0,
-    'y_pin': 0.0,
-
-    'lon_orient': -95.0,
-
-    'd_km': 40.635,
-    'r_km': 6371.2,
-
-    'nx': 185,
-    'ny': 129,
-}
-
-long_name = data_name + "_word"
-attrs = dataplane.set_dataplane_attrs(data_name, valid_time, init_time,
-                                      lead_time, accum_time, v_level, units,
-                                      grid_lambert_conformal, long_name)
-
-print("Attributes:\t" + repr(attrs))
+    log("Input File:\t" + repr(input_file))
+    log("Data Name:\t" + repr(data_name))
+    if os.path.exists(input_file):
+        # read_2d_text_input() reads n by m text data and returns 2D numpy array
+        met_data = dataplane.read_2d_text_input(input_file)
+        if met_data is None:
+            dataplane.quit(f"Failed to build met_data from {input_file}")
+        else:
+            log("Data Shape:\t" + repr(met_data.shape))
+            log("Data Type:\t" + repr(met_data.dtype))
+    else:
+        dataplane.quit(f"input {input_file} does not exist!")
+except Exception:
+    import traceback
+    traceback.print_exc()
+    dataplane.quit(f"Unknown error with {sys.argv[0]}: ")
+
+attrs = set_dataplane_attrs()
+log("Attributes:\t" + repr(attrs))
+
+# Sets fill_value if it exists
+#attrs['fill_value'] = 255  # for letter.txt
diff --git a/scripts/python/examples/read_ascii_xarray.py b/scripts/python/examples/read_ascii_xarray.py
index 8998235ea1..e4ba1f9a28 100644
--- a/scripts/python/examples/read_ascii_xarray.py
+++ b/scripts/python/examples/read_ascii_xarray.py
@@ -5,7 +5,10 @@

 ###########################################

-print("Python Script:\t" + repr(sys.argv[0]))
+def log(msg):
+    dataplane.log_msg(msg)
+
+log("Python Script:\t" + repr(sys.argv[0]))

 ##
 ##  input file specified on the command line
@@ -13,23 +16,30 @@
 ##

 if len(sys.argv) != 3:
-    print("ERROR: read_ascii_xarray.py -> Must specify exactly one input file and a name for the data.")
-    sys.exit(1)
+    dataplane.quit("read_ascii_xarray.py -> Must specify exactly one input file and a name for the data.")

 # Read the input file as the first argument
 input_file = os.path.expandvars(sys.argv[1])
 data_name = sys.argv[2]
 try:
-    print("Input File:\t" + repr(input_file))
-    print("Data Name:\t" + repr(data_name))
-    # read_2d_text_input() reads n by m text data and returns 2D numpy array
-    met_data = dataplane.read_2d_text_input(input_file)
-    print("Data Shape:\t" + repr(met_data.shape))
-    print("Data Type:\t" + repr(met_data.dtype))
-except NameError:
+    log("Input File:\t" + repr(input_file))
+    log("Data Name:\t" + repr(data_name))
+    if os.path.exists(input_file):
+        # read_2d_text_input() reads n by m text data and returns 2D numpy array
+        met_data = dataplane.read_2d_text_input(input_file)
+        if met_data is None:
+            dataplane.quit(f"Failed to build met_data from {input_file}")
+        else:
+            log("Data Shape:\t" + repr(met_data.shape))
+            log("Data Type:\t" + repr(met_data.dtype))
+    else:
+        dataplane.quit(f"input {input_file} does not exist!")
+except Exception:
     met_data = None
-    print("Can't read the input file")
+    import traceback
+    traceback.print_exc()
+    dataplane.quit(f"Unknown error with {sys.argv[0]}: ")

 ###########################################
@@ -105,5 +115,8 @@
 # Delete the met_data variable, and reset it to be the Xarray object
 del met_data

+# Sets fill_value/min_value/max_value if it exists
+#ds.attrs['fill_value'] = 255
+
 # Create met_data and specify attrs because XR doesn't persist them.
 met_data = xr.DataArray(ds.fcst, attrs=ds.attrs)
diff --git a/scripts/python/met/Makefile.am b/scripts/python/met/Makefile.am
index 9e430722af..fd802449dd 100644
--- a/scripts/python/met/Makefile.am
+++ b/scripts/python/met/Makefile.am
@@ -25,6 +25,7 @@ pythonmetscriptsdir = $(pkgdatadir)/python/met

 pythonmetscripts_DATA = \
+   logger.py \
    dataplane.py \
    mprbase.py \
    point.py
diff --git a/scripts/python/met/Makefile.in b/scripts/python/met/Makefile.in
index 488e85355e..f149bf98e8 100644
--- a/scripts/python/met/Makefile.in
+++ b/scripts/python/met/Makefile.in
@@ -298,6 +298,7 @@ top_builddir = @top_builddir@
 top_srcdir = @top_srcdir@
 pythonmetscriptsdir = $(pkgdatadir)/python/met
 pythonmetscripts_DATA = \
+   logger.py \
    dataplane.py \
    mprbase.py \
    point.py
diff --git a/scripts/python/met/dataplane.py b/scripts/python/met/dataplane.py
index 3da0e8b9e4..e11fc31d50 100644
--- a/scripts/python/met/dataplane.py
+++ b/scripts/python/met/dataplane.py
@@ -1,16 +1,62 @@
 import os
+import sys

 import numpy as np
 import netCDF4 as nc
+import xarray as xr
+
+from importlib import util as import_util
+from met.logger import logger

 ###########################################

-class dataplane():
+class dataplane(logger):
+
+    KEEP_XARRAY = True
+    class_name = "dataplane"
+
+    MET_FILL_VALUE = -9999.
+    ATTR_USER_FILL_VALUE = 'user_fill_value'
+
+    @staticmethod
+    def call_python(argv):
+        logger.log_msg(f"Module:\t{repr(argv[0])}")
+        if 1 == len(argv):
+            logger.quit(f"User command is missing")
+
+        logger.log_msg("User Command:\t" + repr(' '.join(argv[1:])))
+        # argv[0] is the python wrapper script (caller)
+        # argv[1] contains the user defined python script
+        pyembed_module_name = argv[1]
+        sys.argv = argv[1:]
+        logger.log_msg(f" sys.argv:\t{sys.argv}")
+
+        # append user script dir to system path
+        pyembed_dir, pyembed_name = os.path.split(pyembed_module_name)
+        if pyembed_dir:
+            sys.path.insert(0, pyembed_dir)
+
+        if not pyembed_module_name.endswith('.py'):
+            pyembed_module_name += '.py'
+
+        user_base = pyembed_name.replace('.py','')
+
+        spec = import_util.spec_from_file_location(user_base, pyembed_module_name)
+        met_in = import_util.module_from_spec(spec)
+        spec.loader.exec_module(met_in)
+        return met_in
+
+    @staticmethod
+    def is_integer(a_data):
+        return isinstance(a_data, int)
+
+    @staticmethod
+    def is_xarray_dataarray(a_data):
+        return isinstance(a_data, xr.DataArray)

     ##
     ##  create the metadata dictionary
     ##

-    #@staticmethod
     # Python dictionary items:
     #    'name': data name
     #    'long_name': descriptive name
@@ -25,6 +71,7 @@ class dataplane():
     #       - a gridded data file name
     #       - MET specific grid string, "lambert 185 129 12.19 -133.459 -95 40.635 6371.2 25 25 N"
     #       - a dictionary for the grid information
+    @staticmethod
     def set_dataplane_attrs(data_name, valid_time, init_time, lead_time,
                             accum_time, v_level, units, grid_info, long_name=None):
         hdr_attrs = {
@@ -44,7 +91,7 @@
         }
         return hdr_attrs

-    #@staticmethod
+    @staticmethod
     def read_2d_text_input(input_file):
         if os.path.exists(input_file):
             met_data = np.loadtxt(input_file)
@@ -52,11 +99,16 @@
             met_data = None
         return met_data

-    #@staticmethod
+    @staticmethod
     def read_dataplane(netcdf_filename):
         # read NetCDF file
         ds = nc.Dataset(netcdf_filename, 'r')
-        met_data = ds['met_data'][:]
+
+        dp = ds['met_data']
+        met_data = dp[:]
+        attr_name = dataplane.ATTR_USER_FILL_VALUE
+        user_fill_value = dp.getncattr(attr_name) if hasattr(dp, attr_name) else None
+
         met_attrs = {}

         # grid is defined as a dictionary or string
@@ -76,10 +128,13 @@

         met_info = {}
         met_info['met_data'] = met_data
+        if user_fill_value is not None:
+            met_attrs['fill_value'] = user_fill_value
         met_info['attrs'] = met_attrs
+
         return met_info

-    #@staticmethod
+    @staticmethod
     def write_dataplane(met_in, netcdf_filename):
         met_info = {'met_data': met_in.met_data}
         if hasattr(met_in.met_data, 'attrs') and met_in.met_data.attrs:
@@ -88,12 +143,6 @@
             attrs = met_in.attrs
         met_info['attrs'] = attrs

-        # determine fill value
-        #try:
-        #   fill = met_in.met_data.get_fill_value()
-        #except:
-        fill = -9999.
-
         # write NetCDF file
         ds = nc.Dataset(netcdf_filename, 'w')
@@ -101,13 +150,19 @@
         nx, ny = met_in.met_data.shape
         ds.createDimension('x', nx)
         ds.createDimension('y', ny)
-        dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y'), fill_value=fill)
+        dp = ds.createVariable('met_data', met_in.met_data.dtype, ('x', 'y'),
+                               fill_value=dataplane.MET_FILL_VALUE)
         dp[:] = met_in.met_data

         # append attributes
         for attr, attr_val in met_info['attrs'].items():
+            if attr_val is None:
+                continue
+
             if attr == 'name':
                 setattr(ds, 'name_str', attr_val)
+            elif attr == 'fill_value':
+                setattr(dp, dataplane.ATTR_USER_FILL_VALUE, attr_val)
             elif type(attr_val) == dict:
                 for key in attr_val:
                     setattr(ds, attr + '.' + key, attr_val[key])
@@ -116,3 +171,73 @@

         ds.close()

+    @staticmethod
+    def validate_met_data(met_data, fill_value=None):
+        method_name = f"{dataplane.class_name}.validate()"
+        #logger.log_msg(f"{method_name} type(met_data)= {type(met_data)}")
+        attrs = None
+        from_xarray = False
+        from_ndarray = False
+        if met_data is None:
+            logger.quit(f"{method_name} The met_data is None")
+
+        nx, ny = met_data.shape
+        met_fill_value = dataplane.MET_FILL_VALUE
+        if dataplane.is_xarray_dataarray(met_data):
+            from_xarray = True
+            attrs = met_data.attrs
+            met_data = met_data.data
+            modified_met_data = True
+        if isinstance(met_data, np.ndarray):
+            from_ndarray = True
+            met_data = np.ma.array(met_data)
+
+        if isinstance(met_data, np.ma.MaskedArray):
+            is_int_data = dataplane.is_integer(met_data[0,0]) or dataplane.is_integer(met_data[int(nx/2),int(ny/2)])
+            # mask NaN and Inf, plus the user-defined fill value if one was given
+            met_data = np.ma.masked_invalid(met_data)
+            if fill_value is not None:
+                met_data = np.ma.masked_equal(met_data, fill_value)
+            met_data = met_data.filled(int(met_fill_value) if is_int_data else met_fill_value)
+        else:
+            logger.log_msg(f"{method_name} unknown datatype {type(met_data)}")
+
+        if dataplane.KEEP_XARRAY:
+            return xr.DataArray(met_data, attrs=attrs) if from_xarray else met_data
+        else:
+            return met_data
+
+
+def main(argv):
+    global attrs, met_data, met_info
+
+    met_in = dataplane.call_python(sys.argv)
+
+    user_fill_value = None
+    try:
+        met_info = met_in.met_info
+        attrs = met_info['attrs']
+        init_met_data = met_info['met_data']
+    except:
+        met_info = {}
+        init_met_data = met_in.met_data
+        try:    # numpy and attrs
+            attrs = met_in.attrs
+        except: # xarray
+            attrs = init_met_data.attrs
+        met_info['attrs'] = attrs
+
+    fill_value = attrs.get('fill_value', None)
+    if hasattr(met_in, 'user_fill_value'):
+        fill_value = met_in.user_fill_value
+
+    dataplane.log_msg('validating the dataplane array...')
+    met_data = dataplane.validate_met_data(init_met_data, fill_value)
+    met_info['met_data'] = met_data
+
+    if os.environ.get('MET_PYTHON_DEBUG', None) is not None:
+        dataplane.log_msg('--- met_data after validating ---')
+        dataplane.log_msg(met_data)
+
+if __name__ == '__main__' or __name__ == sys.argv[0]:
+    main(sys.argv)
+    dataplane.log_msg(f'{__name__} complete')
diff --git a/scripts/python/met/logger.py b/scripts/python/met/logger.py
new file mode 100644
index 0000000000..a7296124a6
--- /dev/null
+++ b/scripts/python/met/logger.py
@@ -0,0 +1,39 @@
+
+###########################################
+
+import sys
+
+class logger():
+
+    PROMPT = " PYTHON:"
+    ERROR_PROMPT = "ERROR"
+
+    ##
+    ##  simple logging helpers for MET Python embedding
+    ##
+
+    @staticmethod
+    def append_error_prompt(msg):
+        return f'{logger.ERROR_PROMPT}: {msg}'
+
+    @staticmethod
+    def error_msg(msg):
+        msgs = msg if isinstance(msg, list) else [msg]
+        msgs.insert(0, '')
+        msgs.append('')
+        for a_msg in msgs:
+            logger.log_msg(logger.append_error_prompt(a_msg))
+
+    #@staticmethod
+    #def get_met_fill_value():
+    #    return logger.MET_FILL_VALUE
+
+    @staticmethod
+    def log_msg(msg):
+        print(f'{logger.PROMPT} {msg}')
+
+    @staticmethod
+    def quit(msg):
+        logger.error_msg([msg, "Quit..."])
+        sys.exit(1)
+
diff --git a/scripts/python/met/point.py b/scripts/python/met/point.py
index 3c64549e85..fbfb112f51 100644
--- a/scripts/python/met/point.py
+++ b/scripts/python/met/point.py
@@ -980,4 +980,4 @@ def main_nc(argv):

 if __name__ == '__main__':
     main()
-    print('Done python scripot')
+    print('Done python script')
diff --git a/scripts/python/pyembed/python_embedding.py b/scripts/python/pyembed/python_embedding.py
index 5240eb1120..fc8b15cb61 100644
--- a/scripts/python/pyembed/python_embedding.py
+++ b/scripts/python/pyembed/python_embedding.py
@@ -68,14 +68,14 @@ def call_python(argv):
         pyembed_tools.add_python_path(pyembed_module_name)

     # append user script dir to system path
-    pyembed_dir, _ = os.path.split(pyembed_module_name)
+    pyembed_dir, pyembed_name = os.path.split(pyembed_module_name)
     if pyembed_dir:
         sys.path.insert(0, pyembed_dir)

     if not pyembed_module_name.endswith('.py'):
         pyembed_module_name += '.py'

-    user_base = os.path.basename(pyembed_module_name).replace('.py','')
+    user_base = pyembed_name.replace('.py','')

     spec = import_util.spec_from_file_location(user_base, pyembed_module_name)
     met_in = import_util.module_from_spec(spec)
diff --git a/src/libcode/vx_data2d_python/python_dataplane.cc b/src/libcode/vx_data2d_python/python_dataplane.cc
index 00217bb7b2..118f2b45aa 100644
--- a/src/libcode/vx_data2d_python/python_dataplane.cc
+++ b/src/libcode/vx_data2d_python/python_dataplane.cc
@@ -29,14 +29,15 @@ extern GlobalPython GP;   //  this needs external linkage

////////////////////////////////////////////////////////////////////////

-static const char * user_ppath = 0;
+static const char * user_ppath = nullptr;

 static const char write_tmp_nc [] = "MET_BASE/python/pyembed/write_tmp_dataplane.py";

-static const char read_tmp_nc [] = "pyembed.read_tmp_dataplane";   //  NO ".py" suffix
+static const char read_tmp_nc [] = "MET_BASE/python/pyembed/read_tmp_dataplane.py";

 static const char tmp_nc_var_name [] = "met_info";

+static const char validate_dataplane [] = "met.dataplane";   //  NO ".py" suffix

////////////////////////////////////////////////////////////////////////

@@ -55,6 +56,21 @@ static bool tmp_nc_dataplane(const char * script_name,

////////////////////////////////////////////////////////////////////////

+void release_memory(int script_argc, char ** script_argv)  {
+
+   if ( script_argv )  {
+      for (int i=0; i<script_argc; i++)  delete [] script_argv[i];
+      delete [] script_argv;
+   }
+
+   return;
+}

-   mlog << Warning << "\npython_dataplane() -> "
+   mlog << Warning << "\n" << method_name
        << "an error occurred initializing python\n\n";

+   release_memory(script_argc, script_argv);
+
    return ( false );

@@ -166,7 +195,8 @@ if ( user_script_argc > 0 )  {
   //
   //  import the python script as a module
   //

-module_obj = PyImport_ImportModule (user_base.c_str());
+//module_obj = PyImport_ImportModule (user_base.c_str());
+module_obj = PyImport_ImportModule (validate_dataplane);

   //
   //  if needed, reload the module
@@ -178,11 +208,13 @@ if ( do_reload )  {

}

+release_memory(script_argc, script_argv);
+
if ( PyErr_Occurred() )  {

   PyErr_Print();

-   mlog << Warning << "\npython_dataplane() -> "
+   mlog << Warning << "\n" << method_name
        << "an error occurred importing module \""
        << user_script_name << "\"\n\n";

@@ -192,7 +224,7 @@ if ( PyErr_Occurred() )  {

if ( ! module_obj )  {

-   mlog << Warning << "\npython_dataplane() -> "
+   mlog << Warning << "\n" << method_name
        << "error running python script \""
        << user_script_name << "\"\n\n";

@@ -222,7 +254,7 @@ if ( use_xarray )  {

   if ( ! data_array_obj )  {

-      mlog << Warning << "\npython_dataplane() -> "
+      mlog << Warning << "\n" << method_name
           << "trouble reading data from \""
           << user_script_name << "\"\n\n";

@@ -245,9 +277,14 @@ if ( use_xarray )  {

   if ( !numpy_array_obj || !attrs_dict_obj )  {

-      mlog << Warning << "\npython_dataplane() -> "
+      mlog << Warning << "\n" << method_name
           << "trouble reading data from \""
           << user_script_name << "\"\n\n";

+      if ( !numpy_array_obj ) mlog << Warning << "\n" << method_name
+                                   << numpy_array_name << " is missing\n";
+      if ( !attrs_dict_obj )  mlog << Warning << "\n" << method_name
+                                   << numpy_dict_name << " is missing\n";
+      mlog << Warning << "\n";

      return ( false );

   }

@@ -358,7 +395,9 @@ if ( PyErr_Occurred() )  {

StringArray a;

-a.add(read_tmp_nc);
+a.add(validate_dataplane);
+
+a.add(replace_path(read_tmp_nc));

a.add(tmp_nc_path);

@@ -373,7 +412,8 @@ mlog << Debug(4) << "Reading temporary Python dataplane file: "
   //
   //  import the python wrapper script as a module
   //

-path = get_short_name(read_tmp_nc);
+//path = get_short_name(read_tmp_nc);
+path = get_short_name(validate_dataplane);

PyObject * module_obj = PyImport_ImportModule (path.text());

@@ -425,7 +465,7 @@ PyObject * data_obj = PyDict_GetItem (module_dict_obj, key_obj);

if ( ! data_obj || ! PyDict_Check(data_obj) )  {

   mlog << Error << "\ntmp_nc_dataplane() -> "
-        << "bad dict object\n\n";
+        << (!data_obj ? "no" : "bad") << " dict object from " << tmp_nc_var_name << "\n\n";

   exit ( 1 );
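As a quick illustration of how the pieces in this changeset fit together, the sketch below shows a minimal, hypothetical user script for 2D dataplane Python embedding that relies on the new **fill_value** attribute. It is an editor's example based on the documentation and the read_ascii_numpy.py sample above; the input file, variable names, grid, and the -99 fill value are made up and are not part of the patch itself.

.. code-block:: python

   import os
   import sys
   import numpy as np

   # MET requires a 2D dataplane of double precision values in 'met_data'
   met_data = np.loadtxt(os.path.expandvars(sys.argv[1])).astype(np.float64)

   attrs = {
      'valid':      '20050807_120000',
      'init':       '20050807_000000',
      'lead':       '120000',
      'accum':      '120000',
      'name':       'precip',
      'long_name':  'total precipitation',
      'level':      'Surface',
      'units':      'mm',
      'grid':       'G212',   # named grid; a grid spec string, file name, or dict also works
      'fill_value': -99       # optional: values of -99 in met_data are treated as missing
   }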