Extend mon_data_space.py to support MinMon binary data #162

Merged
48 changes: 34 additions & 14 deletions src/eva/data/mon_data_space.py
@@ -9,9 +9,9 @@

# --------------------------------------------------------------------------------------------------


import os
import numpy as np
import array

from xarray import Dataset, concat, merge, align
from scipy.io import FortranFile
@@ -162,9 +162,10 @@ def execute(self, dataset_config, data_collections, timing):
ds, chans_dict = \
self.subset_coordinate(ds, coord_dict[x][1], requested_coord[x], chans_dict)

# Conditionally add channel, level, and scan related variables
# ------------------------------------------------------------
ds = self.loadConditionalItems(ds, chans_dict, levs_dict, scanpo)
# Conditionally add channel, level, scan, and iteration related variables
# -----------------------------------------------------------------------
iterations = x_range if 'Iteration' in coords.values() else None
ds = self.loadConditionalItems(ds, chans_dict, levs_dict, scanpo, iterations)

# Rename variables with group
rename_dict = {}
@@ -640,14 +641,17 @@ def read_ieee(self, file_name, coords, dims, ndims_used, dims_arr, nvars, vars,
for x in range(nvars):
if load_data:
if ndims_used == 1:
arr = f.read_reals(dtype=np.dtype('>f4'))
with open(filename, 'rb') as infile:
binary_data = infile.read()

arr = array.array('f')
arr.frombytes(binary_data)
else:
arr = np.transpose(f.read_reals(dtype=np.dtype('>f4')).reshape(dimensions))
else:
arr = zarray

rtn_array = np.append(rtn_array, [arr], axis=0)

if load_data:
f.close()
return rtn_array, cycle_tm
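The substantive change in read_ieee is the one-dimensional (MinMon) branch: it no longer reads big-endian Fortran records through FortranFile, but reads the raw bytes and unpacks them with array.array('f'). A minimal sketch of the two read paths, assuming the MinMon file is a plain stream of native-endian float32 values; the file names here are hypothetical and not from the PR:

```python
# Sketch only, not the PR code. 'radmon.bin' and 'minmon.bin' are made-up file names.
import array

import numpy as np
from scipy.io import FortranFile

# Fortran unformatted records: each record carries length markers and the
# reals are stored big-endian, so FortranFile with dtype '>f4' is needed.
f = FortranFile('radmon.bin', 'r')
record = f.read_reals(dtype=np.dtype('>f4'))
f.close()

# Plain binary stream (assumed MinMon layout): no record markers, native-endian
# float32, so the whole file can be unpacked in one pass with array.array.
with open('minmon.bin', 'rb') as infile:
    arr = array.array('f')
    arr.frombytes(infile.read())
values = np.asarray(arr)
```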
@@ -864,35 +868,48 @@ def load_dset(self, vars, nvars, coords, darr, dims, ndims_used,
# create dataset from file components
rtn_ds = None

new_coords = {}
for x in range(0, nvars):
if ndims_used == 1:
d = {
vars[x]: {"dims": (coords[dims_arr[0]]), "data": darr[x, :]}
}

# MinMon plots require both the 'allgnorm' data and log('allgnorm').
if vars[x] == 'allgnorm':
d.update({
"log_gnorm": {"dims": (coords[dims_arr[0]]), "data": np.log(darr[x, :])}
})
new_coords = {
coords[dims_arr[0]]: x_range
}
if ndims_used == 2:
d = {
vars[x]: {"dims": (coords[dims_arr[0]], coords[dims_arr[1]]),
"data": darr[x, :, :]}
}
new_coords = {
coords[dims_arr[0]]: x_range,
coords[dims_arr[1]]: y_range
}
if ndims_used == 3:
d = {
vars[x]: {"dims": (coords[dims_arr[0]], coords[dims_arr[1]],
coords[dims_arr[2]]),
"data": darr[x, :, :]}
}
new_coords = {
coords[dims_arr[0]]: x_range,
coords[dims_arr[1]]: y_range,
coords[dims_arr[2]]: z_range
}

if 'Channel' in coords.values():
d.update({"Channel": {"dims": ("Channel"), "data": channo}})

new_ds = Dataset.from_dict(d)
rtn_ds = new_ds if rtn_ds is None else rtn_ds.merge(new_ds)

# Define new coordinates
new_coords = {
coords[dims_arr[0]]: x_range,
coords[dims_arr[1]]: y_range
}

# Add the new coordinates to the dataset
rtn_ds = rtn_ds.assign_coords(new_coords)
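The load_dset changes do two things: every 'allgnorm' variable gains a 'log_gnorm' companion, and new_coords is now built inside the loop so the coordinate set matches ndims_used instead of always assuming two dimensions. A minimal one-dimensional sketch of that pattern; the values and the 'Iteration' coordinate name below are illustrative, not taken from the PR:

```python
# Sketch only, not the PR code: a 1-D variable plus its log companion,
# assembled the same way as the 'allgnorm' / 'log_gnorm' logic above.
import numpy as np
from xarray import Dataset

allgnorm = np.array([320.0, 41.0, 7.8, 0.95])   # stand-in gnorm values
x_range = np.arange(1, allgnorm.size + 1)        # 1-based iteration index

d = {
    'allgnorm': {"dims": "Iteration", "data": allgnorm},
    'log_gnorm': {"dims": "Iteration", "data": np.log(allgnorm)},
}
new_coords = {'Iteration': x_range}

# Build the dataset from the per-variable dict, then attach the coordinate.
ds = Dataset.from_dict(d).assign_coords(new_coords)
```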

@@ -931,7 +948,7 @@ def load_dset(self, vars, nvars, coords, darr, dims, ndims_used,

# ----------------------------------------------------------------------------------------------

def loadConditionalItems(self, dataset, chans_dict, levs_dict, scanpo):
def loadConditionalItems(self, dataset, chans_dict, levs_dict, scanpo, iterations=None):

"""
Add channel, level, scan, and iteration related variables to the dataset.
@@ -941,7 +958,7 @@ def loadConditionalItems(self, dataset, chans_dict, levs_dict, scanpo):
chans_dict (dict): Dictionary of channel components.
levs_dict (dict): Dictionary of level components.
scanpo (list): List of scan positions.

iterations (list): List of iterations.
Returns:
xarray.Dataset: Dataset with added channel, level, scan, and iteration related variables.
"""
@@ -973,6 +990,9 @@ def loadConditionalItems(self, dataset, chans_dict, levs_dict, scanpo):
scan_array[:, x] = np.array([scanpo])
dataset['scan'] = (['Scan', 'Channel'], scan_array)

if iterations is not None:
dataset['iteration'] = (['Iteration'], iterations)

return dataset

# ----------------------------------------------------------------------------------------------
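When the new iterations argument is supplied, loadConditionalItems attaches it as an 'iteration' variable on the 'Iteration' dimension, mirroring how scan positions are handled. A small usage sketch with a stand-in dataset; in eva the dataset comes from load_dset and iterations from x_range, so everything below is illustrative only:

```python
# Sketch only, not the PR code: stand-in dataset and iteration numbers.
import numpy as np
from xarray import Dataset

dataset = Dataset({'allgnorm': (['Iteration'], np.array([320.0, 41.0, 7.8, 0.95]))})
iterations = np.arange(1, 5)

# Same pattern as the new branch in loadConditionalItems.
if iterations is not None:
    dataset['iteration'] = (['Iteration'], iterations)
```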