Merge remote-tracking branch 'origin/main' into pp/multi_time_from_list
ppinchuk committed Aug 15, 2024
2 parents a535b0a + a96af89 commit a569c25
Showing 8 changed files with 69 additions and 4 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/codecov.yml
@@ -19,10 +19,11 @@ jobs:
           pip install --upgrade pip
           pip install pytest
           pip install pytest-cov
+          pip install pytest-timeout
+          pip install flaky
           pip install -e .
       - name: Generate coverage report
         run: |
-          pip install pytest-cov
           cd tests
           pytest --disable-warnings --cov=./ --cov-report=xml:coverage.xml
       - name: Upload coverage to Codecov
1 change: 1 addition & 0 deletions .github/workflows/pull_request_tests.yml
@@ -29,6 +29,7 @@ jobs:
           pip install pytest
           pip install pytest-cov
           pip install pytest-timeout
+          pip install flaky
           pip install -e .
       - name: Run pytest and Generate coverage report
         run: |
2 changes: 1 addition & 1 deletion rex/multi_file_resource.py
@@ -127,7 +127,7 @@ def _get_dsets(h5_path):
         shared_dsets = []
         try:
             with h5py.File(h5_path, mode='r') as f:
-                for dset in f:
+                for dset in Resource._get_datasets(f):
                     if dset not in ['meta', 'time_index', 'coordinates']:
                         unique_dsets.append(dset)
                     else:
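
For context on this fix: iterating an h5py File yields only its top-level member names, so datasets nested inside groups were skipped by the old loop. A minimal sketch of the recursive walk assumed of Resource._get_datasets (the real rex helper may differ in details):

import h5py
import numpy as np

def walk_datasets(h5_obj, prefix=''):
    """Yield group-prefixed paths for every dataset, descending into groups."""
    for name, item in h5_obj.items():
        path = f'{prefix}/{name}' if prefix else name
        if isinstance(item, h5py.Group):
            yield from walk_datasets(item, path)  # recurse into the group
        else:
            yield path

with h5py.File('example.h5', 'w') as f:
    f['dset1'] = np.ones(10)
    f['g1/dset_g1'] = np.zeros(10)  # h5py creates group 'g1' implicitly

with h5py.File('example.h5', 'r') as f:
    print(list(f))                 # ['dset1', 'g1'] -- plain iteration stops at the top level
    print(list(walk_datasets(f)))  # ['dset1', 'g1/dset_g1']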
3 changes: 3 additions & 0 deletions rex/rechunk_h5/rechunk_h5.py
@@ -88,6 +88,9 @@ def get_dataset_attributes(h5_file, out_json=None, chunk_size=2,
 class RechunkH5:
     """
     Class to create new .h5 file with new chunking
+
+    .. WARNING:: This code does not currently support re-chunking H5
+                 files with grouped datasets.
     """
     # None time-series
     NON_TS_DSETS = ('meta', 'coordinates', 'time_index')
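
Given that warning, a quick pre-flight check one might run before rechunking (has_groups and the file name are hypothetical, not part of rex):

import h5py

def has_groups(h5_path):
    """Return True if any top-level member of the file is an HDF5 group."""
    with h5py.File(h5_path, 'r') as f:
        return any(isinstance(f[name], h5py.Group) for name in f)

# e.g. fail fast before handing a file to RechunkH5
assert not has_groups('to_rechunk.h5'), 'grouped datasets are not supported'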
2 changes: 1 addition & 1 deletion rex/resource.py
@@ -924,7 +924,7 @@ def attrs(self):
         """
         if self._attrs is None:
             self._attrs = {}
-            for dset in set(self.datasets).intersection(self.h5):
+            for dset in self.datasets:
                 self._attrs[dset] = dict(self.h5[dset].attrs)

         return self._attrs
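
Why the old line lost grouped datasets: iterating the h5py File yields only root-level keys, so the intersection presumably filtered out group-prefixed names. A standalone illustration using names that mirror the new test below:

datasets = ['dset1', 'meta', 'time_index',           # what Resource.datasets reports
            'g1/dset_g1', 'g1/meta', 'g1/time_index']
top_level = ['dset1', 'g1', 'meta', 'time_index']    # what iterating the File yields

print(set(datasets).intersection(top_level))
# {'dset1', 'meta', 'time_index'} -- the 'g1/...' datasets never got attrs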
2 changes: 1 addition & 1 deletion setup.py
@@ -41,7 +41,7 @@ def run(self):
 with open("requirements.txt") as f:
     install_requires = f.readlines()

-test_requires = ["pytest>=5.2", "pytest-timeout>=2.3.1"]
+test_requires = ["pytest>=5.2", "pytest-timeout>=2.3.1", "flaky>=3.8.1"]
 dev_requires = ["flake8", "pre-commit", "pylint", "hsds>=0.8.4"]
 description = ("National Renewable Energy Laboratory's (NREL's) REsource "
                "eXtraction tool: rex")
2 changes: 2 additions & 0 deletions tests/test_bc.py
@@ -4,11 +4,13 @@
 """

 import numpy as np
+from flaky import flaky

 from rex.temporal_stats.temporal_stats import cdf
 from rex.utilities.bc_utils import QuantileDeltaMapping


+@flaky(max_runs=3, min_passes=1)
 def test_qdm():
     """Test basic QuantileDeltaMapping functionality with dummy distributions
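
For reference, flaky reruns failing tests: with max_runs=3 and min_passes=1, the test passes as soon as any one of up to three runs succeeds. A contrived standalone sketch:

import random

from flaky import flaky

@flaky(max_runs=3, min_passes=1)
def test_sometimes_unlucky():
    # Fails ~10% of the time per run; with up to three runs the
    # overall failure probability drops to roughly 0.1%.
    assert random.random() < 0.9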
58 changes: 58 additions & 0 deletions tests/test_resource.py
@@ -975,6 +975,64 @@ def test_mh5_iterator():
     assert len(dsets_permutation) == len(mh5.datasets) ** 2


+@pytest.mark.parametrize("read_class", [Resource, MultiFileResource])
+def test_attrs_for_grouped_datasets(read_class):
+    """Test attrs for files with datasets under groups."""
+
+    meta = pd.DataFrame({'latitude': np.ones(100),
+                         'longitude': np.zeros(100)})
+    time_index = pd_date_range('20210101', '20220101', freq='1h',
+                               closed='right')
+    with tempfile.TemporaryDirectory() as td:
+        fp = os.path.join(td, 'outputs.h5')
+
+        with Outputs(fp, 'w') as f:
+            f.meta = meta
+            f.time_index = time_index
+
+        Outputs.add_dataset(h5_file=fp, dset_name='dset1',
+                            dset_data=np.ones((8760, 100)) * 42.42,
+                            attrs={'scale_factor': 100}, dtype=np.int32)
+
+        with Outputs(fp, 'a', group="g1") as f:
+            f.meta = meta
+            f.time_index = time_index
+
+        Outputs.add_dataset(h5_file=fp, dset_name='dset_g1',
+                            dset_data=np.ones((8760, 100)) * 42.42,
+                            attrs={'scale_factor': 100}, dtype=np.int32,
+                            group="g1")
+
+        with read_class(fp) as res:
+            assert np.allclose(res["dset1"], 42.42)
+            assert np.allclose(res["g1/dset_g1"], 42.42)
+
+            expected_dsets = {'dset1', 'meta', 'time_index',
+                              'g1/dset_g1', 'g1/meta', 'g1/time_index'}
+            assert set(res.datasets) == expected_dsets
+            assert set(res.dtypes) == expected_dsets
+
+            expected_attrs = {'dset1': {'scale_factor': 100},
+                              'g1/dset_g1': {'scale_factor': 100},
+                              'g1/meta': {}, 'g1/time_index': {},
+                              'meta': {}, 'time_index': {}}
+            assert res.attrs == expected_attrs
+
+            expected_shapes = {'dset1': (8760, 100),
+                               'g1/dset_g1': (8760, 100),
+                               'g1/meta': (100,),
+                               'g1/time_index': (8760,),
+                               'meta': (100,), 'time_index': (8760,)}
+            assert res.shapes == expected_shapes
+
+            expected_chunks = {'dset1': None,
+                               'g1/dset_g1': None,
+                               'g1/meta': None,
+                               'g1/time_index': None,
+                               'meta': None, 'time_index': None}
+            assert res.chunks == expected_chunks
+
+
 def execute_pytest(capture='all', flags='-rapP'):
     """Execute module as pytest with detailed summary report.
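
A minimal usage sketch of the behavior the new test locks in (file name hypothetical; layout as created in the test above):

from rex import Resource

with Resource('outputs.h5') as res:
    data = res['g1/dset_g1']         # group-prefixed keys reach nested datasets
    attrs = res.attrs['g1/dset_g1']  # attrs now covers grouped datasets too
    print(attrs)                     # {'scale_factor': 100} per the test above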
