diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 1879a8e5..a153a574 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -30,61 +30,110 @@ jobs: run: tox -e black pip: - name: Pip with Python${{ matrix.python-version }} + name: Python${{ matrix.python-version }} (${{ matrix.os }}) needs: black - runs-on: ubuntu-latest + runs-on: ${{ matrix.os }} strategy: + fail-fast: false matrix: include: - tox-env: py38 python-version: "3.8" - - tox-env: py39-numpy + os: ubuntu-latest + allow-errors: false + - tox-env: py39 python-version: "3.9" + os: ubuntu-latest + allow-errors: false + - tox-env: py310 + python-version: "3.10" + os: ubuntu-latest + allow-errors: false + - tox-env: py311 + python-version: "3.11" + os: ubuntu-latest + allow-errors: false - tox-env: py310 python-version: "3.10" + os: macos-latest + allow-errors: true steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - name: Install GDAL + + - name: Install GDAL (Ubuntu) + if: matrix.os == 'ubuntu-latest' run: | sudo apt-get update sudo apt-get install libgdal-dev - - name: Install tox and setuptools + - name: Install GDAL (macOS) + if: matrix.os == 'macos-latest' + uses: tecolicom/actions-use-homebrew-tools@v1 + with: + tools: gdal + cache: yes + - name: Set GDAL_VERSION (Ubuntu) + if: matrix.os == 'ubuntu-latest' + run: | + echo "GDAL_VERSION=$(gdal-config --version)" >> $GITHUB_ENV + - name: Set GDAL_VERSION (macOS) + if: matrix.os == 'macos-latest' + run: | + echo "GDAL_VERSION=$(gdalinfo --version | awk '{print $2}' | sed s'/.$//')" >> $GITHUB_ENV + + - name: Install tox run: | pip install tox~=4.0 - pip install --upgrade "setuptools<65.6" - name: Test with tox and report coverage - run: env GDAL_VERSION="$(gdal-config --version)" tox -e ${{ matrix.tox-env }} + run: | + python3 -m tox -e ${{ matrix.tox-env }} + continue-on-error: ${{ 
matrix.allow-errors }} env: - LD_PRELOAD: /lib/x86_64-linux-gnu/libstdc++.so.6 # :"$LD_PRELOAD" GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} COVERALLS_FLAG_NAME: run-${{ matrix.tox-env }} COVERALLS_PARALLEL: true conda: - name: Conda + name: Python${{ matrix.python-version }} (${{ matrix.os }}) (Conda) needs: black - runs-on: ubuntu-latest + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - os: ubuntu-latest + python-version: "3.9" + - os: ubuntu-latest + python-version: "3.10" + - os: ubuntu-latest + python-version: "3.11" + - os: macos-latest + python-version: "3.9" defaults: run: shell: bash -l {0} steps: - uses: actions/checkout@v3 - - name: Setup Conda (Micromamba) with Python3.9 - uses: mamba-org/provision-with-micromamba@main + - name: Patch Environment File + if: matrix.os == 'windows-latest' + run: | + sed -i 's/climpred >=2.2.0/xesmf/' environment.yml + - name: Setup Conda (Micromamba) with Python${{ matrix.python-version }} + uses: mamba-org/setup-micromamba@v1 with: cache-downloads: true + cache-environment: true environment-file: environment.yml - extra-specs: | - mamba - pip<23.1 - python=3.9 + create-args: >- + conda + python=${{ matrix.python-version }} - name: Conda and Mamba versions run: | - mamba --version + conda --version + echo "micromamba: $(micromamba --version)" - name: Install RavenPy run: | pip install -e ".[dev]" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 380b4cf3..84b271ec 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,31 +6,33 @@ repos: rev: v3.4.0 hooks: - id: pyupgrade - args: [ --py38-plus ] + args: [ '--py38-plus' ] - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: - id: trailing-whitespace - exclude: .rvc|.rvh|.rvi|.rvp|.rvt|.tpl|.txt|setup.cfg - id: end-of-file-fixer - exclude: .ipynb|.rvc|.rvh|.rvi|.rvp|.rvt|.tpl|.txt - id: check-json + - id: check-toml - id: check-yaml - args: [ --allow-multiple-documents ] + args: [ 
'--allow-multiple-documents' ] + - id: debug-statements - id: pretty-format-json args: [ '--autofix', '--no-ensure-ascii', '--no-sort-keys' ] - - id: debug-statements - repo: https://github.com/pre-commit/pygrep-hooks rev: v1.10.0 hooks: - id: python-check-blanket-noqa - id: rst-inline-touching-normal + - repo: https://github.com/pappasam/toml-sort + rev: v0.23.0 + hooks: + - id: toml-sort-fix - repo: https://github.com/psf/black rev: 23.3.0 hooks: - id: black exclude: ^docs/ - args: [ --target-version=py38 ] - repo: https://github.com/pycqa/flake8 rev: 6.0.0 hooks: @@ -40,19 +42,16 @@ repos: rev: 5.12.0 hooks: - id: isort - args: [ --settings-file=setup.cfg ] - repo: https://github.com/nbQA-dev/nbQA rev: 1.7.0 hooks: - id: nbqa-pyupgrade args: [ --py38-plus ] - additional_dependencies: [ pyupgrade==3.3.1 ] + additional_dependencies: [ 'pyupgrade==3.3.1' ] - id: nbqa-black - args: [ --target-version=py38 ] - additional_dependencies: [ black==23.3.0 ] + additional_dependencies: [ 'black==23.3.0' ] - id: nbqa-isort - args: [ --settings-file=setup.cfg ] - additional_dependencies: [ isort==5.12.0 ] + additional_dependencies: [ 'isort==5.12.0' ] - repo: https://github.com/pycqa/pydocstyle rev: 6.3.0 hooks: @@ -62,16 +61,12 @@ repos: rev: v0.3.8 hooks: - id: blackdoc - additional_dependencies: [ black==23.3.0 ] + additional_dependencies: [ 'black==23.3.0' ] - repo: https://github.com/adrienverge/yamllint.git rev: v1.32.0 hooks: - id: yamllint - args: [ '--config-file', '.yamllint.yaml' ] - - repo: https://github.com/mgedmin/check-manifest - rev: "0.49" - hooks: - - id: check-manifest + args: [ '--config-file=.yamllint.yaml' ] - repo: https://github.com/python-jsonschema/check-jsonschema rev: 0.23.0 hooks: @@ -91,5 +86,5 @@ ci: autoupdate_branch: '' autoupdate_commit_msg: '[pre-commit.ci] pre-commit autoupdate' autoupdate_schedule: weekly - skip: [ check-manifest ] + skip: [ ] submodules: false diff --git a/.readthedocs.yml b/.readthedocs.yml index f7f63553..0afd6623 
100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -17,18 +17,20 @@ build: tools: python: "mambaforge-4.10" jobs: + post_install: + - pip install .[docs,gis] pre_build: - sphinx-apidoc -o docs/apidoc --private --module-first ravenpy - - sphinx-build -b linkcheck docs/ _build/linkcheck + - sphinx-build -b linkcheck docs/ _build/linkcheck || true formats: all conda: environment: environment-rtd.yml -python: - install: - - method: pip - path: . - extra_requirements: - - dev +#python: +# install: +# - method: pip +# path: . +# extra_requirements: +# - dev diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index c382f196..693f0ede 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -75,7 +75,7 @@ Ready to contribute? Here's how to set up `ravenpy` for local development. $ pre-commit install Special style and formatting checks will be run when you commit your changes. You - can always run the hooks on their own with: + can always run the hooks on their own with:: $ pre-commit run -a @@ -119,7 +119,7 @@ Before you submit a pull request, check that it meets these guidelines: 2. If the pull request adds functionality, the docs should be updated. Put your new functionality into a function with a docstring, and add the feature to the list in README.rst. -3. The pull request should work for Python 3.8, 3.9, and 3.10. Check +3. The pull request should work for Python 3.8, 3.9, 3.10, and 3.11. Check https://github.com/CSHS-CWRA/RavenPy/actions/workflows/main.yml and make sure that the tests pass for all supported Python versions. @@ -128,7 +128,7 @@ Tips To run a subset of tests:: -$ pytest tests.test_ravenpy + $ pytest tests.test_ravenpy Versioning/Tagging @@ -138,9 +138,9 @@ A reminder for the maintainers on how to deploy. Make sure all your changes are committed (including an entry in HISTORY.rst). 
Then run:: -$ bumpversion patch # possible: major / minor / patch -$ git push -$ git push --tags + $ bumpversion patch # possible: major / minor / patch + $ git push + $ git push --tags Packaging --------- @@ -148,21 +148,33 @@ Packaging When a new version has been minted (features have been successfully integrated test coverage and stability is adequate), maintainers should update the pip-installable package (wheel and source release) on PyPI as well as the binary on conda-forge. -The simple approach +The Automated Approach +~~~~~~~~~~~~~~~~~~~~~~ + +The simplest way to package `ravenpy` is to "publish" a version on GitHub. GitHub CI Actions are presently configured to build the library and publish the packages on PyPI automatically. + +Tagged versions will trigger a GitHub Workflow (`tag-testpypi.yml`) that will attempt to build and publish the release on `TestPyPI `_. + +.. note:: + Should this step fail, changes may be needed in the package; Be sure to remove this tag on GitHub and locally, address any existing problems, and recreate the tag. + +To upload a new version to `PyPI `_, simply create a new "Published" release version on GitHub to trigger the upload workflow (`publish-pypi.yml`). When publishing on GitHub, the maintainer can either set the release notes manually (based on the `HISTORY.rst`), or set GitHub to generate release notes automatically. The choice of method is up to the maintainer. + +.. warning:: + A published version on TestPyPI/PyPI can never be overwritten. Be sure to verify that the package published at https://test.pypi.org/project/ravenpy/ matches expectations before publishing a release version on GitHub. + +The Manual Approach +~~~~~~~~~~~~~~~~~~~ -The simplest approach to packaging for general support (pip wheels) requires the following packages installed: - * setuptools - * wheel - * twine +The manual approach to library packaging for general support (pip wheels) requires that the `flit `_ library is installed.
From the command line on your Linux distribution, simply run the following from the clone's main dev branch:: # To build the packages (sources and wheel) - $ python setup.py sdist bdist_wheel + $ flit build # To upload to PyPI - $ twine upload dist/* + $ flit publish The new version based off of the version checked out will now be available via `pip` (`$ pip install ravenpy`). @@ -177,8 +189,23 @@ In order to prepare an initial release on conda-forge, we *strongly* suggest con * https://github.com/conda-forge/staged-recipes Before updating the main conda-forge recipe, we echo the conda-forge documentation and *strongly* suggest performing the following checks: - * Ensure that dependencies and dependency versions correspond with those of the tagged version, with open or pinned versions for the `host` requirements. - * If possible, configure tests within the conda-forge build CI (e.g. `imports: ravenpy`, `commands: pytest ravenpy`) + * Ensure that dependencies and dependency versions correspond with those of the PyPI published version, with open or pinned versions for the `host` requirements. + * If possible, configure tests within the conda-forge build CI, e.g.: + +.. code-block:: yaml + + test: + source_files: + - tests + requires: + - pip + - pytest + - pytest-xdist + imports: + - ravenpy + commands: + - pip check + - pytest Subsequent releases ^^^^^^^^^^^^^^^^^^^ diff --git a/HISTORY.rst b/HISTORY.rst index ac110424..c545e75d 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -2,6 +2,16 @@ History ======= +0.12.0 (2023-05-25) +------------------- + +* Ravenpy now employs a new method for installing the Raven model using the `raven-hydro `_ python package (based on `scikit-build-core`). +* Replaced `setup.py`, `requirements.txt`, and `Manifest.in` for `PEP 517 `_ compliance (`pyproject.toml`) using the flit backend. 
+* Dealt with an import-based error that occurred due to the sequence in which modules are loaded at import (attempting to call ravenpy before it is installed). +* Updated pre-commit hooks to include formatters and checkers for TOML files. +* The build recipes no longer build on each other, so when installing the dev or docs recipe, you must also install the gis recipe. +* Updated the GeoServer API calls to work with the GeoPandas v0.13.0. + 0.11.0 (2023-02-16) ------------------- diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index a55c7dcb..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1,36 +0,0 @@ -include AUTHORS.rst -include CONTRIBUTING.rst -include HISTORY.rst -include LICENSE -include README.rst -include requirements_dev.txt -include requirements_docs.txt -include requirements_gis.txt - -recursive-include ravenpy *.py *.zip *.csv *.rst -recursive-include tests * -recursive-include benchmark *.ipynb *.txt -recursive-include docs *.rst *.md conf.py Makefile make.bat *.jpg *.png *.gif *.ipynb - -recursive-exclude * __pycache__ -recursive-exclude * *.py[co] -recursive-exclude docs/notebooks/.ipynb_checkpoints * -recursive-exclude docs/_build * -recursive-exclude docs/apidoc *.rst -recursive-exclude docs/.jupyter_cache * -recursive-exclude docs/jupyter_execute * - -graft ravenpy/data - -exclude .coveralls.yml -exclude .cruft.json -exclude .editorconfig -exclude .pre-commit-config.yaml -exclude .readthedocs.yml -exclude .yamllint.yaml -exclude .zenodo.json -exclude Makefile -exclude environment.yml -exclude environment-rtd.yml -exclude mypy.ini -exclude tox.ini diff --git a/Makefile b/Makefile index ec0326f4..7a8d1994 100644 --- a/Makefile +++ b/Makefile @@ -89,9 +89,11 @@ release: dist ## package and upload a release twine upload dist/* dist: clean ## builds source and wheel package - python setup.py sdist - python setup.py bdist_wheel + flit build ls -l dist install: clean ## install the package to the active Python's site-packages - python 
setup.py install + python -m pip install --no-user . + +develop: clean ## install the package and development dependencies in editable mode to the active Python's site-packages + python -m pip install --no-user --editable ".[dev]" diff --git a/README.rst b/README.rst index 1bdd3368..1f385c05 100644 --- a/README.rst +++ b/README.rst @@ -15,13 +15,11 @@ Raven_ is an hydrological modeling framework that lets hydrologists build hydrol `RavenPy` provides a Python interface to Raven_, automating the creation of configuration files and allowing the model to be launched from Python. Results, or errors, are automatically parsed and exposed within the programming environment. This facilitates the launch of parallel simulations, multi-model prediction ensembles, sensitivity analyses and other experiments involving a large number of model runs. -Note that version 0.20 includes major changes compared to the previous 0.12 release, and breaks backward compatibility. The benefits of these changes are a much more intuitive interface for configuring and running the model. - +Note that version 0.20.0 includes many major changes compared to the previous 0.12 release, and breaks backward compatibility, notably, the Raven model is now compiled/installed/managed via a dedicated pip/conda package `raven-hydro`_. The other benefits of these changes are a much more intuitive interface for configuring and running the model. Features -------- -* Download and compile Raven with `pip` * Configure, run and parse Raven outputs from Python * Utility command to create grid weight files * Extract physiographic information about watersheds @@ -40,13 +38,14 @@ RavenPy's development has been funded by CANARIE_ and Ouranos_ and would be not This package was created with Cookiecutter_ and the `Ouranosinc/cookiecutter-pypackage`_ project template. +.. _CANARIE: https://www.canarie.ca .. _Cookiecutter: https://github.com/audreyfeldroy/cookiecutter-pypackage +.. _Ouranos: https://www.ouranos.ca +.. 
_Ouranosinc/cookiecutter-pypackage: https://github.com/Ouranosinc/cookiecutter-pypackage .. _Raven: http://raven.uwaterloo.ca -.. _`CANARIE`: https://www.canarie.ca -.. _`Ouranos`: https://www.ouranos.ca -.. _`Ouranosinc/cookiecutter-pypackage`: https://github.com/Ouranosinc/cookiecutter-pypackage -.. _`docs`: https://www.civil.uwaterloo.ca/raven/files/v3.5/RavenManual_v3.5.pdf -.. _`installation docs`: https://ravenpy.readthedocs.io/en/latest/installation.html +.. _docs: https://www.civil.uwaterloo.ca/raven/files/v3.7/RavenManual_v3.7.pdf +.. _installation docs: https://ravenpy.readthedocs.io/en/latest/installation.html +.. _raven-hydro: https://github.com/Ouranosinc/raven-hydro .. |pypi| image:: https://img.shields.io/pypi/v/ravenpy.svg diff --git a/docs/installation.rst b/docs/installation.rst index 7e59f63f..871e14d0 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -2,11 +2,12 @@ Installation ============ -Full Installation (Anaconda) +Anaconda Python Installation ---------------------------- For many reasons, we recommend using a `Conda environment `_ -to work with the full RavenPy installation. This implementation is able to manage the harder-to-install GIS dependencies,like `GDAL`. +to work with the full RavenPy installation. This implementation is able to manage the harder-to-install GIS dependencies, like `GDAL`. + Begin by creating an environment: .. code-block:: console @@ -25,70 +26,48 @@ RavenPy can then be installed directly via its `conda-forge` package by running: (ravenpy) $ conda install -c conda-forge ravenpy -This approach installs both the `Raven `_ binary directly to your environment `PATH`, +This approach installs the `Raven `_ binary directly to your environment `PATH`, as well as installs all the necessary Python and C libraries supporting GIS functionalities. +Python Installation (pip) +------------------------- -Custom Installation (Python/Pip) --------------------------------- +.. 
warning:: + In order to compile the Raven model (provided by the `raven-hydro` package, a C++ compiler (`GCC`, `Clang`, `MSVC`, etc.) and either `GNU Make` (Linux/macOS) or `Ninja` (Windows) must be exposed on the `$PATH`. .. warning:: - The following instructions will only work on POSIX-like systems (Unix/Linux; not supported on Windows). + The Raven model also requires that NetCDF4 libraries are installed on the system, exposed on the `$PATH`, and discoverable using the `FindNetCDF.cmake` helper script bundled with `raven-hydro`. + + On Linux, this can be provided by the `libnetcdf-dev` system library; On macOS by the `netcdf` homebrew package; And on Windows by using UNIDATA's [pre-built binaries](https://docs.unidata.ucar.edu/netcdf-c/current/winbin.html). -If you wish to install RavenPy and its C-libraries manually, compiling the `Raven` binaries for your system, -you can install the entire system directly, placing them in the `bin` folder of your environment. In order to perform this from Ubuntu/Debian: .. code-block:: console - $ sudo apt-get install gcc libnetcdf-dev gdal proj geos geopandas + $ sudo apt-get install gcc libnetcdf-dev gdal proj geos Then, from your python environment, run: .. code-block:: console $ pip install ravenpy[gis] - $ pip install ravenpy[gis] --verbose --install-option="--with-binaries" - -.. warning:: - - It is imperative that the Python dependencies are pre-installed before running the `--with-binaries` - option; This install step will fail otherwise. - -If desired, the core functions of `RavenPy` can be installed without its GIS functionalities as well. -This implementation of RavenPy is much lighter on dependencies and can be installed easily with `pip`, -without the need for `conda` or `virtualenv`. - -The only libraries required for RavenPy in this approach are a C++ compiler and the NetCDF4 development libraries. - -.. 
code-block:: console - $ sudo apt-get install gcc libnetcdf-dev +If desired, the core functions of `RavenPy` can be installed without its GIS functionalities as well. This implementation of RavenPy is much lighter on dependencies and can be installed easily with `pip`, without the need for `conda` or `virtualenv`. .. code-block:: console $ pip install ravenpy - $ pip install ravenpy --verbose --install-option="--with-binaries" -.. warning:: +Using A Custom Raven Model Binary +--------------------------------- - It is imperative that the Python dependencies are pre-installed before running the `--with-binaries` - option; This install step will fail otherwise. +If you wish to install the `Raven` model, either compiling the `Raven` binary from sources for your system or installing the pre-built binary offered by UWaterloo, we encourage you to consult the `Raven` documentation (`Raven Downloads `_). -If for any reason you prefer to install without the binaries, from a fresh python environment, run the following: +Once downloaded/compiled, the binary can be pointed to manually (as an absolute path) by setting the environment variable ``RAVENPY_RAVEN_BINARY_PATH`` in the terminal/command prompt/shell used at runtime. .. code-block:: console - (ravenpy-env) $ pip install ravenpy[gis] - -But then you will be in charge of providing ``raven`` binaries on your PATH, or setting values for the -``RAVENPY_RAVEN_BINARY_PATH`` environment variable (as an absolute path) in the -terminal/command prompt/shell used at runtime. - -.. note:: - - The `virtualenv `_ implementation also works well, but the - GIS system libraries it depends on (specifically `GDAL` and `GEOS`) can be more difficult to configure. + $ export RAVENPY_RAVEN_BINARY_PATH=/path/to/my/custom/raven Development Installation (from sources) --------------------------------------- @@ -114,16 +93,7 @@ You can then install RavenPy with: .. 
code-block:: console # for the python dependencies - (ravenpy) $ pip install --editable ".[dev]" - -.. warning:: - - The following command will only work on POSIX-like systems (Unix/Linux; not supported on Windows). - -.. code-block:: console - - # for the Raven binaries - (ravenpy) $ pip install --editable "." --install-option="--with-binaries" + (ravenpy) $ pip install --editable ".[dev,gis]" Install the pre-commit hook (to make sure that any code you contribute is properly formatted): diff --git a/docs/usage.md b/docs/usage.md index 427064fd..f4f2f186 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -12,7 +12,7 @@ In particular, RavenPy includes eight pre-configured model *emulators*. To run Raven using existing configuration files (`.rv*`), simply call the `run` function with the name of the configuration file and the path to the directory storing the RV files: -```python3 +```python from ravenpy import run output_path = run(modelname, configdir) @@ -25,7 +25,7 @@ output_path = run(modelname, configdir) The model outputs can be read with the `OutputReader` class: -```python3 +```python from ravenpy import OutputReader out = OutputReader(run_name, path=output_path) @@ -37,7 +37,7 @@ Note that this works only if simulated variables are stored as netCDF files, tha The class `EnsembleReader` does the same for an ensemble of model outputs, concatenating netCDF outputs along a new dimension: -```python3 +```python from ravenpy import EnsembleReader out = EnsembleReader( @@ -56,7 +56,7 @@ For more info, see {ref}`ensemble_reader`. Ravenpy comes packaged with pre-configured emulators, that is, Raven model configurations that can be modified on the fly. These emulators are made out of symbolic expressions, connecting model parameters to properties and coefficients. For example, the code below creates a model configuration for emulated model GR4JCN using the parameters given, as well as a `Gauge` configuration inferred by inspecting the `meteo.nc` file. 
-```python3 +```python from ravenpy.config.emulators import GR4JCN from ravenpy.config.commands import Gauge @@ -84,7 +84,7 @@ The RV files for the emulator above can be inspected using the `rvi`, `rvh`, `rv For convenience, `ravenpy` also proposes the `Emulator` class, designed to streamline the execution of the model and the retrieval of the results. -```python3 +```python from ravenpy import Emulator e = Emulator(config=gr4jcn, workdir="/tmp/gr4jcn/run_1") diff --git a/docs/user_api.rst b/docs/user_api.rst index 0efdc920..2ff77fb9 100644 --- a/docs/user_api.rst +++ b/docs/user_api.rst @@ -2,7 +2,6 @@ User API ======== - Execution ========= @@ -10,7 +9,6 @@ Execution :members: :noindex: - Configuration ============= @@ -22,8 +20,6 @@ Configuration :members: :noindex: - - Emulators ========= @@ -31,8 +27,6 @@ Emulators :members: :noindex: - - Extractors ========== @@ -44,7 +38,6 @@ Extractors :members: :noindex: - Utilities ========= diff --git a/environment-rtd.yml b/environment-rtd.yml index 45ab2102..f3e050cf 100644 --- a/environment-rtd.yml +++ b/environment-rtd.yml @@ -3,9 +3,11 @@ channels: - conda-forge - defaults dependencies: - - python >=3.8,<3.11 + - python >=3.8,<3.12 - autodoc-pydantic - click +# - clisops # mocked + - gdal - ipykernel - ipython - jupyter_client diff --git a/environment.yml b/environment.yml index 1a6f6b4e..f5805807 100644 --- a/environment.yml +++ b/environment.yml @@ -3,42 +3,41 @@ channels: - conda-forge - defaults dependencies: - - raven-hydro ==3.6 - - python >=3.8 + - raven-hydro ==0.2.1 + - python >=3.8,<3.12 - affine - cftime - cf_xarray - click - - climpred >=2.1.0 +# - climpred >=2.2.0 # conda package is incompatible with latest raven-hydro - dask - fiona >=1.9 + - flit - gdal >=3.1 - - geopandas >=0.9 + - geopandas >=0.13.0 - haversine - holoviews - hvplot - lxml - matplotlib - - netCDF4 - - numpy < 1.24 - - owslib >=0.24.1,<0.29 - - pandas < 2.0 + - netcdf4 + - numpy + - owslib <0.29.0 # see: 
https://github.com/geopython/OWSLib/issues/871 + - pandas - pint >=0.20 - - pip - pre-commit - pydantic - pymbolic + - pyogrio - pyproj >=3.0 - rasterio - requests - rioxarray - - scikit-learn ==0.24.2 - scipy - - setuptools <=65.6 - shapely - spotpy - statsmodels - - xarray <2022.11.0 # Pinned due to incompatibility with climpred @ 2.2.0 - - xclim >=0.40 + - xarray + - xclim >=0.43 + - xesmf - xskillscore - - wheel diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 153be1c2..00000000 --- a/mypy.ini +++ /dev/null @@ -1,19 +0,0 @@ -[mypy] -plugins = pydantic.mypy - -follow_imports = silent -warn_redundant_casts = True -warn_unused_ignores = True -disallow_any_generics = True -check_untyped_defs = True -no_implicit_reexport = False -ignore_missing_imports = True - -# for strict mypy: (this is the tricky one :-)) -disallow_untyped_defs = False - -[pydantic-mypy] -init_forbid_extra = True -init_typed = True -warn_required_dynamic_aliases = True -warn_untyped_fields = True diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..bb94e084 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,225 @@ +[build-system] +requires = ["flit_core >=3.8,<4"] +build-backend = "flit_core.buildapi" + +[project] +name = "ravenpy" +authors = [ + {name = "David Huard", email = "huard.david@ouranos.ca"}, + {name = "Richard Arsenault", email = "Richard.Arsenault@etsmtl.ca"} +] +maintainers = [ + {name = "Trevor James Smith", email = "smith.trevorj@ouranos.ca"} +] +readme = {file = "README.rst", content-type = "text/x-rst"} +requires-python = ">=3.8.0" +keywords = ["raven", "raven-hydro", "hydrology", "gis", "analysis", "modelling"] +license = {file = "LICENSE"} +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: Education", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming 
Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python", + "Topic :: Scientific/Engineering :: Atmospheric Science", + "Topic :: Scientific/Engineering :: GIS", + "Topic :: Scientific/Engineering :: Hydrology" +] +dynamic = ["description", "version"] +dependencies = [ + "cftime", + "cf-xarray[all]", + "click", + "climpred>=2.2", + "dask", + "haversine", + "matplotlib", + "netCDF4", + "numpy", + "owslib>=0.24.1,<0.29", # see: https://github.com/geopython/OWSLib/issues/871 + "pandas<2.0; python_version == '3.8'", + "pandas; python_version >= '3.9'", + "pint>=0.20", + "pydantic>=1.10.8", + "pymbolic", + "raven-hydro==0.2.1", + "requests", + "scipy", + "spotpy", + "statsmodels", + "xarray", + "xclim>=0.43.0", + "xskillscore" +] + +[project.optional-dependencies] +dev = [ + "black>=23.3.0", + "bump2version", + "coverage", + "coveralls", + "filelock", + "flake8", + "flit", + "holoviews", + "hvplot", + "isort", + "mypy", + "pre-commit", + "pytest", + "pytest-cov", + "pytest-xdist>=3.2.0", + "tox", + "watchdog" +] +docs = [ + "autodoc_pydantic", + "birdhouse-birdy", + "cartopy", + "clisops", + "gcsfs", + "hs_restclient", + "intake", + "intake-esm", + "intake-xarray", + "ipykernel", + "ipyleaflet", + "ipython", + "ipywidgets", + "jupyter-cache", + "jupyter_client", + "jupytext", + "myst_nb", + "nbsphinx", + "numpydoc", + "pandoc", + "pymetalink", + "s3fs", + "sphinx", + "sphinx-click", + "sphinx-codeautolink", + "sphinx-copybutton", + "sphinx-rtd-theme>=1.0", + "xesmf" +] +gis = [ + "affine", + "fiona>=1.9", + "geopandas>=0.9.0", + "gdal", + "lxml", + "pyogrio", + "pyproj>=3.0.0", + "rasterio", + "rioxarray", + "shapely" +] + +[project.scripts] +ravenpy = "ravenpy.cli:main" + +[project.urls] +"Homepage" = "https://ravenpy.readthedocs.io" +"Source" = "https://github.com/CSHS-CWRA/RavenPy" 
+"Changelog" = "https://ravenpy.readthedocs.io/en/latest/history.html" +"Issue tracker" = "https://github.com/CSHS-CWRA/RavenPy/issues" +"About Ouranos" = "https://www.ouranos.ca/en/" +"About CSHS-CWRA" = "https://cwra.org/en/" + +[tool] + +[tool.black] +target-version = [ + "py38", + "py39", + "py310" +] + +[tool.coverage.run] +relative_files = true +omit = ["tests/*.py"] + +[tool.flit.sdist] +include = [ + "AUTHORS.rst", + "HISTORY.rst", + "CONTRIBUTING.rst", + "LICENSE", + "Makefile", + "README.rst", + "docs/**/*.ipynb", + "docs/**/*.md", + "docs/**/*.rst", + "docs/Makefile", + "docs/make.bat", + "docs/conf.py", + "environment*.yml", + "ravenpy/**/*.py", + "ravenpy/data/**/*.csv", + "ravenpy/data/**/*.zip", + "setup.cfg", + "tests/*.py", + "tests/test.cfg", + "tox.ini" +] +exclude = [ + "**/*.py[co]", + "**/__pycache__", + ".*", + "docs/_build", + "docs/apidoc/modules.rst", + "docs/source/**/.ipynb_checkpoints", + "docs/apidoc/ravenpy*.rst" +] + +[tool.isort] +profile = "black" +py_version = 38 +append_only = true + +[tool.mypy] +plugins = [ + "pydantic.mypy" +] +follow_imports = "silent" +warn_redundant_casts = true +warn_unused_ignores = true +disallow_any_generics = true +check_untyped_defs = true +no_implicit_reexport = false +ignore_missing_imports = true +# for strict mypy: (this is the tricky one :-)) +disallow_untyped_defs = false + +[tool.pydantic-mypy] +init_forbid_extra = true +init_typed = true +warn_required_dynamic_aliases = true +warn_untyped_fields = true + +[tool.pytest.ini_options] +addopts = [ + "--verbose", + "--color=yes", + "--strict-markers", + "--tb=native", + "--numprocesses=0", + "--maxprocesses=8", + "--dist=worksteal" +] +python_files = "test_*.py" +norecursedirs = ["src", ".git", "bin"] +filterwarnings = ["ignore::UserWarning"] +testpaths = "tests" +markers = [ + "slow: marks tests as slow (deselect with '-m \"not slow\"')", + "online: mark tests that require external services (deselect with '-m \"not online\"')" +] diff --git 
a/ravenpy/__init__.py b/ravenpy/__init__.py index 97cf1ca7..b7586a95 100644 --- a/ravenpy/__init__.py +++ b/ravenpy/__init__.py @@ -1,4 +1,4 @@ -"""Top-level package for RavenPy.""" +"""A Python package to help run Raven, the hydrologic modelling framework.""" from .__version__ import __author__, __email__, __version__ # noqa: F401 from .ravenpy import Emulator, EnsembleReader, OutputReader, RavenWarning, run diff --git a/ravenpy/__version__.py b/ravenpy/__version__.py index 5648c599..0e695d02 100644 --- a/ravenpy/__version__.py +++ b/ravenpy/__version__.py @@ -2,6 +2,6 @@ # without importing the main package when its dependencies are not installed. # See: https://packaging.python.org/guides/single-sourcing-package-version -__author__ = """David Huard""" +__author__ = "David Huard" __email__ = "huard.david@ouranos.ca" __version__ = "0.11.0" diff --git a/ravenpy/config/commands.py b/ravenpy/config/commands.py index 4bdd23cf..6b156bd2 100644 --- a/ravenpy/config/commands.py +++ b/ravenpy/config/commands.py @@ -29,8 +29,7 @@ validator, ) -from ravenpy.config import options - +from ..config import options from .base import ( Command, FlatCommand, diff --git a/ravenpy/config/parsers.py b/ravenpy/config/parsers.py index cbfd0fb1..548f08eb 100644 --- a/ravenpy/config/parsers.py +++ b/ravenpy/config/parsers.py @@ -5,8 +5,7 @@ import cftime from xarray import open_dataset -from ravenpy.config import commands as rc - +from ..config import commands as rc from .conventions import RAVEN_OUTPUT_FMT diff --git a/ravenpy/config/rvs.py b/ravenpy/config/rvs.py index 7051099e..83fde0dc 100644 --- a/ravenpy/config/rvs.py +++ b/ravenpy/config/rvs.py @@ -6,10 +6,9 @@ import cftime from pydantic import Field, root_validator, validator -from ravenpy.config import commands as rc -from ravenpy.config import options as o -from ravenpy.config import processes as rp - +from ..config import commands as rc +from ..config import options as o +from ..config import processes as rp from .base import 
RV, Sym, parse_symbolic """ diff --git a/ravenpy/ravenpy.py b/ravenpy/ravenpy.py index f59a58ce..5459f817 100644 --- a/ravenpy/ravenpy.py +++ b/ravenpy/ravenpy.py @@ -12,8 +12,8 @@ import xarray as xr -from ravenpy.config import parsers -from ravenpy.config.rvs import Config +from .config import parsers +from .config.rvs import Config RAVEN_EXEC_PATH = os.getenv("RAVENPY_RAVEN_BINARY_PATH") or shutil.which("raven") diff --git a/ravenpy/utilities/geo.py b/ravenpy/utilities/geo.py index 9356bb67..95f2b05a 100644 --- a/ravenpy/utilities/geo.py +++ b/ravenpy/utilities/geo.py @@ -206,10 +206,10 @@ def generic_vector_reproject( Path to a file containing a valid vector layer. projected: Union[str, Path] Path to a file to be written. - source_crs : Union[str, dict, CRS] - Projection identifier (proj4) for the source geometry, Default: '+proj=longlat +datum=WGS84 +no_defs'. - target_crs : Union[str, dict, CRS] - Projection identifier (proj4) for the target geometry. + source_crs : Union[str, pyproj.crs.CRS] + CRS for the source geometry. Default: 4326. + target_crs : Union[str, pyproj.crs.CRS] + CRS for the target geometry. 
Returns ------- @@ -218,6 +218,8 @@ def generic_vector_reproject( if target_crs is None: raise ValueError("No target CRS is defined.") + if isinstance(target_crs, CRS): + target_crs = target_crs.to_dict() if isinstance(vector, Path): vector = vector.as_posix() diff --git a/ravenpy/utilities/geoserver.py b/ravenpy/utilities/geoserver.py index f70e8fc8..8e239757 100644 --- a/ravenpy/utilities/geoserver.py +++ b/ravenpy/utilities/geoserver.py @@ -15,8 +15,8 @@ import inspect import json import os +import urllib.request import warnings -from io import BytesIO from pathlib import Path from typing import Iterable, Optional, Sequence, Tuple, Union from urllib.parse import urljoin @@ -52,11 +52,11 @@ # We store the contour of different hydrobasins domains hybas_dir = Path(__file__).parent.parent / "data" / "hydrobasins_domains" -hybas_pat = "hybas_lake_{}_lev01_v1c.zip" +hybas_pat = "hybas_lake_{domain}_lev01_v1c.zip" # This could be inferred from existing files in hybas_dir hybas_regions = ["na", "ar"] -hybas_domains = {dom: hybas_dir / hybas_pat.format(dom) for dom in hybas_regions} +hybas_domains = {dom: hybas_dir / hybas_pat.format(domain=dom) for dom in hybas_regions} def _get_location_wfs( @@ -362,10 +362,11 @@ def hydrobasins_upstream(feature: dict, domain: str) -> pd.DataFrame: # filter = PropertyIsEqualTo(propertyname=basin_family, literal=feature[basin_family]) # Fetch all features in the same basin - req = filter_hydrobasins_attributes_wfs( + request_url = filter_hydrobasins_attributes_wfs( attribute=basin_family, value=feature[basin_family], domain=domain ) - df = gpd.read_file(req) + with urllib.request.urlopen(url=request_url) as req: + df = gpd.read_file(filename=req, engine="pyogrio") # Filter upstream watersheds return _determine_upstream_ids( @@ -436,8 +437,9 @@ def select_hybas_domain( with open(fn, "rb") as f: zf = fiona.io.ZipMemoryFile(f) coll = zf.open(fn.stem + ".shp") - for _ in coll.filter(bbox=bbox): - return dom + for feat in 
coll.filter(bbox=bbox): + if isinstance(feat, fiona.Feature): + return dom raise LookupError(f"Could not find feature containing bbox: {bbox}.") diff --git a/requirements_dev.txt b/requirements_dev.txt deleted file mode 100644 index b0fdacf5..00000000 --- a/requirements_dev.txt +++ /dev/null @@ -1,21 +0,0 @@ -pip>=21.0,<23.1 -bump2version -wheel -watchdog -flake8 -tox -coverage -coveralls -Sphinx -twine -Click -pytest -pytest-cov -pytest-xdist>=3.2.0 -filelock -black>=23.3.0 -isort -pre-commit -holoviews -hvplot -setuptools<65.6 diff --git a/requirements_docs.txt b/requirements_docs.txt deleted file mode 100644 index dcdca91e..00000000 --- a/requirements_docs.txt +++ /dev/null @@ -1,31 +0,0 @@ -sphinx -sphinx-click -sphinx-codeautolink -sphinx-copybutton -sphinx-rtd-theme>=1.0 -nbsphinx -pandoc -ipython -ipykernel -jupyter_client -sphinx-click -numpydoc -pymetalink -autodoc_pydantic -owslib<0.29 -birdhouse-birdy -intake -intake-esm -ipyleaflet -ipywidgets -jupyter -s3fs -clisops -intake-xarray -gcsfs -hs_restclient -jupytext -jupyter-cache -myst_nb -cartopy -xesmf diff --git a/requirements_gis.txt b/requirements_gis.txt deleted file mode 100644 index 92c5f079..00000000 --- a/requirements_gis.txt +++ /dev/null @@ -1,9 +0,0 @@ -affine -fiona>=1.9 -geopandas>=0.9.0 -lxml -owslib>=0.24.1 -pyproj>=3.0.0 -rasterio -rioxarray -shapely diff --git a/setup.cfg b/setup.cfg index 16d3f78f..8c170481 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,10 +3,6 @@ current_version = 0.11.0 commit = True tag = False -[bumpversion:file:setup.py] -search = version="{current_version}" -replace = version="{new_version}" - [bumpversion:file:ravenpy/__version__.py] search = __version__ = "{current_version}" replace = __version__ = "{new_version}" @@ -15,32 +11,6 @@ replace = __version__ = "{new_version}" search = "version": "{current_version}", replace = "version": "{new_version}", -[aliases] -test = pytest - -[tool:pytest] -collect_ignore = - setup.py -addopts = - --color=yes - --verbose - 
--numprocesses=0 - --maxprocesses=8 - --dist=worksteal -python_files = test_*.py -norecursedirs = src .git bin -filterwarnings = - ignore::UserWarning - -[isort] -profile = black -py_version = 38 -append_only = true - -[coverage:run] -relative_files = True -omit = tests/* - [flake8] exclude = .git, @@ -83,6 +53,16 @@ rst-roles = py:ref, ref +[pycodestyle] +count = False +exclude = tests +ignore = + E226, + E402, + E501, +max-line-length = 120 +statistics = True + [pydocstyle] convention = numpy -match = ((?!(test_|conf)).)*\.py +match = ((?!(test_|conftest|conf)).)*\.py diff --git a/setup.py b/setup.py deleted file mode 100644 index 202cfc1a..00000000 --- a/setup.py +++ /dev/null @@ -1,291 +0,0 @@ -#!/usr/bin/env python - -"""The setup script.""" - -import os -import shutil -import subprocess -import urllib.request -import zipfile -from pathlib import Path -from typing import Optional, Union -from urllib.parse import urljoin - -# Note: setuptools < 65.6 is needed for some dependencies (see: https://github.com/pypa/setuptools/issues/3693) -from setuptools import Distribution, find_packages, setup -from setuptools.command.develop import develop -from setuptools.command.install import install - -RAVEN_VERSION = "3.6" - -with open("README.rst") as readme_file: - readme = readme_file.read() - -with open("HISTORY.rst") as history_file: - history = history_file.read() - -requirements = [ - "cftime", - "cf-xarray", - "click", - "climpred>=2.1", - "dask", - "haversine", - "matplotlib", - "netCDF4", - "numpy", - "owslib<0.29", - "pandas<2.0", - "pint>=0.20", - "pydantic", - "pymbolic", - "requests", - "scipy", - "spotpy", - "statsmodels", - "wheel", - "xarray<2022.11.0", # Pinned due to incompatibility with climpred @ 2.2.0 - "xclim>=0.40.0", - "xskillscore", -] - -test_requirements = [ - "pytest>=3", -] - -docs_requirements = [ - dependency for dependency in open("requirements_docs.txt").readlines() -] - -gis_requirements = [ - dependency for dependency in 
open("requirements_gis.txt").readlines() -] -# Special GDAL handling -on_conda = os.getenv("CONDA_BUILD") -if on_conda == "1": - gis_requirements.append("gdal") -else: - try: - gdal_version = subprocess.run( - ["gdal-config", "--version"], capture_output=True - ).stdout.decode("utf-8") - gis_requirements.append(f"gdal=={gdal_version}") - except (subprocess.CalledProcessError, FileNotFoundError): - pass - -dev_requirements = gis_requirements.copy() -dev_requirements.extend( - [dependency for dependency in open("requirements_dev.txt").readlines()] -) - - -# Idea taken from: https://stackoverflow.com/a/25176606/787842 -class OnlyGetScriptPath(install): - def run(self): - # does not call install.run() by design - self.distribution.install_scripts = self.install_scripts - - -def get_setuptools_install_scripts_dir(): - dist = Distribution({"cmdclass": {"install": OnlyGetScriptPath}}) - dist.dry_run = True # not sure if necessary, but to be safe - dist.parse_config_files() - command = dist.get_command_obj("install") - command.ensure_finalized() - command.run() - return dist.install_scripts - - -def create_external_deps_install_class(command_cls): - """ - Class factory command to implement the customized binary download + compile + install logic - for both the install and develop command contexts. - """ - - class InstallExternalDeps(command_cls): - """ - Custom handler for the 'install' and 'develop' commands, to download, extract and compile - the source code of Raven and copy the resulting binaries in a location - available on the PATH. - """ - - external_deps_path = None - with_binaries = False - - user_options = command_cls.user_options + [ - # The format is (long option, short option, description). - ( - "with-binaries", - None, - "Download Raven sources and compile them.", - ), - ] - - def initialize_options(self): - """Set default values for options.""" - # Each user option must be listed here with their default value. 
- command_cls.initialize_options(self) - self.with_binaries = False - - def finalize_options(self): - command_cls.finalize_options(self) - - def install_binary_dep( - self, - url, - name: str, - version: str, - rev_name: Optional[str] = None, - binary_name: str = "", - make_target: str = "", - src_folder: Optional[Union[str, os.PathLike]] = None, - remove_line: Optional[str] = None, - ): - print(f"Downloading {name} source code..") - if rev_name: - file_path = f"v{(Path(version) / rev_name).as_posix()}" - else: - file_path = f"v{version}" - - print( - f"{urljoin(url, file_path)}.zip", - self.external_deps_path / f"{name}.zip", - ) - urllib.request.urlretrieve( - f"{urljoin(url, file_path)}.zip", - self.external_deps_path / f"{name}.zip", - ) - - print(f"Extracting {name} source code..") - if rev_name: - out_folder = self.external_deps_path.joinpath(rev_name) - else: - out_folder = self.external_deps_path - with zipfile.ZipFile( - self.external_deps_path / f"{name}.zip", "r" - ) as zip_ref: - zip_ref.extractall(out_folder) - - print(f"Compiling {name}..") - src_folder = src_folder if src_folder else rev_name - c_filepath = self.external_deps_path / src_folder - try: - print(c_filepath) - - # Hacky patch fix until we can safely remove all this logic - if remove_line: - print("Patching Makefile..") - with open(c_filepath.joinpath("Makefile"), "r+") as f: - d = f.readlines() - f.seek(0) - for i in d: - if remove_line not in i: - f.write(i) - f.truncate() - - subprocess.check_call( - f"make {make_target}", - cwd=c_filepath, - shell=True, - ) - except subprocess.CalledProcessError as e: - raise RuntimeError(f"There was an error while compiling {name}") from e - - # Copy binary in a location which should be available on the PATH - # Note 1: if command_cls==install, self.install_scripts should correspond to /bin or ~/.local/bin - # Note 2: if command_cls==develop, self.install_scripts is None, so we are using a trick to get the value - # it would have with the `install` 
command - scripts_dir = self.install_scripts or get_setuptools_install_scripts_dir() - target_bin_path = Path(scripts_dir) / name - - print( - f"Copying binary from: " - f"{self.external_deps_path.joinpath(src_folder).joinpath(binary_name)}\n" - f"To: {target_bin_path}" - ) - shutil.copy( - self.external_deps_path.joinpath(src_folder).joinpath(binary_name), - target_bin_path, - ) - - def run(self): - if self.with_binaries: - self.external_deps_path = Path().cwd().joinpath("external_deps") - self.external_deps_path.mkdir(exist_ok=True) - - url = "https://www.civil.uwaterloo.ca/raven/files/" - self.install_binary_dep( - url, - "raven", - version=RAVEN_VERSION, - rev_name=f"RavenSource_v{RAVEN_VERSION}", - binary_name="Raven.exe", - remove_line="CXXFLAGS += -c++11", - ) - - # This works with python setup.py install, but produces this error with pip install: - # ERROR: ravenpy==0.1.0 did not indicate that it installed an .egg-info directory. - # Only setup.py projects generating .egg-info directories are supported. 
- # super().do_egg_install() - - # This works with pip install, but has the problem that it ignores install_requires - # when running with `python setup.py install`: - # https://stackoverflow.com/questions/21915469/python-setuptools-install-requires-is-ignored-when-overriding-cmdclass - command_cls.run(self) - - return InstallExternalDeps - - -setup( - author="David Huard", - author_email="huard.david@ouranos.ca", - python_requires=">=3.8", - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Intended Audience :: Education", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: MIT License", - "Natural Language :: English", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python", - "Topic :: Scientific/Engineering :: Atmospheric Science", - "Topic :: Scientific/Engineering :: GIS", - "Topic :: Scientific/Engineering :: Hydrology", - ], - description="A Python wrapper to setup and run the hydrologic modelling framework Raven.", - entry_points={ - "console_scripts": ["ravenpy=ravenpy.cli:main"], - }, - install_requires=requirements, - license="MIT license", - long_description=readme + "\n\n" + history, - long_description_content_type="text/x-rst", - include_package_data=True, - package_data={"ravenpy": ["*.csv", "*.zip"]}, - keywords="ravenpy", - name="ravenpy", - packages=find_packages( - include=[ - "ravenpy", - "ravenpy.*", - ], - ), - test_suite="tests", - tests_require=test_requirements, - extras_require=dict( - dev=dev_requirements, - docs=docs_requirements, - gis=gis_requirements, - ), - url="https://github.com/CSHS-CWRA/ravenpy", - version="0.11.0", - zip_safe=False, - cmdclass={ - "install": create_external_deps_install_class(install), - "develop": create_external_deps_install_class(develop), - }, -) diff --git a/tests/conftest.py b/tests/conftest.py 
index a3a82103..889f96ac 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -7,7 +7,7 @@ import pytest import xarray as xr from filelock import FileLock -from xclim.indicators.land import fit, stats +from xclim.indicators.generic import fit, stats from ravenpy.config import commands as rc from ravenpy.config.emulators import ( @@ -529,13 +529,13 @@ def symbolic_config(salmon_meteo, salmon_hru, request): gextras = {"ALL": {"elevation": salmon_hru["land"]["elevation"]}} if name in ["HBVEC", "HYPR"]: - ds = xr.open_dataset(salmon_meteo) - gextras["ALL"]["monthly_ave_temperature"] = ( - ((ds.tmin + ds.tmax) / 2).groupby("time.month").mean().values.tolist() - ) - gextras["ALL"]["monthly_ave_evaporation"] = ( - ds.pet.groupby("time.month").mean().values.tolist() - ) + with xr.open_dataset(salmon_meteo) as ds: + gextras["ALL"]["monthly_ave_temperature"] = ( + ((ds.tmin + ds.tmax) / 2).groupby("time.month").mean().values.tolist() + ) + gextras["ALL"]["monthly_ave_evaporation"] = ( + ds.pet.groupby("time.month").mean().values.tolist() + ) # Extra attributes for emulator extras = {} diff --git a/tests/test_emulators.py b/tests/test_emulators.py index c56d1291..1761d9dc 100644 --- a/tests/test_emulators.py +++ b/tests/test_emulators.py @@ -612,6 +612,7 @@ def test_routing_lievre_tutorial(get_local_testdata, tmp_path): assert out.hydrograph.q_sim[d].item() == pytest.approx(q_sim) +@pytest.mark.online def test_canopex(): CANOPEX_DAP = ( "https://pavics.ouranos.ca/twitcher/ows/proxy/thredds/dodsC/birdhouse/ets" diff --git a/tests/test_geoserver.py b/tests/test_geoserver.py index 4a4456ca..f5dfa744 100644 --- a/tests/test_geoserver.py +++ b/tests/test_geoserver.py @@ -1,4 +1,5 @@ import tempfile +import urllib.request import numpy as np import pytest @@ -15,14 +16,14 @@ class TestHydroBASINS: gpd = pytest.importorskip("geopandas") sgeo = pytest.importorskip("shapely.geometry") - def test_select_hybas_na_domain(self): + def test_select_hybas_na_domain_bbox(self): bbox = 
(-68.0, 50.0) * 2 - dom = self.geoserver.select_hybas_domain(bbox) + dom = self.geoserver.select_hybas_domain(bbox=bbox) assert dom == "na" - def test_select_hybas_ar_domain(self): - bbox = (-114.65, 61.35) * 2 - dom = self.geoserver.select_hybas_domain(bbox) + def test_select_hybas_ar_domain_point(self): + point = -114.65, 61.35 + dom = self.geoserver.select_hybas_domain(point=point) assert dom == "ar" def test_get_hydrobasins_location_wfs(self, tmp_path): @@ -49,7 +50,8 @@ def test_get_hydrobasins_attributes_wfs(self, tmp_path): region_url = self.geoserver.filter_hydrobasins_attributes_wfs( attribute="MAIN_BAS", value=main_bas, domain="na" ) - gdf = self.gpd.read_file(region_url) + with urllib.request.urlopen(url=region_url) as req: + gdf = self.gpd.read_file(filename=req, engine="pyogrio") assert len(gdf) == 18 assert gdf.crs.to_epsg() == 4326 @@ -106,7 +108,9 @@ def test_get_hydro_routing_attributes_wfs(self): region_url = self.geoserver.filter_hydro_routing_attributes_wfs( attribute="IsLake", value="1.0", lakes="1km", level="07" ) - gdf = self.gpd.read_file(region_url) + with urllib.request.urlopen(url=region_url) as req: + gdf = self.gpd.read_file(filename=req, engine="pyogrio") + assert len(gdf) == 11415 @pytest.mark.slow diff --git a/tests/test_graphs.py b/tests/test_graphs.py index c095f879..6b99e079 100644 --- a/tests/test_graphs.py +++ b/tests/test_graphs.py @@ -1,6 +1,6 @@ import numpy as np import xarray as xr -from xclim.indicators.land import fit, stats +from xclim.indicators.generic import fit, stats from ravenpy.utilities import graphs diff --git a/tests/test_nb_graphs.py b/tests/test_nb_graphs.py index a389d4cf..10d31820 100644 --- a/tests/test_nb_graphs.py +++ b/tests/test_nb_graphs.py @@ -8,23 +8,22 @@ class TestNBGraphs: nbg = pytest.importorskip("ravenpy.utilities.nb_graphs") def test_hydrograph(self, get_local_testdata): - self.nbg.hydrographs(xr.open_dataset(get_local_testdata(self.hydrographs))) + with 
xr.open_dataset(get_local_testdata(self.hydrographs)) as ds: + self.nbg.hydrographs(ds) def test_mean_annual_hydrograph(self, get_local_testdata): - self.nbg.mean_annual_hydrograph( - xr.open_dataset(get_local_testdata(self.hydrographs)) - ) + with xr.open_dataset(get_local_testdata(self.hydrographs)) as ds: + self.nbg.mean_annual_hydrograph(ds) def test_spaghetti_annual_hydrograph(self, get_local_testdata): - self.nbg.spaghetti_annual_hydrograph( - xr.open_dataset(get_local_testdata(self.hydrographs)) - ) + with xr.open_dataset(get_local_testdata(self.hydrographs)) as ds: + self.nbg.spaghetti_annual_hydrograph(ds) def test_ts_fit_graph(self, get_local_testdata): - from xclim.indicators.land import fit, stats + from xclim.indicators.generic import fit, stats - ds = xr.open_dataset(get_local_testdata(self.hydrographs)) + with xr.open_dataset(get_local_testdata(self.hydrographs)) as ds: + ts = stats(ds.q_sim.load(), op="max", freq="M") - ts = stats(ds.q_sim, op="max", freq="M") params = fit(ts, dist="gamma") self.nbg.ts_fit_graph(ts, params) diff --git a/tox.ini b/tox.ini index 595bf976..20eb53e9 100644 --- a/tox.ini +++ b/tox.ini @@ -1,23 +1,32 @@ [tox] min_version = 4.0 -envlist = black, py{38,39,310}-numpy, docs +envlist = + black + py{38,39,310,311} + docs requires = - pip >=21.0,<23.1 - setuptools >=63.0,<65.6 -opts = --verbose + pip >=21.0 + setuptools >=63.0 +opts = + -vv [testenv:black] skip_install = True deps = flake8 black -commands = make lint -allowlist_externals = make +commands = + make lint +allowlist_externals = + make [testenv:docs] -extras = docs -commands = make --directory=docs clean html -allowlist_externals = make +extras = + docs +commands = + make --directory=docs clean html +allowlist_externals = + make [testenv] setenv = @@ -29,31 +38,28 @@ passenv = GITHUB_* LD_PRELOAD RAVENPY_* -extras = dev +extras = + dev + gis download = true install_command = python -m pip install --no-user {opts} {packages} deps = # numpy must be present in python 
env before GDAL is installed - !numpy: numpy + numpy + gdal == {env:GDAL_VERSION} commands = - # Install the latest NumPy and Numba before GDAL and netCDF4 are installed - numpy: python -m pip install --upgrade --force-reinstall --no-cache-dir numpy==1.23 numba - # Install NetCDF4-Python via source files - # Pin @v1.6.0 needed due to issue with PyPI wheels (see: https://github.com/Unidata/netcdf4-python/issues/1192) - python -m pip install --upgrade --force-reinstall --no-deps --no-cache-dir netcdf4==1.6.0 --no-binary netcdf4 - # Deal with some GDAL silliness - python -m pip install --upgrade --force-reinstall --no-deps --no-cache-dir GDAL=={env:GDAL_VERSION} --global-option=build_ext --global-option="-I/usr/include/gdal" - # Pin @