diff --git a/.cruft.json b/.cruft.json index 9b90d6e0..60bb05d2 100644 --- a/.cruft.json +++ b/.cruft.json @@ -1,6 +1,6 @@ { "template": "https://github.com/Ouranosinc/cookiecutter-pypackage", - "commit": "14cbc49f79a6bba2502c8b3a593ff0babaf661ed", + "commit": "b9bacb7ddbbfaa8eff8c3e0182ebf2e8a66d00c7", "context": { "cookiecutter": { "full_name": "David Huard", @@ -22,7 +22,7 @@ "generated_with_cruft": "y", "__gh_slug": "https://github.com/CSHS-CWRA/RavenPy", "_template": "https://github.com/Ouranosinc/cookiecutter-pypackage", - "_commit": "14cbc49f79a6bba2502c8b3a593ff0babaf661ed" + "_commit": "b9bacb7ddbbfaa8eff8c3e0182ebf2e8a66d00c7" } }, "directory": null, diff --git a/.github/workflows/bump-version.yml b/.github/workflows/bump-version.yml index 45a3b893..e1462237 100644 --- a/.github/workflows/bump-version.yml +++ b/.github/workflows/bump-version.yml @@ -21,6 +21,7 @@ on: - CHANGELOG.rst - CI/**.in - CI/**.txt + - CITATION.cff - CODE_OF_CONDUCT.md - CONTRIBUTING.rst - Makefile diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 67879a7c..11656eaa 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -63,7 +63,7 @@ jobs: fail-fast: false matrix: os: [ 'ubuntu-latest', "macos-latest" ] - python-version: [ "3.11", "3.12", "3.13" ] + python-version: [ "3.11", "3.12", "3.13", "3.14" ] include: - os: 'ubuntu-latest' python-version: '3.10' diff --git a/.gitignore b/.gitignore index 35effac7..7dc1dd50 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ # RAVENPY-specific +Raven_errors.txt # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/.readthedocs.yml b/.readthedocs.yml index 7b442385..c20dff43 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -32,5 +32,3 @@ python: install: - method: pip path: . 
- extra_requirements: - - docs diff --git a/CHANGELOG.rst b/CHANGELOG.rst index e678f8d3..d28c6ed6 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -7,13 +7,25 @@ Changelog Contributors: Trevor James Smith (:user:`Zeitsperre`). -Changes -^^^^^^^ +New features +^^^^^^^^^^^^ * `raven-hydro` minimum version updated from v0.5.0 to v4.12.1 (for more information, see: https://github.com/Ouranosinc/raven-hydro/pull/93). (PR #587) -* Dependency updates. (PR #584): + +Breaking changes +^^^^^^^^^^^^^^^^ +* Dependency updates. (PR #584, PR #589): * Updated required `xskillscore` (``>= 0.0.29``) and `climpred` (``>= 2.6.0``). * Removed version pins on `intake` and `intake-xarray`. * Minimum required `birdy` is now ``>= 0.9.1``. + * Minimum required `pytest` is now ``>=9.0``. +* The development dependency recipes have been migrated to `dependency-groups` (PEP 735). Optional dependency lists are now separated by features. (PR #589): + * `ravenpy[gis]`: GIS functionality. + * `ravenpy[plotting]`: Notebook interactivity. + * `ravenpy[raven-hydro]`: To compile and install the `raven-hydro` binary. +* Many unnecessary/optional dependencies have been removed from the base library requirements. (PR #589): + * `dask`, `pydap`, `typing-extensions`, `xsdba`, `xskillscore` have been removed from install requirements. + * `affine` and `setuptools` are no longer listed in `gis` recipe requirements. + * `requests` is now explicitly listed in `gis` recipe requirements. Fixes ^^^^^ @@ -25,6 +37,12 @@ Internal changes * Added a Makefile recipe and a GitHub Workflow to run tests against the notebooks using ``pytest --nbval`` on changes as well as on a weekly schedule. (PR #584) * Fixed a bug in several workflows that was impeding triggers when Pull Requests are merged to `main`. (PR #584) * The conda configurations (``environment-dev.yml`` and ``environment-docs.yml``) now specify the exact dependencies needed to run the development and documentation builds. 
(PR #584) +* Tests reliant on GIS libraries can now be disabled using `pytest -m "not gis"`. Many tests that were previously skipped have been re-enabled with modified assertions reflecting changes to the model expected output. (PR #589) +* The cookiecutter template has been updated. (PR #589): + * Enabled PEP 735 (`dependency-groups`) for development dependency management. + * Replaced `pre-commit` with `prek`. + * The Makefile now performs installation calls for recipes. + * `tox.toml` now uses `dependency-groups` and the new `optional-dependencies` lists for environment setup. .. _changes_0.20.0: diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 75c83478..31b2b71f 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -96,18 +96,19 @@ Ready to contribute? Here's how to set up `ravenpy` for local development. .. code-block:: console - python -m pip install -e .[dev] - pre-commit install + python -m pip install --group dev + python -m pip install --editable . + prek install This installs ``ravenpy`` in an "editable" state, meaning that changes to the code are immediately seen by the environment. To ensure a consistent coding style, `make dev` also installs the ``pre-commit`` hooks to your local clone. - On commit, ``pre-commit`` will check that ``flake8``, ``ruff``, ``numpydoc`` and other hooks are passing, perform automatic fixes if possible, and warn of violations that require intervention. If your commit fails the checks initially, simply fix the errors, re-add the files, and re-commit. + On commit, ``prek`` will run ``pre-commit`` checks that ensure code quality checks are passing, perform automatic fixes if possible, and warn of violations that require intervention. If your commit fails the checks initially, simply fix the errors, re-add the files, and re-commit. You can also run the hooks manually with: .. 
code-block:: console - pre-commit run -a + prek run -a If you want to skip the ``pre-commit`` hooks temporarily, you can pass the `--no-verify` flag to `git commit`. @@ -134,10 +135,10 @@ Ready to contribute? Here's how to set up `ravenpy` for local development. .. code-block:: console git add . - git commit -m "Your detailed description of your changes." + git commit -s -m "Your detailed description of your changes." git push origin name-of-your-bugfix-or-feature - If ``pre-commit`` hooks fail, try fixing the issues, re-staging the files to be committed, and re-committing your changes (or, if need be, you can skip them with `git commit --no-verify`). + If ``pre-commit`` hooks fail, try fixing the issues, re-staging the files to be committed, and re-committing your changes (or, if need be, you can skip them with `--no-verify` flag). #. Submit a `Pull Request `_ through the GitHub website. @@ -195,6 +196,14 @@ Before you submit a pull request, check that it meets these guidelines: #. The pull request should work for all currently supported Python versions. Check the `pyproject.toml` or `tox.ini` files for the list of supported versions. +#. If you haven't already, ensure that you have read and agreed to the `Developer Certificate of Origin (DCO) `_, and that you have signed your commits using: + + .. code-block:: bash + + git commit -s/--signoff + + This will add a `Signed-off-by:` line to your commit message, which indicates that you agree to the DCO. + Tips ---- diff --git a/Makefile b/Makefile index b1858630..95b1f479 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: clean clean-build clean-pyc clean-test coverage dist docs help install lint lint/flake8 +.PHONY: clean clean-build clean-pyc clean-test coverage development dist docs help install lint release test .DEFAULT_GOAL := help define BROWSER_PYSCRIPT @@ -49,27 +49,41 @@ clean-pyc: ## remove Python file artifacts find . 
-name '__pycache__' -exec rm -fr {} + clean-test: ## remove test and coverage artifacts - rm -fr .tox/ rm -f .coverage - rm -fr htmlcov/ rm -fr .pytest_cache + rm -fr .tox/ + rm -fr htmlcov/ + +install-lint: ## install dependencies needed for linting + python -m pip install --quiet --group lint + +install-docs: ## install dependencies needed for building the docs + python -m pip install --quiet --group docs + +install-test: ## install dependencies needed for standard testing + python -m pip install --quiet --group test + +install-tox: ## install base dependencies needed for running tox + python -m pip install --quiet --group tox ## Testing targets: -lint/flake8: ## check style with flake8 +lint: install-lint ## check style python -m ruff check src/ravenpy tests python -m flake8 --config=.flake8 src/ravenpy tests python -m numpydoc lint src/ravenpy/**.py + python -m vulture src/ravenpy tests + codespell src/ravenpy tests docs + python -m deptry src + python -m yamllint --config-file=.yamllint.yaml src/ravenpy -lint: lint/flake8 ## check style - -test: ## run tests quickly with the default Python +test: install-test ## run tests quickly with the default Python python -m pytest -test-all: ## run tests on every Python version with tox +test-all: install-tox ## run tests on every Python version with tox python -m tox -coverage: ## check code coverage quickly with the default Python +coverage: install-test ## check code coverage quickly with the default Python python -m coverage run --source src/ravenpy -m pytest python -m coverage report -m python -m coverage html @@ -82,10 +96,10 @@ test-notebooks: ## test all notebooks under docs/notebooks ## Sphinx targets: -autodoc: clean-docs ## create sphinx-apidoc files +autodoc: install-docs clean-docs ## create sphinx-apidoc files sphinx-apidoc -o docs/apidoc --private --module-first src/ravenpy -autodoc-custom-index: clean-docs ## create sphinx-apidoc files but with special index handling for indices and indicators 
+autodoc-custom-index: install-docs clean-docs ## create sphinx-apidoc files but with special index handling for indices and indicators env SPHINX_APIDOC_OPTIONS="members,undoc-members,show-inheritance,noindex" sphinx-apidoc -o docs/apidoc --private --module-first src/ravenpy linkcheck: autodoc ## run checks over all external links found throughout the documentation @@ -96,17 +110,19 @@ initialize-translations: autodoc-custom-index ## initialize translations, ignori sphinx-intl update -p docs/_build/gettext -d docs/locales -l fr rm -fr docs/locales/fr/LC_MESSAGES/apidoc -docs: autodoc-custom-index ## generate Sphinx HTML documentation, including API docs +build-docs: autodoc-custom-index ## generate Sphinx HTML documentation, including API docs $(MAKE) -C docs html BUILDDIR="_build/html/en" ifneq ("$(wildcard $(LOCALES))","") ${MAKE} -C docs gettext $(MAKE) -C docs html BUILDDIR="_build/html/fr" SPHINXOPTS="-D language='fr'" endif + +docs: build-docs ## open the built documentation in a web browser ifndef READTHEDOCS $(BROWSER) docs/_build/html/en/html/index.html endif -servedocs: docs ## compile the docs watching for changes +servedocs: autodoc-custom-index ## compile the docs watching for changes watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . ## Development targets: @@ -121,6 +137,7 @@ release: dist ## package and upload a release install: clean ## install the package to the active Python's site-packages python -m pip install --no-user . 
-develop: clean ## install the package and development dependencies in editable mode to the active Python's site-packages - python -m pip install --no-user --editable ".[all]" - pre-commit install +development: clean ## install the package to the active Python's site-packages + python -m pip install --group dev + python -m pip install --no-user --editable .[extras] + prek install diff --git a/README.rst b/README.rst index 9bb5c7c8..ba78ac59 100644 --- a/README.rst +++ b/README.rst @@ -9,7 +9,7 @@ RavenPy |logo| +----------------------------+-----------------------------------------------------+ | Open Source | |license| |ossf-score| |zenodo| | +----------------------------+-----------------------------------------------------+ -| Coding Standards | |ruff| |ossf-bp| |pre-commit| | +| Coding Standards | |ruff| |ossf-bp| |prek| |pre-commit-ci| | +----------------------------+-----------------------------------------------------+ | Development Status | |status| |build| |coveralls| | +----------------------------+-----------------------------------------------------+ @@ -91,10 +91,14 @@ This package was created with Cookiecutter_ and the `Ouranosinc/cookiecutter-pyp :target: https://securityscorecards.dev/viewer/?uri=github.com/CSHS-CWRA/RavenPy :alt: OpenSSF Scorecard -.. |pre-commit| image:: https://results.pre-commit.ci/badge/github/CSHS-CWRA/RavenPy/master.svg +.. |pre-commit-ci| image:: https://results.pre-commit.ci/badge/github/CSHS-CWRA/RavenPy/master.svg :target: https://results.pre-commit.ci/latest/github/CSHS-CWRA/RavenPy/master :alt: pre-commit.ci status +.. |prek| image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/j178/prek/master/docs/assets/badge-v0.json + :target: https://github.com/j178/prek + :alt: prek + .. 
|pypi| image:: https://img.shields.io/pypi/v/RavenPy.svg :target: https://pypi.python.org/pypi/RavenPy :alt: PyPI diff --git a/docs/conf.py b/docs/conf.py index 973e9b4d..44d6afa6 100755 --- a/docs/conf.py +++ b/docs/conf.py @@ -118,7 +118,6 @@ def rebuild_readme(): # To avoid having to install these and burst memory limit on ReadTheDocs. autodoc_mock_imports = [ - "affine", "fiona", "geopandas", "holoviews", diff --git a/docs/index.rst b/docs/index.rst index 66928959..06df030d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -35,6 +35,11 @@ Welcome to RavenPy's documentation! apidoc/modules +.. toctree:: + :caption: GitHub Repository + + CSHS-CWRA/RavenPy + Indices and tables ================== * :ref:`genindex` diff --git a/environment-dev.yml b/environment-dev.yml index 0de5f15f..e7b8fc46 100644 --- a/environment-dev.yml +++ b/environment-dev.yml @@ -1,6 +1,7 @@ name: ravenpy-dev channels: - conda-forge + - nodefaults dependencies: - python >=3.10,<3.15 - raven-hydro >=4.12.1,<5.0 @@ -31,29 +32,30 @@ dependencies: - xskillscore >=0.0.29 - zarr # Dev tools and testing - - pip >=25.2 - - bump-my-version >=1.2.3 - - click >=8.2.0 + - pip >=26.0.1 + - bump-my-version >=1.2.6 + - click >=8.3.1 + - codespell >=2.4.1 - coverage >=7.5.0 - - deptry >=0.23.0 + - deptry >=0.24.0 - filelock >=3.14.0 - flake8 >=7.3.0 - flake8-rst-docstrings >=0.4.0 - flit >=3.11.0,<4.0 - - mypy >=1.18.2 - - numpydoc >=1.9.0 + - mypy >=1.19.1 + - numpydoc >=1.10.0 - pooch >=1.8.0 - - pre-commit >=3.5.0 - - pylint >=3.3.0 - - pytest >=8.2.3 - - pytest-cov >=5.0.0 + - prek >=0.3.0 + - pylint >=4.0.4 + - pytest >=9.0.2 + - pytest-cov >=7.0.0 - pytest-xdist >=3.2.0 - - ruff >=0.13.3 - - tox >=4.30.3 + - ruff >=0.15.0 + - tox >=4.41.0 + - tox-gh >=1.6.1 - vulture >=2.14 - - watchdog >=4.0.0 + - watchdog >=6.0.0 # GIS support - - affine >=2.4.0 - fiona >=1.9.0 - gdal >=3.1 - geopandas >=1.0 @@ -62,7 +64,6 @@ dependencies: - pyproj >=3.3.0 - rasterio - rioxarray - - setuptools >=71.0 - shapely 
>=2.0.0 # Notebook support - birdy >=0.9.1 diff --git a/environment-docs.yml b/environment-docs.yml index d25d9e83..d4a38196 100644 --- a/environment-docs.yml +++ b/environment-docs.yml @@ -1,12 +1,13 @@ name: ravenpy-docs channels: - conda-forge + - nodefaults dependencies: - python >=3.13,<3.14 - raven-hydro >=4.12.1,<5.0 - cftime >=1.4.1 - cf-xarray >=0.9.3 - - click >=8.2.0 + - click >=8.3.1 - climpred >=2.6.0 - dask >=2024.8.1 - h5netcdf >=1.5.0 @@ -38,6 +39,6 @@ dependencies: - sphinx >=8.2.0 - sphinx-click - sphinx-codeautolink >=0.16.2 - - sphinx-copybutton + - sphinx-copybutton >=0.5.2 - sphinx-rtd-theme >=1.0 - sphinxcontrib-svg2pdfconverter >=1.2.3 diff --git a/pyproject.toml b/pyproject.toml index 203a8751..a25ab3df 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,93 @@ [build-system] -requires = ["flit_core >=3.11.0,<4"] +requires = ["flit_core >=3.12.0,<4"] build-backend = "flit_core.buildapi" +[dependency-groups] +lint = [ + # Linting tools + "codespell >=2.4.1", + "deptry >=0.24.0", + "flake8 >=7.3.0", + "flake8-rst-docstrings >=0.4.0", + "mypy >=1.19.1", + "numpydoc >=1.10.0", + "pylint >=4.0.4", + "ruff >=0.15.0", + "vulture >=2.14", + "yamllint >=1.38.0" +] +test = [ + # Base testing support + "coverage[toml] >=7.13.4", + "pooch >=1.8.0", + "nbval", + "netcdf4 >=1.7.2", + "pydap >=3.5.6", + "pytest >=9.0.2", + "pytest-cov >=7.0.0", + "pytest-xdist >=3.2.0" +] +notebooks = [ + "birdhouse-birdy >=0.9.1", + "cairosvg >=2.6.0", + "cartopy >=0.23.0", + "clisops >=0.16.1", + "dask >=2024.8.1", + "gcsfs", + # Needed for notebooks/HydroShare_integration.ipynb + # See: https://github.com/CSHS-CWRA/RavenPy/pull/326 + # "hsclient", + "intake >=2.0", + "intake-esm >=2023.07.07", + "intake-xarray >=2.0", + "ipykernel", + "ipyleaflet", + "ipython >=8.7.0", + "ipywidgets", + "jupyter-cache", + "jupyter_client", + "jupytext", + "pymetalink >=6.5.3", + "s3fs", + "salib", + "xesmf", + "xsdba >=0.4.0", + "xskillscore >=0.0.29", + "zarr" +] +docs = [ + # 
Documentation and example support + "autodoc-pydantic >=2.1.0", + "ipykernel", + "ipython >=8.10.0", + "jupyter_client", + "matplotlib >=3.7.0", + "myst_nb", + "nbsphinx >=0.9.8", + "sphinx >=8.0.0", + "sphinx-click", + "sphinx-codeautolink >=0.16.2", + "sphinx-copybutton >=0.5.2", + "sphinx-rtd-theme >=1.0", + "sphinxcontrib-svg2pdfconverter >=1.2.3" +] +tox = [ + "tox >=4.41.0", + "tox-gh >=1.6.1" +] +dev = [ + # Development and testing tools + "bump-my-version >=1.2.6", + "pip >=26.0.1", + "prek >=0.3.0", + "watchdog >=6.0.0", + {include-group = "docs"}, + {include-group = "lint"}, + {include-group = "notebooks"}, + {include-group = "test"}, + {include-group = "tox"} +] + [project] name = "ravenpy" authors = [ @@ -40,95 +126,28 @@ dependencies = [ "cftime >=1.4.1", # cf-xarray is differently named on conda-forge "cf-xarray >=0.9.3", - "click >=8.2.0", + "click >=8.3.1", "climpred >=2.6.0", - "dask >=2024.8.1", + "filelock >=3.14.0", "h5netcdf >=1.5.0", "haversine >=2.8.0", "matplotlib >=3.6.0", "numpy >=1.25.0", "owslib >=0.29.1", + "packaging", "pandas >=2.2.0", "pint >=0.24.4", "pydantic >=2.11", - "pydap >=3.5.6", "pymbolic >=2024.2", "scipy >=1.11.0", "spotpy >=1.6.1", "statsmodels >=0.14.2", - "typing-extensions", "xarray >=2023.11.0,!=2024.10.0", - "xclim >=0.57.0", - "xsdba >=0.4.0", - "xskillscore >=0.0.29" + "xclim >=0.57.0" ] [project.optional-dependencies] -dev = [ - # Dev tools and testing - "bump-my-version >=1.2.3", - "coverage >=7.5.0", - "filelock >=3.14.0", - "flake8 >=7.3.0", - "flake8-rst-docstrings >=0.4.0", - "flit >=3.11.0,<4.0", - "holoviews", - "hvplot", - "mypy >=1.18.2", - "nbval", - "numpydoc >=1.9.0", - "pip >=25.2", - "pooch >=1.8.0", - "pre-commit >=3.5.0", - "pylint >=3.3.0", - "pytest >=8.3.2", - "pytest-cov >=5.0.0", - "pytest-xdist >=3.2.0", - "requests >=2.32.2", - "ruff >=0.14.3", - "setuptools >=71.0.0", - "tox >=4.30.3", - "vulture >=2.14", - "watchdog >=4.0.0" -] -docs = [ - # Requires python >=3.11 - "autodoc-pydantic 
>=2.1.0", - "birdhouse-birdy >=0.9.1", - "cairosvg >=2.6.0", - "cartopy >=0.23.0", - "clisops >=0.16.1", - "gcsfs", - # Needed for notebooks/HydroShare_integration.ipynb - # See: https://github.com/CSHS-CWRA/RavenPy/pull/326 - # "hsclient", - "intake >=2.0", - "intake-esm >=2023.07.07", - "intake-xarray >=2.0", - "ipykernel", - "ipyleaflet", - "ipython >=8.7.0", - "ipywidgets", - "jupyter-cache", - "jupyter_client", - "jupytext", - "matplotlib >=3.7.0", - "myst_nb", - "nbsphinx >=0.9.5", - "pymetalink >=6.5.3", - "s3fs", - "salib", - "sphinx >=8.2.0", - "sphinx-click", - "sphinx-codeautolink >=0.16.2", - "sphinx-copybutton", - "sphinx-rtd-theme >=1.0", - "sphinxcontrib-svg2pdfconverter >=1.2.3", - "xesmf", - "zarr" -] gis = [ - "affine >=2.4.0", "fiona >=1.9", "geopandas >=1.0", "gdal >=3.1", @@ -137,18 +156,16 @@ gis = [ "pyproj >=3.3.0", "rasterio", "rioxarray", - "setuptools >=71.0", + "requests >=2.32.2", "shapely >=2.0" ] +plotting = [ + "holoviews", + "hvplot" +] raven-hydro = [ "raven-hydro >=4.12.1,<5.0" ] -all = [ - "ravenpy[dev]", - "ravenpy[docs]", - "ravenpy[gis]", - "ravenpy[raven-hydro]" -] [project.scripts] ravenpy = "ravenpy.cli:main" @@ -238,7 +255,8 @@ values = [ ] [tool.codespell] -ignore-words-list = "astroid,documen,socio-economic" +ignore-words-list = "astroid,documen,socio-economic,third-party" +skip = "docs/_build,docs/notebooks/paper/*.pdf" [tool.coverage.paths] source = ["src/ravenpy/", "*/site-packages/ravenpy/"] @@ -249,8 +267,16 @@ omit = ["tests/*.py"] source = ["ravenpy"] [tool.deptry] -extend_exclude = ["docs"] -pep621_dev_dependency_groups = ["all", "dev", "docs"] + +[tool.deptry.package_module_name_map] +gdal = "osgeo" +netcdf4 = "netCDF4" +prek = "prek" +tox-gh = "tox_gh" + +[tool.deptry.per_rule_ignores] +DEP002 = ["h5netcdf"] +DEP004 = ["holoviews", "hvplot", "pooch"] [tool.flit.sdist] include = [ @@ -358,8 +384,8 @@ init_typed = true warn_required_dynamic_aliases = true warn_untyped_fields = true -[tool.pytest.ini_options] 
-minversion = "7.0" +[tool.pytest] +minversion = "9.0" addopts = [ "-ra", "--color=yes", @@ -371,7 +397,7 @@ addopts = [ "--maxprocesses=8", "--dist=worksteal" ] -python_files = "test_*.py" +python_files = ["test_*.py"] norecursedirs = ["docs/notebooks"] filterwarnings = ["ignore::UserWarning"] testpaths = [ @@ -381,8 +407,9 @@ pythonpath = [ "src" ] markers = [ - "slow: marks tests as slow (deselect with '-m \"not slow\"')", - "online: mark tests that require external services (deselect with '-m \"not online\"')" + "gis: mark tests that require GIS libraries (deselect with '-m \"not gis\"')", + "online: mark tests that require external services (deselect with '-m \"not online\"')", + "slow: marks tests as slow (deselect with '-m \"not slow\"')" ] xfail_strict = true diff --git a/src/ravenpy/config/commands.py b/src/ravenpy/config/commands.py index 6e9ddfad..61afde31 100644 --- a/src/ravenpy/config/commands.py +++ b/src/ravenpy/config/commands.py @@ -1038,7 +1038,7 @@ def from_nc( Use keyword "ALL" to pass parameters to all variables. engine : {"h5netcdf", "netcdf4", "pydap"} The engine used to open the dataset. Default is 'h5netcdf'. - \*\*kwds : dict + **kwds : dict Additional arguments for Gauge. Returns diff --git a/src/ravenpy/extractors/forecasts.py b/src/ravenpy/extractors/forecasts.py index 9e8b9717..05d6983b 100644 --- a/src/ravenpy/extractors/forecasts.py +++ b/src/ravenpy/extractors/forecasts.py @@ -8,6 +8,7 @@ from urllib.parse import urljoin import pandas as pd +import rioxarray # noqa: F401 import xarray as xr from pandas import DatetimeIndex, Series, Timestamp from xarray import Dataset @@ -75,7 +76,7 @@ def get_CASPAR_dataset( # noqa: N802 directory : str The directory on the thredds server where the data is stored. Default: "dodsC/birdhouse/disk2/caspar/daily/". engine : str - The xarray engine to use to open the dataset. Default: "pydap". + The xarray engine to use to open the dataset. Default: "netcdf4". 
Returns ------- @@ -125,7 +126,7 @@ def get_ECCC_dataset( # noqa: N802 directory : str The directory on the thredds server where the data is stored. Default: "dodsC/datasets/forecasts/eccc_geps/". engine : str - The xarray engine to use to open the dataset. Default: "pydap". + The xarray engine to use to open the dataset. Default: "netcdf4". Returns ------- diff --git a/src/ravenpy/extractors/routing_product.py b/src/ravenpy/extractors/routing_product.py index 1da02e39..adc4ee45 100644 --- a/src/ravenpy/extractors/routing_product.py +++ b/src/ravenpy/extractors/routing_product.py @@ -4,6 +4,7 @@ from pathlib import Path from typing import Union +import numpy as np import pandas from ravenpy.utilities import gis_import_error_message @@ -11,6 +12,7 @@ try: import geopandas + import netCDF4 from osgeo import __version__ as osgeo_version from osgeo import ogr, osr from shapely import wkt @@ -20,9 +22,6 @@ msg = gis_import_error_message.format(Path(__file__).stem) raise ImportError(msg) from e -import netCDF4 -import numpy as np - def open_shapefile(path: Union[str, os.PathLike]): """Return GeoDataFrame from shapefile path.""" diff --git a/src/ravenpy/testing/utils.py b/src/ravenpy/testing/utils.py index 7e757e5f..4fc6f088 100644 --- a/src/ravenpy/testing/utils.py +++ b/src/ravenpy/testing/utils.py @@ -278,7 +278,7 @@ def load_registry_from_file( def yangtze( repo: str = TESTDATA_REPO_URL, branch: str = TESTDATA_BRANCH, - cache_dir: str | Path = TESTDATA_CACHE_DIR, + cache_dir: str | Path | None = TESTDATA_CACHE_DIR, allow_updates: bool = True, force_download: bool = False, ): @@ -291,8 +291,9 @@ def yangtze( URL of the repository to use when fetching testing datasets. branch : str Branch of repository to use when fetching testing datasets. - cache_dir : str or Path + cache_dir : str or Path or None The path to the directory where the data files are stored. + A valid cache_dir path is required. allow_updates : bool If True, allow updates to the data files. 
Default is True. force_download : bool @@ -327,6 +328,9 @@ def yangtze( example_file = yangtze().fetch("example.nc") data = xr.open_dataset(example_file) """ + if cache_dir is None: + raise NotImplementedError("A valid cache_dir path is required.") + if pooch is None: raise ImportError( "The `pooch` package is required to fetch the RavenPy testing data. " @@ -358,7 +362,7 @@ def _downloader( output_file: str | IO, poocher: pooch.Pooch, check_only: bool | None = False, - ) -> None: + ) -> bool | None: """Download the file from the URL and save it to the save_path.""" headers = {"User-Agent": f"RavenPy ({ravenpy.__version__})"} downloader = pooch.HTTPDownloader(headers=headers) @@ -395,8 +399,7 @@ def get_file( str The path to the file. """ - if _yangtze_kwargs is None: - _yangtze_kwargs = {} + _yangtze_kwargs = _yangtze_kwargs or {} return yangtze(**_yangtze_kwargs).fetch(name) diff --git a/src/ravenpy/utilities/forecasting.py b/src/ravenpy/utilities/forecasting.py index f9e10bee..7b9be1e4 100644 --- a/src/ravenpy/utilities/forecasting.py +++ b/src/ravenpy/utilities/forecasting.py @@ -350,7 +350,7 @@ def ensemble_prediction( The path to rv files and model outputs. If None, create temporary directory. overwrite : bool Overwrite files when writing to disk. - \*\*kwds : dict + **kwds : dict Keywords for the `Gauge.from_nc` function. Returns diff --git a/src/ravenpy/utilities/geoserver.py b/src/ravenpy/utilities/geoserver.py index 11b50760..b614994f 100644 --- a/src/ravenpy/utilities/geoserver.py +++ b/src/ravenpy/utilities/geoserver.py @@ -22,8 +22,6 @@ from typing import Optional, Union from urllib.parse import urlencode, urljoin -import urllib3 - from . 
import gis_import_error_message @@ -31,6 +29,7 @@ import fiona import geopandas as gpd import pandas as pd + import requests from lxml import etree from owslib.fes import PropertyIsLike from owslib.fes2 import Intersects @@ -178,8 +177,9 @@ def _get_feature_attributes_wfs( propertyName=",".join(attribute), ) url = urljoin(geoserver, "wfs") + "?" + urlencode(params) - http = urllib3.PoolManager() - response = http.request("GET", url) + + response = requests.get(url, params=params, timeout=15) + response.raise_for_status() return urljoin(host, response.url) @@ -232,8 +232,9 @@ def _filter_feature_attributes_wfs( ) url = urljoin(geoserver, "wfs") + "?" + urlencode(params) - http = urllib3.PoolManager() - response = http.request("GET", url) + + response = requests.get(url, params=params, timeout=15) + response.raise_for_status() return urljoin(host, response.url) @@ -496,7 +497,7 @@ def hydro_routing_upstream( level: int = 12, lakes: str = "1km", geoserver: str = GEOSERVER_URL, -) -> pd.Series: +) -> "gpd.GeoDataFrame": """ Return a list of hydro routing features located upstream. @@ -514,7 +515,7 @@ def hydro_routing_upstream( Returns ------- - pd.Series + gpd.GeoDataFrame Basins ids including `fid` and its upstream contributors. """ geoserver = _fix_server_url(geoserver) diff --git a/src/ravenpy/utilities/nb_graphs.py b/src/ravenpy/utilities/nb_graphs.py index 6a21818d..a478fc20 100644 --- a/src/ravenpy/utilities/nb_graphs.py +++ b/src/ravenpy/utilities/nb_graphs.py @@ -136,7 +136,7 @@ def ts_fit_graph(ts: xr.DataArray, params: xr.DataArray) -> matplotlib.pyplot.Fi # Note: The hover tool could be customized to show the histogram count in addition to the frequency. 
n = ts.nbasins.size if n > 1: - raise NotImplementedError + raise NotImplementedError("More than one basin provided.") ts = ts.isel(nbasins=0) params = params.isel(nbasins=0) diff --git a/tests/test_bias_correction.py b/tests/test_bias_correction.py deleted file mode 100644 index 6facfb7e..00000000 --- a/tests/test_bias_correction.py +++ /dev/null @@ -1,54 +0,0 @@ -import pytest -import xsdba -from xarray.coding.calendar_ops import convert_calendar - - -# FIXME: This doesn't test Raven functionality; Should we move it to xclim? -@pytest.mark.skip(reason="This test is not testing Raven functionality") -class TestBiasCorrect: - def test_bias_correction(self, open_dataset): - ds_fut_sub = open_dataset("cmip5/nasa_nex-gddp-1.0_day_inmcm4_historical+rcp85_nex-gddp_2070-2071_subset.nc") - - ds_ref_sub = open_dataset("cmip5/nasa_nex-gddp-1.0_day_inmcm4_historical+rcp45_nex-gddp_1971-1972_subset.nc") - ds_ref_sub = convert_calendar(ds_ref_sub, "noleap") - - ds_his_sub = open_dataset("nrcan/NRCAN_1971-1972_subset.nc") - - ds_his_sub = convert_calendar(ds_his_sub, "noleap") - group = xsdba.Grouper("time.month") - # Train the model to find the correction factors - Adj = xsdba.DetrendedQuantileMapping.train( - ref=ds_ref_sub["pr"], - hist=ds_his_sub["pr"], - nquantiles=50, - kind="+", - group=group, - ) - - # Apply the factors to the future data to bias-correct - Adj.adjust(ds_fut_sub["pr"], interp="linear") - - # Repeat for temperature max - Adj = xsdba.DetrendedQuantileMapping.train( - ref=ds_ref_sub["tasmax"], - hist=ds_his_sub["tasmax"], - nquantiles=50, - kind="+", - group=group, - ) - - # Apply the factors to the future data to bias-correct - Adj.adjust(ds_fut_sub["tasmax"], interp="linear") - - # Repeat for tasmin - Adj = xsdba.DetrendedQuantileMapping.train( - ref=ds_ref_sub["tasmin"], - hist=ds_his_sub["tasmin"], - nquantiles=50, - kind="+", - group=group, - ) - - Adj.adjust(ds_fut_sub["tasmin"], interp="linear") - - # TODO: Add numerical check diff --git 
a/tests/test_calibration.py b/tests/test_calibration.py index 4ba2d294..b7186314 100644 --- a/tests/test_calibration.py +++ b/tests/test_calibration.py @@ -376,7 +376,7 @@ @pytest.mark.xfail( - reason="Some platform and Python versions combinations raise errors for certain models", + reason="Some platform and Python versions combinations raise errors for certain models.", strict=False, ) def test_spotpy_calibration(symbolic_config): diff --git a/tests/test_cli.py b/tests/test_cli.py index f0c55879..c3a08c06 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,14 +1,14 @@ import re from shutil import copyfile -import netCDF4 as nc4 +import pytest from click.testing import CliRunner -from ravenpy.cli import aggregate_forcings_to_hrus, generate_grid_weights -from ravenpy.config.commands import GridWeights - +@pytest.mark.gis class TestGenerateGridWeights: + cli = pytest.importorskip("ravenpy.cli") + def test_generate_grid_weights_with_nc_input_and_2d_coords(self, tmp_path, yangtze): runner = CliRunner() output_path = tmp_path / "bla.rvt" @@ -35,7 +35,7 @@ def test_generate_grid_weights_with_nc_input_and_2d_coords(self, tmp_path, yangt ] params = list(map(str, params)) - result = runner.invoke(generate_grid_weights, params) + result = runner.invoke(self.cli.generate_grid_weights, params) assert not result.exception assert result.exit_code == 0 @@ -82,7 +82,7 @@ def test_generate_grid_weights_with_multiple_subids(self, tmp_path, yangtze): ] params = map(str, params) - result = runner.invoke(generate_grid_weights, params) + result = runner.invoke(self.cli.generate_grid_weights, params) assert result.exit_code == 0 assert not result.exception @@ -123,7 +123,7 @@ def test_generate_grid_weights_with_nc_input_and_1d_coords(self, tmp_path, yangt ] params = map(str, params) - result = runner.invoke(generate_grid_weights, params) + result = runner.invoke(self.cli.generate_grid_weights, params) assert result.exit_code == 0 assert not result.exception @@ -161,7 +161,7 @@ 
def test_generate_grid_weights_with_shp_input(self, tmp_path, yangtze): ] params = map(str, params) - result = runner.invoke(generate_grid_weights, params) + result = runner.invoke(self.cli.generate_grid_weights, params) assert result.exit_code == 0 assert not result.exception @@ -201,7 +201,7 @@ def test_generate_grid_weights_with_weight_rescaling(self, tmp_path, yangtze): ] params = map(str, params) - result = runner.invoke(generate_grid_weights, params) + result = runner.invoke(self.cli.generate_grid_weights, params) assert result.exit_code == 0 assert not result.exception @@ -217,7 +217,12 @@ def test_generate_grid_weights_with_weight_rescaling(self, tmp_path, yangtze): assert abs(weight - 0.9851111335377887) < 1e-04 +@pytest.mark.gis class TestAggregateForcingsToHRUs: + cli = pytest.importorskip("ravenpy.cli") + commands = pytest.importorskip("ravenpy.config.commands") + nc4 = pytest.importorskip("netCDF4") + def test_aggregate_forcings_to_hrus(self, tmp_path, yangtze): runner = CliRunner() output_nc_file_path = tmp_path / "aggreg.nc" @@ -248,14 +253,14 @@ def test_aggregate_forcings_to_hrus(self, tmp_path, yangtze): ] params = map(str, params) - result = runner.invoke(aggregate_forcings_to_hrus, params) + result = runner.invoke(self.cli.aggregate_forcings_to_hrus, params) assert result.exit_code == 0 assert not result.exception output_rvt = output_weight_file_path.read_text() - gws = GridWeights.parse(output_rvt) + gws = self.commands.GridWeights.parse(output_rvt) new_weights = gws.data @@ -273,7 +278,7 @@ def test_aggregate_forcings_to_hrus(self, tmp_path, yangtze): assert new_weights[3][2] == 1.0 # All new_weights[:][2] need to be 1.0 # check the aggregated NetCDF file - nc_in = nc4.Dataset(output_nc_file_path, "r") + nc_in = self.nc4.Dataset(output_nc_file_path, "r") val = nc_in.variables["Streaminputs"][:] nc_in.close() @@ -311,14 +316,14 @@ def test_aggregate_forcings_to_hrus_with_nodata(self, tmp_path, yangtze): ] params = map(str, params) - result = 
runner.invoke(aggregate_forcings_to_hrus, params) + result = runner.invoke(self.cli.aggregate_forcings_to_hrus, params) assert result.exit_code == 0 assert not result.exception output_rvt = output_weight_file_path.read_text() - gws = GridWeights.parse(output_rvt) + gws = self.commands.GridWeights.parse(output_rvt) new_weights = gws.data @@ -336,7 +341,7 @@ def test_aggregate_forcings_to_hrus_with_nodata(self, tmp_path, yangtze): assert new_weights[2][2] == 1.0 # All new_weights[:][2] need to be 1.0 # check aggregated NetCDF file - nc_in = nc4.Dataset(output_nc_file_path, "r") + nc_in = self.nc4.Dataset(output_nc_file_path, "r") val = nc_in.variables["et"][:] nc_in.close() diff --git a/tests/test_distributed_workflow.py b/tests/test_distributed_workflow.py index 6171f428..edc8cffd 100644 --- a/tests/test_distributed_workflow.py +++ b/tests/test_distributed_workflow.py @@ -2,102 +2,103 @@ import datetime as dt +import pytest + from ravenpy import Emulator from ravenpy.config import commands as rc from ravenpy.config.emulators import GR4JCN -from ravenpy.extractors.routing_product import ( - BasinMakerExtractor, - GridWeightExtractor, - open_shapefile, - upstream_from_coords, -) - - -def test_simple_workflow(tmp_path, yangtze): - shp_path = yangtze.fetch("basinmaker/drainage_region_0175_v2-1/finalcat_info_v2-1.zip") - - # Note that for this to work, the coordinates must be in the small - # BasinMaker example (drainage_region_0175) - df = open_shapefile(shp_path) - - # Gauge station for observations at Matapedia - # SubId: 175000128 - # -67.12542 48.10417 - sub = upstream_from_coords(-67.12542, 48.10417, df) - - # Confirm we got the right watershed - assert 175000128 in sub["SubId"].to_list() - - # Extract the subbasins and HRUs (one HRU per sub-basin) - bm = BasinMakerExtractor( - df=sub, - hru_aspect_convention="ArcGIS", - ) - rvh = bm.extract(hru_from_sb=True) - - # Streamflow obs - qobs_fn = yangtze.fetch("matapedia/Qobs_Matapedia_01BD009.nc") - - qobs = 
rc.ObservationData.from_nc( - qobs_fn, - uid=175000128, - alt_names=("discharge",), - ) - - # Meteo obs for GriddedForcing - does not work because subbasins do not overlap 100% with the ERA data - meteo_grid_fn = yangtze.fetch("matapedia/Matapedia_meteo_data_2D.nc") - - # Dict of GW attributes - gw = GridWeightExtractor( - meteo_grid_fn, - shp_path, - dim_names=("longitude", "latitude"), - var_names=("longitude", "latitude"), - gauge_ids=[ - "01BD009", - ], - ).extract() - - assert gw["number_hrus"] == len(sub) - - # Write GW command to file - gw_fn = tmp_path / "gw.txt" - gw_fn.write_text(rc.GridWeights(**gw).to_rv()) - - forcing = {"TEMP_MIN": "tmin", "TEMP_MAX": "tmax", "PRECIP": "pr"} - - [rc.GriddedForcing.from_nc(meteo_grid_fn, dtyp, alt_names=(alias,), grid_weights=gw_fn) for (dtyp, alias) in forcing.items()] - # Weights for some HRUs do not sum to one. - - # Meteo forcing per station (virtual stations, since this is ERA5 data) - meteo_station = yangtze.fetch("matapedia/Matapedia_meteo_data_stations.nc") - - [rc.StationForcing.from_nc(meteo_station, dtyp, alt_names=(alias,)) for (dtyp, alias) in forcing.items()] - # TODO: Complete with weights calculations - - # Virtual Gauges - gauges = [ - rc.Gauge.from_nc( - meteo_station, - data_type=[s for s in forcing.keys()], - station_idx=i + 1, - alt_names=forcing, + + +@pytest.mark.gis +class TestDistributedWorkflow: + routing_product = pytest.importorskip("ravenpy.extractors.routing_product") + + def test_simple_workflow(self, tmp_path, yangtze): + + shp_path = yangtze.fetch("basinmaker/drainage_region_0175_v2-1/finalcat_info_v2-1.zip") + + # Note that for this to work, the coordinates must be in the small + # BasinMaker example (drainage_region_0175) + df = self.routing_product.open_shapefile(shp_path) + + # Gauge station for observations at Matapedia + # SubId: 175000128 + # -67.12542 48.10417 + sub = self.routing_product.upstream_from_coords(-67.12542, 48.10417, df) + + # Confirm we got the right watershed + 
assert 175000128 in sub["SubId"].to_list() + + # Extract the subbasins and HRUs (one HRU per sub-basin) + bm = self.routing_product.BasinMakerExtractor( + df=sub, + hru_aspect_convention="ArcGIS", + ) + rvh = bm.extract(hru_from_sb=True) + + # Streamflow obs + qobs_fn = yangtze.fetch("matapedia/Qobs_Matapedia_01BD009.nc") + + qobs = rc.ObservationData.from_nc( + qobs_fn, + uid=175000128, + alt_names=("discharge",), ) - for i in range(6) - ] - - conf = GR4JCN( - params=[0.529, -3.396, 407.29, 1.072, 16.9, 0.947], - StartDate=dt.datetime(2000, 1, 1), - Duration=15, - GlobalParameter={"AVG_ANNUAL_RUNOFF": 208.480}, - ObservationData=[qobs], - Gauge=gauges, - **rvh, - ) - - out = Emulator(conf, workdir=tmp_path).run() - - # Number of gauged sub-basins - ng = sum([sb.gauged for sb in conf.sub_basins]) - assert len(out.hydrograph.nbasins) == ng + + # Meteo obs for GriddedForcing - does not work because subbasins do not overlap 100% with the ERA data + meteo_grid_fn = yangtze.fetch("matapedia/Matapedia_meteo_data_2D.nc") + + # Dict of GW attributes + gw = self.routing_product.GridWeightExtractor( + meteo_grid_fn, + shp_path, + dim_names=("longitude", "latitude"), + var_names=("longitude", "latitude"), + gauge_ids=[ + "01BD009", + ], + ).extract() + + assert gw["number_hrus"] == len(sub) + + # Write GW command to file + gw_fn = tmp_path / "gw.txt" + gw_fn.write_text(rc.GridWeights(**gw).to_rv()) + + forcing = {"TEMP_MIN": "tmin", "TEMP_MAX": "tmax", "PRECIP": "pr"} + + [rc.GriddedForcing.from_nc(meteo_grid_fn, dtyp, alt_names=(alias,), grid_weights=gw_fn) for (dtyp, alias) in forcing.items()] + # Weights for some HRUs do not sum to one. 
+ + # Meteo forcing per station (virtual stations, since this is ERA5 data) + meteo_station = yangtze.fetch("matapedia/Matapedia_meteo_data_stations.nc") + + [rc.StationForcing.from_nc(meteo_station, dtyp, alt_names=(alias,)) for (dtyp, alias) in forcing.items()] + # TODO: Complete with weights calculations + + # Virtual Gauges + gauges = [ + rc.Gauge.from_nc( + meteo_station, + data_type=[s for s in forcing.keys()], + station_idx=i + 1, + alt_names=forcing, + ) + for i in range(6) + ] + + conf = GR4JCN( + params=[0.529, -3.396, 407.29, 1.072, 16.9, 0.947], + StartDate=dt.datetime(2000, 1, 1), + Duration=15, + GlobalParameter={"AVG_ANNUAL_RUNOFF": 208.480}, + ObservationData=[qobs], + Gauge=gauges, + **rvh, + ) + + out = Emulator(conf, workdir=tmp_path).run() + + # Number of gauged sub-basins + ng = sum([sb.gauged for sb in conf.sub_basins]) + assert len(out.hydrograph.nbasins) == ng diff --git a/tests/test_emulators.py b/tests/test_emulators.py index 0ed26e13..c6679f00 100644 --- a/tests/test_emulators.py +++ b/tests/test_emulators.py @@ -87,7 +87,7 @@ def test_emulator_config_is_read_only(dummy_config, tmp_path): e.config.run_name = "Renamed" -def test_duplicate(gr4jcn_config, salmon_hru, tmp_path): +def test_duplicate(salmon_hru): hru = salmon_hru["land"] conf1 = GR4JCN(hrus=[hru], Duration=10) conf2 = conf1.duplicate(hrus=[hru]) @@ -124,7 +124,7 @@ def test_set_params(gr4jcn_config): assert conf.params.GR4J_X1 == 0.529 -def test_nc_attrs(gr4jcn_config, tmp_path): +def test_nc_attrs(gr4jcn_config): gr4jcn, params = gr4jcn_config gr4jcn = gr4jcn.set_params(params) @@ -191,7 +191,7 @@ def test_evaluation_periods(gr4jcn_config, tmp_path): @pytest.mark.slow @pytest.mark.online -@pytest.mark.xfail(error=OSError, reason="Network may be unreliable") +@pytest.mark.xfail(raises=OSError, reason="Network may be unreliable.") def test_run_with_dap_link(minimal_emulator, tmp_path): """Test Raven with DAP link instead of local netCDF file.""" # Link to THREDDS Data Server
netCDF testdata @@ -471,14 +471,10 @@ def test_routing(yangtze): assert len(list(out.path.glob("*ForcingFunctions.nc"))) == 1 +@pytest.mark.gis @pytest.mark.slow -@pytest.mark.xfail def test_routing_lievre_tutorial(tmp_path, yangtze): - from ravenpy.extractors.routing_product import ( - BasinMakerExtractor, - GridWeightExtractor, - open_shapefile, - ) + routing_product = pytest.importorskip("ravenpy.extractors.routing_product") ############### # Input files # @@ -508,8 +504,8 @@ def test_routing_lievre_tutorial(tmp_path, yangtze): # RVH # ####### - rvh_extractor = BasinMakerExtractor( - open_shapefile(routing_product_shp_path), + rvh_extractor = routing_product.BasinMakerExtractor( + routing_product.open_shapefile(routing_product_shp_path), hru_aspect_convention="ArcGIS", ) rvh_config = rvh_extractor.extract() @@ -553,7 +549,7 @@ def test_routing_lievre_tutorial(tmp_path, yangtze): ), ] - gw_pr = GridWeightExtractor( + gw_pr = routing_product.GridWeightExtractor( vic_streaminputs_nc_path, routing_product_shp_path, var_names=["lon", "lat"], @@ -573,7 +569,7 @@ def test_routing_lievre_tutorial(tmp_path, yangtze): assert gf_pr.linear_transform.scale == 4 assert gf_pr.linear_transform.offset == 0 - gw_tas = GridWeightExtractor( + gw_tas = routing_product.GridWeightExtractor( vic_temperatures_nc_path, routing_product_shp_path, var_names=["lon", "lat"], @@ -600,24 +596,25 @@ def test_routing_lievre_tutorial(tmp_path, yangtze): # Verify # ########## - assert out.hydrograph.basin_name.item() == gauged_sb["name"] + assert f"sub_{out.hydrograph.basin_name.item()}" == gauged_sb["name"] csv_lines = out.files["diagnostics"].read_text().split("\n") assert csv_lines[1].split(",")[:-1] == [ "HYDROGRAPH_ALL[3077541]", str(observation_data_nc_path), - "0.253959", # NASH_SUTCLIFFE "0.253959", - "-17.0904", # PCT_BIAS "-17.0904" - "0.443212", # KLING_GUPTA "0.443212" + "0.227657", # NASH_SUTCLIFFE "0.253959", # FIXME: RHF v4.12 new value: "0.227657" + "-17.5132", # PCT_BIAS 
"-17.0904", # FIXME: RHF v4.12 new value: "-17.5132" + "0.405353", # KLING_GUPTA "0.443212", # FIXME: RHF v4.12 new value: 0.405353 ] + # FIXME: RHF 4.12: Many of these values are no longer valid for d, q_sim in [ (0, 85.92355875229545), - (1000, 74.05569855818379), - (2000, 62.675159400333115), - (3000, 42.73584909530037), - (4000, 128.70284018326998), + (1000, 70.7391269200262), # FIXME: Previously: 74.05569855818379 + (2000, 65.24225415070816), # FIXME: Previously: 62.675159400333115 + (3000, 45.2727362517773), # FIXME: Previously: 42.73584909530037 + (4000, 127.63921091228055), # FIXME: Previously: 128.70284018326998 ]: assert out.hydrograph.q_sim[d].item() == pytest.approx(q_sim) diff --git a/tests/test_external_dataset_access.py b/tests/test_external_dataset_access.py index 91b4e9aa..ac3764b3 100644 --- a/tests/test_external_dataset_access.py +++ b/tests/test_external_dataset_access.py @@ -7,7 +7,6 @@ import pytest import xarray -from ravenpy.extractors.forecasts import get_CASPAR_dataset, get_ECCC_dataset from ravenpy.testing.utils import ( default_testdata_cache, default_testdata_version, @@ -16,14 +15,17 @@ ) +@pytest.mark.gis @pytest.mark.online -class TestGet: +class TestGetGIS: + forecasts = pytest.importorskip("ravenpy.extractors.forecasts") + def test_get_CASPAR_dataset(self): - ds, _ = get_CASPAR_dataset("GEPS", dt.datetime(2018, 8, 31)) + ds, _ = self.forecasts.get_CASPAR_dataset("GEPS", dt.datetime(2018, 8, 31)) - @pytest.mark.xfail(error=OSError, reason="Network may be unreliable", strict=False) + @pytest.mark.xfail(error=OSError, reason="Network may be unreliable.", strict=False) def test_get_ECCC_dataset(self): - ds, _ = get_ECCC_dataset("GEPS") + ds, _ = self.forecasts.get_ECCC_dataset("GEPS") ns = np.datetime64("now") - ds.time.isel(time=0).values n_hours = ns / np.timedelta64(1, "h") @@ -34,13 +36,17 @@ def test_get_ECCC_dataset(self): @pytest.fixture(scope="function") def remote_access_teardown(request): def _teardown(): - ravenpy_location = 
Path(importlib.util.find_spec("ravenpy").origin).parent - testing = ravenpy_location / "testing" - - if testing.joinpath("main").is_dir(): - for f in testing.joinpath("main").iterdir(): - f.unlink() - testing.joinpath("main").rmdir() + ravenpy = importlib.util.find_spec("ravenpy") + if ravenpy is not None: + ravenpy_location = Path(ravenpy.origin).parent + testing = ravenpy_location / "testing" + + if testing.joinpath("main").is_dir(): + for f in testing.joinpath("main").iterdir(): + f.unlink() + testing.joinpath("main").rmdir() + else: + raise FileNotFoundError("Library ravenpy was not found. Check environment.") request.addfinalizer(_teardown) @@ -53,7 +59,7 @@ class TestRemoteFileAccess: @pytest.mark.xfail( raises=urllib.error.URLError, - reason="Get file is API rate limited", + reason="Get file is API rate limited.", strict=False, ) def test_get_file_default_cache(self, remote_access_teardown): # noqa: F841 diff --git a/tests/test_extractor.py b/tests/test_extractor.py index 48608f09..52a6b8e3 100644 --- a/tests/test_extractor.py +++ b/tests/test_extractor.py @@ -1,33 +1,39 @@ +import pytest + from ravenpy.config.emulators import BasicRoute -from ravenpy.extractors.routing_product import BasinMakerExtractor, open_shapefile - - -def test_basinmaker_extractor(tmp_path, yangtze): - routing_product_shp_path = yangtze.fetch("basinmaker/drainage_region_0175_v2-1/finalcat_info_v2-1.zip") - df = open_shapefile( - routing_product_shp_path, - ) - rvh_extractor = BasinMakerExtractor( - df=df, - hru_aspect_convention="ArcGIS", - routing_product_version="2.1", - ) - rvh_config = rvh_extractor.extract(hru_from_sb=True) - - # Create lists of values to check - bedslope_list = [item["bed_slope"] for item in rvh_config["channel_profile"]] - mannings_list = [value for d in rvh_config["channel_profile"] for value in [t[1] for t in d["roughness_zones"]]] - reach_length_list = [item["reach_length"] for item in rvh_config["sub_basins"]] - - rvh_config.pop("channel_profile") - - 
config = BasicRoute(**rvh_config) - config.write_rv(tmp_path, modelname="routing") - - # Checks that the bedslope, Manning and reach length values are non negative numbers - assert all(isinstance(x, (int, float)) for x in bedslope_list) is True - assert any(x < 0 for x in bedslope_list) is False - assert all(isinstance(y, (int, float)) for y in mannings_list) is True - assert any(y < 0 for y in mannings_list) is False - assert all(isinstance(z, (int, float)) for z in bedslope_list) is True - assert any(z < 0 for z in reach_length_list) is False + + +@pytest.mark.gis +class TestExtractor: + routing_product = pytest.importorskip("ravenpy.extractors.routing_product") + + def test_basinmaker_extractor(self, tmp_path, yangtze): + + routing_product_shp_path = yangtze.fetch("basinmaker/drainage_region_0175_v2-1/finalcat_info_v2-1.zip") + df = self.routing_product.open_shapefile( + routing_product_shp_path, + ) + rvh_extractor = self.routing_product.BasinMakerExtractor( + df=df, + hru_aspect_convention="ArcGIS", + routing_product_version="2.1", + ) + rvh_config = rvh_extractor.extract(hru_from_sb=True) + + # Create lists of values to check + bedslope_list = [item["bed_slope"] for item in rvh_config["channel_profile"]] + mannings_list = [value for d in rvh_config["channel_profile"] for value in [t[1] for t in d["roughness_zones"]]] + reach_length_list = [item["reach_length"] for item in rvh_config["sub_basins"]] + + rvh_config.pop("channel_profile") + + config = BasicRoute(**rvh_config) + config.write_rv(tmp_path, modelname="routing") + + # Checks that the bedslope, Manning and reach length values are non negative numbers + assert all(isinstance(x, (int, float)) for x in bedslope_list) is True + assert any(x < 0 for x in bedslope_list) is False + assert all(isinstance(y, (int, float)) for y in mannings_list) is True + assert any(y < 0 for y in mannings_list) is False + assert all(isinstance(z, (int, float)) for z in bedslope_list) is True + assert any(z < 0 for z in 
reach_length_list) is False diff --git a/tests/test_geo_utilities.py b/tests/test_geo_utilities.py index 635b100b..3445b423 100644 --- a/tests/test_geo_utilities.py +++ b/tests/test_geo_utilities.py @@ -105,7 +105,7 @@ def test_boundary_check(self, recwarn, yangtze): with pytest.raises(FileNotFoundError): self.checks.boundary_check(self.non_existing_file) - @pytest.mark.skip(reason="Not presently testable") + @pytest.mark.skip(reason="Not presently testable.") def test_multipolygon_check(self): pass @@ -129,7 +129,7 @@ def test_gdal_aspect_not_projected(self, tmp_path, yangtze): assert Path(aspect_tempfile).stat().st_size > 0 # Slope values are high due to data values using Geographic CRS - @pytest.mark.xfail(reason="Console commands have been modified in GDAL 3.11+", strict=False) + @pytest.mark.xfail(reason="Console commands have been modified in GDAL 3.11+.", strict=False) def test_gdal_slope_not_projected(self, tmp_path, yangtze): slope_grid = self.analysis.gdal_slope_analysis(yangtze.fetch(raster_file)) np.testing.assert_almost_equal(slope_grid.min(), 0.0) @@ -145,7 +145,7 @@ def test_gdal_slope_not_projected(self, tmp_path, yangtze): assert Path(slope_tempfile).stat().st_size > 0 # Slope values are high due to data values using Geographic CRS - @pytest.mark.xfail(reason="Console commands have been modified in GDAL 3.11+", strict=False) + @pytest.mark.xfail(reason="Console commands have been modified in GDAL 3.11+.", strict=False) def test_dem_properties(self, yangtze): dem_properties = self.analysis.dem_prop(yangtze.fetch(raster_file)) np.testing.assert_almost_equal(dem_properties["aspect"], 10.91190, decimal=5) diff --git a/tests/test_geoserver.py b/tests/test_geoserver.py index 374e407c..84b3ef6d 100644 --- a/tests/test_geoserver.py +++ b/tests/test_geoserver.py @@ -10,12 +10,12 @@ @pytest.mark.online class TestHydroBASINS: - geoserver = pytest.importorskip("ravenpy.utilities.geoserver") - fiona = pytest.importorskip("fiona") gpd = 
pytest.importorskip("geopandas") sgeo = pytest.importorskip("shapely.geometry") + geoserver = pytest.importorskip("ravenpy.utilities.geoserver") + def test_select_hybas_na_domain_bbox(self): bbox = (-68.0, 50.0) * 2 dom = self.geoserver.select_hybas_domain(bbox=bbox) @@ -79,12 +79,12 @@ def test_hydrobasins_upstream_aggregate(self, tmp_path): @pytest.mark.online class TestHydroRouting: - geoserver = pytest.importorskip("ravenpy.utilities.geoserver") - fiona = pytest.importorskip("fiona") gpd = pytest.importorskip("geopandas") sgeo = pytest.importorskip("shapely.geometry") + geoserver = pytest.importorskip("ravenpy.utilities.geoserver") + def test_hydro_routing_locations(self, tmp_path): lake_winnipeg = ( -98.03575958286369, @@ -117,12 +117,12 @@ def test_hydro_routing_upstream(self, tmp_path): @pytest.mark.online class TestWFS: - geoserver = pytest.importorskip("ravenpy.utilities.geoserver") - fiona = pytest.importorskip("fiona") gpd = pytest.importorskip("geopandas") sgeo = pytest.importorskip("shapely.geometry") + geoserver = pytest.importorskip("ravenpy.utilities.geoserver") + def test_get_location_wfs_point(self, tmp_path): las_vegas = (-115.136389, 36.175) usa_admin_bounds = "public:usa_admin_boundaries" @@ -154,13 +154,15 @@ def test_get_feature_attributes_wfs(self): # FIXME: Something strange is going on with GitHub Actions and PAVICS Geoserver access. Investigate. 
+@pytest.mark.gis @pytest.mark.online @pytest.mark.xfail(HTTPError, reason="Geoserver WCS seems to be inaccessible from GitHub.", strict=False) class TestWCS: + rasterio = pytest.importorskip("rasterio") + io = pytest.importorskip("ravenpy.utilities.io") geoserver = pytest.importorskip("ravenpy.utilities.geoserver") geo = pytest.importorskip("ravenpy.utilities.geo") - rasterio = pytest.importorskip("rasterio") saskatoon = "polygons/Saskatoon.geojson" diff --git a/tests/test_rvs.py b/tests/test_rvs.py index bcddc862..046f0b2a 100644 --- a/tests/test_rvs.py +++ b/tests/test_rvs.py @@ -110,11 +110,12 @@ def test_solution(yangtze): assert ":BasinIndex 1 watershed" in conf.rvc +@pytest.mark.gis def test_rvh_from_extractor(yangtze): - from ravenpy.extractors import BasinMakerExtractor, open_shapefile + extractors = pytest.importorskip("ravenpy.extractors") shp = yangtze.fetch("basinmaker/drainage_region_0175_v2-1/finalcat_info_v2-1.zip") - bm = BasinMakerExtractor(open_shapefile(shp)) + bm = extractors.BasinMakerExtractor(extractors.open_shapefile(shp)) # Smoke test Config(**bm.extract(hru_from_sb=True)) diff --git a/tox.toml b/tox.toml index 1929b8b6..ecf3e0b2 100644 --- a/tox.toml +++ b/tox.toml @@ -1,56 +1,70 @@ envlist = [ "lint", - "py3.{10,11,12,13}", + "py3.{10,11,12,13,14}", "docs" ] skip_missing_interpreters = true [env.docs] -extras = ["docs"] +description = "Build the documentation with makefile under {basepython}" +labels = ["docs"] +dependency_groups = ["docs"] +extras = [] commands = [ - ["make", "autodoc"], - ["make", "--directory=docs", "clean", "html"] + ["make", "build-docs"] ] commands_post = [] allowlist_externals = ["make"] +[env.gis] +description = "Run tests with extras under {basepython}" +deps = ["numpy >= 1.25.0", "gdal == {env:GDAL_VERSION}.*", "setuptools >= 71.0"] +extras = ["gis", "raven-hydro"] +commands = [ + ["python", "-m", "pip", "install", "--no-user", "--upgrade", "--force-reinstall", "--no-deps", "--no-binary", "gdal", 
"gdal[numpy]=={env:GDAL_VERSION}.*"], + ["python", "-m", "pip", "install", "--no-user", "--upgrade", "--force-reinstall", "--no-deps", "--no-binary", "h5py", "h5py>=3.12.1"], + ["python", "-c", "import ravenpy.testing.utils as rtu; rtu.populate_testing_data(branch=\"{env:RAVEN_TESTDATA_BRANCH}\")"], + ["pytest", "{posargs}"] +] + [env.lint] +description = "Run code quality compliance tests under {basepython}" +labels = ["lint"] skip_install = true -deps = [ - "flake8 >= 7.3.0", - "flake8-rst-docstrings >= 0.4.0", - "ruff >= 0.14.3", - "numpydoc >= 1.9.0" +dependency_groups = ["lint"] +extras = [] +commands = [ + ["make", "lint"] ] -commands = [["make", "lint"]] commands_post = [] allowlist_externals = ["make"] [env.upstream] +description = "Run tests with pytest under {basepython} for upstream libraries" commands_pre = [ ["python", "-m", "pip", "list"], ["python", "-m", "pip", "check"], - ["python", "-m", "pip", "install", "--no-user", "--upgrade", "--force-reinstall", "--no-deps", "--no-cache-dir", "git+https://github.com/Ouranosinc/raven-hydro.git@{env:UPSTREAM_BRANCH}"] + ["python", "-m", "pip", "install", "--no-user", "--upgrade", "--force-reinstall", "--no-deps", "--no-binary", "raven-hydro", "git+https://github.com/Ouranosinc/raven-hydro.git@{env:UPSTREAM_BRANCH}"] ] [env_run_base] +description = "Run tests with pytest under {basepython}" setenv = {PYTEST_ADDOPTS = "--color=yes --numprocesses=logical --durations=10 --cov=ravenpy --cov-report=lcov", PYTHONPATH = "{toxinidir}", TOX = "{envname}", UPSTREAM_BRANCH = "main"} passenv = ["CI", "COVERALLS_*", "GDAL_VERSION", "GITHUB_*", "LD_LIBRARY_PATH", "RAVEN_*", "UPSTREAM_BRANCH"] -extras = ["dev", "gis", "raven-hydro"] download = true install_command = ["python", "-m", "pip", "install", "--no-user", "{opts}", "{packages}"] -deps = ["numpy >= 1.25.0", "gdal == {env:GDAL_VERSION}.*"] +dependency_groups = ["test"] +extras = ["raven-hydro"] commands_pre = [ ["python", "-m", "pip", "list"], ["python", "-m", "pip", 
"check"] ] commands = [ ["python", "-m", "pip", "install", "--no-user", "--upgrade", "--force-reinstall", "--no-deps", "--no-binary", "h5py", "h5py>=3.12.1"], - ["python", "-m", "pip", "install", "--no-user", "--upgrade", "--force-reinstall", "--no-deps", "--no-cache-dir", "--no-build-isolation", "gdal[numpy]=={env:GDAL_VERSION}.*"], - ["python", "-c", "import ravenpy.testing.utils as rtu; rtu.populate_testing_data(branch=\"{env:RAVEN_TESTDATA_BRANCH}\")"] + ["python", "-c", "import ravenpy.testing.utils as rtu; rtu.populate_testing_data(branch=\"{env:RAVEN_TESTDATA_BRANCH}\")"], + ["pytest", "{posargs}"] ] commands_post = [ - ["pytest", "{posargs}"], ["coverage", "report"] ]