diff --git a/.travis.yml b/.travis.yml index 6f8d832c5..308925d31 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,7 +12,7 @@ before_install: - conda info -a install: - - conda create --name icepyx-env --channel conda-forge python=3.10 proj geos earthaccess + - conda create --name icepyx-env --channel conda-forge python=3.10 proj geos - source activate icepyx-env - pip install -r requirements.txt -r requirements-dev.txt - pip install -e .[complete] @@ -27,8 +27,8 @@ stages: jobs: include: - stage: basic tests - script: pytest icepyx/ --verbose --ignore icepyx/tests/test_behind_NSIDC_API_login.py - after_success: codecov + script: pytest icepyx/ --verbose --cov app --ignore icepyx/tests/test_behind_NSIDC_API_login.py + after_success: codecov/codecov-action@v3 - stage: behind Earthdata script: diff --git a/ATTRIBUTION.rst b/ATTRIBUTION.rst index 28aa82b40..372d43897 100644 --- a/ATTRIBUTION.rst +++ b/ATTRIBUTION.rst @@ -1,50 +1,114 @@ .. _attribution_ref_label: -Attribution Guidelines -====================== +Recognizing Contributions +========================= -We are extremely grateful to everyone who has contributed to the success of the icepyx community, whether through direct contributions to or feedback about icepyx or as developers or maintainers of complimentary resources that are included within the icepyx ecosystem. This document outlines our goals to give appropriate attribution to all contributors to icepyx in ways that are fair and diverse and supportive of professional goals. To do so, we define broadly *contributions* as: +We are extremely grateful to everyone who has contributed to the success of the icepyx community and software. +This document outlines our goals to give appropriate attribution to all contributors to icepyx in ways that are fair and diverse and supportive of professional goals. 
+We define *contributions* broadly as: - Efforts towards achieving icepyx's goals, including writing code, tests, or documentation, - development of example workflows, development, significant contributions, or maintenance of - a tailored package that broadens the functionality of icepyx, feedback and suggestions, - community building, etc. + Efforts towards achieving icepyx's goals, including (1) writing code, tests, or documentation, + (2) development of example workflows, (3) development, significant contributions, or maintenance of + a tailored package that broadens the functionality of icepyx, (4) feedback and suggestions, + (5) community building, (6) etc. + +We recognize contributions in the following ways. + + **Note**: These policies are not set in stone and may be changed to + accommodate the growth of the project or the preferences of the community. -We use the terms "contributors", "developers", and "authors" interchangeably. We will recognize contributions in the following ways. Contributors List ----------------- -Anyone who has contributed a pull request to icepyx is welcome to add themselves to the ``CONTRIBUTORS.rst`` file located in the top level directory; the file is packaged and distributed with icepyx. This process is optional, but is the easiest way for us to say "thank you" to everyone who has helped this project. + +This project follows the `all-contributors `_ specification. +When you contribute to icepyx for the first time or in a new way, you or a maintainer can use the `All Contributors bot +to open a PR `_` to recognize your contribution. +Comment on an existing PR with `@all-contributors please add @ for `. +This will add you (or your new contribution type) to the ``CONTRIBUTORS.rst`` file located in the top level directory; +the file is packaged and distributed with icepyx, so each release has a record of contributors and their contribution types. + + +Changelog +--------- + +Each release includes a changelog of updates. 
+Everyone who has made a commit since the last release is listed, with new contributors indicated. +This list is automatically generated using a Sphinx extension; where available, full names are used. +If the user's full name is not available on GitHub, their GitHub handle is used. Example Workflows ----------------- -Many of the example workflows included within icepyx were developed by individuals or small teams for educational or research purposes. We encourage example developers to provide proper recognition for these efforts both within the notebook itself and by adding contributors to the `Contributors List`_ for attribution as describered herein. +Many of the example workflows included within icepyx were developed by individuals or small teams for educational or research purposes. +We encourage example developers to provide proper recognition for these efforts both within the notebook itself and +by adding contributors to the `Contributors List`_ for attribution as describered herein. -Version Release on Zenodo -------------------------- -When new releases of icepyx are archived on Zenodo, anyone who has contributed to icepyx will be invited to be an author. The list of potential authors will be generated using the `Contributors List`. Thus, if you have contributed to icepyx and would like to be included as an author, you *must* add your full name, affiliation ("Unaffiliated" is acceptable), and ORCID (optional) to ``CONTRIBUTORS.rst``. -Author order will be determined based on co-author discussion during preparation of the version release, led by one or more of the members of the lead development team (Anthony Arendt, Lindsey Heagy, Fernando Perez, Jessica Scheick). Metrics for guiding the determination of author order will include the number of commits made to the repository (``git shortlog -sne``) and active engagement on GitHub (e.g. through issues and pull requests) and Discourse. 
Author order may also be modified on a case-by-case basis by consensus of the lead development team and top contributors. +Version Releases on Zenodo +-------------------------- + +Each new release of icepyx is `archived on Zenodo `_. -If you do not wish to be included in the author list for Zenodo version releases, please add a note (e.g. "do not include in Zenodo") to your entry. +Following the collaborative approach of `The Turing Way `_, +we aim to encourage community leadership and shared ownership of icepyx. +To this end, beginning with version 0.6.4 (the full adoption of the all-contributors specification) +we collectively represent the icepyx authors in citations (including Zenodo releases) as "The icepyx Developers". +As described above, a complete list of contributors and their contribution types is available via the `Contributors List`_. -Scientific Publications (Papers) --------------------------------- + ** A note about releases `_ provided by `Fatiando a Terra `_ and encourages potential co-authors to consider the resources provided by the `NASA High Mountain Asia Team (HiMAT) `_. +Motivation and References +------------------------- + +Concepts and models of attribution, credit, contribution, and authorship can vary across time, application, and communities. +`FORCE11 `_ has an entire `Attribution Working Group `_ dedicated to working on attribution for research products. +`URSSI `_ hosted a workshop in 2019 (`report `_) +to identify core issues and propose solutions to challenges around software credit. +For software, current best practices (`e.g. `_) emphasize the importance of having a document +such as this one to describe an individual community's policies for credit, authorship, and attribution. +This document is an effort to describe icepyx's policies, with an awareness that they may change +to accomodate community growth, best practices, and feedback. 
+ +We do not attempt to identify contribution levels through the number of commits made to the repository (e.g. ``git shortlog -sne``) +or active engagement on GitHub (e.g. through issues, discussions, and pull requests) and Discourse. +The latter is difficult to quantify, and the use of squash merges into the development branch can mask the relative complexity +of various contributions and does not necessarily capture significant conceptual contributions. + + +Copyright notice: Preparation of this document and our credit policies was inspired in part by these `authorship guidelines `_ provided by `Fatiando a Terra `_ +and `The Turing Way `_. +We encourage potential contributors to consider the resources provided by the `NASA High Mountain Asia Team (HiMAT) `_ +and established or emerging best practices in their community. +Please get in touch if you would like to discuss updates to this contribution recognition policy. diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 000000000..f7e0ba025 --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,35 @@ +# This CITATION.cff file was generated with cffinit. +# Visit https://bit.ly/cffinit to generate yours today! + +cff-version: 1.2.0 +title: icepyx +message: >- + If you use this software, please cite the software version + you used by its version-specific DOI. + The DOI for each version is available on Zenodo (link below). +type: software +authors: + - name: The icepyx Developers +repository-code: 'https://github.com/icesat2py/icepyx' +url: 'https://icepyx.readthedocs.io/en/latest/' +repository: 'https://zenodo.org/record/7729175' +repository-artifact: 'https://anaconda.org/conda-forge/icepyx' +abstract: >- + icepyx is both a software library and a community composed + of ICESat-2 data users, developers, and the scientific + community. 
We are working together to develop a shared + library of resources - including existing resources, new + code, tutorials, and use-cases/examples - that simplify + the process of querying, obtaining, analyzing, and + manipulating ICESat-2 datasets to enable scientific + discovery. +keywords: + - ICESat-2 + - Python + - open science + - NASA +license: BSD-3-Clause +identifiers: + - description: "All archived versions of icepyx." + type: doi + value: 10.5281/zenodo.7729175 \ No newline at end of file diff --git a/CITATION.rst b/CITATION.rst index 955a6987f..37b88cf97 100644 --- a/CITATION.rst +++ b/CITATION.rst @@ -6,33 +6,50 @@ Citing icepyx icepyx ------ -This community and software is developed with the goal of supporting science applications. Thus, our contributors (including those who have developed the packages used within icepyx) and maintainers justify their efforts and demonstrate the impact of their work through citations. +This community and software is developed with the goal of supporting science applications. +Thus, our contributors (including those who have developed the packages used within icepyx) +and maintainers justify their efforts and demonstrate the impact of their work through citations. + +If you have used icepyx in your work, please consider citing our library. +We encourage you to use a version-specific citation and DOI (available from `Zenodo `_) +to improve reproducibility and let users know the state of the software at the time of your analysis. + +A non-versioned citation of icepyx: + The icepyx Developers, (2023). icepyx: Python tools for obtaining and working with ICESat-2 data. + Zenodo. https://doi.org/10.5281/zenodo.7729175 + -If you have used icepyx in your work, please consider citing our library: - Scheick, J. *et al.*, (2019). icepyx: Python tools for obtaining and working with ICESat-2 data. - https://github.com/icesat2py/icepyx. 
- A bibtex version for users working in Latex:: - @Misc{icepyx, - author = {Scheick, Jessica and others}, + @Misc{icepyx, + author = {{The icepyx Developers}}, organization = {icesat2py}, title = {{icepyx: Python} tools for obtaining and working with {ICESat-2} data}, - year = {2019--}, - url = "https://github.com/icesat2py/icepyx" - } + year = {2023}, + doi = "https://doi.org/10.5281/zenodo.7729175", + publisher = {Zenodo}, + url = "https://github.com/icesat2py/icepyx", + } +For more information on the "icepyx Developers", please see our `Attribution Guidelines `_. +See our docs for a `full list of contributors `_ and their contribution types. icepyx Dependencies --------------------------- -If you have used one of the included packages to extend your data analysis capabilities within icepyx, please consider additionally citing that work, because it represents an independent software contribution to the open-source community. `SciPy `_ provides a `helpful resource `_ for citing packages within the SciPy ecosystem (including Matplotlib, NumPy, pandas, and SciPy). Links to citation information for other commonly used packages are below. +------------------- +If you have used one of the included packages to extend your data analysis capabilities within icepyx, +please consider additionally citing that work, because it represents an independent software contribution to the open-source community. +`SciPy `_ provides a `helpful resource `_ for citing +packages within the SciPy ecosystem (including Matplotlib, NumPy, pandas, and SciPy). +Links to citation information for other commonly used packages are below. - `fiona `_ - `GeoPandas `_ - `Pangeo `_ - `shapely `_ +- `xarray `_ ICESat-2 Data ------------- -ICESat-2 data citation depends on the exact dataset used. Citation information for each data product can be found through the `NSIDC website `_. \ No newline at end of file +ICESat-2 data citation depends on the exact dataset used. 
+Citation information for each data product can be found through the `NSIDC website `_. \ No newline at end of file diff --git a/CONTRIBUTORS.rst b/CONTRIBUTORS.rst index cd970931c..c6b0c84f5 100644 --- a/CONTRIBUTORS.rst +++ b/CONTRIBUTORS.rst @@ -2,7 +2,7 @@ Project Contributors ==================== The following people have made contributions to the project (in alphabetical -order by last name) and are considered "The icepyx Developers". +order) and are considered "The icepyx Developers". Thanks goes to these wonderful people (`emoji key `_): .. raw:: html diff --git a/README.rst b/README.rst index cd2e2ff09..20709cb61 100644 --- a/README.rst +++ b/README.rst @@ -22,9 +22,9 @@ Current development version (development branch): |Docs Status dev| |Travis dev :alt: All Contributors :target: https://github.com/icesat2py/icepyx/blob/main/CONTRIBUTORS.rst -.. |JOSS| image:: https://joss.theoj.org/papers/a355344b24ffa5fda7a04a5067e8be1f/status.svg - :alt: JOSS publication status - :target: https://joss.theoj.org/papers/a355344b24ffa5fda7a04a5067e8be1f +.. |JOSS| image:: https://joss.theoj.org/papers/10.21105/joss.04912/status.svg + :alt: JOSS publication link and DOI + :target: https://doi.org/10.21105/joss.04912 .. |Docs Status main| image:: https://readthedocs.org/projects/icepyx/badge/?version=latest :target: http://icepyx.readthedocs.io/?badge=latest diff --git a/doc/source/contributing/how_to_contribute.rst b/doc/source/contributing/how_to_contribute.rst index 42f837027..b55394d55 100644 --- a/doc/source/contributing/how_to_contribute.rst +++ b/doc/source/contributing/how_to_contribute.rst @@ -75,6 +75,7 @@ to ensure you have the most up to date version of icepyx in your library. If you are modifying portions of code, you will need to run + .. code-block:: shell pip install -e. @@ -82,6 +83,20 @@ If you are modifying portions of code, you will need to run within your Python environment to use your real-time edited version of the code during runtime. 
+Setting up a Development Work Environment +----------------------------------------- + +icepyx uses a few tools to ensure that files have consistent formatting and run tests. +You can easily install the ones most frequently used by creating a new mamba (or conda) +environment (from the home level of your local copy of the icepyx repo) with + +.. code-block:: shell + + mamba env create --name icepyx-env --channel conda-forge -f requirements-dev.txt -f requirements.txt + +and then pip installing icepyx as described above and below. + + Considerations with Jupyter Notebook ------------------------------------ diff --git a/doc/source/contributing/icepyx_internals.rst b/doc/source/contributing/icepyx_internals.rst new file mode 100644 index 000000000..e224be3ce --- /dev/null +++ b/doc/source/contributing/icepyx_internals.rst @@ -0,0 +1,70 @@ +icepyx Internals +================ + +Authentication +-------------- +Authentication in icepyx is handled using a Mixin class. A Mixin class is a class +which defines functionality that may be desired by multiple other classes within +a library. For example, at this time both the Query and Variables classes need +to be able to authenticate. Instead of defining the same properties and +functionality twice, icepyx has an EarthdataAuthMixin class that is inherited +by both modules. + +**Property Access** + +Even though they aren't explicity defined in the init method, properties +like ``.session`` are accessible on a Query object because they are inherited. The +code that indicates this to Python is ``EarthdataAuthMixin.__init__(self)``. + +For example: + +.. 
code-block:: python + + import icepyx as ipx + + region_a = ipx.Query('ATL06',[-45, 74, -44,75],['2019-11-30','2019-11-30'], \ + start_time='00:00:00', end_time='23:59:59') + + # authentication can be accessed via the Query object + region_a.session + region_a.s3login_credentials + + +**Adding authentication to a new class** + +To add authentication to an additional icepyx class, one needs to add the Mixin +to the class. To do this: + +1. Add the EarthdataAuthMixin class to the ``class`` constructor (and import the mixin) +2. Add the EarthdataAuthMixin init method within the init method of the new class ``EarthdataAuthMixin.__init__(self)`` +3. Access the properties using the **public** properties (Ex. ``self.session``, not ``self._session``.) + +A minimal example of the new class (saved in ``icepyx/core/newclass.py``) would be: + +.. code-block:: python + + from icepyx.core.auth import EarthdataAuthMixin + + class MyNewClass(EarthdataAuthMixin): + def __init__(self): + self.mynewclassproperty = True + + EarthdataAuthMixin.__init__(self) + + def my_exciting_new_method(self): + # This method requires login + s = self.session + print(s) + return 'We authenticated inside the method!' + + +The class would then be accessible with: + +.. code-block:: python + + from icepyx.core.newclass import MyNewClass + + n = MyNewClass() + + n.session + n.my_exciting_new_method() diff --git a/doc/source/contributing/release_guide.rst b/doc/source/contributing/release_guide.rst new file mode 100644 index 000000000..2bd901fb3 --- /dev/null +++ b/doc/source/contributing/release_guide.rst @@ -0,0 +1,137 @@ +Release Guide +============= + +Interested in the process for creating a new icepyx release? +Here is a guide outlining the process and how to fix common mistakes. + +Create a Release Log +-------------------- + +Create a new branch from the development branch. +You'll create and update the release documents on this branch. 
+ +In ``doc/source/user_guide/changelog`` is a file called ``template.rst``. +Make a *copy* of this file and update the copy's filename to your version release number. +We follow standard `semantic versioning `_ practices. + +Create an entry for the current "Latest Release": + +.. code-block:: rst + + Version 0.x.y + ------------- + .. toctree:: + :maxdepth: 2 + + v0.x.y + + +Add your new version to the ``doc/source/user_guide/changelog/index.rst`` as the "Latest Release". + +.. code-block:: rst + + Latest Release (Version 0.8.0) + ------------------------------ + + .. toctree:: + :maxdepth: 2 + + v0.8.0 + + +Now, populate your new release file by filling in the template. +You will probably need to make the release date a few days out to allow time for review and merging. + +There are no strict rules for how you generate the content for the file. +One method is to use a ``git log`` command to display the commit history of the development branch since the last release. +If you're using git in terminal, ``checkout development`` and make sure your local development branch is up-to-date (``git pull``). +Then run ``git log 'v0.x.y'...HEAD`` where 'v0.x.y' is the current/latest release. +You can sort and edit the commit messages as needed to populate the changelog. + +Add your new changelog file, commit and push your changes, and head to GitHub to open a Pull Request (PR). + + +Create a Release Pull Request to the Development Branch +------------------------------------------------------- + +On GitHub, create a PR from your release branch into the development branch. +Once the PR is reviewed and all the tests pass, you or your reviewer can squash and merge the PR into the development branch. + +Now you're ready to update main and actually package your new release! 
+ + +Create a Pull Request from Development to Main +---------------------------------------------- + +The changelog is completed, we're not waiting for any more PRs to be merged, and we're ready to share the newest version of icepyx with the world. +Create a PR to merge the development branch into main (so main will now be your base branch). +If any tests fail, you may need to do some debugging. +This will involve submitting a new PR to development with whatever debugging changes you've made. +Once merged into development, any changes will automatically be reflected in this step's PR, and the tests will rerun automatically. + +With an approving review and passed tests in hand, you're ready to push the new release! +Unlike when you merge new features into ``development`` with a squash merge, for this step you'll want to use a plain old merge. +This makes it easy to keep ``development`` and ``main`` even instead of diverging due to a series of merge commits. +`This website `_ does a great job explaining the how and why of not using a squash merge here. + +However, if you forget and squash merge, never fear. +You can simply revert the commit and begin again from the beginning of this step. + + +Update the Development Branch Head +---------------------------------- + +We want to make sure at this point that the ``development`` and ``main`` branches are even. +You can do this with a git API, but the way to do it using git in terminal is: + +.. code-block:: shell + + git pull + git checkout development + git merge main + git push origin development:development + + +**If you have to create a merge commit message, STOP!** +You've done something wrong and need to go back to the previous step. +Creating the merge commit will make ``main`` and ``development`` diverge and the repo maintainers sad. + + +Tag the Release +--------------- + +Last, but potentially most importantly, we need to tag and create the release. 
+This step will trigger the package to be built and update the distribution available from conda and PyPI. +It will also publish the new release on Zenodo. +GitHub makes releases easy - on the repo's home page, simply select "Releases" from the right hand side +and then the "Draft a New Release" button. +Add a new tag with the version number of your release, making sure it points to the ``main`` branch +(by default, GitHub will suggest the ``development`` branch!) +Fill out the form and create the release. + +If you tag the release too soon (and there end up being more commits), or point it to the wrong branch/commit, never fear. +You can delete the release from GitHub with the click of a button. +If you want to reuse the version tag though (you most likely do), you'll first have to remove the tag locally and push the updated (deleted) tag to GitHub: + +.. code-block:: shell + + git push --delete origin tagname + + +See `this guide `_ on how to delete local and remote git tags. + +Then you can go back to the beginning of this step to create a new tag and release. +Alternatively, you may be better off yanking the previous release (but leaving the tag) and increasing your patch number in a new tag+release. +This may be necessary if you have a failing release already on PyPI. + + +Finishing Up +------------ + +If all went according to plan, you should see your most recent version of icepyx available from PyPI within a few moments. +It won't happen immediately, as they need to properly build the installation files. +To make the latest release available via conda-forge, a few bots will run and let the feedstock maintainers know when it's ready or if there are any issues. +Then they can manually approve the merge to the feedstock repo and the new release will be available in a few minutes. + +Congratulations! You released a new version of icepyx! +Share the good news on Twitter or Slack and appreciate your hard work and contributions to open-source development. 
\ No newline at end of file diff --git a/doc/source/example_notebooks/IS2_DEM_comparison_WIP.ipynb b/doc/source/example_notebooks/IS2_DEM_comparison_WIP.ipynb index dc8256019..0d897c393 100644 --- a/doc/source/example_notebooks/IS2_DEM_comparison_WIP.ipynb +++ b/doc/source/example_notebooks/IS2_DEM_comparison_WIP.ipynb @@ -148,17 +148,6 @@ "### Log in to Earthdata" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "earthdata_uid = 'Jessica.scheick'\n", - "email = 'jessica.scheick@maine.edu'\n", - "region_a.earthdata_login(earthdata_uid, email)" - ] - }, { "cell_type": "code", "execution_count": null, @@ -180,6 +169,19 @@ "region_a.granules.avail" ] }, + { + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, + "source": [ + "```{admonition} Important Authentication Update\n", + "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods decribed in the [ICESat-2 Data Access Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) example. The `.earthdata_login()` function is still available for backwards compatibility.\n", + "\n", + "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. 
`region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n", + "```" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -895,7 +897,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.4" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/doc/source/example_notebooks/IS2_cloud_data_access.ipynb b/doc/source/example_notebooks/IS2_cloud_data_access.ipynb index b54be1786..fa0931c8a 100644 --- a/doc/source/example_notebooks/IS2_cloud_data_access.ipynb +++ b/doc/source/example_notebooks/IS2_cloud_data_access.ipynb @@ -2,7 +2,9 @@ "cells": [ { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "# ICESat-2 AWS cloud data access\n", "This notebook ({nb-download}`download `) illustrates the use of icepyx for accessing ICESat-2 data currently available through the AWS (Amazon Web Services) us-west2 hub s3 data bucket.\n", @@ -74,23 +76,37 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "## Log in to Earthdata and generate an s3 token\n", - "You can use icepyx's existing login functionality to generate your s3 data access token, which will be valid for *one* hour.\n", + "You can use icepyx's existing login functionality to generate your s3 data access token, which will be valid for *one* hour. The icepyx module will renew the token for you after an hour, but if viewing your token over the course of several hours you may notice the values will change.\n", "\n", - "We currently do not have this set up to automatically renew, but [earthaccess](https://nsidc.github.io/earthaccess/), which icepyx uses under the hood for authentication, is working on handling the limits imposed by expiring s3 tokens. If you're interested in working on helping icepyx and earthaccess address these challenges, please get in touch or submit a PR. 
Documentation/example testers are always appreciated (so you don't have to understand the code)!" + "You can access your s3 credentials using:" ] }, { "cell_type": "code", "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# uncommenting the line below will print your temporary login credentials\n", + "# reg.s3login_credentials" + ] + }, + { + "cell_type": "markdown", "metadata": { - "scrolled": true + "user_expressions": [] }, - "outputs": [], "source": [ - "reg.earthdata_login(s3token=True)" + "```{admonition} Important Authentication Update\n", + "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods decribed in the [ICESat-2 Data Access Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) example. The `.earthdata_login()` function is still available for backwards compatibility.\n", + "\n", + "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. 
`region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n", + "```" ] }, { @@ -106,7 +122,7 @@ "metadata": {}, "outputs": [], "source": [ - "s3 = earthaccess.get_s3fs_session(daac='NSIDC', provider=reg._s3login_credentials)" + "s3 = earthaccess.get_s3fs_session(daac='NSIDC', provider=reg.s3login_credentials)" ] }, { @@ -176,7 +192,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.8" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/doc/source/example_notebooks/IS2_data_access.ipynb b/doc/source/example_notebooks/IS2_data_access.ipynb index dcaf31552..d9d50cdc0 100644 --- a/doc/source/example_notebooks/IS2_data_access.ipynb +++ b/doc/source/example_notebooks/IS2_data_access.ipynb @@ -19,7 +19,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "import icepyx as ipx\n", @@ -30,18 +32,18 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "---------------------------------\n", "\n", "## Quick-Start Guide\n", "\n", - "The entire process of getting ICESat-2 data (from query to download) can ultimately be accomplished in three minimal lines of code:\n", + "The entire process of getting ICESat-2 data (from query to download) can ultimately be accomplished in two minimal lines of code:\n", "\n", "`region_a = ipx.Query(short_name, spatial_extent, date_range)`\n", "\n", - "`region_a.earthdata_login()`\n", - "\n", "`region_a.download_granules(path)`\n", "\n", "where the function inputs are described in more detail below.\n", @@ -51,14 +53,15 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "## Key Steps for Programmatic Data Access\n", "\n", "There are several key steps for accessing data from the NSIDC API:\n", "1. Define your parameters (spatial, temporal, dataset, etc.)\n", "2. 
Query the NSIDC API to find out more information about the dataset\n", - "3. Log in to NASA Earthdata\n", "4. Define additional parameters (e.g. subsetting/customization options)\n", "5. Order your data\n", "6. Download your data\n", @@ -68,7 +71,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "### Create an ICESat-2 data object with the desired search parameters\n", "\n", @@ -87,17 +92,25 @@ "Per NSIDC requirements, geospatial polygon files may only contain one feature (polygon).*\n", "\n", "Then, for all non-gridded products (ATL<=13), you must include AT LEAST one of the following inputs (temporal or orbital constraints):\n", - "- `date_range` = the date range for which you would like to search for results. Must be formatted as a set of 'YYYY-MM-DD' strings separated by a comma.\n", + "- `date_range` = the date range for which you would like to search for results. The following formats are accepted: \n", + " - A list of two 'YYYY-MM-DD' strings separated by a comma\n", + " - A list of two 'YYYY-DOY' strings separated by a comma\n", + " - A list of two datetime.date or datetime.datetime objects\n", + " - Dict with the following keys:\n", + " - `start_date`: start date, type can be datetime.datetime, datetime.date, or strings (format 'YYYY-MM-DD' or 'YYYY-DOY')\n", + " - `end_date`: end date, type can be datetime.datetime, datetime.date, or strings (format 'YYYY-MM-DD' or 'YYYY-DOY')\n", "- `cycles` = Which orbital cycle to use, input as a numerical string or a list of strings. If no input is given, this value defaults to all available cycles within the search parameters. An orbital cycle refers to the 91-day repeat period of the ICESat-2 orbit.\n", "- `tracks` = Which [Reference Ground Track (RGT)](https://icesat-2.gsfc.nasa.gov/science/specs) to use, input as a numerical string or a list of strings. 
If no input is given, this value defaults to all available RGTs within the spatial and temporal search parameters.\n", "\n", - "Below are examples of each type of spatial extent input using a date range and an example using orbital parameters. Please choose and run only one of the next three cells to set your spatial parameters." + "Below are examples of each type of spatial extent and temporal input and an example using orbital parameters. Please choose and run only one of the input option cells to set your spatial and temporal parameters." ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "# bounding box\n", @@ -109,7 +122,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "# polygon vertices (here equivalent to the bounding box, above)\n", @@ -121,7 +136,55 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# bounding box with 'YYYY-DOY' date range (equivalent to 'YYYY-MM-DD' date ranges above)\n", + "short_name = 'ATL06'\n", + "spatial_extent = [-55, 68, -48, 71]\n", + "date_range = ['2019-051','2019-059']" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# polygon vertices with datetime.datetime date ranges\n", + "import datetime as dt\n", + "\n", + "start_dt = dt.datetime(2019, 2, 20, 0, 10, 0)\n", + "end_dt = dt.datetime(2019, 2, 28, 14, 45, 30)\n", + "short_name = 'ATL06'\n", + "spatial_extent = [(-55, 68), (-55, 71), (-48, 71), (-48, 68), (-55, 68)]\n", + "date_range = [start_dt, end_dt]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# bounding box with dict containing date ranges\n", + "short_name = 'ATL06'\n", + "spatial_extent = [-55, 68, -48, 
71]\n", + "date_range = {\"start_date\": start_dt, \"end_date\": '2019-02-28'}" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "# polygon geospatial file (metadata match but no subset match)\n", @@ -142,7 +205,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "Create the data object using our inputs" ] @@ -150,7 +215,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "region_a = ipx.Query(short_name, spatial_extent, date_range)" @@ -159,7 +226,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "# using orbital parameters with one of the above data products + spatial parameters\n", @@ -174,7 +243,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "These properties include visualization of the spatial extent on a map. The style of map you will see depends on whether or not you have a certain library, `geoviews`, installed. Under the hood, this is because the `proj` library must be installed with conda (it is not available from PyPI) to support some `geoviews` dependencies. With `geoviews`, this plotting function returns an interactive map. Otherwise, your spatial extent will plot on a static map using `matplotlib`." ] @@ -182,7 +253,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "# print(region_a.spatial_extent)\n", @@ -191,7 +264,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "Formatted parameters and function calls allow us to see the the properties of the data object we have created." 
] @@ -199,27 +274,34 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "print(region_a.product)\n", - "print(region_a.dates)\n", - "print(region_a.start_time)\n", - "print(region_a.end_time)\n", + "print(region_a.temporal) # .dates, .start_time, .end_time can also be used for a piece of this information\n", + "# print(region_a.dates)\n", + "# print(region_a.start_time)\n", + "# print(region_a.end_time)\n", "print(region_a.cycles)\n", "print(region_a.tracks)\n", "print(region_a.product_version)\n", - "# print(region_a.spatial_extent)\n", "region_a.visualize_spatial_extent()" ] }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ - "There are also several optional inputs to allow the user finer control over their search. Start and end time are only valid inputs on a temporally limited search.\n", + "There are also several optional inputs to allow the user finer control over their search. Start and end time are only valid inputs on a temporally limited search, and they are ignored if your `date_range` input is a datetime.datetime object.\n", "- `start_time` = start time to search for data on the start date. If no input is given, this defaults to 00:00:00.\n", - "- `end_time` = end time for the end date of the temporal search parameter. If no input is given, this defaults to 23:59:59. Times must be input as 'HH:mm:ss' strings.\n", + "- `end_time` = end time for the end date of the temporal search parameter. If no input is given, this defaults to 23:59:59. \n", + "\n", + "Times must be input as 'HH:mm:ss' strings or datetime.time objects.\n", + "\n", "- `version` = What version of the data product to use, input as a numerical string. If no input is given, this value defaults to the most recent version of the product specified in `short_name`.\n", "\n", "*NOTE Version 002 is used as an example in the below cell. 
However, using it will cause 'no results' errors in granule ordering for some search parameters. These issues have been resolved in later versions of the data products, so it is best to use the most recent version where possible.\n", @@ -230,7 +312,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "region_a = ipx.Query(short_name, spatial_extent, date_range, \\\n", @@ -238,15 +322,16 @@ "\n", "print(region_a.product)\n", "print(region_a.dates)\n", - "print(region_a.start_time)\n", - "print(region_a.end_time)\n", "print(region_a.product_version)\n", - "# print(region_a.spatial_extent)" + "print(region_a.spatial)\n", + "print(region_a.temporal)" ] }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "Alternatively, you can also just create the query object without creating named variables first:" ] @@ -254,7 +339,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "# region_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-01','2019-02-28'], \n", @@ -263,7 +350,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "### More information about your query object\n", "In addition to viewing the stored object information shown above (e.g. product short name, start and end date and time, version, etc.), we can also request summary information about the data product itself or confirm that we have manually specified the latest version." 
@@ -272,7 +361,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "region_a.product_summary_info()\n", @@ -281,7 +372,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "If the summary does not provide all of the information you are looking for, or you would like to see information for previous versions of the data product, all available metadata for the collection product is available in a readable format." ] @@ -290,7 +383,8 @@ "cell_type": "code", "execution_count": null, "metadata": { - "scrolled": true + "scrolled": true, + "tags": [] }, "outputs": [], "source": [ @@ -299,7 +393,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "### Querying a data product\n", "In order to search the product collection for available data granules, we need to build our search parameters. This is done automatically behind the scenes when you run `region_a.avail_granules()`, but you can also build and view them by calling `region_a.CMRparams`. These are formatted as a dictionary of key:value pairs according to the [CMR documentation](https://cmr.earthdata.nasa.gov/search/site/docs/search/api.html)." 
@@ -308,7 +404,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "#build and view the parameters that will be submitted in our query\n", @@ -317,7 +415,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "Now that our parameter dictionary is constructed, we can search the CMR database for the available granules.\n", "Granules returned by the CMR metadata search are automatically stored within the data object.\n", @@ -329,7 +429,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "#search for available granules and provide basic summary info about them\n", @@ -339,7 +441,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "#get a list of granule IDs for the available granules\n", @@ -350,7 +454,8 @@ "cell_type": "code", "execution_count": null, "metadata": { - "scrolled": true + "scrolled": true, + "tags": [] }, "outputs": [], "source": [ @@ -360,10 +465,12 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "### Log in to NASA Earthdata\n", - "In order to download any data from NSIDC, we must first authenticate ourselves using a valid (free) Earthdata account. This creates and handles the required authentication to interface with the data at the DAAC (including ordering and download).\n", + "When downloading data from NSIDC, all users must login using a valid (free) Earthdata account. The process of authenticating is handled by icepyx by creating and handling the required authentication to interface with the data at the DAAC (including ordering and download). Authentication is completed as login-protected features are accessed. 
In order to allow icepyx to login for us we still have to make sure that we have made our Earthdata credentials available for icepyx to find.\n", "\n", "There are multiple ways to provide your Earthdata credentials via icepyx. Behind the scenes, icepyx is using the [earthaccess library](https://nsidc.github.io/earthaccess/). The [earthaccess documentation](https://nsidc.github.io/earthaccess/tutorials/restricted-datasets/#auth) automatically tries three primary mechanisms for logging in, all of which are supported by icepyx:\n", "- with `EARTHDATA_USERNAME` and `EARTHDATA_PASSWORD` environment variables (these are the same as the ones you might have set for icepyx previously)\n", @@ -373,23 +480,17 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "```{admonition} Important Authentication Update\n", - "Previously, icepyx required you to provide certain inputs to the `earthdata_login()` function, e.g. `region_a.earthdata_login(earthdata_uid, email).\n", - "These inputs are no longer required, but the keywords are still accepted for backwards compatibility.\n", + "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods described above. The `.earthdata_login()` function is still available for backwards compatibility.\n", + "\n", + "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. 
`region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n", "```" ] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "region_a.earthdata_login()" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -417,7 +518,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "print(region_a.reqparams)\n", @@ -450,7 +553,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "region_a.subsetparams()" @@ -467,7 +572,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "region_a.order_granules()\n", @@ -477,7 +584,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "#view a short list of order IDs\n", @@ -495,7 +604,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "path = './download'\n", @@ -515,9 +626,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "icepyx-dev", "language": "python", - "name": "python3" + "name": "icepyx-dev" }, "language_info": { "codemirror_mode": { @@ -529,7 +640,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/doc/source/example_notebooks/IS2_data_access2-subsetting.ipynb b/doc/source/example_notebooks/IS2_data_access2-subsetting.ipynb index 35537c641..89247de5f 100644 --- a/doc/source/example_notebooks/IS2_data_access2-subsetting.ipynb +++ b/doc/source/example_notebooks/IS2_data_access2-subsetting.ipynb @@ -2,7 +2,9 @@ "cells": [ { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + 
}, "source": [ "# Subsetting ICESat-2 Data\n", "This notebook ({nb-download}`download `) illustrates the use of icepyx for subsetting ICESat-2 data ordered through the NSIDC DAAC. We'll show how to find out what subsetting options are available and how to specify the subsetting options for your order.\n", @@ -67,12 +69,16 @@ ] }, { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, "source": [ - "region_a.earthdata_login()" + "```{admonition} Important Authentication Update\n", + "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods described in the [ICESat-2 Data Access Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) example. The `.earthdata_login()` function is still available for backwards compatibility.\n", + "\n", + "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. 
`region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n", + "```" ] }, { @@ -361,7 +367,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/doc/source/example_notebooks/IS2_data_read-in.ipynb b/doc/source/example_notebooks/IS2_data_read-in.ipynb index a543d03df..115c63044 100644 --- a/doc/source/example_notebooks/IS2_data_read-in.ipynb +++ b/doc/source/example_notebooks/IS2_data_read-in.ipynb @@ -3,7 +3,9 @@ { "cell_type": "markdown", "id": "552e9ef9", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "# Reading ICESat-2 Data in for Analysis\n", "This notebook ({nb-download}`download `) illustrates the use of icepyx for reading ICESat-2 data files, loading them into a data object.\n", @@ -20,10 +22,7 @@ "Instead of needing to manually iterate through the beam pairs, you can provide a few options to the `Read` object and icepyx will do the heavy lifting for you (as detailed in this notebook).\n", "\n", "### Approach\n", - "If you're interested in what's happening under the hood: icepyx turns your instructions into something called a catalog, then uses the Intake library and the catalog to actually load the data into memory. Specifically, icepyx creates an [Intake](https://intake.readthedocs.io/en/latest/) data [catalog](https://intake.readthedocs.io/en/latest/catalog.html) for each requested variable and then merges the read-in data from each of the variables to create a single data object.\n", - "\n", - "Intake catalogs are powerful (and the tool we selected) because they can be saved, shared, modified, and reused to reproducibly read in a set of data files in a consistent way as part of an analysis workflow.\n", - "This approach streamlines the transition between data sources (local/downloaded files or, ultimately, cloud/bucket access) and data object types (e.g. 
[Xarray Dataset](http://xarray.pydata.org/en/stable/generated/xarray.Dataset.html) or [GeoPandas GeoDataFrame](https://geopandas.org/docs/reference/api/geopandas.GeoDataFrame.html))." + "If you're interested in what's happening under the hood: icepyx uses the [xarray](https://docs.xarray.dev/en/stable/) library to read in each of the requested variables of the dataset. icepyx formats each requested variable and then merges the read-in data from each of the variables to create a single data object. The use of xarray is powerful, because the returned data object can be used with relevant xarray processing tools." ] }, { @@ -47,7 +46,9 @@ { "cell_type": "markdown", "id": "1ffb9a0c", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "---------------------------------\n", "\n", @@ -101,7 +102,9 @@ { "cell_type": "markdown", "id": "b8875936", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "---------------------------------------\n", "## Key steps for loading (reading) ICESat-2 data\n", @@ -119,7 +122,9 @@ { "cell_type": "markdown", "id": "9bf6d38c", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "### Step 0: Get some data if you haven't already\n", "Here are a few lines of code to get you set up with a few data files if you don't already have some on your local system." 
@@ -139,21 +144,25 @@ { "cell_type": "code", "execution_count": null, - "id": "2bb83dfe", + "id": "e6f7c047", "metadata": {}, "outputs": [], "source": [ - "region_a.earthdata_login()" + "region_a.download_granules(path=path_root)" ] }, { - "cell_type": "code", - "execution_count": null, - "id": "e6f7c047", - "metadata": {}, - "outputs": [], + "cell_type": "markdown", + "id": "04f62f30-b13c-4cfc-95b0-dd1e048f6a85", + "metadata": { + "user_expressions": [] + }, "source": [ - "region_a.download_granules(path=path_root)" + "```{admonition} Important Authentication Update\n", + "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods described in the [ICESat-2 Data Access Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) example. The `.earthdata_login()` function is still available for backwards compatibility.\n", + "\n", + "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. 
`region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n", + "```" ] }, { @@ -209,7 +218,9 @@ { "cell_type": "markdown", "id": "92743496", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "### Step 2: Create a filename pattern for your data files\n", "\n", @@ -265,7 +276,9 @@ { "cell_type": "markdown", "id": "4275b04c", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "### Step 3: Create an icepyx read object\n", "\n", @@ -273,9 +286,8 @@ "- `path` = a string with the full file path or full directory path to your hdf5 (.h5) format files.\n", "- `product` = the data product you're working with, also known as the \"short name\".\n", "\n", - "The `Read` object also accepts two optional keyword inputs:\n", - "- `pattern` = a formatted string indicating the filename pattern required for Intake's path_as_pattern argument.\n", - "- `catalog` = a string with the full path to an Intake catalog, for users who wish to use their own catalog (note this may have unintended consequenses if multiple granules are being combined)." + "The `Read` object also accepts the optional keyword input:\n", + "- `pattern` = a formatted string indicating the filename pattern required for Intake's path_as_pattern argument." 
] }, { @@ -303,7 +315,9 @@ { "cell_type": "markdown", "id": "da8d8024", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "### Step 4: Specify variables to be read in\n", "\n", @@ -329,7 +343,9 @@ { "cell_type": "markdown", "id": "b2449941", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "To make things easier, you can use icepyx's built-in default list that loads commonly used variables for your non-gridded data product, or create your own list of variables to be read in.\n", "icepyx will determine what variables are available for you to read in by creating a list from one of your source files.\n", @@ -345,7 +361,9 @@ { "cell_type": "markdown", "id": "55092d1b", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "For a basic case, let's say we want to read in height, latitude, and longitude for all beam pairs.\n", "We create our variables list as" @@ -364,7 +382,9 @@ { "cell_type": "markdown", "id": "fff0bb19", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "Then we can view a dictionary of the variables we'd like to read in." ] @@ -382,7 +402,9 @@ { "cell_type": "markdown", "id": "9d5b50b5", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "Don't forget - if you need to start over, and re-generate your wanted variables list, it's easy!" ] @@ -400,13 +422,23 @@ { "cell_type": "markdown", "id": "473de4d7", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "### Step 5: Loading your data\n", "\n", "Now that you've set up all the options, you're ready to read your ICESat-2 data into memory!" 
] }, + { + "cell_type": "code", + "execution_count": null, + "id": "4a66d889-8d2d-4b9a-821a-96a394ff8d66", + "metadata": {}, + "outputs": [], + "source": [] + }, { "cell_type": "code", "execution_count": null, @@ -420,7 +452,9 @@ { "cell_type": "markdown", "id": "db6560f1", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "Within a Jupyter Notebook, you can get a summary view of your data object.\n", "\n", @@ -442,7 +476,9 @@ { "cell_type": "markdown", "id": "b1d7de2d", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "## On to data analysis!\n", "\n", @@ -465,7 +501,9 @@ { "cell_type": "markdown", "id": "6421f67c", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "A developer note to users:\n", "our next steps will be to create an xarray extension with ICESat-2 aware functions (like \"get_strong_beams\", etc.).\n", @@ -474,191 +512,38 @@ }, { "cell_type": "markdown", - "id": "6edfbb25", - "metadata": {}, - "source": [ - "### More on Intake catalogs and the read object\n", - "\n", - "As anyone familiar with ICESat-2 hdf5 files knows, one of the challenges to reading in data is looping through all of the beam pairs for each track.\n", - "The icepyx read module takes advantage of icepyx's variables module, which has some awareness of ICESat-2 data and uses that to save the user the trouble of having to loop through each beam pair.\n", - "The `reader.load()` function does this by automatically creating minimal Intake catalogs for each variable path, reading in the data, and merging each variable into a ready-to-analyze Xarray DataSet.\n", - "The Intake savvy user may wish to view the template catalog or use an existing catalog." 
- ] - }, - { - "cell_type": "markdown", - "id": "0f0076f9", - "metadata": {}, - "source": [ - "#### Viewing the template catalog\n", - "\n", - "You can access the ICESat-2 catalog template as an attribute of the read object.\n", - "\n", - "***NOTE: accessing `reader.is2catalog` creates a template with a placeholder in the 'group' parameter; thus, it will not work to actually read in data***" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2de29fd8", + "id": "1b0cb477", "metadata": { - "scrolled": true + "user_expressions": [] }, - "outputs": [], - "source": [ - "reader.is2catalog" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "7a5deef8", - "metadata": {}, - "outputs": [], - "source": [ - "reader.is2catalog.gui" - ] - }, - { - "cell_type": "markdown", - "id": "fef43556", - "metadata": {}, "source": [ - "#### Use an existing catalog\n", - "If you already have a catalog for your data, you can supply that when you create the read object." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "64986a60", - "metadata": {}, - "outputs": [], - "source": [ - "catpath = path_root + 'test_catalog.yml'\n", - "reader = ipx.Read(filepath, pattern, catpath)" - ] - }, - { - "cell_type": "markdown", - "id": "cf930e0a", - "metadata": {}, - "source": [ - "Then, you can use the catalog you supplied by calling intake's `read` directly to read in the specified data variable." 
+ "#### Credits\n", + "* original notebook by: Jessica Scheick\n", + "* notebook contributors: Wei Ji and Tian" ] }, { "cell_type": "code", "execution_count": null, - "id": "dd0e086a", + "id": "aaf6f5a6-355b-456a-99fd-ce0b51045b58", "metadata": {}, "outputs": [], - "source": [ - "ds = reader.is2catalog.read()" - ] - }, - { - "cell_type": "markdown", - "id": "60b1a304", - "metadata": {}, - "source": [ - "***NOTE: this means that you will only be able to read in a single data variable!***\n", - "\n", - "To take advantage of icepyx's knowledge of ICESat-2 data nesting of beam pairs and read in multiple related variables at once, you must use the variable approach outlined earlier in this tutorial." - ] + "source": [] }, { "cell_type": "code", "execution_count": null, - "id": "f5e3a221", + "id": "8ea1987f-b6bf-44df-a869-949290f498cb", "metadata": {}, "outputs": [], - "source": [ - "ds = reader.load()\n", - "ds" - ] - }, - { - "cell_type": "markdown", - "id": "d56fc41c", - "metadata": {}, - "source": [ - "### More customization options\n", - "\n", - "If you'd like to use the icepyx ICESat-2 Catalog template to create your own customized catalog, we recommend that you access the `build_catalog` function directly, which returns an Intake Catalog instance.\n", - "\n", - "You'll need to supply the required `data_source`, `path_pattern`, and `source_type` arguments. `data_source` and `path_pattern` are described in Steps 2 and 3 of this tutorial. 
`source_type` is the string you'd like to use for your Local Catalog entry.\n", - "\n", - "This function accepts as keyword input arguments (kwargs) dictionaries with appropriate keys (depending on the Intake driver you are using).\n", - "The simplest version of this is specifying the variable parameters and paths of interest.\n", - "`grp_paths` may contain \"variables\", each of which must then be further defined by `grp_path_params`.\n", - "You cannot use glob-like path syntax to access variables (so `grp_path = '/*/land_ice_segments'` is NOT VALID)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f174f885", - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "import icepyx.core.is2cat as is2cat\n", - "\n", - "# build a custom ICESat-2 catalog with a group and parameter\n", - "cat = is2cat.build_catalog(data_source = path_root,\n", - " path_pattern = pattern,\n", - " source_type = \"manual_catalog\",\n", - " grp_paths = \"/{{gt}}/land_ice_segments\",\n", - " grp_path_params = [{\"name\": \"gt\",\n", - " \"description\": \"Ground track\",\n", - " \"type\": \"str\",\n", - " \"default\": \"gt1l\",\n", - " \"allowed\": [\"gt1l\", \"gt1r\", \"gt2l\", \"gt2r\", \"gt3l\", \"gt3r\"]\n", - " }]\n", - " )" - ] - }, - { - "cell_type": "markdown", - "id": "bab9c949", - "metadata": {}, - "source": [ - "#### Saving your catalog\n", - "If you create a highly customized ICESat-2 catalog, you can use Intake's `save` to export it as a .yml file.\n", - "\n", - "Don't forget you can easily use an existing catalog (such as this highly customized one you just made) to read in your data with `reader = ipx.Read(filepath, pattern, catalog)` (so it's as easy as re-creating your reader object with your modified catalog)." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "30f0122a", - "metadata": {}, - "outputs": [], - "source": [ - "catpath = path_root + 'test_catalog.yml'\n", - "cat.save(catpath)" - ] - }, - { - "cell_type": "markdown", - "id": "1b0cb477", - "metadata": {}, - "source": [ - "#### Credits\n", - "* original notebook by: Jessica Scheick\n", - "* notebook contributors: Wei Ji and Tian\n", - "* templates for default ICESat-2 Intake catalogs from: [Wei Ji](https://github.com/icesat2py/icepyx/issues/106) and [Tian](https://github.com/icetianli/ICESat2_xarray)." - ] + "source": [] } ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "general", "language": "python", - "name": "python3" + "name": "general" }, "language_info": { "codemirror_mode": { @@ -670,7 +555,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/doc/source/example_notebooks/IS2_data_variables.ipynb b/doc/source/example_notebooks/IS2_data_variables.ipynb index 568027550..3ac1f99fe 100644 --- a/doc/source/example_notebooks/IS2_data_variables.ipynb +++ b/doc/source/example_notebooks/IS2_data_variables.ipynb @@ -120,12 +120,16 @@ ] }, { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, "source": [ - "region_a.earthdata_login()" + "```{admonition} Important Authentication Update\n", + "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods described in the [ICESat-2 Data Access Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) example. 
The `.earthdata_login()` function is still available for backwards compatibility.\n", + "\n", + "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. `region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n", + "```" ] }, { @@ -773,7 +777,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/doc/source/example_notebooks/IS2_data_visualization.ipynb b/doc/source/example_notebooks/IS2_data_visualization.ipynb index 8b2f7b05f..ec87829c2 100644 --- a/doc/source/example_notebooks/IS2_data_visualization.ipynb +++ b/doc/source/example_notebooks/IS2_data_visualization.ipynb @@ -193,7 +193,6 @@ "metadata": {}, "outputs": [], "source": [ - "region.earthdata_login()\n", "region.order_granules()\n", "\n", "#view a short list of order IDs\n", @@ -203,6 +202,20 @@ "region.download_granules(path)" ] }, + { + "cell_type": "markdown", + "id": "10cc8fef-7a43-41c4-ab22-6f551ad68659", + "metadata": { + "user_expressions": [] + }, + "source": [ + "```{admonition} Important Authentication Update\n", + "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods described in the [ICESat-2 Data Access Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) example. The `.earthdata_login()` function is still available for backwards compatibility.\n", + "\n", + "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. 
`region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n", + "```" + ] + }, { "cell_type": "markdown", "id": "textile-casting", @@ -253,7 +266,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" + "version": "3.10.12" } }, "nbformat": 4, diff --git a/doc/source/getting_started/install.rst b/doc/source/getting_started/install.rst index 7b7b170b4..453e95e7d 100644 --- a/doc/source/getting_started/install.rst +++ b/doc/source/getting_started/install.rst @@ -1,6 +1,6 @@ .. _`zipped file`: https://github.com/icesat2py/icepyx/archive/main.zip .. _`Fiona`: https://pypi.org/project/Fiona/ -.. |Conda install| image:: https://anaconda.org/conda-forge/icepyx/badges/installer/conda.svg +.. |Conda install| image:: https://anaconda.org/conda-forge/icepyx/badges/version.svg :target: https://anaconda.org/conda-forge/icepyx .. |Pypi install| image:: https://badge.fury.io/py/icepyx.svg @@ -14,30 +14,32 @@ Installation Quickstart ---------- -The simplest way to install icepyx is by using the -`conda `__ -package manager. The command below takes care of setting up a virtual +The simplest (and recommended) way to install icepyx is by using the +`mamba `_ package +manager (or `conda `_, +which can be used in place of any of the mamba commands shown here). 
+The command below takes care of setting up a virtual environment and installs icepyx along with all the necessary dependencies:: - conda create --name icepyx-env --channel conda-forge icepyx + mamba create --name icepyx-env --channel conda-forge icepyx To activate the virtual environment, you can do:: - conda activate icepyx-env + mamba activate icepyx-env -Using conda |Conda install| +Using mamba |Conda install| --------------------------- -If you already have a virtual conda environment set up and activated, you can +If you already have a virtual mamba/conda environment set up and activated, you can install the latest stable release of icepyx from `conda-forge `__ like so:: - conda install icepyx + mamba install icepyx To upgrade an installed version of icepyx to the latest stable release, do:: - conda update icepyx + mamba update icepyx @@ -51,7 +53,7 @@ Alternatively, you can also install icepyx using `pip `_ -Publications and Presentations +Publications About icepyx +------------------------- +Peer-reviewed publications about icepyx software + +.. bibliography:: icepyx_pubs.bib + :style: mystyle + + joss2023 + +Presentations and Materials Featuring icepyx +-------------------------------------------- +Presentations that feature or explain icepyx + +.. bibliography:: icepyx_pubs.bib + :style: mystyle + + js2022agu + 2022_IS2-HW-tutorials + js2021agu + js2020agu + 2020_IS2-HW-tutorials + js2019agu + +Publications Utilizing icepyx ------------------------------ -Peer-reviewed research that utilizes icepyx for ICESat-2 data and presentations that feature or explain icepyx +Research that utilizes icepyx for ICESat-2 data .. 
bibliography:: icepyx_pubs.bib :style: mystyle - :all: \ No newline at end of file + + Shean2023 + Eidam2022 + Leeuwen:2022 + Musthafa2022 + Sothe:2022 + Bisson:2021 + Fernando:2021 + Li:2020 diff --git a/doc/source/tracking/icepyx_pubs.bib b/doc/source/tracking/icepyx_pubs.bib index fcc5dddea..a1d945c01 100644 --- a/doc/source/tracking/icepyx_pubs.bib +++ b/doc/source/tracking/icepyx_pubs.bib @@ -1,106 +1,148 @@ % ######### materials about icepyx ################ +@article{joss2023, +doi = {10.21105/joss.04912}, +url = {https://doi.org/10.21105/joss.04912}, +year = {2023}, +publisher = {The Open Journal}, +volume = {8}, +number = {84}, +pages = {4912}, +author = {Jessica Scheick and + Wei Ji Leong and + Kelsey Bisson and + Anthony Arendt and + Shashank Bhushan and + Zachary Fair and + Norland Raphael Hagen and + Scott Henderson and + Friedrich Knuth and + Tian Li and + Zheng Liu and + Romina Piunno and + Nitin Ravinder and + Landung "Don" Setiawan and + Tyler Sutterley and + {JP} Swinski and + Anubhav}, +title = {{icepyx}: querying, obtaining, analyzing, and manipulating {ICESat-2} datasets}, +journal = {Journal of Open Source Software} +} + + +@unpublished{js2022agu, +author = {Scheick, J and Bisson, K and Fair, Z and Piunno, R and Leong, WJ and Lopez, L and Hall, S}, +note = {Invited abstract and poster. American Geophysical Union Fall Meeting, Chicago, IL, USA. 
12-16 December 2022.}, +title = {{icepyx} as an icebreaker: starting conversations and building competencies in open science}, +year = {2022}, +comment = {abstract, poster}, +doi = {10.5281/zenodo.7837428}, +} + @misc{2022_IS2-HW-tutorials, - author = {Scheick, Jessica and - Arendt, Anthony and - Haley, Charley and - Henderson, Scott and - Koh, Jane and - Setiawan, Don and - Alterman, Naomi and - Meyer, Joachim and - Cristea, Nicoleta and - Schweiger, Axel and - Barciauskas, Aimee and - Smith, Ben and - Piunno, Romina and - Shapero, Daniel and - Fair, Zachary and - Arndt, Philipp and - Leong, Wei Ji and - Sutterley, Tyler and - Snow, Tasha and - Beig, Mikala and - Besso, Hannah and - Liu, Zheng and - Joughin, Ian and - Bisson, Kelsey and - Sauthoff, Wilson}, - title = {ICESat-2 Hackweek Website}, - month = apr, - year = 2022, - note = {If you use this book, please cite it as below.}, - publisher = {Zenodo}, - version = {2022.04.15}, - doi = {10.5281/zenodo.6462479}, - url = {https://doi.org/10.5281/zenodo.6462479} +author = {Scheick, J. 
and + Arendt, Anthony and + Haley, Charley and + Henderson, Scott and + Koh, Jane and + Setiawan, Don and + Alterman, Naomi and + Meyer, Joachim and + Cristea, Nicoleta and + Schweiger, Axel and + Barciauskas, Aimee and + Smith, Ben and + Piunno, Romina and + Shapero, Daniel and + Fair, Zachary and + Arndt, Philipp and + Leong, Wei Ji and + Sutterley, Tyler and + Snow, Tasha and + Beig, Mikala and + Besso, Hannah and + Liu, Zheng and + Joughin, Ian and + Bisson, Kelsey and + Sauthoff, Wilson}, +title = {{ICESat-2 Hackweek Website}}, +month = apr, +year = 2022, +note = {If you use this book, please cite it as below.}, +publisher = {Zenodo}, +version = {2022.04.15}, +doi = {10.5281/zenodo.6462479}, +url = {https://doi.org/10.5281/zenodo.6462479} } -@misc{js2021agu, -author = {Scheick, J and Bisson, K and Li, T and Leong, WJ and Arendt, A}, +@article{js2021agu, +author = {Scheick, Jessica and Bisson, Kelsey and Li, Tian and Leong, Wei Ji and Arendt, Anthony}, note = {Abstract and poster (presented by Wei Ji Leong). American Geophysical Union Fall Meeting, New Orleans, LA, USA. 13-17 December 2021.}, title = {Collaborative Computational Resource Development around {ICESat-2} Data: the {icepyx} Community and Library}, -year = {2021}, +journal = {Earth and Space Science Open Archive}, +pages = {9}, +year = {2022}, +DOI = {10.1002/essoar.10511316.1}, +url = {https://doi.org/10.1002/essoar.10511316.1}, comment = {abstract, poster}, } @misc{js2020agu, -author = {Scheick, J and Arendt, A and Heagy, L and Paolo, F and Perez, F and Steiker, A}, +author = {Scheick, J. and Arendt, A. and Heagy, L. and Paolo, F. and Perez, F. and Steiker, A.}, note = {Abstract and eLightning (poster + presentation). American Geophysical Union Fall Meeting, virtual, USA. 
1-17 December 2020.}, -title = {{\texttt{icepyx}: Developing Community and Software Around ICESat-2 Data}}, +title = {{\texttt{icepyx}}: Developing Community and Software Around {ICESat-2 Data}}, year = {2020}, comment = {abstract, poster, talk}, } @misc{2020_IS2-HW-tutorials, - author = {Arendt, Anthony and - Scheick, Jessica and - Shean, David and - Buckley, Ellen and - Grigsby, Shane and - Haley, Charley and - Heagy, Lindsey and - Mohajerani, Yara and - Neumann, Tom and - Nilsson, Johan and - Markus, Thorsten and - Paolo, Fernando S. and - Perez, Fernando and - Petty, Alek and - Schweiger, Axel and - Smith, Benjamin and - Steiker, Amy and - Alvis, Sebastian and - Henderson, Scott and - Holschuh, Nick and - Liu, Zheng and - Sutterley, Tyler}, - title = {2020 {ICESat-2 Hackweek Tutorials}}, - month = aug, - year = 2020, - publisher = {Zenodo}, - version = {1.0.0}, - doi = {10.5281/zenodo.3966463}, - url = {https://doi.org/10.5281/zenodo.3966463}, - comment = {tutorials} +author = {Arendt, Anthony and + Scheick, Jessica and + Shean, David and + Buckley, Ellen and + Grigsby, Shane and + Haley, Charley and + Heagy, Lindsey and + Mohajerani, Yara and + Neumann, Tom and + Nilsson, Johan and + Markus, Thorsten and + Paolo, Fernando S. and + Perez, Fernando and + Petty, Alek and + Schweiger, Axel and + Smith, Benjamin and + Steiker, Amy and + Alvis, Sebastian and + Henderson, Scott and + Holschuh, Nick and + Liu, Zheng and + Sutterley, Tyler}, +title = {2020 {ICESat-2 Hackweek Tutorials}}, +month = aug, +year = 2020, +publisher = {Zenodo}, +version = {1.0.0}, +doi = {10.5281/zenodo.3966463}, +url = {https://doi.org/10.5281/zenodo.3966463}, +comment = {tutorials} } @misc{js2019agu, -author = {Scheick, J and Arendt, A and Heagy, L and Perez, F}, +author = {Scheick, J. and Arendt, A. and Heagy, L. and Perez, F.}, note = {Abstract and poster. American Geophysical Union Fall Meeting, San Francisco, California, USA. 
9-13 December 2019.}, -title = {{Introducing} icepyx, an open source {Python} library for obtaining and working with {ICESat-2} data}, +title = {Introducing {icepyx}, an open source {Python} library for obtaining and working with {ICESat-2} data}, year = {2019}, comment = {abstract,poster}, doi = {10.1002/essoar.10501423.1}, } -% ######### Articles citing icepyx ################ - +% ######### Articles/Proceedings citing icepyx ################ @article{Bisson:2021, author = {Bisson, K. M. and Cael, B. B.}, -title = {How Are Under Ice Phytoplankton Related to Sea Ice in the Southern Ocean?}, +title = {How Are Under Ice Phytoplankton Related to Sea Ice in the {Southern Ocean}?}, journal = {Geophysical Research Letters}, volume = {48}, number = {21}, @@ -110,8 +152,23 @@ @article{Bisson:2021 year = {2021} } + +@INPROCEEDINGS{Eidam2022, +author={Eidam, Emily and Walker, Catherine and + Bisson, Kelsey and Paris, Matthew and + Cooper, Lillian}, +booktitle={OCEANS 2022, Hampton Roads}, +title={Novel application of {ICESat-2 ATLAS} data to determine coastal light attenuation as a proxy for suspended particulate matter}, +year={2022}, +volume={}, +number={}, +pages={1-7}, +doi={10.1109/OCEANS47191.2022.9977084} +} + + @inProceedings{Fernando:2021, - title = {Mapping the {{Diversity}} of {{Agricultural Systems}} in the {{Cuellaje Sector}}, {{Cotacachi}}, {{Ecuador Using ATL08}} for the {{ICESat-2 Mission}} and {{Machine Learning Techniques}}}, + title = {Mapping the Diversity of Agricultural Systems in the {{Cuellaje Sector, Cotacachi, Ecuador}} Using {{ATL08}} for the {{ICESat-2 Mission}} and Machine Learning Techniques}, booktitle = {Computational {{Science}} and {{Its Applications}} \textendash{} {{ICCSA}} 2021}, author = {Fernando, Garrido}, editor = {Gervasi, Osvaldo and Murgante, Beniamino and Misra, Sanjay and Garau, Chiara and Ble{\v c}i{\'c}, Ivan and Taniar, David and Apduhan, Bernady O. and Rocha, Ana Maria A. C. 
and Tarantino, Eufemia and Torre, Carmelo Maria}, @@ -125,9 +182,19 @@ @inProceedings{Fernando:2021 langid = {english} } + +@misc{Leeuwen:2022, +author = {van Leeuwen, Gijs}, +title = {The automated retrieval of supraglacial lake depth and extent from {ICESat-2} photon clouds leveraging {DBSCAN} clustering}, +year = {2022}, +url = {https://studenttheses.uu.nl/bitstream/handle/20.500.12932/43402/MSC_thesis_ICESat2_GJvanLeeuwen.pdf?sequence=1}, +note = {Utrecht University Masters Thesis}, +} + + @Article{Li:2020, AUTHOR = {Li, T. and Dawson, G. J. and Chuter, S. J. and Bamber, J. L.}, -TITLE = {Mapping the grounding zone of Larsen~C Ice Shelf, Antarctica, from ICESat-2 laser altimetry}, +TITLE = {Mapping the grounding zone of {Larsen~C Ice Shelf, Antarctica}, from {ICESat-2} laser altimetry}, JOURNAL = {The Cryosphere}, VOLUME = {14}, YEAR = {2020}, @@ -138,21 +205,99 @@ @Article{Li:2020 } +@article{Musthafa2022, +abstract = {Forests absorb atmospheric carbon and hence play a vital role in carbon sequestration and climate regulation. Recent research emphasizes developing technology and methods to understand the carbon sequestration potential in various forest ecosystems. Forest stand height estimation is one of the crucial parameters in allometry that estimates forest biomass. An attempt is made in this study to map forest stand height in tropical and sub-tropical forests in India using recently launched spaceborne LiDAR platforms Ice Cloud and Elevation Satellite (ICESat-2) and Global Ecosystem Dynamics Investigation (GEDI). A geostatistical kriging approach is used to interpolate the forest stand height, and the generated stand height surface is validated using ground truth samples. The results showed that GEDI data performed better with an RMSE of 3.99 m and 2.62 m in tropical forests than the ICESat-2 data, which showed an RMSE of 5.71 m and 5.08 m, respectively. 
A similar pattern was observed in sub-tropical forests where GEDI modelled stand height outperformed ICESat-2 modelled stand height. This analysis demonstrates the potential of existing spaceborne LiDAR platforms in interpolating forest stand height at different forest types. Also, the research emphasizes the necessity of a high density of LiDAR footprints spread in both across- and along-track directions for accurate interpolation of forest stand height.}, +author = {Musthafa, Mohamed and Singh, Gulab and Kumar, Praveen}, +doi = {10.1007/s10661-022-10657-w}, +issn = {1573-2959}, +journal = {Environmental Monitoring and Assessment}, +number = {1}, +pages = {71}, +title = {{Comparison of forest stand height interpolation of GEDI and ICESat-2 LiDAR measurements over tropical and sub-tropical forests in India}}, +url = {https://doi.org/10.1007/s10661-022-10657-w}, +volume = {195}, +year = {2022} +} + + +% ######### Articles about ICESat-2 ################ + +@article{is2, +title = {The {Ice, Cloud, and land Elevation Satellite-2 (ICESat-2)}: Science requirements, concept, and implementation}, +journal = {Remote Sensing of Environment}, +volume = {190}, +pages = {260-273}, +year = {2017}, +issn = {0034-4257}, +doi = {10.1016/j.rse.2016.12.029}, +url = {https://www.sciencedirect.com/science/article/pii/S0034425716305089}, +author = {Thorsten Markus and Tom Neumann and Anthony Martino and Waleed Abdalati and Kelly Brunt and Beata Csatho and Sinead Farrell and Helen Fricker and Alex Gardner and David Harding and Michael Jasinski and Ron Kwok and Lori Magruder and Dan Lubin and Scott Luthcke and James Morison and Ross Nelson and Amy Neuenschwander and Stephen Palm and Sorin Popescu and CK Shum and Bob E. 
Schutz and Benjamin Smith and Yuekui Yang and Jay Zwally}, +keywords = {ICESat-2, Land ice, Sea ice, Vegetation, Climate change, Satellite mission}, +abstract = {The Ice, Cloud, and land Elevation Satellite (ICESat) mission used laser altimetry measurements to determine changes in elevations of glaciers and ice sheets, as well as sea ice thickness distribution. These measurements have provided important information on the response of the cryopshere (Earth's frozen surfaces) to changes in atmosphere and ocean condition. ICESat operated from 2003 to 2009 and provided repeat altimetry measurements not only to the cryosphere scientific community but also to the ocean, terrestrial and atmospheric scientific communities. The conclusive assessment of significant ongoing rapid changes in the Earth's ice cover, in part supported by ICESat observations, has strengthened the need for sustained, high accuracy, repeat observations similar to what was provided by the ICESat mission. Following recommendations from the National Research Council for an ICESat follow-on mission, the ICESat-2 mission is now under development for planned launch in 2018. The primary scientific aims of the ICESat-2 mission are to continue measurements of sea ice freeboard and ice sheet elevation to determine their changes at scales from outlet glaciers to the entire ice sheet, and from 10s of meters to the entire polar oceans for sea ice freeboard. ICESat carried a single beam profiling laser altimeter that produced ~70m diameter footprints on the surface of the Earth at ~150m along-track intervals. In contrast, ICESat-2 will operate with three pairs of beams, each pair separated by about 3km cross-track with a pair spacing of 90m. Each of the beams will have a nominal 17m diameter footprint with an along-track sampling interval of 0.7m. The differences in the ICESat-2 measurement concept are a result of overcoming some limitations associated with the approach used in the ICESat mission. 
The beam pair configuration of ICESat-2 allows for the determination of local cross-track slope, a significant factor in measuring elevation change for the outlet glaciers surrounding the Greenland and Antarctica coasts. The multiple beam pairs also provide improved spatial coverage. The dense spatial sampling eliminates along-track measurement gaps, and the small footprint diameter is especially useful for sea surface height measurements in the often narrow leads needed for sea ice freeboard and ice thickness retrievals. The ICESat-2 instrumentation concept uses a low energy 532nm (green) laser in conjunction with single-photon sensitive detectors to measure range. Combining ICESat-2 data with altimetry data collected since the start of the ICESat mission in 2003, such as Operation IceBridge and ESA's CryoSat-2, will yield a 15+ year record of changes in ice sheet elevation and sea ice thickness. ICESat-2 will also provide information of mountain glacier and ice cap elevations changes, land and vegetation heights, inland water elevations, sea surface heights, and cloud layering and optical thickness.} +} + + % ######### Research/Articles using (but not citing) icepyx ################ -@article{Sothe:2022, -title={Spatially Continuous Mapping of Forest Canopy Height in Canada by Combining GEDI and ICESat-2 with PALSAR and Sentinel}, -volume={14}, -ISSN={2072-4292}, -url={http://dx.doi.org/10.3390/rs14205158}, -DOI={10.3390/rs14205158}, -number={20}, -journal={Remote Sensing}, -publisher={MDPI AG}, -author={Sothe, Camile and Gonsamo, Alemu and Lourenço, Ricardo B. and Kurz, Werner A. 
and Snider, James}, -year={2022}, -month={Oct}, +@article{Sothe:2022, +title={Spatially Continuous Mapping of Forest Canopy Height in {Canada} by Combining {GEDI} and {ICESat-2} with {PALSAR} and {Sentinel}}, +volume={14}, +ISSN={2072-4292}, +url={http://dx.doi.org/10.3390/rs14205158}, +DOI={10.3390/rs14205158}, +number={20}, +journal={Remote Sensing}, +publisher={MDPI AG}, +author={Sothe, Camile and Gonsamo, Alemu and Lourenço, Ricardo B. and Kurz, Werner A. and Snider, James}, +year={2022}, +month={Oct}, pages={5158}, note={Attribution to icepyx from 15 Oct 2022 Twitter post, https://twitter.com/rblourenco/status/1581320878511382528} } + + +% ######### Related Software ################ + + +@article{Shean2023, +doi = {10.21105/joss.04982}, +url = {https://doi.org/10.21105/joss.04982}, +year = {2023}, +publisher = {The Open Journal}, +volume = {8}, +number = {81}, +pages = {4982}, +author = {David Shean and J.p. Swinski and + Ben Smith and Tyler Sutterley and + Scott Henderson and Carlos Ugarte and + Eric Lidwa and Thomas Neumann}, +title = {{SlideRule}: Enabling rapid, scalable, open science for the {NASA ICESat-2} mission and beyond}, +journal = {Journal of Open Source Software} +} + + +@misc{SR, + author = {JP Swinski and + Eric Lidwa and + Tyler Sutterley and + David Shean and + Joseph H Kennedy and + Scott Henderson}, + title = {{ICESat2-SlideRule}/sliderule: v2.1.0}, + month = mar, + year = 2023, + publisher = {Zenodo}, + version = {v2.1.0}, + doi = {10.5281/zenodo.7705009}, + url = {https://doi.org/10.5281/zenodo.7705009} +} + +@misc{OA, + author = {Khalsa, S.J.S. and Borsa, A. and Nandigam, V. 
and others}, + title = {{OpenAltimetry} - rapid analysis and visualization of Spaceborne altimeter data}, + year = 2020, + publisher = {Earth Sci Inform}, + doi = {10.1007/s12145-020-00520-2}, + url = {https://openaltimetry.org/} +} \ No newline at end of file diff --git a/doc/source/tracking/paper.md b/doc/source/tracking/paper.md new file mode 100644 index 000000000..7a0e63690 --- /dev/null +++ b/doc/source/tracking/paper.md @@ -0,0 +1,160 @@ +--- +title: 'icepyx: querying, obtaining, analyzing, and manipulating ICESat-2 datasets' +tags: + - Python + - ICESat-2 + - LiDAR + - elevation + - community + - cloud + +authors: + - name: Jessica Scheick^[Corresponding author] # note this makes a footnote + orcid: 0000-0002-3421-4459 + affiliation: 1 + - name: Wei Ji Leong + orcid: 0000-0003-2354-1988 + affiliation: 2 + - name: Kelsey Bisson + orcid: 0000-0003-4230-3467 + affiliation: 3 + - name: Anthony Arendt + orcid: 0000-0003-0429-6905 + affiliation: 4 + - name: Shashank Bhushan + orcid: 0000-0003-3712-996X + affiliation: 4 + - name: Zachary Fair + orcid: 0000-0002-6047-1723 + affiliation: 5 + - name: Norland Raphael Hagen + orcid: 0000-0003-1994-1153 + affiliation: 6 + - name: Scott Henderson + orcid: 0000-0003-0624-4965 + affiliation: 4 + - name: Friedrich Knuth + orcid: 0000-0003-1645-1984 + affiliation: 4 + - name: Tian Li + orcid: 0000-0002-1577-4004 + affiliation: 7 + - name: Zheng Liu + orcid: 0000-0003-4132-8136 + affiliation: 4 + - name: Romina Piunno + orcid: 0009-0000-1144-0915 + affiliation: 8 + - name: Nitin Ravinder + affiliation: 9 + - name: Landung "Don" Setiawan + orcid: 0000-0002-1624-2667 + affiliation: 4 + - name: Tyler Sutterley + orcid: 0000-0002-6964-1194 + affiliation: 4 + - name: JP Swinski + affiliation: 5 + - name: Anubhav + orcid: 0000-0003-4017-2862 + affiliation: 10 + +# Note: first three authors are driving the publication. Additional contributors/authors are listed in alphabetical order by last name. 
Anyone who also contributes substantially to preparing the JOSS submission will be moved into ABC order after the first three and before the "non-publication" contributors to icepyx. Non-responsive coauthors will be removed from the list since their permission to be included was not granted. + +affiliations: + - name: University of New Hampshire, USA + index: 1 + - name: Development Seed, USA + index: 2 + - name: Oregon State University, USA + index: 3 + - name: University of Washington, USA + index: 4 + - name: NASA Goddard Space Flight Center, USA + index: 5 + - name: CarbonPlan, USA + index: 6 + - name: University of Bristol, UK + index: 7 + - name: University of Toronto, Canada + index: 8 + - name: University of Leeds, UK + index: 9 + - name: University of Maryland, College Park, USA + index: 10 +date: 23 September 2022 +bibliography: icepyx_pubs.bib + +# Optional fields if submitting to a AAS journal too, see this blog post: +# https://blog.joss.theoj.org/2018/12/a-new-collaboration-with-aas-publishing +aas-doi: +aas-journal: +--- + +# Summary + +icepyx is both a software library and a community composed of ICESat-2 (NASA satellite) data users, developers, maintainers, and the scientific community. +We are working together to develop a shared library of resources - including existing resources, new code, tutorials, and use-cases/examples - that simplify the process of querying, obtaining, analyzing, and manipulating ICESat-2 datasets to enable scientific discovery. + +# Statement of need + +icepyx aims to provide a clearinghouse for code, functionality to improve interoperability, documentation, examples, and educational resources that tackle disciplinary research questions while minimizing the amount of repeated effort across groups utilizing similar datasets. +icepyx also hopes to foster collaboration, open-science practices, and reproducible workflows by integrating and sharing resources. 
+ +The Ice, Cloud, and Land Elevation Satellite-2 (ICESat-2) [@is2] was launched by NASA in September 2018. +The laser altimeter on board the satellite emits green light to the Earth's surface and measures the time until each pulse is returned to the satellite's sensors. +This information is used to determine the surface height of the land, ice, snow, trees, water, clouds, etc. that the satellite is passing over. +The instrument provides close to 500 GB of data per day, allowing scientists to investigate the surface height of Earth's features in unprecedented detail. + +icepyx began during the cryosphere-themed ICESat-2 Hackweek at the University of Washington in June 2019. +At the event, there was a clear need for a collaborative, shared community space that combined and generalized the tools and materials written by past, present, and future Hackweek participants, ICESat-2 Science Team members, and the data user community. +A unified framework of code and documented examples for downloading, reading, and visualizing ICESat-2 data that is well tested makes it more accessible for everyone to use. +The library and community continue to grow and evolve, adding new features and building scientific literacy in open-science, cloud computing, and collaborative development best practices. + +icepyx is now a foundational tool for accessing and working with ICESat-2 data, responsible for nearly a quarter of all NASA data center granule downloads in 2022. The library is complemented by a series of other specialized tools for interacting with and obtaining ICESat-2 data. These include OpenAltimetry [@OA], a browser-based web tool to visualize and download selected ICESat and ICESat-2 surface heights, SlideRule [@SR], a server-side framework to create cloud-based, on-demand customized data processing for ICESat-2 data, and multiple product-focused tools for scientific data analysis (e.g. PhoREAL, PhotonLabeler). 
These tools are described in more detail in the icepyx documentation's [ICESat-2 Resource Guide](https://icepyx.readthedocs.io/en/latest/community/resources.html). + +icepyx is also featured in multiple scientific publications [@Bisson:2021; @Fernando:2021; @Li:2020], presentations [@js2021agu; @js2020agu; @js2019agu], and educational events/Hackweeks [@2022_IS2-HW-tutorials; @2020_IS2-HW-tutorials]. + +# Acknowledgements + +We acknowledge funding support from NASA and the University of Washington eScience Institute. +Anthony Arendt, Fernando Perez, Lindsey Heagy, and the Pangeo team provided invaluable support and guidance in establishing this library and welcoming us to the open-source community. +Amy Steiker, Mikala Beig, Nick Kotlinski, Luis Lopez, and many others at the National Snow and Ice Data Center (NSIDC) provide technical support and data access guidance. +The icepyx contributors list also includes many wonderful folks who shared ideas, provided mentoring, embraced the opportunity to engage in open-science practices while working with ICESat-2 data products, and contributed to icepyx. 
+ + + + + + +# References diff --git a/doc/source/tracking/pypistats/downloads.svg b/doc/source/tracking/pypistats/downloads.svg index 89bcdb73a..9263dcd5d 100644 --- a/doc/source/tracking/pypistats/downloads.svg +++ b/doc/source/tracking/pypistats/downloads.svg @@ -6,11 +6,11 @@ - 2023-02-02T16:32:01.484871 + 2023-08-01T20:56:59.425644 image/svg+xml - Matplotlib v3.6.3, https://matplotlib.org/ + Matplotlib v3.7.2, https://matplotlib.org/ @@ -41,12 +41,12 @@ z - - + @@ -205,12 +205,12 @@ z - + - + - + - + @@ -283,12 +283,12 @@ z - + - + - + - + + + + + + + + + + + + + + + + + + + + + + + + @@ -482,895 +504,1011 @@ z - + - - + - + - + - - - - - - - - - - - - - - - + - - - - - - + + + - + - + - - - - - - - - - - - - - - - - - + + + + - - + +L 200.903743 123.12 +L 201.40107 120.018462 +L 201.898396 123.12 +L 202.395722 122.344615 +L 202.893048 123.12 +L 203.390374 120.018462 +L 203.887701 121.569231 +L 204.385027 122.344615 +L 204.882353 122.344615 +L 205.379679 120.793846 +L 205.877005 122.344615 +L 206.374332 121.569231 +L 206.871658 121.569231 +L 207.368984 122.344615 +L 207.86631 122.344615 +L 208.363636 123.12 +L 208.860963 118.467692 +L 209.358289 122.344615 +L 209.855615 122.344615 +L 210.352941 123.12 +L 210.850267 122.344615 +L 211.347594 123.12 +L 211.84492 123.12 +L 212.839572 121.569231 +L 213.336898 115.366154 +L 213.834225 100.633846 +L 214.331551 117.692308 +L 214.828877 123.12 +L 215.326203 122.344615 +L 215.823529 118.467692 +L 216.320856 120.018462 +L 216.818182 119.243077 +L 217.315508 113.04 +L 217.812834 123.12 +L 218.31016 122.344615 +L 218.807487 120.793846 +L 219.304813 118.467692 +L 219.802139 115.366154 +L 220.299465 106.061538 +L 220.796791 112.264615 +L 221.294118 122.344615 +L 221.791444 97.532308 +L 222.28877 111.489231 +L 222.786096 117.692308 +L 223.283422 113.04 +L 223.780749 120.018462 +L 224.278075 113.04 +L 224.775401 113.815385 +L 225.272727 112.264615 +L 225.770053 106.061538 +L 226.26738 121.569231 +L 226.764706 118.467692 +L 
227.262032 121.569231 +L 227.759358 123.12 +L 228.256684 120.018462 +L 228.754011 123.12 +L 229.251337 121.569231 +L 229.748663 123.12 +L 230.245989 122.344615 +L 230.743316 113.04 +L 231.240642 121.569231 +L 231.737968 119.243077 +L 232.235294 120.793846 +L 232.73262 120.018462 +L 233.229947 123.12 +L 233.727273 118.467692 +L 234.224599 120.018462 +L 234.721925 122.344615 +L 235.219251 119.243077 +L 235.716578 123.12 +L 236.213904 121.569231 +L 236.71123 123.12 +L 237.208556 120.793846 +L 237.705882 122.344615 +L 238.203209 120.793846 +L 238.700535 120.018462 +L 239.197861 122.344615 +L 240.192513 122.344615 +L 240.68984 120.793846 +L 241.187166 120.018462 +L 241.684492 106.836923 +L 242.181818 108.387692 +L 242.679144 121.569231 +L 243.176471 122.344615 +L 243.673797 119.243077 +L 244.171123 120.018462 +L 244.668449 118.467692 +L 245.165775 122.344615 +L 245.663102 123.12 +L 246.160428 119.243077 +L 246.657754 118.467692 +L 247.15508 116.141538 +L 247.652406 122.344615 +L 248.149733 120.018462 +L 248.647059 122.344615 +L 249.144385 122.344615 +L 249.641711 123.12 +L 250.636364 120.018462 +L 251.631016 123.12 +L 252.128342 121.569231 +L 252.625668 122.344615 +L 253.122995 118.467692 +L 253.620321 116.916923 +L 254.117647 118.467692 +L 254.614973 119.243077 +L 255.112299 117.692308 +L 255.609626 111.489231 +L 256.106952 119.243077 +L 256.604278 120.793846 +L 257.101604 120.018462 +L 257.59893 117.692308 +L 258.096257 112.264615 +L 258.593583 122.344615 +L 259.090909 121.569231 +L 259.588235 123.12 +L 260.085561 116.916923 +L 260.582888 117.692308 +L 261.080214 120.793846 +L 261.57754 116.141538 +L 262.074866 114.590769 +L 262.572193 122.344615 +L 263.069519 122.344615 +L 263.566845 116.141538 +L 264.064171 102.184615 +L 264.561497 113.04 +L 265.058824 119.243077 +L 265.55615 120.018462 +L 266.053476 120.018462 +L 267.048128 121.569231 +L 267.545455 119.243077 +L 268.042781 119.243077 +L 268.540107 122.344615 +L 269.037433 122.344615 +L 269.534759 121.569231 +L 
270.032086 122.344615 +L 270.529412 117.692308 +L 271.026738 117.692308 +L 271.524064 119.243077 +L 272.02139 116.916923 +L 272.518717 111.489231 +L 273.016043 117.692308 +L 273.513369 117.692308 +L 274.010695 121.569231 +L 274.508021 121.569231 +L 275.005348 118.467692 +L 275.502674 123.12 +L 276 109.938462 +L 276.497326 108.387692 +L 276.994652 119.243077 +L 277.491979 120.018462 +L 277.989305 113.04 +L 278.983957 121.569231 +L 279.481283 120.018462 +L 279.97861 123.12 +L 280.475936 116.916923 +L 280.973262 106.836923 +L 281.470588 122.344615 +L 281.967914 121.569231 +L 282.962567 123.12 +L 283.459893 120.793846 +L 283.957219 108.387692 +L 284.454545 117.692308 +L 284.951872 122.344615 +L 285.449198 122.344615 +L 285.946524 111.489231 +L 286.44385 121.569231 +L 286.941176 122.344615 +L 287.438503 121.569231 +L 289.427807 121.569231 +L 289.925134 120.018462 +L 290.42246 123.12 +L 290.919786 122.344615 +L 291.417112 122.344615 +L 291.914439 123.12 +L 292.411765 123.12 +L 292.909091 122.344615 +L 293.406417 123.12 +L 293.903743 119.243077 +L 294.40107 122.344615 +L 294.898396 120.018462 +L 295.395722 120.018462 +L 295.893048 118.467692 +L 296.390374 120.018462 +L 296.887701 123.12 +L 297.385027 121.569231 +L 298.379679 121.569231 +L 298.877005 123.12 +L 299.374332 122.344615 +L 299.871658 122.344615 +L 300.368984 120.793846 +L 300.86631 122.344615 +L 302.358289 122.344615 +L 302.855615 95.981538 +L 303.352941 120.018462 +L 303.850267 123.12 +L 304.347594 122.344615 +L 304.84492 120.793846 +L 305.342246 123.12 +L 305.839572 120.018462 +L 306.336898 122.344615 +L 306.834225 119.243077 +L 307.331551 118.467692 +L 307.828877 118.467692 +L 308.326203 117.692308 +L 308.823529 108.387692 +L 309.320856 108.387692 +L 309.818182 118.467692 +L 310.315508 120.793846 +L 310.812834 122.344615 +L 311.31016 110.713846 +L 311.807487 120.018462 +L 312.304813 122.344615 +L 312.802139 122.344615 +L 313.299465 121.569231 +L 313.796791 121.569231 +L 314.294118 122.344615 +L 314.791444 
122.344615 +L 315.28877 120.018462 +L 315.786096 123.12 +L 316.283422 122.344615 +L 316.780749 120.018462 +L 317.278075 120.018462 +L 317.775401 121.569231 +L 318.770053 108.387692 +L 319.26738 116.916923 +L 319.764706 116.141538 +L 320.262032 111.489231 +L 320.759358 114.590769 +L 321.754011 116.141538 +L 322.251337 115.366154 +L 322.748663 110.713846 +L 323.245989 113.04 +L 323.743316 120.018462 +L 324.240642 120.793846 +L 324.737968 119.243077 +L 325.235294 119.243077 +L 325.73262 116.916923 +L 326.229947 118.467692 +L 326.727273 113.815385 +L 327.224599 117.692308 +L 327.721925 118.467692 +L 328.219251 115.366154 +L 328.716578 108.387692 +L 329.213904 115.366154 +L 329.71123 118.467692 +L 330.705882 115.366154 +L 331.203209 118.467692 +L 331.700535 113.04 +L 332.197861 115.366154 +L 332.695187 118.467692 +L 333.192513 118.467692 +L 333.68984 121.569231 +L 334.187166 113.04 +L 334.684492 121.569231 +L 335.181818 120.018462 +L 335.679144 120.793846 +L 336.176471 117.692308 +L 336.673797 116.916923 +L 337.171123 104.510769 +L 337.668449 102.184615 +L 338.165775 116.141538 +L 338.663102 95.206154 +L 339.160428 121.569231 +L 339.657754 120.018462 +L 340.15508 121.569231 +L 340.652406 101.409231 +L 341.149733 102.184615 +L 342.144385 122.344615 +L 342.641711 120.018462 +L 343.139037 113.815385 +L 343.636364 120.018462 +L 344.13369 120.018462 +L 344.631016 102.184615 +L 345.128342 115.366154 +L 345.625668 117.692308 +L 346.620321 119.243077 +L 347.117647 119.243077 +L 347.614973 117.692308 +L 349.106952 122.344615 +L 349.604278 116.916923 +L 350.101604 122.344615 +L 350.59893 122.344615 +L 351.593583 120.793846 +L 352.090909 123.12 +L 352.588235 120.793846 +L 353.085561 119.243077 +L 353.582888 121.569231 +L 354.080214 122.344615 +L 354.57754 122.344615 +L 355.074866 116.916923 +L 355.572193 123.12 +L 356.069519 106.061538 +L 356.566845 121.569231 +L 357.064171 122.344615 +L 357.561497 120.018462 +L 358.058824 120.018462 +L 358.55615 120.793846 +L 359.053476 
122.344615 +L 359.550802 117.692308 +L 360.048128 121.569231 +L 360.545455 120.018462 +L 361.042781 120.793846 +L 361.540107 105.286154 +L 362.037433 121.569231 +L 362.534759 119.243077 +L 363.032086 123.12 +L 363.529412 123.12 +L 364.026738 122.344615 +L 364.524064 116.916923 +L 365.02139 122.344615 +L 365.518717 117.692308 +L 366.016043 120.018462 +L 366.513369 123.12 +L 367.010695 120.018462 +L 368.005348 120.018462 +L 368.502674 117.692308 +L 369 116.141538 +L 369.497326 120.793846 +L 369.994652 120.793846 +L 370.491979 120.018462 +L 370.989305 121.569231 +L 371.486631 121.569231 +L 371.983957 120.018462 +L 372.481283 117.692308 +L 372.97861 117.692308 +L 373.475936 119.243077 +L 373.973262 122.344615 +L 374.967914 122.344615 +L 375.465241 108.387692 +L 375.962567 75.821538 +L 376.459893 91.329231 +L 376.957219 119.243077 +L 377.454545 121.569231 +L 377.951872 120.018462 +L 378.946524 109.163077 +L 379.44385 118.467692 +L 379.941176 105.286154 +L 380.438503 88.227692 +L 380.935829 120.018462 +L 381.433155 87.452308 +L 381.930481 113.815385 +L 382.427807 122.344615 +L 382.925134 78.147692 +L 383.42246 102.96 +L 383.919786 111.489231 +L 384.417112 73.495385 +L 384.914439 119.243077 +L 385.411765 120.018462 +L 385.909091 102.184615 +L 386.406417 113.04 +L 386.903743 99.858462 +L 387.40107 113.04 +L 387.898396 120.793846 +L 388.395722 56.436923 +L 388.893048 119.243077 +L 389.390374 120.018462 +L 389.887701 120.018462 +L 390.385027 114.590769 +L 390.882353 118.467692 +L 391.379679 107.612308 +L 391.877005 118.467692 +L 392.374332 120.793846 +L 392.871658 102.96 +L 393.368984 102.96 +L 393.86631 114.590769 +L 394.363636 100.633846 +L 394.860963 119.243077 +L 395.358289 120.018462 +L 395.855615 102.96 +L 396.352941 123.12 +L 396.850267 121.569231 +L 397.347594 120.793846 +L 397.84492 119.243077 +L 398.342246 121.569231 +L 398.839572 120.793846 +L 399.336898 119.243077 +L 399.834225 115.366154 +L 400.331551 117.692308 +L 400.828877 110.713846 +L 401.823529 122.344615 
+L 402.320856 123.12 +L 402.818182 116.916923 +L 403.812834 121.569231 +L 404.31016 123.12 +L 404.807487 119.243077 +L 405.304813 122.344615 +L 405.802139 119.243077 +L 406.299465 123.12 +L 407.294118 121.569231 +L 407.791444 123.12 +L 408.28877 123.12 +L 408.786096 121.569231 +L 409.780749 120.018462 +L 410.278075 120.018462 +L 410.775401 120.793846 +L 411.272727 120.793846 +L 411.770053 123.12 +L 412.26738 121.569231 +L 412.764706 96.756923 +L 413.262032 120.793846 +L 413.759358 122.344615 +L 414.754011 122.344615 +L 415.251337 118.467692 +L 415.748663 119.243077 +L 416.245989 110.713846 +L 416.743316 121.569231 +L 417.240642 117.692308 +L 417.737968 122.344615 +L 418.235294 123.12 +L 418.73262 120.793846 +L 419.229947 121.569231 +L 419.727273 117.692308 +L 420.224599 119.243077 +L 420.721925 121.569231 +L 421.219251 122.344615 +L 421.716578 121.569231 +L 422.213904 118.467692 +L 422.71123 120.018462 +L 423.208556 119.243077 +L 423.705882 120.793846 +L 424.203209 116.916923 +L 424.700535 118.467692 +L 425.197861 123.12 +L 425.695187 119.243077 +L 426.192513 120.018462 +L 426.68984 111.489231 +L 427.187166 122.344615 +L 427.684492 119.243077 +L 428.181818 119.243077 +L 428.679144 120.018462 +L 429.176471 112.264615 +L 429.673797 99.858462 +L 430.171123 120.018462 +L 430.668449 122.344615 +L 431.165775 117.692308 +L 431.663102 121.569231 +L 432.160428 120.793846 +L 432.657754 120.793846 +L 433.15508 123.12 +L 434.149733 121.569231 +L 434.647059 120.018462 +L 435.144385 122.344615 +L 435.641711 120.018462 +L 436.139037 122.344615 +L 436.636364 120.018462 +L 437.13369 123.12 +L 437.631016 120.018462 +L 438.128342 121.569231 +L 438.625668 121.569231 +L 439.122995 120.018462 +L 439.620321 123.12 +L 440.117647 120.018462 +L 440.614973 121.569231 +L 441.112299 116.916923 +L 441.609626 122.344615 +L 442.106952 116.141538 +L 442.604278 99.858462 +L 443.101604 122.344615 +L 443.59893 123.12 +L 444.096257 121.569231 +L 444.593583 117.692308 +L 445.090909 121.569231 +L 
446.085561 123.12 +L 446.582888 116.916923 +L 447.080214 121.569231 +L 447.57754 120.793846 +L 448.572193 108.387692 +L 449.069519 118.467692 +L 449.566845 121.569231 +L 450.064171 121.569231 +L 450.561497 117.692308 +L 451.058824 91.329231 +L 451.55615 110.713846 +L 452.053476 117.692308 +L 452.550802 120.793846 +L 453.048128 120.793846 +L 453.545455 98.307692 +L 454.042781 120.018462 +L 454.540107 122.344615 +L 455.534759 122.344615 +L 456.032086 120.793846 +L 456.529412 120.018462 +L 457.026738 121.569231 +L 457.524064 119.243077 +L 458.02139 120.793846 +L 458.518717 123.12 +L 459.016043 121.569231 +L 459.513369 118.467692 +L 460.010695 121.569231 +L 460.508021 117.692308 +L 461.005348 117.692308 +L 461.502674 119.243077 +L 462 121.569231 +L 462.497326 107.612308 +L 462.994652 120.018462 +L 463.491979 110.713846 +L 463.989305 117.692308 +L 464.486631 119.243077 +L 464.983957 111.489231 +L 465.481283 121.569231 +L 466.475936 114.590769 +L 466.973262 120.018462 +L 467.470588 116.916923 +L 467.967914 110.713846 +L 468.465241 109.938462 +L 468.962567 119.243077 +L 469.459893 105.286154 +L 469.957219 111.489231 +L 470.454545 120.793846 +L 470.951872 109.938462 +L 471.449198 122.344615 +L 471.946524 121.569231 +L 472.44385 114.590769 +L 472.941176 116.916923 +L 473.438503 101.409231 +L 473.935829 114.590769 +L 474.433155 113.815385 +L 474.930481 113.815385 +L 475.427807 115.366154 +L 475.925134 119.243077 +L 476.42246 120.018462 +L 476.919786 117.692308 +L 477.417112 119.243077 +L 477.914439 113.04 +L 478.411765 120.018462 +L 478.909091 115.366154 +L 479.406417 109.163077 +L 479.903743 115.366154 +L 480.40107 116.141538 +L 480.898396 115.366154 +L 481.395722 122.344615 +L 481.893048 117.692308 +L 482.390374 121.569231 +L 482.887701 120.793846 +L 483.385027 113.815385 +L 483.882353 113.04 +L 484.379679 101.409231 +L 484.877005 113.04 +L 485.374332 110.713846 +L 485.871658 118.467692 +L 486.368984 120.018462 +L 486.86631 119.243077 +L 487.363636 119.243077 +L 487.860963 
122.344615 +L 488.358289 119.243077 +L 488.855615 109.163077 +L 489.352941 119.243077 +L 490.347594 119.243077 +L 490.84492 122.344615 +L 491.342246 123.12 +L 491.839572 122.344615 +L 492.336898 118.467692 +L 492.834225 106.836923 +L 493.331551 120.018462 +L 493.828877 120.018462 +L 494.326203 123.12 +L 494.823529 123.12 +L 495.320856 98.307692 +L 495.818182 106.836923 +L 496.315508 117.692308 +L 496.812834 113.04 +L 497.807487 122.344615 +L 498.304813 121.569231 +L 498.802139 122.344615 +L 499.299465 116.141538 +L 499.796791 122.344615 +L 500.294118 120.793846 +L 500.791444 121.569231 +L 501.28877 123.12 +L 501.786096 100.633846 +L 502.283422 120.018462 +L 502.780749 111.489231 +L 503.278075 111.489231 +L 503.775401 114.590769 +L 504.272727 120.793846 +L 504.770053 121.569231 +L 505.26738 116.141538 +L 505.764706 118.467692 +L 506.262032 117.692308 +L 506.759358 116.141538 +L 507.256684 122.344615 +L 507.754011 120.018462 +L 508.251337 121.569231 +L 509.245989 123.12 +L 511.235294 120.018462 +L 512.229947 105.286154 +L 512.727273 118.467692 +L 513.224599 116.141538 +L 513.721925 122.344615 +L 514.219251 116.141538 +L 514.716578 123.12 +L 515.213904 121.569231 +L 516.208556 120.018462 +L 516.705882 120.793846 +L 517.203209 120.018462 +L 517.700535 118.467692 +L 518.197861 107.612308 +L 518.695187 118.467692 +L 519.192513 117.692308 +L 519.68984 106.061538 +L 520.187166 122.344615 +L 520.684492 114.590769 +L 521.181818 122.344615 +L 521.679144 118.467692 +L 522.176471 121.569231 +L 522.673797 123.12 +L 523.171123 120.793846 +L 523.668449 122.344615 +L 524.165775 120.018462 +L 524.663102 123.12 +L 525.657754 123.12 +L 526.15508 120.018462 +L 526.652406 123.12 +L 527.149733 118.467692 +L 527.647059 123.12 +L 528.144385 122.344615 +L 528.641711 120.018462 +L 529.139037 107.612308 +L 529.636364 123.12 +L 530.13369 123.12 +L 530.631016 112.264615 +L 531.128342 113.04 +L 531.625668 110.713846 +L 532.122995 113.815385 +L 532.620321 122.344615 +L 533.117647 112.264615 +L 
533.614973 120.018462 +L 534.112299 121.569231 +L 534.609626 122.344615 +L 535.106952 122.344615 +L 535.604278 123.12 +L 536.101604 122.344615 +L 536.59893 119.243077 +L 537.096257 121.569231 +L 538.090909 123.12 +L 538.588235 123.12 +L 539.085561 117.692308 +L 539.582888 118.467692 +L 540.080214 121.569231 +L 541.074866 121.569231 +L 541.572193 120.793846 +L 542.069519 120.793846 +L 542.566845 116.141538 +L 543.064171 122.344615 +L 543.561497 105.286154 +L 544.058824 118.467692 +L 544.55615 122.344615 +L 545.053476 119.243077 +L 545.550802 120.018462 +L 546.545455 113.04 +L 547.042781 118.467692 +L 547.540107 103.735385 +L 548.037433 123.12 +L 548.534759 117.692308 +L 549.529412 102.96 +L 550.026738 110.713846 +L 550.524064 82.8 +L 551.02139 116.141538 +L 551.518717 123.12 +L 552.016043 93.655385 +L 552.513369 113.04 +L 553.010695 109.163077 +L 553.508021 110.713846 +L 554.005348 117.692308 +L 554.502674 113.815385 +L 555 122.344615 +L 555.497326 106.836923 +L 555.994652 112.264615 +L 556.491979 112.264615 +L 556.989305 107.612308 +L 557.486631 108.387692 +L 557.983957 121.569231 +L 558.481283 123.12 +L 558.97861 92.104615 +L 559.475936 22.32 +L 559.973262 102.184615 +L 560.470588 106.836923 +L 560.967914 106.061538 +L 561.465241 120.018462 +L 561.962567 120.793846 +L 562.459893 92.88 +L 562.957219 73.495385 +L 563.454545 110.713846 +L 563.951872 108.387692 +L 564.449198 103.735385 +L 564.946524 120.018462 +L 565.44385 123.12 +L 565.941176 85.901538 +L 566.935829 90.553846 +L 567.930481 120.018462 +L 568.427807 121.569231 +L 568.925134 122.344615 +L 569.42246 117.692308 +L 569.919786 106.836923 +L 570.417112 113.04 +L 570.914439 90.553846 +L 571.411765 92.88 +L 571.909091 121.569231 +L 572.406417 120.018462 +L 572.903743 101.409231 +L 573.40107 115.366154 +L 573.898396 96.756923 +L 574.395722 116.141538 +L 574.893048 123.12 +L 575.390374 123.12 +L 575.887701 116.141538 +L 576.385027 105.286154 +L 576.882353 118.467692 +L 577.379679 118.467692 +L 577.877005 
121.569231 +L 578.374332 123.12 +L 578.871658 123.12 +L 579.368984 121.569231 +L 579.86631 113.815385 +L 580.363636 120.793846 +L 580.860963 123.12 +L 581.358289 99.083077 +L 581.855615 117.692308 +L 582.352941 116.141538 +L 582.850267 95.981538 +L 583.347594 114.590769 +L 583.84492 122.344615 +L 584.342246 120.793846 +L 584.839572 123.12 +L 585.336898 121.569231 +L 585.834225 100.633846 +L 586.331551 98.307692 +L 586.828877 115.366154 +L 587.326203 109.938462 +L 587.823529 116.916923 +L 588.320856 106.836923 +L 588.818182 120.793846 +L 589.315508 114.590769 +L 589.812834 121.569231 +L 590.31016 120.793846 +L 590.807487 121.569231 +L 591.304813 123.12 +L 591.802139 105.286154 +L 592.299465 111.489231 +L 592.796791 120.793846 +L 593.294118 122.344615 +L 594.786096 122.344615 +L 595.283422 119.243077 +L 595.780749 94.430769 +L 596.278075 118.467692 +L 596.775401 111.489231 +L 597.272727 118.467692 +L 597.770053 120.018462 +L 598.26738 120.793846 +L 598.764706 119.243077 +L 599.262032 121.569231 +L 599.759358 122.344615 +L 600.256684 120.018462 +L 600.754011 122.344615 +L 601.251337 109.938462 +L 601.748663 119.243077 +L 602.245989 117.692308 +L 602.743316 120.793846 +L 603.240642 95.981538 +L 603.737968 120.018462 +L 604.235294 120.793846 +L 604.73262 119.243077 +L 605.229947 120.793846 +L 605.727273 119.243077 +L 606.224599 97.532308 +L 606.721925 116.141538 +L 607.219251 118.467692 +L 607.716578 120.018462 +L 608.213904 116.141538 +L 608.71123 120.018462 +L 609.208556 120.018462 +L 609.705882 120.793846 +L 610.203209 119.243077 +L 610.700535 119.243077 +L 611.197861 120.793846 +L 611.695187 119.243077 +L 612.192513 119.243077 +L 612.68984 120.018462 +L 613.187166 108.387692 +L 613.684492 113.815385 +L 614.181818 122.344615 +L 614.679144 121.569231 +L 615.176471 120.018462 +L 615.673797 121.569231 +L 616.171123 120.018462 +L 616.668449 121.569231 +L 617.165775 116.141538 +L 617.663102 115.366154 +L 618.160428 109.938462 +L 618.657754 119.243077 +L 619.15508 
120.793846 +L 619.652406 117.692308 +L 620.149733 115.366154 +L 620.647059 118.467692 +L 621.144385 119.243077 +L 621.641711 121.569231 +L 622.139037 121.569231 +L 622.636364 116.141538 +L 622.636364 116.141538 +" clip-path="url(#p85f66bdc88)" style="fill: none; stroke: #1f77b4; stroke-width: 1.5; stroke-linecap: square"/> - - - + - + - + + diff --git a/doc/source/tracking/pypistats/downloads_data.csv b/doc/source/tracking/pypistats/downloads_data.csv index d25610f4c..17f1e78d8 100644 --- a/doc/source/tracking/pypistats/downloads_data.csv +++ b/doc/source/tracking/pypistats/downloads_data.csv @@ -940,6 +940,184 @@ with_mirrors,2023-01-28,16 with_mirrors,2023-01-30,2 with_mirrors,2023-01-31,30 with_mirrors,2023-02-01,14 +with_mirrors,2023-02-02,8 +with_mirrors,2023-02-03,32 +with_mirrors,2023-02-04,6 +with_mirrors,2023-02-05,1 +with_mirrors,2023-02-06,1 +with_mirrors,2023-02-07,8 +with_mirrors,2023-02-08,7 +with_mirrors,2023-02-09,32 +with_mirrors,2023-02-10,61 +with_mirrors,2023-02-11,32 +with_mirrors,2023-02-12,39 +with_mirrors,2023-02-13,65 +with_mirrors,2023-02-14,39 +with_mirrors,2023-02-15,118 +with_mirrors,2023-02-16,82 +with_mirrors,2023-02-17,65 +with_mirrors,2023-02-18,2 +with_mirrors,2023-02-19,6 +with_mirrors,2023-02-20,34 +with_mirrors,2023-02-21,10 +with_mirrors,2023-02-22,51 +with_mirrors,2023-02-23,7 +with_mirrors,2023-02-24,76 +with_mirrors,2023-02-25,33 +with_mirrors,2023-02-26,8 +with_mirrors,2023-02-27,48 +with_mirrors,2023-02-28,56 +with_mirrors,2023-03-01,52 +with_mirrors,2023-03-02,102 +with_mirrors,2023-03-03,16 +with_mirrors,2023-03-04,30 +with_mirrors,2023-03-06,39 +with_mirrors,2023-03-07,72 +with_mirrors,2023-03-08,19 +with_mirrors,2023-03-09,66 +with_mirrors,2023-03-10,14 +with_mirrors,2023-03-11,42 +with_mirrors,2023-03-12,6 +with_mirrors,2023-03-13,42 +with_mirrors,2023-03-14,17 +with_mirrors,2023-03-15,60 +with_mirrors,2023-03-16,79 +with_mirrors,2023-03-17,25 +with_mirrors,2023-03-18,32 +with_mirrors,2023-03-19,3 
+with_mirrors,2023-03-20,99 +with_mirrors,2023-03-21,217 +with_mirrors,2023-03-22,77 +with_mirrors,2023-03-23,40 +with_mirrors,2023-03-24,50 +with_mirrors,2023-03-25,69 +with_mirrors,2023-03-26,99 +with_mirrors,2023-03-27,73 +with_mirrors,2023-03-28,69 +with_mirrors,2023-03-29,56 +with_mirrors,2023-03-30,21 +with_mirrors,2023-03-31,93 +with_mirrors,2023-04-01,50 +with_mirrors,2023-04-02,3 +with_mirrors,2023-04-03,57 +with_mirrors,2023-04-04,122 +with_mirrors,2023-04-05,76 +with_mirrors,2023-04-06,38 +with_mirrors,2023-04-07,63 +with_mirrors,2023-04-08,28 +with_mirrors,2023-04-09,10 +with_mirrors,2023-04-10,12 +with_mirrors,2023-04-11,26 +with_mirrors,2023-04-12,18 +with_mirrors,2023-04-13,65 +with_mirrors,2023-04-14,42 +with_mirrors,2023-04-15,12 +with_mirrors,2023-04-16,35 +with_mirrors,2023-04-17,25 +with_mirrors,2023-04-18,39 +with_mirrors,2023-04-19,49 +with_mirrors,2023-04-20,35 +with_mirrors,2023-04-21,11 +with_mirrors,2023-04-22,89 +with_mirrors,2023-04-23,4 +with_mirrors,2023-04-24,41 +with_mirrors,2023-04-25,77 +with_mirrors,2023-04-26,48 +with_mirrors,2023-04-27,17 +with_mirrors,2023-04-28,38 +with_mirrors,2023-04-29,32 +with_mirrors,2023-04-30,33 +with_mirrors,2023-05-02,77 +with_mirrors,2023-05-03,2 +with_mirrors,2023-05-04,13 +with_mirrors,2023-05-05,4 +with_mirrors,2023-05-06,1 +with_mirrors,2023-05-07,8 +with_mirrors,2023-05-08,36 +with_mirrors,2023-05-09,43 +with_mirrors,2023-05-10,12 +with_mirrors,2023-05-11,86 +with_mirrors,2023-05-12,28 +with_mirrors,2023-05-13,39 +with_mirrors,2023-05-14,66 +with_mirrors,2023-05-15,1 +with_mirrors,2023-05-16,67 +with_mirrors,2023-05-17,63 +with_mirrors,2023-05-18,47 +with_mirrors,2023-05-19,23 +with_mirrors,2023-05-20,78 +with_mirrors,2023-05-21,59 +with_mirrors,2023-05-22,9 +with_mirrors,2023-05-23,53 +with_mirrors,2023-05-24,41 +with_mirrors,2023-05-25,45 +with_mirrors,2023-05-26,5 +with_mirrors,2023-05-27,60 +with_mirrors,2023-05-28,3 +with_mirrors,2023-05-29,1 +with_mirrors,2023-05-30,30 
+with_mirrors,2023-05-31,18 +with_mirrors,2023-06-01,4 +with_mirrors,2023-06-02,2 +with_mirrors,2023-06-03,2 +with_mirrors,2023-06-04,9 +with_mirrors,2023-06-05,46 +with_mirrors,2023-06-06,69 +with_mirrors,2023-06-07,38 +with_mirrors,2023-06-08,9 +with_mirrors,2023-06-09,78 +with_mirrors,2023-06-10,7 +with_mirrors,2023-06-11,98 +with_mirrors,2023-06-12,4 +with_mirrors,2023-06-13,10 +with_mirrors,2023-06-14,19 +with_mirrors,2023-06-15,2 +with_mirrors,2023-06-16,5 +with_mirrors,2023-06-17,72 +with_mirrors,2023-06-18,18 +with_mirrors,2023-06-19,35 +with_mirrors,2023-06-20,39 +with_mirrors,2023-06-21,43 +with_mirrors,2023-06-22,38 +with_mirrors,2023-06-23,57 +with_mirrors,2023-06-24,4 +with_mirrors,2023-06-25,6 +with_mirrors,2023-06-26,14 +with_mirrors,2023-06-27,12 +with_mirrors,2023-06-28,97 +with_mirrors,2023-06-29,10 +with_mirrors,2023-06-30,11 +with_mirrors,2023-07-01,5 +with_mirrors,2023-07-02,30 +with_mirrors,2023-07-03,36 +with_mirrors,2023-07-04,42 +with_mirrors,2023-07-05,4 +with_mirrors,2023-07-06,6 +with_mirrors,2023-07-07,119 +with_mirrors,2023-07-08,7 +with_mirrors,2023-07-09,68 +with_mirrors,2023-07-10,14 +with_mirrors,2023-07-11,28 +with_mirrors,2023-07-12,69 +with_mirrors,2023-07-13,17 +with_mirrors,2023-07-14,4 +with_mirrors,2023-07-15,5 +with_mirrors,2023-07-16,129 +with_mirrors,2023-07-17,36 +with_mirrors,2023-07-18,5 +with_mirrors,2023-07-19,3 +with_mirrors,2023-07-20,26 +with_mirrors,2023-07-21,12 +with_mirrors,2023-07-22,18 +with_mirrors,2023-07-23,22 +with_mirrors,2023-07-24,4 +with_mirrors,2023-07-25,8 +with_mirrors,2023-07-26,11 +with_mirrors,2023-07-27,70 +with_mirrors,2023-07-28,6 +with_mirrors,2023-07-29,3 +with_mirrors,2023-07-30,3 +with_mirrors,2023-07-31,72 without_mirrors,2020-06-18,22 without_mirrors,2020-06-19,14 without_mirrors,2020-06-21,4 @@ -1787,3 +1965,177 @@ without_mirrors,2023-01-27,2 without_mirrors,2023-01-30,2 without_mirrors,2023-01-31,1 without_mirrors,2023-02-01,2 +without_mirrors,2023-02-02,6 
+without_mirrors,2023-02-03,3 +without_mirrors,2023-02-04,2 +without_mirrors,2023-02-05,1 +without_mirrors,2023-02-06,1 +without_mirrors,2023-02-07,8 +without_mirrors,2023-02-08,7 +without_mirrors,2023-02-09,3 +without_mirrors,2023-02-10,3 +without_mirrors,2023-02-11,3 +without_mirrors,2023-02-12,4 +without_mirrors,2023-02-13,4 +without_mirrors,2023-02-14,10 +without_mirrors,2023-02-15,2 +without_mirrors,2023-02-16,24 +without_mirrors,2023-02-17,7 +without_mirrors,2023-02-18,2 +without_mirrors,2023-02-19,6 +without_mirrors,2023-02-20,5 +without_mirrors,2023-02-21,10 +without_mirrors,2023-02-22,14 +without_mirrors,2023-02-23,7 +without_mirrors,2023-02-24,26 +without_mirrors,2023-02-25,1 +without_mirrors,2023-02-26,8 +without_mirrors,2023-02-27,18 +without_mirrors,2023-02-28,27 +without_mirrors,2023-03-01,17 +without_mirrors,2023-03-02,53 +without_mirrors,2023-03-03,10 +without_mirrors,2023-03-04,1 +without_mirrors,2023-03-06,39 +without_mirrors,2023-03-07,14 +without_mirrors,2023-03-08,19 +without_mirrors,2023-03-09,17 +without_mirrors,2023-03-10,8 +without_mirrors,2023-03-11,13 +without_mirrors,2023-03-12,2 +without_mirrors,2023-03-13,22 +without_mirrors,2023-03-14,15 +without_mirrors,2023-03-15,15 +without_mirrors,2023-03-16,21 +without_mirrors,2023-03-17,20 +without_mirrors,2023-03-18,3 +without_mirrors,2023-03-19,1 +without_mirrors,2023-03-20,41 +without_mirrors,2023-03-21,131 +without_mirrors,2023-03-22,28 +without_mirrors,2023-03-23,22 +without_mirrors,2023-03-24,23 +without_mirrors,2023-03-25,5 +without_mirrors,2023-03-26,4 +without_mirrors,2023-03-27,40 +without_mirrors,2023-03-28,65 +without_mirrors,2023-03-29,17 +without_mirrors,2023-03-30,20 +without_mirrors,2023-03-31,26 +without_mirrors,2023-04-01,5 +without_mirrors,2023-04-02,1 +without_mirrors,2023-04-03,49 +without_mirrors,2023-04-04,46 +without_mirrors,2023-04-05,43 +without_mirrors,2023-04-06,22 +without_mirrors,2023-04-07,5 +without_mirrors,2023-04-08,3 +without_mirrors,2023-04-09,2 
+without_mirrors,2023-04-10,8 +without_mirrors,2023-04-11,22 +without_mirrors,2023-04-12,14 +without_mirrors,2023-04-13,43 +without_mirrors,2023-04-14,40 +without_mirrors,2023-04-16,3 +without_mirrors,2023-04-17,5 +without_mirrors,2023-04-18,29 +without_mirrors,2023-04-19,11 +without_mirrors,2023-04-20,35 +without_mirrors,2023-04-21,10 +without_mirrors,2023-04-22,1 +without_mirrors,2023-04-23,1 +without_mirrors,2023-04-24,10 +without_mirrors,2023-04-25,24 +without_mirrors,2023-04-26,7 +without_mirrors,2023-04-27,7 +without_mirrors,2023-04-28,3 +without_mirrors,2023-04-29,1 +without_mirrors,2023-04-30,1 +without_mirrors,2023-05-02,3 +without_mirrors,2023-05-04,13 +without_mirrors,2023-05-05,4 +without_mirrors,2023-05-06,1 +without_mirrors,2023-05-08,32 +without_mirrors,2023-05-09,8 +without_mirrors,2023-05-10,10 +without_mirrors,2023-05-11,36 +without_mirrors,2023-05-12,12 +without_mirrors,2023-05-13,2 +without_mirrors,2023-05-14,4 +without_mirrors,2023-05-15,1 +without_mirrors,2023-05-16,3 +without_mirrors,2023-05-17,30 +without_mirrors,2023-05-18,33 +without_mirrors,2023-05-19,11 +without_mirrors,2023-05-21,18 +without_mirrors,2023-05-22,9 +without_mirrors,2023-05-23,22 +without_mirrors,2023-05-24,4 +without_mirrors,2023-05-25,12 +without_mirrors,2023-05-26,3 +without_mirrors,2023-05-27,4 +without_mirrors,2023-05-28,3 +without_mirrors,2023-05-29,1 +without_mirrors,2023-05-30,24 +without_mirrors,2023-05-31,16 +without_mirrors,2023-06-01,4 +without_mirrors,2023-06-02,2 +without_mirrors,2023-06-03,2 +without_mirrors,2023-06-04,2 +without_mirrors,2023-06-05,2 +without_mirrors,2023-06-06,6 +without_mirrors,2023-06-07,38 +without_mirrors,2023-06-08,7 +without_mirrors,2023-06-09,16 +without_mirrors,2023-06-10,7 +without_mirrors,2023-06-11,5 +without_mirrors,2023-06-12,4 +without_mirrors,2023-06-13,6 +without_mirrors,2023-06-14,3 +without_mirrors,2023-06-15,2 +without_mirrors,2023-06-16,5 +without_mirrors,2023-06-17,2 +without_mirrors,2023-06-18,18 
+without_mirrors,2023-06-19,6 +without_mirrors,2023-06-20,8 +without_mirrors,2023-06-21,4 +without_mirrors,2023-06-22,36 +without_mirrors,2023-06-23,5 +without_mirrors,2023-06-24,4 +without_mirrors,2023-06-25,6 +without_mirrors,2023-06-26,4 +without_mirrors,2023-06-27,6 +without_mirrors,2023-06-28,34 +without_mirrors,2023-06-29,10 +without_mirrors,2023-06-30,7 +without_mirrors,2023-07-01,5 +without_mirrors,2023-07-02,10 +without_mirrors,2023-07-03,5 +without_mirrors,2023-07-04,5 +without_mirrors,2023-07-05,4 +without_mirrors,2023-07-06,6 +without_mirrors,2023-07-07,6 +without_mirrors,2023-07-08,4 +without_mirrors,2023-07-09,6 +without_mirrors,2023-07-10,6 +without_mirrors,2023-07-11,5 +without_mirrors,2023-07-12,20 +without_mirrors,2023-07-13,13 +without_mirrors,2023-07-14,2 +without_mirrors,2023-07-15,3 +without_mirrors,2023-07-16,5 +without_mirrors,2023-07-17,3 +without_mirrors,2023-07-18,5 +without_mirrors,2023-07-19,3 +without_mirrors,2023-07-20,10 +without_mirrors,2023-07-21,11 +without_mirrors,2023-07-22,18 +without_mirrors,2023-07-23,6 +without_mirrors,2023-07-24,4 +without_mirrors,2023-07-25,8 +without_mirrors,2023-07-26,11 +without_mirrors,2023-07-27,7 +without_mirrors,2023-07-28,6 +without_mirrors,2023-07-29,3 +without_mirrors,2023-07-30,3 +without_mirrors,2023-07-31,10 diff --git a/doc/source/tracking/pypistats/sys_downloads_data.csv b/doc/source/tracking/pypistats/sys_downloads_data.csv index 4d406a8e2..c5867dc23 100644 --- a/doc/source/tracking/pypistats/sys_downloads_data.csv +++ b/doc/source/tracking/pypistats/sys_downloads_data.csv @@ -127,6 +127,34 @@ Darwin,2023-01-14,1 Darwin,2023-01-23,2 Darwin,2023-01-24,1 Darwin,2023-01-27,1 +Darwin,2023-02-10,1 +Darwin,2023-02-13,1 +Darwin,2023-02-14,2 +Darwin,2023-02-23,1 +Darwin,2023-02-26,1 +Darwin,2023-03-02,2 +Darwin,2023-03-08,1 +Darwin,2023-03-21,2 +Darwin,2023-03-29,2 +Darwin,2023-04-03,2 +Darwin,2023-04-04,1 +Darwin,2023-04-10,1 +Darwin,2023-04-14,1 +Darwin,2023-04-16,1 +Darwin,2023-04-18,4 
+Darwin,2023-04-20,1 +Darwin,2023-04-24,1 +Darwin,2023-05-04,1 +Darwin,2023-05-11,1 +Darwin,2023-05-28,1 +Darwin,2023-06-08,1 +Darwin,2023-06-15,1 +Darwin,2023-06-18,1 +Darwin,2023-06-29,1 +Darwin,2023-07-07,1 +Darwin,2023-07-10,1 +Darwin,2023-07-16,1 +Darwin,2023-07-26,1 Linux,2020-06-18,9 Linux,2020-06-19,2 Linux,2020-06-22,2 @@ -681,6 +709,143 @@ Linux,2023-01-26,2 Linux,2023-01-27,1 Linux,2023-01-30,1 Linux,2023-02-01,1 +Linux,2023-02-02,1 +Linux,2023-02-03,3 +Linux,2023-02-07,5 +Linux,2023-02-08,5 +Linux,2023-02-09,2 +Linux,2023-02-11,1 +Linux,2023-02-14,5 +Linux,2023-02-15,2 +Linux,2023-02-16,1 +Linux,2023-02-17,5 +Linux,2023-02-18,2 +Linux,2023-02-19,3 +Linux,2023-02-21,9 +Linux,2023-02-22,3 +Linux,2023-02-23,3 +Linux,2023-02-24,25 +Linux,2023-02-25,1 +Linux,2023-02-26,2 +Linux,2023-02-27,8 +Linux,2023-02-28,21 +Linux,2023-03-02,4 +Linux,2023-03-03,6 +Linux,2023-03-06,9 +Linux,2023-03-08,3 +Linux,2023-03-09,3 +Linux,2023-03-10,3 +Linux,2023-03-11,1 +Linux,2023-03-13,3 +Linux,2023-03-14,2 +Linux,2023-03-15,2 +Linux,2023-03-16,1 +Linux,2023-03-17,1 +Linux,2023-03-18,1 +Linux,2023-03-20,6 +Linux,2023-03-21,11 +Linux,2023-03-22,3 +Linux,2023-03-23,10 +Linux,2023-03-25,1 +Linux,2023-03-27,3 +Linux,2023-03-28,5 +Linux,2023-03-29,2 +Linux,2023-03-30,4 +Linux,2023-03-31,3 +Linux,2023-04-02,1 +Linux,2023-04-03,1 +Linux,2023-04-04,2 +Linux,2023-04-05,4 +Linux,2023-04-06,3 +Linux,2023-04-07,1 +Linux,2023-04-08,2 +Linux,2023-04-10,4 +Linux,2023-04-12,1 +Linux,2023-04-13,10 +Linux,2023-04-14,16 +Linux,2023-04-16,2 +Linux,2023-04-17,2 +Linux,2023-04-18,6 +Linux,2023-04-19,6 +Linux,2023-04-20,9 +Linux,2023-04-21,6 +Linux,2023-04-24,6 +Linux,2023-04-25,15 +Linux,2023-04-26,4 +Linux,2023-04-27,3 +Linux,2023-04-28,1 +Linux,2023-04-29,1 +Linux,2023-04-30,1 +Linux,2023-05-02,1 +Linux,2023-05-04,2 +Linux,2023-05-05,1 +Linux,2023-05-08,2 +Linux,2023-05-09,2 +Linux,2023-05-10,7 +Linux,2023-05-11,2 +Linux,2023-05-12,1 +Linux,2023-05-14,1 +Linux,2023-05-16,1 +Linux,2023-05-17,1 
+Linux,2023-05-19,6 +Linux,2023-05-22,6 +Linux,2023-05-23,4 +Linux,2023-05-24,2 +Linux,2023-05-25,12 +Linux,2023-05-26,1 +Linux,2023-05-30,7 +Linux,2023-05-31,12 +Linux,2023-06-01,1 +Linux,2023-06-02,1 +Linux,2023-06-03,1 +Linux,2023-06-05,1 +Linux,2023-06-06,1 +Linux,2023-06-07,22 +Linux,2023-06-08,2 +Linux,2023-06-09,4 +Linux,2023-06-10,2 +Linux,2023-06-12,2 +Linux,2023-06-13,1 +Linux,2023-06-15,1 +Linux,2023-06-16,3 +Linux,2023-06-17,1 +Linux,2023-06-20,3 +Linux,2023-06-21,4 +Linux,2023-06-22,32 +Linux,2023-06-23,1 +Linux,2023-06-25,3 +Linux,2023-06-26,2 +Linux,2023-06-27,5 +Linux,2023-06-28,4 +Linux,2023-06-29,7 +Linux,2023-06-30,5 +Linux,2023-07-01,2 +Linux,2023-07-02,3 +Linux,2023-07-03,4 +Linux,2023-07-04,4 +Linux,2023-07-05,4 +Linux,2023-07-06,4 +Linux,2023-07-07,4 +Linux,2023-07-08,2 +Linux,2023-07-09,3 +Linux,2023-07-10,4 +Linux,2023-07-11,4 +Linux,2023-07-12,3 +Linux,2023-07-13,5 +Linux,2023-07-14,1 +Linux,2023-07-16,1 +Linux,2023-07-17,1 +Linux,2023-07-19,3 +Linux,2023-07-20,3 +Linux,2023-07-21,7 +Linux,2023-07-23,2 +Linux,2023-07-24,2 +Linux,2023-07-25,4 +Linux,2023-07-26,9 +Linux,2023-07-28,5 +Linux,2023-07-29,3 +Linux,2023-07-30,2 +Linux,2023-07-31,4 Windows,2020-06-21,1 Windows,2020-06-25,1 Windows,2020-06-30,1 @@ -1122,6 +1287,130 @@ Windows,2023-01-22,2 Windows,2023-01-26,1 Windows,2023-01-30,1 Windows,2023-02-01,1 +Windows,2023-02-02,5 +Windows,2023-02-04,2 +Windows,2023-02-05,1 +Windows,2023-02-06,1 +Windows,2023-02-07,3 +Windows,2023-02-08,2 +Windows,2023-02-09,1 +Windows,2023-02-10,2 +Windows,2023-02-11,1 +Windows,2023-02-12,2 +Windows,2023-02-13,3 +Windows,2023-02-14,1 +Windows,2023-02-16,14 +Windows,2023-02-17,1 +Windows,2023-02-19,3 +Windows,2023-02-20,4 +Windows,2023-02-23,2 +Windows,2023-02-26,3 +Windows,2023-02-27,3 +Windows,2023-03-01,3 +Windows,2023-03-02,2 +Windows,2023-03-03,4 +Windows,2023-03-07,14 +Windows,2023-03-08,10 +Windows,2023-03-09,1 +Windows,2023-03-10,1 +Windows,2023-03-11,11 +Windows,2023-03-12,2 +Windows,2023-03-14,3 
+Windows,2023-03-15,7 +Windows,2023-03-16,3 +Windows,2023-03-17,2 +Windows,2023-03-19,1 +Windows,2023-03-20,1 +Windows,2023-03-21,3 +Windows,2023-03-22,3 +Windows,2023-03-23,1 +Windows,2023-03-25,1 +Windows,2023-03-26,1 +Windows,2023-03-27,6 +Windows,2023-03-28,3 +Windows,2023-03-29,2 +Windows,2023-03-30,1 +Windows,2023-04-01,3 +Windows,2023-04-03,3 +Windows,2023-04-04,7 +Windows,2023-04-05,6 +Windows,2023-04-08,1 +Windows,2023-04-09,1 +Windows,2023-04-10,3 +Windows,2023-04-11,2 +Windows,2023-04-12,3 +Windows,2023-04-13,17 +Windows,2023-04-17,1 +Windows,2023-04-18,2 +Windows,2023-04-20,1 +Windows,2023-04-21,1 +Windows,2023-04-22,1 +Windows,2023-04-25,1 +Windows,2023-04-26,2 +Windows,2023-04-27,4 +Windows,2023-04-28,1 +Windows,2023-05-02,1 +Windows,2023-05-04,9 +Windows,2023-05-05,3 +Windows,2023-05-06,1 +Windows,2023-05-09,6 +Windows,2023-05-10,3 +Windows,2023-05-11,1 +Windows,2023-05-12,5 +Windows,2023-05-14,1 +Windows,2023-05-15,1 +Windows,2023-05-16,2 +Windows,2023-05-17,13 +Windows,2023-05-18,1 +Windows,2023-05-19,2 +Windows,2023-05-22,2 +Windows,2023-05-23,2 +Windows,2023-05-24,2 +Windows,2023-05-26,1 +Windows,2023-05-27,1 +Windows,2023-05-30,3 +Windows,2023-05-31,2 +Windows,2023-06-01,2 +Windows,2023-06-05,1 +Windows,2023-06-06,3 +Windows,2023-06-07,16 +Windows,2023-06-08,3 +Windows,2023-06-09,4 +Windows,2023-06-10,2 +Windows,2023-06-11,1 +Windows,2023-06-12,1 +Windows,2023-06-13,4 +Windows,2023-06-14,2 +Windows,2023-06-16,1 +Windows,2023-06-18,1 +Windows,2023-06-20,2 +Windows,2023-06-22,2 +Windows,2023-06-23,2 +Windows,2023-06-24,4 +Windows,2023-06-25,2 +Windows,2023-06-26,1 +Windows,2023-06-27,1 +Windows,2023-06-28,2 +Windows,2023-06-30,1 +Windows,2023-07-02,1 +Windows,2023-07-04,1 +Windows,2023-07-06,2 +Windows,2023-07-07,1 +Windows,2023-07-09,2 +Windows,2023-07-12,17 +Windows,2023-07-13,3 +Windows,2023-07-14,1 +Windows,2023-07-15,1 +Windows,2023-07-16,2 +Windows,2023-07-18,4 +Windows,2023-07-20,7 +Windows,2023-07-21,1 +Windows,2023-07-22,1 
+Windows,2023-07-23,1 +Windows,2023-07-24,1 +Windows,2023-07-26,1 +Windows,2023-07-27,3 +Windows,2023-07-31,4 null,2020-06-18,12 null,2020-06-19,12 null,2020-06-21,2 @@ -1688,3 +1977,127 @@ null,2023-01-19,14 null,2023-01-21,1 null,2023-01-24,1 null,2023-01-31,1 +null,2023-02-11,1 +null,2023-02-12,2 +null,2023-02-14,2 +null,2023-02-16,9 +null,2023-02-17,1 +null,2023-02-20,1 +null,2023-02-21,1 +null,2023-02-22,11 +null,2023-02-23,1 +null,2023-02-24,1 +null,2023-02-26,2 +null,2023-02-27,7 +null,2023-02-28,6 +null,2023-03-01,14 +null,2023-03-02,45 +null,2023-03-04,1 +null,2023-03-06,30 +null,2023-03-08,5 +null,2023-03-09,13 +null,2023-03-10,4 +null,2023-03-11,1 +null,2023-03-13,19 +null,2023-03-14,10 +null,2023-03-15,6 +null,2023-03-16,17 +null,2023-03-17,17 +null,2023-03-18,2 +null,2023-03-20,34 +null,2023-03-21,115 +null,2023-03-22,22 +null,2023-03-23,11 +null,2023-03-24,23 +null,2023-03-25,3 +null,2023-03-26,3 +null,2023-03-27,31 +null,2023-03-28,57 +null,2023-03-29,11 +null,2023-03-30,15 +null,2023-03-31,23 +null,2023-04-01,2 +null,2023-04-03,43 +null,2023-04-04,36 +null,2023-04-05,33 +null,2023-04-06,19 +null,2023-04-07,4 +null,2023-04-09,1 +null,2023-04-11,20 +null,2023-04-12,10 +null,2023-04-13,16 +null,2023-04-14,23 +null,2023-04-17,2 +null,2023-04-18,17 +null,2023-04-19,5 +null,2023-04-20,24 +null,2023-04-21,3 +null,2023-04-23,1 +null,2023-04-24,3 +null,2023-04-25,8 +null,2023-04-26,1 +null,2023-04-28,1 +null,2023-05-02,1 +null,2023-05-04,1 +null,2023-05-08,30 +null,2023-05-11,32 +null,2023-05-12,6 +null,2023-05-13,2 +null,2023-05-14,2 +null,2023-05-17,16 +null,2023-05-18,32 +null,2023-05-19,3 +null,2023-05-21,18 +null,2023-05-22,1 +null,2023-05-23,16 +null,2023-05-26,1 +null,2023-05-27,3 +null,2023-05-28,2 +null,2023-05-29,1 +null,2023-05-30,14 +null,2023-05-31,2 +null,2023-06-01,1 +null,2023-06-02,1 +null,2023-06-03,1 +null,2023-06-04,2 +null,2023-06-06,2 +null,2023-06-08,1 +null,2023-06-09,8 +null,2023-06-10,3 +null,2023-06-11,4 +null,2023-06-12,1 
+null,2023-06-13,1 +null,2023-06-14,1 +null,2023-06-16,1 +null,2023-06-17,1 +null,2023-06-18,16 +null,2023-06-19,6 +null,2023-06-20,3 +null,2023-06-22,2 +null,2023-06-23,2 +null,2023-06-25,1 +null,2023-06-26,1 +null,2023-06-28,28 +null,2023-06-29,2 +null,2023-06-30,1 +null,2023-07-01,3 +null,2023-07-02,6 +null,2023-07-03,1 +null,2023-07-08,2 +null,2023-07-09,1 +null,2023-07-10,1 +null,2023-07-11,1 +null,2023-07-13,5 +null,2023-07-15,2 +null,2023-07-16,1 +null,2023-07-17,2 +null,2023-07-18,1 +null,2023-07-21,3 +null,2023-07-22,17 +null,2023-07-23,3 +null,2023-07-24,1 +null,2023-07-25,4 +null,2023-07-27,4 +null,2023-07-28,1 +null,2023-07-30,1 +null,2023-07-31,2 diff --git a/doc/source/tracking/traffic/clones.csv b/doc/source/tracking/traffic/clones.csv index ddf2b8a47..24e6bea49 100644 --- a/doc/source/tracking/traffic/clones.csv +++ b/doc/source/tracking/traffic/clones.csv @@ -624,3 +624,165 @@ _date,total_clones,unique_clones 2023-01-26,1,1 2023-01-27,30,13 2023-01-29,2,2 +2023-01-30,3,3 +2023-01-31,2,2 +2023-02-01,2,2 +2023-02-02,29,11 +2023-02-03,2,1 +2023-02-04,2,1 +2023-02-05,1,1 +2023-02-06,8,5 +2023-02-07,1,1 +2023-02-08,10,6 +2023-02-09,28,10 +2023-02-10,4,3 +2023-02-11,8,3 +2023-02-13,3,3 +2023-02-14,33,18 +2023-02-15,5,3 +2023-02-16,4,2 +2023-02-17,39,16 +2023-02-18,3,2 +2023-02-20,5,4 +2023-02-21,2,2 +2023-02-22,5,3 +2023-02-23,2,2 +2023-02-24,8,6 +2023-02-25,1,1 +2023-02-27,3,3 +2023-02-28,1,1 +2023-03-01,2,2 +2023-03-02,7,4 +2023-03-03,10,6 +2023-03-04,2,1 +2023-03-05,2,2 +2023-03-06,9,6 +2023-03-07,23,10 +2023-03-08,1,1 +2023-03-09,4,3 +2023-03-13,13,8 +2023-03-14,2,2 +2023-03-15,29,13 +2023-03-16,11,6 +2023-03-17,11,7 +2023-03-18,2,2 +2023-03-19,8,4 +2023-03-20,93,36 +2023-03-21,58,24 +2023-03-22,1,1 +2023-03-23,3,2 +2023-03-24,1,1 +2023-03-25,3,2 +2023-03-27,3,3 +2023-03-28,5,3 +2023-03-29,8,4 +2023-03-30,3,3 +2023-04-01,1,1 +2023-04-02,2,2 +2023-04-03,3,3 +2023-04-04,11,7 +2023-04-05,11,6 +2023-04-06,17,8 +2023-04-07,1,1 +2023-04-08,1,1 
+2023-04-10,2,2 +2023-04-11,24,15 +2023-04-12,16,9 +2023-04-13,1,1 +2023-04-14,3,3 +2023-04-16,2,1 +2023-04-17,45,20 +2023-04-18,41,19 +2023-04-19,1,1 +2023-04-20,3,2 +2023-04-22,1,1 +2023-04-24,5,4 +2023-04-25,1,1 +2023-04-26,1,1 +2023-04-27,3,3 +2023-04-29,1,1 +2023-05-01,3,2 +2023-05-02,8,4 +2023-05-03,29,12 +2023-05-04,1,1 +2023-05-06,1,1 +2023-05-07,1,1 +2023-05-08,4,3 +2023-05-11,1,1 +2023-05-12,8,4 +2023-05-15,4,3 +2023-05-17,38,20 +2023-05-18,1,1 +2023-05-19,30,14 +2023-05-20,3,2 +2023-05-21,2,1 +2023-05-22,24,12 +2023-05-23,2,1 +2023-05-28,3,3 +2023-05-29,3,3 +2023-05-30,4,2 +2023-06-01,4,4 +2023-06-03,6,1 +2023-06-04,7,3 +2023-06-05,3,3 +2023-06-06,11,7 +2023-06-07,8,4 +2023-06-08,10,6 +2023-06-09,2,1 +2023-06-10,2,1 +2023-06-12,8,5 +2023-06-13,5,3 +2023-06-14,8,3 +2023-06-17,25,2 +2023-06-18,32,4 +2023-06-19,3,3 +2023-06-20,4,2 +2023-06-21,4,3 +2023-06-22,3,2 +2023-06-26,3,3 +2023-07-01,3,3 +2023-07-02,1,1 +2023-07-03,4,4 +2023-07-05,1,1 +2023-07-06,1,1 +2023-07-07,4,2 +2023-07-08,2,2 +2023-07-10,3,3 +2023-07-13,1,1 +2023-07-14,2,2 +2023-07-17,9,6 +2023-07-18,7,5 +2023-07-19,1,1 +2023-07-22,1,1 +2023-07-23,2,1 +2023-07-24,3,3 +2023-07-25,1,1 +2023-07-26,1,1 +2023-07-27,1,1 +2023-07-28,16,8 +2023-07-29,1,1 +2023-07-31,21,7 +2023-08-01,7,5 +2023-08-03,6,3 +2023-08-04,2,2 +2023-08-05,3,3 +2023-08-06,3,3 +2023-08-07,3,3 +2023-08-08,23,19 +2023-08-09,18,13 +2023-08-10,15,10 +2023-08-11,4,4 +2023-08-12,1,1 +2023-08-14,21,17 +2023-08-15,7,6 +2023-08-16,3,3 +2023-08-17,2,2 +2023-08-18,10,10 +2023-08-19,4,4 +2023-08-21,4,4 +2023-08-22,16,13 +2023-08-23,9,8 +2023-08-24,19,17 +2023-08-25,2,2 +2023-08-26,2,1 +2023-08-27,2,2 diff --git a/doc/source/tracking/traffic/plots.svg b/doc/source/tracking/traffic/plots.svg index 09c33fbc6..427b06351 100644 --- a/doc/source/tracking/traffic/plots.svg +++ b/doc/source/tracking/traffic/plots.svg @@ -6,11 +6,11 @@ - 2023-01-30T00:40:12.360255 + 2023-08-28T00:36:38.038589 image/svg+xml - Matplotlib v3.6.3, https://matplotlib.org/ 
+ Matplotlib v3.7.2, https://matplotlib.org/ @@ -41,12 +41,12 @@ z - - + @@ -136,12 +136,12 @@ z - + - + - + - + @@ -224,12 +224,12 @@ z - + - + - + - + - + - + @@ -352,12 +352,12 @@ z - + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -530,17 +559,17 @@ z - + - - + - + @@ -548,12 +577,12 @@ L -3.5 0 - + - + - + @@ -563,12 +592,12 @@ L -3.5 0 - + - + - + @@ -578,12 +607,12 @@ L -3.5 0 - + - + - + @@ -593,1174 +622,1477 @@ L -3.5 0 - + +" clip-path="url(#pd3e11f5902)" style="fill: none; stroke: #1f77b4; stroke-width: 1.5; stroke-linecap: square"/> - + +" clip-path="url(#pd3e11f5902)" style="fill: none; stroke: #ff7f0e; stroke-width: 1.5; stroke-linecap: square"/> - - - + - + - + - - + - + - + - - + + - + - + @@ -2150,15 +2482,15 @@ z - - + + - + - + - + - + + - + - + - + @@ -2221,15 +2553,15 @@ z - - + + - + - + - + - + + - + - + - + @@ -2297,7 +2629,29 @@ z - + + + + + + + + + + + + + + + + + + + + + + + @@ -2310,12 +2664,12 @@ z - + - + - + @@ -2323,12 +2677,12 @@ z - + - + - + @@ -2338,12 +2692,12 @@ z - + - + - + @@ -2353,12 +2707,12 @@ z - + - + - + @@ -2368,1589 +2722,1973 @@ z - + +L 115.856134 231.910431 +L 116.348632 229.498947 +L 116.84113 244.771675 +L 117.333628 251.470239 +L 117.826125 239.412823 +L 118.318623 239.14488 +L 118.811121 235.393684 +L 119.303619 239.14488 +L 119.796117 239.948708 +L 120.288614 250.666411 +L 120.781112 248.522871 +L 121.27361 247.719043 +L 121.766108 239.948708 +L 122.258605 213.690335 +L 122.751103 227.355407 +L 123.243601 244.235789 +L 124.228597 251.202297 +L 125.213592 225.747751 +L 125.70609 246.111388 +L 126.198588 243.699904 +L 126.691086 237.537225 +L 127.183583 237.537225 +L 127.676081 249.862584 +L 128.168579 237.805167 +L 128.661077 234.321914 +L 129.153575 243.967847 +L 129.646072 243.967847 +L 130.13857 239.412823 +L 130.631068 247.183158 +L 131.123566 249.326699 +L 131.616064 227.623349 +L 132.108561 249.058756 +L 132.601059 248.254928 +L 133.093557 249.862584 +L 133.586055 247.986986 +L 
134.078553 251.470239 +L 134.57105 251.470239 +L 135.063548 229.231005 +L 135.556046 217.173589 +L 136.048544 244.235789 +L 136.541041 245.575502 +L 137.033539 227.891292 +L 137.526037 250.934354 +L 138.018535 245.30756 +L 138.511033 242.092249 +L 139.00353 246.37933 +L 139.496028 237.00134 +L 139.988526 247.183158 +L 140.481024 250.934354 +L 140.973522 251.738182 +L 141.466019 244.235789 +L 141.958517 242.628134 +L 142.451015 242.896077 +L 142.943513 241.556364 +L 143.436011 249.058756 +L 143.928508 250.934354 +L 144.421006 250.666411 +L 144.913504 247.4511 +L 145.406002 247.986986 +L 145.8985 243.164019 +L 146.390997 250.130526 +L 146.883495 251.202297 +L 147.375993 251.470239 +L 147.868491 248.522871 +L 148.360989 249.594641 +L 148.853486 250.398469 +L 149.345984 251.738182 +L 149.838482 247.4511 +L 150.33098 247.719043 +L 150.823477 250.934354 +L 151.808473 243.164019 +L 152.300971 240.752536 +L 152.793469 247.986986 +L 153.285966 249.058756 +L 153.778464 233.518086 +L 154.270962 250.398469 +L 154.76346 235.125742 +L 155.255958 236.733397 +L 155.748455 240.752536 +L 156.240953 234.589856 +L 156.733451 247.183158 +L 157.225949 235.929569 +L 157.718447 251.202297 +L 158.210944 234.857799 +L 158.703442 236.197512 +L 159.19594 228.159234 +L 159.688438 233.786029 +L 160.180936 240.752536 +L 160.673433 243.431962 +L 161.165931 248.522871 +L 161.658429 230.302775 +L 162.150927 226.015694 +L 162.643425 237.269282 +L 163.135922 235.929569 +L 163.62842 233.518086 +L 164.120918 247.719043 +L 164.613416 248.790813 +L 165.105914 231.374545 +L 165.598411 247.183158 +L 166.583407 234.321914 +L 167.568402 250.934354 +L 168.0609 247.986986 +L 168.553398 236.465455 +L 169.045896 235.929569 +L 169.538394 226.819522 +L 170.030891 242.628134 +L 170.523389 250.398469 +L 171.015887 250.666411 +L 171.508385 249.326699 +L 172.000883 243.164019 +L 172.49338 243.431962 +L 172.985878 247.183158 +L 173.478376 232.178373 +L 173.970874 248.522871 +L 174.463372 242.628134 +L 174.955869 
239.948708 +L 175.448367 241.556364 +L 175.940865 227.087464 +L 176.433363 236.733397 +L 176.925861 235.929569 +L 177.418358 240.752536 +L 177.910856 239.14488 +L 178.403354 240.216651 +L 178.895852 187.431962 +L 179.38835 246.111388 +L 179.880847 232.178373 +L 180.373345 235.661627 +L 180.865843 228.427177 +L 181.358341 242.360191 +L 181.850838 202.972632 +L 182.343336 201.364976 +L 182.835834 206.99177 +L 183.328332 230.570718 +L 183.82083 178.321914 +L 184.313327 225.211866 +L 184.805825 246.37933 +L 185.298323 245.575502 +L 186.283319 232.446316 +L 186.775816 229.231005 +L 187.268314 235.661627 +L 187.760812 225.479809 +L 188.25331 244.235789 +L 188.745808 246.111388 +L 189.238305 208.063541 +L 189.730803 233.786029 +L 190.223301 235.393684 +L 190.715799 246.647273 +L 191.208297 248.790813 +L 191.700794 248.254928 +L 192.193292 234.053971 +L 192.68579 187.967847 +L 193.178288 231.106603 +L 193.670786 235.393684 +L 194.163283 216.369761 +L 194.655781 244.771675 +L 195.148279 248.522871 +L 195.640777 245.30756 +L 196.133274 223.604211 +L 196.625772 239.412823 +L 197.11827 224.943923 +L 197.610768 203.240574 +L 198.103266 234.857799 +L 198.595763 241.824306 +L 199.088261 246.37933 +L 199.580759 180.465455 +L 200.073257 228.963062 +L 200.565755 243.967847 +L 201.058252 236.197512 +L 201.55075 237.537225 +L 202.043248 249.058756 +L 202.535746 247.183158 +L 203.028244 191.183158 +L 203.520741 184.752536 +L 204.013239 201.364976 +L 204.505737 243.431962 +L 204.998235 249.326699 +L 205.490733 237.537225 +L 205.98323 247.719043 +L 206.968226 226.015694 +L 207.460724 167.336268 +L 207.953222 223.872153 +L 208.445719 223.068325 +L 208.938217 227.623349 +L 209.430715 245.843445 +L 209.923213 246.647273 +L 210.415711 221.996555 +L 210.908208 237.00134 +L 211.400706 236.733397 +L 211.893204 227.891292 +L 212.385702 243.699904 +L 212.878199 249.326699 +L 213.370697 201.097033 +L 213.863195 238.07311 +L 214.355693 215.030048 +L 214.848191 209.135311 +L 215.340688 215.565933 +L 
215.833186 249.862584 +L 216.325684 249.862584 +L 216.818182 224.140096 +L 217.31068 234.053971 +L 217.803177 233.518086 +L 218.295675 239.680766 +L 218.788173 243.164019 +L 219.280671 248.522871 +L 219.773169 246.37933 +L 220.265666 227.355407 +L 220.758164 244.503732 +L 221.250662 241.020478 +L 221.74316 233.250144 +L 222.235658 239.412823 +L 222.728155 249.862584 +L 223.220653 249.862584 +L 223.713151 243.431962 +L 224.205649 240.752536 +L 224.698147 246.647273 +L 225.190644 241.824306 +L 225.683142 243.967847 +L 226.17564 249.326699 +L 226.668138 247.719043 +L 227.160635 232.178373 +L 227.653133 237.269282 +L 228.145631 172.69512 +L 228.638129 215.833876 +L 229.130627 244.503732 +L 229.623124 242.628134 +L 230.115622 250.130526 +L 230.60812 233.250144 +L 231.100618 235.125742 +L 231.593116 242.360191 +L 232.085613 241.556364 +L 232.578111 236.465455 +L 233.070609 249.862584 +L 233.563107 250.398469 +L 234.055605 196.274067 +L 234.548102 239.948708 +L 235.0406 245.30756 +L 235.533098 247.4511 +L 236.025596 204.312344 +L 236.518094 247.4511 +L 237.010591 248.254928 +L 237.995587 242.360191 +L 238.488085 242.628134 +L 238.980583 225.479809 +L 239.47308 244.771675 +L 239.965578 249.862584 +L 240.458076 243.967847 +L 240.950574 245.575502 +L 241.443071 183.948708 +L 241.935569 200.293206 +L 242.428067 241.020478 +L 243.413063 245.039617 +L 243.90556 244.503732 +L 244.398058 232.714258 +L 244.890556 242.628134 +L 245.383054 243.699904 +L 245.875552 223.872153 +L 246.368049 239.948708 +L 246.860547 250.398469 +L 247.353045 251.738182 +L 247.845543 245.039617 +L 248.338041 245.575502 +L 248.830538 246.915215 +L 249.323036 243.967847 +L 249.815534 246.37933 +L 250.308032 249.594641 +L 250.80053 251.202297 +L 251.293027 243.967847 +L 251.785525 243.699904 +L 252.278023 231.910431 +L 252.770521 230.302775 +L 253.263019 250.666411 +L 253.755516 246.37933 +L 254.248014 251.738182 +L 254.740512 239.680766 +L 255.23301 240.752536 +L 255.725508 243.164019 +L 256.218005 
240.484593 +L 256.710503 248.522871 +L 257.203001 245.843445 +L 257.695499 250.130526 +L 258.187996 237.537225 +L 258.680494 239.412823 +L 259.172992 244.235789 +L 259.66549 241.288421 +L 260.157988 232.446316 +L 260.650485 242.360191 +L 261.142983 248.522871 +L 261.635481 233.518086 +L 262.127979 250.398469 +L 262.620477 246.111388 +L 263.112974 244.503732 +L 263.605472 251.202297 +L 264.09797 250.934354 +L 264.590468 245.039617 +L 265.082966 245.575502 +L 265.575463 233.250144 +L 266.067961 231.642488 +L 266.560459 249.058756 +L 267.052957 249.862584 +L 267.545455 250.398469 +L 268.037952 242.628134 +L 268.53045 228.159234 +L 269.022948 236.465455 +L 269.515446 236.197512 +L 270.007944 245.30756 +L 270.500441 249.862584 +L 270.992939 245.843445 +L 271.485437 246.647273 +L 271.977935 249.862584 +L 272.470432 237.269282 +L 272.96293 247.719043 +L 273.455428 242.628134 +L 273.947926 226.015694 +L 274.440424 250.934354 +L 274.932921 248.254928 +L 275.917917 216.905646 +L 276.410415 236.733397 +L 276.902913 248.254928 +L 277.39541 247.719043 +L 277.887908 250.398469 +L 278.380406 246.647273 +L 278.872904 232.178373 +L 279.365402 230.570718 +L 279.857899 221.46067 +L 280.842895 248.522871 +L 281.335393 251.470239 +L 281.827891 248.522871 +L 282.320388 249.862584 +L 282.812886 239.948708 +L 283.305384 189.575502 +L 283.797882 229.498947 +L 284.29038 242.628134 +L 284.782877 241.020478 +L 285.275375 230.83866 +L 285.767873 230.83866 +L 286.260371 220.924785 +L 286.752868 198.417608 +L 287.245366 217.441531 +L 287.737864 246.37933 +L 288.230362 250.666411 +L 288.72286 224.675981 +L 289.215357 228.159234 +L 289.707855 212.618565 +L 290.200353 240.216651 +L 290.692851 236.197512 +L 291.185349 246.111388 +L 291.677846 248.254928 +L 292.170344 215.565933 +L 292.662842 238.608995 +L 293.15534 249.594641 +L 293.647838 223.604211 +L 294.140335 164.924785 +L 294.632833 240.484593 +L 295.125331 235.929569 +L 295.617829 227.891292 +L 296.110327 228.963062 +L 296.602824 242.896077 
+L 297.095322 232.446316 +L 297.58782 238.876938 +L 298.080318 246.915215 +L 298.572816 245.843445 +L 299.065313 235.929569 +L 299.557811 249.058756 +L 300.050309 243.431962 +L 300.542807 229.498947 +L 301.527802 250.934354 +L 302.0203 250.130526 +L 302.512798 245.039617 +L 303.005296 231.642488 +L 303.497793 240.752536 +L 303.990291 247.183158 +L 304.482789 247.986986 +L 304.975287 247.986986 +L 305.467785 248.790813 +L 305.960282 239.412823 +L 306.45278 235.661627 +L 306.945278 243.967847 +L 307.930274 237.537225 +L 308.422771 250.130526 +L 308.915269 249.862584 +L 309.407767 234.589856 +L 309.900265 232.982201 +L 310.392763 245.575502 +L 311.377758 250.934354 +L 311.870256 248.522871 +L 312.362754 247.719043 +L 312.855252 244.503732 +L 313.347749 228.69512 +L 313.840247 238.341053 +L 314.332745 238.876938 +L 314.825243 226.819522 +L 315.317741 249.862584 +L 315.810238 228.159234 +L 316.302736 237.269282 +L 316.795234 202.972632 +L 317.287732 239.948708 +L 317.780229 221.996555 +L 318.272727 247.4511 +L 318.765225 248.254928 +L 319.750221 208.599426 +L 320.242718 234.589856 +L 320.735216 237.269282 +L 321.227714 243.431962 +L 321.720212 233.250144 +L 322.21271 249.058756 +L 322.705207 217.173589 +L 323.197705 235.929569 +L 323.690203 239.948708 +L 324.182701 242.628134 +L 324.675199 242.896077 +L 325.167696 246.915215 +L 325.660194 249.058756 +L 326.152692 245.843445 +L 326.64519 248.790813 +L 327.137688 250.934354 +L 327.630185 249.594641 +L 328.122683 249.326699 +L 328.615181 246.915215 +L 329.107679 217.709474 +L 330.092674 242.092249 +L 330.585172 243.164019 +L 331.07767 249.862584 +L 331.570168 250.934354 +L 332.062665 251.738182 +L 333.047661 236.733397 +L 333.540159 241.288421 +L 334.032657 219.585072 +L 334.525154 246.915215 +L 335.017652 246.111388 +L 335.51015 250.934354 +L 336.002648 247.719043 +L 336.495146 239.412823 +L 336.987643 244.771675 +L 337.480141 242.092249 +L 337.972639 245.843445 +L 338.465137 250.666411 +L 338.957635 250.130526 +L 
339.450132 219.585072 +L 339.94263 220.120957 +L 340.435128 235.929569 +L 340.927626 247.719043 +L 341.420124 217.441531 +L 341.912621 246.915215 +L 342.405119 235.929569 +L 342.897617 212.886507 +L 343.390115 238.07311 +L 343.882613 209.939139 +L 344.37511 198.68555 +L 344.867608 232.714258 +L 345.360106 250.666411 +L 345.852604 249.862584 +L 346.345102 196.809952 +L 346.837599 208.867368 +L 347.330097 223.872153 +L 347.822595 224.675981 +L 348.315093 179.125742 +L 348.80759 243.967847 +L 349.300088 245.575502 +L 349.792586 217.441531 +L 350.285084 226.015694 +L 350.777582 242.628134 +L 351.270079 240.752536 +L 351.762577 226.283636 +L 352.255075 247.719043 +L 352.747573 249.862584 +L 353.240071 225.747751 +L 353.732568 235.929569 +L 354.225066 223.336268 +L 354.717564 232.178373 +L 355.210062 216.369761 +L 355.70256 239.680766 +L 356.195057 248.254928 +L 356.687555 222.264498 +L 357.180053 228.427177 +L 357.672551 231.910431 +L 358.165049 227.623349 +L 358.657546 185.824306 +L 359.150044 223.872153 +L 359.642542 241.288421 +L 360.13504 160.101818 +L 360.627538 200.829091 +L 361.120035 224.943923 +L 361.612533 241.288421 +L 362.105031 239.680766 +L 363.090026 247.4511 +L 363.582524 211.814737 +L 364.075022 223.068325 +L 364.56752 209.671196 +L 365.060018 213.15445 +L 365.552515 225.211866 +L 366.045013 250.130526 +L 366.537511 249.594641 +L 367.030009 213.422392 +L 367.522507 235.661627 +L 368.015004 222.53244 +L 368.507502 230.570718 +L 369 235.393684 +L 369.492498 242.896077 +L 369.984996 247.719043 +L 370.477493 229.76689 +L 370.969991 236.197512 +L 371.462489 216.637703 +L 371.954987 214.22622 +L 372.447485 242.896077 +L 372.939982 249.058756 +L 373.43248 251.738182 +L 373.924978 239.680766 +L 374.417476 237.805167 +L 374.909974 243.699904 +L 375.402471 235.661627 +L 375.894969 244.235789 +L 376.387467 249.326699 +L 376.879965 250.130526 +L 377.86496 234.589856 +L 378.357458 247.986986 +L 378.849956 247.986986 +L 379.834951 240.752536 +L 380.327449 229.498947 
+L 380.819947 239.14488 +L 381.312445 238.608995 +L 381.804943 246.37933 +L 382.789938 247.4511 +L 383.282436 250.398469 +L 383.774934 248.790813 +L 384.759929 232.178373 +L 385.252427 234.857799 +L 385.744925 242.896077 +L 386.237423 235.661627 +L 386.729921 249.862584 +L 387.222418 251.470239 +L 387.714916 243.699904 +L 388.207414 246.37933 +L 388.699912 233.786029 +L 389.19241 213.15445 +L 389.684907 244.235789 +L 390.177405 250.666411 +L 390.669903 249.326699 +L 391.162401 240.484593 +L 391.654898 236.733397 +L 392.147396 239.412823 +L 392.639894 238.341053 +L 393.132392 234.857799 +L 393.62489 250.934354 +L 394.117387 241.288421 +L 394.609885 228.159234 +L 395.102383 226.819522 +L 395.594881 245.575502 +L 396.087379 248.254928 +L 396.579876 211.278852 +L 397.072374 247.986986 +L 397.564872 250.666411 +L 398.05737 227.891292 +L 398.549868 246.111388 +L 399.042365 221.192727 +L 399.534863 229.498947 +L 400.027361 246.111388 +L 400.519859 251.470239 +L 401.012357 251.470239 +L 401.504854 249.862584 +L 401.997352 244.503732 +L 402.48985 245.843445 +L 402.982348 243.431962 +L 403.474846 251.470239 +L 403.967343 249.862584 +L 404.459841 251.738182 +L 404.952339 245.575502 +L 405.444837 247.986986 +L 405.937335 244.771675 +L 406.429832 243.164019 +L 406.92233 250.666411 +L 407.414828 251.202297 +L 407.907326 241.020478 +L 408.399823 220.656842 +L 408.892321 245.843445 +L 409.384819 246.111388 +L 409.877317 240.484593 +L 410.369815 248.254928 +L 410.862312 251.470239 +L 411.35481 247.4511 +L 411.847308 241.556364 +L 412.339806 243.699904 +L 412.832304 244.235789 +L 413.324801 249.326699 +L 413.817299 251.470239 +L 414.309797 251.202297 +L 414.802295 243.164019 +L 415.294793 243.967847 +L 415.78729 247.4511 +L 416.279788 237.537225 +L 416.772286 249.058756 +L 417.264784 250.934354 +L 417.757282 248.790813 +L 418.249779 250.934354 +L 418.742277 239.14488 +L 419.727273 227.623349 +L 420.219771 228.427177 +L 420.712268 247.719043 +L 421.204766 249.594641 +L 421.697264 
221.728612 +L 422.189762 238.07311 +L 422.682259 246.647273 +L 423.174757 243.699904 +L 423.667255 234.589856 +L 424.159753 251.202297 +L 424.652251 246.111388 +L 425.144748 238.876938 +L 425.637246 247.4511 +L 426.129744 228.159234 +L 426.622242 234.321914 +L 427.11474 250.934354 +L 427.607237 251.470239 +L 428.099735 250.130526 +L 428.592233 239.948708 +L 429.084731 241.556364 +L 429.577229 248.790813 +L 430.069726 231.106603 +L 430.562224 249.594641 +L 431.054722 251.470239 +L 431.54722 249.058756 +L 432.039718 249.058756 +L 432.532215 241.020478 +L 433.024713 247.183158 +L 433.517211 241.288421 +L 434.009709 249.326699 +L 434.502207 251.738182 +L 434.994704 248.254928 +L 435.487202 227.087464 +L 435.9797 246.915215 +L 436.472198 241.824306 +L 436.964695 247.719043 +L 437.457193 249.862584 +L 437.949691 251.470239 +L 438.442189 244.771675 +L 438.934687 215.833876 +L 439.427184 249.862584 +L 439.919682 226.551579 +L 440.41218 243.164019 +L 440.904678 251.738182 +L 441.397176 251.470239 +L 441.889673 250.130526 +L 442.382171 248.254928 +L 442.874669 238.876938 +L 443.367167 242.092249 +L 443.859665 203.240574 +L 444.352162 249.862584 +L 444.84466 249.326699 +L 445.829656 212.082679 +L 446.322154 237.805167 +L 446.814651 246.647273 +L 447.307149 247.4511 +L 447.799647 250.666411 +L 448.292145 250.934354 +L 448.784643 249.594641 +L 449.27714 237.00134 +L 449.769638 245.575502 +L 450.262136 219.317129 +L 450.754634 237.00134 +L 451.247132 240.216651 +L 451.739629 251.202297 +L 452.232127 216.905646 +L 452.724625 211.010909 +L 453.217123 238.07311 +L 453.70962 248.522871 +L 454.202118 249.058756 +L 454.694616 250.398469 +L 455.187114 243.164019 +L 455.679612 234.053971 +L 456.172109 238.876938 +L 456.664607 235.661627 +L 457.157105 235.929569 +L 457.649603 242.360191 +L 458.142101 251.470239 +L 458.634598 234.321914 +L 459.127096 247.719043 +L 459.619594 249.594641 +L 460.112092 234.857799 +L 460.60459 244.503732 +L 461.097087 249.594641 +L 461.589585 251.202297 +L 
462.082083 242.360191 +L 462.574581 241.288421 +L 463.067079 243.164019 +L 463.559576 243.699904 +L 464.052074 239.412823 +L 464.544572 251.202297 +L 465.03707 249.326699 +L 465.529568 244.503732 +L 466.022065 247.719043 +L 466.514563 244.771675 +L 467.007061 249.594641 +L 467.499559 233.518086 +L 467.992056 243.699904 +L 468.484554 250.666411 +L 468.977052 235.929569 +L 469.46955 241.020478 +L 470.454545 229.498947 +L 470.947043 232.178373 +L 471.932039 247.986986 +L 472.424537 235.929569 +L 472.917034 235.125742 +L 473.409532 232.982201 +L 473.90203 237.805167 +L 474.394528 236.465455 +L 474.887026 249.594641 +L 475.379523 249.594641 +L 475.872021 237.269282 +L 476.364519 241.020478 +L 476.857017 238.876938 +L 477.349515 245.039617 +L 477.842012 235.125742 +L 478.33451 250.398469 +L 478.827008 250.666411 +L 479.319506 241.020478 +L 479.812004 244.503732 +L 480.304501 243.164019 +L 480.796999 247.4511 +L 481.289497 247.719043 +L 481.781995 250.666411 +L 482.274492 251.202297 +L 482.76699 241.288421 +L 483.259488 250.130526 +L 483.751986 241.556364 +L 484.244484 241.288421 +L 484.736981 249.594641 +L 485.229479 251.470239 +L 485.721977 248.522871 +L 486.214475 250.130526 +L 487.19947 241.556364 +L 487.691968 249.058756 +L 488.184466 250.130526 +L 488.676964 245.039617 +L 489.169462 249.326699 +L 489.661959 230.83866 +L 490.154457 245.30756 +L 490.646955 247.4511 +L 491.139453 243.431962 +L 492.124448 251.470239 +L 492.616946 250.398469 +L 493.109444 237.537225 +L 493.601942 241.020478 +L 494.09444 245.30756 +L 494.586937 245.843445 +L 495.079435 249.326699 +L 495.571933 249.058756 +L 496.064431 247.986986 +L 496.556929 234.321914 +L 497.049426 240.216651 +L 497.541924 248.522871 +L 498.034422 250.398469 +L 499.019417 245.30756 +L 499.511915 243.164019 +L 500.004413 249.862584 +L 500.496911 245.039617 +L 500.989409 247.986986 +L 501.481906 251.738182 +L 501.974404 251.738182 +L 502.466902 251.202297 +L 502.9594 251.738182 +L 503.451898 250.666411 +L 504.436893 
251.738182 +L 504.929391 247.4511 +L 505.421889 247.986986 +L 505.914387 250.666411 +L 506.406884 230.302775 +L 506.899382 239.14488 +L 507.39188 239.14488 +L 507.884378 246.647273 +L 508.376876 250.666411 +L 508.869373 243.431962 +L 509.361871 223.872153 +L 509.854369 244.503732 +L 510.346867 243.164019 +L 510.839365 241.288421 +L 511.331862 247.986986 +L 511.82436 250.666411 +L 512.316858 250.666411 +L 512.809356 247.183158 +L 513.301853 248.254928 +L 513.794351 243.699904 +L 514.286849 244.235789 +L 514.779347 239.412823 +L 515.271845 245.575502 +L 515.764342 243.967847 +L 516.25684 223.336268 +L 516.749338 213.958278 +L 517.241836 246.37933 +L 517.734334 245.039617 +L 518.226831 234.589856 +L 518.719329 248.522871 +L 519.211827 249.594641 +L 519.704325 226.015694 +L 520.196823 249.326699 +L 520.68932 246.37933 +L 521.181818 237.805167 +L 521.674316 249.058756 +L 522.166814 250.666411 +L 522.659312 241.556364 +L 523.151809 241.556364 +L 523.644307 243.431962 +L 524.136805 239.680766 +L 524.629303 226.819522 +L 525.121801 230.034833 +L 525.614298 251.470239 +L 526.106796 249.862584 +L 526.599294 243.431962 +L 527.091792 229.498947 +L 527.584289 243.164019 +L 528.076787 239.14488 +L 528.569285 227.891292 +L 529.061783 248.522871 +L 529.554281 248.790813 +L 530.046778 248.254928 +L 530.539276 240.216651 +L 531.031774 246.111388 +L 531.524272 240.752536 +L 532.01677 248.522871 +L 532.509267 245.30756 +L 533.001765 248.790813 +L 533.494263 232.178373 +L 533.986761 243.164019 +L 534.479259 241.020478 +L 534.971756 228.69512 +L 535.464254 220.120957 +L 535.956752 249.326699 +L 536.44925 244.771675 +L 536.941748 234.321914 +L 537.434245 237.00134 +L 538.419241 244.503732 +L 538.911739 246.37933 +L 539.404237 249.594641 +L 539.896734 250.934354 +L 540.389232 245.039617 +L 540.88173 228.963062 +L 541.374228 231.910431 +L 541.866726 233.518086 +L 542.359223 234.589856 +L 542.851721 245.039617 +L 543.344219 247.986986 +L 543.836717 214.494163 +L 544.329214 231.910431 +L 
544.821712 243.699904 +L 545.31421 240.216651 +L 546.299206 250.398469 +L 546.791703 250.398469 +L 547.284201 228.963062 +L 547.776699 240.216651 +L 548.269197 235.393684 +L 548.761695 244.235789 +L 549.254192 235.661627 +L 549.74669 250.934354 +L 550.239188 250.398469 +L 551.224184 238.608995 +L 551.716681 245.039617 +L 552.209179 212.886507 +L 552.701677 250.666411 +L 553.194175 250.934354 +L 553.686673 250.398469 +L 554.671668 233.250144 +L 555.164166 234.589856 +L 555.656664 243.431962 +L 556.149162 243.967847 +L 556.641659 251.470239 +L 557.134157 250.130526 +L 557.626655 211.010909 +L 558.119153 235.393684 +L 558.61165 248.522871 +L 559.104148 241.020478 +L 559.596646 250.398469 +L 560.089144 250.130526 +L 560.581642 250.934354 +L 561.074139 236.733397 +L 561.566637 235.393684 +L 562.059135 246.111388 +L 562.551633 240.216651 +L 563.044131 244.771675 +L 563.536628 247.183158 +L 564.029126 251.738182 +L 564.521624 246.915215 +L 565.014122 236.465455 +L 565.50662 230.570718 +L 565.999117 246.111388 +L 566.491615 251.202297 +L 566.984113 250.398469 +L 567.476611 249.058756 +L 567.969109 244.503732 +L 568.461606 247.183158 +L 568.954104 242.628134 +L 569.446602 234.589856 +L 569.9391 247.183158 +L 570.431598 240.752536 +L 570.924095 236.465455 +L 571.416593 236.197512 +L 571.909091 238.07311 +L 572.401589 230.034833 +L 572.894086 238.341053 +L 573.386584 241.288421 +L 573.879082 251.470239 +L 574.37158 238.608995 +L 574.864078 230.83866 +L 575.849073 247.4511 +L 576.341571 244.235789 +L 576.834069 249.594641 +L 577.326567 250.398469 +L 577.819064 246.111388 +L 578.311562 249.594641 +L 578.80406 238.876938 +L 579.789056 248.254928 +L 580.281553 237.269282 +L 580.774051 249.058756 +L 581.266549 250.666411 +L 581.759047 246.37933 +L 582.251545 244.771675 +L 582.744042 241.556364 +L 583.23654 233.250144 +L 583.729038 248.790813 +L 584.221536 248.790813 +L 584.714034 249.862584 +L 585.206531 243.699904 +L 585.699029 231.374545 +L 586.191527 234.857799 +L 586.684025 
237.00134 +L 587.176523 211.814737 +L 587.66902 247.986986 +L 588.161518 251.738182 +L 588.654016 238.07311 +L 589.146514 240.484593 +L 589.639011 248.790813 +L 590.131509 247.183158 +L 591.116505 251.470239 +L 591.609003 249.594641 +L 592.1015 232.714258 +L 592.593998 246.915215 +L 593.086496 241.824306 +L 593.578994 246.111388 +L 594.071492 234.857799 +L 594.563989 250.934354 +L 595.056487 244.771675 +L 595.548985 245.575502 +L 596.041483 250.130526 +L 596.533981 237.269282 +L 597.026478 230.302775 +L 598.011474 245.039617 +L 598.503972 246.915215 +L 598.99647 245.575502 +L 599.488967 242.092249 +L 599.981465 243.699904 +L 600.473963 246.647273 +L 600.966461 239.412823 +L 601.458959 250.666411 +L 601.951456 250.666411 +L 602.936452 232.982201 +L 603.42895 230.570718 +L 603.921447 235.929569 +L 604.413945 249.862584 +L 604.906443 251.738182 +L 605.398941 241.020478 +L 605.891439 241.288421 +L 606.383936 246.111388 +L 606.876434 234.589856 +L 607.368932 249.326699 +L 607.86143 215.833876 +L 608.353928 238.876938 +L 608.846425 237.00134 +L 609.338923 167.068325 +L 609.831421 202.436746 +L 610.323919 214.22622 +L 610.816417 204.580287 +L 611.308914 230.570718 +L 611.801412 226.015694 +L 612.29391 245.30756 +L 612.786408 235.661627 +L 613.278906 198.953493 +L 613.771403 228.427177 +L 614.263901 246.111388 +L 615.248897 232.178373 +L 615.741395 229.231005 +L 616.233892 238.608995 +L 616.72639 242.896077 +L 617.218888 212.886507 +L 617.711386 242.092249 +L 618.203883 212.082679 +L 618.696381 235.929569 +L 619.188879 235.393684 +L 619.681377 233.250144 +L 620.173875 221.46067 +L 620.666372 236.733397 +L 621.15887 226.015694 +L 621.651368 245.039617 +L 622.143866 250.130526 +L 622.636364 250.666411 +L 622.636364 250.666411 +" clip-path="url(#p6635c9c314)" style="fill: none; stroke: #1f77b4; stroke-width: 1.5; stroke-linecap: square"/> - + +L 318.765225 250.398469 +L 319.257723 248.790813 +L 319.750221 248.790813 +L 320.242718 249.862584 +L 320.735216 249.326699 +L 
321.227714 250.398469 +L 321.720212 250.666411 +L 322.21271 250.666411 +L 322.705207 248.254928 +L 323.197705 249.862584 +L 323.690203 250.666411 +L 324.182701 250.934354 +L 324.675199 249.594641 +L 325.167696 251.738182 +L 325.660194 251.470239 +L 326.152692 250.934354 +L 326.64519 250.934354 +L 327.137688 251.202297 +L 327.630185 250.666411 +L 328.122683 251.202297 +L 328.615181 250.398469 +L 329.107679 244.771675 +L 329.600177 247.4511 +L 330.092674 249.326699 +L 331.570168 250.934354 +L 332.062665 251.738182 +L 332.555163 249.594641 +L 333.047661 249.594641 +L 333.540159 249.058756 +L 334.032657 246.111388 +L 334.525154 250.666411 +L 335.017652 251.738182 +L 335.51015 251.470239 +L 336.002648 250.666411 +L 336.495146 248.254928 +L 336.987643 250.934354 +L 337.480141 250.130526 +L 337.972639 248.522871 +L 338.465137 251.202297 +L 338.957635 251.202297 +L 339.450132 250.130526 +L 339.94263 247.986986 +L 340.435128 248.790813 +L 340.927626 249.862584 +L 341.420124 249.326699 +L 341.912621 250.934354 +L 342.405119 250.666411 +L 342.897617 248.254928 +L 343.390115 249.326699 +L 343.882613 247.183158 +L 344.37511 246.647273 +L 344.867608 247.183158 +L 345.360106 251.202297 +L 345.852604 251.738182 +L 346.345102 247.986986 +L 346.837599 246.111388 +L 347.330097 247.986986 +L 347.822595 246.647273 +L 348.315093 243.431962 +L 348.80759 249.594641 +L 349.300088 249.326699 +L 349.792586 247.4511 +L 350.285084 247.183158 +L 350.777582 248.254928 +L 351.270079 248.790813 +L 351.762577 247.986986 +L 352.255075 249.862584 +L 352.747573 250.934354 +L 353.240071 247.719043 +L 354.225066 248.254928 +L 354.717564 246.915215 +L 355.210062 247.4511 +L 355.70256 247.719043 +L 356.195057 250.666411 +L 356.687555 245.843445 +L 357.180053 245.843445 +L 357.672551 243.967847 +L 358.165049 247.183158 +L 358.657546 247.183158 +L 359.150044 249.862584 +L 359.642542 249.058756 +L 360.13504 247.986986 +L 360.627538 245.843445 +L 361.120035 248.522871 +L 361.612533 249.058756 +L 362.105031 
249.326699 +L 362.597529 249.326699 +L 363.090026 249.862584 +L 363.582524 246.647273 +L 364.075022 247.183158 +L 364.56752 245.039617 +L 365.060018 247.986986 +L 365.552515 247.183158 +L 366.045013 250.934354 +L 366.537511 251.202297 +L 367.030009 243.967847 +L 367.522507 247.4511 +L 368.015004 245.575502 +L 368.507502 246.37933 +L 369 249.594641 +L 369.492498 250.398469 +L 369.984996 250.398469 +L 370.477493 248.522871 +L 370.969991 249.326699 +L 371.462489 247.183158 +L 371.954987 246.915215 +L 372.447485 249.594641 +L 372.939982 249.862584 +L 373.43248 251.738182 +L 373.924978 248.522871 +L 374.417476 248.254928 +L 374.909974 249.058756 +L 375.402471 244.771675 +L 375.894969 249.326699 +L 376.387467 251.470239 +L 376.879965 250.934354 +L 377.372462 249.058756 +L 377.86496 249.594641 +L 378.357458 249.594641 +L 378.849956 250.666411 +L 379.342454 250.666411 +L 379.834951 251.202297 +L 380.327449 250.666411 +L 380.819947 249.326699 +L 381.312445 249.058756 +L 381.804943 249.326699 +L 382.29744 250.130526 +L 382.789938 249.594641 +L 383.282436 250.666411 +L 383.774934 249.594641 +L 384.267432 248.790813 +L 384.759929 248.522871 +L 385.252427 248.522871 +L 385.744925 249.862584 +L 386.237423 246.915215 +L 386.729921 251.470239 +L 387.222418 251.470239 +L 387.714916 249.058756 +L 388.207414 250.398469 +L 388.699912 250.398469 +L 389.19241 248.254928 +L 390.177405 251.202297 +L 390.669903 250.398469 +L 391.162401 249.326699 +L 391.654898 248.522871 +L 392.147396 249.058756 +L 392.639894 247.4511 +L 393.132392 249.594641 +L 393.62489 250.934354 +L 394.117387 250.130526 +L 394.609885 246.647273 +L 395.102383 247.986986 +L 395.594881 250.666411 +L 396.087379 250.130526 +L 396.579876 248.522871 +L 397.564872 251.470239 +L 398.05737 249.594641 +L 398.549868 250.130526 +L 399.042365 249.594641 +L 399.534863 248.790813 +L 400.027361 250.398469 +L 400.519859 251.470239 +L 401.012357 251.470239 +L 401.504854 250.666411 +L 401.997352 250.130526 +L 402.48985 250.130526 +L 
402.982348 249.058756 +L 403.474846 251.470239 +L 403.967343 250.934354 +L 404.459841 251.738182 +L 404.952339 250.398469 +L 405.444837 249.862584 +L 405.937335 249.594641 +L 406.429832 248.522871 +L 406.92233 251.202297 +L 407.414828 251.470239 +L 407.907326 249.326699 +L 408.399823 249.594641 +L 408.892321 250.130526 +L 409.384819 250.398469 +L 409.877317 249.862584 +L 410.369815 251.470239 +L 410.862312 251.738182 +L 411.35481 250.934354 +L 411.847308 250.398469 +L 412.339806 249.058756 +L 412.832304 251.202297 +L 413.324801 250.666411 +L 413.817299 251.470239 +L 414.309797 251.202297 +L 414.802295 250.130526 +L 415.294793 249.594641 +L 415.78729 250.130526 +L 416.279788 249.058756 +L 416.772286 250.666411 +L 417.264784 251.470239 +L 417.757282 250.130526 +L 418.249779 251.202297 +L 418.742277 250.666411 +L 419.234775 250.666411 +L 419.727273 248.522871 +L 420.219771 249.594641 +L 420.712268 250.934354 +L 421.204766 251.202297 +L 421.697264 249.594641 +L 422.189762 249.594641 +L 422.682259 250.666411 +L 423.174757 249.594641 +L 423.667255 249.594641 +L 424.159753 251.470239 +L 424.652251 249.594641 +L 425.144748 249.326699 +L 425.637246 250.666411 +L 426.129744 249.594641 +L 426.622242 249.326699 +L 427.11474 251.202297 +L 427.607237 251.738182 +L 428.099735 250.130526 +L 428.592233 249.594641 +L 429.084731 250.666411 +L 430.069726 250.130526 +L 430.562224 250.666411 +L 431.054722 251.470239 +L 431.54722 250.130526 +L 432.039718 250.130526 +L 432.532215 249.326699 +L 433.024713 249.862584 +L 433.517211 249.862584 +L 434.009709 250.934354 +L 434.502207 251.738182 +L 434.994704 251.202297 +L 435.487202 250.398469 +L 435.9797 250.398469 +L 436.472198 250.934354 +L 436.964695 250.130526 +L 437.457193 250.934354 +L 437.949691 251.470239 +L 438.442189 250.398469 +L 438.934687 249.862584 +L 439.427184 250.130526 +L 439.919682 249.058756 +L 440.41218 249.594641 +L 440.904678 251.738182 +L 441.397176 251.470239 +L 441.889673 250.398469 +L 442.382171 249.862584 +L 
442.874669 249.058756 +L 443.367167 249.594641 +L 443.859665 245.039617 +L 444.352162 251.202297 +L 444.84466 250.666411 +L 445.829656 247.183158 +L 446.322154 248.790813 +L 446.814651 249.862584 +L 447.307149 250.398469 +L 447.799647 251.202297 +L 448.292145 251.202297 +L 448.784643 250.130526 +L 449.27714 248.254928 +L 449.769638 249.862584 +L 450.262136 246.915215 +L 450.754634 246.915215 +L 451.247132 251.202297 +L 451.739629 251.470239 +L 452.232127 246.37933 +L 452.724625 246.915215 +L 453.217123 247.183158 +L 453.70962 249.594641 +L 454.202118 250.130526 +L 454.694616 251.202297 +L 455.187114 249.594641 +L 455.679612 248.522871 +L 456.172109 249.862584 +L 456.664607 246.111388 +L 457.649603 250.666411 +L 458.142101 251.470239 +L 458.634598 247.986986 +L 459.619594 250.666411 +L 460.112092 248.254928 +L 460.60459 249.862584 +L 461.097087 250.934354 +L 461.589585 251.202297 +L 462.574581 248.790813 +L 463.559576 249.862584 +L 464.052074 249.594641 +L 464.544572 251.202297 +L 465.03707 251.202297 +L 465.529568 249.058756 +L 466.022065 249.058756 +L 466.514563 249.326699 +L 467.007061 250.398469 +L 467.499559 248.790813 +L 468.484554 251.470239 +L 468.977052 243.431962 +L 469.46955 247.986986 +L 469.962048 247.719043 +L 470.454545 248.790813 +L 470.947043 248.522871 +L 471.439541 250.934354 +L 471.932039 250.130526 +L 472.424537 248.254928 +L 472.917034 248.522871 +L 473.409532 248.522871 +L 473.90203 247.4511 +L 474.887026 250.934354 +L 476.364519 248.522871 +L 476.857017 249.326699 +L 477.349515 248.790813 +L 477.842012 248.522871 +L 478.33451 250.666411 +L 478.827008 251.202297 +L 479.319506 248.254928 +L 479.812004 248.522871 +L 480.796999 250.130526 +L 481.289497 249.594641 +L 481.781995 251.202297 +L 482.274492 251.470239 +L 482.76699 248.254928 +L 483.259488 250.934354 +L 484.244484 247.986986 +L 484.736981 250.666411 +L 485.229479 251.738182 +L 485.721977 250.398469 +L 486.214475 250.666411 +L 486.706973 250.398469 +L 487.19947 249.326699 +L 487.691968 
251.202297 +L 488.184466 250.398469 +L 488.676964 251.202297 +L 489.169462 251.202297 +L 489.661959 249.326699 +L 490.154457 248.790813 +L 490.646955 250.398469 +L 491.139453 248.522871 +L 492.124448 251.470239 +L 492.616946 251.738182 +L 493.109444 249.594641 +L 494.09444 250.130526 +L 494.586937 250.934354 +L 495.571933 250.398469 +L 496.064431 250.934354 +L 496.556929 249.594641 +L 497.049426 246.647273 +L 497.541924 250.666411 +L 498.034422 250.934354 +L 498.52692 251.470239 +L 499.019417 249.862584 +L 499.511915 249.862584 +L 500.004413 250.666411 +L 500.496911 250.130526 +L 500.989409 250.398469 +L 501.481906 251.738182 +L 501.974404 251.738182 +L 502.466902 251.202297 +L 502.9594 251.738182 +L 503.451898 250.934354 +L 503.944395 251.738182 +L 504.436893 251.738182 +L 504.929391 250.398469 +L 505.421889 250.934354 +L 505.914387 250.934354 +L 506.406884 249.862584 +L 506.899382 249.594641 +L 507.39188 250.130526 +L 507.884378 249.326699 +L 508.376876 250.666411 +L 508.869373 251.470239 +L 509.361871 248.254928 +L 509.854369 250.130526 +L 510.346867 250.130526 +L 510.839365 249.058756 +L 511.331862 249.862584 +L 511.82436 251.202297 +L 512.316858 251.202297 +L 512.809356 250.398469 +L 513.301853 250.934354 +L 513.794351 250.130526 +L 514.779347 250.130526 +L 515.271845 251.202297 +L 515.764342 250.666411 +L 516.25684 246.37933 +L 516.749338 249.594641 +L 517.241836 249.862584 +L 517.734334 249.058756 +L 518.226831 249.058756 +L 518.719329 251.202297 +L 519.211827 250.666411 +L 519.704325 249.326699 +L 520.196823 250.934354 +L 520.68932 249.058756 +L 521.181818 249.862584 +L 521.674316 250.398469 +L 522.166814 251.470239 +L 522.659312 250.130526 +L 523.151809 249.594641 +L 523.644307 250.130526 +L 524.136805 248.254928 +L 524.629303 248.522871 +L 525.121801 249.326699 +L 525.614298 251.738182 +L 526.106796 250.934354 +L 526.599294 249.058756 +L 527.091792 249.862584 +L 527.584289 249.058756 +L 528.076787 248.522871 +L 528.569285 247.183158 +L 529.061783 
250.398469 +L 529.554281 251.202297 +L 530.539276 248.790813 +L 531.031774 250.398469 +L 531.524272 249.326699 +L 532.01677 250.130526 +L 533.001765 250.666411 +L 533.494263 248.790813 +L 533.986761 249.594641 +L 534.971756 246.111388 +L 535.464254 247.4511 +L 535.956752 250.934354 +L 536.44925 249.862584 +L 536.941748 249.326699 +L 537.434245 247.719043 +L 537.926743 248.790813 +L 538.419241 250.398469 +L 538.911739 249.326699 +L 539.404237 250.130526 +L 539.896734 251.470239 +L 540.389232 251.202297 +L 541.374228 248.254928 +L 541.866726 248.522871 +L 542.359223 249.594641 +L 542.851721 250.130526 +L 543.344219 250.934354 +L 543.836717 249.058756 +L 544.329214 250.130526 +L 544.821712 249.862584 +L 545.31421 248.254928 +L 545.806708 247.986986 +L 546.299206 251.202297 +L 546.791703 250.934354 +L 547.284201 249.594641 +L 547.776699 248.790813 +L 548.269197 248.254928 +L 548.761695 248.522871 +L 549.74669 251.202297 +L 550.239188 250.666411 +L 550.731686 248.790813 +L 551.224184 248.522871 +L 551.716681 250.398469 +L 552.209179 249.326699 +L 552.701677 251.202297 +L 553.194175 251.202297 +L 553.686673 250.666411 +L 554.17917 247.719043 +L 554.671668 248.790813 +L 555.164166 248.254928 +L 555.656664 247.986986 +L 556.149162 250.666411 +L 556.641659 251.738182 +L 557.134157 250.666411 +L 557.626655 247.986986 +L 558.61165 250.398469 +L 559.104148 248.254928 +L 559.596646 250.398469 +L 560.089144 251.202297 +L 560.581642 251.202297 +L 561.074139 248.790813 +L 561.566637 250.130526 +L 562.059135 249.862584 +L 563.044131 250.398469 +L 563.536628 250.934354 +L 564.029126 251.738182 +L 564.521624 248.790813 +L 565.50662 249.862584 +L 565.999117 248.790813 +L 566.491615 251.470239 +L 566.984113 250.934354 +L 567.476611 250.666411 +L 567.969109 250.130526 +L 568.461606 249.058756 +L 568.954104 247.4511 +L 569.446602 247.4511 +L 570.431598 251.470239 +L 571.416593 249.326699 +L 572.401589 249.326699 +L 572.894086 248.790813 +L 573.386584 249.326699 +L 573.879082 251.738182 
+L 574.37158 250.130526 +L 574.864078 247.183158 +L 575.356575 248.790813 +L 575.849073 249.326699 +L 576.341571 248.790813 +L 577.326567 251.470239 +L 577.819064 250.934354 +L 578.80406 249.326699 +L 579.296558 248.790813 +L 579.789056 249.862584 +L 580.281553 249.326699 +L 580.774051 250.666411 +L 581.266549 250.934354 +L 581.759047 250.666411 +L 582.251545 247.719043 +L 582.744042 249.058756 +L 583.23654 249.326699 +L 583.729038 249.862584 +L 584.221536 251.202297 +L 584.714034 251.202297 +L 585.206531 248.522871 +L 585.699029 249.326699 +L 586.191527 249.862584 +L 586.684025 247.719043 +L 587.176523 249.594641 +L 587.66902 250.398469 +L 588.161518 251.738182 +L 588.654016 250.666411 +L 589.146514 250.130526 +L 589.639011 250.666411 +L 590.131509 250.934354 +L 590.624007 250.398469 +L 591.116505 251.738182 +L 591.609003 250.666411 +L 592.1015 248.254928 +L 592.593998 248.522871 +L 593.086496 250.666411 +L 593.578994 248.790813 +L 594.071492 247.986986 +L 594.563989 251.470239 +L 595.056487 250.398469 +L 595.548985 250.398469 +L 596.041483 250.934354 +L 596.533981 249.862584 +L 597.026478 249.862584 +L 598.011474 250.398469 +L 598.503972 250.398469 +L 598.99647 249.326699 +L 599.488967 249.862584 +L 599.981465 249.058756 +L 600.473963 250.130526 +L 600.966461 249.862584 +L 601.458959 251.738182 +L 601.951456 251.202297 +L 602.443954 248.522871 +L 602.936452 247.986986 +L 603.42895 248.790813 +L 603.921447 249.058756 +L 604.413945 250.666411 +L 604.906443 251.738182 +L 605.398941 250.666411 +L 605.891439 249.058756 +L 606.383936 249.862584 +L 606.876434 248.790813 +L 607.368932 250.130526 +L 607.86143 250.130526 +L 608.353928 244.503732 +L 608.846425 244.503732 +L 609.338923 249.058756 +L 609.831421 243.699904 +L 610.323919 240.484593 +L 610.816417 249.058756 +L 611.308914 245.30756 +L 611.801412 242.896077 +L 612.29391 250.934354 +L 612.786408 244.235789 +L 613.278906 240.216651 +L 613.771403 242.628134 +L 614.263901 251.470239 +L 614.756399 244.503732 +L 
615.248897 246.647273 +L 615.741395 243.431962 +L 616.233892 250.666411 +L 616.72639 250.398469 +L 617.218888 239.14488 +L 617.711386 247.719043 +L 618.203883 249.326699 +L 618.696381 244.771675 +L 619.188879 246.111388 +L 619.681377 248.522871 +L 620.666372 247.986986 +L 621.15887 249.594641 +L 621.651368 250.398469 +L 622.143866 250.934354 +L 622.636364 251.202297 +L 622.636364 251.202297 +" clip-path="url(#p6635c9c314)" style="fill: none; stroke: #ff7f0e; stroke-width: 1.5; stroke-linecap: square"/> - + - + @@ -4043,13 +4781,13 @@ z - + - + @@ -4077,10 +4815,10 @@ L 525.782812 183.296562 - + - + diff --git a/doc/source/tracking/traffic/views.csv b/doc/source/tracking/traffic/views.csv index 72721bedd..82cb0bb06 100644 --- a/doc/source/tracking/traffic/views.csv +++ b/doc/source/tracking/traffic/views.csv @@ -820,3 +820,213 @@ _date,total_views,unique_views 2023-01-27,65,11 2023-01-28,13,3 2023-01-29,9,5 +2023-01-30,97,10 +2023-01-31,10,4 +2023-02-01,21,11 +2023-02-02,53,8 +2023-02-03,11,6 +2023-02-04,5,2 +2023-02-05,39,7 +2023-02-06,39,9 +2023-02-07,32,7 +2023-02-08,46,14 +2023-02-09,94,13 +2023-02-10,82,10 +2023-02-11,2,1 +2023-02-12,8,4 +2023-02-13,32,11 +2023-02-14,84,8 +2023-02-15,33,11 +2023-02-16,48,13 +2023-02-17,90,18 +2023-02-18,13,6 +2023-02-19,12,3 +2023-02-20,14,8 +2023-02-21,44,12 +2023-02-22,22,6 +2023-02-23,42,10 +2023-02-24,13,7 +2023-02-25,25,6 +2023-02-26,12,5 +2023-02-27,74,12 +2023-02-28,33,9 +2023-03-01,41,16 +2023-03-02,87,22 +2023-03-03,119,17 +2023-03-04,10,4 +2023-03-05,27,8 +2023-03-06,66,10 +2023-03-07,56,16 +2023-03-08,41,12 +2023-03-09,28,6 +2023-03-10,21,10 +2023-03-11,9,7 +2023-03-12,4,2 +2023-03-13,26,3 +2023-03-14,86,9 +2023-03-15,75,14 +2023-03-16,69,13 +2023-03-17,65,9 +2023-03-18,26,7 +2023-03-19,15,4 +2023-03-20,140,11 +2023-03-21,75,7 +2023-03-22,31,8 +2023-03-23,44,14 +2023-03-24,26,15 +2023-03-25,6,3 +2023-03-26,6,4 +2023-03-27,86,9 +2023-03-28,44,12 +2023-03-29,62,14 +2023-03-30,29,13 +2023-03-31,61,8 +2023-04-01,4,3 
+2023-04-02,6,5 +2023-04-03,27,12 +2023-04-04,50,13 +2023-04-05,26,6 +2023-04-06,146,10 +2023-04-07,5,3 +2023-04-08,4,3 +2023-04-09,6,5 +2023-04-10,40,16 +2023-04-11,70,12 +2023-04-12,65,14 +2023-04-13,32,15 +2023-04-14,30,5 +2023-04-15,2,1 +2023-04-16,7,5 +2023-04-17,153,15 +2023-04-18,62,10 +2023-04-19,13,6 +2023-04-20,41,14 +2023-04-21,6,6 +2023-04-22,7,3 +2023-04-23,4,3 +2023-04-24,57,12 +2023-04-25,62,7 +2023-04-26,22,8 +2023-04-27,44,7 +2023-04-28,27,6 +2023-04-29,18,4 +2023-04-30,1,1 +2023-05-01,19,12 +2023-05-02,58,10 +2023-05-03,80,8 +2023-05-04,22,12 +2023-05-05,3,2 +2023-05-06,6,4 +2023-05-07,11,5 +2023-05-08,28,7 +2023-05-09,18,11 +2023-05-10,35,17 +2023-05-11,65,17 +2023-05-12,18,9 +2023-05-13,42,2 +2023-05-14,58,6 +2023-05-15,59,10 +2023-05-16,52,10 +2023-05-17,82,10 +2023-05-18,51,12 +2023-05-19,40,10 +2023-05-20,2,1 +2023-05-21,50,7 +2023-05-22,79,18 +2023-05-23,51,12 +2023-05-24,17,10 +2023-05-25,29,12 +2023-05-26,9,7 +2023-05-27,6,2 +2023-05-28,22,4 +2023-05-29,9,7 +2023-05-30,49,10 +2023-05-31,33,12 +2023-06-01,14,8 +2023-06-02,55,10 +2023-06-03,11,5 +2023-06-04,5,4 +2023-06-05,21,5 +2023-06-06,27,16 +2023-06-07,39,11 +2023-06-08,70,10 +2023-06-09,12,8 +2023-06-10,12,3 +2023-06-11,8,3 +2023-06-12,31,13 +2023-06-13,77,10 +2023-06-14,64,8 +2023-06-15,56,16 +2023-06-16,150,9 +2023-06-17,15,6 +2023-06-18,1,1 +2023-06-19,52,5 +2023-06-20,43,7 +2023-06-21,12,5 +2023-06-22,18,4 +2023-06-23,9,6 +2023-06-24,2,1 +2023-06-25,9,5 +2023-06-26,72,14 +2023-06-27,19,13 +2023-06-28,38,5 +2023-06-29,22,12 +2023-06-30,64,15 +2023-07-01,4,2 +2023-07-02,27,6 +2023-07-03,24,6 +2023-07-04,7,4 +2023-07-05,55,8 +2023-07-06,81,8 +2023-07-07,51,7 +2023-07-08,26,6 +2023-07-09,19,6 +2023-07-10,24,10 +2023-07-11,37,8 +2023-07-12,31,11 +2023-07-13,20,7 +2023-07-14,47,8 +2023-07-15,5,1 +2023-07-16,5,3 +2023-07-17,42,13 +2023-07-18,71,15 +2023-07-19,80,12 +2023-07-20,60,11 +2023-07-21,8,5 +2023-07-22,1,1 +2023-07-23,41,5 +2023-07-24,40,11 +2023-07-25,22,8 +2023-07-26,65,12 
+2023-07-27,10,7 +2023-07-28,135,7 +2023-07-29,49,28 +2023-07-30,56,28 +2023-07-31,317,11 +2023-08-01,185,31 +2023-08-02,141,43 +2023-08-03,177,11 +2023-08-04,80,25 +2023-08-05,97,34 +2023-08-06,25,4 +2023-08-07,61,29 +2023-08-08,198,44 +2023-08-09,88,35 +2023-08-10,22,2 +2023-08-11,48,28 +2023-08-12,74,20 +2023-08-13,85,32 +2023-08-14,50,5 +2023-08-15,34,6 +2023-08-16,146,48 +2023-08-17,37,16 +2023-08-18,149,10 +2023-08-19,60,27 +2023-08-20,62,22 +2023-08-21,70,13 +2023-08-22,114,14 +2023-08-23,57,15 +2023-08-24,97,9 +2023-08-25,26,6 +2023-08-26,7,4 +2023-08-27,5,3 diff --git a/doc/source/user_guide/changelog/index.rst b/doc/source/user_guide/changelog/index.rst index c781ea041..edd1c9884 100644 --- a/doc/source/user_guide/changelog/index.rst +++ b/doc/source/user_guide/changelog/index.rst @@ -6,9 +6,17 @@ icepyx ChangeLog This is the list of changes made to icepyx in between each release. Full details can be found in the `commit logs `_. -Latest Release (Version 0.7.0) +Latest Release (Version 0.8.0) ------------------------------ +.. toctree:: + :maxdepth: 2 + + v0.8.0 + +Version 0.7.0 +------------- + .. toctree:: :maxdepth: 2 diff --git a/doc/source/user_guide/changelog/template.rst b/doc/source/user_guide/changelog/template.rst index e7957d265..c09dd972f 100644 --- a/doc/source/user_guide/changelog/template.rst +++ b/doc/source/user_guide/changelog/template.rst @@ -1,5 +1,3 @@ -.. _whatsnew_0x0: - What's new in 0.4.0 (DD MONTH YYYY) ----------------------------------- diff --git a/doc/source/user_guide/changelog/v0.8.0.rst b/doc/source/user_guide/changelog/v0.8.0.rst new file mode 100644 index 000000000..4f60f57f4 --- /dev/null +++ b/doc/source/user_guide/changelog/v0.8.0.rst @@ -0,0 +1,86 @@ +What's new in 0.8.0 (12 September 2023) +----------------------------------- + +These are the changes in icepyx 0.8.0 See :ref:`release` for a full changelog +including other versions of icepyx. 
+ + +New Features +~~~~~~~~~~~~ + +- create temporal module and add input types and testing (#327) + + - create temporal module + - create temporal testing module + - add support for more temporal input types (datetime objects) and formats (dict) + - temporal docstring, user guide updates + - updated example notebook for new temporal inputs + - update temporal info in data access tutorial example notebook + - GitHub action UML generation auto-update + +- Refactor authentication (#435) + + - modularize authentication using a mixin class + - add docstrings and update example notebooks + - add tests + +- add atl23 (new product) to lists and tests (#445) + + +Deprecations +~~~~~~~~~~~~ + +- Remove intake catalog from Read module (#438) + + - delete is2cat.py and references + - remove intake and related modules + +- Raise warning for use of catalog in Read module (#446) + + +Maintenance +^^^^^^^^^^^ + +- update codecov action and remove from deps (#421) + +- is2ref tests for product formatting and default var lists (#424) + +- get s3urls for all data products and update doctests to v006 (#426) + + - Always send CMR query to provider NSIDC_CPRD to make sure s3 urls are returned. + +- Traffic updates 2023 Feb-Aug (#442) + +Documentation +^^^^^^^^^^^^^ + +- update install instructions (#409) + + - add s3fs as requirement to make cloud access default + - transition to recommending mamba over conda + +- add release guide to docs (#255) + +- docs maintenance and pubs/citations update (#422) + + - add JOSS to bib and badges + - switch zenodo links to nonversioned icepyx + + +Other +^^^^^ + +- JOSS submission (#361) + + Matches Release v0.6.4_JOSS per #420 plus a few editorial edits available via the pubs/joss branch. + +- update and clarify authorship, citation, and attribution policies (#419) + + - add CITATION.cff file + - update citation docs with Zenodo doi and 'icepyx Developers' as author + + +Contributors +~~~~~~~~~~~~ + +.. 
contributors:: v0.7.0..v0.8.0|HEAD diff --git a/doc/source/user_guide/documentation/classes_dev_uml.svg b/doc/source/user_guide/documentation/classes_dev_uml.svg index e966a8f09..fd5033938 100644 --- a/doc/source/user_guide/documentation/classes_dev_uml.svg +++ b/doc/source/user_guide/documentation/classes_dev_uml.svg @@ -4,331 +4,392 @@ - - + + classes_dev_uml - - + + +icepyx.core.auth.AuthenticationError + +AuthenticationError + + + + + + +icepyx.core.exceptions.DeprecationError + +DeprecationError + + + + + + +icepyx.core.auth.EarthdataAuthMixin + +EarthdataAuthMixin + +_auth : Auth, NoneType +_s3_initial_ts : NoneType, datetime +_s3login_credentials : NoneType, dict +_session : NoneType, Session +auth +s3login_credentials +session + +__init__(auth) +__str__() +earthdata_login(uid, email, s3token): None + + + icepyx.core.query.GenQuery - -GenQuery - -_end : datetime -_spatial -_start : datetime - -__init__(spatial_extent, date_range, start_time, end_time) -__str__() + +GenQuery + +_spatial +_temporal + +__init__(spatial_extent, date_range, start_time, end_time) +__str__() - + icepyx.core.granules.Granules - -Granules - -avail : list -orderIDs : list - -__init__ -() -download(verbose, path, session, restart) -get_avail(CMRparams, reqparams, cloud) -place_order(CMRparams, reqparams, subsetparams, verbose, subset, session, geom_filepath) + +Granules + +avail : list +orderIDs : list + +__init__ +() +download(verbose, path, session, restart) +get_avail(CMRparams, reqparams, cloud) +place_order(CMRparams, reqparams, subsetparams, verbose, subset, session, geom_filepath) - + icepyx.core.query.Query - -Query - -CMRparams -_CMRparams -_about_product -_auth : Auth -_cust_options : dict -_cycles : list -_file_vars -_granules -_order_vars -_prod : NoneType, str -_readable_granule_name : list -_reqparams -_s3login_credentials : dict -_session : Session -_source : str -_subsetparams : NoneType -_tracks : list -_version -cycles -dataset -dates -end_time -file_vars -granules 
-order_vars -product -product_version -reqparams -spatial -spatial_extent -start_time -tracks - -__init__(product, spatial_extent, date_range, start_time, end_time, version, cycles, tracks, files) -__str__() -avail_granules(ids, cycles, tracks, cloud) -download_granules(path, verbose, subset, restart) -earthdata_login(uid, email, s3token): None -latest_version() -order_granules(verbose, subset, email) -product_all_info() -product_summary_info() -show_custom_options(dictview) -subsetparams() -visualize_elevation() -visualize_spatial_extent() + +Query + +CMRparams +_CMRparams +_about_product +_cust_options : dict +_cycles : list +_file_vars +_granules +_order_vars +_prod : NoneType, str +_readable_granule_name : list +_reqparams +_source : str +_subsetparams : NoneType +_tracks : list +_version +cycles +dataset +dates +end_time +file_vars +granules +order_vars +product +product_version +reqparams +spatial +spatial_extent +start_time +temporal +tracks + +__init__(product, spatial_extent, date_range, start_time, end_time, version, cycles, tracks, files, auth) +__str__() +avail_granules(ids, cycles, tracks, cloud) +download_granules(path, verbose, subset, restart) +latest_version() +order_granules(verbose, subset, email) +product_all_info() +product_summary_info() +show_custom_options(dictview) +subsetparams() +visualize_elevation() +visualize_spatial_extent() - + icepyx.core.granules.Granules->icepyx.core.query.Query - - -_granules + + +_granules - + icepyx.core.granules.Granules->icepyx.core.query.Query - - -_granules + + +_granules - + icepyx.core.icesat2data.Icesat2Data - -Icesat2Data - - -__init__() + +Icesat2Data + + +__init__() - + icepyx.core.exceptions.NsidcQueryError - -NsidcQueryError - -errmsg -msgtxt : str - -__init__(errmsg, msgtxt) -__str__() + +NsidcQueryError + +errmsg +msgtxt : str + +__init__(errmsg, msgtxt) +__str__() - + icepyx.core.exceptions.QueryError - -QueryError - - - + +QueryError + + + 
icepyx.core.exceptions.NsidcQueryError->icepyx.core.exceptions.QueryError - - + + - + icepyx.core.APIformatting.Parameters - -Parameters - -_fmted_keys : dict, NoneType -_poss_keys : dict -_reqtype : str, NoneType -fmted_keys -partype -poss_keys - -__init__(partype, values, reqtype) -_check_valid_keys() -_get_possible_keys() -build_params() -check_req_values() -check_values() + +Parameters + +_fmted_keys : NoneType, dict +_poss_keys : dict +_reqtype : NoneType, str +fmted_keys +partype +poss_keys + +__init__(partype, values, reqtype) +_check_valid_keys() +_get_possible_keys() +build_params() +check_req_values() +check_values() - + icepyx.core.APIformatting.Parameters->icepyx.core.query.Query - - -_CMRparams + + +_CMRparams - + icepyx.core.APIformatting.Parameters->icepyx.core.query.Query - - -_reqparams + + +_reqparams - + icepyx.core.APIformatting.Parameters->icepyx.core.query.Query - - -_subsetparams + + +_subsetparams - + icepyx.core.APIformatting.Parameters->icepyx.core.query.Query - - -_subsetparams + + +_subsetparams - + +icepyx.core.query.Query->icepyx.core.auth.EarthdataAuthMixin + + + + + icepyx.core.query.Query->icepyx.core.query.GenQuery - - + + - + icepyx.core.read.Read - -Read - -_catalog_path -_filelist : list, NoneType -_is2catalog : Catalog -_out_obj : Dataset -_pattern : str -_prod : str -_read_vars -_source_type : str -data_source -is2catalog -vars - -__init__(data_source, product, filename_pattern, catalog, out_obj_type) -_add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict) -_build_dataset_template(file) -_build_single_file_dataset(file, groups_list) -_check_source_for_pattern(source, filename_pattern) -_combine_nested_vars(is2ds, ds, grp_path, wanted_dict) -_read_single_grp(file, grp_path) -load() + +Read + +_filelist : list, NoneType +_out_obj : Dataset +_pattern : str +_prod : str +_read_vars +_source_type : str +data_source +vars + +__init__(data_source, product, filename_pattern, catalog, out_obj_type) 
+_add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict) +_build_dataset_template(file) +_build_single_file_dataset(file, groups_list) +_check_source_for_pattern(source, filename_pattern) +_combine_nested_vars(is2ds, ds, grp_path, wanted_dict) +_read_single_grp(file, grp_path) +load() - + icepyx.core.spatial.Spatial - -Spatial - -_ext_type : str -_gdf_spat : GeoDataFrame, DataFrame -_geom_file : NoneType -_spatial_ext -_xdateln -extent -extent_as_gdf -extent_file -extent_type - -__init__(spatial_extent) -__str__() -fmt_for_CMR() -fmt_for_EGI() + +Spatial + +_ext_type : str +_gdf_spat : GeoDataFrame, DataFrame +_geom_file : NoneType +_spatial_ext +_xdateln +extent +extent_as_gdf +extent_file +extent_type + +__init__(spatial_extent) +__str__() +fmt_for_CMR() +fmt_for_EGI() - + icepyx.core.spatial.Spatial->icepyx.core.query.GenQuery - - -_spatial + + +_spatial - + icepyx.core.spatial.Spatial->icepyx.core.query.GenQuery - - -_spatial + + +_spatial + + + +icepyx.core.temporal.Temporal + +Temporal + +_end : datetime +_start : datetime +end +start + +__init__(date_range, start_time, end_time) +__str__() + + + +icepyx.core.temporal.Temporal->icepyx.core.query.GenQuery + + +_temporal - + icepyx.core.variables.Variables - -Variables - -_avail : list, NoneType -_session : NoneType -_vartype -_version : NoneType -path : NoneType -product : NoneType -wanted : dict, NoneType - -__init__(vartype, avail, wanted, session, product, version, path) -_check_valid_lists(vgrp, allpaths, var_list, beam_list, keyword_list) -_get_combined_list(beam_list, keyword_list) -_get_sum_varlist(var_list, all_vars, defaults) -_iter_paths(sum_varlist, req_vars, vgrp, beam_list, keyword_list) -_iter_vars(sum_varlist, req_vars, vgrp) -append(defaults, var_list, beam_list, keyword_list) -avail(options, internal) -parse_var_list(varlist, tiered, tiered_vars) -remove(all, var_list, beam_list, keyword_list) + +Variables + +_avail : NoneType, list +_vartype +_version : NoneType +path : 
NoneType +product : NoneType +wanted : NoneType, dict + +__init__(vartype, avail, wanted, product, version, path, auth) +_check_valid_lists(vgrp, allpaths, var_list, beam_list, keyword_list) +_get_combined_list(beam_list, keyword_list) +_get_sum_varlist(var_list, all_vars, defaults) +_iter_paths(sum_varlist, req_vars, vgrp, beam_list, keyword_list) +_iter_vars(sum_varlist, req_vars, vgrp) +append(defaults, var_list, beam_list, keyword_list) +avail(options, internal) +parse_var_list(varlist, tiered, tiered_vars) +remove(all, var_list, beam_list, keyword_list) + + + +icepyx.core.variables.Variables->icepyx.core.auth.EarthdataAuthMixin + + - + icepyx.core.variables.Variables->icepyx.core.query.Query - - -_order_vars + + +_order_vars - + icepyx.core.variables.Variables->icepyx.core.query.Query - - -_order_vars + + +_order_vars - + icepyx.core.variables.Variables->icepyx.core.query.Query - - -_file_vars + + +_file_vars - + icepyx.core.variables.Variables->icepyx.core.read.Read - - -_read_vars + + +_read_vars - + icepyx.core.visualization.Visualize - -Visualize - -bbox : list -cycles : NoneType -date_range : NoneType -product : NoneType, str -tracks : NoneType - -__init__(query_obj, product, spatial_extent, date_range, cycles, tracks) -generate_OA_parameters(): list -grid_bbox(binsize): list -make_request(base_url, payload) -parallel_request_OA(): da.array -query_icesat2_filelist(): tuple -request_OA_data(paras): da.array -viz_elevation(): (hv.DynamicMap, hv.Layout) + +Visualize + +bbox : list +cycles : NoneType +date_range : NoneType +product : NoneType, str +tracks : NoneType + +__init__(query_obj, product, spatial_extent, date_range, cycles, tracks) +generate_OA_parameters(): list +grid_bbox(binsize): list +make_request(base_url, payload) +parallel_request_OA(): da.array +query_icesat2_filelist(): tuple +request_OA_data(paras): da.array +viz_elevation(): (hv.DynamicMap, hv.Layout) diff --git a/doc/source/user_guide/documentation/classes_user_uml.svg 
b/doc/source/user_guide/documentation/classes_user_uml.svg index 53895fb22..1c9184379 100644 --- a/doc/source/user_guide/documentation/classes_user_uml.svg +++ b/doc/source/user_guide/documentation/classes_user_uml.svg @@ -4,266 +4,325 @@ - - + + classes_user_uml - - + + +icepyx.core.auth.AuthenticationError + +AuthenticationError + + + + + + +icepyx.core.exceptions.DeprecationError + +DeprecationError + + + + + + +icepyx.core.auth.EarthdataAuthMixin + +EarthdataAuthMixin + +auth +s3login_credentials +session + +earthdata_login(uid, email, s3token): None + + + icepyx.core.query.GenQuery - -GenQuery - - - + +GenQuery + + + - + icepyx.core.granules.Granules - -Granules - -avail : list -orderIDs : list - -download(verbose, path, session, restart) -get_avail(CMRparams, reqparams, cloud) -place_order(CMRparams, reqparams, subsetparams, verbose, subset, session, geom_filepath) + +Granules + +avail : list +orderIDs : list + +download(verbose, path, session, restart) +get_avail(CMRparams, reqparams, cloud) +place_order(CMRparams, reqparams, subsetparams, verbose, subset, session, geom_filepath) - + icepyx.core.query.Query - -Query - -CMRparams -cycles -dataset -dates -end_time -file_vars -granules -order_vars -product -product_version -reqparams -spatial -spatial_extent -start_time -tracks - -avail_granules(ids, cycles, tracks, cloud) -download_granules(path, verbose, subset, restart) -earthdata_login(uid, email, s3token): None -latest_version() -order_granules(verbose, subset, email) -product_all_info() -product_summary_info() -show_custom_options(dictview) -subsetparams() -visualize_elevation() -visualize_spatial_extent() + +Query + +CMRparams +cycles +dataset +dates +end_time +file_vars +granules +order_vars +product +product_version +reqparams +spatial +spatial_extent +start_time +temporal +tracks + +avail_granules(ids, cycles, tracks, cloud) +download_granules(path, verbose, subset, restart) +latest_version() +order_granules(verbose, subset, email) +product_all_info() 
+product_summary_info() +show_custom_options(dictview) +subsetparams() +visualize_elevation() +visualize_spatial_extent() - + icepyx.core.granules.Granules->icepyx.core.query.Query - - -_granules + + +_granules - + icepyx.core.granules.Granules->icepyx.core.query.Query - - -_granules + + +_granules - + icepyx.core.icesat2data.Icesat2Data - -Icesat2Data - - - + +Icesat2Data + + + - + icepyx.core.exceptions.NsidcQueryError - -NsidcQueryError - -errmsg -msgtxt : str - - + +NsidcQueryError + +errmsg +msgtxt : str + + - + icepyx.core.exceptions.QueryError - -QueryError - - - + +QueryError + + + icepyx.core.exceptions.NsidcQueryError->icepyx.core.exceptions.QueryError - - + + - + icepyx.core.APIformatting.Parameters - -Parameters - -fmted_keys -partype -poss_keys - -build_params() -check_req_values() -check_values() + +Parameters + +fmted_keys +partype +poss_keys + +build_params() +check_req_values() +check_values() - + icepyx.core.APIformatting.Parameters->icepyx.core.query.Query - - -_CMRparams + + +_CMRparams - + icepyx.core.APIformatting.Parameters->icepyx.core.query.Query - - -_reqparams + + +_reqparams - + icepyx.core.APIformatting.Parameters->icepyx.core.query.Query - - -_subsetparams + + +_subsetparams - + icepyx.core.APIformatting.Parameters->icepyx.core.query.Query - - -_subsetparams + + +_subsetparams - + +icepyx.core.query.Query->icepyx.core.auth.EarthdataAuthMixin + + + + + icepyx.core.query.Query->icepyx.core.query.GenQuery - - + + - + icepyx.core.read.Read - -Read - -data_source -is2catalog -vars - -load() + +Read + +data_source +vars + +load() - + icepyx.core.spatial.Spatial - -Spatial - -extent -extent_as_gdf -extent_file -extent_type - -fmt_for_CMR() -fmt_for_EGI() + +Spatial + +extent +extent_as_gdf +extent_file +extent_type + +fmt_for_CMR() +fmt_for_EGI() - + icepyx.core.spatial.Spatial->icepyx.core.query.GenQuery - - -_spatial + + +_spatial - + icepyx.core.spatial.Spatial->icepyx.core.query.GenQuery - - -_spatial + + +_spatial + + + 
+icepyx.core.temporal.Temporal + +Temporal + +end +start + + + + + +icepyx.core.temporal.Temporal->icepyx.core.query.GenQuery + + +_temporal - + icepyx.core.variables.Variables - -Variables - -path : NoneType -product : NoneType -wanted : NoneType, dict - -append(defaults, var_list, beam_list, keyword_list) -avail(options, internal) -parse_var_list(varlist, tiered, tiered_vars) -remove(all, var_list, beam_list, keyword_list) + +Variables + +path : NoneType +product : NoneType +wanted : NoneType, dict + +append(defaults, var_list, beam_list, keyword_list) +avail(options, internal) +parse_var_list(varlist, tiered, tiered_vars) +remove(all, var_list, beam_list, keyword_list) + + + +icepyx.core.variables.Variables->icepyx.core.auth.EarthdataAuthMixin + + - + icepyx.core.variables.Variables->icepyx.core.query.Query - - -_order_vars + + +_order_vars - + icepyx.core.variables.Variables->icepyx.core.query.Query - - -_order_vars + + +_order_vars - + icepyx.core.variables.Variables->icepyx.core.query.Query - - -_file_vars + + +_file_vars - + icepyx.core.variables.Variables->icepyx.core.read.Read - - -_read_vars + + +_read_vars - + icepyx.core.visualization.Visualize - -Visualize - -bbox : list -cycles : NoneType -date_range : NoneType -product : NoneType, str -tracks : NoneType - -generate_OA_parameters(): list -grid_bbox(binsize): list -make_request(base_url, payload) -parallel_request_OA(): da.array -query_icesat2_filelist(): tuple -request_OA_data(paras): da.array -viz_elevation(): (hv.DynamicMap, hv.Layout) + +Visualize + +bbox : list +cycles : NoneType +date_range : NoneType +product : NoneType, str +tracks : NoneType + +generate_OA_parameters(): list +grid_bbox(binsize): list +make_request(base_url, payload) +parallel_request_OA(): da.array +query_icesat2_filelist(): tuple +request_OA_data(paras): da.array +viz_elevation(): (hv.DynamicMap, hv.Layout) diff --git a/doc/source/user_guide/documentation/components.rst b/doc/source/user_guide/documentation/components.rst 
index 7e3d6e769..b4b658385 100644 --- a/doc/source/user_guide/documentation/components.rst +++ b/doc/source/user_guide/documentation/components.rst @@ -10,11 +10,11 @@ APIformatting :members: :undoc-members: :show-inheritance: + +EarthdataAuthMixin +------------------ -spatial ----------- - -.. automodule:: icepyx.core.spatial +.. automodule:: icepyx.core.auth :members: :undoc-members: :show-inheritance: @@ -43,6 +43,22 @@ is2ref :undoc-members: :show-inheritance: +spatial +---------- + +.. automodule:: icepyx.core.spatial + :members: + :undoc-members: + :show-inheritance: + +temporal +---------- + +.. automodule:: icepyx.core.temporal + :members: + :undoc-members: + :show-inheritance: + validate\_inputs ---------------- diff --git a/doc/source/user_guide/documentation/packages_user_uml.svg b/doc/source/user_guide/documentation/packages_user_uml.svg index 9983d732b..44a041c77 100644 --- a/doc/source/user_guide/documentation/packages_user_uml.svg +++ b/doc/source/user_guide/documentation/packages_user_uml.svg @@ -4,11 +4,11 @@ - - + + packages_user_uml - + icepyx.core @@ -21,95 +21,119 @@ icepyx.core.APIformatting - + +icepyx.core.auth + +icepyx.core.auth + + + icepyx.core.exceptions - -icepyx.core.exceptions + +icepyx.core.exceptions - + icepyx.core.granules - -icepyx.core.granules + +icepyx.core.granules - -icepyx.core.icesat2data - -icepyx.core.icesat2data - - -icepyx.core.is2cat - -icepyx.core.is2cat +icepyx.core.icesat2data + +icepyx.core.icesat2data icepyx.core.is2ref - -icepyx.core.is2ref + +icepyx.core.is2ref icepyx.core.query - -icepyx.core.query + +icepyx.core.query - + +icepyx.core.query->icepyx.core.auth + + + + + icepyx.core.query->icepyx.core.granules - - + + - + icepyx.core.variables - -icepyx.core.variables + +icepyx.core.variables - + icepyx.core.query->icepyx.core.variables - - + + - + icepyx.core.visualization - -icepyx.core.visualization + +icepyx.core.visualization - + icepyx.core.query->icepyx.core.visualization - - + + icepyx.core.read - 
-icepyx.core.read + +icepyx.core.read + + + +icepyx.core.read->icepyx.core.exceptions + + - + icepyx.core.read->icepyx.core.variables - - + + icepyx.core.spatial - -icepyx.core.spatial + +icepyx.core.spatial - + +icepyx.core.temporal + +icepyx.core.temporal + + + icepyx.core.validate_inputs - -icepyx.core.validate_inputs + +icepyx.core.validate_inputs + + + +icepyx.core.variables->icepyx.core.auth + + diff --git a/doc/source/user_guide/documentation/query.rst b/doc/source/user_guide/documentation/query.rst index f27e3e6fd..168986548 100644 --- a/doc/source/user_guide/documentation/query.rst +++ b/doc/source/user_guide/documentation/query.rst @@ -21,18 +21,19 @@ Attributes Query.CMRparams Query.cycles - Query.product - Query.product_version Query.dates Query.end_time Query.file_vars Query.granules Query.order_vars + Query.product + Query.product_version Query.reqparams Query.spatial Query.spatial_extent Query.subsetparams Query.start_time + Query.temporal Query.tracks Methods @@ -42,12 +43,12 @@ Methods :toctree: ../../_icepyx/ Query.avail_granules - Query.product_all_info - Query.product_summary_info Query.download_granules Query.earthdata_login Query.latest_version Query.order_granules + Query.product_all_info + Query.product_summary_info Query.show_custom_options Query.visualize_spatial_extent Query.visualize_elevation \ No newline at end of file diff --git a/doc/source/user_guide/documentation/read.rst b/doc/source/user_guide/documentation/read.rst index b076ef210..a5beedf4e 100644 --- a/doc/source/user_guide/documentation/read.rst +++ b/doc/source/user_guide/documentation/read.rst @@ -19,7 +19,6 @@ Attributes .. 
autosummary:: :toctree: ../../_icepyx/ - Read.is2catalog Read.vars diff --git a/icepyx/core/auth.py b/icepyx/core/auth.py new file mode 100644 index 000000000..7c36126f9 --- /dev/null +++ b/icepyx/core/auth.py @@ -0,0 +1,145 @@ +import copy +import datetime +import warnings + +import earthaccess + +class AuthenticationError(Exception): + ''' + Raised when an error is encountered while authenticating Earthdata credentials + ''' + pass + + +class EarthdataAuthMixin(): + """ + This mixin class generates the needed authentication sessions and tokens, including for NASA Earthdata cloud access. + Authentication is completed using the [earthaccess library](https://nsidc.github.io/earthaccess/). + Methods for authenticating are: + 1. Storing credentials as environment variables ($EARTHDATA_LOGIN and $EARTHDATA_PASSWORD) + 2. Entering credentials interactively + 3. Storing credentials in a .netrc file (not recommended for security reasons) + More details on using these methods is available in the [earthaccess documentation](https://nsidc.github.io/earthaccess/tutorials/restricted-datasets/#auth). + + This class can be inherited by any other class that requires authentication. For + example, the `Query` class inherits this one, and so a Query object has the + `.session` property. The method `earthdata_login()` is included for backwards compatibility. + + The class can be created without any initialization parameters, and the properties will + be populated when they are called. It can alternately be initialized with an + earthaccess.auth.Auth object, which will then be used to create a session or + s3login_credentials as they are called. + + Parameters + ---------- + auth : earthaccess.auth.Auth, default None + Optional parameter to initialize an object with existing credentials. 
+ + Examples + -------- + >>> a = EarthdataAuthMixin() + >>> a.session # doctest: +SKIP + >>> a.s3login_credentials # doctest: +SKIP + """ + def __init__(self, auth=None): + self._auth = copy.deepcopy(auth) + # initializatin of session and s3 creds is not allowed because those are generated + # from the auth object + self._session = None + self._s3login_credentials = None + self._s3_initial_ts = None # timer for 1h expiration on s3 credentials + + def __str__(self): + if self.session: + repr_string = "EarthdataAuth obj with session initialized" + else: + repr_string = "EarthdataAuth obj without session initialized" + return repr_string + + @property + def auth(self): + ''' + Authentication object returned from earthaccess.login() which stores user authentication. + ''' + # Only login the first time .auth is accessed + if self._auth is None: + auth = earthaccess.login() + # check for a valid auth response + if auth.authenticated is False: + raise AuthenticationError('Earthdata authentication failed. Check output for error message') + else: + self._auth = auth + + return self._auth + + @property + def session(self): + ''' + Earthaccess session object for connecting to Earthdata resources. + ''' + # Only generate a session the first time .session is accessed + if self._session is None: + self._session = self.auth.get_session() + return self._session + + @property + def s3login_credentials(self): + ''' + A dictionary which stores login credentials for AWS s3 access. This property is accessed + if using AWS cloud data. + + Because s3 tokens are only good for one hour, this function will automatically check if an + hour has elapsed since the last token use and generate a new token if necessary. 
+ ''' + + def set_s3_creds(): + ''' Store s3login creds from `auth`and reset the last updated timestamp''' + self._s3login_credentials = self.auth.get_s3_credentials(daac="NSIDC") + self._s3_initial_ts = datetime.datetime.now() + + # Only generate s3login_credentials the first time credentials are accessed, or if an hour + # has passed since the last login + if self._s3login_credentials is None: + set_s3_creds() + elif (datetime.datetime.now() - self._s3_initial_ts) >= datetime.timedelta(hours=1): + set_s3_creds() + return self._s3login_credentials + + def earthdata_login(self, uid=None, email=None, s3token=None, **kwargs) -> None: + """ + Authenticate with NASA Earthdata to enable data ordering and download. + Credential storage details are described in the EathdataAuthMixin class section. + + **Note:** This method is maintained for backward compatibility. It is no longer required to explicitly run `.earthdata_login()`. Authentication will be performed by the module as needed when `.session` or `.s3login_credentials` are accessed. + + Parameters + ---------- + uid : string, default None + Deprecated keyword for Earthdata login user ID. + email : string, default None + Deprecated keyword for backwards compatibility. + s3token : boolean, default None + Deprecated keyword to generate AWS s3 ICESat-2 data access credentials + kwargs : key:value pairs + Keyword arguments to be passed into earthaccess.login(). + + Examples + -------- + >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) # doctest: +SKIP + >>> reg_a.earthdata_login() # doctest: +SKIP + Enter your Earthdata Login username: ___________________ + + EARTHDATA_USERNAME and EARTHDATA_PASSWORD are not set in the current environment, try setting them or use a different strategy (netrc, interactive) + No .netrc found in /Users/username + + """ + warnings.warn( + "It is no longer required to explicitly run the `.earthdata_login()` method. 
Authentication will be performed by the module as needed.", + DeprecationWarning, stacklevel=2 + ) + + if uid != None or email != None or s3token != None: + warnings.warn( + "The user id (uid) and/or email keyword arguments are no longer required.", + DeprecationWarning, stacklevel=2 + ) diff --git a/icepyx/core/exceptions.py b/icepyx/core/exceptions.py index 2c29f1fa0..a36a1b645 100644 --- a/icepyx/core/exceptions.py +++ b/icepyx/core/exceptions.py @@ -1,3 +1,10 @@ +class DeprecationError(Exception): + """ + Class raised for use of functionality that is no longer supported by icepyx. + """ + pass + + class QueryError(Exception): """ Base class for Query object exceptions @@ -20,3 +27,5 @@ def __init__( def __str__(self): return f"{self.msgtxt}: {self.errmsg}" + + diff --git a/icepyx/core/granules.py b/icepyx/core/granules.py index 81c848246..a0849968e 100644 --- a/icepyx/core/granules.py +++ b/icepyx/core/granules.py @@ -35,7 +35,7 @@ def info(grans): # DevNote: currently this fn is not tested # DevNote: could add flag to separate ascending and descending orbits based on ATL03 granule region -def gran_IDs(grans, ids=True, cycles=False, tracks=False, dates=False, cloud=False): +def gran_IDs(grans, ids=False, cycles=False, tracks=False, dates=False, cloud=False): """ Returns a list of granule information for each granule dictionary in the input list of granule dictionaries. Granule info may be from a list of those available from NSIDC (for ordering/download) @@ -55,9 +55,6 @@ def gran_IDs(grans, ids=True, cycles=False, tracks=False, dates=False, cloud=Fal Return a list of the available dates for the granule dictionary. cloud : boolean, default False Return a a list of AWS s3 urls for the available granules in the granule dictionary. - Note: currently, NSIDC does not provide metadata on which granules are available on s3. - Thus, all of the urls may not be valid and may return FileNotFoundErrors. - s3 data access is currently limited access to beta testers. 
""" assert len(grans) > 0, "Your data object has no granules associated with it" # regular expression for extracting parameters from file names @@ -74,19 +71,15 @@ def gran_IDs(grans, ids=True, cycles=False, tracks=False, dates=False, cloud=Fal producer_granule_id = gran["producer_granule_id"] gran_ids.append(producer_granule_id) - prod = int(gran["producer_granule_id"][3:5]) - - # manual creation of s3 urls for ATL15 for FOGSS March 2023 workshop - # note that s3 urls were not available in the CMR metadata retrieved by icepyx at the time of implementation - if prod==15 and cloud==True: - url = r"s3://nsidc-cumulus-prod-protected/ATLAS/ATL15/002/2019/{}".format(producer_granule_id) - gran_s3urls.append(url) - - elif prod == 11 or prod > 13: - warnings.warn("We are still working in implementing ID generation for this data product.", UserWarning) - continue + if cloud == True: + try: + for link in gran["links"]: + if link["href"].startswith("s3") and link["href"].endswith(".h5"): + gran_s3urls.append(link["href"]) + except KeyError: + pass - else: + if any([param == True for param in [cycles, tracks, dates]]): # PRD: ICESat-2 product # HEM: Sea Ice Hemisphere flag # YY,MM,DD,HH,MN,SS: Year, Month, Day, Hour, Minute, Second @@ -120,13 +113,6 @@ def gran_IDs(grans, ids=True, cycles=False, tracks=False, dates=False, cloud=Fal str(datetime.datetime(year=int(YY), month=int(MM), day=int(DD)).date()) ) - try: - for link in gran["links"]: - if link["href"].startswith("s3") and link["href"].endswith(".h5"): - gran_s3urls.append(link["href"]) - except KeyError: - pass - # list of granule parameters gran_list = [] # granule IDs @@ -190,8 +176,8 @@ def get_avail(self, CMRparams, reqparams, cloud=False): reqparams : dictionary Dictionary of properly formatted parameters required for searching, ordering, or downloading from NSIDC. - cloud : boolean, default False - Whether or not you want data available in the cloud (versus on premises). 
+ cloud : deprecated, boolean, default False + CMR metadata is always collected for the cloud system. Notes ----- @@ -216,13 +202,10 @@ def get_avail(self, CMRparams, reqparams, cloud=False): headers = {"Accept": "application/json", "Client-Id": "icepyx"} # note we should also check for errors whenever we ping NSIDC-API - make a function to check for errors - if cloud: - prov_flag = "NSIDC_CPRD" - else: - prov_flag = "NSIDC_ECS" - params = apifmt.combine_params( - CMRparams, {k: reqparams[k] for k in ["page_size"]}, {"provider": prov_flag} + CMRparams, + {k: reqparams[k] for k in ["page_size"]}, + {"provider": "NSIDC_CPRD"}, ) cmr_search_after = None diff --git a/icepyx/core/is2cat.py b/icepyx/core/is2cat.py deleted file mode 100644 index f4e66a7bf..000000000 --- a/icepyx/core/is2cat.py +++ /dev/null @@ -1,178 +0,0 @@ -from intake.catalog import Catalog - -# Need to post on intake's page to see if this would be a useful contribution... -# https://github.com/intake/intake/blob/0.6.4/intake/source/utils.py#L216 -def _pattern_to_glob(pattern): - """ - Adapted from intake.source.utils.path_to_glob to convert a path as pattern into a glob style path - that uses the pattern's indicated number of '?' instead of '*' where an int was specified. - - Returns pattern if pattern is not a string. - - Parameters - ---------- - pattern : str - Path as pattern optionally containing format_strings - - Returns - ------- - glob_path : str - Path with int format strings replaced with the proper number of '?' and '*' otherwise. 
- - Examples - -------- - >>> _pattern_to_glob('{year}/{month}/{day}.csv') - '*/*/*.csv' - >>> _pattern_to_glob('{year:4}/{month:2}/{day:2}.csv') - '????/??/??.csv' - >>> _pattern_to_glob('data/{year:4}{month:02}{day:02}.csv') - 'data/????????.csv' - >>> _pattern_to_glob('data/*.csv') - 'data/*.csv' - """ - from string import Formatter - - if not isinstance(pattern, str): - return pattern - - fmt = Formatter() - glob_path = "" - # prev_field_name = None - for literal_text, field_name, format_specs, _ in fmt.parse(format_string=pattern): - glob_path += literal_text - if field_name and (glob_path != "*"): - try: - glob_path += "?" * int(format_specs) - except ValueError: - glob_path += "*" - # alternatively, you could use bits=utils._get_parts_of_format_string(resolved_string, literal_texts, format_specs) - # and then use len(bits[i]) to get the length of each format_spec - # print(glob_path) - return glob_path - - -def build_catalog( - data_source, - path_pattern, - source_type, - grp_paths=None, - grp_path_params=None, - extra_engine_kwargs=None, - **kwargs -): - """ - Build a general Intake catalog for reading in ICESat-2 data. - This function is used by the read class object to create catalogs from lists of ICESat-2 variables. - - Parameters - ---------- - data_source : string - A string with a full file path or full directory path to ICESat-2 hdf5 (.h5) format files. - Files within a directory must have a consistent filename pattern that includes the "ATL??" data product name. - Files must all be within a single directory. - - path_pattern : string - String that shows the filename pattern as required for Intake's path_as_pattern argument. - - source_type : string - String to use as the Local Catalog Entry name. - - grp_paths : str, default None - Variable paths to load. - Can include general parameter names, which must be contained within double curly brackets and further - described in `grp_path_params`. - Default list based on data product of provided files. 
- If multiple data products are included in the files, the default list will be for the product of the first file. - This may result in errors during read-in if all files do not have the same variable paths. - - grp_path_params : [dict], default None - List of dictionaries with a keyword for each parameter name specified in the `grp_paths` string. - Each parameter keyword should contain a dictionary with the acceptable keyword-value pairs for the driver being used. - - **kwargs : - Keyword arguments to be passed through to `intake.catalog.Catalog.from_dict()`. - Keywords needed to override default inputs include: - - `source_args_dict` # highest level source information; keys include: "urlpath", "path_as_pattern", driver-specific ("xarray_kwargs" is default) - - `metadata_dict` - - `source_dict` # individual source entry information (default is supplied by data object; "name", "description", "driver", "args") - - `defaults_dict` # catalog "name", "description", "metadata", "entries", etc. - - Returns - ------- - intake.catalog.Catalog object - - See Also - -------- - read.Read - - """ - from intake.catalog.local import LocalCatalogEntry, UserParameter - import intake_xarray - - import icepyx.core.APIformatting as apifmt - - assert ( - grp_paths - ), "You must enter a variable path or you will not be able to read in any data." - - # generalize this/make it so the [engine] values can be entered as kwargs... 
- engine_key = "xarray_kwargs" - xarray_kwargs_dict = {"engine": "h5netcdf", "group": grp_paths} - if extra_engine_kwargs: - for key in extra_engine_kwargs.keys(): - xarray_kwargs_dict[key] = extra_engine_kwargs[key] - - source_args_dict = { - "urlpath": data_source, - "path_as_pattern": path_pattern, - engine_key: xarray_kwargs_dict, - } - - metadata_dict = {"version": 1} - - source_dict = { - "name": source_type, - "description": "", - "driver": "intake_xarray.netcdf.NetCDFSource", # NOTE: this must be a string or the catalog cannot be imported after saving - "args": source_args_dict, - } - - if grp_path_params: - source_dict = apifmt.combine_params( - source_dict, - {"parameters": [UserParameter(**params) for params in grp_path_params]}, - ) - - # NOTE: LocalCatalogEntry has some required positional args (name, description, driver) - # I tried doing this generally with *source_dict after the positional args (instead of as part of the if) - # but apparently I don't quite get something about passing dicts with * and ** and couldn't make it work - local_cat_source = { - source_type: LocalCatalogEntry( - name=source_dict.pop("name"), - description=source_dict.pop("description"), - driver=source_dict.pop("driver"), - parameters=source_dict.pop("parameters"), - args=source_dict.pop("args"), - ) - } - - else: - local_cat_source = { - source_type: LocalCatalogEntry( - name=source_dict.pop("name"), - description=source_dict.pop("description"), - driver=source_dict.pop("driver"), - args=source_dict.pop("args"), - ) - } - - defaults_dict = { - "name": "IS2-hdf5-icepyx-intake-catalog", - "description": "an icepyx-generated catalog for creating local ICESat-2 intake entries", - "metadata": metadata_dict, - "entries": local_cat_source, - } - - build_cat_dict = apifmt.combine_params(defaults_dict, kwargs) - - return Catalog.from_dict(**build_cat_dict) diff --git a/icepyx/core/is2ref.py b/icepyx/core/is2ref.py index 883772a9e..52cf0e3a1 100644 --- a/icepyx/core/is2ref.py +++ 
b/icepyx/core/is2ref.py @@ -39,6 +39,7 @@ def _validate_product(product): "ATL19", "ATL20", "ATL21", + "ATL23", ], "Please enter a valid product" else: raise TypeError("Please enter a product string") diff --git a/icepyx/core/query.py b/icepyx/core/query.py index ad50ad497..e8f1d8e7c 100644 --- a/icepyx/core/query.py +++ b/icepyx/core/query.py @@ -1,25 +1,26 @@ import datetime as dt -import earthaccess -import os -import json -import warnings -import pprint -import time import geopandas as gpd +import json import matplotlib.pyplot as plt import numpy as np +import os from pathlib import Path +import pprint +import time +import warnings import icepyx.core.APIformatting as apifmt -import icepyx.core.is2ref as is2ref +from icepyx.core.auth import EarthdataAuthMixin import icepyx.core.granules as granules from icepyx.core.granules import Granules as Granules +import icepyx.core.is2ref as is2ref # QUESTION: why doesn't from granules import Granules as Granules work, since granules=icepyx.core.granules? # from icepyx.core.granules import Granules -from icepyx.core.variables import Variables as Variables -import icepyx.core.validate_inputs as val import icepyx.core.spatial as spat +import icepyx.core.temporal as tp +import icepyx.core.validate_inputs as val +from icepyx.core.variables import Variables as Variables from icepyx.core.visualization import Visualize @@ -50,19 +51,30 @@ class GenQuery: Geospatial polygon files are entered as strings with the full file path and must contain only one polygon with the area of interest. Currently supported formats are: kml, shp, and gpkg - date_range : list of 'YYYY-MM-DD' strings + date_range : list or dict, as follows Date range of interest, provided as start and end dates, inclusive. - The required date format is 'YYYY-MM-DD' strings, where - YYYY = 4 digit year, MM = 2 digit month, DD = 2 digit day. 
+ Accepted input date formats are: + * YYYY-MM-DD string + * YYYY-DOY string + * datetime.date object (if times are included) + * datetime.datetime objects (if no times are included) + where YYYY = 4 digit year, MM = 2 digit month, DD = 2 digit day, DOY = 3 digit day of year. + Date inputs are accepted as a list or dictionary with `start_date` and `end_date` keys. Currently, a list of specific dates (rather than a range) is not accepted. - TODO: accept date-time objects, dicts (with 'start_date' and 'end_date' keys, and DOY inputs). TODO: allow searches with a list of dates, rather than a range. - start_time : HH:mm:ss, default 00:00:00 - Start time in UTC/Zulu (24 hour clock). If None, use default. - TODO: check for time in date-range date-time object, if that's used for input. - end_time : HH:mm:ss, default 23:59:59 - End time in UTC/Zulu (24 hour clock). If None, use default. - TODO: check for time in date-range date-time object, if that's used for input. + start_time : str, datetime.time, default None + Start time in UTC/Zulu (24 hour clock). + Input types are an HH:mm:ss string or datetime.time object + where HH = hours, mm = minutes, ss = seconds. + If None is given (and a datetime.datetime object is not supplied for `date_range`), + a default of 00:00:00 is applied. + end_time : str, datetime.time, default None + End time in UTC/Zulu (24 hour clock). + Input types are an HH:mm:ss string or datetime.time object + where HH = hours, mm = minutes, ss = seconds. + If None is given (and a datetime.datetime object is not supplied for `date_range`), + a default of 23:59:59 is applied. + If a datetime.datetime object was created without times, the datetime package defaults will apply over those of icepyx xdateline : boolean, default None Keyword argument to enforce spatial inputs that cross the International Date Line. 
Internally, this will translate your longitudes to 0 to 360 to construct the @@ -124,16 +136,15 @@ def __init__( self._spatial = spat.Spatial(spatial_extent) # valiidate and init temporal constraints - # TODO: Update this to use Temporal class when completed if date_range: - self._start, self._end = val.temporal(date_range, start_time, end_time) + self._temporal = tp.Temporal(date_range, start_time, end_time) def __str__(self): str = "Extent type: {0} \nCoordinates: {1}\nDate range: ({2}, {3})".format( self._spatial._ext_type, self._spatial._spatial_ext, - self._start, - self._end, + self._temporal._start, + self._temporal._end, ) return str @@ -141,7 +152,7 @@ def __str__(self): # DevGoal: update docs throughout to allow for polygon spatial extent # Note: add files to docstring once implemented # DevNote: currently this class is not tested -class Query(GenQuery): +class Query(GenQuery, EarthdataAuthMixin): """ Query and get ICESat-2 data @@ -182,7 +193,7 @@ class Query(GenQuery): >>> reg_a_dates = ['2019-02-20','2019-02-28'] >>> reg_a = Query('ATL06', reg_a_bbox, reg_a_dates) >>> print(reg_a) - Product ATL06 v005 + Product ATL06 v006 ('bounding_box', [-55.0, 68.0, -48.0, 71.0]) Date range ['2019-02-20', '2019-02-28'] @@ -200,7 +211,7 @@ class Query(GenQuery): >>> reg_a_dates = ['2019-02-22','2019-02-28'] >>> reg_a = Query('ATL06', aoi, reg_a_dates) >>> print(reg_a) - Product ATL06 v005 + Product ATL06 v006 ('polygon', [-55.0, 68.0, -55.0, 71.0, -48.0, 71.0, -48.0, 68.0, -55.0, 68.0]) Date range ['2019-02-22', '2019-02-28'] @@ -223,6 +234,7 @@ def __init__( cycles=None, tracks=None, files=None, # NOTE: if you end up implemeting this feature here, use a better variable name than "files" + auth=None, **kwargs, ): @@ -265,6 +277,8 @@ def __init__( self._prod, cycles=self.cycles, tracks=self.tracks ) + # initialize authentication properties + EarthdataAuthMixin.__init__(self) # ---------------------------------------------------------------------- # Properties @@ 
-311,14 +325,42 @@ def product_version(self): -------- >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> reg_a.product_version - '005' + '006' - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], version='1') + >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], version='4') >>> reg_a.product_version - '001' + '004' """ return self._version + @property + def temporal(self): + """ + Return the Temporal object containing date/time range information for the query object. + + See Also + -------- + temporal.Temporal.start + temporal.Temporal.end + temporal.Temporal + + Examples + -------- + >>> reg_a = Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) + >>> print(reg_a.temporal) + Start date and time: 2019-02-20 00:00:00 + End date and time: 2019-02-28 23:59:59 + + >>> reg_a = Query('ATL06',[-55, 68, -48, 71],cycles=['03','04','05','06','07'], tracks=['0849','0902']) + >>> print(reg_a.temporal) + ['No temporal parameters set'] + """ + + if hasattr(self, "_temporal"): + return self._temporal + else: + return ["No temporal parameters set"] + @property def spatial(self): """ @@ -398,13 +440,17 @@ def dates(self): >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> reg_a.dates ['2019-02-20', '2019-02-28'] + + >>> reg_a = Query('ATL06',[-55, 68, -48, 71],cycles=['03','04','05','06','07'], tracks=['0849','0902']) + >>> reg_a.dates + ['No temporal parameters set'] """ - if not hasattr(self, "_start"): + if not hasattr(self, "_temporal"): return ["No temporal parameters set"] else: return [ - self._start.strftime("%Y-%m-%d"), - self._end.strftime("%Y-%m-%d"), + self._temporal._start.strftime("%Y-%m-%d"), + self._temporal._end.strftime("%Y-%m-%d"), ] # could also use self._start.date() @property @@ -421,11 +467,15 @@ def start_time(self): >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], start_time='12:30:30') >>> 
reg_a.start_time '12:30:30' + + >>> reg_a = Query('ATL06',[-55, 68, -48, 71],cycles=['03','04','05','06','07'], tracks=['0849','0902']) + >>> reg_a.start_time + ['No temporal parameters set'] """ - if not hasattr(self, "_start"): + if not hasattr(self, "_temporal"): return ["No temporal parameters set"] else: - return self._start.strftime("%H:%M:%S") + return self._temporal._start.strftime("%H:%M:%S") @property def end_time(self): @@ -441,11 +491,15 @@ def end_time(self): >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], end_time='10:20:20') >>> reg_a.end_time '10:20:20' + + >>> reg_a = Query('ATL06',[-55, 68, -48, 71],cycles=['03','04','05','06','07'], tracks=['0849','0902']) + >>> reg_a.end_time + ['No temporal parameters set'] """ - if not hasattr(self, "_end"): + if not hasattr(self, "_temporal"): return ["No temporal parameters set"] else: - return self._end.strftime("%H:%M:%S") + return self._temporal._end.strftime("%H:%M:%S") @property def cycles(self): @@ -497,7 +551,7 @@ def CMRparams(self): >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> reg_a.CMRparams {'short_name': 'ATL06', - 'version': '005', + 'version': '006', 'temporal': '2019-02-20T00:00:00Z,2019-02-28T23:59:59Z', 'bounding_box': '-55.0,68.0,-48.0,71.0'} """ @@ -510,9 +564,9 @@ def CMRparams(self): # dictionary of optional CMR parameters kwargs = {} # temporal CMR parameters - if hasattr(self, "_start") and hasattr(self, "_end"): - kwargs["start"] = self._start - kwargs["end"] = self._end + if hasattr(self, "_temporal"): + kwargs["start"] = self._temporal._start + kwargs["end"] = self._temporal._end # granule name CMR parameters (orbital or file name) # DevGoal: add to file name search to optional queries if hasattr(self, "_readable_granule_name"): @@ -543,7 +597,6 @@ def reqparams(self): {'page_size': 2000} >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) # doctest: +SKIP - >>> reg_a.earthdata_login() # 
doctest: +SKIP >>> reg_a.order_granules() # doctest: +SKIP >>> reg_a.reqparams # doctest: +SKIP {'page_size': 2000, 'page_num': 1, 'request_mode': 'async', 'include_meta': 'Y', 'client_string': 'icepyx'} @@ -585,9 +638,9 @@ def subsetparams(self, **kwargs): self._subsetparams = apifmt.Parameters("subset") # temporal subsetting parameters - if hasattr(self, "_start") and hasattr(self, "_end"): - kwargs["start"] = self._start - kwargs["end"] = self._end + if hasattr(self, "temporal"): + kwargs["start"] = self._temporal._start + kwargs["end"] = self._temporal._end if self._subsetparams == None and not kwargs: return {} @@ -625,7 +678,6 @@ def order_vars(self): Examples -------- >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) # doctest: +SKIP - >>> reg_a.earthdata_login() # doctest: +SKIP >>> reg_a.order_vars # doctest: +SKIP """ @@ -636,14 +688,14 @@ def order_vars(self): if hasattr(self, "_cust_options"): self._order_vars = Variables( self._source, - session=self._session, + auth = self.auth, product=self.product, avail=self._cust_options["variables"], ) else: self._order_vars = Variables( self._source, - session=self._session, + auth=self.auth, product=self.product, version=self._version, ) @@ -669,14 +721,17 @@ def file_vars(self): Examples -------- >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) # doctest: +SKIP - >>> reg_a.earthdata_login() # doctest: +SKIP + >>> reg_a.file_vars # doctest: +SKIP """ if not hasattr(self, "_file_vars"): if self._source == "file": - self._file_vars = Variables(self._source, product=self.product) + self._file_vars = Variables(self._source, + auth=self.auth, + product=self.product, + ) return self._file_vars @@ -718,15 +773,15 @@ def product_summary_info(self): Examples -------- - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], version='005') + >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], version='006') >>> 
reg_a.product_summary_info() - title : ATLAS/ICESat-2 L3A Land Ice Height V005 + title : ATLAS/ICESat-2 L3A Land Ice Height V006 short_name : ATL06 - version_id : 005 + version_id : 006 time_start : 2018-10-14T00:00:00.000Z coordinate_system : CARTESIAN summary : This data set (ATL06) provides geolocated, land-ice surface heights (above the WGS 84 ellipsoid, ITRF2014 reference frame), plus ancillary parameters that can be used to interpret and assess the quality of the height estimates. The data were acquired by the Advanced Topographic Laser Altimeter System (ATLAS) instrument on board the Ice, Cloud and land Elevation Satellite-2 (ICESat-2) observatory. - orbit_parameters : {'swath_width': '36.0', 'period': '96.8', 'inclination_angle': '92.0', 'number_of_orbits': '0.071428571', 'start_circular_latitude': '0.0'} + orbit_parameters : {} """ if not hasattr(self, "_about_product"): self._about_product = is2ref.about_product(self._prod) @@ -765,7 +820,7 @@ def latest_version(self): -------- >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> reg_a.latest_version() - '005' + '006' """ if not hasattr(self, "_about_product"): self._about_product = is2ref.about_product(self._prod) @@ -787,7 +842,6 @@ def show_custom_options(self, dictview=False): Examples -------- >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) # doctest: +SKIP - >>> reg_a.earthdata_login() # doctest: +SKIP >>> reg_a.show_custom_options(dictview=True) # doctest: +SKIP Subsetting options [{'id': 'ICESAT2', @@ -835,7 +889,7 @@ def show_custom_options(self, dictview=False): all(key in self._cust_options.keys() for key in keys) except AttributeError or KeyError: self._cust_options = is2ref._get_custom_options( - self._session, self.product, self._version + self.session, self.product, self._version ) for h, k in zip(headers, keys): @@ -847,57 +901,7 @@ def show_custom_options(self, dictview=False): pprint.pprint(self._cust_options[k]) # 
---------------------------------------------------------------------- - # Methods - Login and Granules (NSIDC-API) - - def earthdata_login(self, uid=None, email=None, s3token=False, **kwargs) -> None: - """ - Authenticate with NASA Earthdata to enable data ordering and download. - - Generates the needed authentication sessions and tokens, including for cloud access. - Authentication is completed using the [earthaccess library](https://nsidc.github.io/earthaccess/). - Methods for authenticating are: - 1. Storing credentials as environment variables ($EARTHDATA_LOGIN and $EARTHDATA_PASSWORD) - 2. Entering credentials interactively - 3. Storing credentials in a .netrc file (not recommended for security reasons) - More details on using these methods is available in the [earthaccess documentation](https://nsidc.github.io/earthaccess/tutorials/restricted-datasets/#auth). - The input parameters listed here are provided for backwards compatibility; - before earthaccess existed, icepyx handled authentication and required these inputs. - - Parameters - ---------- - uid : string, default None - Deprecated keyword for Earthdata login user ID. - email : string, default None - Deprecated keyword for backwards compatibility. - s3token : boolean, default False - Deprecated keyword to generate AWS s3 ICESat-2 data access credentials - kwargs : key:value pairs - Keyword arguments to be passed into earthaccess.login(). 
- - Examples - -------- - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) # doctest: +SKIP - >>> reg_a.earthdata_login() # doctest: +SKIP - Enter your Earthdata Login username: ___________________ - - EARTHDATA_USERNAME and EARTHDATA_PASSWORD are not set in the current environment, try setting them or use a different strategy (netrc, interactive) - No .netrc found in /Users/username - - """ - - auth = earthaccess.login(**kwargs) - if auth.authenticated: - self._auth = auth - self._session = auth.get_session() - - if s3token == True: - self._s3login_credentials = auth.get_s3_credentials(daac="NSIDC") - - if uid != None or email != None: - warnings.warn( - "The user id (uid) and/or email keyword arguments are no longer required.", - DeprecationWarning, - ) + # Methods - Granules (NSIDC-API) # DevGoal: check to make sure the see also bits of the docstrings work properly in RTD def avail_granules(self, ids=False, cycles=False, tracks=False, cloud=False): @@ -928,12 +932,12 @@ def avail_granules(self, ids=False, cycles=False, tracks=False, cloud=False): >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> reg_a.avail_granules() {'Number of available granules': 4, - 'Average size of granules (MB)': 53.948360681525, - 'Total size of all granules (MB)': 215.7934427261} + 'Average size of granules (MB)': 55.166646003723145, + 'Total size of all granules (MB)': 220.66658401489258} >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-23']) >>> reg_a.avail_granules(ids=True) - [['ATL06_20190221121851_08410203_005_01.h5', 'ATL06_20190222010344_08490205_005_01.h5']] + [['ATL06_20190221121851_08410203_006_01.h5', 'ATL06_20190222010344_08490205_006_01.h5']] >>> reg_a.avail_granules(cycles=True) [['02', '02']] >>> reg_a.avail_granules(tracks=True) @@ -946,7 +950,7 @@ def avail_granules(self, ids=False, cycles=False, tracks=False, cloud=False): try: self.granules.avail except AttributeError: - 
self.granules.get_avail(self.CMRparams, self.reqparams, cloud=cloud) + self.granules.get_avail(self.CMRparams, self.reqparams) if ids or cycles or tracks or cloud: # list of outputs in order of ids, cycles, tracks, cloud @@ -995,7 +999,6 @@ def order_granules(self, verbose=False, subset=True, email=False, **kwargs): Examples -------- >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) # doctest: +SKIP - >>> reg_a.earthdata_login() # doctest: +SKIP >>> reg_a.order_granules() # doctest: +SKIP order ID: [###############] [order status output] @@ -1016,7 +1019,7 @@ def order_granules(self, verbose=False, subset=True, email=False, **kwargs): if "email" in self._reqparams.fmted_keys.keys() or email == False: self._reqparams.build_params(**self._reqparams.fmted_keys) elif email == True: - user_profile = self._auth.get_user_profile() + user_profile = self.auth.get_user_profile() self._reqparams.build_params( **self._reqparams.fmted_keys, email=user_profile["email_address"] ) @@ -1051,7 +1054,7 @@ def order_granules(self, verbose=False, subset=True, email=False, **kwargs): self.subsetparams(**kwargs), verbose, subset, - session=self._session, + session=self.session, geom_filepath=self._spatial._geom_file, ) @@ -1062,7 +1065,7 @@ def order_granules(self, verbose=False, subset=True, email=False, **kwargs): self.subsetparams(**kwargs), verbose, subset, - session=self._session, + session=self.session, geom_filepath=self._spatial._geom_file, ) @@ -1106,7 +1109,6 @@ def download_granules( Examples -------- >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) # doctest: +SKIP - >>> reg_a.earthdata_login() # doctest: +SKIP >>> reg_a.download_granules('/path/to/download/folder') # doctest: +SKIP Beginning download of zipped output... Data request [##########] of x order(s) is complete. 
@@ -1128,7 +1130,7 @@ def download_granules( ): self.order_granules(verbose=verbose, subset=subset, **kwargs) - self._granules.download(verbose, path, session=self._session, restart=restart) + self._granules.download(verbose, path, session=self.session, restart=restart) # DevGoal: add testing? What do we test, and how, given this is a visualization. # DevGoal(long term): modify this to accept additional inputs, etc. diff --git a/icepyx/core/read.py b/icepyx/core/read.py index 5a497279a..a7ee15db7 100644 --- a/icepyx/core/read.py +++ b/icepyx/core/read.py @@ -5,7 +5,7 @@ import numpy as np import xarray as xr -import icepyx.core.is2cat as is2cat +from icepyx.core.exceptions import DeprecationError import icepyx.core.is2ref as is2ref from icepyx.core.variables import Variables as Variables from icepyx.core.variables import list_of_dict_vals @@ -207,10 +207,61 @@ def _run_fast_scandir(dir, fn_glob): return subfolders, files +# Need to post on intake's page to see if this would be a useful contribution... +# https://github.com/intake/intake/blob/0.6.4/intake/source/utils.py#L216 +def _pattern_to_glob(pattern): + """ + Adapted from intake.source.utils.path_to_glob to convert a path as pattern into a glob style path + that uses the pattern's indicated number of '?' instead of '*' where an int was specified. + + Returns pattern if pattern is not a string. + + Parameters + ---------- + pattern : str + Path as pattern optionally containing format_strings + + Returns + ------- + glob_path : str + Path with int format strings replaced with the proper number of '?' and '*' otherwise. 
+ + Examples + -------- + >>> _pattern_to_glob('{year}/{month}/{day}.csv') + '*/*/*.csv' + >>> _pattern_to_glob('{year:4}/{month:2}/{day:2}.csv') + '????/??/??.csv' + >>> _pattern_to_glob('data/{year:4}{month:02}{day:02}.csv') + 'data/????????.csv' + >>> _pattern_to_glob('data/*.csv') + 'data/*.csv' + """ + from string import Formatter + + if not isinstance(pattern, str): + return pattern + + fmt = Formatter() + glob_path = "" + # prev_field_name = None + for literal_text, field_name, format_specs, _ in fmt.parse(format_string=pattern): + glob_path += literal_text + if field_name and (glob_path != "*"): + try: + glob_path += "?" * int(format_specs) + except ValueError: + glob_path += "*" + # alternatively, you could use bits=utils._get_parts_of_format_string(resolved_string, literal_texts, format_specs) + # and then use len(bits[i]) to get the length of each format_spec + # print(glob_path) + return glob_path + + # To do: test this class and functions therein class Read: """ - Data object to create and use Intake catalogs to read ICESat-2 data into the specified formats. + Data object to read ICESat-2 data into the specified formats. Provides flexiblity for reading nested hdf5 files into common analysis formats. Parameters @@ -228,10 +279,11 @@ class Read: String that shows the filename pattern as required for Intake's path_as_pattern argument. The default describes files downloaded directly from NSIDC (subsetted and non-subsetted) for most products (e.g. ATL06). The ATL11 filename pattern from NSIDC is: 'ATL{product:2}_{rgt:4}{orbitsegment:2}_{cycles:4}_{version:3}_{revision:2}.h5'. - + catalog : string, default None Full path to an Intake catalog for reading in data. If you still need to create a catalog, leave as default. + **Deprecation warning:** This argument has been depreciated. Please use the data_source argument to pass in valid data. out_obj_type : object, default xarray.Dataset The desired format for the data to be read in. 
@@ -258,6 +310,12 @@ def __init__( catalog=None, out_obj_type=None, # xr.Dataset, ): + # Raise error for depreciated argument + if catalog: + raise DeprecationError( + 'The `catalog` argument has been deprecated and intake is no longer supported. ' + 'Please use the `data_source` argument to specify your dataset instead.' + ) if data_source is None: raise ValueError("Please provide a data source.") @@ -271,7 +329,6 @@ def __init__( ) else: self._prod = is2ref._validate_product(product) - pattern_ck, filelist = Read._check_source_for_pattern( data_source, filename_pattern ) @@ -298,11 +355,6 @@ def __init__( self._filelist = filelist # after validation, use the notebook code and code outline to start implementing the rest of the class - if catalog is not None: - assert os.path.isfile( - catalog - ), f"Your catalog path '{catalog}' does not point to a valid file." - self._catalog_path = catalog if out_obj_type is not None: print( @@ -314,28 +366,6 @@ def __init__( # ---------------------------------------------------------------------- # Properties - @property - def is2catalog(self): - """ - Print a generic ICESat-2 Intake catalog. - This catalog does not specify groups, so it cannot be used to read in data. 
- - """ - if not hasattr(self, "_is2catalog") and hasattr(self, "_catalog_path"): - from intake import open_catalog - - self._is2catalog = open_catalog(self._catalog_path) - - else: - self._is2catalog = is2cat.build_catalog( - self.data_source, - self._pattern, - self._source_type, - grp_paths="/paths/to/variables", - ) - - return self._is2catalog - # I cut and pasted this directly out of the Query class - going to need to reconcile the _source/file stuff there @property @@ -370,7 +400,7 @@ def _check_source_for_pattern(source, filename_pattern): """ Check that the entered data source contains files that match the input filename_pattern """ - glob_pattern = is2cat._pattern_to_glob(filename_pattern) + glob_pattern = _pattern_to_glob(filename_pattern) if os.path.isdir(source): _, filelist = _run_fast_scandir(source, glob_pattern) @@ -601,9 +631,6 @@ def load(self): All items in the wanted variables list will be loaded from the files into memory. If you do not provide a wanted variables list, a default one will be created for you. - - If you would like to use the Intake catalog you provided to read in a single data variable, - simply call Intake's `read()` function on the is2catalog property (e.g. `reader.is2catalog.read()`). """ # todo: @@ -668,7 +695,7 @@ def _build_dataset_template(self, file): def _read_single_grp(self, file, grp_path): """ - For a given file and variable group path, construct an Intake catalog and use it to read in the data. + For a given file and variable group path, construct an xarray Dataset. 
Parameters ---------- @@ -685,24 +712,12 @@ def _read_single_grp(self, file, grp_path): """ - try: - grpcat = is2cat.build_catalog( - file, self._pattern, self._source_type, grp_paths=grp_path - ) - ds = grpcat[self._source_type].read() - - # NOTE: could also do this with h5py, but then would have to read in each variable in the group separately - except ValueError: - grpcat = is2cat.build_catalog( - file, - self._pattern, - self._source_type, - grp_paths=grp_path, - extra_engine_kwargs={"phony_dims": "access"}, - ) - ds = grpcat[self._source_type].read() - - return ds + return xr.open_dataset( + file, + group=grp_path, + engine="h5netcdf", + backend_kwargs={"phony_dims": "access"}, + ) def _build_single_file_dataset(self, file, groups_list): """ @@ -722,7 +737,6 @@ def _build_single_file_dataset(self, file, groups_list): ------- Xarray Dataset """ - file_product = self._read_single_grp(file, "/").attrs["identifier_product_type"] assert ( file_product == self._prod @@ -745,6 +759,7 @@ def _build_single_file_dataset(self, file, groups_list): "ATL19", "ATL20", "ATL21", + "ATL23", ]: is2ds = xr.open_dataset(file) diff --git a/icepyx/core/temporal.py b/icepyx/core/temporal.py new file mode 100644 index 000000000..c7e2dda1c --- /dev/null +++ b/icepyx/core/temporal.py @@ -0,0 +1,488 @@ +import datetime as dt +import warnings + + +""" +Helper functions for validation of dates +""" + + +def convert_string_to_date(date): + """ + Converts a string to a datetime object. + Throws an error if an invalid format is passed in. + + Parameters + ---------- + date: string + A string representation for the date value. Current supported date formats are: + * "YYYY-MM-DD" + * "YYYY-DOY" + + Returns + ------- + datetime.date object, representing the date from the string parameter. 
+ + Examples + -------- + >>> mmdd = "2016-01-01" + >>> converted = convert_string_to_date(mmdd) + >>> converted + datetime.date(2016, 1, 1) + + >>> doy = "2020-40" + >>> converted = convert_string_to_date(doy) + >>> converted + datetime.date(2020, 2, 9) + + """ + + for fmt in ("%Y-%m-%d", "%Y-%-j", "%Y-%j"): + try: + return dt.datetime.strptime(date, fmt).date() + except ValueError: + pass + raise ValueError( + "No valid date format found. The following formats are accepted:\n" + "%Y-%m-%d\n" + "%Y-%-j\n" # skips leading zeros + "%Y-%j\n" + ) + + +def check_valid_date_range(start, end): + + """ + Helper function for checking if a date range is valid. + + AssertionError is raised if the start date is later than the end date. + + Parameters + ---------- + start: datetime.datetime, datetime.date + Starting date of date range to check. + end: datetime.datetime, datetime.date + Ending date of date range to check + + Returns + ------- + boolean (true if date range is valid, false otherwise) + + Examples + -------- + >>> start = dt.datetime.strptime("2016-01-01", "%Y-%m-%d") + >>> end = dt.datetime.strptime("2020-01-01", "%Y-%m-%d") + >>> drange = check_valid_date_range(start, end) + >>> drange + + + >>> drange = check_valid_date_range(end, start) # doctest: +SKIP + AssertionError: Your date range is invalid; end date MUST be on or after the start date. + """ + if isinstance(start, dt.datetime): + start = start.date() + if isinstance(end, dt.datetime): + end = end.date() + assert ( + start <= end + ), "Your date range is invalid; end date MUST be on or after the start date." + + +def validate_times(start_time, end_time): + + """ + Validates the start and end times passed into __init__ and returns them as datetime.time objects. + + NOTE: If start and/or end times are not provided (are of type None), the defaults are 00:00:00 and 23:59:59, respectively. 
+ + Parameters + ---------- + start_time: string, datetime.time, None + end_time: string, datetime.time, None + + Returns + ------- + start_time, end_time as datetime.time objects + + Examples + -------- + >>> val_time = validate_times("00:00:00", "23:59:59") + >>> val_time + (datetime.time(0, 0), datetime.time(23, 59, 59)) + + """ + valid_time_types = (str, dt.time) + + # Validate start/end time types; then convert them to the appropriate datetime object + if start_time is not None: + # user specified a start time, need to first check if it's a valid type (if not, throw an AssertionError) + assert isinstance(start_time, valid_time_types), ( + "start_time must be one of the following types: \n" + "str (format: HH:MM:SS)\n" + "datetime.time" + ) + + # if start_time is a string, then it must be converted to a datetime using strptime + if isinstance(start_time, str): + start_time = dt.datetime.strptime(start_time, "%H:%M:%S").time() + else: + start_time = dt.time(0, 0, 0) + + if end_time is not None: + # user specified an end time, need to first check if it's a valid type (if not, throw an AssertionError) + assert isinstance(end_time, valid_time_types), ( + "end_time must be one of the following types: \n" + "str (format: HH:MM:SS)\n" + "datetime.time" + ) + # if end_time is a string, then it must be converted to a datetime using strptime + if not isinstance(end_time, dt.time): + end_time = dt.datetime.strptime(end_time, "%H:%M:%S").time() + else: + end_time = dt.time(23, 59, 59) + + return start_time, end_time + + +def validate_date_range_datestr(date_range, start_time=None, end_time=None): + + """ + Validates a date range provided in the form of a list of strings. + + Combines the start and end dates with their respective start and end times + to create complete start and end datetime.datetime objects. 
+ + Parameters + ---------- + date_range: list(str) + A date range provided in the form of a list of strings + Strings must be of formats accepted by validate_inputs_temporal.convert_string_to_date(). + List must be of length 2. + start_time: string, datetime.time, None + end_time: string, datetime.time, None + + Returns + ------- + Start and end dates and times as datetime.datetime objects + + Examples + -------- + >>> daterange = validate_date_range_datestr(["2016-01-01", "2020-01-01"]) + >>> daterange + (datetime.datetime(2016, 1, 1, 0, 0), datetime.datetime(2020, 1, 1, 23, 59, 59)) + + """ + # Check if start format is valid + _start = convert_string_to_date(date_range[0]) + + # Check if end format is valid + _end = convert_string_to_date(date_range[1]) + + # Check if date range passed in is valid + check_valid_date_range(_start, _end) + + start_time, end_time = validate_times(start_time, end_time) + + _start = dt.datetime.combine(_start, start_time) + _end = dt.datetime.combine(_end, end_time) + + return _start, _end + + +def validate_date_range_datetime(date_range, start_time=None, end_time=None): + + """ + Validates a date range provided in the form of a list of datetimes. + + Parameters + ---------- + date_range: list(datetime.datetime) + A date range provided in the form of a list of datetimes. + List must be of length 2. + start_time: None, string, datetime.time + end_time: None, string, datetime.time + + NOTE: If start and/or end times are given, + they will be **ignored** in favor of the time from the start/end datetime.datetime objects. 
+ + Returns + ------- + Start and end dates and times as datetime.datetime objects + + Examples + -------- + >>> drange = [dt.datetime(2016, 1, 14, 1, 0, 0), dt.datetime(2020, 2, 9, 13, 10, 1)] + >>> valid_drange = validate_date_range_datetime(drange) + >>> valid_drange + (datetime.datetime(2016, 1, 14, 1, 0), datetime.datetime(2020, 2, 9, 13, 10, 1)) + + """ + + check_valid_date_range(date_range[0], date_range[1]) + + warnings.warn( + "If you submitted datetime.datetime objects that were created without times, \n" + "your time values will use the datetime package defaults of all 0s rather than \n" + "the icepyx defaults or times entered using the `start_time` or `end_time` arguments." + ) + + return date_range[0], date_range[1] + + +def validate_date_range_date(date_range, start_time=None, end_time=None): + + """ + Validates a date range provided in the form of a list of datetime.date objects. + + Combines the start and end dates with their respective start and end times + to create complete start and end datetime.datetime objects. + + Parameters + ---------- + date_range: list(str) + A date range provided in the form of a list of datetime.dates. + List must be of length 2. 
+ start_time: string or datetime.time + end_time: string or datetime.time + + Returns + ------- + Start and end datetimes as datetime.datetime objects + + Examples + -------- + >>> drange = [dt.date(2016, 1, 1), dt.date(2020, 1, 1)] + >>> valid_drange = validate_date_range_date(drange, "00:10:00", "21:00:59") + >>> valid_drange + (datetime.datetime(2016, 1, 1, 0, 10), datetime.datetime(2020, 1, 1, 21, 0, 59)) + + """ + + check_valid_date_range(date_range[0], date_range[1]) + start_time, end_time = validate_times(start_time, end_time) + + _start = dt.datetime.combine(date_range[0], start_time) + _end = dt.datetime.combine(date_range[1], end_time) + + return _start, _end + + +def validate_date_range_dict(date_range, start_time=None, end_time=None): + + """ + Validates a date range provided in the form of a dict with the following keys: + + + Parameters + ---------- + date_range: dict(str, datetime.datetime, datetime.date) + A date range provided in the form of a dict. + date_range must contain only the following keys: + * `start_date`: start date, type can be of dt.datetime, dt.date, or string + * `end_date`: end date, type can be of dt.datetime, dt.date, or string + Keys MUST have the exact names/formatting above or a ValueError will be thrown by this function. + + If the values are of type dt.datetime and were created without times, + the datetime package defaults of all 0s are used and + the start_time/end_time parameters will be ignored! 
+ start_time: string or datetime.time + end_time: string or datetime.time + + + Returns + ------- + Start and end datetimes as datetime.datetime objects + (by combining the start/end dates with their respective start/end times, if the dict type is not datetime) + + Examples + -------- + >>> drange = {"start_date": "2016-01-01", "end_date": "2020-01-01"} + >>> valid_drange = validate_date_range_dict(drange, "00:00:00", "23:59:59") + >>> valid_drange + (datetime.datetime(2016, 1, 1, 0, 0), datetime.datetime(2020, 1, 1, 23, 59, 59)) + + """ + + # Try to get keys from date_range dict + _start_date = date_range.get("start_date") + _end_date = date_range.get("end_date") + + # If either is none, then we can assume bad keys and raise a ValueError for the user + + if _start_date is None or _end_date is None: + raise ValueError( + "Dicts containing date ranges must have the following keys:\n" + "start_date: start date, type can be of dt.datetime, dt.date, or string\n" + "end_date: end date, type can be of dt.datetime, dt.date, or string" + ) + + start_time, end_time = validate_times(start_time, end_time) + + # start_date + + # if is datetime + if isinstance(_start_date, dt.datetime): + pass + + # if is only date + elif isinstance(_start_date, dt.date): + _start_date = dt.datetime.combine(_start_date, start_time) + + # if is string date + elif isinstance(_start_date, str): + + _start_date = convert_string_to_date(_start_date) + _start_date = dt.datetime.combine(_start_date, start_time) + + # else; raise valueerror, some invalid type + else: + raise ValueError( + "Invalid type for key 'start_date'.\n" + "Dicts containing date ranges must have the following keys:\n" + "start_date: start date, type can be of dt.datetime, dt.date, or string\n" + "end_date: end date, type can be of dt.datetime, dt.date, or string" + ) + + # ######################### end_date ####################################### + # if is datetime + + if isinstance(_end_date, dt.datetime): + pass + + # if is 
only date + elif isinstance(_end_date, dt.date): + _end_date = dt.datetime.combine(_end_date, end_time) + + # if is string date + elif isinstance(_end_date, str): + _end_date = convert_string_to_date(_end_date) + _end_date = dt.datetime.combine(_end_date, end_time) + + # else; raise valueerror, some invalid type + else: + raise ValueError( + "Invalid type for key 'end_date'.\n" + "Dicts containing date ranges must have the following keys:\n" + "start_date: start date, type can be of dt.datetime, dt.date, or string\n" + "end_date: end date, type can be of dt.datetime, dt.date, or string" + ) + + # Ensure the date range is valid + check_valid_date_range(_start_date, _end_date) + + return _start_date, _end_date + + +class Temporal: + def __init__(self, date_range, start_time=None, end_time=None): + """ + Validates input from "date_range" argument (and start/end time arguments, if provided), + then creates a Temporal object with validated inputs as properties of the object. + + Temporal objects are to be used by icepyx.Query to store validated temporal information + required by the Query. + + Parameters + ---------- + date_range : list or dict, as follows + Date range of interest, provided as start and end dates, inclusive. + Accepted input date formats are: + * YYYY-MM-DD string + * YYYY-DOY string + * datetime.date object (if times are included) + * datetime.datetime objects (if no times are included) + where YYYY = 4 digit year, MM = 2 digit month, DD = 2 digit day, DOY = 3 digit day of year. + Date inputs are accepted as a list or dictionary with `start_date` and `end_date` keys. + Currently, a list of specific dates (rather than a range) is not accepted. + TODO: allow searches with a list of dates, rather than a range. + start_time : str, datetime.time, default None + Start time in UTC/Zulu (24 hour clock). + Input types are an HH:mm:ss string or datetime.time object + where HH = hours, mm = minutes, ss = seconds. 
+ If None is given (and a datetime.datetime object is not supplied for `date_range`), + a default of 00:00:00 is applied. + end_time : str, datetime.time, default None + End time in UTC/Zulu (24 hour clock). + Input types are an HH:mm:ss string or datetime.time object + where HH = hours, mm = minutes, ss = seconds. + If None is given (and a datetime.datetime object is not supplied for `date_range`), + a default of 23:59:59 is applied. + If a datetime.datetime object was created without times, the datetime package defaults will apply over those of icepyx + """ + + if len(date_range) == 2: + + # date range is provided as dict of strings, dates, or datetimes + if isinstance(date_range, dict): + self._start, self._end = validate_date_range_dict( + date_range, start_time, end_time + ) + + # date range is provided as list of strings + elif all(isinstance(i, str) for i in date_range): + self._start, self._end = validate_date_range_datestr( + date_range, start_time, end_time + ) + + # date range is provided as list of datetimes + elif all(isinstance(i, dt.datetime) for i in date_range): + self._start, self._end = validate_date_range_datetime( + date_range, start_time, end_time + ) + + # date range is provided as list of dates + elif all(isinstance(i, dt.date) for i in date_range): + self._start, self._end = validate_date_range_date( + date_range, start_time, end_time + ) + + else: + # input type is invalid + raise TypeError( + "date_range must be a list of one of the following: \n" + " list of strs with one of the following formats: \n" + " YYYY-MM-DD, YYYY-DOY \n" + " list of datetime.date or datetime.datetime objects \n" + " dict with the following keys:\n" + " start_date: start date, type can be datetime.datetime, datetime.date, or str\n" + " end_date: end date, type can be datetime.datetime, datetime.date, or str\n" + ) + + else: + raise ValueError( + "Your date range list is the wrong length. It should be of length 2, with start and end dates only." 
+ ) + + def __str__(self): + return "Start date and time: {0}\nEnd date and time: {1}".format( + self._start.strftime("%Y-%m-%d %H:%M:%S"), + self._end.strftime("%Y-%m-%d %H:%M:%S"), + ) + + @property + def start(self): + """ + Return the start date and time of the Temporal object as a datetime.datetime object. + + Examples + ------- + >>> tmp_a = Temporal(["2016-01-01", "2020-01-01"]) + >>> tmp_a.start + datetime.datetime(2016, 1, 1, 0, 0) + + """ + return self._start + + @property + def end(self): + """ + Return the end date and time of the Temporal object as a datetime.datetime object. + + Examples + ------- + >>> tmp_a = Temporal(["2016-01-01", "2020-01-01"]) + >>> tmp_a.end + datetime.datetime(2020, 1, 1, 23, 59, 59) + + """ + return self._end diff --git a/icepyx/core/validate_inputs.py b/icepyx/core/validate_inputs.py index f74ffc209..c7ba55a6d 100644 --- a/icepyx/core/validate_inputs.py +++ b/icepyx/core/validate_inputs.py @@ -104,51 +104,3 @@ def tracks(track): warnings.warn("Listed Reference Ground Track is not available") return track_list - - -def temporal(date_range, start_time, end_time): - """ - Validate the input temporal parameters and return the needed parameters to the query object. - """ - if isinstance(date_range, list): - if len(date_range) == 2: - _start = dt.datetime.strptime(date_range[0], "%Y-%m-%d") - _end = dt.datetime.strptime(date_range[1], "%Y-%m-%d") - assert _start.date() <= _end.date(), "Your date range is invalid" - - else: - raise ValueError( - "Your date range list is the wrong length. It should have start and end dates only." - ) - - # DevGoal: accept more date/time input formats - # elif isinstance(date_range, date-time object): - # print('it is a date-time object') - # elif isinstance(date_range, dict): - # print('it is a dictionary. 
now check the keys for start and end dates') - - if start_time is None: - _start = _start.combine( - _start.date(), dt.datetime.strptime("00:00:00", "%H:%M:%S").time() - ) - else: - if isinstance(start_time, str): - _start = _start.combine( - _start.date(), dt.datetime.strptime(start_time, "%H:%M:%S").time() - ) - else: - raise TypeError("Please enter your start time as a string") - - if end_time is None: - _end = _start.combine( - _end.date(), dt.datetime.strptime("23:59:59", "%H:%M:%S").time() - ) - else: - if isinstance(end_time, str): - _end = _start.combine( - _end.date(), dt.datetime.strptime(end_time, "%H:%M:%S").time() - ) - else: - raise TypeError("Please enter your end time as a string") - - return _start, _end diff --git a/icepyx/core/variables.py b/icepyx/core/variables.py index 58e5a1e8f..d46561f46 100644 --- a/icepyx/core/variables.py +++ b/icepyx/core/variables.py @@ -2,6 +2,7 @@ import os import pprint +from icepyx.core.auth import EarthdataAuthMixin import icepyx.core.is2ref as is2ref # DEVGOAL: use h5py to simplify some of these tasks, if possible! @@ -18,7 +19,7 @@ def list_of_dict_vals(input_dict): # REFACTOR: class needs better docstrings # DevNote: currently this class is not tested -class Variables: +class Variables(EarthdataAuthMixin): """ Get, create, interact, and manipulate lists of variables and variable paths contained in ICESat-2 products. 
@@ -50,19 +51,21 @@ def __init__( vartype, avail=None, wanted=None, - session=None, product=None, version=None, path=None, + auth=None, ): assert vartype in ["order", "file"], "Please submit a valid variables type flag" - + + # initialize authentication properties + EarthdataAuthMixin.__init__(self, auth=auth) + self._vartype = vartype self.product = product self._avail = avail self.wanted = wanted - self._session = session # DevGoal: put some more/robust checks here to assess validity of inputs @@ -84,7 +87,6 @@ def avail(self, options=False, internal=False): Examples -------- >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], version='5') # doctest: +SKIP - >>> reg_a.earthdata_login() # doctest: +SKIP >>> reg_a.order_vars.avail() # doctest: +SKIP ['ancillary_data/atlas_sdp_gps_epoch', 'ancillary_data/control', @@ -101,7 +103,7 @@ def avail(self, options=False, internal=False): if not hasattr(self, "_avail") or self._avail == None: if self._vartype == "order": self._avail = is2ref._get_custom_options( - self._session, self.product, self._version + self.session, self.product, self._version )["variables"] elif self._vartype == "file": @@ -156,7 +158,6 @@ def parse_var_list(varlist, tiered=True, tiered_vars=False): Examples -------- >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], version='1') # doctest: +SKIP - >>> reg_a.earthdata_login() # doctest: +SKIP >>> var_dict, paths = reg_a.order_vars.parse_var_list(reg_a.order_vars.avail()) # doctest: +SKIP >>> var_dict # doctest: +SKIP {'atlas_sdp_gps_epoch': ['ancillary_data/atlas_sdp_gps_epoch'], @@ -420,7 +421,6 @@ def append(self, defaults=False, var_list=None, beam_list=None, keyword_list=Non Examples -------- >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) # doctest: +SKIP - >>> reg_a.earthdata_login() # doctest: +SKIP To add all variables related to a specific ICESat-2 beam @@ -553,7 +553,6 @@ def remove(self, all=False, 
def pytest_configure(config):
    # pytest hook: runs once, before test collection begins.
    # Appends an Earthdata Login entry to ~/.netrc so tests that authenticate
    # against urs.earthdata.nasa.gov can do so non-interactively; the password
    # comes from the NSIDC_LOGIN environment variable (set in CI).
    args = ("icepyx_devteam", "urs.earthdata.nasa.gov", os.getenv("NSIDC_LOGIN"))
    netrc_file = os.path.join(os.path.expanduser("~"), ".netrc")
    # NOTE(review): "a+" always appends, so repeated local runs accumulate
    # duplicate machine entries — consider checking for an existing entry first.
    with open(netrc_file, "a+") as f:
        f.write("machine {1} login {0} password {2}\n".format(*args))
    # netrc files must be user-private (0600) or many clients refuse to use them
    os.chmod(netrc_file, 0o600)
# .session must expose a live requests Session object
def test_get_session(auth_instance):
    assert isinstance(auth_instance.session, requests.sessions.Session)


# .s3login_credentials must be a dict carrying exactly the AWS credential keys
def test_get_s3login_credentials(auth_instance):
    creds = auth_instance.s3login_credentials
    assert isinstance(creds, dict)
    expected_keys = {
        "accessKeyId",
        "secretAccessKey",
        "sessionToken",
        "expiration",
    }
    assert set(creds.keys()) == expected_keys


# earthdata_login must populate .auth with an authenticated earthaccess Auth object
def test_login_function(auth_instance):
    auth_instance.earthdata_login()
    assert isinstance(auth_instance.auth, earthaccess.auth.Auth)
    assert auth_instance.auth.authenticated


# passing the retired `email` argument must emit a DeprecationWarning
def test_depreciation_warning(auth_instance):
    with pytest.warns(DeprecationWarning):
        auth_instance.earthdata_login(email='me@gmail.com')
# The per-product validation tests were ~22 copy-pasted function pairs differing
# only in the product name. Parametrizing removes the duplication and makes
# adding a new product a one-line change, while keeping identical coverage
# (each product is checked in both lowercase and uppercase spellings).
ICESAT2_PRODUCTS = [
    "ATL01",
    "ATL02",
    "ATL03",
    "ATL04",
    "ATL06",
    "ATL07",
    "ATL07QL",
    "ATL08",
    "ATL09",
    "ATL09QL",
    "ATL10",
    "ATL11",
    "ATL12",
    "ATL13",
    "ATL14",
    "ATL15",
    "ATL16",
    "ATL17",
    "ATL19",
    "ATL20",
    "ATL21",
    "ATL23",
]


@pytest.mark.parametrize("product", ICESAT2_PRODUCTS)
def test_valid_product_accepts_any_case(product):
    # _validate_product must normalize any casing to the canonical uppercase ID
    assert is2ref._validate_product(product.lower()) == product
    assert is2ref._validate_product(product) == product
def _assert_default_varlist(product, expected):
    # shared check: the default variable list for `product` matches exactly,
    # including ordering
    assert is2ref._default_varlists(product) == expected


def test_ATL06_default_varlist():
    _assert_default_varlist(
        "ATL06",
        [
            "delta_time",
            "latitude",
            "longitude",
            "h_li",
            "h_li_sigma",
            "atl06_quality_summary",
            "segment_id",
            "sigma_geo_h",
            "x_atc",
            "y_atc",
            "seg_azimuth",
            "sigma_geo_at",
            "sigma_geo_xt",
            "dh_fit_dx",
            "dh_fit_dx_sigma",
            "h_mean",
            "dh_fit_dy",
            "h_rms_misfit",
            "h_robust_sprd",
            "n_fit_photons",
            "signal_selection_source",
            "snr_significance",
            "w_surface_window_final",
            "bsnow_conf",
            "bsnow_h",
            "cloud_flg_asr",
            "cloud_flg_atm",
            "r_eff",
            "tide_ocean",
        ],
    )


def test_ATL07_default_varlist():
    _assert_default_varlist(
        "ATL07",
        [
            "delta_time",
            "latitude",
            "longitude",
            "seg_dist_x",
            "height_segment_height",
            "height_segment_length_seg",
            "height_segment_ssh_flag",
            "height_segment_type",
            "height_segment_quality",
            "height_segment_confidence",
        ],
    )


def test_ATL10_default_varlist():
    _assert_default_varlist(
        "ATL10",
        [
            "delta_time",
            "latitude",
            "longitude",
            "seg_dist_x",
            "lead_height",
            "lead_length",
            "beam_fb_height",
            "beam_fb_length",
            "beam_fb_confidence",
            "beam_fb_quality_flag",
            "height_segment_height",
            "height_segment_length_seg",
            "height_segment_ssh_flag",
            "height_segment_type",
            "height_segment_confidence",
        ],
    )
def test_unsupported_default_varlist():
    # unknown products fall back to the minimal coordinate/time variable list
    assert is2ref._default_varlists("ATL999") == [
        "delta_time",
        "latitude",
        "longitude",
    ]


# #################### gt2spot tests #################################


def test_gt2spot_sc_orient_1():
    # spacecraft orientation 1: expected beam -> spot mapping (tabulated values)
    expected = {"gt1l": 2, "gt1r": 1, "gt2l": 4, "gt2r": 3, "gt3l": 6, "gt3r": 5}
    for beam, spot in expected.items():
        assert is2ref.gt2spot(beam, 1) == spot


def test_gt2spot_sc_orient_0():
    # spacecraft orientation 0: the beam -> spot mapping is mirrored
    expected = {"gt1l": 5, "gt1r": 6, "gt2l": 3, "gt2r": 4, "gt3l": 1, "gt3r": 2}
    for beam, spot in expected.items():
        assert is2ref.gt2spot(beam, 0) == spot
b/icepyx/tests/test_read.py @@ -63,7 +63,6 @@ def test_validate_source_str_not_a_dir_or_file(): ), sorted( [ - "./icepyx/core/is2cat.py", "./icepyx/core/is2ref.py", "./icepyx/tests/is2class_query.py", ] @@ -73,7 +72,7 @@ def test_validate_source_str_not_a_dir_or_file(): ( "./icepyx/core", "is2*.py", - ([], ["./icepyx/core/is2cat.py", "./icepyx/core/is2ref.py"]), + ([], ["./icepyx/core/is2ref.py"]), ), ( "./icepyx", diff --git a/icepyx/tests/test_spatial.py b/icepyx/tests/test_spatial.py index c7731152b..2666d857d 100644 --- a/icepyx/tests/test_spatial.py +++ b/icepyx/tests/test_spatial.py @@ -413,8 +413,7 @@ def test_gdf_from_multi_bbox(): assert obs.geometry[0].equals(exp.geometry[0]) -# TestQuestions: 1) Do these need to be tested? -# 2) Is the best way to test them with lengthy inputs and seeing if the gdfs are the same? +# Potential tests to include once multipolygon and complex polygons are handled # def test_gdf_from_strpoly_one_simple(): diff --git a/icepyx/tests/test_temporal.py b/icepyx/tests/test_temporal.py new file mode 100644 index 000000000..83926946e --- /dev/null +++ b/icepyx/tests/test_temporal.py @@ -0,0 +1,284 @@ +import datetime as dt +import numpy as np +import pytest +from shapely.geometry import Polygon +import warnings + +import icepyx.core.temporal as tp + + +# DevNote: this test suite needs to be parameterized (and fixtured) to include ALL acceptable input types +# Currently it tests most cases for lists and dicts where the input type is str +# A couple of tests were added for datetime.datetime type inputs, but they are incomplete +# dt.date type inputs remain untested + + +# ####### INCOMPLETE DT.DATETIME tests ########### + + +def test_range_dt_dt_list(): + start_dt = dt.datetime(2019, 2, 20, 0, 10, 0) + end_dt = dt.datetime(2019, 2, 28, 14, 45, 30) + result = tp.Temporal([start_dt, end_dt]) + expected_range = [ + dt.datetime(2019, 2, 20, 0, 10, 0), + dt.datetime(2019, 2, 28, 14, 45, 30), + ] + assert result.start == expected_range[0] + 
def _check_range(result, start_expected, end_expected):
    # shared assertion: the Temporal object resolved to the expected start/end
    assert result.start == start_expected
    assert result.end == end_expected


def test_range_dt_dt_dict():
    begin = dt.datetime(2019, 2, 20, 0, 10, 0)
    finish = dt.datetime(2019, 2, 28, 14, 45, 30)
    # full datetimes pass through unchanged
    _check_range(tp.Temporal({"start_date": begin, "end_date": finish}), begin, finish)


def test_range_dt_date_list():
    # bare dates get the default 00:00:00 / 23:59:59 times
    _check_range(
        tp.Temporal([dt.date(2019, 2, 20), dt.date(2019, 2, 28)]),
        dt.datetime(2019, 2, 20, 0, 0, 0),
        dt.datetime(2019, 2, 28, 23, 59, 59),
    )


def test_range_dt_date_dict():
    _check_range(
        tp.Temporal({"start_date": dt.date(2019, 2, 20), "end_date": dt.date(2019, 2, 28)}),
        dt.datetime(2019, 2, 20, 0, 0, 0),
        dt.datetime(2019, 2, 28, 23, 59, 59),
    )


def test_range_dt_notime_list():
    # datetimes constructed without a time keep midnight on both ends
    _check_range(
        tp.Temporal([dt.datetime(2019, 2, 20), dt.datetime(2019, 2, 28)]),
        dt.datetime(2019, 2, 20, 0, 0, 0),
        dt.datetime(2019, 2, 28, 0, 0, 0),
    )


def test_range_dt_notime_dict():
    _check_range(
        tp.Temporal(
            {"start_date": dt.datetime(2019, 2, 20), "end_date": dt.datetime(2019, 2, 28)}
        ),
        dt.datetime(2019, 2, 20, 0, 0, 0),
        dt.datetime(2019, 2, 28, 0, 0, 0),
    )


# ####### BEGIN DATE RANGE TESTS ###########


def test_range_str_yyyymmdd_list_no_start_end_time():
    _check_range(
        tp.Temporal(["2016-01-01", "2020-01-01"]),
        dt.datetime(2016, 1, 1, 0, 0, 0),
        dt.datetime(2020, 1, 1, 23, 59, 59),
    )


def test_range_str_yyyydoy_list_no_start_end_time():
    # YYYY-DOY strings: day 14 of 2016 and day 40 of 2020 (a leap year)
    _check_range(
        tp.Temporal(["2016-14", "2020-40"]),
        dt.datetime(2016, 1, 14, 0, 0, 0),
        dt.datetime(2020, 2, 9, 23, 59, 59),
    )


def test_range_str_yyyymmdd_dict_no_start_end_time():
    _check_range(
        tp.Temporal({"start_date": "2016-01-01", "end_date": "2020-01-01"}),
        dt.datetime(2016, 1, 1, 0, 0, 0),
        dt.datetime(2020, 1, 1, 23, 59, 59),
    )


def test_range_str_yyyydoy_dict_no_start_end_time():
    _check_range(
        tp.Temporal({"start_date": "2016-14", "end_date": "2020-40"}),
        dt.datetime(2016, 1, 14, 0, 0, 0),
        dt.datetime(2020, 2, 9, 23, 59, 59),
    )


# Test using actual start/end time inputs

# string start/end


def test_range_str_yyyymmdd_list_string_start_end():
    _check_range(
        tp.Temporal(["2016-01-01", "2020-01-01"], "01:00:00", "13:10:01"),
        dt.datetime(2016, 1, 1, 1, 0, 0),
        dt.datetime(2020, 1, 1, 13, 10, 1),
    )


def test_range_str_yyyydoy_list_string_start_end():
    _check_range(
        tp.Temporal(["2016-14", "2020-40"], "01:00:00", "13:10:01"),
        dt.datetime(2016, 1, 14, 1, 0, 0),
        dt.datetime(2020, 2, 9, 13, 10, 1),
    )


def test_range_str_yyyymmdd_dict_string_start_end():
    _check_range(
        tp.Temporal(
            {"start_date": "2016-01-01", "end_date": "2020-01-01"}, "01:00:00", "13:10:01"
        ),
        dt.datetime(2016, 1, 1, 1, 0, 0),
        dt.datetime(2020, 1, 1, 13, 10, 1),
    )


def test_range_str_yyyydoy_dict_string_start_end():
    _check_range(
        tp.Temporal({"start_date": "2016-14", "end_date": "2020-40"}, "01:00:00", "13:10:01"),
        dt.datetime(2016, 1, 14, 1, 0, 0),
        dt.datetime(2020, 2, 9, 13, 10, 1),
    )


# dt.time start/end


def test_range_str_yyyymmdd_list_time_start_end():
    _check_range(
        tp.Temporal(["2016-01-01", "2020-01-01"], dt.time(1, 0, 0), dt.time(13, 10, 1)),
        dt.datetime(2016, 1, 1, 1, 0, 0),
        dt.datetime(2020, 1, 1, 13, 10, 1),
    )


def test_range_str_yyyydoy_list_time_start_end():
    _check_range(
        tp.Temporal(["2016-14", "2020-40"], dt.time(1, 0, 0), dt.time(13, 10, 1)),
        dt.datetime(2016, 1, 14, 1, 0, 0),
        dt.datetime(2020, 2, 9, 13, 10, 1),
    )


def test_range_str_yyyymmdd_dict_time_start_end():
    _check_range(
        tp.Temporal(
            {"start_date": "2016-01-01", "end_date": "2020-01-01"},
            dt.time(1, 0, 0),
            dt.time(13, 10, 1),
        ),
        dt.datetime(2016, 1, 1, 1, 0, 0),
        dt.datetime(2020, 1, 1, 13, 10, 1),
    )


def test_range_str_yyyydoy_dict_time_start_end():
    _check_range(
        tp.Temporal(
            {"start_date": "2016-14", "end_date": "2020-40"},
            dt.time(1, 0, 0),
            dt.time(13, 10, 1),
        ),
        dt.datetime(2016, 1, 14, 1, 0, 0),
        dt.datetime(2020, 2, 9, 13, 10, 1),
    )
# Date Range Errors
# (The following inputs are bad; Temporal must reject them with a clear error.)


def test_bad_start_time_type():
    # non-string, non-time start_time is rejected
    with pytest.raises(AssertionError):
        tp.Temporal(["2016-01-01", "2020-01-01"], 100000, "13:10:01")


def test_bad_end_time_type():
    # non-string, non-time end_time is rejected
    with pytest.raises(AssertionError):
        tp.Temporal(["2016-01-01", "2020-01-01"], "01:00:00", 131001)


def test_range_bad_list_len():
    # a date range list must have exactly two entries
    with pytest.raises(ValueError):
        tp.Temporal(["2016-01-01", "2020-01-01", "2022-02-15"])


# FIX: the next two tests previously duplicated test_bad_end_time_type verbatim
# (copy-paste) and never exercised a malformed date string at all. They now feed
# genuinely invalid YYYY-DOY / YYYY-MM-DD strings. strptime raises ValueError on
# format mismatch — NOTE(review): confirm Temporal propagates ValueError here
# rather than wrapping it in its own assertion.
def test_range_str_bad_yyyydoy():
    # day-of-year 400 does not exist
    with pytest.raises(ValueError):
        tp.Temporal(["2016-400", "2020-40"])


def test_range_str_bad_yyyymmdd():
    # January 32nd does not exist
    with pytest.raises(ValueError):
        tp.Temporal(["2016-01-32", "2020-01-01"])


# a "bad dict" is assumed to be one of the wrong length or with the wrong key names
def test_bad_dict_keys():
    with pytest.raises(ValueError):
        tp.Temporal({"startdate": "2016-01-01", "enddate": "2020-01-01"})


def test_bad_dict_length():
    with pytest.raises(ValueError):
        tp.Temporal({"start_date": "2016-01-01"})


# A "bad range" is a range where the start_date > end date
def test_range_str_bad_range():
    with pytest.raises(AssertionError):
        tp.Temporal({"start_date": "2020-01-01", "end_date": "2016-01-01"})


# NOTE: Not testing bad datetime/time inputs because it is assumed the datetime library
# will throw errors if the user inputs a bad value of either type

# ####### END DATE RANGE TESTS #############
match=ermsg): - val.temporal(["2019-03-22", "2019-02-28"], None, None) - - -def test_bad_date_range(): - ermsg = "Your date range list is the wrong length. It should have start and end dates only." - with pytest.raises(ValueError, match=ermsg): - val.temporal(["2019-02-22"], None, None) - - -def test_time_defaults(): - obs_st, obs_end = val.temporal(["2019-02-22", "2019-02-28"], None, None) - exp_start = dt.datetime(2019, 2, 22, 00, 00, 00) - exp_end = dt.datetime(2019, 2, 28, 23, 59, 59) - assert obs_st == exp_start - assert obs_end == exp_end - - -def test_time_validstr(): - obs_st, obs_end = val.temporal(["2019-02-22", "2019-02-28"], "13:50:59", "23:15:00") - exp_start = dt.datetime(2019, 2, 22, 13, 50, 59) - exp_end = dt.datetime(2019, 2, 28, 23, 15) - assert obs_st == exp_start - assert obs_end == exp_end - - -def test_starttime_validstr(): - ermsg = "Please enter your start time as a string" - with pytest.raises(TypeError, match=ermsg): - val.temporal(["2019-02-22", "2019-02-28"], 121500, None) - - -def test_endtime_validstr(): - ermsg = "Please enter your end time as a string" - with pytest.raises(TypeError, match=ermsg): - val.temporal(["2019-02-22", "2019-02-28"], "00:15:00", 235959) - ########## orbital ########## diff --git a/requirements-dev.txt b/requirements-dev.txt index 5d8935d7b..e0fc68545 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,5 +1,4 @@ black -codecov flake8 pre-commit pypistats diff --git a/requirements.txt b/requirements.txt index 3f276c5cb..06f4ad9a7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,10 +7,9 @@ h5netcdf h5py holoviews hvplot -intake -intake-xarray matplotlib numpy requests +s3fs shapely xarray \ No newline at end of file diff --git a/setup.py b/setup.py index f41b565c4..003a5043f 100644 --- a/setup.py +++ b/setup.py @@ -8,9 +8,9 @@ EXTRAS_REQUIRE = { "viz": ["geoviews >= 1.9.0", "cartopy >= 0.18.0", "scipy"], - "cloud": ["s3fs"], } EXTRAS_REQUIRE["complete"] = 
sorted(set(sum(EXTRAS_REQUIRE.values(), []))) +# install with `pip install "icepyx[complete]"` There is no way to use this functionality with conda. setuptools.setup( name="icepyx", @@ -27,6 +27,7 @@ install_requires=INSTALL_REQUIRES, extras_require=EXTRAS_REQUIRE, python_requires=">=3", + # classifiers are a set of standard descriptions. Possible list: https://pypi.org/pypi?%3Aaction=list_classifiers classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Science/Research", @@ -38,6 +39,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Topic :: Scientific/Engineering", "Topic :: Scientific/Engineering :: GIS", "Topic :: Software Development :: Libraries",