diff --git a/.all-contributorsrc b/.all-contributorsrc index 8f9a076e4..64258c003 100644 --- a/.all-contributorsrc +++ b/.all-contributorsrc @@ -382,7 +382,8 @@ "avatar_url": "https://avatars.githubusercontent.com/u/54070345?v=4", "profile": "https://github.com/jpswinski", "contributions": [ - "code" + "code", + "review" ] }, { @@ -422,6 +423,50 @@ "contributions": [ "review" ] + }, + { + "login": "rwegener2", + "name": "Rachel Wegener", + "avatar_url": "https://avatars.githubusercontent.com/u/35503632?v=4", + "profile": "https://rwegener2.github.io/", + "contributions": [ + "bug", + "code", + "doc", + "ideas", + "maintenance", + "review", + "test", + "tutorial" + ] + }, + { + "login": "whyjz", + "name": "Whyjay Zheng", + "avatar_url": "https://avatars.githubusercontent.com/u/19339926?v=4", + "profile": "https://whyjz.github.io/", + "contributions": [ + "tutorial" + ] + }, + { + "login": "lheagy", + "name": "Lindsey Heagy", + "avatar_url": "https://avatars.githubusercontent.com/u/6361812?v=4", + "profile": "http://lindseyjh.ca/", + "contributions": [ + "mentoring", + "review" + ] + }, + { + "login": "rtilling", + "name": "rtilling", + "avatar_url": "https://avatars.githubusercontent.com/u/52253385?v=4", + "profile": "https://github.com/rtilling", + "contributions": [ + "ideas" + ] } ], "contributorsPerLine": 7, @@ -430,5 +475,6 @@ "repoType": "github", "repoHost": "https://github.com", "skipCi": true, - "commitConvention": "angular" + "commitConvention": "angular", + "commitType": "docs" } diff --git a/.flake8 b/.flake8 new file mode 100644 index 000000000..16b39f3fe --- /dev/null +++ b/.flake8 @@ -0,0 +1,41 @@ +[flake8] +#GOAL: max_line_length = 79 or 99 +max_line_length = 99 +per-file-ignores = + # too many leading '#' for block comment + */tests/*:E266 + # line too long (several test strs) + test_granules.py:E501 + # imported but unused + __init__.py:F401 + # import not at top of file + doc/source/conf.py:E402 + +# GOAL: remove these ignores +ignore = + # line too long + E501 + # comparison syntax + E711 + # comparison syntax + E712 + # comparison syntax in tests + E714 + # comparison syntax in tests + E721 + # bare except + E722 + # ambiguous var name + E741 + # imported but unused + F401 + # unable to detect undefined names + F403 + # assigned and unused (in tests) + F841 + # line break before binary operator + W503 + + # GOAL: + # syntax check doctests in docstrings + # doctests = True \ No newline at end of file diff --git a/.github/workflows/flake8_action.yml b/.github/workflows/flake8_action.yml deleted file mode 100644 index 2e754ad33..000000000 --- a/.github/workflows/flake8_action.yml +++ /dev/null @@ -1,17 +0,0 @@ -# use the flake8 linter to annotate improperly formatted code -# from: https://github.com/marketplace/actions/run-flake8-on-your-pr-with-annotations -name: Run flake8 linter on PRs - -on: - pull_request: - branches: - - development - - master -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Annotate PR after running flake8 - uses: TrueBrain/actions-flake8@master - with: - max_line_length: 88 diff --git a/.github/workflows/linter_actions.yml b/.github/workflows/linter_actions.yml new file mode 100644 index 000000000..dc3836536 --- /dev/null +++ b/.github/workflows/linter_actions.yml @@ -0,0 +1,19 @@ +name: Run linters on PRs + +on: + pull_request: + branches: + - development + - main +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Run black linter + uses: psf/black@stable + # use the flake8 linter to annotate 
improperly formatted code + # note linter arguments are supplied via the .flake8 config file + - name: Annotate PR after running flake8 + uses: TrueBrain/actions-flake8@v2 + diff --git a/.github/workflows/publish_to_pypi.yml b/.github/workflows/publish_to_pypi.yml index e36a321d5..0841712c4 100644 --- a/.github/workflows/publish_to_pypi.yml +++ b/.github/workflows/publish_to_pypi.yml @@ -17,6 +17,8 @@ jobs: name: Publish to PyPI runs-on: ubuntu-latest if: github.repository == 'icesat2py/icepyx' + permissions: + id-token: write steps: - name: Checkout @@ -52,11 +54,8 @@ jobs: - name: Publish to Test PyPI uses: pypa/gh-action-pypi-publish@release/v1 with: - password: ${{ secrets.TEST_PYPI_API_TOKEN }} - repository_url: https://test.pypi.org/legacy/ + repository-url: https://test.pypi.org/legacy/ - name: Publish to PyPI if: startsWith(github.ref, 'refs/tags') uses: pypa/gh-action-pypi-publish@release/v1 - with: - password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/uml_action.yml b/.github/workflows/uml_action.yml index ba893e5e9..db0a59fa6 100644 --- a/.github/workflows/uml_action.yml +++ b/.github/workflows/uml_action.yml @@ -2,6 +2,7 @@ name: Update UML diagrams on: pull_request_review: types: [submitted] + branches: development jobs: diagrams: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 74a6e3028..57a5b81bf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,7 @@ repos: - repo: https://github.com/psf/black - rev: 22.6.0 + rev: 23.12.0 hooks: - - id: black \ No newline at end of file + - id: black + +# you can run `pre-commit autoupdate` to automatically update to the latest version of hooks! \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index 308925d31..b2c4ccde6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,5 +32,5 @@ jobs: - stage: behind Earthdata script: - - export NSIDC_LOGIN=$NSIDC_LOGIN + - export EARTHDATA_PASSWORD=$NSIDC_LOGIN - pytest icepyx/tests/test_behind_NSIDC_API_login.py diff --git a/CONTRIBUTORS.rst b/CONTRIBUTORS.rst index c6b0c84f5..302cc5fd4 100644 --- a/CONTRIBUTORS.rst +++ b/CONTRIBUTORS.rst @@ -23,46 +23,50 @@ Thanks goes to these wonderful people (`emoji key Fernando Perez
Fernando Perez

🎨 💼 🤔 - JP Swinski
JP Swinski

💻 + JP Swinski
JP Swinski

💻 👀 Jessica
Jessica

🐛 💻 🖋 📖 🎨 💡 🤔 🚧 🧑‍🏫 📆 💬 👀 Joachim Meyer
Joachim Meyer

🧑‍🏫 🚧 Kelsey Bisson
Kelsey Bisson

🐛 💻 📖 🤔 💡 🤔 🧑‍🏫 💬 👀 + Lindsey Heagy
Lindsey Heagy

🧑‍🏫 👀 Molly Wieringa
Molly Wieringa

🤔 - Nicole Abib
Nicole Abib

💻 🤔 + Nicole Abib
Nicole Abib

💻 🤔 + Rachel Wegener
Rachel Wegener

🐛 💻 📖 🤔 🚧 👀 ⚠️ Raphael Hagen
Raphael Hagen

📖 🎨 💻 🚇 👀 Romina Piunno
Romina Piunno

💻 🤔 🧑‍🏫 👀 Sarah Hall
Sarah Hall

🐛 💻 📖 🚧 ⚠️ Scott Henderson
Scott Henderson

🚧 Sebastian Alvis
Sebastian Alvis

📖 🚇 - Shashank Bhushan
Shashank Bhushan

💡 - Tian Li
Tian Li

🐛 💻 📖 💡 🤔 👀 ⚠️ 🔧 + Shashank Bhushan
Shashank Bhushan

💡 + Tian Li
Tian Li

🐛 💻 📖 💡 🤔 👀 ⚠️ 🔧 Tom Johnson
Tom Johnson

📖 🚇 Tyler Sutterley
Tyler Sutterley

📖 💻 🤔 💬 🛡️ ⚠️ Wei Ji
Wei Ji

🐛 💻 📖 💡 🤔 🚇 🚧 🧑‍🏫 💬 👀 ⚠️ 📢 + Whyjay Zheng
Whyjay Zheng

Wilson Sauthoff
Wilson Sauthoff

👀 + + Zach Fair
Zach Fair

🐛 💻 📖 🤔 💬 👀 alexdibella
alexdibella

🐛 🤔 💻 bidhya
bidhya

💡 - - learn2phoenix
learn2phoenix

💻 liuzheng-arctic
liuzheng-arctic

📖 🐛 💻 🤔 👀 🔧 💡 nitin-ravinder
nitin-ravinder

🐛 👀 ravindraK08
ravindraK08

👀 + + + rtilling
rtilling

🤔 smithb
smithb

🤔 tedmaksym
tedmaksym

🤔 trevorskaggs
trevorskaggs

🐛 💻 - - trey-stafford
trey-stafford

💻 🤔 🚧 👀 💬 - + diff --git a/doc/source/conf.py b/doc/source/conf.py index 0bc0003a6..a77670850 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -10,12 +10,12 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # +import datetime import os import sys sys.path.insert(0, os.path.abspath("../..")) sys.path.insert(0, os.path.abspath("../sphinxext")) -import datetime import icepyx @@ -121,7 +121,8 @@ def setup(app): # this should possibly be moved to the sphinxext directory as a standalone .py file # -- custom style for pybtex output ------------------------------------------- from pybtex.style.formatting.unsrt import Style as UnsrtStyle -from pybtex.style.labels.alpha import LabelStyle as AlphaLabelStyle + +# from pybtex.style.labels.alpha import LabelStyle as AlphaLabelStyle from pybtex.plugin import register_plugin # I seem to be unable to figure out how to control what is used for the label. It would diff --git a/doc/source/contributing/quest-available-datasets.rst b/doc/source/contributing/quest-available-datasets.rst new file mode 100644 index 000000000..86901f7ed --- /dev/null +++ b/doc/source/contributing/quest-available-datasets.rst @@ -0,0 +1,29 @@ +.. _quest_supported_label: + +QUEST Supported Datasets +======================== + +On this page, we outline the datasets that are supported by the QUEST module. Click on the links for each dataset to view information about the API and sensor/data platform used. + + +List of Datasets +---------------- + +`Argo `_ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +The Argo mission involves a series of floats that are designed to capture vertical ocean profiles of temperature, salinity, and pressure down to ~2000 m. Some floats are in support of BGC-Argo, which also includes data relevant for biogeochemical applications: oxygen, nitrate, chlorophyll, backscatter, and solar irradiance. + +A paper outlining the Argo extension to QUEST is currently in preparation, with a citable preprint available in the near future. + +:ref:`Argo Workflow Example` + + +Adding a Dataset to QUEST +------------------------- + +Want to add a new dataset to QUEST? No problem! QUEST includes a template script (``dataset.py``) that may be used to create your own querying module for a dataset of interest. + +Once you have developed a script with the template, you may request for the module to be added to QUEST via GitHub. +Please see the How to Contribute page :ref:`dev_guide_label` for instructions on how to contribute to icepyx. + +Detailed guidelines on how to construct your dataset module are currently a work in progress. diff --git a/doc/source/example_notebooks/IS2_DEM_comparison_WIP.ipynb b/doc/source/example_notebooks/IS2_DEM_comparison_WIP.ipynb index 0d897c393..bf1bc8af4 100644 --- a/doc/source/example_notebooks/IS2_DEM_comparison_WIP.ipynb +++ b/doc/source/example_notebooks/IS2_DEM_comparison_WIP.ipynb @@ -176,9 +176,7 @@ }, "source": [ "```{admonition} Important Authentication Update\n", - "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods decribed in the [ICESat-2 Data Access Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) example. 
The `.earthdata_login()` function is still available for backwards compatibility.\n", - "\n", - "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. `region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n", + "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is deprecated and will result in an error, as icepyx will call the login function as needed. The user will still need to provide their credentials.\n", "```" ] }, diff --git a/doc/source/example_notebooks/IS2_cloud_data_access.ipynb b/doc/source/example_notebooks/IS2_cloud_data_access.ipynb index fa0931c8a..194e25210 100644 --- a/doc/source/example_notebooks/IS2_cloud_data_access.ipynb +++ b/doc/source/example_notebooks/IS2_cloud_data_access.ipynb @@ -12,35 +12,59 @@ "## Notes\n", "1. ICESat-2 data became publicly available on the cloud on 29 September 2022. Thus, access methods and example workflows are still being developed by NSIDC, and the underlying code in icepyx will need to be updated now that these data (and the associated metadata) are available. We appreciate your patience and contributions (e.g. reporting bugs, sharing your code, etc.) during this transition!\n", "2. This example and the code it describes are part of ongoing development. Current limitations to using these features are described throughout the example, as appropriate.\n", - "3. You **MUST** be working within an AWS instance. Otherwise, you will get a permissions error.\n", - "4. Cloud authentication is still more user-involved than we'd like. We're working to address this - let us know if you'd like to join the conversation!" + "3. You **MUST** be working within an AWS instance. Otherwise, you will get a permissions error." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, + "source": [ + "## Querying for data and finding s3 urls" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "import earthaccess\n", "import icepyx as ipx" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Make sure the user sees important warnings if they try to read a lot of data from the cloud\n", + "import warnings\n", + "warnings.filterwarnings(\"always\")" + ] + }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ - "Create an icepyx Query object" + "We will start the way we often do: by creating an icepyx Query object." 
] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "# bounding box\n", - "# \"producerGranuleId\": \"ATL03_20191130221008_09930503_004_01.h5\",\n", "short_name = 'ATL03'\n", "spatial_extent = [-45, 58, -35, 75]\n", "date_range = ['2019-11-30','2019-11-30']" @@ -49,25 +73,32 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "reg=ipx.Query(short_name, spatial_extent, date_range)" + "reg = ipx.Query(short_name, spatial_extent, date_range)" ] }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "tags": [], + "user_expressions": [] + }, "source": [ - "## Get the granule s3 urls\n", - "You must specify `cloud=True` to get the needed s3 urls.\n", - "This function returns a list containing the list of the granule IDs and a list of the corresponding urls." + "### Get the granule s3 urls\n", + "\n", + "With this query object you can get a list of available granules. This function returns a list containing the list of the granule IDs and a list of the corresponding urls. Use `cloud=True` to get the needed s3 urls." ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "gran_ids = reg.avail_granules(ids=True, cloud=True)\n", @@ -80,20 +111,27 @@ "user_expressions": [] }, "source": [ - "## Log in to Earthdata and generate an s3 token\n", - "You can use icepyx's existing login functionality to generate your s3 data access token, which will be valid for *one* hour. The icepyx module will renew the token for you after an hour, but if viewing your token over the course of several hours you may notice the values will change.\n", - "\n", - "You can access your s3 credentials using:" + "## Determining variables of interest" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, + "source": [ + "There are several ways to view available variables. One is to use the existing Query object:" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "# uncommenting the line below will print your temporary login credentials\n", - "# reg.s3login_credentials" + "reg.order_vars.avail()" ] }, { @@ -102,43 +140,118 @@ "user_expressions": [] }, "source": [ - "```{admonition} Important Authentication Update\n", - "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods decribed in the [ICESat-2 Data Access Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) example. 
The `.earthdata_login()` function is still available for backwards compatibility.\n",
+    "Another way is to use the variables module:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "ipx.Variables(product=short_name).avail()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "We can also do this using a specific s3 filepath from the Query object:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "ipx.Variables(path=gran_ids[1][0]).avail()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "From any of these methods we can see that `h_ph` is a variable for this data product, so we will read that variable in the next step."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "#### A Note on listing variables using s3 urls\n",
    "\n",
-    "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. `region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n",
-    "```"
+    "We can use the Variables module with an s3 url to explore available data variables the same way we do with local files. An important difference, however, is how the available variables list is created. When reading a local file the variables module will traverse the entire file and search for variables that are present in that file. This method is too time intensive with the s3 data, so instead the product/version of the data product is read from the file and all possible variables associated with that product/version are reported as available. As long as you are using the NSIDC s3 paths provided via Earthdata search and the Query object, these lists will be the same."
   ]
  },
  {
   "cell_type": "markdown",
-   "metadata": {},
+   "metadata": {
+    "tags": [],
+    "user_expressions": []
+   },
   "source": [
-    "## Set up your s3 file system using your credentials"
+    "#### A Note on authentication\n",
+    "\n",
+    "Notice that accessing cloud data requires two layers of authentication: 1) authenticating with your Earthdata Login and 2) authenticating for cloud access. These both happen behind the scenes, without the need for users to provide any explicit commands.\n",
+    "\n",
+    "Icepyx uses earthaccess to generate your s3 data access token, which will be valid for *one* hour. Icepyx will also renew the token for you after an hour, so if viewing your token over the course of several hours you may notice the values will change.\n",
+    "\n",
+    "If you do want to see your s3 credentials, you can access them using:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
-   "metadata": {},
+   "metadata": {
+    "tags": []
+   },
   "outputs": [],
   "source": [
-    "s3 = earthaccess.get_s3fs_session(daac='NSIDC', provider=reg.s3login_credentials)"
+    "# uncommenting the line below will print your temporary aws login credentials\n",
+    "# reg.s3login_credentials"
   ]
  },
  {
   "cell_type": "markdown",
-   "metadata": {},
+   "metadata": {
+    "user_expressions": []
+   },
   "source": [
-    "## Select an s3 url and access the data\n",
-    "Data read in capabilities for cloud data are coming soon in icepyx (targeted Spring 2023). 
Stay tuned and we'd love for you to join us and contribute!\n", + "```{admonition} Important Authentication Update\n", + "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is deprecated and will result in an error, as icepyx will call the login function as needed. The user will still need to provide their credentials.\n", + "```" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, + "source": [ + "## Choose a data file and access the data\n", + "\n", + "**Note: If you get a PermissionDenied Error when trying to read in the data, you may not be sending your request from an AWS hub in us-west2. We're currently working on how to alert users if they will not be able to access ICESat-2 data in the cloud for this reason**\n", "\n", - "**Note: If you get a PermissionDenied Error when trying to read in the data, you may not be sending your request from an AWS hub in us-west2. We're currently working on how to alert users if they will not be able to access ICESat-2 data in the cloud for this reason**" + "We are ready to read our data! We do this by creating a reader object and using the s3 url returned from the Query object." ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "# the first index, [1], gets us into the list of s3 urls\n", @@ -147,32 +260,111 @@ "# s3url = 's3://nsidc-cumulus-prod-protected/ATLAS/ATL03/004/2019/11/30/ATL03_20191130221008_09930503_004_01.h5'" ] }, + { + "cell_type": "markdown", + "metadata": { + "tags": [], + "user_expressions": [] + }, + "source": [ + "Create the Read object" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "reader = ipx.Read(s3url)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, + "source": [ + "This reader object gives us yet another way to view available variables." + ] + }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "import h5py\n", - "import numpy as np" + "reader.vars.avail()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, + "source": [ + "Next, we append our desired variable to the `wanted_vars` list:" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "%time f = h5py.File(s3.open(s3url,'rb'),'r')" + "reader.vars.append(var_list=['h_ph'])" ] }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, + "source": [ + "Finally, we load the data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "%%time\n", + "\n", + "# This may take 5-10 minutes\n", + "reader.load()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, + "source": [ + "### Some important caveats\n", + "\n", + "While the cloud data reading is functional within icepyx, it is very slow. Approximate timing shows it takes ~6 minutes of load time per variable per file from s3. 
Because of this you will receive a warning if you try to load more than three variables or more than two files at once.\n",
    "\n",
    "The slow load speed is a demonstration of the many steps involved in making cloud data actionable - the data supply chain needs optimized source data, efficient low level data readers, and high level libraries which are enabled to use the fastest low level data readers. Not all of these pieces are fully developed right now, but the progress being made is exciting and there is lots of room for contribution!"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "#### Credits\n",
-    "* notebook by: Jessica Scheick\n",
-    "* historic source material: [is2-nsidc-cloud.py](https://gist.github.com/bradlipovsky/80ab6a7aff3d3524b9616a9fc176065e#file-is2-nsidc-cloud-py-L28) by Brad Lipovsky"
+    "* notebook by: Jessica Scheick and Rachel Wegener"
   ]
  }
 ],
@@ -192,7 +384,7 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
-   "version": "3.10.12"
+   "version": "3.10.10"
  }
 },
 "nbformat": 4,
diff --git a/doc/source/example_notebooks/IS2_data_access.ipynb b/doc/source/example_notebooks/IS2_data_access.ipynb
index d9d50cdc0..704abe10c 100644
--- a/doc/source/example_notebooks/IS2_data_access.ipynb
+++ b/doc/source/example_notebooks/IS2_data_access.ipynb
@@ -79,7 +79,7 @@
 "\n",
 "There are three required inputs, depending on how you want to search for data. Two are required in all cases:\n",
 "- `short_name` = the data product of interest, known as its \"short name\".\n",
- "See https://nsidc.org/data/icesat-2/data-sets for a list of the available data products.\n",
+ "See https://nsidc.org/data/icesat-2/products for a list of the available data products.\n",
 "- `spatial extent` = a region of interest to search within. This can be entered as a bounding box, polygon vertex coordinate pairs, or a polygon geospatial file (currently shp, kml, and gpkg are supported).\n",
 " - bounding box: Given in decimal degrees for the lower left longitude, lower left latitude, upper right longitude, and upper right latitude\n",
 " - polygon vertices: Given as longitude, latitude coordinate pairs of decimal degrees with the last entry a repeat of the first.\n",
@@ -485,9 +485,7 @@
 },
 "source": [
 "```{admonition} Important Authentication Update\n",
- "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods decribed above. The `.earthdata_login()` function is still available for backwards compatibility.\n",
- "\n",
- "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. `region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n",
+ "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is deprecated and will result in an error, as icepyx will call the login function as needed. 
The user will still need to provide their credentials using one of the three methods described above.\n",
 "```"
 ]
 },
@@ -626,9 +624,9 @@
 ],
 "metadata": {
  "kernelspec": {
-  "display_name": "icepyx-dev",
+  "display_name": "icepyx",
   "language": "python",
-  "name": "icepyx-dev"
+  "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
@@ -640,7 +638,7 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
-   "version": "3.11.4"
+   "version": "3.10.10"
  }
 },
 "nbformat": 4,
diff --git a/doc/source/example_notebooks/IS2_data_access2-subsetting.ipynb b/doc/source/example_notebooks/IS2_data_access2-subsetting.ipynb
index 89247de5f..1bb178d8a 100644
--- a/doc/source/example_notebooks/IS2_data_access2-subsetting.ipynb
+++ b/doc/source/example_notebooks/IS2_data_access2-subsetting.ipynb
@@ -51,7 +51,9 @@
 },
 {
  "cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+  "user_expressions": []
+ },
 "source": [
 "Create a query object and log in to Earthdata\n",
 "\n",
@@ -75,15 +77,15 @@
 },
 "source": [
 "```{admonition} Important Authentication Update\n",
- "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods decribed in the [ICESat-2 Data Access Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) example. The `.earthdata_login()` function is still available for backwards compatibility.\n",
- "\n",
- "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. `region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n",
+ "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is deprecated and will result in an error, as icepyx will call the login function as needed. 
The user will still need to provide their credentials.\n", "```" ] }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "## Discover Subsetting Options\n", "\n", @@ -108,7 +110,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "By default, spatial and temporal subsetting based on your initial inputs is applied to your order unless you specify `subset=False` to `order_granules()` or `download_granules()` (which calls `order_granules` under the hood if you have not already placed your order) functions.\n", "Additional subsetting options must be specified as keyword arguments to the order/download functions.\n", @@ -118,7 +122,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "### _Why do I have to provide spatial bounds to icepyx even if I don't use them to subset my data order?_\n", "\n", @@ -132,7 +138,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "## About Data Variables in a query object\n", "\n", @@ -145,7 +153,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "### Determine what variables are available for your data product\n", "There are multiple ways to get a complete list of available variables.\n", @@ -159,7 +169,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "region_a.order_vars.avail()" @@ -167,7 +179,9 @@ }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "By passing the boolean `options=True` to the `avail` method, you can obtain lists of unique possible variable inputs (var_list inputs) and path subdirectory inputs (keyword_list and beam_list inputs) for your data product. These can be helpful for building your wanted variable list." ] @@ -175,7 +189,9 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ "region_a.order_vars.avail(options=True)" @@ -353,9 +369,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "icepyx-dev", "language": "python", - "name": "python3" + "name": "icepyx-dev" }, "language_info": { "codemirror_mode": { @@ -367,7 +383,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/doc/source/example_notebooks/IS2_data_read-in.ipynb b/doc/source/example_notebooks/IS2_data_read-in.ipynb index 115c63044..c20cfbc95 100644 --- a/doc/source/example_notebooks/IS2_data_read-in.ipynb +++ b/doc/source/example_notebooks/IS2_data_read-in.ipynb @@ -63,9 +63,8 @@ "metadata": {}, "outputs": [], "source": [ - "path_root = '/full/path/to/your/data/'\n", - "pattern = \"processed_ATL{product:2}_{datetime:%Y%m%d%H%M%S}_{rgt:4}{cycle:2}{orbitsegment:2}_{version:3}_{revision:2}.h5\"\n", - "reader = ipx.Read(path_root, \"ATL06\", pattern) # or ipx.Read(filepath, \"ATLXX\") if your filenames match the default pattern" + "path_root = '/full/path/to/your/ATL06_data/'\n", + "reader = ipx.Read(path_root)" ] }, { @@ -111,10 +110,9 @@ "\n", "Reading in ICESat-2 data with icepyx happens in a few simple steps:\n", "1. Let icepyx know where to find your data (this might be local files or urls to data in cloud storage)\n", - "2. 
Tell icepyx how to interpret the filename format\n", - "3. Create an icepyx `Read` object\n", - "4. Make a list of the variables you want to read in (does not apply for gridded products)\n", - "5. Load your data into memory (or read it in lazily, if you're using Dask)\n", + "2. Create an icepyx `Read` object\n", + "3. Make a list of the variables you want to read in (does not apply for gridded products)\n", + "4. Load your data into memory (or read it in lazily, if you're using Dask)\n", "\n", "We go through each of these steps in more detail in this notebook." ] @@ -159,30 +157,25 @@ }, "source": [ "```{admonition} Important Authentication Update\n", - "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods decribed in the [ICESat-2 Data Access Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) example. The `.earthdata_login()` function is still available for backwards compatibility.\n", - "\n", - "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. `region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n", + "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is deprecated and will result in an error, as icepyx will call the login function as needed. The user will still need to provide their credentials.\n", "```" ] }, { "cell_type": "markdown", "id": "e8da42c1", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ "### Step 1: Set data source path\n", "\n", "Provide a full path to the data to be read in (i.e. opened).\n", "Currently accepted inputs are:\n", - "* a directory\n", - "* a single file\n", - "\n", - "All files to be read in *must* have a consistent filename pattern.\n", - "If a directory is supplied as the data source, all files in any subdirectories that match the filename pattern will be included.\n", - "\n", - "S3 bucket data access is currently under development, and requires you are registered with NSIDC as a beta tester for cloud-based ICESat-2 data.\n", - "icepyx is working to ensure a smooth transition to working with remote files.\n", - "We'd love your help exploring and testing these features as they become available!" 
+    "* a string path to directory - all files from the directory will be opened\n",
+    "* a string path to single file - one file will be opened\n",
+    "* a list of filepaths - all files in the list will be opened\n",
+    "* a glob string (see [glob](https://docs.python.org/3/library/glob.html)) - any files matching the glob pattern will be opened"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
-   "id": "e683ebf7",
+   "id": "fac636c2-e0eb-4e08-adaa-8f47623e46a1",
   "metadata": {},
   "outputs": [],
   "source": [
-    "# urlpath = 's3://nsidc-cumulus-prod-protected/ATLAS/ATL03/004/2019/11/30/ATL03_20191130221008_09930503_004_01.h5'"
+    "# list_of_files = ['/my/data/ATL06/processed_ATL06_20190226005526_09100205_006_02.h5', \n",
+    "# '/my/other/data/ATL06/processed_ATL06_20191202102922_10160505_006_01.h5']"
   ]
  },
  {
   "cell_type": "markdown",
-   "id": "92743496",
+   "id": "ba3ebeb0-3091-4712-b0f7-559ddb95ca5a",
   "metadata": {
    "user_expressions": []
   },
   "source": [
-    "### Step 2: Create a filename pattern for your data files\n",
+    "#### Glob Strings\n",
+    "\n",
+    "[glob](https://docs.python.org/3/library/glob.html) is a Python library which allows users to list files in their file systems whose paths match a given pattern. Icepyx uses the glob library to give users greater flexibility over their input file lists.\n",
+    "\n",
+    "glob works using `*` and `?` as wildcard characters, where `*` matches any number of characters and `?` matches a single character. For example:\n",
 "\n",
-    "Files provided by NSIDC typically match the format `\"ATL{product:2}_{datetime:%Y%m%d%H%M%S}_{rgt:4}{cycle:2}{orbitsegment:2}_{version:3}_{revision:2}.h5\"` where the parameters in curly brackets indicate a parameter name (left of the colon) and character length or format (right of the colon).\n",
-    "Some of this information is used during data opening to help correctly read and label the data within the data structure, particularly when multiple files are opened simultaneously.\n",
+    "* `/this/path/*.h5`: refers to all `.h5` files in the `/this/path` folder (Example matches: \"/this/path/processed_ATL03_20191130221008_09930503_006_01.h5\" or \"/this/path/myfavoriteicesat-2file.h5\")\n",
+    "* `/this/path/*ATL07*.h5`: refers to all `.h5` files in the `/this/path` folder that have ATL07 in the filename. (Example matches: \"/this/path/ATL07-02_20221012220720_03391701_005_01.h5\" or \"/this/path/processed_ATL07.h5\")\n",
+    "* `/this/path/ATL??/*.h5`: refers to all `.h5` files that are in a subfolder of `/this/path` and a subdirectory of `ATL` followed by any 2 characters (Example matches: \"/this/path/ATL03/processed_ATL03_20191130221008_09930503_006_01.h5\", \"/this/path/ATL06/myfile.h5\")\n",
 "\n",
-    "By default, icepyx will assume your filenames follow the default format.\n",
-    "However, you can easily read in other ICESat-2 data files by supplying your own filename pattern.\n",
-    "For instance, `pattern=\"ATL{product:2}-{datetime:%Y%m%d%H%M%S}-Sample.h5\"`. A few example patterns are provided below."
+    "See the glob documentation or other online explainer tutorials for a more in-depth explanation, or for advanced glob patterns such as character classes and ranges." 
]
  },
  {
   "cell_type": "markdown",
   "id": "20286c76-5632-4420-b2c9-a5a6b1952672",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "#### Recursive Directory Search"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "632bd1ce-2397-4707-a63f-9d5d2fc02fbc",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "By default, glob will not search all of the subdirectories for matching filepaths, but it has the ability to do so.\n",
    "\n",
    "If you would like to search recursively, you can achieve this by either:\n",
    "1. passing the `recursive` argument into `glob_kwargs` and including `\\**\\` in your filepath\n",
    "2. using glob directly to create a list of filepaths\n",
    "\n",
    "Each of these two methods is shown below."
   ]
  },
  {
   "cell_type": "markdown",
   "id": "da0cacd8-9ddc-4c31-86b6-167d850b989e",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "Method 1: passing the `recursive` argument into `glob_kwargs`"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e276b876-9ec7-4991-8520-05c97824b896",
   "metadata": {},
   "outputs": [],
   "source": [
    "ipx.Read('/path/to/**/folder', glob_kwargs={'recursive': True})"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "f5a1e85e-fc4a-405f-9710-0cb61b827f2c",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "You can use `glob_kwargs` for any additional argument to Python's built-in `glob.glob` that you would like to pass in via icepyx."
   ]
  },
  {
   "cell_type": "markdown",
   "id": "76de9539-710c-49f6-9e9e-238849382c33",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "Method 2: using glob directly to create a list of filepaths"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "be79b0dd-efcf-4d50-bdb0-8e3ae8e8e38c",
   "metadata": {},
   "outputs": [],
   "source": [
    "import glob"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "5d088571-496d-479a-9fb7-833ed7e98676",
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "list_of_files = glob.glob('/path/to/**/folder', recursive=True)\n",
    "ipx.Read(list_of_files)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "08df2874-7c54-4670-8f37-9135ea296ff5",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "```{admonition} Read Module Update\n",
    "Previously, icepyx required two additional conditions: 1) a `product` argument and 2) that your files either matched the default `filename_pattern` or that the user provided their own `filename_pattern`. These two requirements have been removed. `product` is now read directly from the file metadata (the root group's `short_name` attribute). 
Flexibility to specify multiple files via the `filename_pattern` has been replaced with the [glob string](https://docs.python.org/3/library/glob.html) feature, and by allowing a list of filepaths as an argument.\n",
    "\n",
    "The `product` and `filename_pattern` arguments are now deprecated and will be removed in icepyx version 1.0.0.\n",
    "```"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "4275b04c",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "### Step 2: Create an icepyx read object\n",
 "\n",
-    "The `Read` object has two required inputs:\n",
-    "- `path` = a string with the full file path or full directory path to your hdf5 (.h5) format files.\n",
-    "- `product` = the data product you're working with, also known as the \"short name\".\n",
 "\n",
-    "The `Read` object also accepts the optional keyword input:\n",
-    "- `pattern` = a formatted string indicating the filename pattern required for Intake's path_as_pattern argument."
+    "Using the `data_source` described in Step 1, we can create our Read object."
   ]
  },
  {
   "cell_type": "code",
@@ -299,7 +353,17 @@
   },
   "outputs": [],
   "source": [
-    "reader = ipx.Read(data_source=path_root, product=\"ATL06\", filename_pattern=pattern) # or ipx.Read(filepath, \"ATLXX\") if your filenames match the default pattern"
+    "reader = ipx.Read(data_source=path_root)"
   ]
  },
+  {
+   "cell_type": "markdown",
+   "id": "7b2acfdb-75eb-4c64-b583-2ab19326aaee",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "The Read object now contains the list of matching files that will eventually be loaded into Python. You can inspect its properties, such as the files that were located or the identified product, directly on the Read object."
+   ]
+  },
  {
   "cell_type": "code",
@@ -309,7 +373,17 @@
   "metadata": {},
   "outputs": [],
   "source": [
-    "reader._filelist"
+    "reader.filelist"
   ]
  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7455ee3f-f9ab-486e-b4c7-2fa2314d4084",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "reader.product"
+   ]
+  },
  {
   "cell_type": "markdown",
@@ -319,7 +393,7 @@
   "user_expressions": []
  },
  "source": [
-    "### Step 4: Specify variables to be read in\n",
+    "### Step 3: Specify variables to be read in\n",
 "\n",
 "To load your data into memory or prepare it for analysis, icepyx needs to know which variables you'd like to read in.\n",
 "If you've used icepyx to download data from NSIDC with variable subsetting (which is the default), then you may already be familiar with the icepyx `Variables` module and how to create and modify lists of variables.\n",
@@ -426,7 +500,7 @@
   "user_expressions": []
  },
  "source": [
-    "### Step 5: Loading your data\n",
+    "### Step 4: Loading your data\n",
 "\n",
 "Now that you've set up all the options, you're ready to read your ICESat-2 data into memory!" 
] @@ -541,9 +615,9 @@
 ],
 "metadata": {
  "kernelspec": {
-  "display_name": "general",
+  "display_name": "icepyx-dev",
   "language": "python",
-  "name": "general"
+  "name": "icepyx-dev"
  },
  "language_info": {
   "codemirror_mode": {
diff --git a/doc/source/example_notebooks/IS2_data_variables.ipynb b/doc/source/example_notebooks/IS2_data_variables.ipynb
index 3ac1f99fe..90fa8500c 100644
--- a/doc/source/example_notebooks/IS2_data_variables.ipynb
+++ b/doc/source/example_notebooks/IS2_data_variables.ipynb
@@ -2,7 +2,9 @@
 "cells": [
 {
  "cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+  "user_expressions": []
+ },
 "source": [
 "# ICESat-2's Nested Variables\n",
 "\n",
@@ -13,10 +15,10 @@
 "\n",
 "A given ICESat-2 product may have over 200 variable + path combinations.\n",
 "icepyx includes a custom `Variables` module that is \"aware\" of the ATLAS sensor and how the ICESat-2 data products are stored.\n",
- "The module can be accessed independently, but is optimally used as a component of a `Query` object (Case 1) or `Read` object (Case 2).\n",
+ "The module can be accessed independently and can also be accessed as a component of a `Query` object or `Read` object.\n",
 "\n",
- "This notebook illustrates in detail how the `Variables` module behaves using a `Query` data access example.\n",
- "However, module usage is analogous through an icepyx ICESat-2 `Read` object.\n",
+ "This notebook illustrates in detail how the `Variables` module behaves. We use the module independently and also show how powerful it is directly in the icepyx workflow using a `Query` data access example.\n",
+ "Module usage with a `Query` object is analogous to usage through an icepyx ICESat-2 `Read` object.\n",
 "More detailed example workflows specifically for the [query](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) and [read](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_read-in.html) tools within icepyx are available as separate Jupyter Notebooks.\n",
 "\n",
 "Questions? Be sure to check out the FAQs throughout this notebook, indicated as italic headings."
@@ -24,11 +26,15 @@
 },
 {
  "cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+  "user_expressions": []
+ },
 "source": [
 "### _Why do ICESat-2 products need a custom variable manager?_\n",
 "\n",
 "_It can be confusing and cumbersome to comb through the 200+ variable and path combinations contained in ICESat-2 data products._\n",
+ "_An hdf5 file is built like a folder with files in it. Opening an ICESat-2 file can be like opening a new folder with over 200 files in it and manually searching for only the ones you want!_\n",
+ "\n",
 "_The icepyx `Variables` module makes it easier for users to quickly find and extract the specific variables they would like to work with across multiple beams, keywords, and variables and provides reader-friendly formatting to browse variables._\n",
 "_A future development goal for `icepyx` includes developing an interactive widget to further improve the user experience._\n",
 "_For data read-in, additional tools are available to target specific beam characteristics (e.g. 
strong versus weak beams)._" @@ -38,35 +44,248 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "#### Some technical details about the Variables module\n", - "For those eager to push the limits or who want to know more implementation details...\n", + "Import packages, including icepyx" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import icepyx as ipx\n", + "from pprint import pprint" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, + "source": [ + "## Creating or Accessing ICESat-2 Variables" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, + "source": [ + "There are three ways to create or access an ICESat-2 Variables object in icepyx:\n", + "1. Access via the `.order_vars` property of a Query object\n", + "2. Access via the `.vars` property of a Read object\n", + "3. Create a stand-alone ICESat-2 Variables object using a local file, cloud file, or a product name\n", "\n", - "The only required input to the `Variables` module is `vartype`.\n", - "`vartype` has two acceptible string values, 'order' and 'file'.\n", - "If you use the module as shown in icepyx examples (namely through a `Read` or `Query` object), then this flag will be passed automatically.\n", - "It simply tells the software how to generate the list of possible variable values - either by pinging NSIDC for a list of available variables (`query`) or from the user-supplied file (`read`)." + "An example of each of these is shown below." ] }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "user_expressions": [] + }, "source": [ - "Import packages, including icepyx" + "### 1. Access `Variables` via the `.order_vars` property of a Query object" ] }, { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [] + }, "outputs": [], "source": [ - "import icepyx as ipx\n", - "from pprint import pprint" + "region_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-22','2019-02-28'], \\\n", + " start_time='00:00:00', end_time='23:59:59')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Accessing Variables\n", + "region_a.order_vars" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Showing the variable paths\n", + "region_a.order_vars.avail()" ] }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "tags": [], + "user_expressions": [] + }, + "source": [ + "### 2. Access via the `.vars` property of a Read object" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "path_root = '/full/path/to/your/data/'\n", + "reader = ipx.Read(path_root)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Accessing Variables\n", + "reader.vars" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Showing the variable paths\n", + "# reader.vars.avail()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, + "source": [ + "### 3. Create a stand-alone Variables object\n", + "\n", + "You can also generate an independent Variables object. This can be done using either:\n", + "1. 
The filepath to a local or cloud file you'd like a variables list for\n",
+    "2. The product name (and optionally version) of an ICESat-2 product\n",
+    "\n",
+    "*Note: Cloud data access requires a valid Earthdata login; \n",
+    "you will be prompted to log in if you are not already authenticated.*"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "Create a variables object from a filepath:"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "filepath = '/full/path/to/your/data.h5'\n",
    "v = ipx.Variables(path=filepath)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "# v.avail()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "Create a variables object from a product. The version argument is optional."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "v = ipx.Variables(product='ATL03')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "# v.avail()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "v = ipx.Variables(product='ATL03', version='006')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "# v.avail()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "Now that you know how to create or access Variables, the remainder of this notebook showcases the functions available for building and modifying variables lists. Remember, the example shown below uses a Query object, but the same methods are available if you are using a Read object or a Variables object."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "user_expressions": []
   },
   "source": [
    "## Interacting with ICESat-2 Data Variables\n",
    "\n",
@@ -88,7 +307,9 @@
 },
 {
  "cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+  "user_expressions": []
+ },
 "source": [
 "Create a query object and log in to Earthdata\n",
 "\n",
@@ -126,15 +347,15 @@
 },
 "source": [
 "```{admonition} Important Authentication Update\n",
- "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods decribed in the [ICESat-2 Data Access Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) example. The `.earthdata_login()` function is still available for backwards compatibility.\n",
- "\n",
- "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. `region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n",
+ "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is deprecated and will result in an error, as icepyx will call the login function as needed. 
The user will still need to provide their credentials.\n",
 "```"
 ]
 },
 {
  "cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+  "user_expressions": []
+ },
 "source": [
 "### ICESat-2 data variables\n",
 "\n",
@@ -157,7 +378,9 @@
 },
 {
  "cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+  "user_expressions": []
+ },
 "source": [
 "To increase readability, you can use built in functions to show the 200+ variable + path combinations as a dictionary where the keys are variable names and the values are the paths to that variable.\n",
 "`region_a.order_vars.parse_var_list(region_a.order_vars.avail())` will return a dictionary of variable:paths key:value pairs."
 ]
 },
 {
  "cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+  "user_expressions": []
+ },
 "source": [
 "By passing the boolean `options=True` to the `avail` method, you can obtain lists of unique possible variable inputs (var_list inputs) and path subdirectory inputs (keyword_list and beam_list inputs) for your data product. These can be helpful for building your wanted variable list."
 ]
 },
@@ -188,6 +413,30 @@
 "region_a.order_vars.avail(options=True)"
 ]
 },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "```{admonition} Remember\n",
+    "You can run these same methods no matter how you created or accessed your ICESat-2 Variables. So the methods in this section could equivalently be accessed using a Read object, or by directly accessing a file on your computer:\n",
+    "\n",
+    "```\n",
+    "```python\n",
+    "# Using a Read object\n",
+    "reader.vars.avail()\n",
+    "reader.vars.parse_var_list(reader.vars.avail())\n",
+    "reader.vars.avail(options=True)\n",
+    "\n",
+    "# Using a file on your computer\n",
+    "v = Variables(path='/my/file.h5')\n",
+    "v.avail()\n",
+    "v.parse_var_list(v.avail())\n",
+    "v.avail(options=True)\n",
+    "```\n"
+   ]
+  },
 {
  "cell_type": "markdown",
  "metadata": {},
@@ -228,7 +477,9 @@
 },
 {
  "cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+  "user_expressions": []
+ },
 "source": [
 "The keywords available for this product are shown in the error message upon entering a blank keyword_list, as seen in the next cell."
 ]
 },
@@ -745,13 +996,62 @@
 },
 {
  "cell_type": "markdown",
- "metadata": {},
+ "metadata": {
+  "user_expressions": []
+ },
 "source": [
 "#### With a `Read` object\n",
 "Calling the `load()` method on your `Read` object will automatically look for your wanted variable list and use it.\n",
 "Please see the [read-in example Jupyter Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_read-in.html) for a complete example of this usage.\n"
 ]
 },
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "#### With a local filepath\n",
+    "\n",
+    "One of the benefits of using a local filepath in `Variables` is that it allows you to easily inspect the variables that are available in your file. Once you have a variable of interest from the `avail` list, you could read that variable in with another library, such as xarray. The example below demonstrates this assuming an ATL06 ICESat-2 file."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "filepath = '/full/path/to/my/ATL06_file.h5'\n", + "v = ipx.Variables(path=filepath)\n", + "v.avail()\n", + "# Browse paths and decide you need `gt1l/land_ice_segments/`" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "import xarray as xr\n", + "\n", + "xr.open_dataset(filepath, group='gt1l/land_ice_segments/', engine='h5netcdf')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "user_expressions": [] + }, + "source": [ + "You'll notice in this workflow you are limited to viewing data only within a particular group. Icepyx also provides functionality for merging variables within or even across files. See the [read-in example Jupyter Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_read-in.html) for more details about these features of icepyx." + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -763,9 +1063,9 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "icepyx-dev", "language": "python", - "name": "python3" + "name": "icepyx-dev" }, "language_info": { "codemirror_mode": { @@ -777,7 +1077,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.11.4" } }, "nbformat": 4, diff --git a/doc/source/example_notebooks/IS2_data_visualization.ipynb b/doc/source/example_notebooks/IS2_data_visualization.ipynb index ec87829c2..0f2819862 100644 --- a/doc/source/example_notebooks/IS2_data_visualization.ipynb +++ b/doc/source/example_notebooks/IS2_data_visualization.ipynb @@ -210,9 +210,7 @@ }, "source": [ "```{admonition} Important Authentication Update\n", - "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is no longer required, as icepyx will call the login function as needed. The user will still need to provide their credentials using one of the three methods decribed in the [ICESat-2 Data Access Notebook](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) example. The `.earthdata_login()` function is still available for backwards compatibility.\n", - "\n", - "If you are unable to remove `earthdata_login()` calls from your workflow, note that certain inputs, such as `earthdata_uid` and `email`, are no longer required. e.g. `region_a.earthdata_login(earthdata_uid, email)` becomes `region_a.earthdata_login()`\n", + "Previously, icepyx required you to explicitly use the `.earthdata_login()` function to login. Running this function is deprecated and will result in an error, as icepyx will call the login function as needed. The user will still need to provide their credentials.\n", "```" ] }, diff --git a/doc/source/example_notebooks/QUEST_argo_data_access.ipynb b/doc/source/example_notebooks/QUEST_argo_data_access.ipynb new file mode 100644 index 000000000..1bdb5fd0c --- /dev/null +++ b/doc/source/example_notebooks/QUEST_argo_data_access.ipynb @@ -0,0 +1,626 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "16806722-f5bb-4063-bd4b-60c8b0d24d2a", + "metadata": { + "user_expressions": [] + }, + "source": [ + "# QUEST Example: Finding Argo and ICESat-2 data\n", + "\n", + "In this notebook, we are going to find Argo and ICESat-2 data over a region of the Pacific Ocean. 
Normally, we would require multiple data portals or Python packages to accomplish this. However, thanks to the [QUEST (Query, Unify, Explore SpatioTemporal) module](https://icepyx.readthedocs.io/en/latest/contributing/quest-available-datasets.html), we can use icepyx to find both!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ed25d839-4114-41db-9166-8c027368686c", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Basic packages\n", + "import geopandas as gpd\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "from pprint import pprint\n", + "\n", + "# icepyx and QUEST\n", + "import icepyx as ipx" + ] + }, + { + "cell_type": "markdown", + "id": "5c35f5df-b4fb-4a36-8d6f-d20f1552767a", + "metadata": { + "user_expressions": [] + }, + "source": [ + "## Define the Quest Object\n", + "\n", + "QUEST builds off of the general querying process originally designed for ICESat-2, but makes it applicable to other datasets.\n", + "\n", + "Just like the ICESat-2 Query object, we begin by defining our Quest object. We provide the following bounding parameters:\n", + "* `spatial_extent`: Data is constrained to the given box over the Pacific Ocean.\n", + "* `date_range`: Only grab data from April 18-19, 2022 (to keep download sizes small for this example)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c5d0546d-f0b8-475d-9fd4-62ace696e316", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Spatial bounds, given as SW/NE corners\n", + "spatial_extent = [-154, 30, -143, 37]\n", + "\n", + "# Start and end dates, in YYYY-MM-DD format\n", + "date_range = ['2022-04-18', '2022-04-19']\n", + "\n", + "# Initialize the QUEST object\n", + "reg_a = ipx.Quest(spatial_extent=spatial_extent, date_range=date_range)\n", + "\n", + "print(reg_a)" + ] + }, + { + "cell_type": "markdown", + "id": "8732bf56-1d44-4182-83f7-4303a87d231a", + "metadata": { + "user_expressions": [] + }, + "source": [ + "Notice that we have defined our spatial and temporal domains, but we do not have any datasets in our QUEST object. The next section leads us through that process." + ] + }, + { + "cell_type": "markdown", + "id": "1598bbca-3dcb-4b63-aeb1-81c27d92a1a2", + "metadata": { + "user_expressions": [] + }, + "source": [ + "## Getting the data\n", + "\n", + "Let's first query the ICESat-2 data. If we want to extract information about the water column, the ATL03 product is likely the desired choice.\n", + "* `short_name`: ATL03" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "309a7b26-cfc3-46fc-a683-43e154412074", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# ICESat-2 product\n", + "short_name = 'ATL03'\n", + "\n", + "# Add ICESat-2 to QUEST datasets\n", + "reg_a.add_icesat2(product=short_name)\n", + "print(reg_a)" + ] + }, + { + "cell_type": "markdown", + "id": "ad4bbcfe-3199-4a28-8739-c930d1572538", + "metadata": { + "user_expressions": [] + }, + "source": [ + "Let's see the available files over this region." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a2b4e56f-ceff-45e7-b52c-e7725dc6c812", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "pprint(reg_a.datasets['icesat2'].avail_granules(ids=True))" + ] + }, + { + "cell_type": "markdown", + "id": "7a081854-dae4-4e99-a550-02c02a71b6de", + "metadata": { + "user_expressions": [] + }, + "source": [ + "Note the ICESat-2 functions shown here are the same as those used for direct icepyx queries. 
The user is referred to other [example workbooks](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_access.html) for detailed explanations about icepyx functionality.\n",
+    "\n",
+    "Accessing ICESat-2 data requires Earthdata login credentials. When running the `download_all()` function below, an authentication check will be performed when attempting to download the ICESat-2 files."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "8264515a-00f1-4f57-b927-668a71294079",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "Now let's grab Argo data using the same constraints. This is as simple as calling the function below."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c857fdcc-e271-4960-86a9-02f693cc13fe",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# Add argo to the desired QUEST datasets\n",
+    "reg_a.add_argo()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "7bade19e-5939-410a-ad54-363636289082",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "When accessing Argo data, the variables of interest will be organized as vertical profiles as a function of pressure. By default, only temperature is queried, so the user should supply a list of desired parameters using the code below. The user may also limit the pressure range of the returned data by passing `presRange=\"0,200\"`.\n",
+    "\n",
+    "*Note: Our example shows only physical Argo float parameters, but the process is identical for including BGC float parameters.*"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "6739c3aa-1a88-4d8e-9fd8-479528c20e97",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# Customized variable query to retrieve salinity instead of temperature\n",
+    "reg_a.add_argo(params=['salinity'])"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "2d06436c-2271-4229-8196-9f5180975ab1",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "Additionally, a user may view or update the list of requested Argo and Argo-BGC parameters at any time through `reg_a.datasets['argo'].params`. If a user submits an invalid parameter (\"temp\" instead of \"temperature\", for example), an `AssertionError` will be raised. `reg_a.datasets['argo'].presRange` behaves analogously for limiting the pressure range of Argo data."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e34756b8",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# update the list of argo parameters\n",
+    "reg_a.datasets['argo'].params = ['temperature','salinity']\n",
+    "\n",
+    "# show the current list\n",
+    "reg_a.datasets['argo'].params"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "453900c1-cd62-40c9-820c-0615f63f17f5",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "As for ICESat-2 data, the user can interact directly with the Argo data object (`reg_a.datasets['argo']`) to search or download data outside of the `Quest.search_all()` and `Quest.download_all()` functionality shown below.\n",
+    "\n",
+    "The approach to directly search or download Argo data is to use `reg_a.datasets['argo'].search_data()` and `reg_a.datasets['argo'].download()`. In both cases, the existing parameters and pressure ranges are used unless the user passes new `params` and/or `presRange` kwargs, respectively, which will directly update those values (stored attributes).\n",
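+    "\n",
+    "As a minimal sketch of this direct usage (the pressure range `\"0,500\"` below is an arbitrary illustration; any valid `presRange` string works the same way):\n",
+    "\n",
+    "```python\n",
+    "# search using a new pressure range (this also updates the stored presRange attribute)\n",
+    "reg_a.datasets['argo'].search_data(presRange='0,500')\n",
+    "\n",
+    "# download directly, updating the stored params to also request salinity\n",
+    "reg_a.datasets['argo'].download(params=['temperature', 'salinity'])\n",
+    "```"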
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "3f55be4e-d261-49c1-ac14-e19d8e0ff828",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "With our current setup, let's see what Argo parameters we will get."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "435a1243",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# see what argo parameters will be searched for or downloaded\n",
+    "reg_a.datasets['argo'].params"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "c15675df",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "reg_a.datasets['argo'].search_data()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "70d36566-0d3c-4781-a199-09bb11dad975",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "Now we can access the data for both Argo and ICESat-2! The function below will do this for us.\n",
+    "\n",
+    "**Important**: The Argo data will be compiled into a Pandas DataFrame, which must be manually saved by the user as demonstrated below. The ICESat-2 data is saved as processed HDF5 files to the directory provided."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "a818c5d7-d69a-4aad-90a2-bc670a54c3a7",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "path = './quest/downloaded-data/'\n",
+    "\n",
+    "# Access Argo and ICESat-2 data simultaneously\n",
+    "reg_a.download_all(path=path)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "ad29285e-d161-46ea-8a57-95891fa2b237",
+   "metadata": {
+    "tags": [],
+    "user_expressions": []
+   },
+   "source": [
+    "We now have one available Argo profile, containing `temperature` and `pressure`, in a Pandas DataFrame. BGC Argo is also available through QUEST, so we could add more variables to this list.\n",
+    "\n",
+    "If the user wishes to add more profiles, parameters, and/or pressure ranges to a pre-existing DataFrame, then they should use `reg_a.datasets['argo'].download(keep_existing=True)` to retain previously downloaded data and have the new data added."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "6970f0ad-9364-4732-a5e6-f93cf3fc31a3",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "The `reg_a.download_all()` function also provided a file containing ICESat-2 ATL03 data. Recall that because these data files are very large, we focus on only one file for this example.\n",
+    "\n",
+    "The below workflow uses the icepyx Read module to quickly load ICESat-2 data into an Xarray Dataset. To read in multiple files, see the [icepyx Read tutorial](https://icepyx.readthedocs.io/en/latest/example_notebooks/IS2_data_read-in.html) for how to change your input source.\n",
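+    "\n",
+    "As a rough sketch only (this assumes your icepyx version accepts a directory of granules as `data_source`; the linked tutorial documents the supported input patterns):\n",
+    "\n",
+    "```python\n",
+    "# hypothetical: point the reader at the whole download folder instead of a single file\n",
+    "reader = ipx.Read(data_source='./quest/downloaded-data/')\n",
+    "```"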
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "88f4b1b0-8c58-414c-b6a8-ce1662979943",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "filename = 'processed_ATL03_20220419002753_04111506_006_02.h5'\n",
+    "\n",
+    "reader = ipx.Read(data_source=path+filename)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "665d79a7-7360-4846-99c2-222b34df2a92",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# decide which portions of the file to read in\n",
+    "reader.vars.append(beam_list=['gt2l'], \n",
+    "                   var_list=['h_ph', \"lat_ph\", \"lon_ph\", 'signal_conf_ph'])"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "e7158814-50f0-4940-980c-9bb800360982",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "ds = reader.load()\n",
+    "ds"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "1040438c-d806-4964-b4f0-1247da9f3f1f",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "To make the data more easily plottable, let's convert the data into a Pandas DataFrame. Note that this method is memory-intensive for ATL03 data, so users are advised to look at small spatial domains to prevent the notebook from crashing. Here, since we only have data from one granule and ground track, we have sped up the conversion to a dataframe by first removing extra data dimensions we don't need for our plots. Several of the other steps completed below using Pandas have analogous operations in Xarray that would further reduce memory requirements and computation times."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "50d23a8e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "is2_pd = (ds.squeeze()\n",
+    "        .reset_coords()\n",
+    "        .drop_vars([\"source_file\",\"data_start_utc\",\"data_end_utc\",\"gran_idx\"])\n",
+    "        .to_dataframe()\n",
+    "        )"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "01bb5a12",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "is2_pd"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "fc67e039-338c-4348-acaf-96f605cf0030",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# Create a new dataframe with only \"ocean\" photons, as indicated by the \"ds_surf_type\" flag\n",
+    "is2_pd = is2_pd.reset_index(level=[0,1])\n",
+    "is2_pd_ocean = is2_pd[is2_pd.ds_surf_type==1].drop(columns=\"photon_idx\")\n",
+    "is2_pd_ocean"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "976ed530-1dc9-412f-9d2d-e51abd28c564",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# Set Argo data as its own DataFrame\n",
+    "argo_df = reg_a.datasets['argo'].argodata"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "f9a3b8cf-f3b9-4522-841b-bf760672e37f",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# Convert both DataFrames into GeoDataFrames\n",
+    "is2_gdf = gpd.GeoDataFrame(is2_pd_ocean, \n",
+    "                           geometry=gpd.points_from_xy(is2_pd_ocean['lon_ph'], is2_pd_ocean['lat_ph']),\n",
+    "                           crs='EPSG:4326'\n",
+    ")\n",
+    "argo_gdf = gpd.GeoDataFrame(argo_df, \n",
+    "                            geometry=gpd.points_from_xy(argo_df.lon, argo_df.lat),\n",
+    "                            crs='EPSG:4326'\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "86cb8463-dc14-4c1d-853e-faf7bf4300a5",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "To view the relative locations of ICESat-2 and Argo, the below cell uses the `explore()` function from GeoPandas. The time variables cause errors in the function, so we will drop those variables first.\n",
+    "\n",
+    "Note that for large datasets like ICESat-2, loading the map might take a while."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "7178fecc-6ca1-42a1-98d4-08f57c050daa",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# Drop time variables that would cause errors in explore() function\n",
+    "is2_gdf = is2_gdf.drop(['delta_time','atlas_sdp_gps_epoch'], axis=1)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "5ff40f7b-3a0f-4e32-8187-322a5b7cb44d",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# Plot ICESat-2 track (medium/high confidence photons only) on a map\n",
+    "m = is2_gdf[is2_gdf['signal_conf_ph']>=3].explore(column='rgt', tiles='Esri.WorldImagery',\n",
+    "                                                  name='ICESat-2')\n",
+    "\n",
+    "# Add Argo float locations to map\n",
+    "argo_gdf.explore(m=m, name='Argo', marker_kwds={\"radius\": 6}, color='red')"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "8b7063ec-a2f8-4509-a7ce-5b0482b48682",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "While we're at it, let's plot temperature and pressure profiles for each of the Argo floats in the area."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "da2748b7-b174-4abb-a44a-bd73d1d36eba",
+   "metadata": {
+    "tags": []
+   },
+   "outputs": [],
+   "source": [
+    "# Plot vertical profile of temperature vs. pressure for all of the floats\n",
+    "fig, ax = plt.subplots(figsize=(12, 6))\n",
+    "for pid in np.unique(argo_df['profile_id']):\n",
+    "    argo_df[argo_df['profile_id']==pid].plot(ax=ax, x='temperature', y='pressure', label=pid)\n",
+    "plt.gca().invert_yaxis()\n",
+    "plt.xlabel('Temperature [$\degree$C]')\n",
+    "plt.ylabel('Pressure [dbar]')\n",
+    "plt.ylim([750, -10])\n",
+    "plt.tight_layout()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "08481fbb-2298-432b-bd50-df2e1ca45cf5",
+   "metadata": {
+    "user_expressions": []
+   },
+   "source": [
+    "Lastly, let's look at some near-coincident ICESat-2 and Argo data in a multi-panel plot."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1269de3c-c15d-4120-8284-3b072069d5ee", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "# Only consider ICESat-2 signal photons\n", + "is2_pd_signal = is2_pd_ocean[is2_pd_ocean['signal_conf_ph']>=0]\n", + "\n", + "## Multi-panel plot showing ICESat-2 and Argo data\n", + "\n", + "# Calculate Extent\n", + "lons = [-154, -143, -143, -154, -154]\n", + "lats = [30, 30, 37, 37, 30]\n", + "lon_margin = (max(lons) - min(lons)) * 0.1\n", + "lat_margin = (max(lats) - min(lats)) * 0.1\n", + "\n", + "# Create Plot\n", + "fig,([ax1,ax2],[ax3,ax4]) = plt.subplots(2, 2, figsize=(12, 6))\n", + "\n", + "# Plot Relative Global View\n", + "world = gpd.read_file(gpd.datasets.get_path('naturalearth_lowres'))\n", + "world.plot(ax=ax1, color='0.8', edgecolor='black')\n", + "argo_df.plot.scatter(ax=ax1, x='lon', y='lat', s=25.0, c='green', zorder=3, alpha=0.3)\n", + "is2_pd_signal.plot.scatter(ax=ax1, x='lon_ph', y='lat_ph', s=10.0, zorder=2, alpha=0.3)\n", + "ax1.plot(lons, lats, linewidth=1.5, color='orange', zorder=2)\n", + "ax1.set_xlim(-160,-100)\n", + "ax1.set_ylim(20,50)\n", + "ax1.set_aspect('equal', adjustable='box')\n", + "ax1.set_xlabel('Longitude', fontsize=18)\n", + "ax1.set_ylabel('Latitude', fontsize=18)\n", + "\n", + "# Plot Zoomed View of Ground Tracks\n", + "argo_df.plot.scatter(ax=ax2, x='lon', y='lat', s=50.0, c='green', zorder=3, alpha=0.3)\n", + "is2_pd_signal.plot.scatter(ax=ax2, x='lon_ph', y='lat_ph', s=10.0, zorder=2, alpha=0.3)\n", + "ax2.plot(lons, lats, linewidth=1.5, color='orange', zorder=1)\n", + "ax2.set_xlim(min(lons) - lon_margin, max(lons) + lon_margin)\n", + "ax2.set_ylim(min(lats) - lat_margin, max(lats) + lat_margin)\n", + "ax2.set_aspect('equal', adjustable='box')\n", + "ax2.set_xlabel('Longitude', fontsize=18)\n", + "ax2.set_ylabel('Latitude', fontsize=18)\n", + "\n", + "# Plot ICESat-2 along-track vertical profile. A dotted line notes the location of a nearby Argo float\n", + "is2 = ax3.scatter(is2_pd_signal['lat_ph'], is2_pd_signal['h_ph']+13.1, s=0.1)\n", + "ax3.axvline(34.43885, linestyle='--', linewidth=3, color='black')\n", + "ax3.set_xlim([34.3, 34.5])\n", + "ax3.set_ylim([-20, 5])\n", + "ax3.set_xlabel('Latitude', fontsize=18)\n", + "ax3.set_ylabel('Approx. 
IS-2 Depth [m]', fontsize=16)\n", + "ax3.set_yticklabels(['15', '10', '5', '0', '-5'])\n", + "\n", + "# Plot vertical ocean profile of the nearby Argo float\n", + "argo_df.plot(ax=ax4, x='temperature', y='pressure', linewidth=3)\n", + "# ax4.set_yscale('log')\n", + "ax4.invert_yaxis()\n", + "ax4.get_legend().remove()\n", + "ax4.set_xlabel('Temperature [$\\degree$C]', fontsize=18)\n", + "ax4.set_ylabel('Argo Pressure', fontsize=16)\n", + "\n", + "plt.tight_layout()\n", + "\n", + "# Save figure\n", + "#plt.savefig('/icepyx/quest/figures/is2_argo_figure.png', dpi=500)" + ] + }, + { + "cell_type": "markdown", + "id": "37720c79", + "metadata": {}, + "source": [ + "Recall that the Argo data must be saved manually.\n", + "The dataframe associated with the Quest object can be saved using `reg_a.save_all(path)`" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9b6548e2-0662-4c8b-a251-55ca63aff99b", + "metadata": {}, + "outputs": [], + "source": [ + "reg_a.save_all(path)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.10" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/doc/source/index.rst b/doc/source/index.rst index 719f528b2..612af6adc 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -128,6 +128,7 @@ ICESat-2 datasets to enable scientific discovery. example_notebooks/IS2_data_visualization example_notebooks/IS2_data_read-in example_notebooks/IS2_cloud_data_access + example_notebooks/QUEST_argo_data_access .. toctree:: :maxdepth: 2 @@ -145,8 +146,9 @@ ICESat-2 datasets to enable scientific discovery. contributing/contributors_link contributing/contribution_guidelines contributing/how_to_contribute - contributing/icepyx_internals contributing/attribution_link + contributing/icepyx_internals + contributing/quest-available-datasets contributing/development_plan contributing/release_guide contributing/code_of_conduct_link diff --git a/doc/source/tracking/citations.rst b/doc/source/tracking/citations.rst index b31132be8..bf5672587 100644 --- a/doc/source/tracking/citations.rst +++ b/doc/source/tracking/citations.rst @@ -49,6 +49,8 @@ Research that utilizes icepyx for ICESat-2 data .. bibliography:: icepyx_pubs.bib :style: mystyle + Freer2023 + Idestrom2023 Shean2023 Eidam2022 Leeuwen:2022 diff --git a/doc/source/tracking/icepyx_pubs.bib b/doc/source/tracking/icepyx_pubs.bib index a1d945c01..d13c9653f 100644 --- a/doc/source/tracking/icepyx_pubs.bib +++ b/doc/source/tracking/icepyx_pubs.bib @@ -183,6 +183,30 @@ @inProceedings{Fernando:2021 } +@Article{Freer2023, +AUTHOR = {Freer, B. I. D. and Marsh, O. J. and Hogg, A. E. and Fricker, H. A. 
and Padman, L.},
+TITLE = {Modes of {Antarctic} tidal grounding line migration revealed by {Ice, Cloud, and land Elevation Satellite-2 (ICESat-2)} laser altimetry},
+JOURNAL = {The Cryosphere},
+VOLUME = {17},
+YEAR = {2023},
+NUMBER = {9},
+PAGES = {4079--4101},
+URL = {https://tc.copernicus.org/articles/17/4079/2023/},
+DOI = {10.5194/tc-17-4079-2023}
+}
+
+
+@mastersthesis{Idestrom2023,
+  author = {Petter Idestr\"{o}m},
+  title = {Remote Sensing of Cryospheric Surfaces: Small Scale Surface Roughness Signatures in Satellite Altimetry Data},
+  school = {Ume\aa University},
+  year = {2023},
+  address = {Sweden},
+  month = {Sept.},
+  url = {https://www.diva-portal.org/smash/get/diva2:1801057/FULLTEXT01.pdf}
+}
+
+
 @misc{Leeuwen:2022,
 author = {van Leeuwen, Gijs},
 title = {The automated retrieval of supraglacial lake depth and extent from {ICESat-2} photon clouds leveraging {DBSCAN} clustering},
diff --git a/doc/source/tracking/pypistats/downloads.svg b/doc/source/tracking/pypistats/downloads.svg
index 9263dcd5d..cda5938b4 100644
--- a/doc/source/tracking/pypistats/downloads.svg
+++ b/doc/source/tracking/pypistats/downloads.svg
[SVG figure diff omitted: the download-statistics plot was regenerated (creation date 2023-08-01 -> 2023-12-01, Matplotlib v3.7.2 -> v3.8.2); the remaining changes are machine-generated axis-tick and line-path coordinates with no human-readable content.]
diff --git a/doc/source/tracking/pypistats/downloads_data.csv b/doc/source/tracking/pypistats/downloads_data.csv
index 17f1e78d8..f7e2a4f6a 100644
--- a/doc/source/tracking/pypistats/downloads_data.csv
+++ b/doc/source/tracking/pypistats/downloads_data.csv
@@ -1118,6 +1118,128 @@ with_mirrors,2023-07-28,6
 with_mirrors,2023-07-29,3
 with_mirrors,2023-07-30,3
 with_mirrors,2023-07-31,72
+with_mirrors,2023-08-01,72
+with_mirrors,2023-08-02,14
+with_mirrors,2023-08-03,8
+with_mirrors,2023-08-04,11
+with_mirrors,2023-08-05,39
+with_mirrors,2023-08-06,89
+with_mirrors,2023-08-07,4
+with_mirrors,2023-08-08,70
+with_mirrors,2023-08-09,66
+with_mirrors,2023-08-10,148
+with_mirrors,2023-08-11,36
+with_mirrors,2023-08-12,10
+with_mirrors,2023-08-13,34
+with_mirrors,2023-08-14,65
+with_mirrors,2023-08-15,2
+with_mirrors,2023-08-16,29
+with_mirrors,2023-08-17,75
+with_mirrors,2023-08-18,7
+with_mirrors,2023-08-19,3 +with_mirrors,2023-08-20,2 +with_mirrors,2023-08-21,15 +with_mirrors,2023-08-22,6 +with_mirrors,2023-08-23,12 +with_mirrors,2023-08-24,22 +with_mirrors,2023-08-25,78 +with_mirrors,2023-08-26,37 +with_mirrors,2023-08-27,33 +with_mirrors,2023-08-28,2 +with_mirrors,2023-08-29,1 +with_mirrors,2023-08-30,12 +with_mirrors,2023-08-31,31 +with_mirrors,2023-09-01,3 +with_mirrors,2023-09-02,33 +with_mirrors,2023-09-03,6 +with_mirrors,2023-09-04,14 +with_mirrors,2023-09-05,19 +with_mirrors,2023-09-06,4 +with_mirrors,2023-09-07,2 +with_mirrors,2023-09-08,3 +with_mirrors,2023-09-09,2 +with_mirrors,2023-09-10,35 +with_mirrors,2023-09-11,32 +with_mirrors,2023-09-12,1 +with_mirrors,2023-09-13,5 +with_mirrors,2023-09-14,127 +with_mirrors,2023-09-15,125 +with_mirrors,2023-09-16,23 +with_mirrors,2023-09-17,18 +with_mirrors,2023-09-18,24 +with_mirrors,2023-09-19,19 +with_mirrors,2023-09-20,25 +with_mirrors,2023-09-21,29 +with_mirrors,2023-09-22,24 +with_mirrors,2023-09-23,121 +with_mirrors,2023-09-24,49 +with_mirrors,2023-09-25,42 +with_mirrors,2023-09-26,44 +with_mirrors,2023-09-27,8 +with_mirrors,2023-09-28,20 +with_mirrors,2023-09-29,19 +with_mirrors,2023-09-30,7 +with_mirrors,2023-10-01,4 +with_mirrors,2023-10-02,33 +with_mirrors,2023-10-03,43 +with_mirrors,2023-10-04,26 +with_mirrors,2023-10-05,9 +with_mirrors,2023-10-06,23 +with_mirrors,2023-10-07,32 +with_mirrors,2023-10-08,73 +with_mirrors,2023-10-09,20 +with_mirrors,2023-10-10,19 +with_mirrors,2023-10-11,12 +with_mirrors,2023-10-12,4 +with_mirrors,2023-10-13,11 +with_mirrors,2023-10-14,10 +with_mirrors,2023-10-15,1 +with_mirrors,2023-10-16,8 +with_mirrors,2023-10-17,37 +with_mirrors,2023-10-18,14 +with_mirrors,2023-10-19,12 +with_mirrors,2023-10-20,12 +with_mirrors,2023-10-21,41 +with_mirrors,2023-10-22,6 +with_mirrors,2023-10-23,25 +with_mirrors,2023-10-24,40 +with_mirrors,2023-10-25,21 +with_mirrors,2023-10-26,93 +with_mirrors,2023-10-27,12 +with_mirrors,2023-10-28,87 +with_mirrors,2023-10-29,43 +with_mirrors,2023-10-30,39 +with_mirrors,2023-10-31,47 +with_mirrors,2023-11-01,44 +with_mirrors,2023-11-02,46 +with_mirrors,2023-11-03,74 +with_mirrors,2023-11-04,72 +with_mirrors,2023-11-05,25 +with_mirrors,2023-11-06,61 +with_mirrors,2023-11-07,50 +with_mirrors,2023-11-08,10 +with_mirrors,2023-11-09,7 +with_mirrors,2023-11-10,16 +with_mirrors,2023-11-11,1 +with_mirrors,2023-11-12,12 +with_mirrors,2023-11-13,22 +with_mirrors,2023-11-14,89 +with_mirrors,2023-11-15,152 +with_mirrors,2023-11-16,101 +with_mirrors,2023-11-17,63 +with_mirrors,2023-11-18,58 +with_mirrors,2023-11-19,15 +with_mirrors,2023-11-20,21 +with_mirrors,2023-11-21,29 +with_mirrors,2023-11-22,73 +with_mirrors,2023-11-23,10 +with_mirrors,2023-11-24,48 +with_mirrors,2023-11-25,17 +with_mirrors,2023-11-26,16 +with_mirrors,2023-11-27,88 +with_mirrors,2023-11-28,20 +with_mirrors,2023-11-29,99 +with_mirrors,2023-11-30,59 without_mirrors,2020-06-18,22 without_mirrors,2020-06-19,14 without_mirrors,2020-06-21,4 @@ -2139,3 +2261,119 @@ without_mirrors,2023-07-28,6 without_mirrors,2023-07-29,3 without_mirrors,2023-07-30,3 without_mirrors,2023-07-31,10 +without_mirrors,2023-08-01,41 +without_mirrors,2023-08-02,8 +without_mirrors,2023-08-03,8 +without_mirrors,2023-08-04,11 +without_mirrors,2023-08-05,4 +without_mirrors,2023-08-07,4 +without_mirrors,2023-08-08,8 +without_mirrors,2023-08-09,4 +without_mirrors,2023-08-10,55 +without_mirrors,2023-08-11,4 +without_mirrors,2023-08-12,2 +without_mirrors,2023-08-13,3 +without_mirrors,2023-08-14,3 +without_mirrors,2023-08-16,27 
+without_mirrors,2023-08-17,13 +without_mirrors,2023-08-18,5 +without_mirrors,2023-08-19,3 +without_mirrors,2023-08-20,2 +without_mirrors,2023-08-21,15 +without_mirrors,2023-08-22,6 +without_mirrors,2023-08-23,2 +without_mirrors,2023-08-24,22 +without_mirrors,2023-08-25,11 +without_mirrors,2023-08-26,6 +without_mirrors,2023-08-27,33 +without_mirrors,2023-08-28,2 +without_mirrors,2023-08-29,1 +without_mirrors,2023-08-30,4 +without_mirrors,2023-09-01,3 +without_mirrors,2023-09-02,2 +without_mirrors,2023-09-04,6 +without_mirrors,2023-09-05,7 +without_mirrors,2023-09-06,4 +without_mirrors,2023-09-07,2 +without_mirrors,2023-09-08,3 +without_mirrors,2023-09-09,2 +without_mirrors,2023-09-10,2 +without_mirrors,2023-09-11,1 +without_mirrors,2023-09-12,1 +without_mirrors,2023-09-13,5 +without_mirrors,2023-09-14,75 +without_mirrors,2023-09-15,17 +without_mirrors,2023-09-16,15 +without_mirrors,2023-09-17,10 +without_mirrors,2023-09-18,22 +without_mirrors,2023-09-19,15 +without_mirrors,2023-09-20,15 +without_mirrors,2023-09-21,25 +without_mirrors,2023-09-22,8 +without_mirrors,2023-09-23,24 +without_mirrors,2023-09-24,4 +without_mirrors,2023-09-25,9 +without_mirrors,2023-09-26,11 +without_mirrors,2023-09-27,4 +without_mirrors,2023-09-28,6 +without_mirrors,2023-09-29,9 +without_mirrors,2023-09-30,5 +without_mirrors,2023-10-01,4 +without_mirrors,2023-10-02,25 +without_mirrors,2023-10-03,8 +without_mirrors,2023-10-04,24 +without_mirrors,2023-10-05,9 +without_mirrors,2023-10-06,21 +without_mirrors,2023-10-07,8 +without_mirrors,2023-10-08,7 +without_mirrors,2023-10-09,20 +without_mirrors,2023-10-10,19 +without_mirrors,2023-10-11,8 +without_mirrors,2023-10-12,4 +without_mirrors,2023-10-13,9 +without_mirrors,2023-10-14,8 +without_mirrors,2023-10-15,1 +without_mirrors,2023-10-16,8 +without_mirrors,2023-10-17,35 +without_mirrors,2023-10-18,10 +without_mirrors,2023-10-19,7 +without_mirrors,2023-10-20,12 +without_mirrors,2023-10-21,8 +without_mirrors,2023-10-22,6 +without_mirrors,2023-10-23,20 +without_mirrors,2023-10-24,7 +without_mirrors,2023-10-25,21 +without_mirrors,2023-10-26,10 +without_mirrors,2023-10-27,8 +without_mirrors,2023-10-28,21 +without_mirrors,2023-10-29,8 +without_mirrors,2023-10-30,37 +without_mirrors,2023-10-31,27 +without_mirrors,2023-11-01,10 +without_mirrors,2023-11-02,9 +without_mirrors,2023-11-03,8 +without_mirrors,2023-11-04,37 +without_mirrors,2023-11-06,45 +without_mirrors,2023-11-07,17 +without_mirrors,2023-11-08,4 +without_mirrors,2023-11-09,5 +without_mirrors,2023-11-10,6 +without_mirrors,2023-11-12,12 +without_mirrors,2023-11-13,16 +without_mirrors,2023-11-14,17 +without_mirrors,2023-11-15,115 +without_mirrors,2023-11-16,45 +without_mirrors,2023-11-17,14 +without_mirrors,2023-11-18,9 +without_mirrors,2023-11-19,5 +without_mirrors,2023-11-20,19 +without_mirrors,2023-11-21,23 +without_mirrors,2023-11-22,30 +without_mirrors,2023-11-23,8 +without_mirrors,2023-11-24,6 +without_mirrors,2023-11-25,9 +without_mirrors,2023-11-26,14 +without_mirrors,2023-11-27,16 +without_mirrors,2023-11-28,12 +without_mirrors,2023-11-29,29 +without_mirrors,2023-11-30,45 diff --git a/doc/source/tracking/pypistats/sys_downloads_data.csv b/doc/source/tracking/pypistats/sys_downloads_data.csv index c5867dc23..98cdf03fe 100644 --- a/doc/source/tracking/pypistats/sys_downloads_data.csv +++ b/doc/source/tracking/pypistats/sys_downloads_data.csv @@ -155,6 +155,33 @@ Darwin,2023-07-07,1 Darwin,2023-07-10,1 Darwin,2023-07-16,1 Darwin,2023-07-26,1 +Darwin,2023-08-01,31 +Darwin,2023-08-03,1 +Darwin,2023-08-08,2 
+Darwin,2023-08-18,1 +Darwin,2023-08-21,1 +Darwin,2023-09-13,1 +Darwin,2023-09-14,3 +Darwin,2023-09-18,2 +Darwin,2023-09-21,2 +Darwin,2023-09-24,1 +Darwin,2023-09-25,2 +Darwin,2023-10-05,2 +Darwin,2023-10-23,1 +Darwin,2023-10-24,2 +Darwin,2023-10-28,1 +Darwin,2023-10-30,2 +Darwin,2023-11-01,1 +Darwin,2023-11-04,1 +Darwin,2023-11-06,4 +Darwin,2023-11-08,1 +Darwin,2023-11-13,2 +Darwin,2023-11-15,1 +Darwin,2023-11-16,8 +Darwin,2023-11-20,4 +Darwin,2023-11-21,2 +Darwin,2023-11-24,1 +Darwin,2023-11-27,2 Linux,2020-06-18,9 Linux,2020-06-19,2 Linux,2020-06-22,2 @@ -846,6 +873,114 @@ Linux,2023-07-28,5 Linux,2023-07-29,3 Linux,2023-07-30,2 Linux,2023-07-31,4 +Linux,2023-08-01,7 +Linux,2023-08-02,5 +Linux,2023-08-03,5 +Linux,2023-08-04,10 +Linux,2023-08-05,1 +Linux,2023-08-07,2 +Linux,2023-08-08,2 +Linux,2023-08-09,3 +Linux,2023-08-10,10 +Linux,2023-08-11,3 +Linux,2023-08-12,2 +Linux,2023-08-13,2 +Linux,2023-08-14,1 +Linux,2023-08-16,16 +Linux,2023-08-17,10 +Linux,2023-08-18,2 +Linux,2023-08-19,2 +Linux,2023-08-20,2 +Linux,2023-08-21,11 +Linux,2023-08-22,4 +Linux,2023-08-23,1 +Linux,2023-08-24,21 +Linux,2023-08-25,10 +Linux,2023-08-26,1 +Linux,2023-08-27,2 +Linux,2023-08-30,3 +Linux,2023-09-01,3 +Linux,2023-09-02,1 +Linux,2023-09-04,5 +Linux,2023-09-05,3 +Linux,2023-09-06,2 +Linux,2023-09-08,2 +Linux,2023-09-09,1 +Linux,2023-09-10,2 +Linux,2023-09-11,1 +Linux,2023-09-12,1 +Linux,2023-09-13,1 +Linux,2023-09-14,20 +Linux,2023-09-15,1 +Linux,2023-09-16,4 +Linux,2023-09-17,2 +Linux,2023-09-19,1 +Linux,2023-09-20,6 +Linux,2023-09-21,1 +Linux,2023-09-22,7 +Linux,2023-09-23,3 +Linux,2023-09-25,4 +Linux,2023-09-26,7 +Linux,2023-09-27,1 +Linux,2023-09-28,4 +Linux,2023-09-29,1 +Linux,2023-09-30,2 +Linux,2023-10-01,1 +Linux,2023-10-02,14 +Linux,2023-10-03,1 +Linux,2023-10-04,24 +Linux,2023-10-05,4 +Linux,2023-10-06,10 +Linux,2023-10-07,2 +Linux,2023-10-08,1 +Linux,2023-10-09,20 +Linux,2023-10-10,16 +Linux,2023-10-11,2 +Linux,2023-10-13,2 +Linux,2023-10-14,2 +Linux,2023-10-15,1 +Linux,2023-10-16,8 +Linux,2023-10-17,29 +Linux,2023-10-18,6 +Linux,2023-10-19,3 +Linux,2023-10-20,9 +Linux,2023-10-21,4 +Linux,2023-10-22,3 +Linux,2023-10-23,14 +Linux,2023-10-24,3 +Linux,2023-10-25,16 +Linux,2023-10-26,4 +Linux,2023-10-27,5 +Linux,2023-10-28,4 +Linux,2023-10-29,2 +Linux,2023-10-30,27 +Linux,2023-10-31,6 +Linux,2023-11-01,1 +Linux,2023-11-03,7 +Linux,2023-11-04,32 +Linux,2023-11-06,35 +Linux,2023-11-07,14 +Linux,2023-11-08,1 +Linux,2023-11-09,2 +Linux,2023-11-10,2 +Linux,2023-11-13,2 +Linux,2023-11-14,7 +Linux,2023-11-15,13 +Linux,2023-11-16,20 +Linux,2023-11-17,4 +Linux,2023-11-18,3 +Linux,2023-11-19,1 +Linux,2023-11-20,10 +Linux,2023-11-21,14 +Linux,2023-11-22,16 +Linux,2023-11-23,2 +Linux,2023-11-24,2 +Linux,2023-11-25,1 +Linux,2023-11-26,2 +Linux,2023-11-27,10 +Linux,2023-11-28,3 +Linux,2023-11-29,23 +Linux,2023-11-30,5 Windows,2020-06-21,1 Windows,2020-06-25,1 Windows,2020-06-30,1 @@ -1411,6 +1546,94 @@ Windows,2023-07-24,1 Windows,2023-07-26,1 Windows,2023-07-27,3 Windows,2023-07-31,4 +Windows,2023-08-01,3 +Windows,2023-08-02,3 +Windows,2023-08-03,2 +Windows,2023-08-04,1 +Windows,2023-08-05,3 +Windows,2023-08-07,2 +Windows,2023-08-08,3 +Windows,2023-08-11,1 +Windows,2023-08-13,1 +Windows,2023-08-14,2 +Windows,2023-08-16,10 +Windows,2023-08-17,2 +Windows,2023-08-18,1 +Windows,2023-08-19,1 +Windows,2023-08-21,2 +Windows,2023-08-22,2 +Windows,2023-08-25,1 +Windows,2023-08-26,5 +Windows,2023-08-29,1 +Windows,2023-09-04,1 +Windows,2023-09-05,3 +Windows,2023-09-06,2 +Windows,2023-09-07,1 +Windows,2023-09-08,1 
+Windows,2023-09-09,1
+Windows,2023-09-13,1
+Windows,2023-09-14,2
+Windows,2023-09-15,9
+Windows,2023-09-16,1
+Windows,2023-09-17,2
+Windows,2023-09-18,17
+Windows,2023-09-19,13
+Windows,2023-09-20,9
+Windows,2023-09-22,1
+Windows,2023-09-23,19
+Windows,2023-09-24,3
+Windows,2023-09-25,2
+Windows,2023-09-26,2
+Windows,2023-09-27,1
+Windows,2023-09-29,3
+Windows,2023-10-01,2
+Windows,2023-10-02,1
+Windows,2023-10-06,11
+Windows,2023-10-07,6
+Windows,2023-10-08,4
+Windows,2023-10-10,3
+Windows,2023-10-11,6
+Windows,2023-10-12,2
+Windows,2023-10-13,7
+Windows,2023-10-14,5
+Windows,2023-10-17,4
+Windows,2023-10-18,4
+Windows,2023-10-19,4
+Windows,2023-10-20,2
+Windows,2023-10-21,2
+Windows,2023-10-22,2
+Windows,2023-10-23,5
+Windows,2023-10-24,2
+Windows,2023-10-25,5
+Windows,2023-10-26,5
+Windows,2023-10-27,3
+Windows,2023-10-28,8
+Windows,2023-10-29,4
+Windows,2023-10-30,4
+Windows,2023-10-31,19
+Windows,2023-11-01,5
+Windows,2023-11-02,9
+Windows,2023-11-03,1
+Windows,2023-11-04,3
+Windows,2023-11-06,5
+Windows,2023-11-08,1
+Windows,2023-11-10,4
+Windows,2023-11-12,12
+Windows,2023-11-13,5
+Windows,2023-11-14,2
+Windows,2023-11-15,8
+Windows,2023-11-16,2
+Windows,2023-11-17,2
+Windows,2023-11-18,2
+Windows,2023-11-20,4
+Windows,2023-11-21,5
+Windows,2023-11-22,11
+Windows,2023-11-23,4
+Windows,2023-11-24,2
+Windows,2023-11-27,4
+Windows,2023-11-28,7
+Windows,2023-11-29,2
+Windows,2023-11-30,4
 null,2020-06-18,12
 null,2020-06-19,12
 null,2020-06-21,2
@@ -2101,3 +2324,72 @@ null,2023-07-27,4
 null,2023-07-28,1
 null,2023-07-30,1
 null,2023-07-31,2
+null,2023-08-08,1
+null,2023-08-09,1
+null,2023-08-10,45
+null,2023-08-16,1
+null,2023-08-17,1
+null,2023-08-18,1
+null,2023-08-21,1
+null,2023-08-23,1
+null,2023-08-24,1
+null,2023-08-27,31
+null,2023-08-28,2
+null,2023-08-30,1
+null,2023-09-02,1
+null,2023-09-05,1
+null,2023-09-07,1
+null,2023-09-13,2
+null,2023-09-14,50
+null,2023-09-15,7
+null,2023-09-16,10
+null,2023-09-17,6
+null,2023-09-18,3
+null,2023-09-19,1
+null,2023-09-21,22
+null,2023-09-23,2
+null,2023-09-25,1
+null,2023-09-26,2
+null,2023-09-27,2
+null,2023-09-28,2
+null,2023-09-29,5
+null,2023-09-30,3
+null,2023-10-01,1
+null,2023-10-02,10
+null,2023-10-03,7
+null,2023-10-05,3
+null,2023-10-08,2
+null,2023-10-12,2
+null,2023-10-14,1
+null,2023-10-17,2
+null,2023-10-20,1
+null,2023-10-21,2
+null,2023-10-22,1
+null,2023-10-26,1
+null,2023-10-28,8
+null,2023-10-29,2
+null,2023-10-30,4
+null,2023-10-31,2
+null,2023-11-01,3
+null,2023-11-04,1
+null,2023-11-06,1
+null,2023-11-07,3
+null,2023-11-08,1
+null,2023-11-09,3
+null,2023-11-13,7
+null,2023-11-14,8
+null,2023-11-15,93
+null,2023-11-16,15
+null,2023-11-17,8
+null,2023-11-18,4
+null,2023-11-19,4
+null,2023-11-20,1
+null,2023-11-21,2
+null,2023-11-22,3
+null,2023-11-23,2
+null,2023-11-24,1
+null,2023-11-25,8
+null,2023-11-26,12
+null,2023-11-28,2
+null,2023-11-29,4
+null,2023-11-30,36
diff --git a/doc/source/tracking/traffic/clones.csv b/doc/source/tracking/traffic/clones.csv
index 24e6bea49..5aab8477b 100644
--- a/doc/source/tracking/traffic/clones.csv
+++ b/doc/source/tracking/traffic/clones.csv
@@ -786,3 +786,93 @@ _date,total_clones,unique_clones
 2023-08-25,2,2
 2023-08-26,2,1
 2023-08-27,2,2
+2023-08-28,9,9
+2023-08-29,8,8
+2023-08-30,33,24
+2023-08-31,16,13
+2023-09-01,19,15
+2023-09-02,1,1
+2023-09-04,7,6
+2023-09-05,8,8
+2023-09-06,13,12
+2023-09-07,2,2
+2023-09-08,8,8
+2023-09-11,9,8
+2023-09-12,18,17
+2023-09-13,27,15
+2023-09-14,39,25
+2023-09-15,6,5
+2023-09-16,1,1
+2023-09-17,1,1
+2023-09-18,5,5
+2023-09-21,11,4
+2023-09-22,1,1
+2023-09-23,5,2
+2023-09-25,14,11
+2023-09-26,5,5
+2023-09-27,4,2
+2023-09-29,1,1
+2023-09-30,3,3
+2023-10-01,2,2
+2023-10-02,8,7
+2023-10-03,7,7
+2023-10-05,1,1
+2023-10-06,52,16
+2023-10-08,1,1
+2023-10-09,11,11
+2023-10-10,17,14
+2023-10-11,3,3
+2023-10-12,2,2
+2023-10-13,1,1
+2023-10-14,2,2
+2023-10-16,7,7
+2023-10-17,14,13
+2023-10-18,38,27
+2023-10-19,9,7
+2023-10-20,9,6
+2023-10-21,4,4
+2023-10-23,10,10
+2023-10-24,3,1
+2023-10-25,1,1
+2023-10-26,18,13
+2023-10-27,27,15
+2023-10-28,2,2
+2023-10-29,2,2
+2023-10-30,5,4
+2023-10-31,15,14
+2023-11-01,6,5
+2023-11-02,14,14
+2023-11-03,13,10
+2023-11-04,5,5
+2023-11-05,1,1
+2023-11-06,16,12
+2023-11-07,6,6
+2023-11-08,4,4
+2023-11-09,2,2
+2023-11-11,2,2
+2023-11-13,6,6
+2023-11-14,14,10
+2023-11-15,15,12
+2023-11-16,4,3
+2023-11-17,8,7
+2023-11-18,1,1
+2023-11-19,2,2
+2023-11-20,13,10
+2023-11-21,6,6
+2023-11-22,1,1
+2023-11-23,3,3
+2023-11-24,2,2
+2023-11-25,2,2
+2023-11-26,2,2
+2023-11-27,22,16
+2023-11-28,1,1
+2023-11-29,3,3
+2023-12-01,4,4
+2023-12-02,1,1
+2023-12-03,30,3
+2023-12-04,13,13
+2023-12-05,6,5
+2023-12-06,28,24
+2023-12-07,7,7
+2023-12-09,5,4
+2023-12-10,2,2
diff --git a/doc/source/tracking/traffic/plots.svg b/doc/source/tracking/traffic/plots.svg
index 427b06351..f1fb9aab4 100644
--- a/doc/source/tracking/traffic/plots.svg
+++ b/doc/source/tracking/traffic/plots.svg
@@ -6,11 +6,11 @@
-    2023-08-28T00:36:38.038589
+    2023-12-11T00:40:01.228614
     image/svg+xml
-    Matplotlib v3.7.2, https://matplotlib.org/
+    Matplotlib v3.8.2, https://matplotlib.org/
[remaining hunks omitted: machine-generated Matplotlib SVG markup (path coordinates, axis ticks, labels) for the GitHub traffic plots of clones and views, re-rendered with the data above extended through 2023-12-10]
diff --git a/doc/source/tracking/traffic/traffic_data_mgmt.py b/doc/source/tracking/traffic/traffic_data_mgmt.py
index 457646a05..0d0a6af9c 100644
--- a/doc/source/tracking/traffic/traffic_data_mgmt.py
+++ b/doc/source/tracking/traffic/traffic_data_mgmt.py
@@ -1,7 +1,6 @@
 import matplotlib.pyplot as plt
 import os
 import pandas as pd
-import subprocess
 
 cwd = os.getcwd()
 
@@ -52,5 +51,6 @@ def update_csv(string):
update_csv(string): fig.savefig(trafficpath + "plots.svg") -# removing the files should not be necessary here since they're not included in the git commit +# removing the files should not be necessary here +# since they're not included in the git commit # subprocess.run(["rm -rf " + defaultpath[:-1]], shell=True)
diff --git a/doc/source/tracking/traffic/views.csv b/doc/source/tracking/traffic/views.csv index 82cb0bb06..724aa73f6 100644 --- a/doc/source/tracking/traffic/views.csv +++ b/doc/source/tracking/traffic/views.csv @@ -1030,3 +1030,107 @@ _date,total_views,unique_views 2023-08-25,26,6 2023-08-26,7,4 2023-08-27,5,3 +2023-08-28,118,11 +2023-08-29,73,15 +2023-08-30,164,12 +2023-08-31,85,7 +2023-09-01,101,11 +2023-09-02,6,2 +2023-09-03,3,3 +2023-09-04,23,6 +2023-09-05,92,14 +2023-09-06,246,14 +2023-09-07,51,7 +2023-09-08,90,10 +2023-09-09,127,4 +2023-09-10,45,7 +2023-09-11,74,8 +2023-09-12,99,10 +2023-09-13,86,10 +2023-09-14,172,15 +2023-09-15,114,14 +2023-09-16,45,2 +2023-09-17,124,3 +2023-09-18,110,8 +2023-09-19,42,9 +2023-09-20,66,15 +2023-09-21,36,10 +2023-09-22,55,6 +2023-09-23,5,4 +2023-09-24,113,4 +2023-09-25,49,12 +2023-09-26,79,11 +2023-09-27,13,7 +2023-09-28,7,3 +2023-09-29,64,9 +2023-09-30,150,3 +2023-10-01,5,2 +2023-10-02,40,6 +2023-10-03,82,8 +2023-10-04,81,8 +2023-10-05,6,5 +2023-10-06,84,6 +2023-10-07,10,2 +2023-10-08,9,5 +2023-10-09,81,9 +2023-10-10,93,9 +2023-10-11,45,6 +2023-10-12,33,6 +2023-10-13,10,5 +2023-10-14,6,3 +2023-10-15,16,3 +2023-10-16,69,14 +2023-10-17,72,14 +2023-10-18,108,15 +2023-10-19,79,10 +2023-10-20,29,5 +2023-10-22,10,4 +2023-10-23,43,10 +2023-10-24,39,6 +2023-10-25,63,13 +2023-10-26,124,16 +2023-10-27,51,11 +2023-10-28,2,2 +2023-10-29,5,2 +2023-10-30,38,13 +2023-10-31,219,20 +2023-11-01,115,17 +2023-11-02,90,13 +2023-11-03,48,7 +2023-11-04,56,3 +2023-11-05,34,5 +2023-11-06,121,13 +2023-11-07,69,8 +2023-11-08,42,9 +2023-11-09,45,9 +2023-11-10,23,9 +2023-11-11,21,4 +2023-11-12,18,4 +2023-11-13,79,14 +2023-11-14,179,20 +2023-11-15,95,11 +2023-11-16,52,8 +2023-11-17,45,9 +2023-11-18,3,3 +2023-11-19,20,7 +2023-11-20,157,12 +2023-11-21,70,16 +2023-11-22,54,13 +2023-11-23,36,15 +2023-11-24,2,2 +2023-11-25,6,4 +2023-11-26,5,3 +2023-11-27,293,12 +2023-11-28,46,12 +2023-11-29,20,9 +2023-11-30,48,11 +2023-12-01,52,11 +2023-12-02,27,6 +2023-12-03,7,5 +2023-12-04,115,11 +2023-12-05,68,11 +2023-12-06,112,9 +2023-12-07,83,12 +2023-12-08,23,10 +2023-12-09,9,5 +2023-12-10,19,7
diff --git a/doc/source/user_guide/changelog/index.rst b/doc/source/user_guide/changelog/index.rst index eaaffd658..1d6579898 100644 --- a/doc/source/user_guide/changelog/index.rst +++ b/doc/source/user_guide/changelog/index.rst @@ -6,9 +6,18 @@ icepyx ChangeLog This is the list of changes made to icepyx in between each release. Full details can be found in the `commit logs `_. -Latest Release (Version 0.8.1) + +Latest Release (Version 1.0.0) ------------------------------ +.. toctree:: + :maxdepth: 2 + + v1.0.0 + +Version 0.8.1 +------------- + .. toctree:: :maxdepth: 2
diff --git a/doc/source/user_guide/changelog/v1.0.0.rst b/doc/source/user_guide/changelog/v1.0.0.rst new file mode 100644 index 000000000..1e015e2b6 --- /dev/null +++ b/doc/source/user_guide/changelog/v1.0.0.rst @@ -0,0 +1,102 @@ +What's new in 1.0.0 (5 January 2024) +------------------------------------ + +These are the changes in icepyx 1.0.0. See :ref:`release` for a full changelog +including other versions of icepyx.
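A minimal sketch of the new Read call pattern summarized under "New (and Updated) Features" below, assuming icepyx >= 1.0 and a hypothetical local directory of ATL06 granules (the path and variable choices are illustrative only):

    import icepyx as ipx

    # data_source may now be a plain path, a glob string, or a list of files;
    # the product is read from each file's metadata rather than a filename pattern
    reader = ipx.Read("/tmp/is2data/ATL06_*.h5")
    reader.vars.append(var_list=["h_li"])  # select variables before loading
    ds = reader.load()  # returns an xarray Dataset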
+ + +New (and Updated) Features +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- update Read input arguments (#444) + + - add filelist and product properties to Read object + - deprecate filename_pattern and product class Read inputs + - transition to data_source input as a string (including glob string) or list + - update tutorial with changes and user guidance for using glob + +- enable QUEST kwarg handling (#452) + + - add kwarg acceptance for data queries and download_all in quest + - Add QUEST dataset page to RTD + +- Variables as an independent class (#451) + + - Refactor Variables class to be user-facing functionality + +- Expand Variables class to read s3 urls (#464) + + - expand extract_product and extract_version to check for s3 url + - add cloud notes to variables notebook + +- add argo functionality to QUEST (#427) + + - add argo.py dataset functionality and implementation through QUEST + - demonstrate QUEST usage via example notebook + - add save to QUEST DataSet class template + +- Expand icepyx to read s3 data (#468) + + +Bug fixes +~~~~~~~~~ + +- temporarily disable OpenAltimetry API tests (#459) + + - add OA API warning + - comment out tests that use OA API + +- fix spot number calculation (#458) +- Update read module coordinate dimension manipulations to use new xarray index (#473) +- Fix behind EDL tests (#480) +- fix permissions for publishing to pypi (#487) + + +Deprecations +~~~~~~~~~~~~ + +- deprecate filename_pattern and product class Read inputs (#444) +- remove `file` input arg and `_source` property from query (and improve some formatting) (#479) + + +Maintenance +~~~~~~~~~~~ + +- update QUEST and GenQuery classes for argo integration (#441) +- format all code files using black (#476) +- update tests to data version 006 and resolve flake8 errors on edited files (#478) +- update github actions and add black linter for PRs (#475) + + - update pypi action to use OIDC trusted publisher mgmt + - generalize the flake8 action to a general linting action and add black + - put flake8 config parameters into a separate file (.flake8) + - update versions of actions/pre-commit hooks + - specify uml updates only need to run on PRs to development + - do not run uml updates on PRs into main (#449) + - update docs config files to be compliant + - temporarily ignore many flake8 error codes until legacy files are updated + +- Convert deprecation warnings to errors and remove associated checks (#482) + + +Documentation +~~~~~~~~~~~~~ + +- Fix a broken link in IS2_data_access.ipynb (#456) +- docs: add rwegener2 as a contributor for bug, code, and 6 more (#460) +- docs: add jpswinski as a contributor for review (#461) +- docs: add whyjz as a contributor for tutorial (#462) +- add newest icepyx citations (#455) +- traffic updates Aug-Dec 2023 (#477) +- docs: add lheagy as a contributor for mentoring, and review (#481) +- docs: add rtilling as a contributor for ideas (#484) + + +Contributors +~~~~~~~~~~~~ + +..
contributors:: v0.8.1..v1.0.0|HEAD + +- Kelsey Bisson +- Zach Fair +- Romina Piunno
diff --git a/doc/source/user_guide/documentation/classes_dev_uml.svg b/doc/source/user_guide/documentation/classes_dev_uml.svg index 8e83d4dc1..09c112f5c 100644 --- a/doc/source/user_guide/documentation/classes_dev_uml.svg +++ b/doc/source/user_guide/documentation/classes_dev_uml.svg
[SVG diff body omitted: auto-generated developer UML class diagram; its text reflects this release's API changes, e.g. GenQuery gains the dates, end_time, spatial, spatial_extent, start_time, and temporal properties; Query drops _file_vars, _source, and the files argument; Read gains filelist, is_s3, and product and now inherits EarthdataAuthMixin; Variables switches to path, product, and version properties with a reordered __init__ signature]
diff --git a/doc/source/user_guide/documentation/classes_user_uml.svg b/doc/source/user_guide/documentation/classes_user_uml.svg index 1c9184379..256cc1794 100644 --- a/doc/source/user_guide/documentation/classes_user_uml.svg +++ b/doc/source/user_guide/documentation/classes_user_uml.svg
[SVG diff body omitted: auto-generated user-facing UML class diagram mirroring the same property and inheritance changes as classes_dev_uml.svg]
diff --git a/doc/source/user_guide/documentation/components.rst b/doc/source/user_guide/documentation/components.rst index b4b658385..dea41a970 100644 --- a/doc/source/user_guide/documentation/components.rst +++ b/doc/source/user_guide/documentation/components.rst @@ -67,14 +67,6 @@ validate\_inputs :undoc-members: :show-inheritance: -variables ---------- - -.. automodule:: icepyx.core.variables - :members: - :undoc-members: - :show-inheritance: - visualize ---------
diff --git a/doc/source/user_guide/documentation/icepyx.rst b/doc/source/user_guide/documentation/icepyx.rst index 56ff7f496..a8a9a6f8e 100644 --- a/doc/source/user_guide/documentation/icepyx.rst +++ b/doc/source/user_guide/documentation/icepyx.rst @@ -23,4 +23,5 @@ Diagrams are updated automatically after a pull request (PR) is approved and bef query read quest + variables components
diff --git a/doc/source/user_guide/documentation/packages_user_uml.svg b/doc/source/user_guide/documentation/packages_user_uml.svg index 44a041c77..5c45fc92b 100644 --- a/doc/source/user_guide/documentation/packages_user_uml.svg +++ b/doc/source/user_guide/documentation/packages_user_uml.svg
[SVG diff body omitted: auto-generated package dependency diagram; the regenerated graph adds the dependency edges auth->exceptions, icesat2data->exceptions, query->exceptions, read->auth, and variables->exceptions]
diff --git a/doc/source/user_guide/documentation/read.rst b/doc/source/user_guide/documentation/read.rst index a5beedf4e..68da03b1d 100644 --- a/doc/source/user_guide/documentation/read.rst +++ b/doc/source/user_guide/documentation/read.rst @@ -19,6 +19,8 @@ Attributes .. autosummary:: :toctree: ../../_icepyx/ + Read.filelist + Read.product Read.vars
diff --git a/doc/source/user_guide/documentation/variables.rst b/doc/source/user_guide/documentation/variables.rst new file mode 100644 index 000000000..e147bfd64 --- /dev/null +++ b/doc/source/user_guide/documentation/variables.rst @@ -0,0 +1,25 @@ +Variables Class +================= + +.. currentmodule:: icepyx + + +Constructor +----------- + +.. autosummary:: + :toctree: ../../_icepyx/ + + Variables + + +Methods +------- + +.. autosummary:: + :toctree: ../../_icepyx/ + + Variables.avail + Variables.parse_var_list + Variables.append + Variables.remove
diff --git a/doc/sphinxext/announce.py b/doc/sphinxext/announce.py index 21bf7a69e..db6858678 100644 --- a/doc/sphinxext/announce.py +++ b/doc/sphinxext/announce.py @@ -17,9 +17,20 @@ The output is utf8 rst. -Custom extension from the Pandas library: https://github.com/pandas-dev/pandas/blob/1.1.x/doc/sphinxext/announce.py +Custom extension from the Pandas library: +https://github.com/pandas-dev/pandas/blob/1.1.x/doc/sphinxext/announce.py Copied 10 August 2020 and subsequently modified. -Specifically, get_authors was adjusted to check for a .mailmap file and use the git through the command line in order to utilize it if present. Using a mailmap file is currently not possible in gitpython (from git import Repo), and the recommended solution is to bring in the mailmap file yourself and use it to modify the author list (i.e. replicate the functionality that already exists in git). This felt a bit out of time-scope for right now. Alternatively, the git-fame library (imported as gitfame) uses the mailmap file and compiles statistics, but the python wrapper for this command line tool was taking forever. So, I've reverted to using os.system to use git behind the scenes instead. +Specifically, get_authors was adjusted to check for a .mailmap file +and use the git through the command line in order to utilize it if present. +Using a mailmap file is currently not possible in gitpython +(from git import Repo), and the recommended solution is to +bring in the mailmap file yourself and use it to modify the author list +(i.e. replicate the functionality that already exists in git). +This felt a bit out of time-scope for right now. +Alternatively, the git-fame library (imported as gitfame) +uses the mailmap file and compiles statistics, +but the python wrapper for this command line tool was taking forever. +So, I've reverted to using os.system to use git behind the scenes instead. Dependencies ------------ @@ -76,7 +87,6 @@ def get_authors(revision_range): # "Co-authored by" commits, which come from backports by the bot, # and one for regular commits.
if ".mailmap" in os.listdir(this_repo.git.working_dir): - xpr = re.compile(r"Co-authored-by: (?P<name>[^<]+) ") gitcur = list(os.popen("git shortlog -s " + revision_range).readlines()) @@ -94,7 +104,6 @@ def get_authors(revision_range): pre = set(pre) else: - xpr = re.compile(r"Co-authored-by: (?P<name>[^<]+) ") cur = set( xpr.findall(
diff --git a/icepyx/__init__.py b/icepyx/__init__.py index 3d92e2e60..40ea9e1ec 100644 --- a/icepyx/__init__.py +++ b/icepyx/__init__.py @@ -1,5 +1,6 @@ from icepyx.core.query import Query, GenQuery from icepyx.core.read import Read from icepyx.quest.quest import Quest +from icepyx.core.variables import Variables from _icepyx_version import version as __version__
diff --git a/icepyx/core/APIformatting.py b/icepyx/core/APIformatting.py index 55d49f84c..b5d31bdfa 100644 --- a/icepyx/core/APIformatting.py +++ b/icepyx/core/APIformatting.py @@ -205,7 +205,6 @@ class Parameters: """ def __init__(self, partype, values=None, reqtype=None): - assert partype in [ "CMR", "required",
diff --git a/icepyx/core/auth.py b/icepyx/core/auth.py index 7c36126f9..ba07ac398 100644 --- a/icepyx/core/auth.py +++ b/icepyx/core/auth.py @@ -3,17 +3,21 @@ import warnings import earthaccess +from icepyx.core.exceptions import DeprecationError + class AuthenticationError(Exception): - ''' + """ Raised when an error is encountered while authenticating Earthdata credentials - ''' + """ + pass -class EarthdataAuthMixin(): +class EarthdataAuthMixin: """ - This mixin class generates the needed authentication sessions and tokens, including for NASA Earthdata cloud access. + This mixin class generates the needed authentication sessions and tokens, + including for NASA Earthdata cloud access. Authentication is completed using the [earthaccess library](https://nsidc.github.io/earthaccess/). Methods for authenticating are: 1. Storing credentials as environment variables ($EARTHDATA_LOGIN and $EARTHDATA_PASSWORD) @@ -21,26 +25,29 @@ class EarthdataAuthMixin(): 2. Entering credentials interactively 3. Storing credentials in a .netrc file (not recommended for security reasons) More details on using these methods is available in the [earthaccess documentation](https://nsidc.github.io/earthaccess/tutorials/restricted-datasets/#auth). - This class can be inherited by any other class that requires authentication. For - example, the `Query` class inherits this one, and so a Query object has the - `.session` property. The method `earthdata_login()` is included for backwards compatibility. - + This class can be inherited by any other class that requires authentication. + For example, the `Query` class inherits this one, and so a Query object has the + `.session` property. + The method `earthdata_login()` is included for backwards compatibility. + + The class can be created without any initialization parameters, and the properties will - be populated when they are called. It can alternately be initialized with an - earthaccess.auth.Auth object, which will then be used to create a session or + be populated when they are called. + It can alternately be initialized with an + earthaccess.auth.Auth object, which will then be used to create a session or s3login_credentials as they are called. - + Parameters ---------- auth : earthaccess.auth.Auth, default None Optional parameter to initialize an object with existing credentials.
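A minimal sketch of inheriting the mixin, assuming only the behavior documented above (MyAccess is a hypothetical class name):

    from icepyx.core.auth import EarthdataAuthMixin

    class MyAccess(EarthdataAuthMixin):
        def __init__(self, auth=None):
            # sets up the lazily populated _auth/_session/_s3login_credentials attributes
            EarthdataAuthMixin.__init__(self, auth=auth)

    obj = MyAccess()
    session = obj.session  # first access triggers earthaccess.login()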
Examples -------- >>> a = EarthdataAuthMixin() >>> a.session # doctest: +SKIP >>> a.s3login_credentials # doctest: +SKIP """ + def __init__(self, auth=None): self._auth = copy.deepcopy(auth) # initialization of session and s3 creds is not allowed because those are generated @@ -58,25 +65,27 @@ def __str__(self): @property def auth(self): - ''' - Authentication object returned from earthaccess.login() which stores user authentication. - ''' + """ + Authentication object returned from earthaccess.login() which stores user authentication. + """ # Only login the first time .auth is accessed if self._auth is None: auth = earthaccess.login() # check for a valid auth response if auth.authenticated is False: - raise AuthenticationError('Earthdata authentication failed. Check output for error message') + raise AuthenticationError( + "Earthdata authentication failed. Check output for error message" + ) else: self._auth = auth - + return self._auth @property def session(self): - ''' + """ Earthaccess session object for connecting to Earthdata resources. - ''' + """ # Only generate a session the first time .session is accessed if self._session is None: self._session = self.auth.get_session() @@ -84,33 +93,38 @@ def session(self): @property def s3login_credentials(self): - ''' - A dictionary which stores login credentials for AWS s3 access. This property is accessed - if using AWS cloud data. - + """ + A dictionary which stores login credentials for AWS s3 access. + This property is accessed if using AWS cloud data. + Because s3 tokens are only good for one hour, this function will automatically check if an hour has elapsed since the last token use and generate a new token if necessary. - ''' - + """ + def set_s3_creds(): - ''' Store s3login creds from `auth` and reset the last updated timestamp''' + """Store s3login creds from `auth` and reset the last updated timestamp""" self._s3login_credentials = self.auth.get_s3_credentials(daac="NSIDC") self._s3_initial_ts = datetime.datetime.now() - + # Only generate s3login_credentials the first time credentials are accessed, or if an hour - # has passed since the last login + # has passed since the last login if self._s3login_credentials is None: set_s3_creds() - elif (datetime.datetime.now() - self._s3_initial_ts) >= datetime.timedelta(hours=1): + elif (datetime.datetime.now() - self._s3_initial_ts) >= datetime.timedelta( + hours=1 + ): set_s3_creds() return self._s3login_credentials def earthdata_login(self, uid=None, email=None, s3token=None, **kwargs) -> None: """ Authenticate with NASA Earthdata to enable data ordering and download. - Credential storage details are described in the EathdataAuthMixin class section. - - **Note:** This method is maintained for backward compatibility. It is no longer required to explicitly run `.earthdata_login()`. Authentication will be performed by the module as needed when `.session` or `.s3login_credentials` are accessed. + Credential storage details are described in the + EarthdataAuthMixin class section. + + **Note:** This method is deprecated and will be removed in a future release. + It is no longer required to explicitly run `.earthdata_login()`. + Authentication will be performed by the module as needed. Parameters ---------- uid : string, default None Earthdata login user ID @@ -119,7 +133,8 @@ def earthdata_login(self, uid=None, email=None, s3token=None, **kwargs) -> None: email : string, default None Deprecated keyword for backwards compatibility.
s3token : boolean, default None - Deprecated keyword to generate AWS s3 ICESat-2 data access credentials + Deprecated keyword to generate AWS s3 ICESat-2 + data access credentials kwargs : key:value pairs Keyword arguments to be passed into earthaccess.login(). @@ -133,13 +148,7 @@ def earthdata_login(self, uid=None, email=None, s3token=None, **kwargs) -> None: No .netrc found in /Users/username """ - warnings.warn( - "It is no longer required to explicitly run the `.earthdata_login()` method. Authentication will be performed by the module as needed.", - DeprecationWarning, stacklevel=2 - ) - - if uid != None or email != None or s3token != None: - warnings.warn( - "The user id (uid) and/or email keyword arguments are no longer required.", - DeprecationWarning, stacklevel=2 - ) + raise DeprecationError( + "It is no longer required to explicitly run the `.earthdata_login()` method. " + "Authentication will be performed by the module as needed.", + )
diff --git a/icepyx/core/exceptions.py b/icepyx/core/exceptions.py index a36a1b645..d20bbfe61 100644 --- a/icepyx/core/exceptions.py +++ b/icepyx/core/exceptions.py @@ -2,6 +2,7 @@ class DeprecationError(Exception): """ Class raised for use of functionality that is no longer supported by icepyx. """ + pass @@ -27,5 +28,3 @@ def __init__( def __str__(self): return f"{self.msgtxt}: {self.errmsg}" - -
diff --git a/icepyx/core/icesat2data.py b/icepyx/core/icesat2data.py index cebce4160..e57305124 100644 --- a/icepyx/core/icesat2data.py +++ b/icepyx/core/icesat2data.py @@ -1,11 +1,10 @@ -import warnings +from icepyx.core.exceptions import DeprecationError class Icesat2Data: - def __init__(self,): - - warnings.filterwarnings("always") - warnings.warn( + def __init__( + self, + ): + raise DeprecationError( "DEPRECATED. Please use icepyx.Query to create a download data object (all other functionality is the same)", - DeprecationWarning, )
diff --git a/icepyx/core/is2ref.py b/icepyx/core/is2ref.py index 52cf0e3a1..c51c631be 100644 --- a/icepyx/core/is2ref.py +++ b/icepyx/core/is2ref.py @@ -1,20 +1,21 @@ +import h5py import json import numpy as np import requests import warnings from xml.etree import ElementTree as ET +import earthaccess -import icepyx # ICESat-2 specific reference functions -# options to get customization options for ICESat-2 data (though could be used generally) def _validate_product(product): """ Confirm a valid ICESat-2 product was specified """ + error_msg = "A valid product string was not provided. Check user input, if given, or file metadata." if isinstance(product, str): product = str.upper(product) assert product in [ @@ -40,15 +41,12 @@ def _validate_product(product): "ATL20", "ATL21", "ATL23", - ], "Please enter a valid product" + ], error_msg else: - raise TypeError("Please enter a product string") + raise TypeError(error_msg) return product -# DevGoal: See if there's a way to dynamically get this list so it's automatically updated - - def _validate_OA_product(product): """ Confirm a valid ICESat-2 product was specified @@ -85,6 +83,7 @@ def about_product(prod): # DevGoal: use a mock of this output to test later functions, such as displaying options and widgets, etc. +# options to get customization options for ICESat-2 data (though could be used generally) def _get_custom_options(session, product, version): """ Get lists of what customization options are available for the product from NSIDC.
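# A minimal sketch, assuming a hypothetical local granule path, of the metadata
# helpers added further down in this file (latest_version, extract_product,
# extract_version); for s3 paths an earthaccess auth object must also be passed:
#
#   import icepyx.core.is2ref as is2ref
#   prod = is2ref.extract_product("/tmp/ATL06_example_granule.h5")  # e.g. 'ATL06'
#   vers = is2ref.extract_version("/tmp/ATL06_example_granule.h5")  # e.g. '006'
#   newest = is2ref.latest_version(prod)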
@@ -109,7 +108,11 @@ def _get_custom_options(session, product, version): # reformatting formats = [Format.attrib for Format in root.iter("Format")] format_vals = [formats[i]["value"] for i in range(len(formats))] - format_vals.remove("") + try: + format_vals.remove("") + except ValueError: + # ATL23 does not have an empty value + pass cust_options.update({"fileformats": format_vals}) # reprojection only applicable on ICESat-2 L3B products. @@ -265,8 +268,11 @@ def _default_varlists(product): return common_list -# dev goal: check and test this function def gt2spot(gt, sc_orient): + warnings.warn( + "icepyx versions 0.8.0 and earlier used an incorrect spot number calculation. " + "As a result, computations depending on spot number may be incorrect and should be redone." + ) assert gt in [ "gt1l", @@ -280,12 +286,13 @@ def gt2spot(gt, sc_orient): gr_num = np.uint8(gt[2]) gr_lr = gt[3] + # spacecraft oriented forward if sc_orient == 1: if gr_num == 1: if gr_lr == "l": - spot = 2 + spot = 6 elif gr_lr == "r": - spot = 1 + spot = 5 elif gr_num == 2: if gr_lr == "l": spot = 4 @@ -293,16 +300,17 @@ def gt2spot(gt, sc_orient): spot = 3 elif gr_num == 3: if gr_lr == "l": - spot = 6 + spot = 2 elif gr_lr == "r": - spot = 5 + spot = 1 + # spacecraft oriented backward elif sc_orient == 0: if gr_num == 1: if gr_lr == "l": - spot = 5 + spot = 1 elif gr_lr == "r": - spot = 6 + spot = 2 elif gr_num == 2: if gr_lr == "l": spot = 3 @@ -310,11 +318,112 @@ def gt2spot(gt, sc_orient): spot = 4 elif gr_num == 3: if gr_lr == "l": - spot = 1 + spot = 5 elif gr_lr == "r": - spot = 2 + spot = 6 if "spot" not in locals(): raise ValueError("Could not compute the spot number.") return np.uint8(spot) + + +def latest_version(product): + """ + Determine the most recent version available for the given product. + + Examples + -------- + >>> latest_version('ATL03') + '006' + """ + _about_product = about_product(product) + + return max([entry["version_id"] for entry in _about_product["feed"]["entry"]]) + + +def extract_product(filepath, auth=None): + """ + Read the product type from the metadata of the file. Valid for local or s3 files, but must + provide an auth object if reading from s3. Return the product as a string. + + Parameters + ---------- + filepath: string + local or remote location of a file. Could be a local filepath or an s3 filepath + auth: earthaccess.auth.Auth, default None + An earthaccess authentication object. Optional, but necessary if accessing data in an + s3 bucket. + """ + # Generate a file reader object relevant for the file location + if filepath.startswith("s3"): + if not auth: + raise AttributeError( + "Must provide credentials to `auth` if accessing s3 data" + ) + # Read the s3 file + s3 = earthaccess.get_s3fs_session(daac="NSIDC") + f = h5py.File(s3.open(filepath, "rb")) + else: + # Otherwise assume a local filepath. Read with h5py. + f = h5py.File(filepath, "r") + + # Extract the product information + try: + product = f.attrs["short_name"] + if isinstance(product, bytes): + # For most products the short name is stored in a bytes string + product = product.decode() + elif isinstance(product, np.ndarray): + # ATL14 saves the short_name as an array ['ATL14'] + product = product[0] + product = _validate_product(product) + except KeyError: + raise ValueError("Unable to parse the product name from file metadata") + + # Close the file reader + f.close() + return product + + +def extract_version(filepath, auth=None): + """ + Read the version from the metadata of the file.
Valid for local or s3 files, but must + provide an auth object if reading from s3. Return the version as a string. + + Parameters + ---------- + filepath: string + local or remote location of a file. Could be a local filepath or an s3 filepath + auth: earthaccess.auth.Auth, default None + An earthaccess authentication object. Optional, but necessary if accessing data in an + s3 bucket. + """ + # Generate a file reader object relevant for the file location + if filepath.startswith("s3"): + if not auth: + raise AttributeError( + "Must provide credentials to `auth` if accessing s3 data" + ) + # Read the s3 file + s3 = earthaccess.get_s3fs_session(daac="NSIDC") + f = h5py.File(s3.open(filepath, "rb")) + else: + # Otherwise assume a local filepath. Read with h5py. + f = h5py.File(filepath, "r") + + # Read the version information + try: + version = f["METADATA"]["DatasetIdentification"].attrs["VersionID"] + if isinstance(version, np.ndarray): + # ATL14 stores the version as an array ['00x'] + version = version[0] + if isinstance(version, bytes): + version = version.decode() + + except KeyError: + raise ValueError("Unable to parse the version from file metadata") + + # Close the file reader + f.close() + return version
diff --git a/icepyx/core/query.py b/icepyx/core/query.py index e8f1d8e7c..25f13d5b6 100644 --- a/icepyx/core/query.py +++ b/icepyx/core/query.py @@ -1,22 +1,14 @@ -import datetime as dt import geopandas as gpd -import json import matplotlib.pyplot as plt -import numpy as np -import os -from pathlib import Path +from pathlib import Path # used in docstring tests import pprint -import time -import warnings import icepyx.core.APIformatting as apifmt from icepyx.core.auth import EarthdataAuthMixin +from icepyx.core.exceptions import DeprecationError import icepyx.core.granules as granules -from icepyx.core.granules import Granules as Granules +from icepyx.core.granules import Granules import icepyx.core.is2ref as is2ref - -# QUESTION: why doesn't from granules import Granules as Granules work, since granules=icepyx.core.granules? -# from icepyx.core.granules import Granules import icepyx.core.spatial as spat import icepyx.core.temporal as tp import icepyx.core.validate_inputs as val @@ -35,14 +27,17 @@ class GenQuery: Parameters ---------- spatial_extent : list of coordinates or string (i.e. file name) - Spatial extent of interest, provided as a bounding box, list of polygon coordinates, or + Spatial extent of interest, provided as a bounding box, + list of polygon coordinates, or geospatial polygon file. NOTE: Longitude values are assumed to be in the range -180 to +180, - with 0 being the Prime Meridian (Greenwich). See xdateline for regions crossing the date line. + with 0 being the Prime Meridian (Greenwich). + See xdateline for regions crossing the date line. You can submit at most one bounding box or list of polygon coordinates. Per NSIDC requirements, geospatial polygon files may only contain one feature (polygon). Bounding box coordinates should be provided in decimal degrees as - [lower-left-longitude, lower-left-latitute, upper-right-longitude, upper-right-latitude]. + [lower-left-longitude, lower-left-latitude, + upper-right-longitude, upper-right-latitude]. Polygon coordinates should be provided as coordinate pairs in decimal degrees as [(longitude1, latitude1), (longitude2, latitude2), ... (longitude_n,latitude_n), (longitude1,latitude1)] or @@ -74,7 +69,8 @@ class GenQuery: where HH = hours, mm = minutes, ss = seconds.
If None is given (and a datetime.datetime object is not supplied for `date_range`), a default of 23:59:59 is applied. - If a datetime.datetime object was created without times, the datetime package defaults will apply over those of icepyx + If a datetime.datetime object was created without times, + the datetime package defaults will apply over those of icepyx xdateline : boolean, default None Keyword argument to enforce spatial inputs that cross the International Date Line. Internally, this will translate your longitudes to 0 to 360 to construct the @@ -148,191 +144,9 @@ def __str__(self): ) return str - -# DevGoal: update docs throughout to allow for polygon spatial extent -# Note: add files to docstring once implemented -# DevNote: currently this class is not tested -class Query(GenQuery, EarthdataAuthMixin): - """ - Query and get ICESat-2 data - - ICESat-2 Data object to query, obtain, and perform basic operations on - available ICESat-2 data products using temporal and spatial input parameters. - Allows the easy input and formatting of search parameters to match the - NASA NSIDC DAAC and (development goal-not yet implemented) conversion to multiple data types. - Expands the superclass GenQuery. - - See the doc page for GenQuery for details on temporal and spatial input parameters. - - Parameters - ---------- - product : string - ICESat-2 data product ID, also known as "short name" (e.g. ATL03). - Available data products can be found at: https://nsidc.org/data/icesat-2/data-sets - version : string, default most recent version - Product version, given as a 3 digit string. If no version is given, the current - version is used. Example: "004" - cycles : string or a list of strings, default all available orbital cycles - Product cycle, given as a 2 digit string. If no cycle is given, all available - cycles are used. Example: "04" - tracks : string or a list of strings, default all available reference ground tracks (RGTs) - Product track, given as a 4 digit string. If no track is given, all available - reference ground tracks are used. Example: "0594" - files : string, default None - A placeholder for future development. Not used for any purposes yet. - - Returns - ------- - query object - - Examples - -------- - Initializing Query with a bounding box. - - >>> reg_a_bbox = [-55, 68, -48, 71] - >>> reg_a_dates = ['2019-02-20','2019-02-28'] - >>> reg_a = Query('ATL06', reg_a_bbox, reg_a_dates) - >>> print(reg_a) - Product ATL06 v006 - ('bounding_box', [-55.0, 68.0, -48.0, 71.0]) - Date range ['2019-02-20', '2019-02-28'] - - Initializing Query with a list of polygon vertex coordinate pairs. - - >>> reg_a_poly = [(-55, 68), (-55, 71), (-48, 71), (-48, 68), (-55, 68)] - >>> reg_a_dates = ['2019-02-20','2019-02-28'] - >>> reg_a = Query('ATL06', reg_a_poly, reg_a_dates) - >>> reg_a.spatial_extent - ('polygon', [-55.0, 68.0, -55.0, 71.0, -48.0, 71.0, -48.0, 68.0, -55.0, 68.0]) - - Initializing Query with a geospatial polygon file. 
- - >>> aoi = str(Path('./doc/source/example_notebooks/supporting_files/simple_test_poly.gpkg').resolve()) - >>> reg_a_dates = ['2019-02-22','2019-02-28'] - >>> reg_a = Query('ATL06', aoi, reg_a_dates) - >>> print(reg_a) - Product ATL06 v006 - ('polygon', [-55.0, 68.0, -55.0, 71.0, -48.0, 71.0, -48.0, 68.0, -55.0, 68.0]) - Date range ['2019-02-22', '2019-02-28'] - - See Also - -------- - GenQuery - """ - - # ---------------------------------------------------------------------- - # Constructors - - def __init__( - self, - product=None, - spatial_extent=None, - date_range=None, - start_time=None, - end_time=None, - version=None, - cycles=None, - tracks=None, - files=None, # NOTE: if you end up implemeting this feature here, use a better variable name than "files" - auth=None, - **kwargs, - ): - - # Check necessary combination of input has been specified - if ( - (product is None or spatial_extent is None) - or ( - (date_range is None and cycles is None and tracks is None) - and int(product[-2:]) <= 13 - ) - and files is None - ): - raise ValueError( - "Please provide the required inputs. Use help([function]) to view the function's documentation" - ) - - if files is not None: - self._source = "files" - # self.file_vars = Variables(self._source) - else: - self._source = "order" - # self.order_vars = Variables(self._source) - # self.variables = Variables(self._source) - - self._prod = is2ref._validate_product(product) - - super().__init__(spatial_extent, date_range, start_time, end_time, **kwargs) - - self._version = val.prod_version(self.latest_version(), version) - - # build list of available CMR parameters if reducing by cycle or RGT - # or a list of explicitly named files (full or partial names) - # DevGoal: add file name search to optional queries - if cycles or tracks: - # get lists of available ICESat-2 cycles and tracks - self._cycles = val.cycles(cycles) - self._tracks = val.tracks(tracks) - # create list of CMR parameters for granule name - self._readable_granule_name = apifmt._fmt_readable_granules( - self._prod, cycles=self.cycles, tracks=self.tracks - ) - - # initialize authentication properties - EarthdataAuthMixin.__init__(self) # ---------------------------------------------------------------------- # Properties - def __str__(self): - str = "Product {2} v{3}\n{0}\nDate range {1}".format( - self.spatial_extent, self.dates, self.product, self.product_version - ) - return str - - @property - def dataset(self): - """ - Legacy property included to provide depracation warning. - - See Also - -------- - product - """ - warnings.filterwarnings("always") - warnings.warn( - "In line with most common usage, 'dataset' has been replaced by 'product'.", - DeprecationWarning, - ) - - @property - def product(self): - """ - Return the short name product ID string associated with the query object. - - Examples - -------- - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) - >>> reg_a.product - 'ATL06' - """ - return self._prod - - @property - def product_version(self): - """ - Return the product version of the data object. 
- - Examples -------- - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) - >>> reg_a.product_version - '006' - - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], version='4') - >>> reg_a.product_version - '004' - """ - return self._version @property def temporal(self): """ @@ -346,12 +160,12 @@ def temporal(self): Examples -------- - >>> reg_a = Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) + >>> reg_a = GenQuery([-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> print(reg_a.temporal) Start date and time: 2019-02-20 00:00:00 End date and time: 2019-02-28 23:59:59 - >>> reg_a = Query('ATL06',[-55, 68, -48, 71],cycles=['03','04','05','06','07'], tracks=['0849','0902']) + >>> reg_a = GenQuery([-55, 68, -48, 71],cycles=['03','04','05','06','07'], tracks=['0849','0902']) >>> print(reg_a.temporal) ['No temporal parameters set'] """ @@ -377,7 +191,7 @@ def spatial(self): Examples -------- - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) + >>> reg_a = ipx.GenQuery([-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> reg_a.spatial # doctest: +SKIP @@ -394,7 +208,8 @@ def spatial_extent(self): Return an array showing the spatial extent of the query object. Spatial extent is returned as an input type (which depends on how you initially entered your spatial data) followed by the geometry data. - Bounding box data is [lower-left-longitude, lower-left-latitute, upper-right-longitude, upper-right-latitude]. + Bounding box data is [lower-left-longitude, lower-left-latitude, + ... upper-right-longitude, upper-right-latitude]. Polygon data is [longitude1, latitude1, longitude2, latitude2, ... longitude_n,latitude_n, longitude1,latitude1]. @@ -408,11 +223,11 @@ def spatial_extent(self): -------- # Note: coordinates returned as float, not int - >>> reg_a = Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) + >>> reg_a = GenQuery([-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> reg_a.spatial_extent ('bounding_box', [-55.0, 68.0, -48.0, 71.0]) - >>> reg_a = Query('ATL06',[(-55, 68), (-55, 71), (-48, 71), (-48, 68), (-55, 68)],['2019-02-20','2019-02-28']) + >>> reg_a = GenQuery([(-55, 68), (-55, 71), (-48, 71), (-48, 68), (-55, 68)],['2019-02-20','2019-02-28']) >>> reg_a.spatial_extent ('polygon', [-55.0, 68.0, -55.0, 71.0, -48.0, 71.0, -48.0, 68.0, -55.0, 68.0]) @@ -433,15 +248,16 @@ def spatial_extent(self): def dates(self): """ Return an array showing the date range of the query object. - Dates are returned as an array containing the start and end datetime objects, inclusive, in that order. + Dates are returned as an array containing the start and end datetime + objects, inclusive, in that order.
Examples -------- - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) + >>> reg_a = ipx.GenQuery([-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> reg_a.dates ['2019-02-20', '2019-02-28'] - >>> reg_a = Query('ATL06',[-55, 68, -48, 71],cycles=['03','04','05','06','07'], tracks=['0849','0902']) + >>> reg_a = GenQuery([-55, 68, -48, 71]) >>> reg_a.dates ['No temporal parameters set'] """ @@ -460,15 +276,15 @@ def start_time(self): Examples -------- - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) + >>> reg_a = ipx.GenQuery([-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> reg_a.start_time '00:00:00' - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], start_time='12:30:30') + >>> reg_a = ipx.GenQuery([-55, 68, -48, 71],['2019-02-20','2019-02-28'], start_time='12:30:30') >>> reg_a.start_time '12:30:30' - >>> reg_a = Query('ATL06',[-55, 68, -48, 71],cycles=['03','04','05','06','07'], tracks=['0849','0902']) + >>> reg_a = GenQuery([-55, 68, -48, 71]) >>> reg_a.start_time ['No temporal parameters set'] """ @@ -484,15 +300,15 @@ def end_time(self): Examples -------- - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) + >>> reg_a = ipx.GenQuery([-55, 68, -48, 71],['2019-02-20','2019-02-28']) >>> reg_a.end_time '23:59:59' - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], end_time='10:20:20') + >>> reg_a = ipx.GenQuery([-55, 68, -48, 71],['2019-02-20','2019-02-28'], end_time='10:20:20') >>> reg_a.end_time '10:20:20' - >>> reg_a = Query('ATL06',[-55, 68, -48, 71],cycles=['03','04','05','06','07'], tracks=['0849','0902']) + >>> reg_a = GenQuery([-55, 68, -48, 71]) >>> reg_a.end_time ['No temporal parameters set'] """ @@ -501,6 +317,178 @@ def end_time(self): else: return self._temporal._end.strftime("%H:%M:%S") + +# DevGoal: update docs throughout to allow for polygon spatial extent +# DevNote: currently this class is not tested +class Query(GenQuery, EarthdataAuthMixin): + """ + Query and get ICESat-2 data + + ICESat-2 Data object to query, obtain, and perform basic operations on + available ICESat-2 data products using temporal and spatial input parameters. + Allows the easy input and formatting of search parameters to match the + NASA NSIDC DAAC and (development goal-not yet implemented) conversion to multiple data types. + Expands the superclass GenQuery. + + See the doc page for GenQuery for details on temporal and spatial input parameters. + + Parameters + ---------- + product : string + ICESat-2 data product ID, also known as "short name" (e.g. ATL03). + Available data products can be found at: https://nsidc.org/data/icesat-2/data-sets + version : string, default most recent version + Product version, given as a 3 digit string. + If no version is given, the current version is used. Example: "006" + cycles : string or a list of strings, default all available orbital cycles + Product cycle, given as a 2 digit string. + If no cycle is given, all available cycles are used. Example: "04" + tracks : string or a list of strings, default all available reference ground tracks (RGTs) + Product track, given as a 4 digit string. + If no track is given, all available reference ground tracks are used. + Example: "0594" + auth : earthaccess.auth.Auth, default None + An earthaccess authentication object. Available as an argument so an existing + earthaccess.auth.Auth object can be used for authentication. 
If not given, a new auth + object will be created whenever authentication is needed. + + Returns + ------- + query object + + Examples + -------- + Initializing Query with a bounding box. + + >>> reg_a_bbox = [-55, 68, -48, 71] + >>> reg_a_dates = ['2019-02-20','2019-02-28'] + >>> reg_a = Query('ATL06', reg_a_bbox, reg_a_dates) + >>> print(reg_a) + Product ATL06 v006 + ('bounding_box', [-55.0, 68.0, -48.0, 71.0]) + Date range ['2019-02-20', '2019-02-28'] + + Initializing Query with a list of polygon vertex coordinate pairs. + + >>> reg_a_poly = [(-55, 68), (-55, 71), (-48, 71), (-48, 68), (-55, 68)] + >>> reg_a_dates = ['2019-02-20','2019-02-28'] + >>> reg_a = Query('ATL06', reg_a_poly, reg_a_dates) + >>> reg_a.spatial_extent + ('polygon', [-55.0, 68.0, -55.0, 71.0, -48.0, 71.0, -48.0, 68.0, -55.0, 68.0]) + + Initializing Query with a geospatial polygon file. + + >>> aoi = str(Path('./doc/source/example_notebooks/supporting_files/simple_test_poly.gpkg').resolve()) + >>> reg_a_dates = ['2019-02-22','2019-02-28'] + >>> reg_a = Query('ATL06', aoi, reg_a_dates) + >>> print(reg_a) + Product ATL06 v006 + ('polygon', [-55.0, 68.0, -55.0, 71.0, -48.0, 71.0, -48.0, 68.0, -55.0, 68.0]) + Date range ['2019-02-22', '2019-02-28'] + + See Also + -------- + GenQuery + """ + + # ---------------------------------------------------------------------- + # Constructors + + def __init__( + self, + product=None, + spatial_extent=None, + date_range=None, + start_time=None, + end_time=None, + version=None, + cycles=None, + tracks=None, + auth=None, + **kwargs, + ): + # Check necessary combination of input has been specified + if (product is None or spatial_extent is None) or ( + (date_range is None and cycles is None and tracks is None) + and int(product[-2:]) <= 13 + ): + raise ValueError( + "Please provide the required inputs. Use help([function]) to view the function's documentation" + ) + + self._prod = is2ref._validate_product(product) + + super().__init__(spatial_extent, date_range, start_time, end_time, **kwargs) + + self._version = val.prod_version(is2ref.latest_version(self._prod), version) + + # build list of available CMR parameters if reducing by cycle or RGT + # or a list of explicitly named files (full or partial names) + # DevGoal: add file name search to optional queries + if cycles or tracks: + # get lists of available ICESat-2 cycles and tracks + self._cycles = val.cycles(cycles) + self._tracks = val.tracks(tracks) + # create list of CMR parameters for granule name + self._readable_granule_name = apifmt._fmt_readable_granules( + self._prod, cycles=self.cycles, tracks=self.tracks + ) + + # initialize authentication properties + EarthdataAuthMixin.__init__(self) + + # ---------------------------------------------------------------------- + # Properties + + def __str__(self): + str = "Product {2} v{3}\n{0}\nDate range {1}".format( + self.spatial_extent, self.dates, self.product, self.product_version + ) + return str + + @property + def dataset(self): + """ + Legacy property included to provide deprecation warning. + + See Also + -------- + product + """ + raise DeprecationError( + "In line with most common usage, 'dataset' has been replaced by 'product'.", + ) + + @property + def product(self): + """ + Return the short name product ID string associated with the query object.
+ + Examples + -------- + >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) + >>> reg_a.product + 'ATL06' + """ + return self._prod + + @property + def product_version(self): + """ + Return the product version of the data object. + + Examples + -------- + >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) + >>> reg_a.product_version + '006' + + >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28'], version='4') + >>> reg_a.product_version + '004' + """ + return self._version + @property def cycles(self): """ @@ -588,7 +576,8 @@ def CMRparams(self): @property def reqparams(self): """ - Display the required key:value pairs that will be submitted. It generates the dictionary if it does not already exist. + Display the required key:value pairs that will be submitted. + It generates the dictionary if it does not already exist. Examples -------- @@ -612,7 +601,8 @@ def reqparams(self): # DevQuestion: if I make this a property, I get a "dict" object is not callable when I try to give input kwargs... what approach should I be taking? def subsetparams(self, **kwargs): """ - Display the subsetting key:value pairs that will be submitted. It generates the dictionary if it does not already exist + Display the subsetting key:value pairs that will be submitted. + It generates the dictionary if it does not already exist and returns an empty dictionary if subsetting is set to False during ordering. Parameters @@ -620,7 +610,8 @@ def subsetparams(self, **kwargs): **kwargs : key-value pairs Additional parameters to be passed to the subsetter. By default temporal and spatial subset keys are passed. - Acceptable key values are ['format','projection','projection_parameters','Coverage']. + Acceptable key values are + ['format','projection','projection_parameters','Coverage']. At this time (2020-05), only variable ('Coverage') parameters will be automatically formatted. 
See Also @@ -642,10 +633,31 @@ def subsetparams(self, **kwargs): kwargs["start"] = self._temporal._start kwargs["end"] = self._temporal._end - if self._subsetparams == None and not kwargs: + if self._subsetparams is None and not kwargs: return {} else: - if self._subsetparams == None: + # If the user has supplied a subset list of variables, append the + # icepyx required variables to the Coverage dict + if "Coverage" in kwargs.keys(): + var_list = [ + "orbit_info/sc_orient", + "orbit_info/sc_orient_time", + "ancillary_data/atlas_sdp_gps_epoch", + "orbit_info/cycle_number", + "orbit_info/rgt", + "ancillary_data/data_start_utc", + "ancillary_data/data_end_utc", + "ancillary_data/granule_start_utc", + "ancillary_data/granule_end_utc", + "ancillary_data/start_delta_time", + "ancillary_data/end_delta_time", + ] + # Add any variables from var_list to Coverage that are not already included + for var in var_list: + if var not in kwargs["Coverage"].keys(): + kwargs["Coverage"][var.split("/")[-1]] = [var] + + if self._subsetparams is None: self._subsetparams = apifmt.Parameters("subset") if self._spatial._geom_file is not None: self._subsetparams.build_params( @@ -683,63 +695,37 @@ def order_vars(self): """ if not hasattr(self, "_order_vars"): - if self._source == "order": - # DevGoal: check for active session here - if hasattr(self, "_cust_options"): - self._order_vars = Variables( - self._source, - auth = self.auth, - product=self.product, - avail=self._cust_options["variables"], - ) - else: - self._order_vars = Variables( - self._source, - auth=self.auth, - product=self.product, - version=self._version, - ) - - # I think this is where property setters come in, and one should be used here? Right now order_vars.avail is only filled in - # if _cust_options exists when the class is initialized, but not if _cust_options is filled in prior to another call to order_vars + # DevGoal: check for active session here + if hasattr(self, "_cust_options"): + self._order_vars = Variables( + product=self.product, + version=self._version, + avail=self._cust_options["variables"], + auth=self.auth, + ) + else: + self._order_vars = Variables( + product=self.product, + version=self._version, + auth=self.auth, + ) + + # I think this is where property setters come in, and one should be used here? + # Right now order_vars.avail is only filled in + # if _cust_options exists when the class is initialized, + # but not if _cust_options is filled in prior to another call to order_vars # if self._order_vars.avail == None and hasattr(self, '_cust_options'): # print('got into the loop') # self._order_vars.avail = self._cust_options['variables'] return self._order_vars - @property - def file_vars(self): - """ - Return the file variables object. - This instance is generated when files are used to create the data object (not yet implemented). - - See Also - -------- - variables.Variables - - Examples - -------- - >>> reg_a = ipx.Query('ATL06',[-55, 68, -48, 71],['2019-02-20','2019-02-28']) # doctest: +SKIP - - >>> reg_a.file_vars # doctest: +SKIP - - """ - - if not hasattr(self, "_file_vars"): - if self._source == "file": - self._file_vars = Variables(self._source, - auth=self.auth, - product=self.product, - ) - - return self._file_vars - @property def granules(self): """ Return the granules object, which provides the underlying funtionality for searching, ordering, - and downloading granules for the specified product. Users are encouraged to use the built in wrappers + and downloading granules for the specified product. 
+ Users are encouraged to use the built-in wrappers rather than trying to access the granules object themselves. See Also @@ -758,7 +744,7 @@ def granules(self): if not hasattr(self, "_granules"): self._granules = Granules() - elif self._granules == None: + elif self._granules is None: self._granules = Granules() return self._granules @@ -814,6 +800,8 @@ def product_all_info(self): def latest_version(self): """ + A reference function to is2ref.latest_version. + Determine the most recent version available for the given product. Examples @@ -822,11 +810,7 @@ def latest_version(self): >>> reg_a.latest_version() '006' """ - if not hasattr(self, "_about_product"): - self._about_product = is2ref.about_product(self._prod) - return max( - [entry["version_id"] for entry in self._about_product["feed"]["entry"]] - ) + return is2ref.latest_version(self.product) def show_custom_options(self, dictview=False): """ @@ -1016,9 +1000,9 @@ def order_granules(self, verbose=False, subset=True, email=False, **kwargs): if self._reqparams._reqtype == "search": self._reqparams._reqtype = "download" - if "email" in self._reqparams.fmted_keys.keys() or email == False: + if "email" in self._reqparams.fmted_keys.keys() or email is False: self._reqparams.build_params(**self._reqparams.fmted_keys) - elif email == True: + elif email is True: user_profile = self.auth.get_user_profile() self._reqparams.build_params( **self._reqparams.fmted_keys, email=user_profile["email_address"] @@ -1027,9 +1011,9 @@ def order_granules(self, verbose=False, subset=True, email=False, **kwargs): if subset is False: self._subsetparams = None elif ( - subset == True + subset is True and hasattr(self, "_subsetparams") - and self._subsetparams == None + and self._subsetparams is None ): del self._subsetparams @@ -1121,7 +1105,7 @@ def download_granules( if not hasattr(self, "_granules"): self.granules - if restart == True: + if restart is True: pass else: if ( diff --git a/icepyx/core/read.py b/icepyx/core/read.py index 627395be2..e11015935 100644 --- a/icepyx/core/read.py +++ b/icepyx/core/read.py @@ -1,17 +1,20 @@ import fnmatch +import glob import os +import sys import warnings +import earthaccess import numpy as np +from s3fs.core import S3File import xarray as xr +from icepyx.core.auth import EarthdataAuthMixin from icepyx.core.exceptions import DeprecationError import icepyx.core.is2ref as is2ref from icepyx.core.variables import Variables as Variables from icepyx.core.variables import list_of_dict_vals -# from icepyx.core.query import Query - def _make_np_datetime(df, keyword): """ @@ -132,7 +135,7 @@ def _check_datasource(filepath): Then the dict can also contain a catalog key with a dict of catalogs for each of those types of inputs ("s3" or "local") In general, the issue we'll run into with multiple files is going to be merging during the read in, so it could be beneficial to not hide this too much and mandate users handle this intentionally outside the read in itself. 
-
+
     this function was derived with some of the following resources, based on echopype
     https://github.com/OSOceanAcoustics/echopype/blob/ab5128fb8580f135d875580f0469e5fba3193b84/echopype/utils/io.py
 
@@ -157,9 +160,9 @@ def _validate_source(source):
     # acceptable inputs (for now) are a single file or directory
     # would ultimately like to make a Path (from pathlib import Path; isinstance(source, Path)) an option
     # see https://github.com/OSOceanAcoustics/echopype/blob/ab5128fb8580f135d875580f0469e5fba3193b84/echopype/utils/io.py#L82
-    assert type(source) == str, "You must enter your input as a string."
+    assert isinstance(source, str), "You must enter your input as a string."
     assert (
-        os.path.isdir(source) == True or os.path.isfile(source) == True
+        os.path.isdir(source) is True or os.path.isfile(source) is True
     ), "Your data source string is not a valid data source."
 
     return True
@@ -258,37 +261,58 @@ def _pattern_to_glob(pattern):
     return glob_path
 
 
+def _confirm_proceed():
+    """
+    Ask the user if they wish to proceed with processing. If 'y' or 'yes', then continue. Any
+    other user input will abort the process.
+    """
+    answer = input("Do you wish to proceed (not recommended) y/[n]?")
+    if answer.lower() in ["y", "yes"]:
+        pass
+    else:
+        warnings.warn("Aborting", stacklevel=2)
+        sys.exit(0)
+
+
 # To do: test this class and functions therein
-class Read:
+class Read(EarthdataAuthMixin):
     """
     Data object to read ICESat-2 data into the specified formats.
     Provides flexibility for reading nested hdf5 files into common analysis formats.
 
     Parameters
     ----------
-    data_source : string
-        A string with a full file path or full directory path to ICESat-2 hdf5 (.h5) format files.
-        Files within a directory must have a consistent filename pattern that includes the "ATL??" data product name.
-        Files must all be within a single directory.
+    data_source : string, List
+        A string or list which specifies the files to be read.
+        The string can be either:
+        1) the path of a single file
+        2) the path to a directory, or
+        3) a [glob string](https://docs.python.org/3/library/glob.html).
+        The List must be a list of strings, each of which is the path of a single file.
+
+    glob_kwargs : dict, default {}
+        Additional arguments to be passed into the [glob.glob()](https://docs.python.org/3/library/glob.html#glob.glob) function.
+
+    out_obj_type : object, default xarray.Dataset
+        The desired format for the data to be read in.
+        Currently, only xarray.Dataset objects (default) are available.
+        Please ask us how to help enable usage of other data objects!
 
     product : string
         ICESat-2 data product ID, also known as "short name" (e.g. ATL03).
        Available data products can be found at: https://nsidc.org/data/icesat-2/data-sets
+        **Deprecation warning:** This argument is no longer required and has been deprecated.
+        The dataset product is read from the file metadata.
 
-    filename_pattern : string, default 'ATL{product:2}_{datetime:%Y%m%d%H%M%S}_{rgt:4}{cycle:2}{orbitsegment:2}_{version:3}_{revision:2}.h5'
-        String that shows the filename pattern as required for Intake's path_as_pattern argument.
+    filename_pattern : string, default None
+        String that shows the filename pattern as previously required for Intake's path_as_pattern argument.
         The default describes files downloaded directly from NSIDC (subsetted and non-subsetted) for most products (e.g. ATL06).
        The ATL11 filename pattern from NSIDC is: 'ATL{product:2}_{rgt:4}{orbitsegment:2}_{cycles:4}_{version:3}_{revision:2}.h5'.
- + **Deprecation warning:** This argument is no longer required and has been deprecated. catalog : string, default None Full path to an Intake catalog for reading in data. If you still need to create a catalog, leave as default. - **Deprecation warning:** This argument has been depreciated. Please use the data_source argument to pass in valid data. - - out_obj_type : object, default xarray.Dataset - The desired format for the data to be read in. - Currently, only xarray.Dataset objects (default) are available. - Please ask us how to help enable usage of other data objects! + **Deprecation warning:** This argument has been deprecated. Please use the data_source argument to pass in valid data. Returns ------- @@ -296,6 +320,21 @@ class Read: Examples -------- + Reading a single file + >>> ipx.Read('/path/to/data/processed_ATL06_20190226005526_09100205_006_02.h5') # doctest: +SKIP + + Reading all files in a directory + >>> ipx.Read('/path/to/data/') # doctest: +SKIP + + Reading files that match a particular pattern (here, all .h5 files that start with `processed_ATL06_`). + >>> ipx.Read('/path/to/data/processed_ATL06_*.h5') # doctest: +SKIP + + Reading a specific list of files + >>> list_of_files = [ + ... '/path/to/data/processed_ATL06_20190226005526_09100205_006_02.h5', + ... '/path/to/more/data/processed_ATL06_20191202102922_10160505_006_01.h5', + ... ] + >>> ipx.Read(list_of_files) # doctest: +SKIP """ @@ -304,57 +343,102 @@ class Read: def __init__( self, - data_source=None, + data_source, + glob_kwargs={}, + out_obj_type=None, # xr.Dataset, + # deprecated arguments product=None, - filename_pattern="ATL{product:2}_{datetime:%Y%m%d%H%M%S}_{rgt:4}{cycle:2}{orbitsegment:2}_{version:3}_{revision:2}.h5", + filename_pattern=None, catalog=None, - out_obj_type=None, # xr.Dataset, ): - # Raise error for depreciated argument + # initialize authentication properties + EarthdataAuthMixin.__init__(self) + + # Raise errors for deprecated arguments + if filename_pattern: + raise DeprecationError( + "The `filename_pattern` argument is deprecated. Instead please provide a " + "string, list, or glob string to the `data_source` argument." + ) + + if product: + raise DeprecationError("The `product` argument is no longer required.") + if catalog: raise DeprecationError( "The `catalog` argument has been deprecated and intake is no longer supported. " "Please use the `data_source` argument to specify your dataset instead." ) - if data_source is None: - raise ValueError("Please provide a data source.") + if isinstance(data_source, list): + # if data_source is a list pass that directly to _filelist + self._filelist = data_source + elif os.path.isdir(data_source): + # if data_source is a directory glob search the directory and assign to _filelist + data_source = os.path.join(data_source, "*") + self._filelist = glob.glob(data_source, **glob_kwargs) + elif isinstance(data_source, str): + if data_source.startswith("s3"): + # if the string is an s3 path put it in the _filelist without globbing + self._filelist = [data_source] + else: + # data_source is a globable string + self._filelist = glob.glob(data_source, **glob_kwargs) else: - self._source_type = _check_datasource(data_source) - self.data_source = data_source + raise TypeError( + "data_source should be a list of files, a directory, the path to a file, " + "or a glob string." + ) - if product is None: - raise ValueError( - "Please provide the ICESat-2 data product of your file(s)." 
+        # Remove any directories from the list (these get generated during recursive
+        # glob search)
+        self._filelist = [f for f in self._filelist if not os.path.isdir(f)]
+
+        # Create a dictionary of the products as read from the metadata
+        product_dict = {}
+        self.is_s3 = [False] * len(self._filelist)
+        for i, file_ in enumerate(self._filelist):
+            # If the path is an s3 path set the respective element of self.is_s3 to True
+            if file_.startswith("s3"):
+                self.is_s3[i] = True
+                auth = self.auth
+            else:
+                auth = None
+            product_dict[file_] = is2ref.extract_product(file_, auth=auth)
+
+        # Raise an error if there are both s3 and non-s3 paths present
+        if len(set(self.is_s3)) > 1:
+            raise TypeError(
+                "Mixed local and s3 paths are not supported. data_source must contain "
+                "only s3 paths or only local paths"
             )
-        else:
-            self._prod = is2ref._validate_product(product)
 
-        pattern_ck, filelist = Read._check_source_for_pattern(
-            data_source, filename_pattern
-        )
-        assert pattern_ck
-        # Note: need to check if this works for subset and non-subset NSIDC files (processed_ prepends the former)
-        self._pattern = filename_pattern
-
-        # this is a first pass at getting rid of mixed product types and warning the user.
-        # it takes an approach assuming the product name is in the filename, but needs reworking if we let multiple products be loaded
-        # one way to handle this would be bring in the product info during the loading step and fill in product there instead of requiring it from the user
-        filtered_filelist = [file for file in filelist if self._prod in file]
-        if len(filtered_filelist) == 0:
+        self.is_s3 = self.is_s3[0]  # Change is_s3 into one boolean value for _filelist
+        # Raise warning if more than 2 s3 files are given
+        if self.is_s3 is True and len(self._filelist) > 2:
             warnings.warn(
-                "Your filenames do not contain a product identifier (e.g. ATL06). "
-                "You will likely need to manually merge your dataframes."
+                "Processing more than two s3 files can take a prohibitively long time. "
+                "Approximate access time (using `.load()`) can exceed 6 minutes per data "
+                "variable.",
+                stacklevel=2,
             )
-            self._filelist = filelist
-        elif len(filtered_filelist) < len(filelist):
-            warnings.warn(
-                "Some files matching your filename pattern were removed as they were not the specified product."
+            _confirm_proceed()
+
+        # Raise warnings or errors for multiple products or products not matching the user-specified product
+        all_products = list(set(product_dict.values()))
+        if len(all_products) > 1:
+            raise TypeError(
+                f"Multiple product types were found in the file list: {product_dict}. "
+                "Please provide a valid `data_source` parameter indicating files of a single "
+                "product"
+            )
+        elif len(all_products) == 0:
+            raise TypeError(
+                "No files found matching the specified `data_source`. Check your glob "
+                "string or file list."
) - self._filelist = filtered_filelist else: - self._filelist = filelist - - # after validation, use the notebook code and code outline to start implementing the rest of the class + # Assign the identified product to the property + self._product = all_products[0] if out_obj_type is not None: print( @@ -366,8 +450,6 @@ def __init__( # ---------------------------------------------------------------------- # Properties - # I cut and pasted this directly out of the Query class - going to need to reconcile the _source/file stuff there - @property def vars(self): """ @@ -386,44 +468,25 @@ def vars(self): """ if not hasattr(self, "_read_vars"): - self._read_vars = Variables( - "file", path=self._filelist[0], product=self._prod - ) - + self._read_vars = Variables(path=self.filelist[0]) return self._read_vars - # ---------------------------------------------------------------------- - # Methods + @property + def filelist(self): + """ + Return the list of files represented by this Read object. + """ + return self._filelist - @staticmethod - def _check_source_for_pattern(source, filename_pattern): + @property + def product(self): """ - Check that the entered data source contains files that match the input filename_pattern + Return the product associated with the Read object. """ - glob_pattern = _pattern_to_glob(filename_pattern) + return self._product - if os.path.isdir(source): - _, filelist = _run_fast_scandir(source, glob_pattern) - assert ( - len(filelist) > 0 - ), "None of your filenames match the specified pattern." - print( - f"You have {len(filelist)} files matching the filename pattern to be read in." - ) - return True, filelist - elif os.path.isfile(source): - assert fnmatch.fnmatch( - os.path.basename(source), glob_pattern - ), "Your input filename does not match the filename pattern." - return True, [source] - elif isinstance(source, str): - if source.startswith("s3://"): - return True, [source] - elif isinstance(source, list): - if all(source.startswith("s3://")): - return True, source - - return False, None + # ---------------------------------------------------------------------- + # Methods @staticmethod def _add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict): @@ -512,13 +575,11 @@ def _add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict): .assign_coords( { spot_dim_name: (spot_dim_name, [spot]), - "delta_time": ("delta_time", photon_ids), + "photon_idx": ("delta_time", photon_ids), } ) .assign({spot_var_name: (("gran_idx", spot_dim_name), [[track_str]])}) - .rename_dims({"delta_time": "photon_idx"}) - .rename({"delta_time": "photon_idx"}) - # .set_index("photon_idx") + .swap_dims({"delta_time": "photon_idx"}) ) # handle cases where the delta time is 2d due to multiple cycles in that group @@ -526,8 +587,6 @@ def _add_vars_to_ds(is2ds, ds, grp_path, wanted_groups_tiered, wanted_dict): ds = ds.assign_coords( {"delta_time": (("photon_idx", "cycle_number"), hold_delta_times)} ) - else: - ds = ds.assign_coords({"delta_time": ("photon_idx", hold_delta_times)}) # for ATL11 if "ref_pt" in ds.coords: @@ -636,25 +695,66 @@ def load(self): # add a check that wanted variables exists, and create them with defaults if possible (and let the user know) # write tests for the functions! 
-        # Notes: intake wants an entire group, not an individual variable (which makes sense if we're using its smarts to set up lat, lon, etc)
-        # so to get a combined dataset, we need to keep track of spots under the hood, open each group, and then combine them into one xarray where the spots are IDed somehow (or only the strong ones are returned)
-        # this means we need to get/track from each dataset we open some of the metadata, which we include as mandatory variables when constructing the wanted list
+        # Notes: intake wants an entire group, not an individual variable
+        # (which makes sense if we're using its smarts to set up lat, lon, etc)
+        # so to get a combined dataset, we need to keep track of spots under the hood,
+        # open each group, and then combine them into one xarray where the spots are IDed somehow
+        # (or only the strong ones are returned)
+        # this means we need to get/track from each dataset we open some of the metadata,
+        # which we include as mandatory variables when constructing the wanted list
+
+        if not self.vars.wanted:
+            raise AttributeError(
+                "No variables listed in self.vars.wanted. Please use the Variables class "
+                "via self.vars to search for desired variables to read and self.vars.append(...) "
+                "to add variables to the wanted variables list."
+            )
+
+        if self.is_s3 is True and len(self.vars.wanted) > 3:
+            warnings.warn(
+                "Loading more than 3 variables from an s3 object can be prohibitively slow. "
+                "Approximate access time (using `.load()`) can exceed 6 minutes per data "
+                "variable."
+            )
+            _confirm_proceed()
+
+        # Append the minimum variables needed for icepyx to merge the datasets
+        # Skip products which do not contain required variables
+        if self.product not in ["ATL14", "ATL15", "ATL23"]:
+            var_list = [
+                "sc_orient",
+                "atlas_sdp_gps_epoch",
+                "cycle_number",
+                "rgt",
+                "data_start_utc",
+                "data_end_utc",
+            ]
+
+            # Adjust the var_list for individual products
+            if self.product == "ATL11":
+                var_list.remove("sc_orient")
+
+            self.vars.append(defaults=False, var_list=var_list)
 
         try:
-            groups_list = list_of_dict_vals(self._read_vars.wanted)
+            groups_list = list_of_dict_vals(self.vars.wanted)
         except AttributeError:
             pass
 
         all_dss = []
-        # DevNote: I'd originally hoped to rely on intake-xarray in order to not have to iterate through the files myself,
-        # by providing a generalized url/source in building the catalog.
-        # However, this led to errors when I tried to combine two identical datasets because the single dimension was equal.
-        # In these situations, xarray recommends manually controlling the merge/concat process yourself.
-        # While unlikely to be a broad issue, I've heard of multiple matching timestamps causing issues for combining multiple IS2 datasets.
-        for file in self._filelist:
+
+        for file in self.filelist:
+            if file.startswith("s3"):
+                # If path is an s3 path create an s3fs filesystem to reference the file
+                # TODO would it be better to be able to generate an s3fs session from the Mixin?
+ s3 = earthaccess.get_s3fs_session(daac="NSIDC") + file = s3.open(file, "rb") + all_dss.append( self._build_single_file_dataset(file, groups_list) ) # wanted_groups, vgrp.keys())) + if isinstance(file, S3File): + file.close() if len(all_dss) == 1: return all_dss[0] @@ -686,7 +786,7 @@ def _build_dataset_template(self, file): gran_idx=[np.uint64(999999)], source_file=(["gran_idx"], [file]), ), - attrs=dict(data_product=self._prod), + attrs=dict(data_product=self.product), ) return is2ds @@ -734,20 +834,11 @@ def _build_single_file_dataset(self, file, groups_list): ------- Xarray Dataset """ - file_product = self._read_single_grp(file, "/").attrs["identifier_product_type"] - assert ( - file_product == self._prod - ), "Your product specification does not match the product specification within your files." - # I think the below method might NOT read the file into memory as the above might? - # import h5py - # with h5py.File(filepath,'r') as h5pt: - # prod_id = h5pt.attrs["identifier_product_type"] - # DEVNOTE: if and elif does not actually apply wanted variable list, and has not been tested for merging multiple files into one ds # if a gridded product # TODO: all products need to be tested, and quicklook products added or explicitly excluded # Level 3b, gridded (netcdf): ATL14, 15, 16, 17, 18, 19, 20, 21 - if self._prod in [ + if self.product in [ "ATL14", "ATL15", "ATL16", @@ -761,7 +852,7 @@ def _build_single_file_dataset(self, file, groups_list): is2ds = xr.open_dataset(file) # Level 3b, hdf5: ATL11 - elif self._prod in ["ATL11"]: + elif self.product in ["ATL11"]: is2ds = self._build_dataset_template(file) # returns the wanted groups as a single list of full group path strings diff --git a/icepyx/core/spatial.py b/icepyx/core/spatial.py index 7702acdf2..c34e928ed 100644 --- a/icepyx/core/spatial.py +++ b/icepyx/core/spatial.py @@ -80,7 +80,6 @@ def geodataframe(extent_type, spatial_extent, file=False, xdateline=None): # DevGoal: the crs setting and management needs to be improved elif extent_type == "polygon" and file == False: - # if spatial_extent is already a Polygon if isinstance(spatial_extent, Polygon): spatial_extent_geom = spatial_extent @@ -248,7 +247,6 @@ def validate_polygon_pairs(spatial_extent): if (spatial_extent[0][0] != spatial_extent[-1][0]) or ( spatial_extent[0][1] != spatial_extent[-1][1] ): - # Throw a warning warnings.warn( "WARNING: Polygon's first and last point's coordinates differ," @@ -436,7 +434,6 @@ def __init__(self, spatial_extent, **kwarg): # Check if spatial_extent is a list of coordinates (bounding box or polygon) if isinstance(spatial_extent, (list, np.ndarray)): - # bounding box if len(spatial_extent) == 4 and all( isinstance(i, scalar_types) for i in spatial_extent diff --git a/icepyx/core/temporal.py b/icepyx/core/temporal.py index c7e2dda1c..67f59882a 100644 --- a/icepyx/core/temporal.py +++ b/icepyx/core/temporal.py @@ -51,7 +51,6 @@ def convert_string_to_date(date): def check_valid_date_range(start, end): - """ Helper function for checking if a date range is valid. @@ -89,7 +88,6 @@ def check_valid_date_range(start, end): def validate_times(start_time, end_time): - """ Validates the start and end times passed into __init__ and returns them as datetime.time objects. @@ -145,7 +143,6 @@ def validate_times(start_time, end_time): def validate_date_range_datestr(date_range, start_time=None, end_time=None): - """ Validates a date range provided in the form of a list of strings. 
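Each of the `validate_date_range_*` helpers in this module resolves to the same contract: given a two-element date range (plus optional times), it returns a (start, end) pair of datetime.datetime objects for `Temporal` to store as `_start` and `_end`. A hedged doctest-style sketch of the string case; the printed values are an assumption based on the '00:00:00'/'23:59:59' defaults documented on GenQuery:

>>> from icepyx.core.temporal import validate_date_range_datestr  # doctest: +SKIP
>>> start, end = validate_date_range_datestr(['2019-02-20', '2019-02-28'])  # doctest: +SKIP
>>> (start, end)  # doctest: +SKIP
(datetime.datetime(2019, 2, 20, 0, 0), datetime.datetime(2019, 2, 28, 23, 59, 59))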
@@ -190,7 +187,6 @@ def validate_date_range_datestr(date_range, start_time=None, end_time=None): def validate_date_range_datetime(date_range, start_time=None, end_time=None): - """ Validates a date range provided in the form of a list of datetimes. @@ -230,7 +226,6 @@ def validate_date_range_datetime(date_range, start_time=None, end_time=None): def validate_date_range_date(date_range, start_time=None, end_time=None): - """ Validates a date range provided in the form of a list of datetime.date objects. @@ -268,7 +263,6 @@ def validate_date_range_date(date_range, start_time=None, end_time=None): def validate_date_range_dict(date_range, start_time=None, end_time=None): - """ Validates a date range provided in the form of a dict with the following keys: @@ -330,7 +324,6 @@ def validate_date_range_dict(date_range, start_time=None, end_time=None): # if is string date elif isinstance(_start_date, str): - _start_date = convert_string_to_date(_start_date) _start_date = dt.datetime.combine(_start_date, start_time) @@ -411,7 +404,6 @@ def __init__(self, date_range, start_time=None, end_time=None): """ if len(date_range) == 2: - # date range is provided as dict of strings, dates, or datetimes if isinstance(date_range, dict): self._start, self._end = validate_date_range_dict( diff --git a/icepyx/core/validate_inputs.py b/icepyx/core/validate_inputs.py index c7ba55a6d..a69f045fb 100644 --- a/icepyx/core/validate_inputs.py +++ b/icepyx/core/validate_inputs.py @@ -104,3 +104,18 @@ def tracks(track): warnings.warn("Listed Reference Ground Track is not available") return track_list + + +def check_s3bucket(path): + """ + Check if the given path is an s3 path. Raise a warning if the data being referenced is not + in the NSIDC bucket + """ + split_path = path.split("/") + if split_path[0] == "s3:" and split_path[2] != "nsidc-cumulus-prod-protected": + warnings.warn( + "s3 data being read from outside the NSIDC data bucket. Icepyx can " + "read this data, but available data lists may not be accurate.", + stacklevel=2, + ) + return path diff --git a/icepyx/core/variables.py b/icepyx/core/variables.py index d46561f46..15d5268e5 100644 --- a/icepyx/core/variables.py +++ b/icepyx/core/variables.py @@ -4,6 +4,9 @@ from icepyx.core.auth import EarthdataAuthMixin import icepyx.core.is2ref as is2ref +from icepyx.core.exceptions import DeprecationError +import icepyx.core.validate_inputs as val +import icepyx.core as ipxc # DEVGOAL: use h5py to simplify some of these tasks, if possible! @@ -27,58 +30,102 @@ class Variables(EarthdataAuthMixin): Parameters ---------- vartype : string + This argument is deprecated. The vartype will be inferred from data_source. One of ['order', 'file'] to indicate the source of the input variables. This field will be auto-populated when a variable object is created as an attribute of a query object. + path : string, default None + The path to a local Icesat-2 file. The variables list will contain the variables + present in this file. Either path or product are required input arguments. + product : string, default None + Properly formatted string specifying a valid ICESat-2 product. The variables list will + contain all available variables for this product. Either product or path are required + input arguments. + version : string, default None + Properly formatted string specifying a valid version of the ICESat-2 product. avail : dictionary, default None Dictionary (key:values) of available variable names (keys) and paths (values). 
    wanted : dictionary, default None
        As avail, but for the desired list of variables
-    session : requests.session object
-        A session object authenticating the user to download data using their Earthdata login information.
-        The session object will automatically be passed from the query object if you
-        have successfully logged in there.
-    product : string, default None
-        Properly formatted string specifying a valid ICESat-2 product
-    version : string, default None
-        Properly formatted string specifying a valid version of the ICESat-2 product
-    path : string, default None
-        For vartype file, a path to a directory of or single input data file (not yet implemented)
+    auth : earthaccess.auth.Auth, default None
+        An earthaccess authentication object. Available as an argument so an existing
+        earthaccess.auth.Auth object can be used for authentication. If not given, a new auth
+        object will be created whenever authentication is needed.
    """

    def __init__(
        self,
-        vartype,
-        avail=None,
-        wanted=None,
+        vartype=None,
+        path=None,
        product=None,
        version=None,
-        path=None,
+        avail=None,
+        wanted=None,
        auth=None,
    ):
+        # Deprecation error
+        if vartype in ["order", "file"]:
+            raise DeprecationError(
+                "It is no longer required to specify the variable type `vartype`. Instead please "
+                "provide either the path to a local file (arg: `path`) or the product you would "
+                "like variables for (arg: `product`)."
+            )
+
+        if path and product:
+            raise TypeError(
+                "Please provide either a path or a product. If a path is provided "
+                "variables will be read from the file. If a product is provided all available "
+                "variables for that product will be returned."
+            )
 
-        assert vartype in ["order", "file"], "Please submit a valid variables type flag"
-
        # initialize authentication properties
        EarthdataAuthMixin.__init__(self, auth=auth)
-
-        self._vartype = vartype
-        self.product = product
+
+        # Set the product and version from either the input args or the file
+        if path:
+            self._path = val.check_s3bucket(path)
+
+            # Set up auth
+            if self._path.startswith("s3"):
+                auth = self.auth
+            else:
+                auth = None
+            # Read the product and version from the file
+            self._product = is2ref.extract_product(self._path, auth=auth)
+            self._version = is2ref.extract_version(self._path, auth=auth)
+        elif product:
+            # Check for valid product string
+            self._product = is2ref._validate_product(product)
+            # Check for valid version string
+            # If version is not specified by the user assume the most recent version
+            self._version = val.prod_version(
+                is2ref.latest_version(self._product), version
+            )
+        else:
+            raise TypeError(
+                "Either a path or a product needs to be given as an input argument."
+            )
+
        self._avail = avail
        self.wanted = wanted
 
        # DevGoal: put some more/robust checks here to assess validity of inputs
 
-        if self._vartype == "order":
-            if self._avail == None:
-                self._version = version
-        elif self._vartype == "file":
-            # DevGoal: check that the list or string are valid dir/files
-            self.path = path
+    @property
+    def path(self):
+        if self._path:
+            path = self._path
+        else:
+            path = None
+        return path
+
+    @property
+    def product(self):
+        return self._product
 
-    # @property
-    # def wanted(self):
-    #     return self._wanted
+    @property
+    def version(self):
+        return self._version
'quality_assessment/gt3r/signal_selection_source_fraction_3'] """ - # if hasattr(self, '_avail'): - # return self._avail - # else: + if not hasattr(self, "_avail") or self._avail == None: - if self._vartype == "order": + if not hasattr(self, "path") or self.path.startswith("s3"): self._avail = is2ref._get_custom_options( - self.session, self.product, self._version + self.session, self.product, self.version )["variables"] - - elif self._vartype == "file": + else: + # If a path was given, use that file to read the variables import h5py self._avail = [] @@ -446,53 +491,14 @@ def append(self, defaults=False, var_list=None, beam_list=None, keyword_list=Non and keyword_list == None ), "You must enter parameters to add to a variable subset list. If you do not want to subset by variable, ensure your is2.subsetparams dictionary does not contain the key 'Coverage'." - req_vars = {} + final_vars = {} - # if not hasattr(self, 'avail') or self.avail==None: self.get_avail() - # vgrp, paths = self.parse_var_list(self.avail) - # allpaths = [] - # [allpaths.extend(np.unique(np.array(paths[p]))) for p in range(len(paths))] vgrp, allpaths = self.avail(options=True, internal=True) - self._check_valid_lists(vgrp, allpaths, var_list, beam_list, keyword_list) - # add the mandatory variables to the data object - if self._vartype == "order": - nec_varlist = [ - "sc_orient", - "sc_orient_time", - "atlas_sdp_gps_epoch", - "data_start_utc", - "data_end_utc", - "granule_start_utc", - "granule_end_utc", - "start_delta_time", - "end_delta_time", - ] - elif self._vartype == "file": - nec_varlist = [ - "sc_orient", - "atlas_sdp_gps_epoch", - "cycle_number", - "rgt", - "data_start_utc", - "data_end_utc", - ] - - # Adjust the nec_varlist for individual products - if self.product == "ATL11": - nec_varlist.remove("sc_orient") - - try: - self._check_valid_lists(vgrp, allpaths, var_list=nec_varlist) - except ValueError: - # Assume gridded product since user input lists were previously validated - nec_varlist = [] - + # Instantiate self.wanted to an empty dictionary if it doesn't exist if not hasattr(self, "wanted") or self.wanted == None: - for varid in nec_varlist: - req_vars[varid] = vgrp[varid] - self.wanted = req_vars + self.wanted = {} # DEVGOAL: add a secondary var list to include uncertainty/error information for lower level data if specific data variables have been specified... 
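With `vartype` removed, a Variables object is now built from either a local/s3 file path or a product short name, and `append`/`remove` simply edit the `wanted` dictionary. A short usage sketch based on the signatures in this changeset (populating the available variables requires network access and an Earthdata login, hence the skips; the ATL06 variable path shown is illustrative):

>>> from icepyx.core.variables import Variables  # doctest: +SKIP
>>> v = Variables(product='ATL06')  # doctest: +SKIP
>>> v.append(var_list=['h_li'], beam_list=['gt1l'])  # doctest: +SKIP
>>> v.wanted  # doctest: +SKIP
{'h_li': ['gt1l/land_ice_segments/h_li']}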
@@ -501,21 +507,21 @@ def append(self, defaults=False, var_list=None, beam_list=None, keyword_list=Non
 
         # Case only variables (but not keywords or beams) are specified
         if beam_list == None and keyword_list == None:
-            req_vars.update(self._iter_vars(sum_varlist, req_vars, vgrp))
+            final_vars.update(self._iter_vars(sum_varlist, final_vars, vgrp))
 
         # Case a beam and/or keyword list is specified (with or without variables)
         else:
-            req_vars.update(
-                self._iter_paths(sum_varlist, req_vars, vgrp, beam_list, keyword_list)
+            final_vars.update(
+                self._iter_paths(sum_varlist, final_vars, vgrp, beam_list, keyword_list)
             )
 
         # update the data object variables
-        for vkey in req_vars.keys():
+        for vkey in final_vars.keys():
             # add all matching keys and paths for new variables
             if vkey not in self.wanted.keys():
-                self.wanted[vkey] = req_vars[vkey]
+                self.wanted[vkey] = final_vars[vkey]
             else:
-                for vpath in req_vars[vkey]:
+                for vpath in final_vars[vkey]:
                     if vpath not in self.wanted[vkey]:
                         self.wanted[vkey].append(vpath)
 
@@ -625,7 +631,6 @@ def remove(self, all=False, var_list=None, beam_list=None, keyword_list=None):
                         for bkw in beam_list:
                             if bkw in vpath_kws:
                                 for kw in keyword_list:
-
                                     if kw in vpath_kws:
                                         self.wanted[vkey].remove(vpath)
             except TypeError:
diff --git a/icepyx/core/visualization.py b/icepyx/core/visualization.py
index c6bef2333..001ae178e 100644
--- a/icepyx/core/visualization.py
+++ b/icepyx/core/visualization.py
@@ -4,6 +4,7 @@
 import concurrent.futures
 import datetime
 import re
+import warnings
 
 import backoff
 import dask.array as da
@@ -141,7 +142,6 @@ def __init__(
         cycles=None,
         tracks=None,
     ):
-
         if query_obj:
             pass
         else:
@@ -240,7 +240,6 @@ def query_icesat2_filelist(self) -> tuple:
         is2_file_list = []
 
         for bbox_i in bbox_list:
-
             try:
                 region = ipx.Query(
                     self.product,
@@ -332,7 +331,13 @@ def request_OA_data(self, paras) -> da.array:
             A dask array containing the ICESat-2 elevation data.
         """
 
-        base_url = "https://openaltimetry.org/data/api/icesat2/level3a"
+        warnings.warn(
+            "NOTICE: visualizations requiring the OpenAltimetry API are currently (October 2023) "
+            "unavailable while hosting of OpenAltimetry transitions from UCSD to NSIDC. "
+            "A ticket has been issued to restore programmatic API access."
+        )
+
+        base_url = "http://openaltimetry.earthdatacloud.nasa.gov/data/api/icesat2"
 
         trackId, Date, cycle, bbox, product = paras
 
         # Generate API
@@ -357,7 +362,6 @@ def request_OA_data(self, paras) -> da.array:
 
         # get data we need (with the correct date)
         try:
-
             df_series = df.query(expr="date == @Date").iloc[0]
             beam_data = df_series.beams
 
@@ -476,7 +480,6 @@ def viz_elevation(self) -> (hv.DynamicMap, hv.Layout):
             return (None,) * 2
 
         else:
-
             cols = (
                 ["lat", "lon", "elevation", "canopy", "rgt", "cycle"]
                 if self.product == "ATL08"
diff --git a/icepyx/quest/dataset_scripts/__init__.py b/icepyx/quest/dataset_scripts/__init__.py
index c7b28ee49..7834127ff 100644
--- a/icepyx/quest/dataset_scripts/__init__.py
+++ b/icepyx/quest/dataset_scripts/__init__.py
@@ -1 +1 @@
-from .dataset import * \ No newline at end of file
+from .dataset import *
diff --git a/icepyx/quest/dataset_scripts/argo.py b/icepyx/quest/dataset_scripts/argo.py
new file mode 100644
index 000000000..8c614d301
--- /dev/null
+++ b/icepyx/quest/dataset_scripts/argo.py
@@ -0,0 +1,515 @@
+import os.path
+
+import numpy as np
+import pandas as pd
+import requests
+
+from icepyx.core.spatial import geodataframe
+from icepyx.quest.dataset_scripts.dataset import DataSet
+
+
+class Argo(DataSet):
+    """
+    Initializes an Argo Dataset object via a Quest object.
+    Used to query physical and BGC Argo profiles.
+
+    Parameters
+    ----------
+    aoi :
+        area of interest supplied via the spatial parameter of the QUEST object
+    toi :
+        time period of interest supplied via the temporal parameter of the QUEST object
+    params : list of str, default ["temperature"]
+        A list of strings, where each string is a requested parameter.
+        Only metadata for profiles with the requested parameters are returned.
+        To search for all parameters, use `params=["all"]`;
+        be careful using all for floats with BGC data, as this may result in a large download.
+    presRange : str, default None
+        The pressure range (which correlates with depth) to search for data within.
+        Input as a "shallow-limit,deep-limit" string.
+
+    See Also
+    --------
+    DataSet
+    """
+
+    # Note: it looks like ArgoVis now accepts polygons, not just bounding boxes
+    def __init__(self, aoi, toi, params=["temperature"], presRange=None):
+        self._params = self._validate_parameters(params)
+        self._presRange = presRange
+        self._spatial = aoi
+        self._temporal = toi
+        # todo: verify that this will only work with a bounding box (I think our code can accept arbitrary polygons)
+        assert self._spatial._ext_type == "bounding_box"
+        self.argodata = None
+        self._apikey = "92259861231b55d32a9c0e4e3a93f4834fc0b6fa"
+
+    def __str__(self):
+        if self.presRange is None:
+            prange = "All"
+        else:
+            prange = str(self.presRange)
+
+        if self.argodata is None:
+            df = "No data yet"
+        else:
+            df = "\n" + str(self.argodata.head())
+        s = (
+            "---Argo---\n"
+            "Parameters: {0}\n"
+            "Pressure range: {1}\n"
+            "Dataframe head: {2}".format(self.params, prange, df)
+        )
+
+        return s
+
+    # ----------------------------------------------------------------------
+    # Properties
+
+    @property
+    def params(self) -> list:
+        """
+        User's list of Argo parameters to search (query) and download.
+
+        The user may modify this list directly.
+        """
+
+        return self._params
+
+    @params.setter
+    def params(self, value):
+        """
+        Validate the input list of parameters.
+        """
+
+        self._params = list(set(self._validate_parameters(value)))
+
+    @property
+    def presRange(self) -> str:
+        """
+        User's pressure range to search (query) and download.
+ + The user may modify this string directly. + """ + + return self._presRange + + @presRange.setter + def presRange(self, value): + """ + Update the presRange based on the user input + """ + + self._presRange = value + + # ---------------------------------------------------------------------- + # Formatting API Inputs + + def _fmt_coordinates(self) -> str: + """ + Convert spatial extent into string format needed by argovis API + i.e. list of polygon coords [[[lat1,lon1],[lat2,lon2],...]] + """ + + gdf = geodataframe(self._spatial._ext_type, self._spatial._spatial_ext) + coordinates_array = np.asarray(gdf.geometry[0].exterior.coords) + x = "" + for i in coordinates_array: + coord = "[{0},{1}]".format(i[0], i[1]) + if x == "": + x = coord + else: + x += "," + coord + + x = "[" + x + "]" + return x + + # ---------------------------------------------------------------------- + # Validation + + def _valid_params(self) -> list: + """ + A list of valid Argo measurement parameters (including BGC). + + To get a list of valid parameters, comment out the validation line in `search_data` herein, + submit a search with an invalid parameter, and get the list from the response. + """ + + valid_params = [ + # all argo + "pressure", + "pressure_argoqc", + "salinity", + "salinity_argoqc", + "salinity_sfile", + "salinity_sfile_argoqc", + "temperature", + "temperature_argoqc", + "temperature_sfile", + "temperature_sfile_argoqc", + # BGC params + "bbp470", + "bbp470_argoqc", + "bbp532", + "bbp532_argoqc", + "bbp700", + "bbp700_argoqc", + "bbp700_2", + "bbp700_2_argoqc", + "bisulfide", + "bisulfide_argoqc", + "cdom", + "cdom_argoqc", + "chla", + "chla_argoqc", + "cndc", + "cndc_argoqc", + "cndx", + "cndx_argoqc", + "cp660", + "cp660_argoqc", + "down_irradiance380", + "down_irradiance380_argoqc", + "down_irradiance412", + "down_irradiance412_argoqc", + "down_irradiance442", + "down_irradiance442_argoqc", + "down_irradiance443", + "down_irradiance443_argoqc", + "down_irradiance490", + "down_irradiance490_argoqc", + "down_irradiance555", + "down_irradiance555_argoqc", + "down_irradiance670", + "down_irradiance670_argoqc", + "downwelling_par", + "downwelling_par_argoqc", + "doxy", + "doxy_argoqc", + "doxy2", + "doxy2_argoqc", + "doxy3", + "doxy3_argoqc", + "molar_doxy", + "molar_doxy_argoqc", + "nitrate", + "nitrate_argoqc", + "ph_in_situ_total", + "ph_in_situ_total_argoqc", + "turbidity", + "turbidity_argoqc", + "up_radiance412", + "up_radiance412_argoqc", + "up_radiance443", + "up_radiance443_argoqc", + "up_radiance490", + "up_radiance490_argoqc", + "up_radiance555", + "up_radiance555_argoqc", + # all params + "all", + ] + return valid_params + + def _validate_parameters(self, params) -> list: + """ + Checks that the list of user requested parameters are valid. + + Returns + ------- + The list of valid parameters + """ + + if "all" in params: + params = ["all"] + else: + valid_params = self._valid_params() + # checks that params are valid + for i in params: + assert ( + i in valid_params + ), "Parameter '{0}' is not valid. Valid parameters are {1}".format( + i, valid_params + ) + + return list(set(params)) + + # ---------------------------------------------------------------------- + # Querying and Getting Data + + def search_data(self, params=None, presRange=None, printURL=False) -> str: + """ + Query for available argo profiles given the spatio temporal criteria + and other params specific to the dataset. 
+
+        Searches will automatically use the parameter and pressure range inputs
+        supplied when the `quest.argo` object was created unless replacement arguments
+        are added here.
+
+        Parameters
+        ----------
+        params : list of str, default None
+            A list of strings, where each string is a requested parameter.
+            This kwarg is used to replace the existing list in `self.params`.
+            Do not submit this kwarg if you would like to use the existing `self.params` list.
+            Only metadata for profiles with the requested parameters are returned.
+            To search for all parameters, use `params=["all"]`;
+            be careful using all for floats with BGC data, as this may result in a large download.
+        presRange : str, default None
+            The pressure range (which correlates with depth) to search for data within.
+            This kwarg is used to replace the existing pressure range in `self.presRange`.
+            Do not submit this kwarg if you would like to use the existing `self.presRange` values.
+            Input as a "shallow-limit,deep-limit" string.
+        printURL : boolean, default False
+            Print the URL of the data request. Useful for debugging and when no data is returned.
+
+        Returns
+        -------
+        str : message on the success status of the search
+        """
+
+        # if search is called with replaced parameters or presRange
+        if params is not None:
+            self.params = params
+
+        if presRange is not None:
+            self.presRange = presRange
+
+        # builds URL to be submitted
+        baseURL = "https://argovis-api.colorado.edu/argo"
+        payload = {
+            "startDate": self._temporal._start.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
+            "endDate": self._temporal._end.strftime("%Y-%m-%dT%H:%M:%S.%fZ"),
+            "polygon": [self._fmt_coordinates()],
+            "data": self.params,
+        }
+
+        if self.presRange is not None:
+            payload["presRange"] = self.presRange
+
+        # submit request
+        resp = requests.get(
+            baseURL, headers={"x-argokey": self._apikey}, params=payload
+        )
+
+        if printURL:
+            print(resp.url)
+
+        selectionProfiles = resp.json()
+
+        # Consider any status other than 2xx an error
+        if not resp.status_code // 100 == 2:
+            # check for the existence of profiles from query
+            if selectionProfiles == []:
+                msg = (
+                    "Warning: Query returned no profiles\n"
+                    "Please try different search parameters"
+                )
+                print(msg)
+                return msg
+
+            else:
+                msg = "Error: Unexpected response {}".format(resp)
+                print(msg)
+                return msg
+
+        # record the profile ids for the profiles that contain the requested parameters
+        prof_ids = []
+        for i in selectionProfiles:
+            prof_ids.append(i["_id"])
+        # should we be doing a set/duplicates check here??
+        self.prof_ids = prof_ids
+
+        msg = "{0} valid profiles have been identified".format(len(prof_ids))
+        print(msg)
+        return msg
+
+    def _download_profile(
+        self,
+        profile_number,
+        printURL=False,
+    ) -> dict:
+        """
+        Download available Argo data for a particular profile_ID.
+
+        Parameters
+        ----------
+        profile_number: str
+            String containing the Argo profile ID of the data being downloaded.
+        printURL: boolean, default False
+            Print the URL of the data request. Useful for debugging and when no data is returned.
+
+        Returns
+        -------
+        dict : json formatted dictionary of the profile data
+        """
+
+        # builds URL to be submitted
+        baseURL = "https://argovis-api.colorado.edu/argo"
+        payload = {
+            "id": profile_number,
+            "data": self.params,
+        }
+
+        if self.presRange:
+            payload["presRange"] = self.presRange
+
+        # submit request
+        resp = requests.get(
+            baseURL, headers={"x-argokey": self._apikey}, params=payload
+        )
+
+        if printURL:
+            print(resp.url)
+
+        # Consider any status other than 2xx an error
+        if not resp.status_code // 100 == 2:
+            return "Error: Unexpected response {}".format(resp)
+        profile = resp.json()
+        return profile
+
+    def _parse_into_df(self, profile_data) -> pd.DataFrame:
+        """
+        Parses downloaded data from a single profile into a dataframe.
+        Appends data to any existing profile data stored in the `argodata` property.
+
+        Parameters
+        ----------
+        profile_data: dict
+            The downloaded profile data.
+            The data is contained in the requests response and converted into a json formatted dictionary
+            by `_download_profile` before being passed into this function.
+
+        Returns
+        -------
+        pd.DataFrame : DataFrame of profile data
+        """
+
+        profileDf = pd.DataFrame(
+            np.transpose(profile_data["data"]), columns=profile_data["data_info"][0]
+        )
+
+        # this block tries to catch changes to the ArgoVis API that will break the dataframe creation
+        try:
+            profileDf["profile_id"] = profile_data["_id"]
+            # there's also a geolocation field that provides the geospatial info as shapely points
+            profileDf["lat"] = profile_data["geolocation"]["coordinates"][1]
+            profileDf["lon"] = profile_data["geolocation"]["coordinates"][0]
+            profileDf["date"] = profile_data["timestamp"]
+        except KeyError as err:
+            msg = "We cannot automatically parse your profile into a dataframe due to {0}".format(
+                err
+            )
+            print(msg)
+            return msg
+
+        profileDf.replace("None", np.nan, inplace=True, regex=True)
+
+        return profileDf
+
+    def download(self, params=None, presRange=None, keep_existing=True) -> pd.DataFrame:
+        """
+        Downloads the requested data for a list of profile IDs (stored under .prof_ids) and returns it in a DataFrame.
+
+        Data is also stored in self.argodata.
+        Note that if new inputs (`params` or `presRange`) are supplied and `keep_existing=True`,
+        the existing data will not be limited to the new input parameters.
+
+        Parameters
+        ----------
+        params : list of str, default None
+            A list of strings, where each string is a requested parameter.
+            This kwarg is used to replace the existing list in `self.params`.
+            Do not submit this kwarg if you would like to use the existing `self.params` list.
+            Only metadata for profiles with the requested parameters are returned.
+            To search for all parameters, use `params=["all"]`.
+            For a list of available parameters, see: `reg._valid_params`
+        presRange : str, default None
+            The pressure range (which correlates with depth) to search for data within.
+            This kwarg is used to replace the existing pressure range in `self.presRange`.
+            Do not submit this kwarg if you would like to use the existing `self.presRange` values.
+            Input as a "shallow-limit,deep-limit" string.
+        keep_existing : boolean, default True
+            Provides the option to clear any existing downloaded data before downloading more.
+ + Returns + ------- + pd.DataFrame : DataFrame of requested data + """ + + # TODO: do some basic testing of this block and how the dataframe merging actually behaves + if keep_existing == False: + print( + "Your previously stored data in reg.argodata", + "will be deleted before new data is downloaded.", + ) + self.argodata = None + elif keep_existing == True and hasattr(self, "argodata"): + print( + "The data requested by running this line of code\n", + "will be added to previously downloaded data.", + ) + + # if download is called with replaced parameters or presRange + if not params is None: + self.params = params + + if not presRange is None: + self.presRange = presRange + + # Add qc data for each of the parameters requested + if self.params == ["all"]: + pass + else: + for p in self.params: + if p.endswith("_argoqc") or (p + "_argoqc" in self.params): + pass + else: + self.params.append(p + "_argoqc") + + # intentionally resubmit search to reset prof_ids, in case the user requested different parameters + self.search_data() + + # create a dataframe for each profile and merge it with the rest of the profiles from this set of parameters being downloaded + merged_df = pd.DataFrame(columns=["profile_id"]) + for i in self.prof_ids: + print("processing profile", i) + try: + profile_data = self._download_profile(i) + profile_df = self._parse_into_df(profile_data[0]) + merged_df = pd.concat([merged_df, profile_df], sort=False) + except: + print("\tError processing profile {0}. Skipping.".format(i)) + + # now that we have a df from this round of downloads, we can add it to any existing dataframe + # note that if a given column has previously been added, update needs to be used to replace nans (merge will not replace the nan values) + if not self.argodata is None: + self.argodata = self.argodata.merge(merged_df, how="outer") + else: + self.argodata = merged_df + + self.argodata.reset_index(inplace=True, drop=True) + + return self.argodata + + def save(self, filepath): + """ + Saves the argo dataframe to a csv at the specified location + + Parameters + ---------- + filepath : str + String containing complete filepath and name of file + Any extension will be removed and replaced with csv. + Also appends '_argo.csv' to filename + e.g. /path/to/file/my_data(_argo.csv) + """ + + # create the directory if it doesn't exist + path, file = os.path.split(filepath) + if not os.path.exists(path): + os.mkdir(path) + + # remove any file extension + base, ext = os.path.splitext(filepath) + + self.argodata.to_csv(base + "_argo.csv") diff --git a/icepyx/quest/dataset_scripts/dataset.py b/icepyx/quest/dataset_scripts/dataset.py index 13e926229..193fab22e 100644 --- a/icepyx/quest/dataset_scripts/dataset.py +++ b/icepyx/quest/dataset_scripts/dataset.py @@ -1,4 +1,5 @@ import warnings +from icepyx.core.query import GenQuery warnings.filterwarnings("ignore") @@ -6,78 +7,79 @@ class DataSet: """ - Parent Class for all supported datasets (i.e. ATL03, ATL07, MODIS, etc.) - all sub classes must support the following methods for use in - colocated data class + Template parent class for all QUEST supported datasets (i.e. ICESat-2, Argo BGC, Argo, MODIS, etc.). + All sub-classes must support the following methods for use via the QUEST class. """ - def __init__(self, boundingbox, timeframe): + def __init__(self, spatial_extent, date_range, start_time=None, end_time=None): """ - * use existing Icepyx functionality to initialise this - :param timeframe: datetime + Complete any dataset specific initializations (i.e. 
beyond space and time) required here. + For instance, ICESat-2 requires a product, and Argo requires parameters. + One can also check that the "default" space and time supplied by QUEST are the right format + (e.g. if the spatial extent must be a bounding box). """ - self.bounding_box = boundingbox - self.time_frame = timeframe + raise NotImplementedError + + # ---------------------------------------------------------------------- + # Formatting API Inputs def _fmt_coordinates(self): - # use icepyx geospatial module (icepyx core) + """ + Convert spatial extent into format needed by DataSet API, + if different than the formats available directly from SuperQuery. + """ raise NotImplementedError def _fmt_timerange(self): """ - will return list of datetime objects [start_time, end_time] + Convert temporal information into format needed by DataSet API, + if different than the formats available directly from SuperQuery. """ raise NotImplementedError - # todo: merge with Icepyx SuperQuery - def _validate_input(self): + # ---------------------------------------------------------------------- + # Validation + + def _validate_inputs(self): """ - This may already be done in icepyx. - Not sure if we need this here + Create any additional validation functions for verifying inputs. + This function is not explicitly called by QUEST, + but is frequently needed for preparing API requests. + + See Also + -------- + quest.dataset_scripts.argo.Argo._validate_parameters """ raise NotImplementedError - def search_data(self, delta_t): + # ---------------------------------------------------------------------- + # Querying and Getting Data + + def search_data(self): """ - query dataset given the spatio temporal criteria - and other params specic to the dataset + Query the dataset (i.e. search for available data) + given the spatiotemporal criteria and other parameters specific to the dataset. """ raise NotImplementedError - def download(self, out_path): + def download(self): """ - once data is querried, the user may choose to dowload the - data locally + Download the data to your local machine. """ raise NotImplementedError - def visualize(self): + def save(self, filepath): """ - (once data is downloaded)?, makes a quick plot showing where - data are located - e.g. Plots location of Argo profile or highlights ATL03 photon track + Save the downloaded data to a directory on your local machine. """ raise NotImplementedError - def _add2colocated_plot(self): + # ---------------------------------------------------------------------- + # Working with Data + + def visualize(self): """ - Takes visualise() functionality and adds the plot to central - plot with other coincident data. This will be called by - show_area_overlap() in Colocateddata class + Tells QUEST how to plot data (for instance, which parameters to plot) on a basemap. + For ICESat-2, it might show a photon track, and for Argo it might show a profile location. """ raise NotImplementedError - - """ - The following are low priority functions - Not sure these are even worth keeping. Doesn't make sense for - all datasets. 
- """ - - # def get_meltpond_fraction(self): - # raise NotImplementedError - # - # def get_sea_ice_fraction(self): - # raise NotImplementedError - # - # def get_roughness(self): - # raise NotImplementedError diff --git a/icepyx/quest/quest.py b/icepyx/quest/quest.py index 2855a879c..966b19dca 100644 --- a/icepyx/quest/quest.py +++ b/icepyx/quest/quest.py @@ -1,25 +1,24 @@ import matplotlib.pyplot as plt -from icepyx.core.query import GenQuery +from icepyx.core.query import GenQuery, Query + +from icepyx.quest.dataset_scripts.argo import Argo -# todo: implement the subclass inheritance class Quest(GenQuery): """ QUEST - Query Unify Explore SpatioTemporal - object to query, obtain, and perform basic - operations on datasets for combined analysis with ICESat-2 data products. - A new dataset can be added using the `dataset.py` template. - A list of already supported datasets is available at: - Expands the icepyx GenQuery superclass. + operations on datasets (i.e. Argo, BGC Argo, MODIS, etc) for combined analysis with ICESat-2 + data products. A new dataset can be added using the `dataset.py` template. + QUEST expands the icepyx GenQuery superclass. See the doc page for GenQuery for details on temporal and spatial input parameters. - Parameters ---------- - projection : proj4 string - Not yet implemented - Ex text: a string name of projection to be used for plotting (e.g. 'Mercator', 'NorthPolarStereographic') + proj : proj4 string + Geospatial projection. + Not yet implemented Returns ------- @@ -38,7 +37,6 @@ class Quest(GenQuery): Date range: (2019-02-20 00:00:00, 2019-02-28 23:59:59) Data sets: None - # todo: make this work with real datasets Add datasets to the quest object. >>> reg_a.datasets = {'ATL07':None, 'Argo':None} @@ -55,19 +53,18 @@ class Quest(GenQuery): def __init__( self, - spatial_extent=None, - date_range=None, + spatial_extent, + date_range, start_time=None, end_time=None, - proj="Default", + proj="default", ): + """ + Tells QUEST to initialize data given the user input spatiotemporal data. + """ + super().__init__(spatial_extent, date_range, start_time, end_time) self.datasets = {} - self.projection = self._determine_proj(proj) - - # todo: maybe move this to icepyx superquery class - def _determine_proj(self, proj): - return None def __str__(self): str = super(Quest, self).__str__() @@ -83,4 +80,172 @@ def __str__(self): return str - # DEVNOTE: see colocated data branch and phyto team files for code that expands quest functionality + # ---------------------------------------------------------------------- + # Datasets + + def add_icesat2( + self, + product, + start_time=None, + end_time=None, + version=None, + cycles=None, + tracks=None, + files=None, + **kwargs, + ) -> None: + """ + Adds ICESat-2 datasets to QUEST structure. + + Parameters + ---------- + For details on inputs, see the Query documentation. + + Returns + ------- + None + + See Also + -------- + icepyx.core.GenQuery + icepyx.core.Query + """ + + query = Query( + product, + self._spatial.extent, + [self._temporal.start, self._temporal.end], + start_time, + end_time, + version, + cycles, + tracks, + files, + **kwargs, + ) + + self.datasets["icesat2"] = query + + def add_argo(self, params=["temperature"], presRange=None) -> None: + """ + Adds Argo (including Argo-BGC) to QUEST structure. + + Parameters + ---------- + For details on inputs, see the Argo dataset script documentation. 
+    def add_argo(self, params=["temperature"], presRange=None) -> None:
+        """
+        Adds Argo (including Argo-BGC) to the QUEST structure.
+
+        Parameters
+        ----------
+        For details on inputs, see the Argo dataset script documentation.
+
+        Returns
+        -------
+        None
+
+        See Also
+        --------
+        quest.dataset_scripts.argo
+        icepyx.query.GenQuery
+
+        Examples
+        --------
+        # example with profiles available
+        >>> reg_a = Quest([-154, 30, -143, 37], ['2022-04-12', '2022-04-26'])
+        >>> reg_a.add_argo(params=["temperature", "salinity"])
+        """
+
+        argo = Argo(self._spatial, self._temporal, params, presRange)
+        self.datasets["argo"] = argo
+
+    # ----------------------------------------------------------------------
+    # Methods (on all datasets)
+
+    # error handling? what happens when the user tries to re-query?
+    def search_all(self, **kwargs):
+        """
+        Searches each requested dataset within its platform (i.e. ICESat-2, Argo) of interest.
+
+        Parameters
+        ----------
+        **kwargs : default None
+            Optional passing of keyword arguments to supply additional search constraints per dataset.
+            Each key must match the dataset name (e.g. "icesat2", "argo") as in quest.datasets.keys(),
+            and the value is a dictionary of acceptable keyword arguments
+            and values allowable for the `search_data()` function for that dataset.
+            For instance: `icesat2 = {"IDs":True}, argo = {"presRange":"10,500"}`.
+        """
+        print("\nSearching all datasets...")
+
+        for k, v in self.datasets.items():
+            print()
+            try:
+                if isinstance(v, Query):
+                    print("---ICESat-2---")
+                    try:
+                        msg = v.avail_granules(kwargs[k])
+                    except KeyError:
+                        msg = v.avail_granules()
+                    print(msg)
+                else:
+                    print(k)
+                    try:
+                        v.search_data(kwargs[k])
+                    except KeyError:
+                        v.search_data()
+
+            except Exception:
+                dataset_name = type(v).__name__
+                print("Error querying data from {0}".format(dataset_name))
+
+    # error handling? what happens if the user tries to re-download?
+    def download_all(self, path="", **kwargs):
+        """
+        Downloads requested dataset(s).
+
+        Parameters
+        ----------
+        **kwargs : default None
+            Optional passing of keyword arguments to supply additional download constraints per dataset.
+            Each key must match the dataset name (e.g. "icesat2", "argo") as in quest.datasets.keys(),
+            and the value is a dictionary of acceptable keyword arguments
+            and values allowable for the `download()` function for that dataset.
+            For instance: `icesat2 = {"verbose":True}, argo = {"keep_existing":True}`.
+        """
+
+        print("\nDownloading all datasets...")
+
+        for k, v in self.datasets.items():
+            print()
+
+            try:
+                if isinstance(v, Query):
+                    print("---ICESat-2---")
+                    try:
+                        msg = v.download_granules(path, kwargs[k])
+                    except KeyError:
+                        msg = v.download_granules(path)
+                    print(msg)
+                else:
+                    print(k)
+                    try:
+                        msg = v.download(kwargs[k])
+                    except KeyError:
+                        msg = v.download()
+                    print(msg)
+            except Exception:
+                dataset_name = type(v).__name__
+                print("Error downloading data from {0}".format(dataset_name))
+
+    def save_all(self, path):
+        """
+        Saves all datasets according to their respective `.save()` functionality.
+
+        Parameters
+        ----------
+        path : str
+            Path at which to save the dataset files.
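+
+        Examples
+        --------
+        # hypothetical usage sketch (assumes datasets were added and downloaded above)
+        >>> reg_a.save_all("./quest_data")  # doctest: +SKIP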
+ + """ + + for k, v in self.datasets.items(): + if isinstance(v, Query): + print("ICESat-2 granules are saved during download") + else: + print("Saving " + k) + v.save(path) diff --git a/icepyx/tests/ATL06v05_options.json b/icepyx/tests/ATL06v05_options.json deleted file mode 100644 index 0fc236fe8..000000000 --- a/icepyx/tests/ATL06v05_options.json +++ /dev/null @@ -1 +0,0 @@ -{"options": [{"id": "ICESAT2", "spatialSubsetting": "true", "spatialSubsettingShapefile": "true", "temporalSubsetting": "true", "type": "both", "maxGransSyncRequest": "100", "maxGransAsyncRequest": "2000"}], "fileformats": ["TABULAR_ASCII", "NetCDF4-CF", "Shapefile", "NetCDF-3"], "reprojectionONLY": [], "noproj": [], "formatreproj": ["TABULAR_ASCII", "NetCDF4-CF", "Shapefile", "NetCDF-3"], "variables": ["ancillary_data/atlas_sdp_gps_epoch", "ancillary_data/control", "ancillary_data/data_end_utc", "ancillary_data/data_start_utc", "ancillary_data/end_cycle", "ancillary_data/end_delta_time", "ancillary_data/end_geoseg", "ancillary_data/end_gpssow", "ancillary_data/end_gpsweek", "ancillary_data/end_orbit", "ancillary_data/end_region", "ancillary_data/end_rgt", "ancillary_data/granule_end_utc", "ancillary_data/granule_start_utc", "ancillary_data/qa_at_interval", "ancillary_data/release", "ancillary_data/start_cycle", "ancillary_data/start_delta_time", "ancillary_data/start_geoseg", "ancillary_data/start_gpssow", "ancillary_data/start_gpsweek", "ancillary_data/start_orbit", "ancillary_data/start_region", "ancillary_data/start_rgt", "ancillary_data/version", "ancillary_data/land_ice/dt_hist", "ancillary_data/land_ice/fit_maxiter", "ancillary_data/land_ice/fpb_maxiter", "ancillary_data/land_ice/max_res_ids", "ancillary_data/land_ice/min_dist", "ancillary_data/land_ice/min_gain_th", "ancillary_data/land_ice/min_n_pe", "ancillary_data/land_ice/min_n_sel", "ancillary_data/land_ice/min_signal_conf", "ancillary_data/land_ice/n_hist", "ancillary_data/land_ice/nhist_bins", "ancillary_data/land_ice/n_sigmas", "ancillary_data/land_ice/proc_interval", "ancillary_data/land_ice/qs_lim_bsc", "ancillary_data/land_ice/qs_lim_hrs", "ancillary_data/land_ice/qs_lim_hsigma", "ancillary_data/land_ice/qs_lim_msw", "ancillary_data/land_ice/qs_lim_snr", "ancillary_data/land_ice/qs_lim_sss", "ancillary_data/land_ice/rbin_width", "ancillary_data/land_ice/sigma_beam", "ancillary_data/land_ice/sigma_tx", "ancillary_data/land_ice/t_dead", "ancillary_data/land_ice/txp_maxiter", "gt1l/land_ice_segments/atl06_quality_summary", "gt1l/land_ice_segments/delta_time", "gt1l/land_ice_segments/h_li", "gt1l/land_ice_segments/h_li_sigma", "gt1l/land_ice_segments/latitude", "gt1l/land_ice_segments/longitude", "gt1l/land_ice_segments/segment_id", "gt1l/land_ice_segments/sigma_geo_h", "gt1l/land_ice_segments/bias_correction/fpb_mean_corr", "gt1l/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt1l/land_ice_segments/bias_correction/fpb_med_corr", "gt1l/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt1l/land_ice_segments/bias_correction/fpb_n_corr", "gt1l/land_ice_segments/bias_correction/med_r_fit", "gt1l/land_ice_segments/bias_correction/tx_mean_corr", "gt1l/land_ice_segments/bias_correction/tx_med_corr", "gt1l/land_ice_segments/dem/dem_flag", "gt1l/land_ice_segments/dem/dem_h", "gt1l/land_ice_segments/dem/geoid_free2mean", "gt1l/land_ice_segments/dem/geoid_h", "gt1l/land_ice_segments/fit_statistics/dh_fit_dx", "gt1l/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt1l/land_ice_segments/fit_statistics/dh_fit_dy", 
"gt1l/land_ice_segments/fit_statistics/h_expected_rms", "gt1l/land_ice_segments/fit_statistics/h_mean", "gt1l/land_ice_segments/fit_statistics/h_rms_misfit", "gt1l/land_ice_segments/fit_statistics/h_robust_sprd", "gt1l/land_ice_segments/fit_statistics/n_fit_photons", "gt1l/land_ice_segments/fit_statistics/n_seg_pulses", "gt1l/land_ice_segments/fit_statistics/sigma_h_mean", "gt1l/land_ice_segments/fit_statistics/signal_selection_source", "gt1l/land_ice_segments/fit_statistics/signal_selection_source_status", "gt1l/land_ice_segments/fit_statistics/snr", "gt1l/land_ice_segments/fit_statistics/snr_significance", "gt1l/land_ice_segments/fit_statistics/w_surface_window_final", "gt1l/land_ice_segments/geophysical/bckgrd", "gt1l/land_ice_segments/geophysical/bsnow_conf", "gt1l/land_ice_segments/geophysical/bsnow_h", "gt1l/land_ice_segments/geophysical/bsnow_od", "gt1l/land_ice_segments/geophysical/cloud_flg_asr", "gt1l/land_ice_segments/geophysical/cloud_flg_atm", "gt1l/land_ice_segments/geophysical/dac", "gt1l/land_ice_segments/geophysical/e_bckgrd", "gt1l/land_ice_segments/geophysical/layer_flag", "gt1l/land_ice_segments/geophysical/msw_flag", "gt1l/land_ice_segments/geophysical/neutat_delay_total", "gt1l/land_ice_segments/geophysical/r_eff", "gt1l/land_ice_segments/geophysical/solar_azimuth", "gt1l/land_ice_segments/geophysical/solar_elevation", "gt1l/land_ice_segments/geophysical/tide_earth", "gt1l/land_ice_segments/geophysical/tide_earth_free2mean", "gt1l/land_ice_segments/geophysical/tide_equilibrium", "gt1l/land_ice_segments/geophysical/tide_load", "gt1l/land_ice_segments/geophysical/tide_ocean", "gt1l/land_ice_segments/geophysical/tide_pole", "gt1l/land_ice_segments/ground_track/ref_azimuth", "gt1l/land_ice_segments/ground_track/ref_coelv", "gt1l/land_ice_segments/ground_track/seg_azimuth", "gt1l/land_ice_segments/ground_track/sigma_geo_at", "gt1l/land_ice_segments/ground_track/sigma_geo_r", "gt1l/land_ice_segments/ground_track/sigma_geo_xt", "gt1l/land_ice_segments/ground_track/x_atc", "gt1l/land_ice_segments/ground_track/y_atc", "gt1l/residual_histogram/bckgrd_per_m", "gt1l/residual_histogram/bin_top_h", "gt1l/residual_histogram/count", "gt1l/residual_histogram/delta_time", "gt1l/residual_histogram/ds_segment_id", "gt1l/residual_histogram/lat_mean", "gt1l/residual_histogram/lon_mean", "gt1l/residual_histogram/pulse_count", "gt1l/residual_histogram/segment_id_list", "gt1l/residual_histogram/x_atc_mean", "gt1l/segment_quality/delta_time", "gt1l/segment_quality/record_number", "gt1l/segment_quality/reference_pt_lat", "gt1l/segment_quality/reference_pt_lon", "gt1l/segment_quality/segment_id", "gt1l/segment_quality/signal_selection_source", "gt1l/segment_quality/signal_selection_status/signal_selection_status_all", "gt1l/segment_quality/signal_selection_status/signal_selection_status_backup", "gt1l/segment_quality/signal_selection_status/signal_selection_status_confident", "gt1r/land_ice_segments/atl06_quality_summary", "gt1r/land_ice_segments/delta_time", "gt1r/land_ice_segments/h_li", "gt1r/land_ice_segments/h_li_sigma", "gt1r/land_ice_segments/latitude", "gt1r/land_ice_segments/longitude", "gt1r/land_ice_segments/segment_id", "gt1r/land_ice_segments/sigma_geo_h", "gt1r/land_ice_segments/bias_correction/fpb_mean_corr", "gt1r/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt1r/land_ice_segments/bias_correction/fpb_med_corr", "gt1r/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt1r/land_ice_segments/bias_correction/fpb_n_corr", 
"gt1r/land_ice_segments/bias_correction/med_r_fit", "gt1r/land_ice_segments/bias_correction/tx_mean_corr", "gt1r/land_ice_segments/bias_correction/tx_med_corr", "gt1r/land_ice_segments/dem/dem_flag", "gt1r/land_ice_segments/dem/dem_h", "gt1r/land_ice_segments/dem/geoid_free2mean", "gt1r/land_ice_segments/dem/geoid_h", "gt1r/land_ice_segments/fit_statistics/dh_fit_dx", "gt1r/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt1r/land_ice_segments/fit_statistics/dh_fit_dy", "gt1r/land_ice_segments/fit_statistics/h_expected_rms", "gt1r/land_ice_segments/fit_statistics/h_mean", "gt1r/land_ice_segments/fit_statistics/h_rms_misfit", "gt1r/land_ice_segments/fit_statistics/h_robust_sprd", "gt1r/land_ice_segments/fit_statistics/n_fit_photons", "gt1r/land_ice_segments/fit_statistics/n_seg_pulses", "gt1r/land_ice_segments/fit_statistics/sigma_h_mean", "gt1r/land_ice_segments/fit_statistics/signal_selection_source", "gt1r/land_ice_segments/fit_statistics/signal_selection_source_status", "gt1r/land_ice_segments/fit_statistics/snr", "gt1r/land_ice_segments/fit_statistics/snr_significance", "gt1r/land_ice_segments/fit_statistics/w_surface_window_final", "gt1r/land_ice_segments/geophysical/bckgrd", "gt1r/land_ice_segments/geophysical/bsnow_conf", "gt1r/land_ice_segments/geophysical/bsnow_h", "gt1r/land_ice_segments/geophysical/bsnow_od", "gt1r/land_ice_segments/geophysical/cloud_flg_asr", "gt1r/land_ice_segments/geophysical/cloud_flg_atm", "gt1r/land_ice_segments/geophysical/dac", "gt1r/land_ice_segments/geophysical/e_bckgrd", "gt1r/land_ice_segments/geophysical/layer_flag", "gt1r/land_ice_segments/geophysical/msw_flag", "gt1r/land_ice_segments/geophysical/neutat_delay_total", "gt1r/land_ice_segments/geophysical/r_eff", "gt1r/land_ice_segments/geophysical/solar_azimuth", "gt1r/land_ice_segments/geophysical/solar_elevation", "gt1r/land_ice_segments/geophysical/tide_earth", "gt1r/land_ice_segments/geophysical/tide_earth_free2mean", "gt1r/land_ice_segments/geophysical/tide_equilibrium", "gt1r/land_ice_segments/geophysical/tide_load", "gt1r/land_ice_segments/geophysical/tide_ocean", "gt1r/land_ice_segments/geophysical/tide_pole", "gt1r/land_ice_segments/ground_track/ref_azimuth", "gt1r/land_ice_segments/ground_track/ref_coelv", "gt1r/land_ice_segments/ground_track/seg_azimuth", "gt1r/land_ice_segments/ground_track/sigma_geo_at", "gt1r/land_ice_segments/ground_track/sigma_geo_r", "gt1r/land_ice_segments/ground_track/sigma_geo_xt", "gt1r/land_ice_segments/ground_track/x_atc", "gt1r/land_ice_segments/ground_track/y_atc", "gt1r/residual_histogram/bckgrd_per_m", "gt1r/residual_histogram/bin_top_h", "gt1r/residual_histogram/count", "gt1r/residual_histogram/delta_time", "gt1r/residual_histogram/ds_segment_id", "gt1r/residual_histogram/lat_mean", "gt1r/residual_histogram/lon_mean", "gt1r/residual_histogram/pulse_count", "gt1r/residual_histogram/segment_id_list", "gt1r/residual_histogram/x_atc_mean", "gt1r/segment_quality/delta_time", "gt1r/segment_quality/record_number", "gt1r/segment_quality/reference_pt_lat", "gt1r/segment_quality/reference_pt_lon", "gt1r/segment_quality/segment_id", "gt1r/segment_quality/signal_selection_source", "gt1r/segment_quality/signal_selection_status/signal_selection_status_all", "gt1r/segment_quality/signal_selection_status/signal_selection_status_backup", "gt1r/segment_quality/signal_selection_status/signal_selection_status_confident", "gt2l/land_ice_segments/atl06_quality_summary", "gt2l/land_ice_segments/delta_time", "gt2l/land_ice_segments/h_li", "gt2l/land_ice_segments/h_li_sigma", 
"gt2l/land_ice_segments/latitude", "gt2l/land_ice_segments/longitude", "gt2l/land_ice_segments/segment_id", "gt2l/land_ice_segments/sigma_geo_h", "gt2l/land_ice_segments/bias_correction/fpb_mean_corr", "gt2l/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt2l/land_ice_segments/bias_correction/fpb_med_corr", "gt2l/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt2l/land_ice_segments/bias_correction/fpb_n_corr", "gt2l/land_ice_segments/bias_correction/med_r_fit", "gt2l/land_ice_segments/bias_correction/tx_mean_corr", "gt2l/land_ice_segments/bias_correction/tx_med_corr", "gt2l/land_ice_segments/dem/dem_flag", "gt2l/land_ice_segments/dem/dem_h", "gt2l/land_ice_segments/dem/geoid_free2mean", "gt2l/land_ice_segments/dem/geoid_h", "gt2l/land_ice_segments/fit_statistics/dh_fit_dx", "gt2l/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt2l/land_ice_segments/fit_statistics/dh_fit_dy", "gt2l/land_ice_segments/fit_statistics/h_expected_rms", "gt2l/land_ice_segments/fit_statistics/h_mean", "gt2l/land_ice_segments/fit_statistics/h_rms_misfit", "gt2l/land_ice_segments/fit_statistics/h_robust_sprd", "gt2l/land_ice_segments/fit_statistics/n_fit_photons", "gt2l/land_ice_segments/fit_statistics/n_seg_pulses", "gt2l/land_ice_segments/fit_statistics/sigma_h_mean", "gt2l/land_ice_segments/fit_statistics/signal_selection_source", "gt2l/land_ice_segments/fit_statistics/signal_selection_source_status", "gt2l/land_ice_segments/fit_statistics/snr", "gt2l/land_ice_segments/fit_statistics/snr_significance", "gt2l/land_ice_segments/fit_statistics/w_surface_window_final", "gt2l/land_ice_segments/geophysical/bckgrd", "gt2l/land_ice_segments/geophysical/bsnow_conf", "gt2l/land_ice_segments/geophysical/bsnow_h", "gt2l/land_ice_segments/geophysical/bsnow_od", "gt2l/land_ice_segments/geophysical/cloud_flg_asr", "gt2l/land_ice_segments/geophysical/cloud_flg_atm", "gt2l/land_ice_segments/geophysical/dac", "gt2l/land_ice_segments/geophysical/e_bckgrd", "gt2l/land_ice_segments/geophysical/layer_flag", "gt2l/land_ice_segments/geophysical/msw_flag", "gt2l/land_ice_segments/geophysical/neutat_delay_total", "gt2l/land_ice_segments/geophysical/r_eff", "gt2l/land_ice_segments/geophysical/solar_azimuth", "gt2l/land_ice_segments/geophysical/solar_elevation", "gt2l/land_ice_segments/geophysical/tide_earth", "gt2l/land_ice_segments/geophysical/tide_earth_free2mean", "gt2l/land_ice_segments/geophysical/tide_equilibrium", "gt2l/land_ice_segments/geophysical/tide_load", "gt2l/land_ice_segments/geophysical/tide_ocean", "gt2l/land_ice_segments/geophysical/tide_pole", "gt2l/land_ice_segments/ground_track/ref_azimuth", "gt2l/land_ice_segments/ground_track/ref_coelv", "gt2l/land_ice_segments/ground_track/seg_azimuth", "gt2l/land_ice_segments/ground_track/sigma_geo_at", "gt2l/land_ice_segments/ground_track/sigma_geo_r", "gt2l/land_ice_segments/ground_track/sigma_geo_xt", "gt2l/land_ice_segments/ground_track/x_atc", "gt2l/land_ice_segments/ground_track/y_atc", "gt2l/residual_histogram/bckgrd_per_m", "gt2l/residual_histogram/bin_top_h", "gt2l/residual_histogram/count", "gt2l/residual_histogram/delta_time", "gt2l/residual_histogram/ds_segment_id", "gt2l/residual_histogram/lat_mean", "gt2l/residual_histogram/lon_mean", "gt2l/residual_histogram/pulse_count", "gt2l/residual_histogram/segment_id_list", "gt2l/residual_histogram/x_atc_mean", "gt2l/segment_quality/delta_time", "gt2l/segment_quality/record_number", "gt2l/segment_quality/reference_pt_lat", "gt2l/segment_quality/reference_pt_lon", "gt2l/segment_quality/segment_id", 
"gt2l/segment_quality/signal_selection_source", "gt2l/segment_quality/signal_selection_status/signal_selection_status_all", "gt2l/segment_quality/signal_selection_status/signal_selection_status_backup", "gt2l/segment_quality/signal_selection_status/signal_selection_status_confident", "gt2r/land_ice_segments/atl06_quality_summary", "gt2r/land_ice_segments/delta_time", "gt2r/land_ice_segments/h_li", "gt2r/land_ice_segments/h_li_sigma", "gt2r/land_ice_segments/latitude", "gt2r/land_ice_segments/longitude", "gt2r/land_ice_segments/segment_id", "gt2r/land_ice_segments/sigma_geo_h", "gt2r/land_ice_segments/bias_correction/fpb_mean_corr", "gt2r/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt2r/land_ice_segments/bias_correction/fpb_med_corr", "gt2r/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt2r/land_ice_segments/bias_correction/fpb_n_corr", "gt2r/land_ice_segments/bias_correction/med_r_fit", "gt2r/land_ice_segments/bias_correction/tx_mean_corr", "gt2r/land_ice_segments/bias_correction/tx_med_corr", "gt2r/land_ice_segments/dem/dem_flag", "gt2r/land_ice_segments/dem/dem_h", "gt2r/land_ice_segments/dem/geoid_free2mean", "gt2r/land_ice_segments/dem/geoid_h", "gt2r/land_ice_segments/fit_statistics/dh_fit_dx", "gt2r/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt2r/land_ice_segments/fit_statistics/dh_fit_dy", "gt2r/land_ice_segments/fit_statistics/h_expected_rms", "gt2r/land_ice_segments/fit_statistics/h_mean", "gt2r/land_ice_segments/fit_statistics/h_rms_misfit", "gt2r/land_ice_segments/fit_statistics/h_robust_sprd", "gt2r/land_ice_segments/fit_statistics/n_fit_photons", "gt2r/land_ice_segments/fit_statistics/n_seg_pulses", "gt2r/land_ice_segments/fit_statistics/sigma_h_mean", "gt2r/land_ice_segments/fit_statistics/signal_selection_source", "gt2r/land_ice_segments/fit_statistics/signal_selection_source_status", "gt2r/land_ice_segments/fit_statistics/snr", "gt2r/land_ice_segments/fit_statistics/snr_significance", "gt2r/land_ice_segments/fit_statistics/w_surface_window_final", "gt2r/land_ice_segments/geophysical/bckgrd", "gt2r/land_ice_segments/geophysical/bsnow_conf", "gt2r/land_ice_segments/geophysical/bsnow_h", "gt2r/land_ice_segments/geophysical/bsnow_od", "gt2r/land_ice_segments/geophysical/cloud_flg_asr", "gt2r/land_ice_segments/geophysical/cloud_flg_atm", "gt2r/land_ice_segments/geophysical/dac", "gt2r/land_ice_segments/geophysical/e_bckgrd", "gt2r/land_ice_segments/geophysical/layer_flag", "gt2r/land_ice_segments/geophysical/msw_flag", "gt2r/land_ice_segments/geophysical/neutat_delay_total", "gt2r/land_ice_segments/geophysical/r_eff", "gt2r/land_ice_segments/geophysical/solar_azimuth", "gt2r/land_ice_segments/geophysical/solar_elevation", "gt2r/land_ice_segments/geophysical/tide_earth", "gt2r/land_ice_segments/geophysical/tide_earth_free2mean", "gt2r/land_ice_segments/geophysical/tide_equilibrium", "gt2r/land_ice_segments/geophysical/tide_load", "gt2r/land_ice_segments/geophysical/tide_ocean", "gt2r/land_ice_segments/geophysical/tide_pole", "gt2r/land_ice_segments/ground_track/ref_azimuth", "gt2r/land_ice_segments/ground_track/ref_coelv", "gt2r/land_ice_segments/ground_track/seg_azimuth", "gt2r/land_ice_segments/ground_track/sigma_geo_at", "gt2r/land_ice_segments/ground_track/sigma_geo_r", "gt2r/land_ice_segments/ground_track/sigma_geo_xt", "gt2r/land_ice_segments/ground_track/x_atc", "gt2r/land_ice_segments/ground_track/y_atc", "gt2r/residual_histogram/bckgrd_per_m", "gt2r/residual_histogram/bin_top_h", "gt2r/residual_histogram/count", 
"gt2r/residual_histogram/delta_time", "gt2r/residual_histogram/ds_segment_id", "gt2r/residual_histogram/lat_mean", "gt2r/residual_histogram/lon_mean", "gt2r/residual_histogram/pulse_count", "gt2r/residual_histogram/segment_id_list", "gt2r/residual_histogram/x_atc_mean", "gt2r/segment_quality/delta_time", "gt2r/segment_quality/record_number", "gt2r/segment_quality/reference_pt_lat", "gt2r/segment_quality/reference_pt_lon", "gt2r/segment_quality/segment_id", "gt2r/segment_quality/signal_selection_source", "gt2r/segment_quality/signal_selection_status/signal_selection_status_all", "gt2r/segment_quality/signal_selection_status/signal_selection_status_backup", "gt2r/segment_quality/signal_selection_status/signal_selection_status_confident", "gt3l/land_ice_segments/atl06_quality_summary", "gt3l/land_ice_segments/delta_time", "gt3l/land_ice_segments/h_li", "gt3l/land_ice_segments/h_li_sigma", "gt3l/land_ice_segments/latitude", "gt3l/land_ice_segments/longitude", "gt3l/land_ice_segments/segment_id", "gt3l/land_ice_segments/sigma_geo_h", "gt3l/land_ice_segments/bias_correction/fpb_mean_corr", "gt3l/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt3l/land_ice_segments/bias_correction/fpb_med_corr", "gt3l/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt3l/land_ice_segments/bias_correction/fpb_n_corr", "gt3l/land_ice_segments/bias_correction/med_r_fit", "gt3l/land_ice_segments/bias_correction/tx_mean_corr", "gt3l/land_ice_segments/bias_correction/tx_med_corr", "gt3l/land_ice_segments/dem/dem_flag", "gt3l/land_ice_segments/dem/dem_h", "gt3l/land_ice_segments/dem/geoid_free2mean", "gt3l/land_ice_segments/dem/geoid_h", "gt3l/land_ice_segments/fit_statistics/dh_fit_dx", "gt3l/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt3l/land_ice_segments/fit_statistics/dh_fit_dy", "gt3l/land_ice_segments/fit_statistics/h_expected_rms", "gt3l/land_ice_segments/fit_statistics/h_mean", "gt3l/land_ice_segments/fit_statistics/h_rms_misfit", "gt3l/land_ice_segments/fit_statistics/h_robust_sprd", "gt3l/land_ice_segments/fit_statistics/n_fit_photons", "gt3l/land_ice_segments/fit_statistics/n_seg_pulses", "gt3l/land_ice_segments/fit_statistics/sigma_h_mean", "gt3l/land_ice_segments/fit_statistics/signal_selection_source", "gt3l/land_ice_segments/fit_statistics/signal_selection_source_status", "gt3l/land_ice_segments/fit_statistics/snr", "gt3l/land_ice_segments/fit_statistics/snr_significance", "gt3l/land_ice_segments/fit_statistics/w_surface_window_final", "gt3l/land_ice_segments/geophysical/bckgrd", "gt3l/land_ice_segments/geophysical/bsnow_conf", "gt3l/land_ice_segments/geophysical/bsnow_h", "gt3l/land_ice_segments/geophysical/bsnow_od", "gt3l/land_ice_segments/geophysical/cloud_flg_asr", "gt3l/land_ice_segments/geophysical/cloud_flg_atm", "gt3l/land_ice_segments/geophysical/dac", "gt3l/land_ice_segments/geophysical/e_bckgrd", "gt3l/land_ice_segments/geophysical/layer_flag", "gt3l/land_ice_segments/geophysical/msw_flag", "gt3l/land_ice_segments/geophysical/neutat_delay_total", "gt3l/land_ice_segments/geophysical/r_eff", "gt3l/land_ice_segments/geophysical/solar_azimuth", "gt3l/land_ice_segments/geophysical/solar_elevation", "gt3l/land_ice_segments/geophysical/tide_earth", "gt3l/land_ice_segments/geophysical/tide_earth_free2mean", "gt3l/land_ice_segments/geophysical/tide_equilibrium", "gt3l/land_ice_segments/geophysical/tide_load", "gt3l/land_ice_segments/geophysical/tide_ocean", "gt3l/land_ice_segments/geophysical/tide_pole", "gt3l/land_ice_segments/ground_track/ref_azimuth", 
"gt3l/land_ice_segments/ground_track/ref_coelv", "gt3l/land_ice_segments/ground_track/seg_azimuth", "gt3l/land_ice_segments/ground_track/sigma_geo_at", "gt3l/land_ice_segments/ground_track/sigma_geo_r", "gt3l/land_ice_segments/ground_track/sigma_geo_xt", "gt3l/land_ice_segments/ground_track/x_atc", "gt3l/land_ice_segments/ground_track/y_atc", "gt3l/residual_histogram/bckgrd_per_m", "gt3l/residual_histogram/bin_top_h", "gt3l/residual_histogram/count", "gt3l/residual_histogram/delta_time", "gt3l/residual_histogram/ds_segment_id", "gt3l/residual_histogram/lat_mean", "gt3l/residual_histogram/lon_mean", "gt3l/residual_histogram/pulse_count", "gt3l/residual_histogram/segment_id_list", "gt3l/residual_histogram/x_atc_mean", "gt3l/segment_quality/delta_time", "gt3l/segment_quality/record_number", "gt3l/segment_quality/reference_pt_lat", "gt3l/segment_quality/reference_pt_lon", "gt3l/segment_quality/segment_id", "gt3l/segment_quality/signal_selection_source", "gt3l/segment_quality/signal_selection_status/signal_selection_status_all", "gt3l/segment_quality/signal_selection_status/signal_selection_status_backup", "gt3l/segment_quality/signal_selection_status/signal_selection_status_confident", "gt3r/land_ice_segments/atl06_quality_summary", "gt3r/land_ice_segments/delta_time", "gt3r/land_ice_segments/h_li", "gt3r/land_ice_segments/h_li_sigma", "gt3r/land_ice_segments/latitude", "gt3r/land_ice_segments/longitude", "gt3r/land_ice_segments/segment_id", "gt3r/land_ice_segments/sigma_geo_h", "gt3r/land_ice_segments/bias_correction/fpb_mean_corr", "gt3r/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt3r/land_ice_segments/bias_correction/fpb_med_corr", "gt3r/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt3r/land_ice_segments/bias_correction/fpb_n_corr", "gt3r/land_ice_segments/bias_correction/med_r_fit", "gt3r/land_ice_segments/bias_correction/tx_mean_corr", "gt3r/land_ice_segments/bias_correction/tx_med_corr", "gt3r/land_ice_segments/dem/dem_flag", "gt3r/land_ice_segments/dem/dem_h", "gt3r/land_ice_segments/dem/geoid_free2mean", "gt3r/land_ice_segments/dem/geoid_h", "gt3r/land_ice_segments/fit_statistics/dh_fit_dx", "gt3r/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt3r/land_ice_segments/fit_statistics/dh_fit_dy", "gt3r/land_ice_segments/fit_statistics/h_expected_rms", "gt3r/land_ice_segments/fit_statistics/h_mean", "gt3r/land_ice_segments/fit_statistics/h_rms_misfit", "gt3r/land_ice_segments/fit_statistics/h_robust_sprd", "gt3r/land_ice_segments/fit_statistics/n_fit_photons", "gt3r/land_ice_segments/fit_statistics/n_seg_pulses", "gt3r/land_ice_segments/fit_statistics/sigma_h_mean", "gt3r/land_ice_segments/fit_statistics/signal_selection_source", "gt3r/land_ice_segments/fit_statistics/signal_selection_source_status", "gt3r/land_ice_segments/fit_statistics/snr", "gt3r/land_ice_segments/fit_statistics/snr_significance", "gt3r/land_ice_segments/fit_statistics/w_surface_window_final", "gt3r/land_ice_segments/geophysical/bckgrd", "gt3r/land_ice_segments/geophysical/bsnow_conf", "gt3r/land_ice_segments/geophysical/bsnow_h", "gt3r/land_ice_segments/geophysical/bsnow_od", "gt3r/land_ice_segments/geophysical/cloud_flg_asr", "gt3r/land_ice_segments/geophysical/cloud_flg_atm", "gt3r/land_ice_segments/geophysical/dac", "gt3r/land_ice_segments/geophysical/e_bckgrd", "gt3r/land_ice_segments/geophysical/layer_flag", "gt3r/land_ice_segments/geophysical/msw_flag", "gt3r/land_ice_segments/geophysical/neutat_delay_total", "gt3r/land_ice_segments/geophysical/r_eff", 
"gt3r/land_ice_segments/geophysical/solar_azimuth", "gt3r/land_ice_segments/geophysical/solar_elevation", "gt3r/land_ice_segments/geophysical/tide_earth", "gt3r/land_ice_segments/geophysical/tide_earth_free2mean", "gt3r/land_ice_segments/geophysical/tide_equilibrium", "gt3r/land_ice_segments/geophysical/tide_load", "gt3r/land_ice_segments/geophysical/tide_ocean", "gt3r/land_ice_segments/geophysical/tide_pole", "gt3r/land_ice_segments/ground_track/ref_azimuth", "gt3r/land_ice_segments/ground_track/ref_coelv", "gt3r/land_ice_segments/ground_track/seg_azimuth", "gt3r/land_ice_segments/ground_track/sigma_geo_at", "gt3r/land_ice_segments/ground_track/sigma_geo_r", "gt3r/land_ice_segments/ground_track/sigma_geo_xt", "gt3r/land_ice_segments/ground_track/x_atc", "gt3r/land_ice_segments/ground_track/y_atc", "gt3r/residual_histogram/bckgrd_per_m", "gt3r/residual_histogram/bin_top_h", "gt3r/residual_histogram/count", "gt3r/residual_histogram/delta_time", "gt3r/residual_histogram/ds_segment_id", "gt3r/residual_histogram/lat_mean", "gt3r/residual_histogram/lon_mean", "gt3r/residual_histogram/pulse_count", "gt3r/residual_histogram/segment_id_list", "gt3r/residual_histogram/x_atc_mean", "gt3r/segment_quality/delta_time", "gt3r/segment_quality/record_number", "gt3r/segment_quality/reference_pt_lat", "gt3r/segment_quality/reference_pt_lon", "gt3r/segment_quality/segment_id", "gt3r/segment_quality/signal_selection_source", "gt3r/segment_quality/signal_selection_status/signal_selection_status_all", "gt3r/segment_quality/signal_selection_status/signal_selection_status_backup", "gt3r/segment_quality/signal_selection_status/signal_selection_status_confident", "orbit_info/crossing_time", "orbit_info/cycle_number", "orbit_info/lan", "orbit_info/orbit_number", "orbit_info/rgt", "orbit_info/sc_orient", "orbit_info/sc_orient_time", "quality_assessment/qa_granule_fail_reason", "quality_assessment/qa_granule_pass_fail", "quality_assessment/gt1l/delta_time", "quality_assessment/gt1l/lat_mean", "quality_assessment/gt1l/lon_mean", "quality_assessment/gt1l/signal_selection_source_fraction_0", "quality_assessment/gt1l/signal_selection_source_fraction_1", "quality_assessment/gt1l/signal_selection_source_fraction_2", "quality_assessment/gt1l/signal_selection_source_fraction_3", "quality_assessment/gt1r/delta_time", "quality_assessment/gt1r/lat_mean", "quality_assessment/gt1r/lon_mean", "quality_assessment/gt1r/signal_selection_source_fraction_0", "quality_assessment/gt1r/signal_selection_source_fraction_1", "quality_assessment/gt1r/signal_selection_source_fraction_2", "quality_assessment/gt1r/signal_selection_source_fraction_3", "quality_assessment/gt2l/delta_time", "quality_assessment/gt2l/lat_mean", "quality_assessment/gt2l/lon_mean", "quality_assessment/gt2l/signal_selection_source_fraction_0", "quality_assessment/gt2l/signal_selection_source_fraction_1", "quality_assessment/gt2l/signal_selection_source_fraction_2", "quality_assessment/gt2l/signal_selection_source_fraction_3", "quality_assessment/gt2r/delta_time", "quality_assessment/gt2r/lat_mean", "quality_assessment/gt2r/lon_mean", "quality_assessment/gt2r/signal_selection_source_fraction_0", "quality_assessment/gt2r/signal_selection_source_fraction_1", "quality_assessment/gt2r/signal_selection_source_fraction_2", "quality_assessment/gt2r/signal_selection_source_fraction_3", "quality_assessment/gt3l/delta_time", "quality_assessment/gt3l/lat_mean", "quality_assessment/gt3l/lon_mean", "quality_assessment/gt3l/signal_selection_source_fraction_0", 
"quality_assessment/gt3l/signal_selection_source_fraction_1", "quality_assessment/gt3l/signal_selection_source_fraction_2", "quality_assessment/gt3l/signal_selection_source_fraction_3", "quality_assessment/gt3r/delta_time", "quality_assessment/gt3r/lat_mean", "quality_assessment/gt3r/lon_mean", "quality_assessment/gt3r/signal_selection_source_fraction_0", "quality_assessment/gt3r/signal_selection_source_fraction_1", "quality_assessment/gt3r/signal_selection_source_fraction_2", "quality_assessment/gt3r/signal_selection_source_fraction_3"]} \ No newline at end of file diff --git a/icepyx/tests/ATL06v06_options.json b/icepyx/tests/ATL06v06_options.json new file mode 100644 index 000000000..7851446be --- /dev/null +++ b/icepyx/tests/ATL06v06_options.json @@ -0,0 +1 @@ +{"options": [{"id": "ICESAT2", "spatialSubsetting": "true", "spatialSubsettingShapefile": "true", "temporalSubsetting": "true", "type": "both", "maxGransSyncRequest": "100", "maxGransAsyncRequest": "2000"}], "fileformats": ["TABULAR_ASCII", "NetCDF4-CF", "Shapefile"], "reprojectionONLY": [], "noproj": [], "formatreproj": ["TABULAR_ASCII", "NetCDF4-CF", "Shapefile"], "variables": ["ancillary_data/atlas_sdp_gps_epoch", "ancillary_data/control", "ancillary_data/data_end_utc", "ancillary_data/data_start_utc", "ancillary_data/end_cycle", "ancillary_data/end_delta_time", "ancillary_data/end_geoseg", "ancillary_data/end_gpssow", "ancillary_data/end_gpsweek", "ancillary_data/end_orbit", "ancillary_data/end_region", "ancillary_data/end_rgt", "ancillary_data/granule_end_utc", "ancillary_data/granule_start_utc", "ancillary_data/qa_at_interval", "ancillary_data/release", "ancillary_data/start_cycle", "ancillary_data/start_delta_time", "ancillary_data/start_geoseg", "ancillary_data/start_gpssow", "ancillary_data/start_gpsweek", "ancillary_data/start_orbit", "ancillary_data/start_region", "ancillary_data/start_rgt", "ancillary_data/version", "ancillary_data/land_ice/dt_hist", "ancillary_data/land_ice/fit_maxiter", "ancillary_data/land_ice/fpb_maxiter", "ancillary_data/land_ice/max_res_ids", "ancillary_data/land_ice/min_dist", "ancillary_data/land_ice/min_gain_th", "ancillary_data/land_ice/min_n_pe", "ancillary_data/land_ice/min_n_sel", "ancillary_data/land_ice/min_signal_conf", "ancillary_data/land_ice/n_hist", "ancillary_data/land_ice/nhist_bins", "ancillary_data/land_ice/n_sigmas", "ancillary_data/land_ice/proc_interval", "ancillary_data/land_ice/qs_lim_bsc", "ancillary_data/land_ice/qs_lim_hrs", "ancillary_data/land_ice/qs_lim_hsigma", "ancillary_data/land_ice/qs_lim_msw", "ancillary_data/land_ice/qs_lim_snr", "ancillary_data/land_ice/qs_lim_sss", "ancillary_data/land_ice/rbin_width", "ancillary_data/land_ice/sigma_beam", "ancillary_data/land_ice/sigma_tx", "ancillary_data/land_ice/t_dead", "ancillary_data/land_ice/txp_maxiter", "gt1l/land_ice_segments/atl06_quality_summary", "gt1l/land_ice_segments/delta_time", "gt1l/land_ice_segments/h_li", "gt1l/land_ice_segments/h_li_sigma", "gt1l/land_ice_segments/latitude", "gt1l/land_ice_segments/longitude", "gt1l/land_ice_segments/segment_id", "gt1l/land_ice_segments/sigma_geo_h", "gt1l/land_ice_segments/bias_correction/fpb_mean_corr", "gt1l/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt1l/land_ice_segments/bias_correction/fpb_med_corr", "gt1l/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt1l/land_ice_segments/bias_correction/fpb_n_corr", "gt1l/land_ice_segments/bias_correction/med_r_fit", "gt1l/land_ice_segments/bias_correction/tx_mean_corr", 
"gt1l/land_ice_segments/bias_correction/tx_med_corr", "gt1l/land_ice_segments/dem/dem_flag", "gt1l/land_ice_segments/dem/dem_h", "gt1l/land_ice_segments/dem/geoid_free2mean", "gt1l/land_ice_segments/dem/geoid_h", "gt1l/land_ice_segments/fit_statistics/dh_fit_dx", "gt1l/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt1l/land_ice_segments/fit_statistics/dh_fit_dy", "gt1l/land_ice_segments/fit_statistics/h_expected_rms", "gt1l/land_ice_segments/fit_statistics/h_mean", "gt1l/land_ice_segments/fit_statistics/h_rms_misfit", "gt1l/land_ice_segments/fit_statistics/h_robust_sprd", "gt1l/land_ice_segments/fit_statistics/n_fit_photons", "gt1l/land_ice_segments/fit_statistics/n_seg_pulses", "gt1l/land_ice_segments/fit_statistics/sigma_h_mean", "gt1l/land_ice_segments/fit_statistics/signal_selection_source", "gt1l/land_ice_segments/fit_statistics/signal_selection_source_status", "gt1l/land_ice_segments/fit_statistics/snr", "gt1l/land_ice_segments/fit_statistics/snr_significance", "gt1l/land_ice_segments/fit_statistics/w_surface_window_final", "gt1l/land_ice_segments/geophysical/bckgrd", "gt1l/land_ice_segments/geophysical/bsnow_conf", "gt1l/land_ice_segments/geophysical/bsnow_h", "gt1l/land_ice_segments/geophysical/bsnow_od", "gt1l/land_ice_segments/geophysical/cloud_flg_asr", "gt1l/land_ice_segments/geophysical/cloud_flg_atm", "gt1l/land_ice_segments/geophysical/dac", "gt1l/land_ice_segments/geophysical/e_bckgrd", "gt1l/land_ice_segments/geophysical/layer_flag", "gt1l/land_ice_segments/geophysical/msw_flag", "gt1l/land_ice_segments/geophysical/neutat_delay_total", "gt1l/land_ice_segments/geophysical/r_eff", "gt1l/land_ice_segments/geophysical/solar_azimuth", "gt1l/land_ice_segments/geophysical/solar_elevation", "gt1l/land_ice_segments/geophysical/tide_earth", "gt1l/land_ice_segments/geophysical/tide_earth_free2mean", "gt1l/land_ice_segments/geophysical/tide_equilibrium", "gt1l/land_ice_segments/geophysical/tide_load", "gt1l/land_ice_segments/geophysical/tide_ocean", "gt1l/land_ice_segments/geophysical/tide_pole", "gt1l/land_ice_segments/ground_track/ref_azimuth", "gt1l/land_ice_segments/ground_track/ref_coelv", "gt1l/land_ice_segments/ground_track/seg_azimuth", "gt1l/land_ice_segments/ground_track/sigma_geo_at", "gt1l/land_ice_segments/ground_track/sigma_geo_r", "gt1l/land_ice_segments/ground_track/sigma_geo_xt", "gt1l/land_ice_segments/ground_track/x_atc", "gt1l/land_ice_segments/ground_track/y_atc", "gt1l/residual_histogram/bckgrd_per_m", "gt1l/residual_histogram/bin_top_h", "gt1l/residual_histogram/count", "gt1l/residual_histogram/delta_time", "gt1l/residual_histogram/ds_segment_id", "gt1l/residual_histogram/lat_mean", "gt1l/residual_histogram/lon_mean", "gt1l/residual_histogram/pulse_count", "gt1l/residual_histogram/segment_id_list", "gt1l/residual_histogram/x_atc_mean", "gt1l/segment_quality/delta_time", "gt1l/segment_quality/record_number", "gt1l/segment_quality/reference_pt_lat", "gt1l/segment_quality/reference_pt_lon", "gt1l/segment_quality/segment_id", "gt1l/segment_quality/signal_selection_source", "gt1l/segment_quality/signal_selection_status/signal_selection_status_all", "gt1l/segment_quality/signal_selection_status/signal_selection_status_backup", "gt1l/segment_quality/signal_selection_status/signal_selection_status_confident", "gt1r/land_ice_segments/atl06_quality_summary", "gt1r/land_ice_segments/delta_time", "gt1r/land_ice_segments/h_li", "gt1r/land_ice_segments/h_li_sigma", "gt1r/land_ice_segments/latitude", "gt1r/land_ice_segments/longitude", 
"gt1r/land_ice_segments/segment_id", "gt1r/land_ice_segments/sigma_geo_h", "gt1r/land_ice_segments/bias_correction/fpb_mean_corr", "gt1r/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt1r/land_ice_segments/bias_correction/fpb_med_corr", "gt1r/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt1r/land_ice_segments/bias_correction/fpb_n_corr", "gt1r/land_ice_segments/bias_correction/med_r_fit", "gt1r/land_ice_segments/bias_correction/tx_mean_corr", "gt1r/land_ice_segments/bias_correction/tx_med_corr", "gt1r/land_ice_segments/dem/dem_flag", "gt1r/land_ice_segments/dem/dem_h", "gt1r/land_ice_segments/dem/geoid_free2mean", "gt1r/land_ice_segments/dem/geoid_h", "gt1r/land_ice_segments/fit_statistics/dh_fit_dx", "gt1r/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt1r/land_ice_segments/fit_statistics/dh_fit_dy", "gt1r/land_ice_segments/fit_statistics/h_expected_rms", "gt1r/land_ice_segments/fit_statistics/h_mean", "gt1r/land_ice_segments/fit_statistics/h_rms_misfit", "gt1r/land_ice_segments/fit_statistics/h_robust_sprd", "gt1r/land_ice_segments/fit_statistics/n_fit_photons", "gt1r/land_ice_segments/fit_statistics/n_seg_pulses", "gt1r/land_ice_segments/fit_statistics/sigma_h_mean", "gt1r/land_ice_segments/fit_statistics/signal_selection_source", "gt1r/land_ice_segments/fit_statistics/signal_selection_source_status", "gt1r/land_ice_segments/fit_statistics/snr", "gt1r/land_ice_segments/fit_statistics/snr_significance", "gt1r/land_ice_segments/fit_statistics/w_surface_window_final", "gt1r/land_ice_segments/geophysical/bckgrd", "gt1r/land_ice_segments/geophysical/bsnow_conf", "gt1r/land_ice_segments/geophysical/bsnow_h", "gt1r/land_ice_segments/geophysical/bsnow_od", "gt1r/land_ice_segments/geophysical/cloud_flg_asr", "gt1r/land_ice_segments/geophysical/cloud_flg_atm", "gt1r/land_ice_segments/geophysical/dac", "gt1r/land_ice_segments/geophysical/e_bckgrd", "gt1r/land_ice_segments/geophysical/layer_flag", "gt1r/land_ice_segments/geophysical/msw_flag", "gt1r/land_ice_segments/geophysical/neutat_delay_total", "gt1r/land_ice_segments/geophysical/r_eff", "gt1r/land_ice_segments/geophysical/solar_azimuth", "gt1r/land_ice_segments/geophysical/solar_elevation", "gt1r/land_ice_segments/geophysical/tide_earth", "gt1r/land_ice_segments/geophysical/tide_earth_free2mean", "gt1r/land_ice_segments/geophysical/tide_equilibrium", "gt1r/land_ice_segments/geophysical/tide_load", "gt1r/land_ice_segments/geophysical/tide_ocean", "gt1r/land_ice_segments/geophysical/tide_pole", "gt1r/land_ice_segments/ground_track/ref_azimuth", "gt1r/land_ice_segments/ground_track/ref_coelv", "gt1r/land_ice_segments/ground_track/seg_azimuth", "gt1r/land_ice_segments/ground_track/sigma_geo_at", "gt1r/land_ice_segments/ground_track/sigma_geo_r", "gt1r/land_ice_segments/ground_track/sigma_geo_xt", "gt1r/land_ice_segments/ground_track/x_atc", "gt1r/land_ice_segments/ground_track/y_atc", "gt1r/residual_histogram/bckgrd_per_m", "gt1r/residual_histogram/bin_top_h", "gt1r/residual_histogram/count", "gt1r/residual_histogram/delta_time", "gt1r/residual_histogram/ds_segment_id", "gt1r/residual_histogram/lat_mean", "gt1r/residual_histogram/lon_mean", "gt1r/residual_histogram/pulse_count", "gt1r/residual_histogram/segment_id_list", "gt1r/residual_histogram/x_atc_mean", "gt1r/segment_quality/delta_time", "gt1r/segment_quality/record_number", "gt1r/segment_quality/reference_pt_lat", "gt1r/segment_quality/reference_pt_lon", "gt1r/segment_quality/segment_id", "gt1r/segment_quality/signal_selection_source", 
"gt1r/segment_quality/signal_selection_status/signal_selection_status_all", "gt1r/segment_quality/signal_selection_status/signal_selection_status_backup", "gt1r/segment_quality/signal_selection_status/signal_selection_status_confident", "gt2l/land_ice_segments/atl06_quality_summary", "gt2l/land_ice_segments/delta_time", "gt2l/land_ice_segments/h_li", "gt2l/land_ice_segments/h_li_sigma", "gt2l/land_ice_segments/latitude", "gt2l/land_ice_segments/longitude", "gt2l/land_ice_segments/segment_id", "gt2l/land_ice_segments/sigma_geo_h", "gt2l/land_ice_segments/bias_correction/fpb_mean_corr", "gt2l/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt2l/land_ice_segments/bias_correction/fpb_med_corr", "gt2l/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt2l/land_ice_segments/bias_correction/fpb_n_corr", "gt2l/land_ice_segments/bias_correction/med_r_fit", "gt2l/land_ice_segments/bias_correction/tx_mean_corr", "gt2l/land_ice_segments/bias_correction/tx_med_corr", "gt2l/land_ice_segments/dem/dem_flag", "gt2l/land_ice_segments/dem/dem_h", "gt2l/land_ice_segments/dem/geoid_free2mean", "gt2l/land_ice_segments/dem/geoid_h", "gt2l/land_ice_segments/fit_statistics/dh_fit_dx", "gt2l/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt2l/land_ice_segments/fit_statistics/dh_fit_dy", "gt2l/land_ice_segments/fit_statistics/h_expected_rms", "gt2l/land_ice_segments/fit_statistics/h_mean", "gt2l/land_ice_segments/fit_statistics/h_rms_misfit", "gt2l/land_ice_segments/fit_statistics/h_robust_sprd", "gt2l/land_ice_segments/fit_statistics/n_fit_photons", "gt2l/land_ice_segments/fit_statistics/n_seg_pulses", "gt2l/land_ice_segments/fit_statistics/sigma_h_mean", "gt2l/land_ice_segments/fit_statistics/signal_selection_source", "gt2l/land_ice_segments/fit_statistics/signal_selection_source_status", "gt2l/land_ice_segments/fit_statistics/snr", "gt2l/land_ice_segments/fit_statistics/snr_significance", "gt2l/land_ice_segments/fit_statistics/w_surface_window_final", "gt2l/land_ice_segments/geophysical/bckgrd", "gt2l/land_ice_segments/geophysical/bsnow_conf", "gt2l/land_ice_segments/geophysical/bsnow_h", "gt2l/land_ice_segments/geophysical/bsnow_od", "gt2l/land_ice_segments/geophysical/cloud_flg_asr", "gt2l/land_ice_segments/geophysical/cloud_flg_atm", "gt2l/land_ice_segments/geophysical/dac", "gt2l/land_ice_segments/geophysical/e_bckgrd", "gt2l/land_ice_segments/geophysical/layer_flag", "gt2l/land_ice_segments/geophysical/msw_flag", "gt2l/land_ice_segments/geophysical/neutat_delay_total", "gt2l/land_ice_segments/geophysical/r_eff", "gt2l/land_ice_segments/geophysical/solar_azimuth", "gt2l/land_ice_segments/geophysical/solar_elevation", "gt2l/land_ice_segments/geophysical/tide_earth", "gt2l/land_ice_segments/geophysical/tide_earth_free2mean", "gt2l/land_ice_segments/geophysical/tide_equilibrium", "gt2l/land_ice_segments/geophysical/tide_load", "gt2l/land_ice_segments/geophysical/tide_ocean", "gt2l/land_ice_segments/geophysical/tide_pole", "gt2l/land_ice_segments/ground_track/ref_azimuth", "gt2l/land_ice_segments/ground_track/ref_coelv", "gt2l/land_ice_segments/ground_track/seg_azimuth", "gt2l/land_ice_segments/ground_track/sigma_geo_at", "gt2l/land_ice_segments/ground_track/sigma_geo_r", "gt2l/land_ice_segments/ground_track/sigma_geo_xt", "gt2l/land_ice_segments/ground_track/x_atc", "gt2l/land_ice_segments/ground_track/y_atc", "gt2l/residual_histogram/bckgrd_per_m", "gt2l/residual_histogram/bin_top_h", "gt2l/residual_histogram/count", "gt2l/residual_histogram/delta_time", 
"gt2l/residual_histogram/ds_segment_id", "gt2l/residual_histogram/lat_mean", "gt2l/residual_histogram/lon_mean", "gt2l/residual_histogram/pulse_count", "gt2l/residual_histogram/segment_id_list", "gt2l/residual_histogram/x_atc_mean", "gt2l/segment_quality/delta_time", "gt2l/segment_quality/record_number", "gt2l/segment_quality/reference_pt_lat", "gt2l/segment_quality/reference_pt_lon", "gt2l/segment_quality/segment_id", "gt2l/segment_quality/signal_selection_source", "gt2l/segment_quality/signal_selection_status/signal_selection_status_all", "gt2l/segment_quality/signal_selection_status/signal_selection_status_backup", "gt2l/segment_quality/signal_selection_status/signal_selection_status_confident", "gt2r/land_ice_segments/atl06_quality_summary", "gt2r/land_ice_segments/delta_time", "gt2r/land_ice_segments/h_li", "gt2r/land_ice_segments/h_li_sigma", "gt2r/land_ice_segments/latitude", "gt2r/land_ice_segments/longitude", "gt2r/land_ice_segments/segment_id", "gt2r/land_ice_segments/sigma_geo_h", "gt2r/land_ice_segments/bias_correction/fpb_mean_corr", "gt2r/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt2r/land_ice_segments/bias_correction/fpb_med_corr", "gt2r/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt2r/land_ice_segments/bias_correction/fpb_n_corr", "gt2r/land_ice_segments/bias_correction/med_r_fit", "gt2r/land_ice_segments/bias_correction/tx_mean_corr", "gt2r/land_ice_segments/bias_correction/tx_med_corr", "gt2r/land_ice_segments/dem/dem_flag", "gt2r/land_ice_segments/dem/dem_h", "gt2r/land_ice_segments/dem/geoid_free2mean", "gt2r/land_ice_segments/dem/geoid_h", "gt2r/land_ice_segments/fit_statistics/dh_fit_dx", "gt2r/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt2r/land_ice_segments/fit_statistics/dh_fit_dy", "gt2r/land_ice_segments/fit_statistics/h_expected_rms", "gt2r/land_ice_segments/fit_statistics/h_mean", "gt2r/land_ice_segments/fit_statistics/h_rms_misfit", "gt2r/land_ice_segments/fit_statistics/h_robust_sprd", "gt2r/land_ice_segments/fit_statistics/n_fit_photons", "gt2r/land_ice_segments/fit_statistics/n_seg_pulses", "gt2r/land_ice_segments/fit_statistics/sigma_h_mean", "gt2r/land_ice_segments/fit_statistics/signal_selection_source", "gt2r/land_ice_segments/fit_statistics/signal_selection_source_status", "gt2r/land_ice_segments/fit_statistics/snr", "gt2r/land_ice_segments/fit_statistics/snr_significance", "gt2r/land_ice_segments/fit_statistics/w_surface_window_final", "gt2r/land_ice_segments/geophysical/bckgrd", "gt2r/land_ice_segments/geophysical/bsnow_conf", "gt2r/land_ice_segments/geophysical/bsnow_h", "gt2r/land_ice_segments/geophysical/bsnow_od", "gt2r/land_ice_segments/geophysical/cloud_flg_asr", "gt2r/land_ice_segments/geophysical/cloud_flg_atm", "gt2r/land_ice_segments/geophysical/dac", "gt2r/land_ice_segments/geophysical/e_bckgrd", "gt2r/land_ice_segments/geophysical/layer_flag", "gt2r/land_ice_segments/geophysical/msw_flag", "gt2r/land_ice_segments/geophysical/neutat_delay_total", "gt2r/land_ice_segments/geophysical/r_eff", "gt2r/land_ice_segments/geophysical/solar_azimuth", "gt2r/land_ice_segments/geophysical/solar_elevation", "gt2r/land_ice_segments/geophysical/tide_earth", "gt2r/land_ice_segments/geophysical/tide_earth_free2mean", "gt2r/land_ice_segments/geophysical/tide_equilibrium", "gt2r/land_ice_segments/geophysical/tide_load", "gt2r/land_ice_segments/geophysical/tide_ocean", "gt2r/land_ice_segments/geophysical/tide_pole", "gt2r/land_ice_segments/ground_track/ref_azimuth", "gt2r/land_ice_segments/ground_track/ref_coelv", 
"gt2r/land_ice_segments/ground_track/seg_azimuth", "gt2r/land_ice_segments/ground_track/sigma_geo_at", "gt2r/land_ice_segments/ground_track/sigma_geo_r", "gt2r/land_ice_segments/ground_track/sigma_geo_xt", "gt2r/land_ice_segments/ground_track/x_atc", "gt2r/land_ice_segments/ground_track/y_atc", "gt2r/residual_histogram/bckgrd_per_m", "gt2r/residual_histogram/bin_top_h", "gt2r/residual_histogram/count", "gt2r/residual_histogram/delta_time", "gt2r/residual_histogram/ds_segment_id", "gt2r/residual_histogram/lat_mean", "gt2r/residual_histogram/lon_mean", "gt2r/residual_histogram/pulse_count", "gt2r/residual_histogram/segment_id_list", "gt2r/residual_histogram/x_atc_mean", "gt2r/segment_quality/delta_time", "gt2r/segment_quality/record_number", "gt2r/segment_quality/reference_pt_lat", "gt2r/segment_quality/reference_pt_lon", "gt2r/segment_quality/segment_id", "gt2r/segment_quality/signal_selection_source", "gt2r/segment_quality/signal_selection_status/signal_selection_status_all", "gt2r/segment_quality/signal_selection_status/signal_selection_status_backup", "gt2r/segment_quality/signal_selection_status/signal_selection_status_confident", "gt3l/land_ice_segments/atl06_quality_summary", "gt3l/land_ice_segments/delta_time", "gt3l/land_ice_segments/h_li", "gt3l/land_ice_segments/h_li_sigma", "gt3l/land_ice_segments/latitude", "gt3l/land_ice_segments/longitude", "gt3l/land_ice_segments/segment_id", "gt3l/land_ice_segments/sigma_geo_h", "gt3l/land_ice_segments/bias_correction/fpb_mean_corr", "gt3l/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt3l/land_ice_segments/bias_correction/fpb_med_corr", "gt3l/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt3l/land_ice_segments/bias_correction/fpb_n_corr", "gt3l/land_ice_segments/bias_correction/med_r_fit", "gt3l/land_ice_segments/bias_correction/tx_mean_corr", "gt3l/land_ice_segments/bias_correction/tx_med_corr", "gt3l/land_ice_segments/dem/dem_flag", "gt3l/land_ice_segments/dem/dem_h", "gt3l/land_ice_segments/dem/geoid_free2mean", "gt3l/land_ice_segments/dem/geoid_h", "gt3l/land_ice_segments/fit_statistics/dh_fit_dx", "gt3l/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt3l/land_ice_segments/fit_statistics/dh_fit_dy", "gt3l/land_ice_segments/fit_statistics/h_expected_rms", "gt3l/land_ice_segments/fit_statistics/h_mean", "gt3l/land_ice_segments/fit_statistics/h_rms_misfit", "gt3l/land_ice_segments/fit_statistics/h_robust_sprd", "gt3l/land_ice_segments/fit_statistics/n_fit_photons", "gt3l/land_ice_segments/fit_statistics/n_seg_pulses", "gt3l/land_ice_segments/fit_statistics/sigma_h_mean", "gt3l/land_ice_segments/fit_statistics/signal_selection_source", "gt3l/land_ice_segments/fit_statistics/signal_selection_source_status", "gt3l/land_ice_segments/fit_statistics/snr", "gt3l/land_ice_segments/fit_statistics/snr_significance", "gt3l/land_ice_segments/fit_statistics/w_surface_window_final", "gt3l/land_ice_segments/geophysical/bckgrd", "gt3l/land_ice_segments/geophysical/bsnow_conf", "gt3l/land_ice_segments/geophysical/bsnow_h", "gt3l/land_ice_segments/geophysical/bsnow_od", "gt3l/land_ice_segments/geophysical/cloud_flg_asr", "gt3l/land_ice_segments/geophysical/cloud_flg_atm", "gt3l/land_ice_segments/geophysical/dac", "gt3l/land_ice_segments/geophysical/e_bckgrd", "gt3l/land_ice_segments/geophysical/layer_flag", "gt3l/land_ice_segments/geophysical/msw_flag", "gt3l/land_ice_segments/geophysical/neutat_delay_total", "gt3l/land_ice_segments/geophysical/r_eff", "gt3l/land_ice_segments/geophysical/solar_azimuth", 
"gt3l/land_ice_segments/geophysical/solar_elevation", "gt3l/land_ice_segments/geophysical/tide_earth", "gt3l/land_ice_segments/geophysical/tide_earth_free2mean", "gt3l/land_ice_segments/geophysical/tide_equilibrium", "gt3l/land_ice_segments/geophysical/tide_load", "gt3l/land_ice_segments/geophysical/tide_ocean", "gt3l/land_ice_segments/geophysical/tide_pole", "gt3l/land_ice_segments/ground_track/ref_azimuth", "gt3l/land_ice_segments/ground_track/ref_coelv", "gt3l/land_ice_segments/ground_track/seg_azimuth", "gt3l/land_ice_segments/ground_track/sigma_geo_at", "gt3l/land_ice_segments/ground_track/sigma_geo_r", "gt3l/land_ice_segments/ground_track/sigma_geo_xt", "gt3l/land_ice_segments/ground_track/x_atc", "gt3l/land_ice_segments/ground_track/y_atc", "gt3l/residual_histogram/bckgrd_per_m", "gt3l/residual_histogram/bin_top_h", "gt3l/residual_histogram/count", "gt3l/residual_histogram/delta_time", "gt3l/residual_histogram/ds_segment_id", "gt3l/residual_histogram/lat_mean", "gt3l/residual_histogram/lon_mean", "gt3l/residual_histogram/pulse_count", "gt3l/residual_histogram/segment_id_list", "gt3l/residual_histogram/x_atc_mean", "gt3l/segment_quality/delta_time", "gt3l/segment_quality/record_number", "gt3l/segment_quality/reference_pt_lat", "gt3l/segment_quality/reference_pt_lon", "gt3l/segment_quality/segment_id", "gt3l/segment_quality/signal_selection_source", "gt3l/segment_quality/signal_selection_status/signal_selection_status_all", "gt3l/segment_quality/signal_selection_status/signal_selection_status_backup", "gt3l/segment_quality/signal_selection_status/signal_selection_status_confident", "gt3r/land_ice_segments/atl06_quality_summary", "gt3r/land_ice_segments/delta_time", "gt3r/land_ice_segments/h_li", "gt3r/land_ice_segments/h_li_sigma", "gt3r/land_ice_segments/latitude", "gt3r/land_ice_segments/longitude", "gt3r/land_ice_segments/segment_id", "gt3r/land_ice_segments/sigma_geo_h", "gt3r/land_ice_segments/bias_correction/fpb_mean_corr", "gt3r/land_ice_segments/bias_correction/fpb_mean_corr_sigma", "gt3r/land_ice_segments/bias_correction/fpb_med_corr", "gt3r/land_ice_segments/bias_correction/fpb_med_corr_sigma", "gt3r/land_ice_segments/bias_correction/fpb_n_corr", "gt3r/land_ice_segments/bias_correction/med_r_fit", "gt3r/land_ice_segments/bias_correction/tx_mean_corr", "gt3r/land_ice_segments/bias_correction/tx_med_corr", "gt3r/land_ice_segments/dem/dem_flag", "gt3r/land_ice_segments/dem/dem_h", "gt3r/land_ice_segments/dem/geoid_free2mean", "gt3r/land_ice_segments/dem/geoid_h", "gt3r/land_ice_segments/fit_statistics/dh_fit_dx", "gt3r/land_ice_segments/fit_statistics/dh_fit_dx_sigma", "gt3r/land_ice_segments/fit_statistics/dh_fit_dy", "gt3r/land_ice_segments/fit_statistics/h_expected_rms", "gt3r/land_ice_segments/fit_statistics/h_mean", "gt3r/land_ice_segments/fit_statistics/h_rms_misfit", "gt3r/land_ice_segments/fit_statistics/h_robust_sprd", "gt3r/land_ice_segments/fit_statistics/n_fit_photons", "gt3r/land_ice_segments/fit_statistics/n_seg_pulses", "gt3r/land_ice_segments/fit_statistics/sigma_h_mean", "gt3r/land_ice_segments/fit_statistics/signal_selection_source", "gt3r/land_ice_segments/fit_statistics/signal_selection_source_status", "gt3r/land_ice_segments/fit_statistics/snr", "gt3r/land_ice_segments/fit_statistics/snr_significance", "gt3r/land_ice_segments/fit_statistics/w_surface_window_final", "gt3r/land_ice_segments/geophysical/bckgrd", "gt3r/land_ice_segments/geophysical/bsnow_conf", "gt3r/land_ice_segments/geophysical/bsnow_h", "gt3r/land_ice_segments/geophysical/bsnow_od", 
"gt3r/land_ice_segments/geophysical/cloud_flg_asr", "gt3r/land_ice_segments/geophysical/cloud_flg_atm", "gt3r/land_ice_segments/geophysical/dac", "gt3r/land_ice_segments/geophysical/e_bckgrd", "gt3r/land_ice_segments/geophysical/layer_flag", "gt3r/land_ice_segments/geophysical/msw_flag", "gt3r/land_ice_segments/geophysical/neutat_delay_total", "gt3r/land_ice_segments/geophysical/r_eff", "gt3r/land_ice_segments/geophysical/solar_azimuth", "gt3r/land_ice_segments/geophysical/solar_elevation", "gt3r/land_ice_segments/geophysical/tide_earth", "gt3r/land_ice_segments/geophysical/tide_earth_free2mean", "gt3r/land_ice_segments/geophysical/tide_equilibrium", "gt3r/land_ice_segments/geophysical/tide_load", "gt3r/land_ice_segments/geophysical/tide_ocean", "gt3r/land_ice_segments/geophysical/tide_pole", "gt3r/land_ice_segments/ground_track/ref_azimuth", "gt3r/land_ice_segments/ground_track/ref_coelv", "gt3r/land_ice_segments/ground_track/seg_azimuth", "gt3r/land_ice_segments/ground_track/sigma_geo_at", "gt3r/land_ice_segments/ground_track/sigma_geo_r", "gt3r/land_ice_segments/ground_track/sigma_geo_xt", "gt3r/land_ice_segments/ground_track/x_atc", "gt3r/land_ice_segments/ground_track/y_atc", "gt3r/residual_histogram/bckgrd_per_m", "gt3r/residual_histogram/bin_top_h", "gt3r/residual_histogram/count", "gt3r/residual_histogram/delta_time", "gt3r/residual_histogram/ds_segment_id", "gt3r/residual_histogram/lat_mean", "gt3r/residual_histogram/lon_mean", "gt3r/residual_histogram/pulse_count", "gt3r/residual_histogram/segment_id_list", "gt3r/residual_histogram/x_atc_mean", "gt3r/segment_quality/delta_time", "gt3r/segment_quality/record_number", "gt3r/segment_quality/reference_pt_lat", "gt3r/segment_quality/reference_pt_lon", "gt3r/segment_quality/segment_id", "gt3r/segment_quality/signal_selection_source", "gt3r/segment_quality/signal_selection_status/signal_selection_status_all", "gt3r/segment_quality/signal_selection_status/signal_selection_status_backup", "gt3r/segment_quality/signal_selection_status/signal_selection_status_confident", "orbit_info/bounding_polygon_lat1", "orbit_info/bounding_polygon_lon1", "orbit_info/crossing_time", "orbit_info/cycle_number", "orbit_info/lan", "orbit_info/orbit_number", "orbit_info/rgt", "orbit_info/sc_orient", "orbit_info/sc_orient_time", "quality_assessment/qa_granule_fail_reason", "quality_assessment/qa_granule_pass_fail", "quality_assessment/gt1l/delta_time", "quality_assessment/gt1l/lat_mean", "quality_assessment/gt1l/lon_mean", "quality_assessment/gt1l/signal_selection_source_fraction_0", "quality_assessment/gt1l/signal_selection_source_fraction_1", "quality_assessment/gt1l/signal_selection_source_fraction_2", "quality_assessment/gt1l/signal_selection_source_fraction_3", "quality_assessment/gt1r/delta_time", "quality_assessment/gt1r/lat_mean", "quality_assessment/gt1r/lon_mean", "quality_assessment/gt1r/signal_selection_source_fraction_0", "quality_assessment/gt1r/signal_selection_source_fraction_1", "quality_assessment/gt1r/signal_selection_source_fraction_2", "quality_assessment/gt1r/signal_selection_source_fraction_3", "quality_assessment/gt2l/delta_time", "quality_assessment/gt2l/lat_mean", "quality_assessment/gt2l/lon_mean", "quality_assessment/gt2l/signal_selection_source_fraction_0", "quality_assessment/gt2l/signal_selection_source_fraction_1", "quality_assessment/gt2l/signal_selection_source_fraction_2", "quality_assessment/gt2l/signal_selection_source_fraction_3", "quality_assessment/gt2r/delta_time", "quality_assessment/gt2r/lat_mean", 
"quality_assessment/gt2r/lon_mean", "quality_assessment/gt2r/signal_selection_source_fraction_0", "quality_assessment/gt2r/signal_selection_source_fraction_1", "quality_assessment/gt2r/signal_selection_source_fraction_2", "quality_assessment/gt2r/signal_selection_source_fraction_3", "quality_assessment/gt3l/delta_time", "quality_assessment/gt3l/lat_mean", "quality_assessment/gt3l/lon_mean", "quality_assessment/gt3l/signal_selection_source_fraction_0", "quality_assessment/gt3l/signal_selection_source_fraction_1", "quality_assessment/gt3l/signal_selection_source_fraction_2", "quality_assessment/gt3l/signal_selection_source_fraction_3", "quality_assessment/gt3r/delta_time", "quality_assessment/gt3r/lat_mean", "quality_assessment/gt3r/lon_mean", "quality_assessment/gt3r/signal_selection_source_fraction_0", "quality_assessment/gt3r/signal_selection_source_fraction_1", "quality_assessment/gt3r/signal_selection_source_fraction_2", "quality_assessment/gt3r/signal_selection_source_fraction_3"]} \ No newline at end of file diff --git a/icepyx/tests/conftest.py b/icepyx/tests/conftest.py index fca31847a..9ce8e4081 100644 --- a/icepyx/tests/conftest.py +++ b/icepyx/tests/conftest.py @@ -2,6 +2,7 @@ import pytest from unittest import mock + # PURPOSE: mock environmental variables @pytest.fixture(scope="session", autouse=True) def mock_settings_env_vars(): diff --git a/icepyx/tests/test_APIformatting.py b/icepyx/tests/test_APIformatting.py index 83e88a131..d934a97dd 100644 --- a/icepyx/tests/test_APIformatting.py +++ b/icepyx/tests/test_APIformatting.py @@ -1,15 +1,16 @@ import pytest -import warnings import datetime as dt -from shapely.geometry import Polygon import icepyx.core.APIformatting as apifmt -# DevNote: is this a situation where you'd ideally build a test class, since you're just repeating the -# test function with different inputs? Especially for the _fmt_spaital, where there's >2 tests? +# DevNote: is this a situation where you'd ideally build a test class, +# since you're just repeating the test function with different inputs? +# Especially for the _fmt_spaital, where there's >2 tests? + +# CMR temporal and spatial formats --> what's the best way to compare formatted text? +# character by character comparison of strings? -# CMR temporal and spatial formats --> what's the best way to compare formatted text? character by character comparison of strings? 
diff --git a/icepyx/tests/test_Earthdata.py b/icepyx/tests/test_Earthdata.py
index 8ad883e6a..60b92f621 100644
--- a/icepyx/tests/test_Earthdata.py
+++ b/icepyx/tests/test_Earthdata.py
@@ -8,6 +8,7 @@
 import shutil
 import warnings
 
+
 # PURPOSE: test different authentication methods
 @pytest.fixture(scope="module", autouse=True)
 def setup_earthdata():
@@ -65,7 +66,6 @@ def earthdata_login(uid=None, pwd=None, email=None, s3token=False) -> bool:
         url = "urs.earthdata.nasa.gov"
         mock_uid, _, mock_pwd = netrc.netrc(netrc).authenticators(url)
     except:
-
         mock_uid = os.environ.get("EARTHDATA_USERNAME")
         mock_pwd = os.environ.get("EARTHDATA_PASSWORD")
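# The conftest.py and test_Earthdata.py changes above both lean on mocked
# credentials. A self-contained sketch of that pattern (the env var names
# match this diff; the values are placeholders, not real credentials):
import os
from unittest import mock

import pytest


@pytest.fixture(autouse=True)
def mock_earthdata_env():
    # mock.patch.dict restores the original environment after each test
    with mock.patch.dict(
        os.environ,
        {"EARTHDATA_USERNAME": "icepyx_devteam", "EARTHDATA_PASSWORD": "fake_pwd"},
    ):
        yield


def test_env_is_mocked():
    assert os.environ.get("EARTHDATA_USERNAME") == "icepyx_devteam"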
diff --git a/icepyx/tests/test_auth.py b/icepyx/tests/test_auth.py
index 6ac77c864..c8f8e8f5d 100644
--- a/icepyx/tests/test_auth.py
+++ b/icepyx/tests/test_auth.py
@@ -4,34 +4,33 @@
 import earthaccess
 
 from icepyx.core.auth import EarthdataAuthMixin
+from icepyx.core.exceptions import DeprecationError
 
 
 @pytest.fixture()
 def auth_instance():
-    '''
+    """
    An EarthdataAuthMixin object for each of the tests. Default scope is function
    level, so a new instance should be created for each of the tests.
-    '''
+    """
    return EarthdataAuthMixin()
 
+
 # Test that .session creates a session
 def test_get_session(auth_instance):
     assert isinstance(auth_instance.session, requests.sessions.Session)
 
+
 # Test that .s3login_credentials creates a dict with the correct keys
 def test_get_s3login_credentials(auth_instance):
     assert isinstance(auth_instance.s3login_credentials, dict)
-    expected_keys = set(['accessKeyId', 'secretAccessKey', 'sessionToken',
-                         'expiration'])
+    expected_keys = set(
+        ["accessKeyId", "secretAccessKey", "sessionToken", "expiration"]
+    )
     assert set(auth_instance.s3login_credentials.keys()) == expected_keys
 
+
 # Test that earthdata_login generates an auth object
 def test_login_function(auth_instance):
-
     auth_instance.earthdata_login()
     assert isinstance(auth_instance.auth, earthaccess.auth.Auth)
     assert auth_instance.auth.authenticated
-
-# Test that earthdata_login raises a warning if email is provided
-def test_depreciation_warning(auth_instance):
-    with pytest.warns(DeprecationWarning):
-        auth_instance.earthdata_login(email='me@gmail.com')
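# Why the docstring above stresses the default (function) scope: each test
# receives a freshly built object, so state cannot leak between tests. A
# generic sketch of the difference (no icepyx objects needed):
import pytest


class Counter:
    def __init__(self):
        self.n = 0


@pytest.fixture()  # scope="function" is the default
def counter():
    return Counter()


def test_first(counter):
    counter.n += 1
    assert counter.n == 1


def test_second(counter):
    # a brand-new Counter; with scope="module" this would already see n == 1
    assert counter.n == 0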
diff --git a/icepyx/tests/test_behind_NSIDC_API_login.py b/icepyx/tests/test_behind_NSIDC_API_login.py
index 3e7b645d7..47d0a10d2 100644
--- a/icepyx/tests/test_behind_NSIDC_API_login.py
+++ b/icepyx/tests/test_behind_NSIDC_API_login.py
@@ -1,18 +1,22 @@
-import icepyx as ipx
+import json
 import os
 import pytest
-import warnings
+
+import icepyx as ipx
+import icepyx.core.is2ref as is2ref
 
 
 # Misc notes and needed tests
-# test avail data and subsetting success for each input type (kml, shp, list of coords, bbox)
-# check that downloaded data is subset? or is this an NSIDC level test so long as we verify the right info is submitted?
+# test avail data and subsetting success for each input type
+# (kml, shp, list of coords, bbox)
+# check that downloaded data is subset?
+# or is this an NSIDC level test so long as we verify the right info is submitted?
 
 
 @pytest.fixture(scope="module")
 def reg():
     live_reg = ipx.Query(
-        "ATL06", [-55, 68, -48, 71], ["2019-02-22", "2019-02-28"], version="005"
+        "ATL06", [-55, 68, -48, 71], ["2019-02-22", "2019-02-28"], version="006"
     )
     yield live_reg
     del live_reg
@@ -20,19 +24,18 @@ def reg():
 
 @pytest.fixture(scope="module")
 def session(reg):
+    os.environ = {"EARTHDATA_USERNAME": "icepyx_devteam"}
     ed_obj = reg.session
     yield ed_obj
     ed_obj.close()
 
 
 ########## is2ref module ##########
-import icepyx.core.is2ref as is2ref
-import json
 
 
 def test_get_custom_options_output(session):
-    obs = is2ref._get_custom_options(session, "ATL06", "005")
-    with open("./icepyx/tests/ATL06v05_options.json") as exp_json:
+    obs = is2ref._get_custom_options(session, "ATL06", "006")
+    with open("./icepyx/tests/ATL06v06_options.json") as exp_json:
         exp = json.load(exp_json)
         assert all(keys in obs.keys() for keys in exp.keys())
         assert all(obs[key] == exp[key] for key in exp.keys())
diff --git a/icepyx/tests/test_granules.py b/icepyx/tests/test_granules.py
index f8d07be19..e8be99f39 100644
--- a/icepyx/tests/test_granules.py
+++ b/icepyx/tests/test_granules.py
@@ -1,8 +1,6 @@
 import pytest
 import re
-import requests
 import responses
-import warnings
 
 import icepyx as ipx
 from icepyx.core import granules as granules
@@ -22,7 +20,8 @@
 # check that agent key is added in event of no subsetting
 
-# add test for granules info for ATL11 and ATL13 (and all datasets? or at least ones that don't have the same filename structure)
+# add test for granules info for ATL11 and ATL13
+# (and all datasets? or at least ones that don't have the same filename structure)
 # this example failed in version 0.6.4, leading to a fix in 0.6.5
 # short_name = 'ATL11'
 # spatial_extent = [-38.65,72.5,-38.40,72.7]
@@ -31,7 +30,8 @@
 # region_a.avail_granules(ids=True)
 
 
-# DevNote: clearly there's a better way that doesn't make the function so long... what is it?
+# DevNote: clearly there's a better way that doesn't make the function so long...
+# what is it?
 def test_granules_info():
     # reg_a = ipx.Query('ATL06', [-55, 68, -48, 71], ['2019-02-20','2019-02-24'], version='3')
     # granules = reg_a.granules.avail
@@ -609,15 +609,15 @@ def test_correct_granule_list_returned():
         "ATL06",
         [-55, 68, -48, 71],
         ["2019-02-20", "2019-02-28"],
-        version="5",
+        version="6",
     )
 
     (obs_grans,) = reg_a.avail_granules(ids=True)
 
     exp_grans = [
-        "ATL06_20190221121851_08410203_005_01.h5",
-        "ATL06_20190222010344_08490205_005_01.h5",
-        "ATL06_20190225121032_09020203_005_01.h5",
-        "ATL06_20190226005526_09100205_005_01.h5",
+        "ATL06_20190221121851_08410203_006_02.h5",
+        "ATL06_20190222010344_08490205_006_02.h5",
+        "ATL06_20190225121032_09020203_006_02.h5",
+        "ATL06_20190226005526_09100205_006_02.h5",
    ]
 
     assert set(obs_grans) == set(exp_grans)
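# One answer to the DevNote above about the overly long function: keep bulky
# expected values in a JSON file alongside the tests, as this diff already
# does for ATL06v06_options.json, and load them at test time. The file and
# key names below are illustrative only:
import json
from pathlib import Path


def load_expected(name):
    return json.loads((Path(__file__).parent / name).read_text())


# usage sketch inside a test:
# exp_grans = load_expected("ATL06v06_granules.json")["granule_ids"]
# assert set(obs_grans) == set(exp_grans)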
diff --git a/icepyx/tests/test_is2ref.py b/icepyx/tests/test_is2ref.py
index 8d50568fe..a07a6b948 100644
--- a/icepyx/tests/test_is2ref.py
+++ b/icepyx/tests/test_is2ref.py
@@ -1,5 +1,4 @@
 import pytest
-import warnings
 
 import icepyx.core.is2ref as is2ref
 
@@ -8,14 +7,14 @@
 def test_num_product():
     dsnum = 6
-    ermsg = "Please enter a product string"
+    ermsg = "A valid product string was not provided. Check user input, if given, or file metadata."
     with pytest.raises(TypeError, match=ermsg):
         is2ref._validate_product(dsnum)
 
 
 def test_bad_product():
     wrngds = "atl-6"
-    ermsg = "Please enter a valid product"
+    ermsg = "A valid product string was not provided. Check user input, if given, or file metadata."
     with pytest.raises(AssertionError, match=ermsg):
         is2ref._validate_product(wrngds)
 
@@ -556,12 +555,12 @@ def test_unsupported_default_varlist():
 def test_gt2spot_sc_orient_1():
     # gt1l
     obs = is2ref.gt2spot("gt1l", 1)
-    expected = 2
+    expected = 6
     assert obs == expected
 
     # gt1r
     obs = is2ref.gt2spot("gt1r", 1)
-    expected = 1
+    expected = 5
     assert obs == expected
 
     # gt2l
@@ -576,24 +575,24 @@ def test_gt2spot_sc_orient_1():
 
     # gt3l
     obs = is2ref.gt2spot("gt3l", 1)
-    expected = 6
+    expected = 2
     assert obs == expected
 
     # gt3r
     obs = is2ref.gt2spot("gt3r", 1)
-    expected = 5
+    expected = 1
     assert obs == expected
 
 
 def test_gt2spot_sc_orient_0():
     # gt1l
     obs = is2ref.gt2spot("gt1l", 0)
-    expected = 5
+    expected = 1
     assert obs == expected
 
     # gt1r
     obs = is2ref.gt2spot("gt1r", 0)
-    expected = 6
+    expected = 2
     assert obs == expected
 
     # gt2l
@@ -608,10 +607,10 @@ def test_gt2spot_sc_orient_0():
 
     # gt3l
     obs = is2ref.gt2spot("gt3l", 0)
-    expected = 1
+    expected = 5
     assert obs == expected
 
     # gt3r
     obs = is2ref.gt2spot("gt3r", 0)
-    expected = 2
+    expected = 6
     assert obs == expected
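# The expected-value swaps above pin down the (ground track, sc_orient) ->
# spot mapping. A table-driven restatement consistent with the values
# asserted in this diff; the gt2l/gt2r entries are inferred by symmetry
# (those hunks are unchanged here) and should be double-checked:
SPOT_FROM_TRACK = {
    1: {"gt1l": 6, "gt1r": 5, "gt2l": 4, "gt2r": 3, "gt3l": 2, "gt3r": 1},
    0: {"gt1l": 1, "gt1r": 2, "gt2l": 3, "gt2r": 4, "gt3l": 5, "gt3r": 6},
}


def gt2spot_sketch(gt, sc_orient):
    return SPOT_FROM_TRACK[sc_orient][gt]


assert gt2spot_sketch("gt1l", 1) == 6
assert gt2spot_sketch("gt3r", 0) == 6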
"argo" + exp_type = ipx.quest.dataset_scripts.argo.Argo + + obs = quest_instance.datasets + + assert type(obs) == dict + assert exp_key in obs.keys() + assert type(obs[exp_key]) == exp_type + assert set(quest_instance.datasets[exp_key].params) == set(params) + + +def test_add_multiple_datasets(quest_instance): + quest_instance.add_argo(params=["down_irradiance412", "temperature"]) + # print(quest_instance.datasets["argo"].params) + + quest_instance.add_icesat2(product="ATL06") + # print(quest_instance.datasets["icesat2"].product) + + exp_keys = ["argo", "icesat2"] + assert set(exp_keys) == set(quest_instance.datasets.keys()) + + +########## ALL DATASET METHODS TESTS ########## + + +# each of the query functions should be tested in their respective modules +def test_search_all(quest_instance): + quest_instance.add_argo(params=["down_irradiance412", "temperature"]) + quest_instance.add_icesat2(product="ATL06") + + # Search and test all datasets + quest_instance.search_all() + + +@pytest.mark.parametrize( + "kwargs", + [ + {"icesat2": {"IDs": True}}, + {"argo": {"presRange": "10,500"}}, + {"icesat2": {"IDs": True}, "argo": {"presRange": "10,500"}}, + ], +) +def test_search_all_kwargs(quest_instance, kwargs): + quest_instance.search_all(**kwargs) + + +# TESTS NOT IMPLEMENTED +# def test_download_all(): +# quest_instance.add_argo(params=["down_irradiance412", "temperature"]) +# quest_instance.add_icesat2(product="ATL06") + +# # this will require auth in some cases... +# quest_instance.download_all() + + +# @pytest.mark.parametrize( +# "kwargs", +# [ +# {"icesat2": {"verbose":True}}, +# {"argo":{"keep_existing":True}, +# {"icesat2":{"verbose":True}, "argo":{"keep_existing":True} +# ], +# ) +# def test_download_all_kwargs(quest_instance, kwargs): +# pass diff --git a/icepyx/tests/test_quest_argo.py b/icepyx/tests/test_quest_argo.py new file mode 100644 index 000000000..a6940fe7b --- /dev/null +++ b/icepyx/tests/test_quest_argo.py @@ -0,0 +1,247 @@ +import os + +import pytest +import re + +from icepyx.quest.quest import Quest + + +# create an Argo instance via quest (Argo is a submodule) +@pytest.fixture(scope="function") +def argo_quest_instance(): + def _argo_quest_instance(bounding_box, date_range): # aka "factories as fixtures" + my_quest = Quest(spatial_extent=bounding_box, date_range=date_range) + my_quest.add_argo() + my_argo = my_quest.datasets["argo"] + + return my_argo + + return _argo_quest_instance + + +# --------------------------------------------------- +# Test Formatting and Validation + + +def test_fmt_coordinates(argo_quest_instance): + reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"]) + obs = reg_a._fmt_coordinates() + + exp = "[[-143.0,30.0],[-143.0,37.0],[-154.0,37.0],[-154.0,30.0],[-143.0,30.0]]" + + assert obs == exp + + +def test_validate_parameters(argo_quest_instance): + reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"]) + + invalid_params = ["temp", "temperature_files"] + + ermsg = re.escape( + "Parameter '{0}' is not valid. 
Valid parameters are {1}".format( + "temp", reg_a._valid_params() + ) + ) + + with pytest.raises(AssertionError, match=ermsg): + reg_a._validate_parameters(invalid_params) + + +# --------------------------------------------------- +# Test Setters + + +def test_param_setter(argo_quest_instance): + reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"]) + + exp = ["temperature"] + assert reg_a.params == exp + + reg_a.params = ["temperature", "salinity"] + + exp = list(set(["temperature", "salinity"])) + assert reg_a.params == exp + + +def test_param_setter_invalid_inputs(argo_quest_instance): + reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"]) + + exp = ["temperature"] + assert reg_a.params == exp + + ermsg = re.escape( + "Parameter '{0}' is not valid. Valid parameters are {1}".format( + "temp", reg_a._valid_params() + ) + ) + + with pytest.raises(AssertionError, match=ermsg): + reg_a.params = ["temp", "salinity"] + + +def test_presRange_setter(argo_quest_instance): + reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"]) + + exp = None + assert reg_a.presRange == exp + + reg_a.presRange = "0.5,150" + + exp = "0.5,150" + assert reg_a.presRange == exp + + +def test_presRange_setter_invalid_inputs(argo_quest_instance): + reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"]) + + exp = None + assert reg_a.presRange == exp + + reg_a.presRange = ( + "0.5, sam" # it looks like the API will take a string with a space + ) + + # this setter doesn't currently have a validation check, so would need to search + obs_msg = reg_a.search_data() + + exp_msg = "Error: Unexpected response " + + assert obs_msg == exp_msg + + +# --------------------------------------------------- +# Test search_data + + +def test_search_data_available_profiles(argo_quest_instance): + reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"]) + obs_msg = reg_a.search_data() + + exp_msg = "19 valid profiles have been identified" + + assert obs_msg == exp_msg + + +def test_search_data_no_available_profiles(argo_quest_instance): + reg_a = argo_quest_instance([-55, 68, -48, 71], ["2019-02-20", "2019-02-28"]) + obs = reg_a.search_data() + + exp = ( + "Warning: Query returned no profiles\n" "Please try different search parameters" + ) + + assert obs == exp + + +# --------------------------------------------------- +# Test download and df + + +def test_download_parse_into_df(argo_quest_instance): + reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-13"]) + reg_a.download() # note: pressure is returned by default + + obs_cols = reg_a.argodata.columns + + exp_cols = [ + "temperature", + "temperature_argoqc", + "pressure", + "profile_id", + "lat", + "lon", + "date", + ] + + assert set(exp_cols) == set(obs_cols) + + assert len(reg_a.argodata) == 2948 + + +# approach for additional testing of df functions: create json files with profiles and store them in test suite +# then use those for the comparison (e.g. 
diff --git a/icepyx/tests/test_quest_argo.py b/icepyx/tests/test_quest_argo.py
new file mode 100644
index 000000000..a6940fe7b
--- /dev/null
+++ b/icepyx/tests/test_quest_argo.py
@@ -0,0 +1,247 @@
+import os
+
+import pytest
+import re
+
+from icepyx.quest.quest import Quest
+
+
+# create an Argo instance via quest (Argo is a submodule)
+@pytest.fixture(scope="function")
+def argo_quest_instance():
+    def _argo_quest_instance(bounding_box, date_range):  # aka "factories as fixtures"
+        my_quest = Quest(spatial_extent=bounding_box, date_range=date_range)
+        my_quest.add_argo()
+        my_argo = my_quest.datasets["argo"]
+
+        return my_argo
+
+    return _argo_quest_instance
+
+
+# ---------------------------------------------------
+# Test Formatting and Validation
+
+
+def test_fmt_coordinates(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"])
+    obs = reg_a._fmt_coordinates()
+
+    exp = "[[-143.0,30.0],[-143.0,37.0],[-154.0,37.0],[-154.0,30.0],[-143.0,30.0]]"
+
+    assert obs == exp
+
+
+def test_validate_parameters(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"])
+
+    invalid_params = ["temp", "temperature_files"]
+
+    ermsg = re.escape(
+        "Parameter '{0}' is not valid. Valid parameters are {1}".format(
+            "temp", reg_a._valid_params()
+        )
+    )
+
+    with pytest.raises(AssertionError, match=ermsg):
+        reg_a._validate_parameters(invalid_params)
+
+
+# ---------------------------------------------------
+# Test Setters
+
+
+def test_param_setter(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"])
+
+    exp = ["temperature"]
+    assert reg_a.params == exp
+
+    reg_a.params = ["temperature", "salinity"]
+
+    exp = list(set(["temperature", "salinity"]))
+    assert reg_a.params == exp
+
+
+def test_param_setter_invalid_inputs(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"])
+
+    exp = ["temperature"]
+    assert reg_a.params == exp
+
+    ermsg = re.escape(
+        "Parameter '{0}' is not valid. Valid parameters are {1}".format(
+            "temp", reg_a._valid_params()
+        )
+    )
+
+    with pytest.raises(AssertionError, match=ermsg):
+        reg_a.params = ["temp", "salinity"]
+
+
+def test_presRange_setter(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"])
+
+    exp = None
+    assert reg_a.presRange == exp
+
+    reg_a.presRange = "0.5,150"
+
+    exp = "0.5,150"
+    assert reg_a.presRange == exp
+
+
+def test_presRange_setter_invalid_inputs(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"])
+
+    exp = None
+    assert reg_a.presRange == exp
+
+    reg_a.presRange = (
+        "0.5, sam"  # it looks like the API will take a string with a space
+    )
+
+    # this setter doesn't currently have a validation check, so would need to search
+    obs_msg = reg_a.search_data()
+
+    exp_msg = "Error: Unexpected response "
+
+    assert obs_msg == exp_msg
+
+
+# ---------------------------------------------------
+# Test search_data
+
+
+def test_search_data_available_profiles(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"])
+    obs_msg = reg_a.search_data()
+
+    exp_msg = "19 valid profiles have been identified"
+
+    assert obs_msg == exp_msg
+
+
+def test_search_data_no_available_profiles(argo_quest_instance):
+    reg_a = argo_quest_instance([-55, 68, -48, 71], ["2019-02-20", "2019-02-28"])
+    obs = reg_a.search_data()
+
+    exp = (
+        "Warning: Query returned no profiles\n" "Please try different search parameters"
+    )
+
+    assert obs == exp
+
+
+# ---------------------------------------------------
+# Test download and df
+
+
+def test_download_parse_into_df(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-13"])
+    reg_a.download()  # note: pressure is returned by default
+
+    obs_cols = reg_a.argodata.columns
+
+    exp_cols = [
+        "temperature",
+        "temperature_argoqc",
+        "pressure",
+        "profile_id",
+        "lat",
+        "lon",
+        "date",
+    ]
+
+    assert set(exp_cols) == set(obs_cols)
+
+    assert len(reg_a.argodata) == 2948
+
+
+# approach for additional testing of df functions: create json files with profiles
+# and store them in the test suite, then use those for the comparison
+# (e.g. number of rows in df and json match)
+
+
+def test_save_df_to_csv(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-13"])
+    reg_a.download()  # note: pressure is returned by default
+
+    path = os.getcwd() + "test_file"
+    reg_a.save(path)
+
+    assert os.path.exists(path + "_argo.csv")
+    os.remove(path + "_argo.csv")
+
+
+def test_merge_df(argo_quest_instance):
+    reg_a = argo_quest_instance([-150, 30, -120, 60], ["2022-06-07", "2022-06-14"])
+    param_list = ["salinity", "temperature", "down_irradiance412"]
+
+    df = reg_a.download(params=param_list)
+
+    assert "down_irradiance412" in df.columns
+    assert "down_irradiance412_argoqc" in df.columns
+
+    df = reg_a.download(["doxy"], keep_existing=True)
+    assert "doxy" in df.columns
+    assert "doxy_argoqc" in df.columns
+    assert "down_irradiance412" in df.columns
+    assert "down_irradiance412_argoqc" in df.columns
+
+
+# ---------------------------------------------------
+# Test kwargs to replace params and presRange in search and download
+
+
+def test_replace_param_search(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"])
+
+    obs = reg_a.search_data(params=["doxy"])
+
+    exp = (
+        "Warning: Query returned no profiles\n" "Please try different search parameters"
+    )
+
+    assert obs == exp
+
+
+def test_replace_param_download(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-13"])
+    reg_a.download(params=["salinity"])  # note: pressure is returned by default
+
+    obs_cols = reg_a.argodata.columns
+
+    exp_cols = [
+        "salinity",
+        "salinity_argoqc",
+        "pressure",
+        "profile_id",
+        "lat",
+        "lon",
+        "date",
+    ]
+
+    assert set(exp_cols) == set(obs_cols)
+
+    assert len(reg_a.argodata) == 1942
+
+
+def test_replace_presRange_search(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-26"])
+    obs_msg = reg_a.search_data(presRange="100,600")
+
+    exp_msg = "19 valid profiles have been identified"
+
+    assert obs_msg == exp_msg
+
+
+def test_replace_presRange_download(argo_quest_instance):
+    reg_a = argo_quest_instance([-154, 30, -143, 37], ["2022-04-12", "2022-04-13"])
+    df = reg_a.download(params=["salinity"], presRange="0.2,180")
+
+    assert df["pressure"].min() >= 0.2
+    assert df["pressure"].max() <= 180
+    assert "salinity" in df.columns
+
+
+# second presRange test where the max pressure is higher, because only the
+# newly downloaded data was presRange-limited?
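# The "factories as fixtures" pattern used by argo_quest_instance above,
# reduced to a generic, self-contained sketch: the fixture returns a builder
# function so each test can construct instances with its own arguments.
import pytest


@pytest.fixture
def make_record():
    def _make_record(lat, lon):
        return {"lat": lat, "lon": lon}

    return _make_record


def test_make_record(make_record):
    assert make_record(30.0, -154.0) == {"lat": 30.0, "lon": -154.0}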
diff --git a/icepyx/tests/test_read.py b/icepyx/tests/test_read.py
index 018435968..67b29b598 100644
--- a/icepyx/tests/test_read.py
+++ b/icepyx/tests/test_read.py
@@ -15,13 +15,13 @@ def test_check_datasource_type():
     [
         ("./", "is2_local"),
         (
-            "s3://nsidc-cumulus-prod-protected/ATLAS/ATL03/004/2019/11/30/ATL03_20191130221008_09930503_004_01.h5",
+            """s3://nsidc-cumulus-prod-protected/ATLAS/
+            ATL03/006/2019/11/30/ATL03_20191130221008_09930503_006_01.h5""",
             "is2_s3",
         ),
     ],
 )
 def test_check_datasource(filepath, expect):
-
     source_type = read._check_datasource(filepath)
     assert source_type == expect
 
@@ -90,7 +90,6 @@ def test_validate_source_str_not_a_dir_or_file():
     ],
 )
 def test_check_run_fast_scandir(dir, fn_glob, expect):
-
     (subfolders, files) = read._run_fast_scandir(dir, fn_glob)
     assert (sorted(subfolders), sorted(files)) == expect
 
@@ -117,7 +116,8 @@ def test_get_track_type_str(
 )
 
 
-# Best way to test this may be by including a small sample file with the repo (which can be used for testing some of the catalog/read-in functions as well)
+# Best way to test this may be by including a small sample file with the repo
+# (which can be used for testing some of the catalog/read-in functions as well)
 # def test_invalid_filename_pattern_in_file():
 #     ermesg = "Your input filename does not match the specified pattern."
 #     default_pattern = Read("/path/to/valid/source/file")._filename_pattern
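# Note on the s3 path above: the triple-quoted string spans two source lines,
# so it embeds a newline and leading spaces and is no longer a usable object
# key. That is presumably harmless for _check_datasource, which appears to
# only need to recognize the "s3://" scheme, but the string should not be
# reused as a real path.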
"ATL06_20200703011618_01170810_006_01.h5", ], ), ], ) def test_files_in_latest_cycles(n, exp): files = [ - "ATL06_20190710071617_01860412_004_01.h5", - "ATL06_20190713182016_02390410_004_01.h5", - "ATL06_20200612151119_11920712_004_01.h5", - "ATL06_20200616021517_12450710_004_01.h5", - "ATL06_20200702014158_01020810_004_01.h5", - "ATL06_20200703011618_01170810_004_01.h5", + "ATL06_20190710071617_01860412_006_01.h5", + "ATL06_20190713182016_02390410_006_01.h5", + "ATL06_20200612151119_11920712_006_01.h5", + "ATL06_20200616021517_12450710_006_01.h5", + "ATL06_20200702014158_01020810_006_01.h5", + "ATL06_20200703011618_01170810_006_01.h5", ] cycles = [8, 7, 4] obs = vis.files_in_latest_n_cycles(files, cycles=cycles, n=n) @@ -51,18 +51,17 @@ def test_files_in_latest_cycles(n, exp): @pytest.mark.parametrize( "filename, expect", [ - ("ATL06_20190525202604_08790310_004_01.h5", [879, 3, "2019-05-25"]), - ("ATL06_20190614194425_11840310_004_01.h5", [1184, 3, "2019-06-14"]), - ("ATL07-02_20190624063616_13290301_004_01.h5", [1329, 3, "2019-06-24"]), - ("ATL07-02_20190602190916_10010301_004_01.h5", [1001, 3, "2019-06-02"]), - ("ATL10-02_20190611072656_11310301_004_01.h5", [1131, 3, "2019-06-11"]), - ("ATL10-02_20190731045538_05060401_004_01.h5", [506, 4, "2019-07-31"]), - ("ATL12_20190615023544_11890301_004_01.h5", [1189, 3, "2019-06-15"]), - ("ATL12_20190721170332_03610401_004_01.h5", [361, 4, "2019-07-21"]), + ("ATL06_20190525202604_08790310_006_01.h5", [879, 3, "2019-05-25"]), + ("ATL06_20190614194425_11840310_006_01.h5", [1184, 3, "2019-06-14"]), + ("ATL07-02_20190624063616_13290301_006_01.h5", [1329, 3, "2019-06-24"]), + ("ATL07-02_20190602190916_10010301_006_01.h5", [1001, 3, "2019-06-02"]), + ("ATL10-02_20190611072656_11310301_006_01.h5", [1131, 3, "2019-06-11"]), + ("ATL10-02_20190731045538_05060401_006_01.h5", [506, 4, "2019-07-31"]), + ("ATL12_20190615023544_11890301_006_01.h5", [1189, 3, "2019-06-15"]), + ("ATL12_20190721170332_03610401_006_01.h5", [361, 4, "2019-07-21"]), ], ) def test_gran_paras(filename, expect): - para_list = vis.gran_paras(filename) assert para_list == expect @@ -70,7 +69,7 @@ def test_gran_paras(filename, expect): # 2023-01-27: for the commented test below, r (in visualization line 444) is returning None even though I can see OA data there via a browser - +""" @pytest.mark.parametrize( "product, date_range, bbox, expect", [ @@ -112,3 +111,4 @@ def test_visualization_orbits(product, bbox, cycles, tracks, expect): data_size = region_viz.parallel_request_OA().size assert data_size == expect +"""