diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 3d7a5ec96..837054f54 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -91,29 +91,65 @@ jobs:
         with:
           files: artifacts/*/junit-report.xml

-      - name: Publish Coverage Report for Ubuntu
-        uses: 5monkeys/cobertura-action@master
-        if: ${{ github.event_name == 'pull_request' }}
+      - name: Produce the coverage report for Ubuntu
+        uses: insightsengineering/coverage-action@v2
         with:
+          # Path to the Cobertura XML report.
           path: artifacts/unit-test-results-python3.12-ubuntu-latest/coverage.xml
-          repo_token: ${{ secrets.GITHUB_TOKEN }}
-          pull_request_number: ${{ github.pull_request.number }}
-          minimum_coverage: 70
-          fail_below_threshold: false
-          only_changed_files: true
-          report_name: Code Coverage (Ubuntu)
+          # Minimum total coverage, if you want the
+          # workflow to enforce it as a standard.
+          # This has no effect if the `fail` arg is set to `false`.
+          threshold: 70
+          # Fail the workflow if the minimum code coverage
+          # requirements are not satisfied.
+          fail: false
+          # Publish the rendered output as a PR comment.
+          publish: true
+          # Create a coverage diff report.
+          diff: true
+          # Branch to diff against.
+          # Compare the current coverage to the coverage
+          # determined on this branch.
+          diff-branch: develop
+          # This is where the coverage reports for the
+          # `diff-branch` are stored.
+          # Branch is created if it doesn't already exist.
+          diff-storage: _xml_coverage_reports
+          # A custom title that can be added to the code
+          # coverage summary in the PR comment.
+          coverage-summary-title: "Code Coverage (Ubuntu)"
+          # Make the code coverage report togglable.
+          togglable-report: true

-      - name: Publish Coverage Report for Windows
-        uses: 5monkeys/cobertura-action@master
-        if: ${{ github.event_name == 'pull_request' }}
+      - name: Produce the coverage report for Windows
+        uses: insightsengineering/coverage-action@v2
         with:
+          # Path to the Cobertura XML report.
           path: artifacts/unit-test-results-python3.12-windows-latest/coverage.xml
-          repo_token: ${{ secrets.GITHUB_TOKEN }}
-          pull_request_number: ${{ github.pull_request.number }}
-          minimum_coverage: 70
-          fail_below_threshold: false
-          only_changed_files: true
-          report_name: Code Coverage (Windows)
+          # Minimum total coverage, if you want the
+          # workflow to enforce it as a standard.
+          # This has no effect if the `fail` arg is set to `false`.
+          threshold: 70
+          # Fail the workflow if the minimum code coverage
+          # requirements are not satisfied.
+          fail: false
+          # Publish the rendered output as a PR comment.
+          publish: true
+          # Create a coverage diff report.
+          diff: true
+          # Branch to diff against.
+          # Compare the current coverage to the coverage
+          # determined on this branch.
+          diff-branch: develop
+          # This is where the coverage reports for the
+          # `diff-branch` are stored.
+          # Branch is created if it doesn't already exist.
+          diff-storage: _xml_coverage_reports_win
+          # A custom title that can be added to the code
+          # coverage summary in the PR comment.
+          coverage-summary-title: "Code Coverage (Windows)"
+          # Make the code coverage report togglable.
+          togglable-report: true
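Both new coverage steps consume a Cobertura-style XML report that the test jobs upload as artifacts. As a minimal sketch of producing such a report locally, assuming `pytest` and `pytest-cov` are installed and that the unit tests live under `tests/units` (both assumptions, not taken from this diff):

    # sketch: generate the Cobertura XML that the coverage-action steps read
    import pytest

    # --cov/--cov-report come from the pytest-cov plugin; coverage.xml matches
    # the `path` inputs above once uploaded as a workflow artifact
    pytest.main(["--cov=eodag", "--cov-report=xml:coverage.xml", "tests/units"])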
   build-docs:
     name: Build the docs

diff --git a/.gitignore b/.gitignore
index 0e6aad375..0fcb9b900 100644
--- a/.gitignore
+++ b/.gitignore
@@ -82,7 +82,7 @@ celerybeat-schedule
 .env

 # virtualenv
-.venv/
+.venv*/
 venv/
 ENV/

diff --git a/CHANGES.rst b/CHANGES.rst
index f7f0d35b0..181b7a5db 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,11 +1,41 @@
 Release history
 ---------------

+2.12.0 (2024-02-19)
++++++++++++++++++++
+
+* Individual product asset download methods (:pull:`932`)
+* New environment variable `EODAG_CFG_DIR` available for custom configuration directory (:pull:`927`)
+* New `list_queryables `_
+  method, available through the Python API and server mode, and using product-types constraints if available
+  (see the usage sketch after this section) (:pull:`911`)
+  (:pull:`917`)(:pull:`974`)(:pull:`977`)(:pull:`978`)(:pull:`981`)(:pull:`1005`)
+* Removes limited RPC server (:pull:`1011`)
+* Product types aliases (:pull:`905`)
+* New provider `creodias_s3` (:pull:`986`)(:pull:`1002`)
+* `earth_search` endpoint updated from v0 to v1 (:pull:`754`)
+* `wekeo` endpoint updated to *wekeo2 wekeo-broker API* (:pull:`1010`)
+* New product types added for `cop_ads` and `cop_cds` (:pull:`898`)
+* Adds missing `tileIdentifier` and `quicklook` for `creodias`, `creodias_s3` and `cop_dataspace` (:pull:`957`)
+  (:pull:`1014`)
+* HTTP download with :class:`~eodag.plugins.apis.cds.CdsApi` (:pull:`946`)
+* Download streaming available for :class:`~eodag.plugins.download.aws.AwsDownload` plugin (:pull:`997`)
+* Lists STAC alternate assets in server mode (:pull:`961`)
+* `_dc_qs` used in server-mode to store :class:`~eodag.plugins.apis.cds.CdsApi` search criteria (:pull:`958`)(:pull:`1000`)
+* New eodag exception :class:`~eodag.utils.exceptions.TimeOutError` (:pull:`982`)
+* Cast loaded environment variables type using config type-hints (:pull:`987`)
+* Type hints fixes (:pull:`880`)(:pull:`983`)
+* Requirements updates (:pull:`1020`)(:pull:`1021`)
+* Various server mode fixes (:pull:`891`)(:pull:`895`)(:pull:`947`)(:pull:`992`)(:pull:`1001`)
+* Various minor fixes and improvements (:pull:`934`)(:pull:`935`)(:pull:`936`)(:pull:`962`)(:pull:`969`)(:pull:`976`)
+  (:pull:`980`)(:pull:`988`)(:pull:`991`)(:pull:`996`)(:pull:`1003`)(:pull:`1009`)(:pull:`1013`)(:pull:`1016`)
+  (:pull:`1019`)(:pull:`1022`)(:pull:`1024`)(:pull:`1025`)
+
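The `list_queryables` entry above is the Python-API side of the feature. A minimal usage sketch, assuming a default gateway configuration; the `S2_MSI_L1C` product type is an illustrative choice, not taken from this diff:

    from eodag import EODataAccessGateway

    # list the queryable search parameters for a given product type;
    # list_queryables returns a mapping of parameter names to annotated types
    dag = EODataAccessGateway()
    queryables = dag.list_queryables(productType="S2_MSI_L1C")
    print(sorted(queryables))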
 2.11.0 (2023-11-20)
 +++++++++++++++++++

 * Fallback mechanism for search (:pull:`753`)(:pull:`807`)
-* `creodias` and `cop_dataspace` configuration update (from `OData` to `OpenSearch`) (:pull:`866`)(:pull:`883`)(:pull:`894`)(:pull:`915`)(:pull:`929`)
+* `creodias` and `cop_dataspace` configuration update (from `OData` to `OpenSearch`) (:pull:`866`)(:pull:`883`)
+  (:pull:`894`)(:pull:`915`)(:pull:`929`)
 * Removes `mundi` provider (:pull:`890`)
 * Copernicus DEM product types available through creodias (:pull:`882`)
 * `wekeo` driver update and new product types (:pull:`798`)(:pull:`840`)(:pull:`856`)(:pull:`902`)
@@ -13,14 +43,19 @@ Release history
 * Refresh token usage in `KeycloakOIDCPasswordAuth` (`creodias` and `cop_dataspace`) (:pull:`921`)
 * Per-provider search timeout (:pull:`841`)
 * New `EODAG_PROVIDERS_CFG_FILE` environment variable for custom provider configuration setting (:pull:`836`)
-* Many server-mode updates and fixes: `queryables` endpoints (:pull:`795`), built-in Swagger doc update (:pull:`846`), exceptions handling (:pull:`794`)(:pull:`806`)(:pull:`812`)(:pull:`829`),
+* Many server-mode updates and fixes: `queryables` endpoints (:pull:`795`), built-in Swagger doc update (:pull:`846`),
+  exceptions handling (:pull:`794`)(:pull:`806`)(:pull:`812`)(:pull:`829`),
   provider setting (:pull:`808`) and returned information (:pull:`884`)(:pull:`879`), multithreaded requests (:pull:`843`),
-  opened time intervals fixes (:pull:`837`), search-by-ids fix (:pull:`822`), intersects parameter fixes (:pull:`796`)(:pull:`797`)
+  opened time intervals fixes (:pull:`837`), search-by-ids fix (:pull:`822`), intersects parameter fixes (:pull:`796`)
+  (:pull:`797`)
 * Adds support for Python 3.12 (:pull:`892`) and removes support for Python 3.7 (:pull:`903`)
 * Fixes plugin manager rebuild (solves preferred provider issues) (:pull:`919`)
 * Reformatted logs (:pull:`842`)(:pull:`885`)
 * Adds static type information (:pull:`863`)
-* Various minor fixes and improvements (:pull:`759`)(:pull:`788`)(:pull:`791`)(:pull:`793`)(:pull:`802`)(:pull:`804`)(:pull:`805`)(:pull:`813`)(:pull:`818`)(:pull:`819`)(:pull:`821`)(:pull:`824`)(:pull:`825`)(:pull:`828`)(:pull:`830`)(:pull:`832`)(:pull:`835`)(:pull:`838`)(:pull:`844`)(:pull:`867`)(:pull:`868`)(:pull:`872`)(:pull:`877`)(:pull:`878`)(:pull:`881`)(:pull:`893`)(:pull:`899`)(:pull:`913`)(:pull:`920`)(:pull:`925`)(:pull:`926`)
+* Various minor fixes and improvements (:pull:`759`)(:pull:`788`)(:pull:`791`)(:pull:`793`)(:pull:`802`)(:pull:`804`)
+  (:pull:`805`)(:pull:`813`)(:pull:`818`)(:pull:`819`)(:pull:`821`)(:pull:`824`)(:pull:`825`)(:pull:`828`)(:pull:`830`)
+  (:pull:`832`)(:pull:`835`)(:pull:`838`)(:pull:`844`)(:pull:`867`)(:pull:`868`)(:pull:`872`)(:pull:`877`)(:pull:`878`)
+  (:pull:`881`)(:pull:`893`)(:pull:`899`)(:pull:`913`)(:pull:`920`)(:pull:`925`)(:pull:`926`)

 2.11.0b1 (2023-07-28)
 +++++++++++++++++++++

@@ -66,7 +101,7 @@ Release history
   (:pull:`659`)
 * Fetch product types optimization (:pull:`683`)
 * Fixes external product types update for unknown provider (:pull:`682`)
-* Default dates and refactor for `CdsApi` and `EcmwfApi` (:pull:`672`)(:pull:`678`)(:pull:`679`)
+* Default dates and refactor for :class:`~eodag.plugins.apis.cds.CdsApi` and :class:`~eodag.plugins.apis.ecmwf.EcmwfApi` (:pull:`672`)(:pull:`678`)(:pull:`679`)
 * `peps` `storageStatus` update (:pull:`677`)
 * Customized and faster `deepcopy` (:pull:`664`)
 * Various minor fixes and improvements (:pull:`665`)(:pull:`666`)(:pull:`667`)(:pull:`668`)(:pull:`669`)(:pull:`670`)

diff --git a/NOTICE b/NOTICE
index 89c14b15b..93a19ad34 100644
--- a/NOTICE
+++ b/NOTICE
@@ -12,12 +12,13 @@ about the license of each project.
 The Apache 2.0 License
 ================================================================

-The following components are provided under the Apache 2.0 License (https://opensource.org/licenses/Apache-2.0).
+The following components are provided under the Apache 2.0 License (https://opensource.org/licenses/Apache-2.0). See project link for details.

 https://github.com/requests/requests
 https://github.com/dateutil/dateutil
 https://github.com/boto/boto3
+https://github.com/boto/botocore
 https://github.com/h2non/jsonpath-ng
 https://github.com/CS-SI/eodag-cube
 https://github.com/ecmwf/ecmwf-api-client
@@ -30,7 +31,7 @@ https://github.com/stac-utils/pystac
 The MIT License
 ================================================================

-The following components are provided under the MIT License (http://www.opensource.org/licenses/mit-license.php).
+The following components are provided under the MIT License (http://www.opensource.org/licenses/mit-license.php). See project link for details.
 https://pyyaml.org/wiki/PyYAML
@@ -42,13 +43,16 @@ https://github.com/GeospatialPython/pyshp
 https://github.com/python-visualization/folium
 https://github.com/Unidata/netcdf4-python
 https://github.com/tiangolo/fastapi
+https://github.com/urllib3/urllib3
+https://github.com/annotated-types/annotated-types
+https://github.com/pypa/setuptools

 ================================================================
 The BSD-2-Clause Licence
 ================================================================

-The following components are provided under the BSD-3-Clause License (https://opensource.org/licenses/BSD-2-Clause).
+The following components are provided under the BSD-2-Clause License (https://opensource.org/licenses/BSD-2-Clause). See project link for details.

 https://github.com/mchaput/whoosh
@@ -59,7 +63,7 @@ https://github.com/imageio/imageio
 The BSD-3-Clause Licence
 ================================================================

-The following components are provided under the BSD-3-Clause License (https://opensource.org/licenses/BSD-3-Clause).
+The following components are provided under the BSD-3-Clause License (https://opensource.org/licenses/BSD-3-Clause). See project link for details.

 https://github.com/pallets/click
@@ -72,6 +76,8 @@ https://github.com/lxml/lxml
 https://github.com/jupyter-widgets/ipywidgets
 https://github.com/jupyter/jupyter
 https://github.com/encode/uvicorn
+https://github.com/encode/starlette
+

 The function slugify, located at eodag/utils/__init__.py is a modified version of the function
 with the same name from the Django Project, licensed under the BSD-3-Clause Licence.
 Follow project link below for more information:
@@ -79,10 +85,11 @@ https://github.com/django/django

 ================================================================
-The Python-2.0 License
+The Python Software Foundation License
 ================================================================

-The following components are provided under the Python-2.0 License (https://opensource.org/licenses/Python-2.0).
+The following components are provided under the Python Software Foundation License (https://opensource.org/licenses/Python-2.0). See project link for details.

 https://github.com/matplotlib
+https://github.com/python/typing_extensions

diff --git a/README.rst b/README.rst
index 21f24e2e2..4687acc81 100644
--- a/README.rst
+++ b/README.rst
@@ -34,7 +34,7 @@
 ..
    Checkout **EODAG Jupyterlab extension**: `eodag-labextension `_!

-   This will bring a fiendly UI to your notebook and help you search and browse for EO products using ``eodag``.
+   This will bring a friendly UI to your notebook and help you search and browse for EO products using ``eodag``.

 EODAG (Earth Observation Data Access Gateway) is a command line tool and a plugin-oriented Python framework for searching,
 aggregating results and downloading remote sensed images while offering a unified API for data access regardless of the
@@ -152,12 +152,12 @@ An eodag instance can be exposed through a STAC compliant REST api from the comm

    # search for items
    $ curl "http://127.0.0.1:5000/search?collections=S2_MSI_L1C&bbox=0,43,1,44&datetime=2018-01-20/2018-01-25" \
-       | jq ".context.matched"
+       | jq ".numberMatched"
   6

    # browse for items
    $ curl "http://127.0.0.1:5000/catalogs/S2_MSI_L1C/country/FRA/year/2021/month/01/day/25/cloud_cover/10/items" \
-       | jq ".context.matched"
+       | jq ".numberMatched"
   9

    # get download link
@@ -173,7 +173,7 @@ An eodag instance can be exposed through a STAC compliant REST api from the comm

 .. code-block:: bash

-   docker run -p 5000:5000 --rm csspace/eodag-server:2.11.0
+   docker run -p 5000:5000 --rm csspace/eodag-server:2.12.0

 You can also browse over your STAC API server using `STAC Browser `_. Simply run:
@@ -265,4 +265,4 @@ Credits

 EODAG is built on top of amazingly useful open source projects. See NOTICE file
 for details about those projects and their licenses.
-Thank you to all the authors of these projects !
+Thank you to all the authors of these projects!

diff --git a/charts/eodag-server/Chart.yaml b/charts/eodag-server/Chart.yaml
index 356a116ae..575d6b004 100644
--- a/charts/eodag-server/Chart.yaml
+++ b/charts/eodag-server/Chart.yaml
@@ -1,5 +1,5 @@
 apiVersion: v2
-appVersion: 2.11.0
+appVersion: 2.12.0
 dependencies:
 - name: common
   repository: oci://registry-1.docker.io/bitnamicharts
@@ -15,4 +15,4 @@ name: eodag-server
 sources:
 - https://github.com/CS-SI/eodag
 type: application
-version: 2.11.0
+version: 2.12.0

diff --git a/docker/stac-server.dockerfile b/docker/stac-server.dockerfile
index f7fb4b6c0..eaac9570c 100644
--- a/docker/stac-server.dockerfile
+++ b/docker/stac-server.dockerfile
@@ -55,7 +55,7 @@ COPY ./eodag /eodag/eodag
 RUN python -m pip install .

 # add python path
-ENV PYTHONPATH="${PYTHONPATH}:/eodag/eodag"
+ENV PYTHONPATH="${PYTHONPATH}:/eodag/eodag/resources"

 # copy start-stac script
 COPY ./docker/run-stac-server.sh /eodag/run-stac-server.sh

diff --git a/docs/_static/params_mapping_extra.csv b/docs/_static/params_mapping_extra.csv
index fcd240352..d305504f5 100644
--- a/docs/_static/params_mapping_extra.csv
+++ b/docs/_static/params_mapping_extra.csv
@@ -9,9 +9,9 @@ latitudeBand,,,,,,:green:`queryable metadata`,,,,,,,,,
 orderLink,,,,,,,,,,metadata only,,,,,
 polarizationChannels,metadata only,,,:green:`queryable metadata`,,:green:`queryable metadata`,:green:`queryable metadata`,:green:`queryable metadata`,,metadata only,,:green:`queryable metadata`,,metadata only,:green:`queryable metadata`
 polarizationMode,,,,,:green:`queryable metadata`,,metadata only,,,,:green:`queryable metadata`,,:green:`queryable metadata`,metadata only,
-quicklook,metadata only,,,,metadata only,metadata only,metadata only,metadata only,,metadata only,metadata only,metadata only,metadata only,metadata only,metadata only
+quicklook,metadata only,,,metadata only,metadata only,metadata only,metadata only,metadata only,,metadata only,metadata only,metadata only,metadata only,metadata only,metadata only
 storageStatus,metadata only,,,metadata only,metadata only,metadata only,metadata only,metadata only,,metadata only,metadata only,metadata only,metadata only,metadata only,metadata only
-thumbnail,metadata only,,,,metadata only,metadata only,metadata only,metadata only,,,metadata only,metadata only,metadata only,metadata only,metadata only
-tileIdentifier,,,,metadata only,metadata only,:green:`queryable metadata`,,,,:green:`queryable metadata`,:green:`queryable metadata`,:green:`queryable metadata`,,:green:`queryable metadata`,
+thumbnail,metadata only,,,metadata only,metadata only,metadata only,metadata only,metadata only,,,metadata only,metadata only,metadata only,metadata only,metadata only
+tileIdentifier,,,,:green:`queryable metadata`,:green:`queryable metadata`,:green:`queryable metadata`,,,,:green:`queryable metadata`,:green:`queryable metadata`,:green:`queryable metadata`,,:green:`queryable metadata`,
 uid,,,,metadata only,metadata only,,,,,metadata only,metadata only,,metadata only,metadata only,
 utmZone,,,,,,:green:`queryable metadata`,,,,,,,,,

diff --git
a/docs/_static/product_types_information.csv b/docs/_static/product_types_information.csv index 57d750a24..dc700927f 100644 --- a/docs/_static/product_types_information.csv +++ b/docs/_static/product_types_information.csv @@ -1,160 +1,169 @@ -product type,abstract,instrument,platform,platformSerialIdentifier,processingLevel,keywords,sensorType,license,title,missionStartDate,astraea_eod,aws_eos,cop_ads,cop_cds,cop_dataspace,creodias,earth_search,earth_search_cog,earth_search_gcs,ecmwf,hydroweb_next,meteoblue,onda,peps,planetary_computer,sara,theia,usgs,usgs_satapi_aws,wekeo -CAMS_EAC4,CAMS (Copernicus Atmosphere Monitoring Service) ECMWF Atmospheric Composition Reanalysis 4 from Copernicus ADS ,,CAMS,CAMS,,"Copernicus,Atmosphere,Atmospheric,Reanalysis,CAMS,EAC4,ADS,ECMWF",ATMOSPHERIC,proprietary,CAMS ECMWF Atmospheric Composition Reanalysis 4,2003-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,, -CAMS_GACF_AOT,CAMS (Copernicus Atmosphere Monitoring Service) Global Atmospheric Composition Forecast of Aerosol Optical Thickness from Copernicus ADS ,,CAMS,CAMS,,"Copernicus,Atmosphere,Atmospheric,Forecast,CAMS,GACF,AOT,ADS",ATMOSPHERIC,proprietary,CAMS GACF Aerosol Optical Thickness,2003-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,, -CAMS_GACF_MR,CAMS (Copernicus Atmosphere Monitoring Service) Global Atmospheric Composition Forecast of Mixing Ratios from Copernicus ADS ,,CAMS,CAMS,,"Copernicus,Atmosphere,Atmospheric,Forecast,CAMS,GACF,MR,ADS",ATMOSPHERIC,proprietary,CAMS GACF Mixing Ratios,2003-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,, -CAMS_GACF_RH,CAMS (Copernicus Atmosphere Monitoring Service) Global Atmospheric Composition Forecast of Relative Humidity from Copernicus ADS ,,CAMS,CAMS,,"Copernicus,Atmosphere,Atmospheric,Forecast,CAMS,GACF,RH,ADS",ATMOSPHERIC,proprietary,CAMS GACF Relative Humidity,2003-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,, -CBERS4_AWFI_L2,"China-Brazil Earth Resources Satellite, CBERS-4 AWFI camera Level-2 product. System corrected images, expect some translation error. ",AWFI,CBERS,CBERS-4,L2,"AWFI,CBERS,CBERS-4,L2",OPTICAL,proprietary,CBERS-4 AWFI Level-2,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,, -CBERS4_AWFI_L4,"China-Brazil Earth Resources Satellite, CBERS-4 AWFI camera Level-4 product. Orthorectified with ground control points. ",AWFI,CBERS,CBERS-4,L4,"AWFI,CBERS,CBERS-4,L4",OPTICAL,proprietary,CBERS-4 AWFI Level-4,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,, -CBERS4_MUX_L2,"China-Brazil Earth Resources Satellite, CBERS-4 MUX camera Level-2 product. System corrected images, expect some translation error. ",MUX,CBERS,CBERS-4,L2,"MUX,CBERS,CBERS-4,L2",OPTICAL,proprietary,CBERS-4 MUX Level-2,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,, -CBERS4_MUX_L4,"China-Brazil Earth Resources Satellite, CBERS-4 MUX camera Level-4 product. Orthorectified with ground control points. ",MUX,CBERS,CBERS-4,L4,"MUX,CBERS,CBERS-4,L4",OPTICAL,proprietary,CBERS-4 MUX Level-4,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,, -CBERS4_PAN10M_L2,"China-Brazil Earth Resources Satellite, CBERS-4 PAN10M camera Level-2 product. System corrected images, expect some translation error. ",PAN10M,CBERS,CBERS-4,L2,"PAN10M,CBERS,CBERS-4,L2",OPTICAL,proprietary,CBERS-4 PAN10M Level-2,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,, -CBERS4_PAN10M_L4,"China-Brazil Earth Resources Satellite, CBERS-4 PAN10M camera Level-4 product. Orthorectified with ground control points. 
",PAN10M,CBERS,CBERS-4,L4,"PAN10M,CBERS,CBERS-4,L4",OPTICAL,proprietary,CBERS-4 PAN10M Level-4,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,, -CBERS4_PAN5M_L2,"China-Brazil Earth Resources Satellite, CBERS-4 PAN5M camera Level-2 product. System corrected images, expect some translation error. ",PAN5M,CBERS,CBERS-4,L2,"PAN5M,CBERS,CBERS-4,L2",OPTICAL,proprietary,CBERS-4 PAN5M Level-2,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,, -CBERS4_PAN5M_L4,"China-Brazil Earth Resources Satellite, CBERS-4 PAN5M camera Level-4 product. Orthorectified with ground control points. ",PAN5M,CBERS,CBERS-4,L4,"PAN5M,CBERS,CBERS-4,L4",OPTICAL,proprietary,CBERS-4 PAN5M Level-4,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,, -CLMS_CORINE,"The CORINE Land Cover (CLC) inventory was initiated in 1985 (reference year 1990). Updates have been produced in 2000, 2006, 2012, and 2018. It consists of an inventory of land cover in 44 classes. CLC uses a Minimum Mapping Unit (MMU) of 25 hectares (ha) for areal phenomena and a minimum width of 100 m for linear phenomena. The time series are complemented by change layers, which highlight changes in land cover with an MMU of 5 ha. Different MMUs mean that the change layer has higher resolution than the status layer. Due to differences in MMUs the difference between two status layers will not equal to the corresponding CLC-Changes layer. If you are interested in CLC-Changes between two neighbour surveys always use the CLC-Change layer. ",,"Sentinel-2, LANDSAT, SPOT-4/5, IRS P6 LISS III","S2, L5, L7, L8, SPOT4, SPOT5",,"Land-cover,LCL,CORINE,CLMS",,proprietary,CORINE Land Cover,1986-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -CLMS_GLO_DMP_333M,"Dry matter Productivity (DMP) is an indication of the overall growth rate or dry biomass increase of the vegetation and is directly related to ecosystem Net Primary Productivity (NPP), however its units (kilograms of gross dry matter per hectare per day) are customized for agro-statistical purposes. Compared to the Gross DMP (GDMP), or its equivalent Gross Primary Productivity, the main difference lies in the inclusion of the autotrophic respiration. Like the FAPAR products that are used as input for the GDMP estimation, these GDMP products are provided in Near Real Time, with consolidations in the next periods, or as offline product. ","OLCI,PROBA-V",Sentinel-3,,,"Land,Dry-matter-productivity,DMP,OLCI,PROBA-V,Sentinel-3",,proprietary,10-daily Dry Matter Productivity 333M,2014-01-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -CLMS_GLO_FAPAR_333M,"The FAPAR quantifies the fraction of the solar radiation absorbed by plants for photosynthesis. It refers only to the green and living elements of the canopy. The FAPAR depends on the canopy structure, vegetation element optical properties, atmospheric conditions and angular configuration. To overcome this latter dependency, a daily integrated FAPAR value is assessed. FAPAR is very useful as input to a number of primary productivity models and is recognized as an Essential Climate Variable (ECV) by the Global Climate Observing System (GCOS). The product at 333m resolution is provided in Near Real Time and consolidated in the next six periods. ","OLCI,PROBA-V",Sentinel-3,,,"Land,Fraction-of-absorbed-PAR,FAPAR,OLCI,PROBA-V,Sentinel-3",,proprietary,Global 10-daily Fraction of Absorbed PAR 333m,2014-01-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -CLMS_GLO_FCOVER_333M,"The Fraction of Vegetation Cover (FCover) corresponds to the fraction of ground covered by green vegetation. 
Practically, it quantifies the spatial extent of the vegetation. Because it is independent from the illumination direction and it is sensitive to the vegetation amount, FCover is a very good candidate for the replacement of classical vegetation indices for the monitoring of ecosystems. The product at 333m resolution is provided in Near Real Time and consolidated in the next six periods. ","OLCI,PROBA-V",Sentinel-3,,,"Land,Fraction-of-vegetation-cover,OLCI,PROBA-V,Sentinel-3",,proprietary,Global 10-daily Fraction of Vegetation Cover 333m,2014-01-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -CLMS_GLO_GDMP_333M,"Gross dry matter Productivity (GDMP) is an indication of the overall growth rate or dry biomass increase of the vegetation and is directly related to ecosystem Gross Primary Productivity (GPP), that reflects the ecosystem's overall production of organic compounds from atmospheric carbon dioxide, however its units (kilograms of gross dry matter per hectare per day) are customized for agro-statistical purposes. Like the FAPAR products that are used as input for the GDMP estimation, these GDMP products are provided in Near Real Time, with consolidations in the next periods, or as offline product. ","OLCI,PROBA-V",Sentinel-3,,,"Land,Gross-dry-matter-productivity,GDMP,GPP,OLCI,PROBA-V,Sentinel-3",,proprietary,10-daily Gross Dry Matter Productivity 333M,2014-01-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -CLMS_GLO_LAI_333M,"LAI was defined by CEOS as half the developed area of the convex hull wrapping the green canopy elements per unit horizontal ground. This definition allows accounting for elements which are not flat such as needles or stems. LAI is strongly non linearly related to reflectance. Therefore, its estimation from remote sensing observations will be scale dependant over heterogeneous landscapes. When observing a canopy made of different layers of vegetation, it is therefore mandatory to consider all the green layers. This is particularly important for forest canopies where the understory may represent a very significant contribution to the total canopy LAI. The derived LAI corresponds therefore to the total green LAI, including the contribution of the green elements of the understory. The product at 333m resolution is provided in Near Real Time and consolidated in the next six periods. ","OLCI,PROBA-V",Sentinel-3,,,"Land,Leaf-area-index,LAI,OLCI,PROBA-V,Sentinel-3",,proprietary,Global 10-daily Leaf Area Index 333m,2014-01-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -CLMS_GLO_NDVI_1KM_LTS,"The Normalized Difference Vegetation Index (NDVI) is a proxy to quantify the vegetation amount. It is defined as NDVI=(NIR-Red)/(NIR+Red) where NIR corresponds to the reflectance in the near infrared band, and Red to the reflectance in the red band. The time series of dekadal (10-daily) NDVI 1km version 2 observations over the period 1999-2017 is used to calculate Long Term Statistics (LTS) for each of the 36 10-daily periods (dekads) of the year. The calculated LTS include the minimum, median, maximum, average, standard deviation and the number of observations in the covered time series period. These LTS can be used as a reference for actual NDVI observations, which allows evaluating whether vegetation conditions deviate from a 'normal' situation. 
","VEGETATION,PROBA-V",SPOT,,,"Land,NDVI,LTS,SPOT,VEGETATION,PROBA-V",,proprietary,"Normalized Difference Vegetation Index: global Long Term Statistics (raster 1km) - version 2, Apr 2019",1999-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -CLMS_GLO_NDVI_333M,"The Normalized Difference Vegetation Index (NDVI) is a proxy to quantify the vegetation amount. It is defined as NDVI=(NIR-Red)/(NIR+Red) where NIR corresponds to the reflectance in the near infrared band, and Red to the reflectance in the red band. It is closely related to FAPAR and is little scale dependant. ",PROBA-V,,,,"Land,NDVI,PROBA-V",,proprietary,Global 10-daily Normalized Difference Vegetation Index 333M,2014-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -COP_DEM_GLO30_DGED,"Defence Gridded Elevation Data (DGED) formatted Copernicus DEM GLO-30 data. The Copernicus Digital Elevation Model is a Digital Surface Model (DSM) that represents the surface of the Earth including buildings, infrastructure and vegetation. The Copernicus DEM is provided in 3 different instances: EEA-10, GLO-30 and GLO-90. GLO-30 provides worldwide coverage at 30 meters.Data were acquired through the TanDEM-X mission between 2011 and 2015. The datasets were made available for use in 2019 and will be maintained until 2026. ",,TerraSAR,,,"TerraSAR,TanDEM-X,DEM,surface,GLO-30,DSM,GDGED",ALTIMETRIC,proprietary,Copernicus DEM GLO-30 DGED,2010-06-21T00:00:00Z,,,,,,available,,,,,,,,,,,,,,available -COP_DEM_GLO30_DTED,"Digital Terrain Elevation Data (DTED) formatted Copernicus DEM GLO-30 data. The Copernicus Digital Elevation Model is a Digital Surface Model (DSM) that represents the surface of the Earth including buildings, infrastructure and vegetation. The Copernicus DEM is provided in 3 different instances: EEA-10, GLO-30 and GLO-90. GLO-30 provides worldwide coverage at 30 meters.Data were acquired through the TanDEM-X mission between 2011 and 2015. The datasets were made available for use in 2019 and will be maintained until 2026. ",,TerraSAR,,,"TerraSAR,TanDEM-X,DEM,surface,GLO-30,DSM,DTED",ALTIMETRIC,proprietary,Copernicus DEM GLO-30 DTED,2010-06-21T00:00:00Z,,,,,,available,,,,,,,,,,,,,, -COP_DEM_GLO90_DGED,"Defence Gridded Elevation Data (DGED) formatted Copernicus DEM GLO-90 data. The Copernicus Digital Elevation Model is a Digital Surface Model (DSM) that represents the surface of the Earth including buildings, infrastructure and vegetation. The Copernicus DEM is provided in 3 different instances: EEA-10, GLO-30 and GLO-90. GLO-90 provides worldwide coverage at 90 meters.Data were acquired through the TanDEM-X mission between 2011 and 2015. The datasets were made available for use in 2019 and will be maintained until 2026. ",,TerraSAR,,,"TerraSAR,TanDEM-X,DEM,surface,GLO-90,DSM,GDGED",ALTIMETRIC,proprietary,Copernicus DEM GLO-90 DGED,2010-06-21T00:00:00Z,,,,,,available,,,,,,,,,,,,,,available -COP_DEM_GLO90_DTED,"Digital Terrain Elevation Data (DTED) formatted Copernicus DEM GLO-90 data. The Copernicus Digital Elevation Model is a Digital Surface Model (DSM) that represents the surface of the Earth including buildings, infrastructure and vegetation. The Copernicus DEM is provided in 3 different instances: EEA-10, GLO-30 and GLO-90. GLO-90 provides worldwide coverage at 90 meters.Data were acquired through the TanDEM-X mission between 2011 and 2015. The datasets were made available for use in 2019 and will be maintained until 2026. 
",,TerraSAR,,,"TerraSAR,TanDEM-X,DEM,surface,GLO-90,DSM,DTED",ALTIMETRIC,proprietary,Copernicus DEM GLO-90 DTED,2010-06-21T00:00:00Z,,,,,,available,,,,,,,,,,,,,, -EEA_DAILY_SSM_1KM,"Surface Soil Moisture (SSM) is the relative water content of the top few centimetres soil, describing how wet or dry the soil is in its topmost layer, expressed in percent saturation. It is measured by satellite radar sensors and allows insights in local precipitation impacts and soil conditions. SSM is a key driver of water and heat fluxes between the ground and the atmosphere, regulating air temperature and humidity. Moreover, in its role as water supply, it is vital to vegetation health. Vice versa, SSM is very sensitive to external forcing in the form of precipitation, temperature, solar irradiation, humidity, and wind. SSM is thus both an integrator of climatic conditions and a driver of local weather and climate, and plays a major role in global water-, energy- and carbon- cycles. Knowledge on the dynamics of soil moisture is important in the understanding of processes in many environmental and socio-economic fields, e.g., its impact on vegetation vitality, crop yield, droughts or exposure to flood threats. ","C-SAR,Metop ASCAT",Sentinel-1,,,"SSM,C-SAR,Metop-ASCAT,Sentinel-1",RADAR,proprietary,"Surface Soil Moisture: continental Europe daily (raster 1km) - version 1, Apr 2019",2015-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -EEA_DAILY_SWI_1KM,"The Soil Water Index (SWI) quantifies the moisture condition at various depths in the soil. It is mainly driven by the precipitation via the process of infiltration. Soil moisture is a very heterogeneous variable and varies on small scales with soil properties and drainage patterns. Satellite measurements integrate over relative large-scale areas, with the presence of vegetation adding complexity to the interpretation. Soil moisture is a key parameter in numerous environmental studies including hydrology, meteorology and agriculture, and is recognized as an Essential Climate Variable (ECV) by the Global Climate Observing System (GCOS). The SWI product provides daily information about moisture conditions in different soil layers. It includes a quality flag (QFLAG) indicating the availability of SSM measurements for SWI calculations, and a Surface State Flag (SSF) indicating frozen or snow covered soils. ","C-SAR,Metop ASCAT",Sentinel-1,,,"SWI,QFLAG,SSF,C-SAR,Metop-ASCAT,Sentinel-1",RADAR,proprietary,"Soil Water Index: continental Europe daily (raster 1km) - version 1, Apr 2019",2015-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -EEA_DAILY_VI,"Vegetation Indices (VI) comprises four daily vegetation indices (PPI, NDVI, LAI and FAPAR) and quality information, that are part of the Copernicus Land Monitoring Service (CLMS) HR-VPP product suite. The 10m resolution, daily updated Plant Phenology Index (PPI), Normalized Difference Vegetation Index (NDVI), Leaf Area Index (LAI) and Fraction of Absorbed Photosynthetically Active Radiation (fAPAR) are derived from Copernicus Sentinel-2 satellite observations. They are provided together with a related quality indicator (QFLAG2) that flags clouds, shadows, snow, open water and other areas where the VI retrieval is less reliable. These Vegetation Indices are made available as a set of raster files with 10 x 10m resolution, in UTM/WGS84 projection corresponding to the Sentinel-2 tiling grid, for those tiles that cover the EEA38 countries and the United Kingdom and for the period from 2017 until today, with daily updates. 
The Vegetation Indices are part of the pan-European High Resolution Vegetation Phenology and Productivity (HR-VPP) component of the Copernicus Land Monitoring Service (CLMS). ",,Sentinel-2,"S2A, S2B",,"Land,Plant-phenology-index,Phenology,Vegetation,Sentinel-2,S2A,S2B",RADAR,proprietary,"Vegetation Indices, daily, UTM projection",,,,,,,,,,,,,,,,,,,,,available -EFAS_FORECAST,"This dataset provides gridded modelled hydrological time series forced with medium-range meteorological forecasts. The data is a consistent representation of the most important hydrological variables across the European Flood Awareness System (EFAS) domain. The temporal resolution is sub-daily high-resolution and ensemble forecasts of:\n\nRiver discharge\nSoil moisture for three soil layers\nSnow water equivalent\n\nIt also provides static data on soil depth for the three soil layers. Soil moisture and river discharge data are accompanied by ancillary files for interpretation (see related variables and links in the documentation).\nThis data set was produced by forcing the LISFLOOD hydrological model at a 5x5km resolution with meteorological forecasts. The forecasts are initialised twice daily at 00 and 12 UTC with time steps of 6 or 24 hours and lead times between 5 and 15 days depending on the forcing numerical weather prediction model. The forcing meteorological data are high-resolution and ensemble forecasts from the European Centre of Medium-range Weather Forecasts (ECMWF) with 51 ensemble members, high-resolution forecasts from the Deutsches Wetter Dienst (DWD) and the ensemble forecasts from the COSMO Local Ensemble Prediction System (COSMO-LEPS) with 20 ensemble members. The hydrological forecasts are available from 2018-10-10 up until present with a 30-day delay. The real-time data is only available to EFAS partners.\nCompanion datasets, also available through the CDS, are historical simulations which can be used to derive the hydrological climatology and for verification; reforecasts for research, local skill assessment and post-processing; and seasonal forecasts and reforecasts for users looking for longer leadtime forecasts. For users looking for global hydrological data, we refer to the Global Flood Awareness System (GloFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours, River discharge in the last 6 hours, Snow depth water equivalent, Soil depth, Volumetric soil moisture\n\nVariables in the dataset/application are:\nOrography, Upstream area ",,CEMS,CEMS,,"ECMWF,CEMS,EFAS,forecast,river,discharge",ATMOSPHERIC,proprietary,River discharge and related forecasted data by the European Flood Awareness System,2018-10-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -EFAS_HISTORICAL,"This dataset provides gridded modelled daily hydrological time series forced with meteorological observations. The data set is a consistent representation of the most important hydrological variables across the European Flood Awareness System (EFAS) domain. The temporal resolution is up to 30 years modelled time series of:\n\nRiver discharge\nSoil moisture for three soil layers\nSnow water equivalent\n\nIt also provides static data on soil depth for the three soil layers. 
Soil moisture and river discharge data are accompanied by ancillary files for interpretation (see related variables and links in the documentation).\nThis dataset was produced by forcing the LISFLOOD hydrological model with gridded observational data of precipitation and temperature at a 5x5 km resolution across the EFAS domain. The most recent version\nuses a 6-hourly time step, whereas older versions uses a 24-hour time step. It is available from 1991-01-01 up until near-real time, with a delay of 6 days. The real-time data is only available to EFAS partners.\nCompanion datasets, also available through the CDS, are forecasts for users who are looking medium-range forecasts, reforecasts for research, local skill assessment and post-processing, and seasonal forecasts and reforecasts for users looking for long-term forecasts. For users looking for global hydrological data, we refer to the Global Flood Awareness System (GloFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours, River discharge in the last 6 hours, Snow depth water equivalent, Soil depth, Volumetric soil moisture\n\nVariables in the dataset/application are:\nOrography, Upstream area ",,CEMS,CEMS,,"ECMWF,CEMS,EFAS,historical,river,discharge",ATMOSPHERIC,proprietary,River discharge and related historical data from the European Flood Awareness System,1991-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -EFAS_REFORECAST,"This dataset provides gridded modelled hydrological time series forced with medium- to sub-seasonal range meteorological reforecasts. The data is a consistent representation of the most important hydrological variables across the European Flood Awareness System (EFAS) domain. The temporal resolution is 20 years of sub-daily reforecasts initialised twice weekly (Mondays and Thursdays) of:\n\nRiver discharge\nSoil moisture for three soil layers\nSnow water equivalent\n\nIt also provides static data on soil depth for the three soil layers. Soil moisture and river discharge data are accompanied by ancillary files for interpretation (see related variables and links in the documentation).\nThis dataset was produced by forcing the LISFLOOD hydrological model at a 5x5km resolution with ensemble meteorological reforecasts from the European Centre of Medium-range Weather Forecasts (ECMWF). Reforecasts are forecasts run over past dates and are typically used to assess the skill of a forecast system or to develop tools for statistical error correction of the forecasts. The reforecasts are initialised twice weekly with lead times up to 46 days, at 6-hourly time steps for 20 years. For more specific information on the how the reforecast dataset is produced we refer to the documentation.\nCompanion datasets, also available through the Climate Data Store (CDS), are the operational forecasts, historical simulations which can be used to derive the hydrological climatology, and seasonal forecasts and reforecasts for users looking for long term forecasts. For users looking for global hydrological data, we refer to the Global Flood Awareness System (GloFAS) forecasts an historical simulations. 
All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge, Snow depth water equivalent, Soil depth, Volumetric soil moisture\n\nVariables in the dataset/application are:\nOrography, Upstream area ",,CEMS,CEMS,,"ECMWF,CEMS,EFAS,reforecast,river,discharge",ATMOSPHERIC,proprietary,Reforecasts of river discharge and related data by the European Flood Awareness System,1999-01-03T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -EFAS_SEASONAL,"This dataset provides gridded modelled daily hydrological time series forced with seasonal meteorological forecasts. The dataset is a consistent representation of the most important hydrological variables across the European Flood Awareness (EFAS) domain. The temporal resolution is daily forecasts initialised once a month consisting of:\n\nRiver discharge\nSoil moisture for three soil layers\nSnow water equivalent\n\nIt also provides static data on soil depth for the three soil layers. Soil moisture and river discharge data are accompanied by ancillary files for interpretation (see related variables and links in the documentation).\nThis dataset was produced by forcing the LISFLOOD hydrological model at a 5x5km resolution with seasonal meteorological ensemble forecasts. The forecasts are initialised on the first of each month with a lead time of 215 days at 24-hour time steps. The meteorological data are seasonal forecasts (SEAS5) from the European Centre of Medium-range Weather Forecasts (ECMWF) with 51 ensemble members. The forecasts are available from November 2020.\nCompanion datasets, also available through the Climate Data Store (CDS), are seasonal reforecasts for research, local skill assessment and post-processing of the seasonal forecasts. There are also medium-range forecasts for users who want to look at shorter time ranges. These are accompanied by historical simulations which can be used to derive the hydrological climatology, and medium-range reforecasts. For users looking for global hydrological data, we refer to the Global Flood Awareness System (GloFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours, Snow depth water equivalent, Soil depth, Volumetric soil moisture\n\nVariables in the dataset/application are:\nOrography, Upstream area ",,CEMS,CEMS,,"ECMWF,CEMS,EFAS,seasonal,forecast,river,discharge",ATMOSPHERIC,proprietary,Seasonal forecasts of river discharge and related data by the European Flood Awareness System,2020-11-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -EFAS_SEASONAL_REFORECAST,"This dataset provides modelled daily hydrological time series forced with seasonal meteorological reforecasts. The dataset is a consistent representation of the most important hydrological variables across the European Flood Awareness (EFAS) domain. The temporal resolution is daily forecasts initialised once a month over the reforecast period 1991-2020 of:\n\nRiver discharge\nSoil moisture for three soil layers\nSnow water equivalent\n\nIt also provides static data on soil depth for the three soil layers. 
Soil moisture and river discharge data are accompanied by ancillary files for interpretation (see related variables and links in the documentation).\nThis dataset was produced by forcing the LISFLOOD hydrological model at a 5x5km gridded resolution with seasonal meteorological ensemble reforecasts. Reforecasts are forecasts run over past dates and are typically used to assess the skill of a forecast system or to develop tools for statistical error correction of the forecasts. The reforecasts are initialised on the first of each month with a lead time of 215 days at 24-hour time steps. The forcing meteorological data are seasonal reforecasts from the European Centre of Medium-range Weather Forecasts (ECMWF), consisting of 25 ensemble members up until December 2016, and after that 51 members. Hydrometeorological reforecasts are available from 1991-01-01 up until 2020-10-01. \nCompanion datasets, also available through the Climate Data Store (CDS), are seasonal forecasts, for which the seasonal reforecasts can be useful for local skill assessment and post-processing of the seasonal forecasts. For users looking for shorter time ranges there are medium-range forecasts and reforecasts, as well as historical simulations which can be used to derive the hydrological climatology. For users looking for global hydrological data, we refer to the Global Flood Awareness System (GloFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours, Snow depth water equivalent, Soil depth, Volumetric soil moisture\n\nVariables in the dataset/application are:\nOrography, Upstream area"" ",,CEMS,CEMS,,"ECMWF,CEMS,EFAS,seasonal,reforecast,river,discharge",ATMOSPHERIC,proprietary,Seasonal reforecasts of river discharge and related data by the European Flood Awareness System,1991-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -ERA5_LAND,"ERA5-Land is a reanalysis dataset providing a consistent view of the evolution of land variables over several decades at an enhanced resolution compared to ERA5. ERA5-Land has been produced by replaying the land component of the ECMWF ERA5 climate reanalysis. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using the laws of physics. Reanalysis produces data that goes several decades back in time, providing an accurate description of the climate of the past. ERA5-Land uses as input to control the simulated land fields ERA5 atmospheric variables, such as air temperature and air humidity. This is called the atmospheric forcing. Without the constraint of the atmospheric forcing, the model-based estimates can rapidly deviate from reality. Therefore, while observations are not directly used in the production of ERA5-Land, they have an indirect influence through the atmospheric forcing used to run the simulation. In addition, the input air temperature, air humidity and pressure used to run ERA5-Land are corrected to account for the altitude difference between the grid of the forcing and the higher resolution grid of ERA5-Land. This correction is called 'lapse rate correction'. The ERA5-Land dataset, as any other simulation, provides estimates which have some degree of uncertainty. Numerical models can only provide a more or less accurate representation of the real physical processes governing different components of the Earth System. 
In general, the uncertainty of model estimates grows as we go back in time, because the number of observations available to create a good quality atmospheric forcing is lower. ERA5-land parameter fields can currently be used in combination with the uncertainty of the equivalent ERA5 fields. The temporal and spatial resolutions of ERA5-Land makes this dataset very useful for all kind of land surface applications such as flood or drought forecasting. The temporal and spatial resolution of this dataset, the period covered in time, as well as the fixed grid used for the data distribution at any period enables decisions makers, businesses and individuals to access and use more accurate information on land states. Variables in the dataset/application are: 10m u-component of wind, 10m v-component of wind, 2m dewpoint temperature, 2m temperature, Evaporation from bare soil, Evaporation from open water surfaces excluding oceans, Evaporation from the top of canopy, Evaporation from vegetation transpiration, Forecast albedo, Lake bottom temperature, Lake ice depth, Lake ice temperature, Lake mix-layer depth, Lake mix-layer temperature, Lake shape factor, Lake total layer temperature, Leaf area index, high vegetation, Leaf area index, low vegetation, Potential evaporation, Runoff, Skin reservoir content, Skin temperature, Snow albedo, Snow cover, Snow density, Snow depth, Snow depth water equivalent, Snow evaporation, Snowfall, Snowmelt, Soil temperature level 1, Soil temperature level 2, Soil temperature level 3, Soil temperature level 4, Sub-surface runoff, Surface latent heat flux, Surface net solar radiation, Surface net thermal radiation, Surface pressure, Surface runoff, Surface sensible heat flux, Surface solar radiation downwards, Surface thermal radiation downwards, Temperature of snow layer, Total evaporation, Total precipitation, Volumetric soil water layer 1, Volumetric soil water layer 2, Volumetric soil water layer 3, Volumetric soil water layer 4 ",,ERA5,ERA5,,"ECMWF,Reanalysis,ERA5,CDS,Atmospheric,land,hourly,evolution",ATMOSPHERIC,proprietary,ERA5-Land hourly data from 1950 to present,1950-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -ERA5_LAND_MONTHLY,"ERA5-Land is a reanalysis dataset providing a consistent view of the evolution of land variables over several decades at an enhanced resolution compared to ERA5. ERA5-Land has been produced by replaying the land component of the ECMWF ERA5 climate reanalysis. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using the laws of physics. Reanalysis produces data that goes several decades back in time, providing an accurate description of the climate of the past. ERA5-Land provides a consistent view of the water and energy cycles at surface level during several decades. It contains a detailed record from 1950 onwards, with a temporal resolution of 1 hour. The native spatial resolution of the ERA5-Land reanalysis dataset is 9km on a reduced Gaussian grid (TCo1279). The data in the CDS has been regridded to a regular lat-lon grid of 0.1x0.1 degrees. The data presented here is a post-processed subset of the full ERA5-Land dataset. Monthly-mean averages have been pre-calculated to facilitate many applications requiring easy and fast access to the data, when sub-monthly fields are not required. Hourly fields can be found in the ERA5-Land hourly fields CDS page. Documentation can be found in the online ERA5-Land documentation. 
Variables in the dataset/application are: | 10m u-component of wind, 10m v-component of wind, 2m dewpoint temperature, 2m temperature, Evaporation from bare soil, Evaporation from open water surfaces excluding oceans, Evaporation from the top of canopy, Evaporation from vegetation transpiration, Forecast albedo, Lake bottom temperature, Lake ice depth, Lake ice temperature, Lake mix-layer depth, Lake mix-layer temperature, Lake shape factor, Lake total layer temperature, Leaf area index, high vegetation, Leaf area index, low vegetation, Potential evaporation, Runoff, Skin reservoir content, Skin temperature, Snow albedo, Snow cover, Snow density, Snow depth, Snow depth water equivalent, Snow evaporation, Snowfall, Snowmelt, Soil temperature level 1, Soil temperature level 2, Soil temperature level 3, Soil temperature level 4, Sub-surface runoff, Surface latent heat flux, Surface net solar radiation, Surface net thermal radiation, Surface pressure, Surface runoff, Surface sensible heat flux, Surface solar radiation downwards, Surface thermal radiation downwards, Temperature of snow layer, Total evaporation, Total precipitation, Volumetric soil water layer 1, Volumetric soil water layer 2, Volumetric soil water layer 3, Volumetric soil water layer 4 ",,ERA5,ERA5,,"ECMWF,Reanalysis,ERA5,CDS,Atmospheric,land,monthly,evolution",ATMOSPHERIC,proprietary,ERA5-Land monthly averaged data from 1950 to present,1950-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -ERA5_PL,"ERA5 is the fifth generation ECMWF reanalysis for the global climate and weather for the past 4 to 7 decades. Currently data is available from 1950, split into Climate Data Store entries for 1950-1978 (preliminary back extension) and from 1979 onwards (final release plus timely updates, this page). ERA5 replaces the ERA-Interim reanalysis. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using the laws of physics. This principle, called data assimilation, is based on the method used by numerical weather prediction centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. Reanalysis works in the same way, but at reduced resolution to allow for the provision of a dataset spanning back several decades. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. ERA5 provides hourly estimates for a large number of atmospheric, ocean-wave and land-surface quantities. An uncertainty estimate is sampled by an underlying 10-member ensemble at three-hourly intervals. Ensemble mean and spread have been pre-computed for convenience. Such uncertainty estimates are closely related to the information content of the available observing system which has evolved considerably over time. They also indicate flow-dependent sensitive areas. To facilitate many climate applications, monthly-mean averages have been pre-calculated too, though monthly means are not available for the ensemble mean and spread. ERA5 is updated daily with a latency of about 5 days. 
In case that serious flaws are detected in this early release (called ERA5T), this data could be different from the final release 2 to 3 months later. So far this has not been the case and when this does occur users will be notified. The data set presented here is a regridded subset of the full ERA5 data set on native resolution. It is online on spinning disk, which should ensure fast and easy access. It should satisfy the requirements for most common applications. An overview of all ERA5 datasets can be found in this article. Information on access to ERA5 data on native resolution is provided in these guidelines. Data has been regridded to a regular lat-lon grid of 0.25 degrees for the reanalysis and 0.5 degrees for the uncertainty estimate (0.5 and 1 degree respectively for ocean waves). There are four main sub sets: hourly and monthly products, both on pressure levels (upper air fields) and single levels (atmospheric, ocean-wave and land surface quantities). The present entry is ""ERA5 hourly data on pressure levels from 1979 to present"". Variables in the dataset/application are: Divergence, Fraction of cloud cover, Geopotential, Ozone mass mixing ratio, Potential vorticity, Relative humidity, Specific cloud ice water content, Specific cloud liquid water content, Specific humidity, Specific rain water content, Specific snow water content, Temperature, U-component of wind, V-component of wind, Vertical velocity, Vorticity (relative) ",,ERA5,ERA5,,"ECMWF,Reanalysis,ERA5,CDS,Atmospheric,land,sea,hourly,pressure,levels",ATMOSPHERIC,proprietary,ERA5 hourly data on pressure levels from 1940 to present,1940-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -ERA5_PL_MONTHLY,"ERA5 is the fifth generation ECMWF reanalysis for the global climate and weather for the past 8 decades. Data is available from 1940 onwards. ERA5 replaces the ERA-Interim reanalysis. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using the laws of physics. This principle, called data assimilation, is based on the method used by numerical weather prediction centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. Reanalysis works in the same way, but at reduced resolution to allow for the provision of a dataset spanning back several decades. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. ERA5 provides hourly estimates for a large number of atmospheric, ocean-wave and land-surface quantities. An uncertainty estimate is sampled by an underlying 10-member ensemble at three-hourly intervals. Ensemble mean and spread have been pre-computed for convenience. Such uncertainty estimates are closely related to the information content of the available observing system which has evolved considerably over time. They also indicate flow-dependent sensitive areas. To facilitate many climate applications, monthly-mean averages have been pre-calculated too, though monthly means are not available for the ensemble mean and spread. 
ERA5 is updated daily with a latency of about 5 days (monthly means are available around the 6th of each month). In case that serious flaws are detected in this early release (called ERA5T), this data could be different from the final release 2 to 3 months later. So far this has only been the case for the month September 2021, while it will also be the case for October, November and December 2021. For months prior to September 2021 the final release has always been equal to ERA5T, and the goal is to align the two again after December 2021. When this occurs, users are notified. The data set presented here is a regridded subset of the full ERA5 data set on native resolution. It is online on spinning disk, which should ensure fast and easy access. It should satisfy the requirements for most common applications. Data has been regridded to a regular lat-lon grid of 0.25 degrees for the reanalysis and 0.5 degrees for the uncertainty estimate (0.5 and 1 degree respectively for ocean waves). There are four main sub sets: hourly and monthly products, both on pressure levels (upper air fields) and single levels (atmospheric, ocean-wave and land surface quantities). ",,ERA5,ERA5,,"Climate,ECMWF,Reanalysis,ERA5,CDS,Atmospheric,land,sea,monthly,pressure,levels",ATMOSPHERIC,proprietary,ERA5 monthly averaged data on pressure levels from 1940 to present,1940-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-ERA5_SL,"ERA5 is the fifth generation ECMWF reanalysis for the global climate and weather for the past 8 decades. Data is available from 1940 onwards. ERA5 replaces the ERA-Interim reanalysis. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using the laws of physics. This principle, called data assimilation, is based on the method used by numerical weather prediction centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. Reanalysis works in the same way, but at reduced resolution to allow for the provision of a dataset spanning back several decades. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. ERA5 provides hourly estimates for a large number of atmospheric, ocean-wave and land-surface quantities. An uncertainty estimate is sampled by an underlying 10-member ensemble at three-hourly intervals. Ensemble mean and spread have been pre-computed for convenience. Such uncertainty estimates are closely related to the information content of the available observing system which has evolved considerably over time. They also indicate flow-dependent sensitive areas. To facilitate many climate applications, monthly-mean averages have been pre-calculated too, though monthly means are not available for the ensemble mean and spread. ERA5 is updated daily with a latency of about 5 days. 
In case that serious flaws are detected in this early release (called ERA5T), this data could be different from the final release 2 to 3 months later. In case that this occurs users are notified. The data set presented here is a regridded subset of the full ERA5 data set on native resolution. It is online on spinning disk, which should ensure fast and easy access. It should satisfy the requirements for most common applications. Data has been regridded to a regular lat-lon grid of 0.25 degrees for the reanalysis and 0.5 degrees for the uncertainty estimate (0.5 and 1 degree respectively for ocean waves). There are four main sub sets: hourly and monthly products, both on pressure levels (upper air fields) and single levels (atmospheric,ocean-wave and land surface quantities). ",,ERA5,ERA5,,"ECMWF,Reanalysis,ERA5,CDS,Atmospheric,land,sea,hourly,single,levels",ATMOSPHERIC,proprietary,ERA5 hourly data on single levels from 1940 to present,1940-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,available -ERA5_SL_MONTHLY,"ERA5 is the fifth generation ECMWF reanalysis for the global climate and weather for the past 8 decades. Data is available from 1940 onwards. ERA5 replaces the ERA-Interim reanalysis. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using the laws of physics. This principle, called data assimilation, is based on the method used by numerical weather prediction centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. Reanalysis works in the same way, but at reduced resolution to allow for the provision of a dataset spanning back several decades. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. ERA5 provides hourly estimates for a large number of atmospheric, ocean-wave and land-surface quantities. An uncertainty estimate is sampled by an underlying 10-member ensemble at three-hourly intervals. Ensemble mean and spread have been pre-computed for convenience. Such uncertainty estimates are closely related to the information content of the available observing system which has evolved considerably over time. They also indicate flow-dependent sensitive areas. To facilitate many climate applications, monthly-mean averages have been pre-calculated too, though monthly means are not available for the ensemble mean and spread. ERA5 is updated daily with a latency of about 5 days (monthly means are available around the 6th of each month). In case that serious flaws are detected in this early release (called ERA5T), this data could be different from the final release 2 to 3 months later. In case that this occurs users are notified. The data set presented here is a regridded subset of the full ERA5 data set on native resolution. It is online on spinning disk, which should ensure fast and easy access. It should satisfy the requirements for most common applications. Data has been regridded to a regular lat-lon grid of 0.25 degrees for the reanalysis and 0.5 degrees for the uncertainty estimate (0.5 and 1 degree respectively for ocean waves). 
There are four main sub sets: hourly and monthly products, both on pressure levels (upper air fields) and single levels (atmospheric, ocean-wave and land surface quantities). ",,ERA5,ERA5,,"Climate,ECMWF,Reanalysis,ERA5,CDS,Atmospheric,land,sea,monthly,single,levels",ATMOSPHERIC,proprietary,ERA5 monthly averaged data on single levels from 1940 to present,1940-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -FIRE_HISTORICAL,"This data set provides complete historical reconstruction of meteorological conditions favourable to the start, spread and sustainability of fires. The fire danger metrics provided are part of a vast dataset produced by the Copernicus Emergency Management Service for the European Forest Fire Information System (EFFIS). The European Forest Fire Information System incorporates the fire danger indices for three different models developed in Canada, United States and Australia. In this dataset the fire danger indices are calculated using weather forecast from historical simulations provided by ECMWF ERA5 reanalysis. ERA5 by combining model data and a vast set of quality controlled observations provides a globally complete and consistent data-set and is regarded as a good proxy for observed atmospheric conditions. The selected data records in this data set are regularly extended with time as ERA5 forcing data become available. This dataset is produced by ECMWF in its role of the computational centre for fire danger forecast of the CEMS, on behalf of the Joint Research Centre which is the managing entity of the service. Variables in the dataset/application are: Build-up index, Burning index, Danger rating, Drought code, Duff moisture code, Energy release component, Fine fuel moisture code, Fire daily severity index, Fire danger index, Fire weather index, Ignition component, Initial spread index, Keetch-Byram drought index, Spread component ",,CEMS,CEMS,,"ECMWF,EFFIS,fire,historical,ERA5,european,sustainability,CEMS,system",ATMOSPHERIC,proprietary,Fire danger indices historical data from the Copernicus Emergency Management Service,1979-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -GLACIERS_DIST_RANDOLPH,"A glacier is defined as a perennial mass of ice, and possibly firn and snow, originating on the land surface from the recrystallization of snow or other forms of solid precipitation and showing evidence of past or present flow. There are several types of glaciers such as glacierets, mountain glaciers, valley glaciers and ice fields, as well as ice caps. Some glacier tongues reach into lakes or the sea, and can develop floating ice tongues or ice shelves. Glacier changes are recognized as independent and high-confidence natural indicators of climate change. Past, current and future glacier changes affect global sea level, the regional water cycle and local hazards.\nThis dataset is a snapshot of global glacier outlines compiled from\nmaps, aerial photographs and satellite images mostly acquired in the period 2000-2010. ",,,INSITU,,"ECMWF,WGMS,INSITU,CDS,C3S,glacier,randolph,distribution,inventory",ATMOSPHERIC,proprietary,Glaciers distribution data from the Randolph Glacier Inventory for year 2000,2000-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -GLACIERS_ELEVATION_AND_MASS_CHANGE,This dataset provides in situ and remote sensing derived glacier changes from individual glaciers globally. 
The dataset represents the latest homogenized state-of-the-art glacier change data collected by scientists and the national correspondents of each country as provided to the World Glacier Monitoring Service (WGMS). The product is an extract of the WGMS Fluctuations of Glaciers (FoG) database and consists of two data sets providing time series of glacier changes: glacier elevation change series from the geodetic method and glacier mass-balance series from the glaciological method ,,INSITU,INSITU,,"ECMWF,WGMS,INSITU,CDS,C3S,glacier,elevation,mass,change",ATMOSPHERIC,proprietary,Glaciers elevation and mass change data from 1850 to present from the Fluctuations of Glaciers Database,1850-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-GLOFAS_FORECAST,"This dataset contains global modelled daily data of river discharge forced with meteorological forecasts. The data was produced by the Global Flood Awareness System (GloFAS), which is part of the Copernicus Emergency Management Service (CEMS). River discharge, or river flow as it is also known, is defined as the amount of water that flows through a river section at a given time. \nThis dataset is simulated by forcing a hydrological modelling chain with input from ECMWF ensemble forecast combined with the ECMWF extended-range ensemble forecast up to 30 days. Data availability for the GloFAS forecast is from 2019-11-05 up to near real time.\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours\n\nVariables in the dataset/application are:\nUpstream area ",,CEMS,CEMS,,"ECMWF,CEMS,GloFAS,forecast,river,discharge",ATMOSPHERIC,proprietary,River discharge and related forecasted data by the Global Flood Awareness System,2019-11-05T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-GLOFAS_HISTORICAL,"This dataset contains global modelled daily data of river discharge from the Global Flood Awareness System (GloFAS), which is part of the Copernicus Emergency Management Service (CEMS). River discharge, or river flow as it is also known, is defined as the amount of water that flows through a river section at a given time. \nThis dataset is simulated by forcing a hydrological modelling chain with inputs from a global reanalysis. Data availability for the historical simulation is from 1979-01-01 up to near real time.\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours\n\nVariables in the dataset/application are:\nUpstream area ",,CEMS,CEMS,,"ECMWF,CEMS,GloFAS,historical,river,discharge",ATMOSPHERIC,proprietary,River discharge and related historical data from the Global Flood Awareness System,1991-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-GLOFAS_REFORECAST,"This dataset provides a gridded modelled time series of river discharge, forced with medium- to sub-seasonal range meteorological reforecasts. The data is a consistent representation of a key hydrological variable across the global domain, and is a product of the Global Flood Awareness System (GloFAS). It is accompanied by an ancillary file for interpretation that provides the upstream area (see the related variables table and associated link in the documentation).\nThis dataset was produced by forcing a hydrological modelling chain with input from the European Centre for Medium-range Weather Forecasts (ECMWF) 11-member ensemble ECMWF-ENS reforecasts. Reforecasts are forecasts run over past dates, and those presented here are used for providing a suitably long time period against which the skill of the 30-day real-time operational forecast can be assessed. 
The reforecasts are initialised twice weekly with lead times up to 46 days, at 24-hour steps for 20 years in the recent history. For more specific information on how the reforecast dataset is produced we refer to the documentation.\nCompanion datasets, also available through the Climate Data Store (CDS), are the operational forecasts, historical simulations that can be used to derive the hydrological climatology, and seasonal forecasts and reforecasts for users looking for long term forecasts. For users looking specifically for European hydrological data, we refer to the European Flood Awareness System (EFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours\n\nVariables in the dataset/application are:\nUpstream area ",,CEMS,CEMS,,"ECMWF,CEMS,GloFAS,reforecast,river,discharge",ATMOSPHERIC,proprietary,Reforecasts of river discharge and related data by the Global Flood Awareness System,1999-01-03T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-GLOFAS_SEASONAL,"This dataset provides a gridded modelled time series of river discharge, forced with seasonal range meteorological forecasts. The data is a consistent representation of a key hydrological variable across the global domain, and is a product of the Global Flood Awareness System (GloFAS). It is accompanied by an ancillary file for interpretation that provides the upstream area (see the related variables table and associated link in the documentation).\nThis dataset was produced by forcing the LISFLOOD hydrological model at a 0.1° (~11 km at the equator) resolution with downscaled runoff forecasts from the European Centre for Medium-range Weather Forecasts (ECMWF) 51-member ensemble seasonal forecasting system, SEAS5. The forecasts are initialised on the first of each month with a 24-hourly time step, and cover 123 days.\nCompanion datasets, also available through the Climate Data Store (CDS), are the operational forecasts, historical simulations that can be used to derive the hydrological climatology, and medium-range and seasonal reforecasts. The latter dataset enables research, local skill assessment and post-processing of the seasonal forecasts. In addition, the seasonal reforecasts are also used to derive a specific range dependent climatology for the seasonal system. For users looking specifically for European hydrological data, we refer to the European Flood Awareness System (EFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours\n\nVariables in the dataset/application are:\nUpstream area ",,CEMS,CEMS,,"ECMWF,CEMS,GloFAS,seasonal,forecast,river,discharge",ATMOSPHERIC,proprietary,Seasonal forecasts of river discharge and related data by the Global Flood Awareness System,2020-01-12T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-GLOFAS_SEASONAL_REFORECAST,"This dataset provides a gridded modelled time series of river discharge forced with seasonal range meteorological reforecasts. The data is a consistent representation of a key hydrological variable across the global domain, and is a product of the Global Flood Awareness System (GloFAS). 
It is accompanied by an ancillary file for interpretation that provides the upstream area (see the related variables table and associated link in the documentation).\nThis dataset was produced by forcing a hydrological modelling chain with input from the European Centre for Medium-range Weather Forecasts (ECMWF) ensemble seasonal forecasting system, SEAS5. For the period of 1981 to 2016 the number of ensemble members is 25, whilst reforecasts produced for 2017 onwards use a 51-member ensemble. Reforecasts are forecasts run over past dates, with those presented here used for producing the seasonal river discharge thresholds. In addition, they provide a suitably long time period against which the skill of the seasonal forecast can be assessed. The reforecasts are initialised monthly and run for 123 days, with a 24-hourly time step. For more specific information on how the seasonal reforecast dataset is produced we refer to the documentation.\nCompanion datasets, also available through the Climate Data Store (CDS), include the seasonal forecasts, for which the dataset provided here can be useful for local skill assessment and post-processing. For users looking for shorter term forecasts there are also medium-range forecasts and reforecasts available, as well as historical simulations that can be used to derive the hydrological climatology. For users looking specifically for European hydrological data, we refer to the European Flood Awareness System (EFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours\n\nVariables in the dataset/application are:\nUpstream area ",,CEMS,CEMS,,"ECMWF,CEMS,GloFAS,seasonal,forecast,river,discharge",ATMOSPHERIC,proprietary,Seasonal reforecasts of river discharge and related data from the Global Flood Awareness System,2020-01-12T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-L57_REFLECTANCE,"Landsat 5,7,8 L2A data (old format) distributed by Theia (2014 to 2017-03-20) using MUSCATE prototype, Lambert 93 projection. ","OLI,TIRS",LANDSAT,"L5,L7,L8",L2A,"OLI,TIRS,LANDSAT,L5,L7,L8,L2,L2A,MUSCATE",OPTICAL,proprietary,"Landsat 5,7,8 Level-2A",2014-01-01T00:00:00Z,,,,,,,,,,,,,,,,,available,,,
-L8_OLI_TIRS_C1L1,Landsat 8 Operational Land Imager and Thermal Infrared Sensor Collection 1 Level-1 products. Details at https://landsat.usgs.gov/sites/default/files/documents/LSDS-1656_Landsat_Level-1_Product_Collection_Definition.pdf ,"OLI,TIRS",LANDSAT8,L8,L1,"OLI,TIRS,LANDSAT,LANDSAT8,L8,L1,C1,COLLECTION1",OPTICAL,proprietary,Landsat 8 Level-1,2013-02-11T00:00:00Z,,available,,,,,available,,available,,,,available,,,,,,,
-L8_REFLECTANCE,"Landsat 8 L2A data distributed by Theia since 2017-03-20 using operational version of MUSCATE, UTM projection, and tiled using Sentinel-2 tiles. ","OLI,TIRS",LANDSAT8,L8,L2A,"OLI,TIRS,LANDSAT,LANDSAT8,L8,L2,L2A,MUSCATE",OPTICAL,proprietary,Landsat 8 Level-2A,2013-02-11T00:00:00Z,,,,,,,,,,,,,,,,,available,,,
-LANDSAT_C2L1,The Landsat Level-1 product is a top of atmosphere product distributed as scaled and calibrated digital numbers. 
,"OLI,TIRS",LANDSAT,"L1,L2,L3,L4,L5,L6,L7,L8",L1,"OLI,TIRS,LANDSAT,L1,L2,L3,L4,L5,L6,L7,L8,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-1 Product,1972-07-25T00:00:00Z,available,,,,,,,,,,,,,,available,,,available,available,
-LANDSAT_C2L2,Collection 2 Landsat OLI/TIRS Level-2 Science Products (L2SP) include Surface Reflectance and Surface Temperature scene-based products. ,"OLI,TIRS",LANDSAT,"L8,L9",L2,"OLI,TIRS,LANDSAT,L8,L9,L2,C2,COLLECTION2",OPTICAL,proprietary,Landsat OLI and TIRS Collection 2 Level-2 Science Products 30-meter multispectral data.,2013-02-11T00:00:00Z,,,,,,,,,,,,,,,available,,,available,,
-LANDSAT_C2L2ALB_BT,"The Landsat Top of Atmosphere Brightness Temperature (BT) product is a top of atmosphere product with radiance calculated 'at-sensor', not atmospherically corrected, and expressed in units of Kelvin. ","OLI,TIRS",LANDSAT,"L4,L5,L7,L8",L2,"OLI,TIRS,LANDSAT,L4,L5,L7,L8,L2,BT,Brightness,Temperature,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-2 Albers Top of Atmosphere Brightness Temperature (BT) Product,1982-08-22T00:00:00Z,,,,,,,,,,,,,,,,,,,available,
-LANDSAT_C2L2ALB_SR,The Landsat Surface Reflectance (SR) product measures the fraction of incoming solar radiation that is reflected from Earth's surface to the Landsat sensor. ,"OLI,TIRS",LANDSAT,"L4,L5,L7,L8",L2,"OLI,TIRS,LANDSAT,L4,L5,L7,L8,L2,L2ALB,SR,Surface,Reflectance,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-2 Albers Surface Reflectance (SR) Product,1982-08-22T00:00:00Z,,,,,,,,,,,,,,,,,,,available,
-LANDSAT_C2L2ALB_ST,The Landsat Surface Temperature (ST) product represents the temperature of the Earth's surface in Kelvin (K). ,"OLI,TIRS",LANDSAT,"L4,L5,L7,L8",L2,"OLI,TIRS,LANDSAT,L4,L5,L7,L8,L2,L2ALB,Surface,Temperature,ST,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-2 Albers Surface Temperature (ST) Product,1982-08-22T00:00:00Z,,,,,,,,,,,,,,,,,,,available,
-LANDSAT_C2L2ALB_TA,The Landsat Top of Atmosphere (TA) Reflectance product applies per pixel angle band corrections to the Level-1 radiance product. ,"OLI,TIRS",LANDSAT,"L4,L5,L7,L8",L2,"OLI,TIRS,LANDSAT,L4,L5,L7,L8,L2,L2ALB,TA,Top,Atmosphere,Reflectance,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-2 Albers Top of Atmosphere (TA) Reflectance Product,1982-08-22T00:00:00Z,,,,,,,,,,,,,,,,,,,available,
-LANDSAT_C2L2_SR,The Landsat Surface Reflectance (SR) product measures the fraction of incoming solar radiation that is reflected from Earth's surface to the Landsat sensor. ,"OLI,TIRS",LANDSAT,"L4,L5,L7,L8",L2,"OLI,TIRS,LANDSAT,L4,L5,L7,L8,L2,SR,surface,reflectance,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-2 UTM Surface Reflectance (SR) Product,1982-08-22T00:00:00Z,,,,,,,,,,,,,,,,,,,available,
-LANDSAT_C2L2_ST,The Landsat Surface Temperature (ST) product represents the temperature of the Earth's surface in Kelvin (K). ,"OLI,TIRS",LANDSAT,"L4,L5,L7,L8",L2,"OLI,TIRS,LANDSAT,L4,L5,L7,L8,L2,ST,surface,temperature,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-2 UTM Surface Temperature (ST) Product,1982-08-22T00:00:00Z,,,,,,,,,,,,,,,,,,,available,
-MODIS_MCD43A4,"The MODerate-resolution Imaging Spectroradiometer (MODIS) Reflectance product MCD43A4 provides 500 meter reflectance data adjusted using a bidirectional reflectance distribution function (BRDF) to model the values as if they were taken from nadir view. The MCD43A4 product contains 16 days of data provided in a level-3 gridded data set in Sinusoidal projection. 
Both Terra and Aqua data are used in the generation of this product, providing the highest probability for quality assurance input data. It is designated with a shortname beginning with MCD, which is used to refer to 'combined' products, those comprised of data using both Terra and Aqua. ",MODIS,Terra+Aqua,EOS AM-1+PM-1,L3,"MODIS,Terra,Aqua,EOS,AM-1+PM-1,L3,MCD43A4",OPTICAL,proprietary,MODIS MCD43A4,2000-03-05T00:00:00Z,available,available,,,,,,,,,,,,,available,,,,,
-NAIP,"The National Agriculture Imagery Program (NAIP) acquires aerial imagery during the agricultural growing seasons in the continental U.S. This ""leaf-on"" imagery typically ranges from 60 centimeters to 100 centimeters in resolution and is available from the naip-analytic Amazon S3 bucket as 4-band (RGB + NIR) imagery in MRF format. NAIP data is delivered at the state level; every year, a number of states receive updates, with an overall update cycle of two or three years. The tiling format of NAIP imagery is based on a 3.75' x 3.75' quarter quadrangle with a 300 meter buffer on all four sides. NAIP imagery is formatted to the UTM coordinate system using NAD83. NAIP imagery may contain as much as 10% cloud cover per tile. ",film and digital cameras,National Agriculture Imagery Program,NAIP,N/A,"film,digital,cameras,Agriculture,NAIP",OPTICAL,proprietary,National Agriculture Imagery Program,2003-01-01T00:00:00Z,available,available,,,,,,,,,,,,,available,,,,,
-NEMSAUTO_TCDC,Total cloud cover from the NOAA Environment Monitoring System (NEMS) automatic domain switch. NEMSAUTO is the automatic delivery of the highest resolution meteoblue model available for any requested period of time and location. The NEMS model family are improved NMM successors (operational since 2013). NEMS is a multi-scale model (used from global down to local domains) and significantly improves cloud-development and precipitation forecast. Note that Automatic domain switching is only supported for multi point queries. Support for polygons may follow later. ,,NEMSAUTO,NEMSAUTO,,"meteoblue,NEMS,NEMSAUTO,CLOUD,COVER,TOTAL,TCDC,DAILY,MEAN",ATMOSPHERIC,proprietary,NEMSAUTO Total Cloud Cover daily mean,1984-01-01T00:00:00Z,,,,,,,,,,,,available,,,,,,,,
-NEMSGLOBAL_TCDC,Total cloud cover from the NOAA Environment Monitoring System (NEMS) global model. NEMSGLOBAL has 30km spatial and 1h temporal resolutions and produces seamless datasets from 1984 to 7 days ahead. 
,,NEMSGLOBAL,NEMSGLOBAL,,"meteoblue,NEMS,NEMSGLOBAL,CLOUD,COVER,TOTAL,TCDC,DAILY,MEAN",ATMOSPHERIC,proprietary,NEMSGLOBAL Total Cloud Cover daily mean,1984-01-01T00:00:00Z,,,,,,,,,,,,available,,,,,,,, -OSO,An overview of OSO Land Cover data is given on https://www.theia-land.fr/en/ceslist/land-cover-sec/ and the specific description of OSO products is available on https://www.theia-land.fr/product/carte-doccupation-des-sols-de-la-france-metropolitaine/ ,,,,L3B,"L3B,OSO,land,cover",,proprietary,OSO Land Cover,2016-01-01T00:00:00Z,,,,,,,,,,,,,,,,,available,,, -PLD_BUNDLE,"Pleiades Bundle (Pan, XS)",PHR,PLEIADES,"P1A,P1B",PRIMARY,"PHR,PLEIADES,P1A,P1B,PRIMARY,PLD,BUNDLE,Pan,Xs",OPTICAL,proprietary,Pleiades Bundle,2011-12-17T00:00:00Z,,,,,,,,,,,,,,,,,available,,, -PLD_PAN,Pleiades Panchromatic (Pan),PHR,PLEIADES,"P1A,P1B",PRIMARY,"PHR,PLEIADES,P1A,P1B,PRIMARY,PLD,PAN,Panchromatic",OPTICAL,proprietary,Pleiades Panchromatic,2011-12-17T00:00:00Z,,,,,,,,,,,,,,,,,available,,, -PLD_PANSHARPENED,Pleiades Pansharpened (Pan+XS),PHR,PLEIADES,"P1A,P1B",PRIMARY,"PHR,PLEIADES,P1A,P1B,PRIMARY,PLD,PANSHARPENED,Pan,Xs",OPTICAL,proprietary,Pleiades Pansharpened,2011-12-17T00:00:00Z,,,,,,,,,,,,,,,,,available,,, -PLD_XS,Pleiades Multispectral (XS),PHR,PLEIADES,"P1A,P1B",PRIMARY,"PHR,PLEIADES,P1A,P1B,PRIMARY,PLD,XS,Multispectral",OPTICAL,proprietary,Pleiades Multispectral,2011-12-17T00:00:00Z,,,,,,,,,,,,,,,,,available,,, -S1_SAR_GRD,"Level-1 Ground Range Detected (GRD) products consist of focused SAR data that has been detected, multi-looked and projected to ground range using an Earth ellipsoid model. Phase information is lost. The resulting product has approximately square spatial resolution pixels and square pixel spacing with reduced speckle at the cost of worse spatial resolution. GRD products can be in one of three resolutions: | Full Resolution (FR), High Resolution (HR), Medium Resolution (MR). The resolution is dependent upon the amount of multi-looking performed. Level-1 GRD products are available in MR and HR for IW and EW modes, MR for WV mode and MR, HR and FR for SM mode. SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/data-formats/safe-specification ",SAR,SENTINEL1,"S1A,S1B",L1,"SAR,SENTINEL,SENTINEL1,S1,S1A,S1B,L1,GRD,SAFE",RADAR,proprietary,SENTINEL1 Level-1 Ground Range Detected,2014-04-03T00:00:00Z,available,available,,,available,available,,,,,,,available,available,available,available,,,,available -S1_SAR_OCN,"Level-2 OCN products include components for Ocean Swell spectra (OSW) providing continuity with ERS and ASAR WV and two new components: Ocean Wind Fields (OWI) and Surface Radial Velocities (RVL). The OSW is a two-dimensional ocean surface swell spectrum and includes an estimate of the wind speed and direction per swell spectrum. The OSW is generated from Stripmap and Wave modes only. For Stripmap mode, there are multiple spectra derived from internally generated Level-1 SLC images. For Wave mode, there is one spectrum per vignette. The OWI is a ground range gridded estimate of the surface wind speed and direction at 10 m above the surface derived from internally generated Level-1 GRD images of SM, IW or EW modes. The RVL is a ground range gridded difference between the measured Level-2 Doppler grid and the Level-1 calculated geometrical Doppler. 
SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/data-formats/safe-specification ",SAR,SENTINEL1,"S1A,S1B",L2,"SAR,SENTINEL,SENTINEL1,S1,S1A,S1B,L2,OCN,SAFE",RADAR,proprietary,SENTINEL1 Level-2 OCN,2014-04-03T00:00:00Z,,,,,available,available,,,,,,,available,available,,available,,,,available -S1_SAR_RAW,"The SAR Level-0 products consist of the sequence of Flexible Dynamic Block Adaptive Quantization (FDBAQ) compressed unfocused SAR raw data. For the data to be usable, it will need to be decompressed and processed using a SAR processor. SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/data-formats/safe-specification ",SAR,SENTINEL1,"S1A,S1B",L0,"SAR,SENTINEL,SENTINEL1,S1,S1A,S1B,L0,RAW,SAFE",RADAR,proprietary,SENTINEL1 SAR Level-0,2014-04-03T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,,available -S1_SAR_SLC,"Level-1 Single Look Complex (SLC) products consist of focused SAR data geo-referenced using orbit and attitude data from the satellite and provided in zero-Doppler slant-range geometry. The products include a single look in each dimension using the full transmit signal bandwidth and consist of complex samples preserving the phase information. SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/data-formats/safe-specification ",SAR,SENTINEL1,"S1A,S1B",L1,"SAR,SENTINEL,SENTINEL1,S1,S1A,S1B,L1,SLC,SAFE",RADAR,proprietary,SENTINEL1 Level-1 Single Look Complex,2014-04-03T00:00:00Z,,,,,available,available,,,,,,,available,available,,available,,,,available -S2_MSI_L1C,"The Level-1C product is composed of 100x100 km2 tiles (ortho-images in UTM/WGS84 projection). It results from using a Digital Elevation Model (DEM) to project the image in cartographic geometry. Per-pixel radiometric measurements are provided in Top Of Atmosphere (TOA) reflectances along with the parameters to transform them into radiances. Level-1C products are resampled with a constant Ground Sampling Distance (GSD) of 10, 20 and 60 meters depending on the native resolution of the different spectral bands. In Level-1C products, pixel coordinates refer to the upper left corner of the pixel. Level-1C products will additionally include Cloud Masks and ECMWF data (total column of ozone, total column of water vapour and mean sea level pressure). SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-2-msi/data-formats ",MSI,SENTINEL2,"S2A,S2B",L1,"MSI,SENTINEL,SENTINEL2,S2,S2A,S2B,L1,L1C,SAFE",OPTICAL,proprietary,SENTINEL2 Level-1C,2015-06-23T00:00:00Z,available,available,,,available,available,available,,available,,,,available,available,,available,,available,,available -S2_MSI_L2A,"The Level-2A product provides Bottom Of Atmosphere (BOA) reflectance images derived from the associated Level-1C products. Each Level-2A product is composed of 100x100 km2 tiles in cartographic geometry (UTM/WGS84 projection). SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-2-msi/data-formats ",MSI,SENTINEL2,"S2A,S2B",L2,"MSI,SENTINEL,SENTINEL2,S2,S2A,S2B,L2,L2A,SAFE",OPTICAL,proprietary,SENTINEL2 Level-2A,2018-03-26T00:00:00Z,available,available,,,available,available,available,,,,,,available,available,available,available,,,,available -S2_MSI_L2AP,"The Level-2A product provides Bottom Of Atmosphere (BOA) reflectance images derived from the associated Level-1C products. 
Each Level-2A product is composed of 100x100 km2 tiles in cartographic geometry (UTM/WGS84 projection). SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-2-msi/data-formats. Level-2AP products are the pilot version of the Level-2A products, generated by ESA until March 2018; after March 2018 the products are operational ",MSI,SENTINEL2,"S2A,S2B",L2,"MSI,SENTINEL,SENTINEL2,S2,S2A,S2B,L2,L2A,SAFE, pilot",OPTICAL,proprietary,SENTINEL2 Level-2A pilot,2017-05-23T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-S2_MSI_L2A_COG,"The Level-2A product provides Bottom Of Atmosphere (BOA) reflectance images derived from the associated Level-1C products. Each Level-2A product is composed of 100x100 km2 tiles in cartographic geometry (UTM/WGS84 projection). Product containing Cloud Optimized GeoTIFF images, without SAFE formatting. ",MSI,SENTINEL2,"S2A,S2B",L2,"MSI,SENTINEL,SENTINEL2,S2,S2A,S2B,L2,L2A,COG",OPTICAL,proprietary,SENTINEL2 Level-2A,2015-06-23T00:00:00Z,,,,,,,,available,,,,,,,,,,,,
-S2_MSI_L2A_MAJA,"The level 2A products correct the data for atmospheric effects and detect the clouds and their shadows using MAJA. MAJA runs in the MUSCATE processing center at CNES, in the framework of the THEIA land data center. Sentinel-2 level 1C data are downloaded from PEPS. The full description of the product format is available at https://theia.cnes.fr/atdistrib/documents/PSC-NT-411-0362-CNES_01_00_SENTINEL-2A_L2A_Products_Description.pdf ",MSI,SENTINEL2,"S2A,S2B",L2,"MSI,SENTINEL,SENTINEL2,S2,S2A,S2B,L2,L2A,MAJA",OPTICAL,proprietary,SENTINEL2 Level-2A,2015-06-23T00:00:00Z,,,,,,,,,,,,,,,,,available,,,
-S2_MSI_L2B_MAJA_SNOW,The Theia snow product is derived from Sentinel-2 L2A images generated by Theia. It indicates the snow presence or absence on the land surface every fifth day if there is no cloud. The product is distributed by Theia as a raster file (8 bits GeoTIFF) of 20 m resolution and a vector file (Shapefile polygons). More details about the snow products description are available at http://www.cesbio.ups-tlse.fr/multitemp/?page_id=10748#en ,MSI,SENTINEL2,"S2A,S2B",L2,"MSI,MAJA,SENTINEL,sentinel2,S2,S2A,S2B,L2,L2B,SNOW",OPTICAL,proprietary,SENTINEL2 snow product,2015-06-23T00:00:00Z,,,,,,,,,,,,,,,,,available,,,
-S2_MSI_L2B_MAJA_WATER,A description of the Land Water Quality data distributed by Theia is available at https://theia.cnes.fr/atdistrib/documents/THEIA-ST-411-0477-CNES_01-03_Format_Specification_of_OBS2CO_WaterColor_Products.pdf ,MSI,SENTINEL2,"S2A,S2B",L2,"MSI,MAJA,SENTINEL,sentinel2,S2,S2A,S2B,L2,L2B,WATER",OPTICAL,proprietary,SENTINEL2 L2B-WATER,2015-06-23T00:00:00Z,,,,,,,,,,,,,,,,,available,,,
-S2_MSI_L3A_WASP,"The Level-3A product provides a monthly synthesis of surface reflectances from Theia's L2A products. The synthesis is based on a weighted arithmetic mean of clear observations. The data processing is produced by WASP (Weighted Average Synthesis Processor), by MUSCATE data center at CNES, in the framework of THEIA data center. The full description of the product format is available at https://theia.cnes.fr/atdistrib/documents/THEIA-ST-411-0419-CNES_01-04_Format_Specification_of_MUSCATE_Level-3A_Products-signed.pdf ",MSI,SENTINEL2,"S2A,S2B",L3,"MSI,SENTINEL,sentinel2,S2,S2A,S2B,L3,L3A,WASP",OPTICAL,proprietary,SENTINEL2 Level-3A,2015-06-23T00:00:00Z,,,,,,,,,,,,,,,,,available,,,
-S3_EFR,"OLCI (Ocean and Land Colour Instrument) Full resolution: 300m at nadir. Level 1 products are calibrated Top Of Atmosphere radiance values at OLCI 21 spectral bands. 
Radiances are computed from the instrument digital counts by applying geo-referencing, radiometric processing (non-linearity correction, smear correction, dark offset correction, absolute gain calibration adjusted for gain evolution with time), and stray-light correction for straylight effects in OLCI camera's spectrometer and ground imager. Additionally, spatial resampling of OLCI pixels to the 'ideal' instrument grid, initial pixel classification, and annotation at tie points with auxiliary meteorological data and acquisition geometry are provided. The radiance products are accompanied by error estimate products, however the error values are currently not available. - All Sentinel-3 NRT products are available at pick-up point in less than 3h. - All Sentinel-3 Non Time Critical (NTC) products are available at pick-up point in less than 30 days. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. ",OLCI,SENTINEL3,"S3A,S3B",L1,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,EFR",OPTICAL,proprietary,SENTINEL3 EFR,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available -S3_ERR,"OLCI (Ocean and Land Colour Instrument) Reduced resolution: 1200m at nadir. All Sentinel-3 NRT products are available at pick-up point in less than 3h. Level 1 products are calibrated Top Of Atmosphere radiance values at OLCI 21 spectral bands. Radiances are computed from the instrument digital counts by applying geo-referencing, radiometric processing (non-linearity correction, smear correction, dark offset correction, absolute gain calibration adjusted for gain evolution with time), and stray-light correction for straylight effects in OLCI camera's spectrometer and ground imager. Additionally, spatial resampling of OLCI pixels to the 'ideal' instrument grid, initial pixel classification, and annotation at tie points with auxiliary meteorological data and acquisition geometry are provided. The radiance products are accompanied by error estimate products, however the error values are currently not available. - All Sentinel-3 NRT products are available at pick-up point in less than 3h - All Sentinel-3 Non Time Critical (NTC) products are available at pick-up point in less than 30 days Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. ",OLCI,SENTINEL3,"S3A,S3B",L1,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,ERR",OPTICAL,proprietary,SENTINEL3 ERR,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available -S3_LAN,LAN or SR_2_LAN___ (peps),SRAL,SENTINEL3,"S3A,S3B",L2,"SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,LAN",RADAR,proprietary,SENTINEL3 SRAL Level-2 LAN,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available -S3_OLCI_L2LFR,"The OLCI Level-2 Land Full Resolution (OL_2_LFR) products contain land and atmospheric geophysical products at Full resolution with a spatial sampling of approximately 300 m. The products are assumed to be computed in Near Real Time (NRT) (i.e. delivered to users less than 3 hours after acquisition), in Non-Time Critical (NTC) (i.e. within 1 month after acquisition) or in re-processed NTC. 
Details at https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-land ",OLCI,SENTINEL3,"S3A,S3B",L2,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2LFR,LFR",OPTICAL,proprietary,SENTINEL3 OLCI Level-2 Land Full Resolution,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available -S3_OLCI_L2LRR,"The OLCI Level-2 Land Reduced Resolution (OL_2_LRR) products contain land and atmospheric geophysical products at Reduced resolution with a spatial sampling of approximately 1.2 km. The products are assumed to be computed in Near Real Time (NRT) (i.e. delivered to users less than 3 hours after acquisition), in Non-Time Critical (NTC) (i.e. within 1 month after acquisition) or in re-processed NTC. Details at https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-land ",OLCI,SENTINEL3,"S3A,S3B",L2,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2LRR,LRR",OPTICAL,proprietary,SENTINEL3 OLCI Level-2 Land Reduced Resolution,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available -S3_OLCI_L2WFR,"The OLCI Level-2 Water Full Resolution (OL_2_WFR) products contain water and atmospheric geophysical products at Full resolution with a spatial sampling of approximately 300 m. The products are assumed to be computed in Near Real Time (NRT) (i.e. delivered to users less than 3 hours after acquisition), in Non-Time Critical (NTC) (i.e. within 1 month after acquisition) or in re-processed NTC. Details at https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-water ",OLCI,SENTINEL3,"S3A,S3B",L2,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2WFR,WFR",OPTICAL,proprietary,SENTINEL3 OLCI Level-2 Water Full Resolution,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available -S3_OLCI_L2WFR_BC003,"OLCI Level 2 Marine products provide spectral information on the colour of the oceans (water reflectances). These radiometric products are used to estimate geophysical parameters e.g. estimates of phytoplankton biomass through determining the Chlorophyll-a (Chl) concentration. In coastal areas, they also allow monitoring of the sediment load via the Total Suspended Matter (TSM) product. Full resolution products are at a nominal 300m resolution. This collection contains reprocessed data from baseline collection 003. Operational data can be found in the corresponding collection. Details at https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-water ",OLCI,SENTINEL3,"S3A,S3B",L2,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2WFR,WFR,REPROCESSED,BC003",OPTICAL,proprietary,SENTINEL3 OLCI Level-2 Water Full Resolution Reprocessed from BC003,2016-02-16T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -S3_OLCI_L2WRR,"The OLCI Level-2 Water Reduced Resolution (OL_2_WRR) products contain water and atmospheric geophysical products at Reduced resolution with a spatial sampling of approximately 1.2 km. The products are assumed to be computed in Near Real Time (NRT) (i.e. delivered to users less than 3 hours after acquisition), in Non-Time Critical (NTC) (i.e. within 1 month after acquisition) or in re-processed NTC. 
Details at https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-water ",OLCI,SENTINEL3,"S3A,S3B",L2,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2WRR,WRR",OPTICAL,proprietary,SENTINEL3 OLCI Level-2 Water Reduced Resolution,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available
-S3_OLCI_L2WRR_BC003,"OLCI Level 2 Marine products provide spectral information on the colour of the oceans (water reflectances). These radiometric products are used to estimate geophysical parameters e.g. estimates of phytoplankton biomass through determining the Chlorophyll-a (Chl) concentration. In coastal areas, they also allow monitoring of the sediment load via the Total Suspended Matter (TSM) product. Reduced resolution products are at a nominal 1km resolution. This collection contains reprocessed data from baseline collection 003. Operational data can be found in the corresponding collection. ",OLCI,SENTINEL3,"S3A,S3B",L2,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2WRR,WRR,REPROCESSED,BC003",OPTICAL,proprietary,SENTINEL3 OLCI Level-2 Water Reduced Resolution Reprocessed from BC003,2016-02-16T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-S3_OLCI_L4BALTIC,"Baltic Sea Surface Ocean Colour Plankton from Sentinel-3 OLCI L4 monthly observations. For the Baltic Sea Ocean Satellite Observations, the Italian National Research Council (CNR – Rome, Italy) is providing Bio-Geo_Chemical (BGC) regional datasets: * ''plankton'' with the phytoplankton chlorophyll concentration (CHL) evaluated via region-specific neural network (Brando et al. 2021) Upstreams: OLCI-S3A & S3B Temporal resolution: monthly Spatial resolution: 300 meters To find this product in the catalogue, use the search keyword ""OCEANCOLOUR_BAL_BGC_L4_NRT"". DOI (product): https://doi.org/10.48670/moi-00295 ",OLCI,SENTINEL3,"S3A,S3B",L4,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L4,BGC,CHL,BALTIC",OPTICAL,proprietary,SENTINEL3 OLCI Baltic Sea Surface Ocean Colour Plankton,2023-04-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-S3_RAC,Sentinel 3 OLCI products output during Radiometric Calibration mode ,OLCI,SENTINEL3,"S3A,S3B",L1,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,L2,RAC",OPTICAL,proprietary,SENTINEL3 RAC,2016-02-16T00:00:00Z,,,,,,,,,,,,,,,,available,,,,
-S3_SLSTR_L1RBT,"SLSTR Level-1 observation mode products consisting of full resolution, geolocated, co-located nadir and along track view, Top of Atmosphere (TOA) brightness temperatures (in the case of thermal IR channels) or radiances (in the case of visible, NIR and SWIR channels) from all SLSTR channels, and quality flags, pixel classification information and meteorological annotations ",SLSTR,SENTINEL3,"S3A,S3B",L1,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,L1RBT,RBT",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-1,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available
-S3_SLSTR_L1RBT_BC004,"SLSTR Level 1B Radiances and Brightness Temperatures (version BC004) - Sentinel 3 - Reprocessed The SLSTR level 1 products contain: the radiances of the 6 visible (VIS), Near Infra-Red (NIR) and Short Wave Infra-Red (SWIR) bands (on the A and B stripe grids); the Brightness Temperature (BT) for the 3 Thermal Infra-Red (TIR) bands; the BT for the 2 Fire (FIR) bands. Resolution: 1km at nadir (TIR), 500m (VIS). All are provided for both the oblique and nadir view. These measurements are accompanied with grid and time information, quality flags, error estimates and meteorological auxiliary data. 
Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. This collection contains reprocessed data from baseline collection 004. Operational data can be found in the corresponding collection. ",SLSTR,SENTINEL3,"S3A,S3B",L1,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,L1RBT,RBT,VIS,NIR,SWIR,BT,TIR,FIR,Reprocessed,BC004",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-1 RBT - Reprocessed from BC004,2018-05-09T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-S3_SLSTR_L2,"The SLSTR Level-2 products are generated in five different types: 1. SL_2_WCT, including the Sea Surface Temperature for single and dual view, for 2 or 3 channels (internal product only), 2. SL_2_WST, including the Level-2P Sea surface temperature (provided to the users), 3. SL_2_LST, including the Land Surface Temperature parameters (provided to the users), 4. SL_2_FRP, including the Fire Radiative Power parameters (provided to the users), 5. SL_2_AOD, including the Aerosol Optical Depth parameters (provided to the users). The Level-2 products are organized in packages composed of one manifest file and several measurement and annotation data files (between 2 and 21 files depending on the package). The manifest file is in XML format and gathers general information concerning product and processing. The measurement and annotation data files are in netCDF 4 format, and include dimensions, variables and associated attributes. Regarding the measurement files: one measurement file, providing the land surface temperature, associated uncertainties and other supporting fields, is included in the SL_2_LST packet. The annotation data files are generated from the annotation files included in the SL_1RBT package and their format is identical to the files in the Level-1 packet. The SL_2_LST packet contains 10 annotation files, providing the same parameters as in SL_2_WCT and, in addition, some vegetation parameters. ",SLSTR,SENTINEL3,"S3A,S3B",L2,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2FRP,FRP,L2WCT,WCT,L2WST,WST,L2AOD,AOD",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-2,2017-07-05T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-S3_SLSTR_L2AOD,"The Copernicus NRT S3 AOD processor quantifies the abundance of aerosol particles and monitors their global distribution and long-range transport, at the scale of 9.5 x 9.5 km2. All observations are made available in less than three hours from the SLSTR observation sensing time. It is only applicable during daytime. NOTE: The SLSTR L2 AOD product is generated by EUMETSAT in NRT only. An offline (NTC) AOD product is generated from SYN data by ESA, exploiting the synergy between the SLSTR and OLCI instruments. ",SLSTR,SENTINEL3,"S3A,S3B",L2,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2AOD,AOD",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-2 AOD,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,,,,available,,,,available
-S3_SLSTR_L2FRP,"The SLSTR Level-2 FRP product provides one measurement data file, FRP_in.nc, with Fire Radiative Power (FRP) values and associated parameters generated for each fire detected over land and projected on the SLSTR 1 km grid. The fire detection is based on a mixed thermal band, combining S7 radiometric measurements and, for pixels associated with a saturated value of S7 (i.e. above 311 K), F1 radiometric measurements. 
",SLSTR,SENTINEL3,"S3A,S3B",L2,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2FRP,FRP",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-2 FRP,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available -S3_SLSTR_L2LST,The SLSTR Level-2 LST product provides land surface parameters generated on the wide 1 km measurement grid. It contains measurement file with Land Surface Temperature (LST) values with associated parameters (LST parameters are computed and provided for each pixel (re-gridded or orphan) included in the 1 km measurement grid) ,SLSTR,SENTINEL3,"S3A,S3B",L2,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2LST,LST",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-2 LST,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,, -S3_SLSTR_L2WST,The SLSTR Level-2 WST product provides water surface parameters generated on the wide 1 km measurement grid. It contains measurement file with Water Surface Temperature (WST) values with associated parameters (WST parameters are computed and provided for each pixel (re-gridded or orphan) included in the 1 km measurement grid) ,SLSTR,SENTINEL3,"S3A,S3B",L2,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2WST,WST",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-2 WST,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available -S3_SLSTR_L2WST_BC003,"The SLSTR SST has a spatial resolution of 1km at nadir. Skin Sea Surface Temperature following the GHRSST L2P GDS2 format specification, see https://www.ghrsst.org/ . Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. This collection contains reprocessed data from baseline collection 003. Operational data can be found in the corresponding collection. ",SLSTR,SENTINEL3,"S3A,S3B",L2,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2WST,WST,REPROCESSED,BC003",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-2 WST Reprocessed from BC003,2016-04-18T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -S3_SRA,"SRAL Level 1B: Complex echoes (In-phase (I) and Quadrature (Q)) for the Low Resolution Mode (LRM) and/or Synthetic Aperture Radar (SAR) mode both for C Band and Ku band. When the altimeter is in SAR mode, this product also contains the so-called Pseudo LRM (PLRM) echoes. - All Sentinel-3 Near Real Time (NRT) products are available at pick-up point in less than 3h. - All Sentinel-3 Non Time Critical (NTC) products are available at pick-up point in less than 30 days. - All Sentinel-3 Short Time Critical (STC) products are available at pick-up point in less than 48 hours. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. ",SRAL,SENTINEL3,"S3A,S3B",L1,"SRA,SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L1",RADAR,proprietary,SENTINEL3 SRAL Level-1,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available -S3_SRA_1A_BC004,"SRAL Level 1A Unpacked L0 Complex Echoes (version BC004) - Sentinel-3 - Reprocessed Fundamental science and engineering product development supporting operational users. This product is most relevant to SAR processing specialists allowing fundamental studies on SAR processing such as Doppler beam formation and for calibration studies using ground-based Transponders. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. This collection contains reprocessed data from baseline collection 004. Operational data can be found in the corresponding collection. 
,SRAL,SENTINEL3,"S3A,S3B",L1A,"SRA,SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,L1A,REPROCESSED,BC004",RADAR,proprietary,SENTINEL3 SRAL Level-1A Unpacked - Reprocessed from BC004,2016-03-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-S3_SRA_1B_BC004,"SRAL Level 1B (version BC004) - Sentinel-3 - Reprocessed SRAL Level 1B: Complex echoes (In-phase (I) and Quadrature (Q)) for the Low Resolution Mode (LRM) and/or Synthetic Aperture Radar (SAR) mode both for C Band and Ku band. When the altimeter is in SAR mode, this product also contains the so-called Pseudo LRM (PLRM) echoes. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. This collection contains reprocessed data from baseline collection 004. Operational data can be found in the corresponding collection. ",SRAL,SENTINEL3,"S3A,S3B",L1B,"SRA,SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,L1B,REPROCESSED,BC004",RADAR,proprietary,SENTINEL3 SRAL Level-1B - Reprocessed from BC004,2016-03-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-S3_SRA_A,"A Level 1A SRAL product contains one ""measurement data file"" containing the L1A measurements parameters: ECHO_SAR_Ku: L1A Tracking measurements (sorted and calibrated) in SAR mode - Ku-band (80-Hz) ECHO_PLRM: L1A Tracking measurements (sorted and calibrated) in pseudo-LRM mode - Ku and C bands (80-Hz) ",SRAL,SENTINEL3,"S3A,S3B",L1,"SRA,SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L1",RADAR,proprietary,SENTINEL3 SRAL Level-1 SRA_A,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available
-S3_SRA_BS,"A Level 1B-S SRAL product contains one ""measurement data file"" containing the L1b measurements parameters: ECHO_SAR_Ku : L1b Tracking measurements in SAR mode - Ku band (20-Hz) as defined in the L1b MEAS product completed with SAR expert information ECHO_PLRM : L1b Tracking measurements in pseudo-LRM mode - Ku and C bands (20-Hz) as defined in the L1b MEAS product ",SRAL,SENTINEL3,"S3A,S3B",L1,"SRA,SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L1",RADAR,proprietary,SENTINEL3 SRAL Level-1 SRA_BS,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available
-S3_SRA_BS_BC004,"SRAL Level 1B Stack Echoes (version BC004) - Sentinel-3 - Reprocessed SRAL Level 1B: Complex echoes (In-phase (I) and Quadrature (Q)) for the Low Resolution Mode (LRM) and/or Synthetic Aperture Radar (SAR) mode both for C Band and Ku band. When the altimeter is in SAR mode, this product also contains the so-called Pseudo LRM (PLRM) echoes. Complex (In-phase and Quadrature) echoes (I's and Q's) after slant/Doppler range correction. This product is most relevant to geophysical retrieval algorithm developers (over ocean, land and ice surfaces), surface characterisation studies (e.g. impact of sea state bias, wave directional effects etc) and Quality Control systems. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. This collection contains reprocessed data from baseline collection 004. Operational data can be found in the corresponding collection. ",SRAL,SENTINEL3,"S3A,S3B",L1B,"SRA,SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,L1B,REPROCESSED,STACK,ECHOES,BC004",RADAR,proprietary,SENTINEL3 SRAL Level-1B Stack Echoes - Reprocessed from BC004,2016-03-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available
-S3_SY_AOD,"The Level-2 SYN AOD product (SY_2_AOD) is produced by a dedicated processor including the whole SYN L1 processing module and a global synergy level 2 processing module retrieving, over land and sea, aerosol optical thickness. 
The resolution of this product is wider than classic S3 products, as the datasets are provided at a 4.5 km² resolution ",SYNERGY,SENTINEL3,"S3A,S3B",L2,"SYNERGY,SY,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,AOD","OPTICAL,RADAR",proprietary,SENTINEL3 SYNERGY Level-2 AOD,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,, -S3_SY_SYN,"The Level-2 SYN product (SY_2_SYN) is produced by the Synergy Level-1/2 SDR software and contains surface reflectance and aerosol parameters over land. All measurement datasets are provided on the OLCI image grid, similar to the one included in the OLCI L1b product. Some sub-sampled annotations and atmospheric datasets are provided on the OLCI tie-points grid. Several associated variables are also provided in annotation data files. ",SYNERGY,SENTINEL3,"S3A,S3B",L2,"SYNERGY,SY,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,SYN","OPTICAL,RADAR",proprietary,SENTINEL3 SYNERGY Level-2 SYN,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available -S3_SY_V10,"The Level-2 VG1 and V10 SYN products (SY_2_VG1 and SY_2_V10 respectively) are produced by the SYNERGY Level-2 processor and contain a 1 km VEGETATION-like product, 1 and 10 days synthesis surface reflectances and NDVI. The product grid and the four spectral bands are similar to the SYN Level-2 VGP product. ",SYNERGY,SENTINEL3,"S3A,S3B",LEVEL-2W,"SYNERGY,SY,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,V10","OPTICAL,RADAR",proprietary,SENTINEL3 SYNERGY Level-2 V10,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,, -S3_SY_VG1,"The Level-2 VG1 and V10 SYN products (SY_2_VG1 and SY_2_V10 respectively) are produced by the SYNERGY Level-2 processor and contain a 1 km VEGETATION-like product, 1 and 10 days synthesis surface reflectances and NDVI. The product grid and the four spectral bands are similar to the SYN Level-2 VGP product. ",SYNERGY,SENTINEL3,"S3A,S3B",LEVEL-2,"SYNERGY,SY,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,VG1","OPTICAL,RADAR",proprietary,SENTINEL3 SYNERGY Level-2 VG1,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,, -S3_SY_VGP,"The Level-2 VGP SYN product (SY_2_VGP) is produced by the Global Synergy Level-1/2 software and contains 1 km VEGETATION-like product TOA reflectances. The ""1 km VEGETATION-like product"" label means that measurements are provided on a regular latitude-longitude grid, with an equatorial sampling distance of approximately 1 km. This product is restricted in longitude, including only filled ones. ",SYNERGY,SENTINEL3,"S3A,S3B",LEVEL-2,"SYNERGY,SY,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,VGP","OPTICAL,RADAR",proprietary,SENTINEL3 SYNERGY Level-2 VGP,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,, -S3_WAT,"The products contain the typical altimetry measurements, like the altimeter range, the sea surface height, the wind speed, significant wave height and all required geophysical corrections and related flags. Also the sea ice freeboard measurement is included. The measurements in the standard data file provide the measurements in low (1 Hz = approx. 7 km) and high resolution (20 Hz = approx. 300 m), in LRM mode or in SAR mode, for both C-band and Ku band. The SAR mode is the default mode. The reduced measurement data file contains 1 Hz measurements only. The enhanced measurement data file also contains the waveforms and associated parameters and the pseudo LRM measurements when in SAR mode. 
This product contains the following datasets: Sea Level Global(NRT) (PDS_MG3_CORE_14_GLONRT), Sea Level Global Reduced(NRT)(PDS_MG3_CORE_14_GLONRT_RD), Sea Level Global Standard(NRT) (PDS_MG3_CORE_14_GLONRT_SD), Sea Level Global Enhanced(NRT) (PDS_MG3_CORE_14_GLONRT_EN) - All Sentinel-3 NRT products are available at pick-up point in less than 3h. - All Sentinel-3 Non Time Critical (NTC) products are available at pick-up point in less than 30 days. - All Sentinel-3 Short Time Critical (STC) products are available at pick-up point in less than 48 hours. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. ",SRAL,SENTINEL3,"S3A,S3B",L2,"SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,WAT",RADAR,proprietary,SENTINEL3 SRAL Level-2 WAT,2016-02-16T00:00:00Z,,,,,available,available,,,,,,,available,,,available,,,,available -S3_WAT_BC004,"The products contain the typical altimetry measurements, like the altimeter range, the sea surface height, the wind speed, significant wave height and all required geophysical corrections and related flags. Also the sea ice freeboard measurement is included. The measurements in the standard data file provide the measurements in low (1 Hz = approx. 7 km) and high resolution (20 Hz = approx. 300 m), in LRM mode or in SAR mode, for both C-band and Ku band. The SAR mode is the default mode. The reduced measurement data file contains 1 Hz measurements only. The enhanced measurement data file also contains the waveforms and associated parameters and the pseudo LRM measurements when in SAR mode. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. This collection contains reprocessed data from baseline collection 004. Operational data can be found in the corresponding collection. ",SRAL,SENTINEL3,"S3A,S3B",L2,"SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,WAT,REPROCESSED,BC004",RADAR,proprietary,SRAL Level 2 Altimetry Global - Reprocessed from BC004,2016-03-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -S5P_L1B2_IR_ALL,"Solar irradiance spectra for all bands (UV1-6 and SWIR). The TROPOMI instrument is a space-borne, nadir-viewing, imaging spectrometer covering wavelength bands between the ultraviolet and the shortwave infrared. The instrument, the single payload of the Sentinel-5P spacecraft, uses passive remote sensing techniques to attain its objective by measuring, at the Top Of Atmosphere (TOA), the solar radiation reflected by and radiated from the earth. The instrument operates in a push-broom configuration (non-scanning), with a swath width of ~2600 km on the Earth's surface. The typical pixel size (near nadir) will be 7x3.5 km2 for all spectral bands, with the exception of the UV1 band (7x28 km2) and SWIR bands (7x7 km2). ",TROPOMI,SENTINEL5P,S5P,"L1B, L2","SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,IR,SIR,SWIR,Irradiances,UVN",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B and Level 2 Irradiances for the SWIR and UVN bands,2017-10-13T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -S5P_L1B_IR_SIR,"Solar irradiance spectra for the SWIR bands (band 7 and band 8). TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. 
Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,IR,SIR,SWIR,Irradiances",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Irradiances for the SWIR bands,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,,,,,,,, -S5P_L1B_IR_UVN,"Solar irradiance spectra for the UVN bands (band 1 through band 6). TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,IR,UVN,Irradiances",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Irradiances for the UVN bands,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,,,,,,,, -S5P_L1B_RA_BD1,"Sentinel-5 Precursor Level 1B Radiances for spectral band 1. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD1,BAND1,B01",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 1,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L1B_RA_BD2,"Sentinel-5 Precursor Level 1B Radiances for spectral band 2. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. 
Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD2,BAND2,B02",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 2,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L1B_RA_BD3,"Sentinel-5 Precursor Level 1B Radiances for spectral band 3. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD3,BAND3,B03",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 3,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L1B_RA_BD4,"Sentinel-5 Precursor Level 1B Radiances for spectral band 4. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD4,BAND4,B04",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 4,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L1B_RA_BD5,"Sentinel-5 Precursor Level 1B Radiances for spectral band 5. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. 
Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD5,BAND5,B05",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 5,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L1B_RA_BD6,"Sentinel-5 Precursor Level 1B Radiances for spectral band 6. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD6,BAND6,B06",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 6,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L1B_RA_BD7,"Sentinel-5 Precursor Level 1B Radiances for spectral band 7. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD7,BAND7,B07",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 7,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L1B_RA_BD8,"Sentinel-5 Precursor Level 1B Radiances for spectral band 8. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. 
Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD8,BAND8,B08",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 8,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_AER_AI,"TROPOMI aerosol index is referred to as the Ultraviolet Aerosol Index (UVAI). The relatively simple calculation of the Aerosol Index is based on wavelength dependent changes in Rayleigh scattering in the UV spectral range where ozone absorption is very small. UVAI can also be calculated in the presence of clouds so that daily, global coverage is possible. This is ideal for tracking the evolution of episodic aerosol plumes from dust outbreaks, volcanic ash, and biomass burning. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,AER,AI,Ultraviolet,Aerosol,Index",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Ultraviolet Aerosol Index,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_AER_LH,"The TROPOMI Aerosol Layer Height product focuses on retrieval of vertically localised aerosol layers in the free troposphere, such as desert dust, biomass burning aerosol, or volcanic ash plumes. The height of such layers is retrieved for cloud-free conditions. Height information for aerosols in the free troposphere is particularly important for aviation safety. Scientific applications include radiative forcing studies, long-range transport modelling and studies of cloud formation processes. Aerosol height information also helps to interpret the UV Aerosol Index (UVAI) in terms of aerosol absorption as the index is strongly height-dependent. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,AER,LH,Aerosol,Layer,Height",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Aerosol Layer Height,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_CH4,"Methane (CH4) is, after carbon dioxide (CO2), the most important contributor to the anthropogenically enhanced greenhouse effect. Roughly three-quarters of methane emissions are anthropogenic and as such it is important to continue the record of satellite-based measurements. TROPOMI aims at providing CH4 column concentrations with high sensitivity to the Earth's surface, good spatio/temporal coverage, and sufficient accuracy to facilitate inverse modelling of sources and sinks. The output product consists of the retrieved methane column and a row vector referred to as the column averaging kernel A. The column averaging kernel describes how the retrieved column relates to the true profile and should be used in validation exercises (when possible) or use of the product in source/sink inverse modelling. The output product also contains altitude levels of the layer interfaces to which the column averaging kernel corresponds. Additional output for Level-2 data products: viewing geometry, precision of retrieved methane, residuals of the fit, quality flags (cloudiness, terrain roughness etc.) and retrieved albedo and aerosol properties. The latter properties are required for a posteriori filtering and for estimation of total retrieval error. 
The Sentinel-5 Precursor mission flies in loose formation (about 3.5 - 5 minutes behind) with the S-NPP (SUOMI-National Polar-orbiting Partnership) mission to use VIIRS (Visible Infrared Imaging Radiometer Suite) cloud information to select cloud free TROPOMI pixels for high quality methane retrieval. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,CH4,Methane",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Methane,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_CLOUD,"The TROPOMI instrument, single payload onboard Sentinel-5 Precursor, retrieves operationally the most important quantities for cloud correction of satellite trace gas retrievals: cloud fraction, cloud optical thickness (albedo), and cloud-top pressure (height). Cloud parameters from TROPOMI are not only used for enhancing the accuracy of trace gas retrievals, but also to extend the satellite data record of cloud information derived from oxygen A-band measurements initiated with GOME. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,CLOUD",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Cloud,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_CO,"The TROPOMI instrument, single payload onboard Sentinel-5 Precursor, retrieves the CO global abundance exploiting clear-sky and cloudy-sky Earth radiance measurements in the 2.3 µm spectral range of the shortwave infrared (SWIR) part of the solar spectrum. TROPOMI clear sky observations provide CO total columns with sensitivity to the tropospheric boundary layer. For cloudy atmospheres, the column sensitivity changes according to the light path. The TROPOMI CO retrieval uses the same method employed by SCIAMACHY. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,CO,Carbon,Monoxide",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Carbon Monoxide,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_HCHO,"Formaldehyde is an intermediate gas in almost all oxidation chains of Non-Methane Volatile Organic Compounds (NMVOC), leading eventually to CO2. NMVOCs are, together with NOx, CO and CH4, among the most important precursors of tropospheric O3. The major HCHO source in the remote atmosphere is CH4 oxidation. Over the continents, the oxidation of higher NMVOCs emitted from vegetation, fires, traffic and industrial sources results in important and localised enhancements of the HCHO levels. In addition to the main product results, such as HCHO slant column, vertical column and air mass factor, the level 2 data files contain several additional parameters and diagnostic information. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,HCHO,Formaldehyde",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Formaldehyde,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_NO2,"The TROPOMI instrument, single payload onboard Sentinel-5 Precursor, retrieves operationally tropospheric and stratospheric NO2 column products. The TROPOMI NO2 data products pose an improvement over previous NO2 data sets, particularly in their unprecedented spatial resolution, but also in the separation of the stratospheric and tropospheric contributions of the retrieved slant columns, and in the calculation of the air-mass factors used to convert slant to total columns. 
",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,NO2,Nitrogen,Dioxide",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Nitrogen Dioxide,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_NP_BD3,"S5P-NPP Cloud for spectral band 3. The S5P level 2 methane product is dependent on having information on cloud occurrence at spatial resolution finer than that achievable from TROPOMI itself. This information is also useful for other purposes, including assessing the influence of cloud on other L2 products and issues related to spatial co-registration. A level 2 auxiliary product was therefore developed to describe cloud in the TROPOMI field of view (FOV), using co-located observations of VIIRS (Visible Infra-red Imaging Radiometer Suite) on the U.S. S-NPP (Suomi - National Polar-orbiting Partnership). S5P flies in a so-called loose formation with the S-NPP with a temporal separation between them of less than 5 minutes. The main information contained in the S5P-NPP product is: 1. A statistical summary for each S5P FOV of the NPP-VIIRS L2 Cloud Mask (VCM). 2. The mean and standard deviation of the sun-normalised radiance in a number of VIIRS moderate resolution bands. This information is provided for three S5P spectral bands (to account for differences in spatial sampling). ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,NP,NPP,Cloud,BD3,B03,BAND3",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 NPP Cloud for band 3,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_NP_BD6,"S5P-NPP Cloud for spectral band 6. The S5P level 2 methane product is dependent on having information on cloud occurrence at spatial resolution finer than that achievable from TROPOMI itself. This information is also useful for other purposes, including assessing the influence of cloud on other L2 products and issues related to spatial co-registration. A level 2 auxiliary product was therefore developed to describe cloud in the TROPOMI field of view (FOV), using co-located observations of VIIRS (Visible Infra-red Imaging Radiometer Suite) on the U.S. S-NPP (Suomi - National Polar-orbiting Partnership). S5P flies in a so-called loose formation with the S-NPP with a temporal separation between them of less than 5 minutes. The main information contained in the S5P-NPP product is: 1. A statistical summary for each S5P FOV of the NPP-VIIRS L2 Cloud Mask (VCM). 2. The mean and standard deviation of the sun-normalised radiance in a number of VIIRS moderate resolution bands. This information is provided for three S5P spectral bands (to account for differences in spatial sampling). ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,NP,NPP,Cloud,BD6,B06,BAND6",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 NPP Cloud for band 6,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_NP_BD7,"S5P-NPP Cloud for spectral band 7. The S5P level 2 methane product is dependent on having information on cloud occurrence at spatial resolution finer than that achievable from TROPOMI itself. This information is also useful for other purposes, including assessing the influence of cloud on other L2 products and issues related to spatial co-registration. A level 2 auxiliary product was therefore developed to describe cloud in the TROPOMI field of view (FOV), using co-located observations of VIIRS (Visible Infra-red Imaging Radiometer Suite) on the U.S. S-NPP (Suomi - National Polar-orbiting Partnership). 
S5P flies in a so-called loose formation with the S-NPP with a temporal separation between them of less than 5 minutes. The main information contained in the S5P-NPP product is: 1. A statistical summary for each S5P FOV of the NPP-VIIRS L2 Cloud Mask (VCM). 2. The mean and standard deviation of the sun-normalised radiance in a number of VIIRS moderate resolution bands. This information is provided for three S5P spectral bands (to account for differences in spatial sampling). ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,NP,NPP,Cloud,BD7,B07,BAND7",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 NPP Cloud for band 7,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_O3,"Ozone (O3) is of crucial importance for the equilibrium of the Earth's atmosphere. In the stratosphere, the ozone layer shields the biosphere from dangerous solar ultraviolet radiation. In the troposphere, it acts as an efficient cleansing agent, but at high concentration it also becomes harmful to the health of humans, animals, and vegetation. Ozone is also an important greenhouse-gas contributor to ongoing climate change. These products are provided in NetCDF-CF format and contain total ozone, ozone temperature, and error information including averaging kernels. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,O3,Ozone",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Ozone,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_O3_PR,"Retrieved ozone profiles are used to monitor the evolution of stratospheric and tropospheric ozone. Such monitoring is important as the ozone layer protects life on Earth against harmful UV radiation. The ozone layer is recovering from depletion due to manmade Chlorofluorocarbons (CFCs). Tropospheric ozone is toxic and it plays an important role in tropospheric chemistry. Also, ozone is a greenhouse gas and is therefore also relevant for climate change. The main parameters in the file are the retrieved ozone profile at 33 levels and the retrieved sub-columns of ozone in 6 layers. In addition, the total ozone column and tropospheric ozone columns are provided. For the ozone profile, the precision and smoothing errors, the a-priori profile and the averaging kernel are also provided. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,O3,PR,Ozone,Profile",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Ozone Profile,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S5P_L2_O3_TCL,"Ozone in the tropical troposphere plays various important roles. The intense UV radiation and high humidity in the tropics stimulate the formation of the hydroxyl radical (OH) by the photolysis of ozone. OH is the most important oxidant in the troposphere because it reacts with virtually all trace gases, such as CO, CH4 and other hydrocarbons. The tropics are also characterized by large emissions of nitrogen oxides (NOx), carbon monoxide (CO) and hydrocarbons, both from natural and anthropogenic sources. Ozone that is formed over regions where large amounts of these ozone precursors are emitted, can be transported over great distances and affects areas far from the source. The TROPOMI tropospheric ozone product is a level-2c product that represents three day averaged tropospheric ozone columns on a 0.5° by 1° latitude-longitude grid for the tropical region between 20°N and 20°S. The TROPOMI tropospheric ozone column product uses the TROPOMI Level-2 total OZONE and CLOUD products as input. 
",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,O3,TCL,Tropospheric,Ozone",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Tropospheric Ozone,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,,,,,,,, -S5P_L2_SO2,"Sulphur dioxide (SO2) enters the Earth's atmosphere through both natural (~30%) and anthropogenic processes (~70%). It plays a role in chemistry on a local and global scale and its impact ranges from short term pollution to effects on climate. Beside the total column of SO2, enhanced levels of SO2 are flagged within the products. The recognition of enhanced SO2 values is essential in order to detect and monitor volcanic eruptions and anthropogenic pollution sources. Volcanic SO2 emissions may also pose a threat to aviation, along with volcanic ash. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,SO2,Sulphur,Dioxide",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Sulphur Dioxide,2017-10-13T00:00:00Z,,,,,available,available,,,,,,,available,,,,,,, -S6_AMR_L2_F06,"This is a reprocessed dataset at baseline F06, which is continued by the NRT/NTC data stream from 29/April/2022 onwards. AMR-C Level 2 Products as generated by the AMR-C CFI Processor. These products include antenna and brightness temperatures, wet tropospheric correction, water vapour content, and a rain flag. Sentinel-6 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. It is a collaborative Copernicus mission, implemented and co-funded by the European Commission, ESA, EUMETSAT and the USA, through NASA and the National Oceanic and Atmospheric Administration (NOAA). ",AMR-C,SENTINEL6-A,S6A,L2,"SENTINEL,SENTINEL6,S6,S6A,LEO,L2,AMR-C,RADIOMETER,MICROWAVE,F06",RADIOMETER,proprietary,Sentinel 6 - Climate-quality Advanced Microwave Radiometer Level 2 Products Reprocessed at F06,2020-11-28T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -S6_P4_L1AHR_F06,"This is a reprocessed dataset at baseline F06, which is continued by the NRT/NTC data stream from 29/April/2022 onwards. The Level-1A product contains Level 1 intermediate output of the HR processor (RAW and RMC). It includes geo-located bursts of Ku echoes (at ~9 kHz) with all instrument calibrations applied. It includes the full rate complex waveforms input to the delay/Doppler or SAR processor. This product is most relevant to altimetry specialists, working on fundamental SAR processing techniques and calibration studies. Sentinel-6 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. It is a collaborative Copernicus mission, implemented and co-funded by the European Commission, ESA, EUMETSAT and the USA, through NASA and the National Oceanic and Atmospheric Administration (NOAA). ",Poseidon-4,SENTINEL6-A,S6A,L1A,"SENTINEL,SENTINEL6,S6,S6A,LEO,L1A,ALTIMETRIC,HR,POSEIDON4,P4,F06",ALTIMETRIC,proprietary,Sentinel 6 - Poseidon-4 Altimetry Level 1A High Resolution Reprocessed at F06,2020-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -S6_P4_L1BAHR_F06,"This is a reprocessed dataset at baseline F06, which is continued by the NRT/NTC data stream from 29/April/2022 onwards. The Level-1B HR product is output of the HR processor. It includes geo-located, and fully calibrated multi-looked high-resolution Ku-band waveforms. This product is most relevant to geophysical retrieval algorithm developers (over ocean, land and ice surfaces), surface characterisations studies (e.g. impact of sea state bias, wave directional effects etc.) and Quality Control systems. 
Sentinel-6 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. It is a collaborative Copernicus mission, implemented and co-funded by the European Commission, ESA, EUMETSAT and the USA, through NASA and the National Oceanic and Atmospheric Administration (NOAA). ",Poseidon-4,SENTINEL6-A,S6A,L1B,"SENTINEL,SENTINEL6,S6,S6A,LEO,L1B,ALTIMETRIC,HR,POSEIDON4,P4,F06",ALTIMETRIC,proprietary,Sentinel 6 - Poseidon-4 Altimetry Level 1B High Resolution Reprocessed at F06,2020-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -S6_P4_L1BLR_F06,"This is a reprocessed dataset at baseline F06, which is continued by the NRT/NTC data stream from 29/April/2022 onwards. The Level-1B LR product is the output of the LR processor. It includes geo-located and fully calibrated pulse-limited low-resolution Ku-band and C-band waveforms. This product is most relevant to geophysical retrieval algorithm developers (over ocean, land and ice surfaces), surface characterisation studies (e.g. impact of sea state bias, wave directional effects etc.) and Quality Control systems. Sentinel-6 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. It is a collaborative Copernicus mission, implemented and co-funded by the European Commission, ESA, EUMETSAT and the USA, through NASA and the National Oceanic and Atmospheric Administration (NOAA). ",Poseidon-4,SENTINEL6-A,S6A,L1B,"SENTINEL,SENTINEL6,S6,S6A,LEO,L1B,ALTIMETRIC,LR,POSEIDON4,P4,F06",ALTIMETRIC,proprietary,Sentinel 6 - Poseidon-4 Altimetry Level 1B Low Resolution Reprocessed at F06,2020-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -S6_P4_L2HR_F06,"This is a reprocessed dataset at baseline F06, which is continued by the NRT/NTC data stream from 29/April/2022 onwards. The level-2 high resolution products contain the typical altimetry measurements, like the altimeter range, the sea surface height, the wind speed, significant wave height and all required geophysical corrections and related flags derived either from RAW or RMC, or the combination of both. Two measurement data files are available (standard and reduced), each with a different number of variables. The standard data file includes 1 Hz and 20 Hz measurements for the Ku-band as well as geophysical corrections at 1 Hz and some at 20 Hz. The reduced data file contains only 1 Hz measurements for the Ku- and C-bands as well as geophysical corrections at 1 Hz. Note that the HR data products only contain Ku-band measurements. These products are suitable for users seeking information on sea state and those creating downstream added value products from multiple altimeters, particularly for those seeking the highest resolution measurements. Sentinel-6 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. It is a collaborative Copernicus mission, implemented and co-funded by the European Commission, ESA, EUMETSAT and the USA, through NASA and the National Oceanic and Atmospheric Administration (NOAA). ",Poseidon-4,SENTINEL6-A,S6A,L2,"SENTINEL,SENTINEL6,S6,S6A,LEO,L2,ALTIMETRIC,HR,POSEIDON4,P4,F06",ALTIMETRIC,proprietary,Sentinel 6 - Poseidon-4 Altimetry Level 2 High Resolution Reprocessed at F06,2020-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -S6_P4_L2LR_F06,"This is a reprocessed dataset at baseline F06, which is continued by the NRT/NTC data stream from 29/April/2022 onwards. 
The products contain the typical altimetry measurements, like the altimeter range, the sea surface height, the wind speed, significant wave height and all required geophysical corrections and related flags derived from LR. Two measurement data files are available (standard and reduced), each with a different number of variables. The standard data file includes 1 Hz and 20 Hz measurements for the Ku- and C-bands as well as geophysical corrections at 1 Hz and some at 20 Hz. The reduced data file contains only 1 Hz measurements for the Ku- and C-bands as well as geophysical corrections at 1 Hz. These products are suitable for users seeking information on sea state and those creating downstream added value products from multiple altimeters. Sentinel-6 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. It is a collaborative Copernicus mission, implemented and co-funded by the European Commission, ESA, EUMETSAT and the USA, through NASA and the National Oceanic and Atmospheric Administration (NOAA). ",Poseidon-4,SENTINEL6-A,S6A,L2,"SENTINEL,SENTINEL6,S6,S6A,LEO,L2,ALTIMETRIC,LR,POSEIDON4,P4,F06",ALTIMETRIC,proprietary,Sentinel 6 - Poseidon-4 Altimetry Level 2 Low Resolution Reprocessed at F06,2020-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -SATELLITE_CARBON_DIOXIDE,"This dataset provides observations of atmospheric carbon dioxide (CO2)\namounts obtained from observations collected by several current and historical \nsatellite instruments. Carbon dioxide is a naturally occurring Greenhouse Gas (GHG), but one whose abundance has been increased substantially above its pre-industrial value of some 280 ppm by human activities, primarily because of emissions from combustion of fossil fuels, deforestation and other land-use change. The annual cycle (especially in the northern hemisphere) is primarily due to seasonal uptake and release of atmospheric CO2 by terrestrial vegetation.\nAtmospheric carbon dioxide abundance is indirectly observed by various satellite instruments. These instruments measure spectrally resolved near-infrared and/or infrared radiation reflected or emitted by the Earth and its atmosphere. In the measured signal, molecular absorption signatures from carbon dioxide and other constituent gases can be identified. It is through analysis of those absorption lines in these radiance observations that the averaged carbon dioxide abundance in the sampled atmospheric column can be determined.\nThe software used to analyse the absorption lines and determine the carbon dioxide concentration in the sampled atmospheric column is referred to as the retrieval algorithm. For this dataset, carbon dioxide abundances have been determined by applying several algorithms to different satellite \ninstruments. Typically, different algorithms have different strengths and weaknesses; therefore, which product to use typically depends on the application.\nThe data set consists of 2 types of products: (i) column-averaged mixing ratios of CO2, denoted XCO2 and (ii) mid-tropospheric CO2 columns. The XCO2 products have been retrieved from SCIAMACHY/ENVISAT, TANSO-FTS/GOSAT and OCO-2. The mid-tropospheric CO2 product has been retrieved from the IASI instruments on-board the Metop satellite series and from AIRS. \nThe XCO2 products are available as Level 2 (L2) products (satellite orbit tracks) and as a Level 3 (L3) product (gridded). 
The L2 products are available as individual sensor products (SCIAMACHY: BESD and WFMD algorithms; GOSAT: OCFP and SRFP algorithms) and as a multi-sensor merged product (EMMA algorithm). The L3 XCO2 product is provided in OBS4MIPS format. \nThe IASI and AIRS products are available as L2 products generated with the NLIS algorithm.\nThis data set is updated on a yearly basis, with each update cycle adding (if required) a new data version for the entire period, up to one year behind real time.\nThis dataset is produced on behalf of C3S with the exception of the SCIAMACHY and AIRS L2 products that were generated in the framework of the GHG-CCI project of the European Space Agency (ESA) Climate Change Initiative (CCI).\n\nVariables in the dataset/application are:\nColumn-average dry-air mole fraction of atmospheric carbon dioxide (XCO2), Mid-tropospheric columns of atmospheric carbon dioxide (CO2) ",,,,,"ECMWF,CDS,C3S,carbon-dioxide",ATMOSPHERIC,proprietary,Carbon dioxide data from 2002 to present derived from satellite observations,2002-10-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -SATELLITE_METHANE,"This dataset provides observations of atmospheric methane (CH4)\namounts obtained from observations collected by several current and historical \nsatellite instruments. Methane is a naturally occurring Greenhouse Gas (GHG), but one whose abundance has been increased substantially above its pre-industrial value of some 720 ppb by human activities, primarily because of agricultural emissions (e.g., rice production, ruminants) and fossil fuel production and use. A clear annual cycle is largely due to seasonal wetland emissions.\nAtmospheric methane abundance is indirectly observed by various satellite instruments. These instruments measure spectrally resolved near-infrared and infrared radiation reflected or emitted by the Earth and its atmosphere. In the measured signal, molecular absorption signatures from methane and constituent gases can be identified. It is through analysis of those absorption lines in these radiance observations that the averaged methane abundance in the sampled atmospheric column can be determined.\nThe software used to analyse the absorption lines and determine the methane concentration in the sampled atmospheric column is referred to as the retrieval algorithm. For this dataset, methane abundances have been determined by applying several algorithms to different satellite instruments.\nThe data set consists of 2 types of products: (i) column-averaged mixing ratios of CH4, denoted XCH4 and (ii) mid-tropospheric CH4 columns. \nThe XCH4 products have been retrieved from SCIAMACHY/ENVISAT and TANSO-FTS/GOSAT. The mid-tropospheric CH4 product has been retrieved from the IASI instruments onboard the Metop satellite series. The XCH4 products are available as Level 2 (L2) products (satellite orbit tracks) and as a Level 3 (L3) product (gridded). 
The IASI products are available as L2 products generated with the NLIS algorithm.\nThis data set is updated on a yearly basis, with each update cycle adding (if required) a new data version for the entire period, up to one year behind real time.\nThis dataset is produced on behalf of C3S with the exception of the SCIAMACHY L2 products that were generated in the framework of the GHG-CCI project of the European Space Agency (ESA) Climate Change Initiative (CCI).\n\nVariables in the dataset/application are:\nColumn-average dry-air mole fraction of atmospheric methane (XCH4), Mid-tropospheric columns of atmospheric methane (CH4) ",,,,,"ECMWF,CDS,C3S,methane",ATMOSPHERIC,proprietary,Methane data from 2002 to present derived from satellite observations,2002-10-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -SATELLITE_SEA_LEVEL_BLACK_SEA,"Sea level anomaly is the height of water over the mean sea surface in a given time and region. Up-to-date altimeter standards are used to estimate the sea level anomalies with a mapping algorithm dedicated to the Black Sea region. Anomalies are computed with respect to a twenty-year mean reference period (1993-2012). The steady number of reference satellites used in the production of this dataset contributes to the long-term stability of the sea level record. Improvements of the accuracy, sampling of meso-scale processes and of the high-latitude coverage were achieved by using a few additional satellite missions. New data are provided with a delay of about 4-5 months relative to near-real time or interim sea level products. This delay is mainly due to the timeliness of the input data, the centred processing temporal window and the validation process. However, this processing and validation adds stability and accuracy to the sea level variables and makes them adapted to climate applications. This dataset includes uncertainties for each grid cell. More details about the sea level retrieval, additional filters, optimisation procedures, and the error estimation are given in the Documentation section. Variables in the dataset/application are: Absolute dynamic topography, Absolute geostrophic velocity meridian component, Absolute geostrophic velocity zonal component, Geostrophic velocity anomalies meridian component, Geostrophic velocity anomalies zonal component, Sea level anomaly ",,,,,"Climate,ECMWF,CDS,C3S,methane,sea",HYDROLOGICAL,proprietary,Sea level daily gridded data from satellite observations for the Black Sea from 1993 to 2020,1993-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -SEASONAL_MONTHLY_PL,"This entry covers pressure-level data aggregated on a monthly time resolution. \nSeasonal forecasts provide a long-range outlook of changes in the Earth system over periods of a few weeks or months, as a result of predictable changes in some of the slow-varying components of the system. For example, ocean temperatures typically vary slowly, on timescales of weeks or months; as the ocean has an impact on the overlaying atmosphere, the variability of its properties (e.g. temperature) can modify both local and remote atmospheric conditions. Such modifications of the 'usual' atmospheric conditions are the essence of all long-range (e.g. seasonal) forecasts. This is different from a weather forecast, which gives a lot more precise detail - both in time and space - of the evolution of the state of the atmosphere over a few days into the future. Beyond a few days, the chaotic nature of the atmosphere limits the possibility to predict precise changes at local scales. 
This is one of the reasons long-range forecasts of atmospheric conditions have large uncertainties. To quantify such uncertainties, long-range forecasts use ensembles, and meaningful forecast products reflect a distribution of outcomes.\nGiven the complex, non-linear interactions between the individual components of the Earth system, the best tools for long-range forecasting are climate models which include as many of the key components of the system as possible; typically, such models include representations of the atmosphere, ocean and land surface. These models are initialised with data describing the state of the system at the starting point of the forecast, and used to predict the evolution of this state in time.\nWhile uncertainties coming from imperfect knowledge of the initial conditions of the components of the Earth system can be described with the use of ensembles, uncertainty arising from approximations made in the models is very much dependent on the choice of model. A convenient way to quantify the effect of these approximations is to combine outputs from several models, independently developed, initialised and operated.\nTo this effect, the C3S provides a multi-system seasonal forecast service, where data produced by state-of-the-art seasonal forecast systems developed, implemented and operated at forecast centres in several European countries is collected, processed and combined to enable user-relevant applications. The composition of the C3S seasonal multi-system and the full content of the database underpinning the service are described in the documentation. The data is grouped in several catalogue entries (CDS datasets), currently defined by the type of variable (single-level or multi-level, on pressure surfaces) and the level of post-processing applied (data at original time resolution, processing on temporal aggregation and post-processing related to bias adjustment).\nThe variables available in this data set are listed in the table below. The data includes forecasts created in real-time (since 2017) and retrospective forecasts (hindcasts) initialised at equivalent intervals during the period 1993-2016.\n\nVariables in the dataset/application are:\nGeopotential, Specific humidity, Temperature, U-component of wind, V-component of wind ",,,,,"ECMWF,CDS,C3S,seasonal,forecast,monthly,pressure,levels",ATMOSPHERIC,proprietary,Seasonal forecast monthly statistics on pressure levels,1993-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -SEASONAL_MONTHLY_SL,"This entry covers single-level data aggregated on a monthly time resolution. \nSeasonal forecasts provide a long-range outlook of changes in the Earth system over periods of a few weeks or months, as a result of predictable changes in some of the slow-varying components of the system. For example, ocean temperatures typically vary slowly, on timescales of weeks or months; as the ocean has an impact on the overlaying atmosphere, the variability of its properties (e.g. temperature) can modify both local and remote atmospheric conditions. Such modifications of the 'usual' atmospheric conditions are the essence of all long-range (e.g. seasonal) forecasts. This is different from a weather forecast, which gives a lot more precise detail - both in time and space - of the evolution of the state of the atmosphere over a few days into the future. Beyond a few days, the chaotic nature of the atmosphere limits the possibility to predict precise changes at local scales. 
This is one of the reasons long-range forecasts of atmospheric conditions have large uncertainties. To quantify such uncertainties, long-range forecasts use ensembles, and meaningful forecast products reflect a distribution of outcomes.\nGiven the complex, non-linear interactions between the individual components of the Earth system, the best tools for long-range forecasting are climate models which include as many of the key components of the system as possible; typically, such models include representations of the atmosphere, ocean and land surface. These models are initialised with data describing the state of the system at the starting point of the forecast, and used to predict the evolution of this state in time.\nWhile uncertainties coming from imperfect knowledge of the initial conditions of the components of the Earth system can be described with the use of ensembles, uncertainty arising from approximations made in the models is very much dependent on the choice of model. A convenient way to quantify the effect of these approximations is to combine outputs from several models, independently developed, initialised and operated.\nTo this effect, the C3S provides a multi-system seasonal forecast service, where data produced by state-of-the-art seasonal forecast systems developed, implemented and operated at forecast centres in several European countries is collected, processed and combined to enable user-relevant applications. The composition of the C3S seasonal multi-system and the full content of the database underpinning the service are described in the documentation. The data is grouped in several catalogue entries (CDS datasets), currently defined by the type of variable (single-level or multi-level, on pressure surfaces) and the level of post-processing applied (data at original time resolution, processing on temporal aggregation and post-processing related to bias adjustment).\nThe variables available in this data set are listed in the table below. The data includes forecasts created in real-time (since 2017) and retrospective forecasts (hindcasts) initialised at equivalent intervals during the period 1993-2016.\n\nVariables in the dataset/application are:\n10m u-component of wind, 10m v-component of wind, 10m wind gust since previous post-processing, 10m wind speed, 2m dewpoint temperature, 2m temperature, East-west surface stress rate of accumulation, Evaporation, Maximum 2m temperature in the last 24 hours, Mean sea level pressure, Mean sub-surface runoff rate, Mean surface runoff rate, Minimum 2m temperature in the last 24 hours, North-south surface stress rate of accumulation, Runoff, Sea surface temperature, Sea-ice cover, Snow density, Snow depth, Snowfall, Soil temperature level 1, Solar insolation rate of accumulation, Surface latent heat flux, Surface sensible heat flux, Surface solar radiation, Surface solar radiation downwards, Surface thermal radiation, Surface thermal radiation downwards, Top solar radiation, Top thermal radiation, Total cloud cover, Total precipitation ",,,,,"ECMWF,CDS,C3S,seasonal,forecast,monthly,single,levels",ATMOSPHERIC,proprietary,Seasonal forecast monthly statistics on single levels,1993-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -SEASONAL_ORIGINAL_PL,"This entry covers pressure-level data at the original time resolution (once every 12 hours). 
\nSeasonal forecasts provide a long-range outlook of changes in the Earth system over periods of a few weeks or months, as a result of predictable changes in some of the slow-varying components of the system. For example, ocean temperatures typically vary slowly, on timescales of weeks or months; as the ocean has an impact on the overlaying atmosphere, the variability of its properties (e.g. temperature) can modify both local and remote atmospheric conditions. Such modifications of the 'usual' atmospheric conditions are the essence of all long-range (e.g. seasonal) forecasts. This is different from a weather forecast, which gives a lot more precise detail - both in time and space - of the evolution of the state of the atmosphere over a few days into the future. Beyond a few days, the chaotic nature of the atmosphere limits the possibility to predict precise changes at local scales. This is one of the reasons long-range forecasts of atmospheric conditions have large uncertainties. To quantify such uncertainties, long-range forecasts use ensembles, and meaningful forecast products reflect a distribution of outcomes.\nGiven the complex, non-linear interactions between the individual components of the Earth system, the best tools for long-range forecasting are climate models which include as many of the key components of the system as possible; typically, such models include representations of the atmosphere, ocean and land surface. These models are initialised with data describing the state of the system at the starting point of the forecast, and used to predict the evolution of this state in time.\nWhile uncertainties coming from imperfect knowledge of the initial conditions of the components of the Earth system can be described with the use of ensembles, uncertainty arising from approximations made in the models is very much dependent on the choice of model. A convenient way to quantify the effect of these approximations is to combine outputs from several models, independently developed, initialised and operated.\nTo this effect, the C3S provides a multi-system seasonal forecast service, where data produced by state-of-the-art seasonal forecast systems developed, implemented and operated at forecast centres in several European countries is collected, processed and combined to enable user-relevant applications. The composition of the C3S seasonal multi-system and the full content of the database underpinning the service are described in the documentation. The data is grouped in several catalogue entries (CDS datasets), currently defined by the type of variable (single-level or multi-level, on pressure surfaces) and the level of post-processing applied (data at original time resolution, processing on temporal aggregation and post-processing related to bias adjustment).\nThe variables available in this data set are listed in the table below. The data includes forecasts created in real-time (since 2017) and retrospective forecasts (hindcasts) initialised at equivalent intervals during the period 1993-2016.\n\nVariables in the dataset/application are:\nGeopotential, Specific humidity, Temperature, U-component of wind, V-component of wind ",,,,,"ECMWF,CDS,C3S,seasonal,forecast,subdaily,pressure,levels",ATMOSPHERIC,proprietary,Seasonal forecast subdaily data on pressure levels,1993-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -SEASONAL_ORIGINAL_SL,"This entry covers single-level data at the original time resolution (once a day, or once every 6 hours, depending on the variable). 
\nSeasonal forecasts provide a long-range outlook of changes in the Earth system over periods of a few weeks or months, as a result of predictable changes in some of the slow-varying components of the system. For example, ocean temperatures typically vary slowly, on timescales of weeks or months; as the ocean has an impact on the overlaying atmosphere, the variability of its properties (e.g. temperature) can modify both local and remote atmospheric conditions. Such modifications of the 'usual' atmospheric conditions are the essence of all long-range (e.g. seasonal) forecasts. This is different from a weather forecast, which gives a lot more precise detail - both in time and space - of the evolution of the state of the atmosphere over a few days into the future. Beyond a few days, the chaotic nature of the atmosphere limits the possibility to predict precise changes at local scales. This is one of the reasons long-range forecasts of atmospheric conditions have large uncertainties. To quantify such uncertainties, long-range forecasts use ensembles, and meaningful forecast products reflect a distributions of outcomes.\nGiven the complex, non-linear interactions between the individual components of the Earth system, the best tools for long-range forecasting are climate models which include as many of the key components of the system and possible; typically, such models include representations of the atmosphere, ocean and land surface. These models are initialised with data describing the state of the system at the starting point of the forecast, and used to predict the evolution of this state in time.\nWhile uncertainties coming from imperfect knowledge of the initial conditions of the components of the Earth system can be described with the use of ensembles, uncertainty arising from approximations made in the models are very much dependent on the choice of model. A convenient way to quantify the effect of these approximations is to combine outputs from several models, independently developed, initialised and operated.\nTo this effect, the C3S provides a multi-system seasonal forecast service, where data produced by state-of-the-art seasonal forecast systems developed, implemented and operated at forecast centres in several European countries is collected, processed and combined to enable user-relevant applications. The composition of the C3S seasonal multi-system and the full content of the database underpinning the service are described in the documentation. The data is grouped in several catalogue entries (CDS datasets), currently defined by the type of variable (single-level or multi-level, on pressure surfaces) and the level of post-processing applied (data at original time resolution, processing on temporal aggregation and post-processing related to bias adjustment).\nThe variables available in this data set are listed in the table below. 
The data includes forecasts created in real-time (since 2017) and retrospective forecasts (hindcasts) initialised at equivalent intervals during the period 1993-2016.\n\nVariables in the dataset/application are:\n10m u-component of wind, 10m v-component of wind, 10m wind gust since previous post-processing, 2m dewpoint temperature, 2m temperature, Eastward turbulent surface stress, Evaporation, Land-sea mask, Maximum 2m temperature in the last 24 hours, Mean sea level pressure, Minimum 2m temperature in the last 24 hours, Northward turbulent surface stress, Orography, Runoff, Sea surface temperature, Sea-ice cover, Snow density, Snow depth, Snowfall, Soil temperature level 1, Sub-surface runoff, Surface latent heat flux, Surface net solar radiation, Surface net thermal radiation, Surface runoff, Surface sensible heat flux, Surface solar radiation downwards, Surface thermal radiation downwards, TOA incident solar radiation, Top net solar radiation, Top net thermal radiation, Total cloud cover, Total precipitation ",,,,,"ECMWF,CDS,C3S,seasonal,forecast,daily,single,levels",ATMOSPHERIC,proprietary,Seasonal forecast daily and subdaily data on single levels,2017-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -SEASONAL_POSTPROCESSED_PL,"This entry covers pressure-level data post-processed for bias adjustment on a monthly time resolution. \nSeasonal forecasts provide a long-range outlook of changes in the Earth system over periods of a few weeks or months, as a result of predictable changes in some of the slow-varying components of the system. For example, ocean temperatures typically vary slowly, on timescales of weeks or months; as the ocean has an impact on the overlaying atmosphere, the variability of its properties (e.g. temperature) can modify both local and remote atmospheric conditions. Such modifications of the 'usual' atmospheric conditions are the essence of all long-range (e.g. seasonal) forecasts. This is different from a weather forecast, which gives a lot more precise detail - both in time and space - of the evolution of the state of the atmosphere over a few days into the future. Beyond a few days, the chaotic nature of the atmosphere limits the possibility to predict precise changes at local scales. This is one of the reasons long-range forecasts of atmospheric conditions have large uncertainties. To quantify such uncertainties, long-range forecasts use ensembles, and meaningful forecast products reflect a distributions of outcomes.\nGiven the complex, non-linear interactions between the individual components of the Earth system, the best tools for long-range forecasting are climate models which include as many of the key components of the system and possible; typically, such models include representations of the atmosphere, ocean and land surface. These models are initialised with data describing the state of the system at the starting point of the forecast, and used to predict the evolution of this state in time.\nWhile uncertainties coming from imperfect knowledge of the initial conditions of the components of the Earth system can be described with the use of ensembles, uncertainty arising from approximations made in the models are very much dependent on the choice of model. 
A convenient way to quantify the effect of these approximations is to combine outputs from several models, independently developed, initialised and operated.\nTo this effect, the C3S provides a multi-system seasonal forecast service, where data produced by state-of-the-art seasonal forecast systems developed, implemented and operated at forecast centres in several European countries is collected, processed and combined to enable user-relevant applications. The composition of the C3S seasonal multi-system and the full content of the database underpinning the service are described in the documentation. The data is grouped in several catalogue entries (CDS datasets), currently defined by the type of variable (single-level or multi-level, on pressure surfaces) and the level of post-processing applied (data at original time resolution, processing on temporal aggregation and post-processing related to bias adjustment).\nThe variables available in this data set are listed in the table below. The data includes forecasts created in real-time since 2017.\n\nVariables in the dataset/application are:\nGeopotential anomaly, Specific humidity anomaly, Temperature anomaly, U-component of wind anomaly, V-component of wind anomaly ",,,,,"ECMWF,CDS,C3S,seasonal,forecast,anomalies,pressure,levels",ATMOSPHERIC,proprietary,Seasonal forecast anomalies on pressure levels,2017-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -SEASONAL_POSTPROCESSED_SL,"This entry covers single-level data post-processed for bias adjustment on a monthly time resolution. \nSeasonal forecasts provide a long-range outlook of changes in the Earth system over periods of a few weeks or months, as a result of predictable changes in some of the slow-varying components of the system. For example, ocean temperatures typically vary slowly, on timescales of weeks or months; as the ocean has an impact on the overlaying atmosphere, the variability of its properties (e.g. temperature) can modify both local and remote atmospheric conditions. Such modifications of the 'usual' atmospheric conditions are the essence of all long-range (e.g. seasonal) forecasts. This is different from a weather forecast, which gives a lot more precise detail - both in time and space - of the evolution of the state of the atmosphere over a few days into the future. Beyond a few days, the chaotic nature of the atmosphere limits the possibility to predict precise changes at local scales. This is one of the reasons long-range forecasts of atmospheric conditions have large uncertainties. To quantify such uncertainties, long-range forecasts use ensembles, and meaningful forecast products reflect a distributions of outcomes.\nGiven the complex, non-linear interactions between the individual components of the Earth system, the best tools for long-range forecasting are climate models which include as many of the key components of the system and possible; typically, such models include representations of the atmosphere, ocean and land surface. These models are initialised with data describing the state of the system at the starting point of the forecast, and used to predict the evolution of this state in time.\nWhile uncertainties coming from imperfect knowledge of the initial conditions of the components of the Earth system can be described with the use of ensembles, uncertainty arising from approximations made in the models are very much dependent on the choice of model. 
A convenient way to quantify the effect of these approximations is to combine outputs from several models, independently developed, initialised and operated.\nTo this effect, the C3S provides a multi-system seasonal forecast service, where data produced by state-of-the-art seasonal forecast systems developed, implemented and operated at forecast centres in several European countries is collected, processed and combined to enable user-relevant applications. The composition of the C3S seasonal multi-system and the full content of the database underpinning the service are described in the documentation. The data is grouped in several catalogue entries (CDS datasets), currently defined by the type of variable (single-level or multi-level, on pressure surfaces) and the level of post-processing applied (data at original time resolution, processing on temporal aggregation and post-processing related to bias adjustment).\nThe variables available in this data set are listed in the table below. The data includes forecasts created in real-time since 2017.\n\nVariables in the dataset/application are:\n10m u-component of wind anomaly, 10m v-component of wind anomaly, 10m wind gust anomaly, 10m wind speed anomaly, 2m dewpoint temperature anomaly, 2m temperature anomaly, East-west surface stress anomalous rate of accumulation, Evaporation anomalous rate of accumulation, Maximum 2m temperature in the last 24 hours anomaly, Mean sea level pressure anomaly, Mean sub-surface runoff rate anomaly, Mean surface runoff rate anomaly, Minimum 2m temperature in the last 24 hours anomaly, North-south surface stress anomalous rate of accumulation, Runoff anomalous rate of accumulation, Sea surface temperature anomaly, Sea-ice cover anomaly, Snow density anomaly, Snow depth anomaly, Snowfall anomalous rate of accumulation, Soil temperature anomaly level 1, Solar insolation anomalous rate of accumulation, Surface latent heat flux anomalous rate of accumulation, Surface sensible heat flux anomalous rate of accumulation, Surface solar radiation anomalous rate of accumulation, Surface solar radiation downwards anomalous rate of accumulation, Surface thermal radiation anomalous rate of accumulation, Surface thermal radiation downwards anomalous rate of accumulation, Top solar radiation anomalous rate of accumulation, Top thermal radiation anomalous rate of accumulation, Total cloud cover anomaly, Total precipitation anomalous rate of accumulation ",,,,,"ECMWF,CDS,C3S,seasonal,forecast,anomalies,single,levels",ATMOSPHERIC,proprietary,Seasonal forecast anomalies on single levels,2017-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -SIS_HYDRO_MET_PROJ,"This dataset provides precipitation and near surface air temperature for Europe as Essential Climate Variables (ECVs) and as a set of Climate Impact Indicators (CIIs) based on the ECVs. \nECV datasets provide the empirical evidence needed to understand the current climate and predict future changes. \nCIIs contain condensed climate information which facilitate relatively quick and efficient subsequent analysis. Therefore, CIIs make climate information accessible to application focussed users within a sector.\nThe ECVs and CIIs provided here were derived within the water management sectoral information service to address questions specific to the water sector. 
However, the products are provided in a generic form and are relevant for a range of sectors, for example agriculture and energy.\nThe data represent the current state-of-the-art in Europe for regional climate modelling and indicator production. Data from eight model simulations included in the Coordinated Regional Climate Downscaling Experiment (CORDEX) were used to calculate a total of two ECVs and five CIIs at a spatial resolution of 0.11° x 0.11° and 5km x 5km.\nThe ECV data meet the technical specification set by the Global Climate Observing System (GCOS), as such they are provided on a daily time step. They are bias adjusted using the EFAS gridded observations as a reference dataset. Note these are model output data, not observation data as is the general case for ECVs.\nThe CIIs are provided as mean values over a 30-year time period. For the reference period (1971-2000) data is provided as absolute values, for the future periods the data is provided as absolute values and as the relative or absolute change from the reference period. The future periods cover 3 fixed time periods (2011-2040, 2041-2070 and 2071-2100) and 3 \""degree scenario\"" periods defined by when global warming exceeds a given threshold (1.5 °C, 2.0 °C or 3.0 °C). The global warming is calculated from the global climate model (GCM) used, therefore the actual time period of the degree scenarios will be different for each GCM.\nThis dataset is produced and quality assured by the Swedish Meteorological and Hydrological Institute on behalf of the Copernicus Climate Change Service. \n\nVariables in the dataset/application are:\n2m air temperature, Highest 5-day precipitation amount, Longest dry spells, Number of dry spells, Precipitation ",,,,,"ECMWF,CDS,C3S,hydrology,meterology,water,precipitation,temperature",ATMOSPHERIC,proprietary,Temperature and precipitation climate impact indicators from 1970 to 2100 derived from European climate projections,1970-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -SPOT5_SPIRIT,SPOT 5 stereoscopic survey of Polar Ice. ,,SPOT5,SPOT5,L1A,"SPOT,SPOT5,L1A",OPTICAL,proprietary,Spot 5 SPIRIT,2002-05-04T00:00:00Z,,,,,,,,,,,,,,,,,available,,, -SPOT_SWH,The Spot World Heritage (SWH) programme objective is the free availability for non-commercial use of orthorectified products derived from multispectral images of more than 5 years old from the Spot 1-5 satellites family. More informations on https://www.theia-land.fr/en/product/spot-world-heritage/ ,,SPOT1-5,SPOT1-5,L1C,"SPOT,SPOT1,SPOT2,SPOT3,SPOT4,SPOT5,L1C",OPTICAL,proprietary,Spot World Heritage,1986-02-22T00:00:00Z,,,,,,,,,,,,,,,,,available,,, -SPOT_SWH_OLD,Spot world heritage Old format. ,,SPOT1-5,SPOT1-5,L1C,"SPOT,SPOT1,SPOT2,SPOT3,SPOT4,SPOT5,L1C",OPTICAL,proprietary,Spot World Heritage,1986-02-22T00:00:00Z,,,,,,,,,,,,,,,,,available,,, -TIGGE_CF_SFC,TIGGE (THORPEX Interactive Grand Global Ensemble) Surface Control forecast from ECMWF ,,TIGGE,TIGGE,,"THORPEX,TIGGE,CF,SFC,ECMWF",ATMOSPHERIC,proprietary,TIGGE ECMWF Surface Control forecast,2003-01-01T00:00:00Z,,,,,,,,,,available,,,,,,,,,, -UERRA_EUROPE_SL,"This UERRA dataset contains analyses of surface and near-surface essential climate variables from UERRA-HARMONIE and MESCAN-SURFEX systems. Forecasts up to 30 hours initialised from the analyses at 00 and 12 UTC are available only through the CDS-API (see Documentation). UERRA-HARMONIE is a 3-dimensional variational data assimilation system, while MESCAN-SURFEX is a complementary surface analysis system. 
Using the Optimal Interpolation method, MESCAN provides the best estimate of daily accumulated precipitation and six-hourly air temperature and relative humidit at 2 meters above the model topography. The land surface platform SURFEX is forced with downscaled forecast fields from UERRA-HARMONIE as well as MESCAN analyses. It is run offline, i.e. without feedback to the atmospheric analysis performed in MESCAN or the UERRA-HARMONIE data assimilation cycles. Using SURFEX offline allows to take full benefit of precipitation analysis and to use the more advanced physics options to better represent surface variables such as surface temperature and surface fluxes, and soil processes related to water and heat transfer in the soil and snow. In general, the assimilation systems are able to estimate biases between observations and to sift good-quality data from poor data. The laws of physics allow for estimates at locations where data coverage is low. The provision of estimates at each grid point in Europe for each regular output time, over a long period, always using the same format, makes reanalysis a very convenient and popular dataset to work with. The observing system has changed drastically over time, and although the assimilation system can resolve data holes, the much sparser observational networks, e.g. in 1960s, will have an impact on the quality of analyses leading to less accurate estimates. The improvement over global reanalysis products comes with the higher horizontal resolution that allows incorporating more regional details (e.g. topography). Moreover, it enables the system even to use more observations at places with dense observation networks. Variables in the dataset/application are: 10m wind direction, 10m wind speed, 2m relative humidity, 2m temperature, Albedo, High cloud cover, Land sea mask, Low cloud cover, Mean sea level pressure, Medium cloud cover, Orography, Skin temperature, Snow density, Snow depth water equivalent, Surface pressure, Surface roughness, Total cloud cover, Total column integrated water vapour, Total precipitation ",,SURFEX,SURFEX,,"Climate,ECMWF,Reanalysis,Regional,Europe,UERRA,UERRA-HARMONIE,SURFEX,MESCAN-SURFEX,CDS,Atmospheric,single,levels",ATMOSPHERIC,proprietary,UERRA regional reanalysis for Europe on single levels from 1961 to 2019,1918-10-18T00:00:00Z,,,,,,,,,,,,,,,,,,,,available -VENUS_L1C,A light description of Venus L1 data is available at http://www.cesbio.ups-tlse.fr/multitemp/?page_id=12984 ,,VENUS,VENUS,L1C,"VENUS,L1,L1C",OPTICAL,proprietary,Venus Level1-C,2017-08-02T00:00:00Z,,,,,,,,,,,,,,,,,available,,, -VENUS_L2A_MAJA,"Level2 products provide surface reflectances after atmospheric correction, along with masks of clouds and their shadows. Data is processed by MAJA (before called MACCS) for THEIA land data center. 
",,VENUS,VENUS,L2A,"VENUS,L2,L2A",OPTICAL,proprietary,Venus Level2-A,2017-08-02T00:00:00Z,,,,,,,,,,,,,,,,,available,,, -VENUS_L3A_MAJA,,,VENUS,VENUS,L3A,"VENUS,L3,L3A",OPTICAL,proprietary,Venus Level3-A,2017-08-02T00:00:00Z,,,,,,,,,,,,,,,,,available,,, +product type,abstract,instrument,platform,platformSerialIdentifier,processingLevel,keywords,sensorType,license,title,missionStartDate,astraea_eod,aws_eos,cop_ads,cop_cds,cop_dataspace,creodias,creodias_s3,earth_search,earth_search_cog,earth_search_gcs,ecmwf,hydroweb_next,meteoblue,onda,peps,planetary_computer,sara,theia,usgs,usgs_satapi_aws,wekeo +CAMS_EAC4,"EAC4 (ECMWF Atmospheric Composition Reanalysis 4) is the fourth generation ECMWF global reanalysis of atmospheric composition. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using a model of the atmosphere based on the laws of physics and chemistry. This principle, called data assimilation, is based on the method used by numerical weather prediction centres and air quality forecasting centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. Reanalysis works in the same way to allow for the provision of a dataset spanning back more than a decade. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. The assimilation system is able to estimate biases between observations and to sift good-quality data from poor data. The atmosphere model allows for estimates at locations where data coverage is low or for atmospheric pollutants for which no direct observations are available. The provision of estimates at each grid point around the globe for each regular output time, over a long period, always using the same format, makes reanalysis a very convenient and popular dataset to work with. The observing system has changed drastically over time, and although the assimilation system can resolve data holes, the initially much sparser networks will lead to less accurate estimates. For this reason, EAC4 is only available from 2003 onwards. Although the analysis procedure considers chunks of data in a window of 12 hours in one go, EAC4 provides estimates every 3 hours, worldwide. This is made possible by the 4D-Var assimilation method, which takes account of the exact timing of the observations and model evolution within the assimilation window. ",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Atmosphere,Atmospheric,EWMCF,EAC4",ATMOSPHERIC,proprietary,CAMS global reanalysis (EAC4),2003-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,,, +CAMS_EAC4_MONTHLY,"EAC4 (ECMWF Atmospheric Composition Reanalysis 4) is the fourth generation ECMWF global reanalysis of atmospheric composition. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using a model of the atmosphere based on the laws of physics and chemistry. 
This principle, called data assimilation, is based on the method used by numerical weather prediction centres and air quality forecasting centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. Reanalysis works in the same way to allow for the provision of a dataset spanning back more than a decade. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. The assimilation system is able to estimate biases between observations and to sift good-quality data from poor data. The atmosphere model allows for estimates at locations where data coverage is low or for atmospheric pollutants for which no direct observations are available. The provision of estimates at each grid point around the globe for each regular output time, over a long period, always using the same format, makes reanalysis a very convenient and popular dataset to work with. The observing system has changed drastically over time, and although the assimilation system can resolve data holes, the initially much sparser networks will lead to less accurate estimates. For this reason, EAC4 is only available from 2003 onwards. Although the analysis procedure considers chunks of data in a window of 12 hours in one go, EAC4 provides estimates every 3 hours, worldwide. This is made possible by the 4D-Var assimilation method, which takes account of the exact timing of the observations and model evolution within the assimilation window. ",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Atmosphere,Atmospheric,EWMCF,EAC4",ATMOSPHERIC,proprietary,CAMS global reanalysis (EAC4) monthly averaged fields,2003-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,,,
+CAMS_EU_AIR_QUALITY_FORECAST,"This dataset provides daily air quality analyses and forecasts for Europe. CAMS produces specific daily air quality analyses and forecasts for the European domain at significantly higher spatial resolution (0.1 degrees, approx. 10km) than is available from the global analyses and forecasts. The production is based on an ensemble of eleven air quality forecasting systems across Europe. A median ensemble is calculated from individual outputs, since ensemble products yield on average better performance than the individual model products. The spread between the eleven models is used to provide an estimate of the forecast uncertainty. The analysis combines model data with observations provided by the European Environment Agency (EEA) into a complete and consistent dataset using various data assimilation techniques depending upon the air-quality forecasting system used. In parallel, air quality forecasts are produced once a day for the next four days. Both the analysis and the forecast are available at hourly time steps at seven height levels. Note that only nitrogen monoxide, nitrogen dioxide, sulphur dioxide, ozone, PM2.5, PM10 and dust are regularly validated against in situ observations, and therefore forecasts of all other variables are unvalidated and should be considered experimental. 
",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Atmosphere,Atmospheric,Air,Forecast,EEA",ATMOSPHERIC,proprietary,CAMS European air quality forecasts,2021-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,,, +CAMS_EU_AIR_QUALITY_RE,"This dataset provides annual air quality reanalyses for Europe based on both unvalidated (interim) and validated observations. CAMS produces annual air quality (interim) reanalyses for the European domain at significantly higher spatial resolution (0.1 degrees, approx. 10km) than is available from the global reanalyses. The production is currently based on an ensemble of nine air quality data assimilation systems across Europe. A median ensemble is calculated from individual outputs, since ensemble products yield on average better performance than the individual model products. The spread between the nine models can be used to provide an estimate of the analysis uncertainty. The reanalysis combines model data with observations provided by the European Environment Agency (EEA) into a complete and consistent dataset using various data assimilation techniques depending upon the air-quality forecasting system used. Additional sources of observations can complement the in-situ data assimilation, like satellite data. An interim reanalysis is provided each year for the year before based on the unvalidated near-real-time observation data stream that has not undergone full quality control by the data providers yet. Once the fully quality-controlled observations are available from the data provider, typically with an additional delay of about 1 year, a final validated annual reanalysis is provided. Both reanalyses are available at hourly time steps at height levels. ",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Atmosphere,Atmospheric,Air,EEA",ATMOSPHERIC,proprietary,CAMS European air quality reanalyses,2013-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,,, +CAMS_GAC_FORECAST,"CAMS produces global forecasts for atmospheric composition twice a day. The forecasts consist of more than 50 chemical species (e.g. ozone, nitrogen dioxide, carbon monoxide) and seven different types of aerosol (desert dust, sea salt, organic matter, black carbon, sulphate, nitrate and ammonium aerosol). In addition, several meteorological variables are available as well. The initial conditions of each forecast are obtained by combining a previous forecast with current satellite observations through a process called data assimilation. This best estimate of the state of the atmosphere at the initial forecast time step, called the analysis, provides a globally complete and consistent dataset allowing for estimates at locations where observation data coverage is low or for atmospheric pollutants for which no direct observations are available. The forecast itself uses a model of the atmosphere based on the laws of physics and chemistry to determine the evolution of the concentrations of all species over time for the next five days. Apart from the required initial state, it also uses inventory-based or observation-based emission estimates as a boundary condition at the surface. The CAMS global forecasting system is upgraded about once a year resulting in technical and scientific changes. The horizontal or vertical resolution can change, new species can be added, and more generally the accuracy of the forecasts can be improved. Details of these system changes can be found in the documentation. 
Users looking for a more consistent long-term data set should consider using the CAMS Global Reanalysis instead, which is available through the ADS and spans the period from 2003 onwards. Finally, because some meteorological fields in the forecast do not fall within the general CAMS data licence, they are only available with a delay of 5 days. ",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Atmosphere,Atmospheric,Forecast,GAC",ATMOSPHERIC,proprietary,CAMS global atmospheric composition forecasts,2015-01-02T00:00:00Z,,,available,,,,,,,,,,,,,,,,,, +CAMS_GFE_GFAS,"Emissions of atmospheric pollutants from biomass burning and vegetation fires are key drivers of the evolution of atmospheric composition, with a high degree of spatial and temporal variability, and an accurate representation of them in models is essential. The CAMS Global Fire Assimilation System (GFAS) utilises satellite observations of fire radiative power (FRP) to provide near-real-time information on the location, relative intensity and estimated emissions from biomass burning and vegetation fires. Emissions are estimated by (i) conversion of FRP observations to the dry matter (DM) consumed by the fire, and (ii) application of emission factors to DM for different biomes, based on field and laboratory studies in the scientific literature, to estimate the emissions. Emissions estimates for 40 pyrogenic species are available from GFAS, including aerosols, reactive gases and greenhouse gases, on a regular grid with a spatial resolution of 0.1 degrees longitude by 0.1 degrees latitude. This version of GFAS (v1.2) provides daily averaged data based on a combination of FRP observations from two Moderate Resolution Imaging Spectroradiometer (MODIS) instruments, one on the NASA EOS-Terra satellite and the other on the NASA EOS-Aqua satellite from 1 January 2003 to present. GFAS also provides daily estimates of smoke plume injection heights derived from FRP observations and meteorological information from the operational weather forecasts from ECMWF. GFAS data have been used to provide surface boundary conditions for the CAMS global atmospheric composition and European regional air quality forecasts, and the wider atmospheric chemistry modelling community. ",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Atmosphere,Atmospheric,Fire,FRP,DM,MODIS,NASA,EOS,ECMWF,GFAS",ATMOSPHERIC,proprietary,CAMS global biomass burning emissions based on fire radiative power (GFAS),2003-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,,, +CAMS_GLOBAL_EMISSIONS,"This data set contains gridded distributions of global anthropogenic and natural emissions. Natural and anthropogenic emissions of atmospheric pollutants and greenhouse gases are key drivers of the evolution of the composition of the atmosphere, so an accurate representation of them in forecast models of atmospheric composition is essential. CAMS compiles inventories of emission data that serve as input to its own forecast models, but which can also be used by other atmospheric chemical transport models. These inventories are based on a combination of existing data sets and new information, describing anthropogenic emissions from fossil fuel use on land, shipping, and aviation, and natural emissions from vegetation, soil, the ocean and termites. The anthropogenic emissions on land are further separated in specific activity sectors (e.g., power generation, road traffic, industry). The CAMS emission data sets provide good consistency between the emissions of greenhouse gases, reactive gases, and aerosol particles and their precursors. 
Because most inventory-based data sets are only available with a delay of several years, the CAMS emission inventories also extend these existing data sets forward in time by using the trends from the most recent available years, producing timely input data for real-time forecast models. Most of the data sets are updated once or twice per year adding the most recent year to the data record, while re-processing the original data record for consistency, when needed. This is reflected by the different version numbers. ",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Atmosphere,Atmospheric,Emissions,Pollutants,GHG",ATMOSPHERIC,proprietary,CAMS global emission inventories,2000-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,,, +CAMS_GREENHOUSE_EGG4,"This dataset is part of the ECMWF Atmospheric Composition Reanalysis focusing on long-lived greenhouse gases: carbon dioxide (CO2) and methane (CH4). The emissions and natural fluxes at the surface are crucial for the evolution of the long-lived greenhouse gases in the atmosphere. In this dataset the CO2 fluxes from terrestrial vegetation are modelled in order to simulate the variability across a wide range of scales from diurnal to inter-annual. The CH4 chemical loss is represented by a climatological loss rate and the emissions at the surface are taken from a range of datasets. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using a model of the atmosphere based on the laws of physics and chemistry. This principle, called data assimilation, is based on the method used by numerical weather prediction centres and air quality forecasting centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. Reanalysis works in the same way to allow for the provision of a dataset spanning back more than a decade. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. The assimilation system is able to estimate biases between observations and to sift good-quality data from poor data. The atmosphere model allows for estimates at locations where data coverage is low or for atmospheric pollutants for which no direct observations are available. The provision of estimates at each grid point around the globe for each regular output time, over a long period, always using the same format, makes reanalysis a very convenient and popular dataset to work with. The observing system has changed drastically over time, and although the assimilation system can resolve data holes, the initially much sparser networks will lead to less accurate estimates. For this reason, EAC4 is only available from 2003 onwards. The analysis procedure assimilates data in a window of 12 hours using the 4D-Var assimilation method, which takes account of the exact timing of the observations and model evolution within the assimilation window. 
",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Atmospheric,Atmosphere,CO2,CH4,GHG,ECMWF,EGG4",ATMOSPHERIC,proprietary,CAMS global greenhouse gas reanalysis (EGG4),2003-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,,, +CAMS_GREENHOUSE_EGG4_MONTHLY,"This dataset is part of the ECMWF Atmospheric Composition Reanalysis focusing on long-lived greenhouse gases: carbon dioxide (CO2) and methane (CH4). The emissions and natural fluxes at the surface are crucial for the evolution of the long-lived greenhouse gases in the atmosphere. In this dataset the CO2 fluxes from terrestrial vegetation are modelled in order to simulate the variability across a wide range of scales from diurnal to inter-annual. The CH4 chemical loss is represented by a climatological loss rate and the emissions at the surface are taken from a range of datasets. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using a model of the atmosphere based on the laws of physics and chemistry. This principle, called data assimilation, is based on the method used by numerical weather prediction centres and air quality forecasting centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. Reanalysis works in the same way to allow for the provision of a dataset spanning back more than a decade. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. The assimilation system is able to estimate biases between observations and to sift good-quality data from poor data. The atmosphere model allows for estimates at locations where data coverage is low or for atmospheric pollutants for which no direct observations are available. The provision of estimates at each grid point around the globe for each regular output time, over a long period, always using the same format, makes reanalysis a very convenient and popular dataset to work with. The observing system has changed drastically over time, and although the assimilation system can resolve data holes, the initially much sparser networks will lead to less accurate estimates. For this reason, EAC4 is only available from 2003 onwards. The analysis procedure assimilates data in a window of 12 hours using the 4D-Var assimilation method, which takes account of the exact timing of the observations and model evolution within the assimilation window. ",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Atmospheric,Atmosphere,CO2,CH4,Greenhouse,ECMWF,EGG4",ATMOSPHERIC,proprietary,CAMS global greenhouse gas reanalysis (EGG4) monthly averaged fields,2003-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,,, +CAMS_GREENHOUSE_INVERSION,"This data set contains net fluxes at the surface, atmospheric mixing ratios at model levels, and column-mean atmospheric mixing ratios for carbon dioxide (CO2), methane (CH4) and nitrous oxide (N20). Natural and anthropogenic surface fluxes of greenhouse gases are key drivers of the evolution of Earth’s climate, so their monitoring is essential. Such information has been used in particular as part of the Assessment Reports of the Intergovernmental Panel on Climate Change (IPCC). 
Ground-based and satellite remote-sensing observations provide a means of quantifying the net fluxes between the land and ocean on the one hand and the atmosphere on the other hand. This is done through a process called atmospheric inversion, which uses transport models of the atmosphere to link the observed concentrations of CO2, CH4 and N2O to the net fluxes at the Earth's surface. By correctly modelling the winds, vertical diffusion, and convection in the global atmosphere, the observed concentrations of the greenhouse gases are used to infer the surface fluxes for the last few decades. For CH4 and N2O, the flux inversions account also for the chemical loss of these greenhouse gases. The net fluxes include contributions from the natural biosphere (e.g., vegetation, wetlands) as well as anthropogenic contributions (e.g., fossil fuel emissions, rice fields). The data sets for the three species are updated once or twice per year adding the most recent year to the data record, while re-processing the original data record for consistency. This is reflected by the different version numbers. In addition, fluxes for methane are available based on surface air samples only or based on a combination of surface air samples and satellite observations (reflected by an 's' in the version number). ",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Atmosphere,Atmospheric,IPCC,CO2,CH4,N2O",ATMOSPHERIC,proprietary,CAMS global inversion-optimised greenhouse gas fluxes and concentrations,1979-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,,,
+CAMS_GRF,"This dataset provides geographical distributions of the radiative forcing (RF) by key atmospheric constituents. The radiative forcing estimates are based on the CAMS reanalysis and additional model simulations and are provided separately for CO2, CH4, O3 (tropospheric and stratospheric), interactions between anthropogenic aerosols and radiation and interactions between anthropogenic aerosols and clouds. Radiative forcing measures the imbalance in the Earth's energy budget caused by a perturbation of the climate system, such as changes in atmospheric composition caused by human activities. RF is a useful predictor of globally-averaged temperature change, especially when rapid adjustments of atmospheric temperature and moisture profiles are taken into account. RF has therefore become a quantitative metric to compare the potential climate response to different perturbations. Increases in greenhouse gas concentrations over the industrial era exerted a positive RF, causing a gain of energy in the climate system. In contrast, concurrent changes in atmospheric aerosol concentrations are thought to exert a negative RF leading to a loss of energy. Products are quantified both in ""all-sky"" conditions, meaning that the radiative effects of clouds are included in the radiative transfer calculations, and in ""clear-sky"" conditions, which are computed by excluding clouds in the radiative transfer calculations. The upgrade from version 1.5 to 2 consists of an extension of the period by 2017-2018, the addition of an ""effective radiative forcing"" product and new ways to calculate the pre-industrial reference state for aerosols and cloud condensation nuclei. More details are given in the documentation section. New versions may be released in future as scientific methods develop, and existing versions may be extended with later years if data for the period is available from the CAMS reanalysis. Newer versions supersede old versions, so it is always recommended to use the latest one. 
CAMS also produces distributions of aerosol optical depths, distinguishing natural from anthropogenic aerosols, which are a separate dataset. See ""Related Data"". ",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Atmospheric,Atmosphere,RF,CO2,CH4,O3,Aerosol",ATMOSPHERIC,proprietary,CAMS global radiative forcings,2003-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,,, +CAMS_GRF_AUX,"This dataset provides aerosol optical depths and aerosol-radiation radiative effects for four different aerosol origins: anthropogenic, mineral dust, marine, and land-based fine-mode natural aerosol. The latter mostly consists of biogenic aerosols. The data are a necessary complement to the ""CAMS global radiative forcings"" dataset (see ""Related Data""). The calculation of aerosol radiative forcing requires a discrimination between aerosol of anthropogenic and natural origin. However, the CAMS reanalysis, which is used to provide the aerosol concentrations, does not make this distinction. The anthropogenic fraction was therefore derived by a method which uses aerosol size as a proxy for aerosol origin. ",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Atmospheric,Atmosphere,RF,CO2,CH4,O3,Aerosol",ATMOSPHERIC,proprietary,CAMS global radiative forcing - auxilliary variables,2003-01-01T00:00:00Z,,,available,,,,,,,,,,,,,,,,,, +CAMS_SOLAR_RADIATION,"The CAMS solar radiation services provide historical values (2004 to present) of global (GHI), direct (BHI) and diffuse (DHI) solar irradiation, as well as direct normal irradiation (BNI). The aim is to fulfil the needs of European and national policy development and the requirements of both commercial and public downstream services, e.g. for planning, monitoring, efficiency improvements and the integration of solar energy systems into energy supply grids. For clear-sky conditions, an irradiation time series is provided for any location in the world using information on aerosol, ozone and water vapour from the CAMS global forecasting system. Other properties, such as ground albedo and ground elevation, are also taken into account. Similar time series are available for cloudy (or ""all sky"") conditions but, since the high-resolution cloud information is directly inferred from satellite observations, these are currently only available inside the field-of-view of the Meteosat Second Generation (MSG) satellite, which is roughly Europe, Africa, the Atlantic Ocean and the Middle East. Data is offered in both ASCII and netCDF format. Additionally, an ASCII ""expert mode"" format can be selected which contains in addition to the irradiation, all the input data used in their calculation (aerosol optical properties, water vapour concentration, etc). This additional information is only meaningful in the time frame at which the calculation is performed and so is only available at 1-minute time steps in universal time (UT). ",,CAMS,CAMS,,"Copernicus,ADS,CAMS,Solar,Radiation",ATMOSPHERIC,proprietary,CAMS solar radiation time-series,2004-01-02T00:00:00Z,,,available,,,,,,,,,,,,,,,,,, +CBERS4_AWFI_L2,"China-Brazil Earth Resources Satellite, CBERS-4 AWFI camera Level-2 product. System corrected images, expect some translation error. ",AWFI,CBERS,CBERS-4,L2,"AWFI,CBERS,CBERS-4,L2",OPTICAL,proprietary,CBERS-4 AWFI Level-2,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,,, +CBERS4_AWFI_L4,"China-Brazil Earth Resources Satellite, CBERS-4 AWFI camera Level-4 product. Orthorectified with ground control points. 
",AWFI,CBERS,CBERS-4,L4,"AWFI,CBERS,CBERS-4,L4",OPTICAL,proprietary,CBERS-4 AWFI Level-4,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,,, +CBERS4_MUX_L2,"China-Brazil Earth Resources Satellite, CBERS-4 MUX camera Level-2 product. System corrected images, expect some translation error. ",MUX,CBERS,CBERS-4,L2,"MUX,CBERS,CBERS-4,L2",OPTICAL,proprietary,CBERS-4 MUX Level-2,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,,, +CBERS4_MUX_L4,"China-Brazil Earth Resources Satellite, CBERS-4 MUX camera Level-4 product. Orthorectified with ground control points. ",MUX,CBERS,CBERS-4,L4,"MUX,CBERS,CBERS-4,L4",OPTICAL,proprietary,CBERS-4 MUX Level-4,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,,, +CBERS4_PAN10M_L2,"China-Brazil Earth Resources Satellite, CBERS-4 PAN10M camera Level-2 product. System corrected images, expect some translation error. ",PAN10M,CBERS,CBERS-4,L2,"PAN10M,CBERS,CBERS-4,L2",OPTICAL,proprietary,CBERS-4 PAN10M Level-2,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,,, +CBERS4_PAN10M_L4,"China-Brazil Earth Resources Satellite, CBERS-4 PAN10M camera Level-4 product. Orthorectified with ground control points. ",PAN10M,CBERS,CBERS-4,L4,"PAN10M,CBERS,CBERS-4,L4",OPTICAL,proprietary,CBERS-4 PAN10M Level-4,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,,, +CBERS4_PAN5M_L2,"China-Brazil Earth Resources Satellite, CBERS-4 PAN5M camera Level-2 product. System corrected images, expect some translation error. ",PAN5M,CBERS,CBERS-4,L2,"PAN5M,CBERS,CBERS-4,L2",OPTICAL,proprietary,CBERS-4 PAN5M Level-2,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,,, +CBERS4_PAN5M_L4,"China-Brazil Earth Resources Satellite, CBERS-4 PAN5M camera Level-4 product. Orthorectified with ground control points. ",PAN5M,CBERS,CBERS-4,L4,"PAN5M,CBERS,CBERS-4,L4",OPTICAL,proprietary,CBERS-4 PAN5M Level-4,2014-12-07T00:00:00Z,,available,,,,,,,,,,,,,,,,,,, +CLMS_CORINE,"The CORINE Land Cover (CLC) inventory was initiated in 1985 (reference year 1990). Updates have been produced in 2000, 2006, 2012, and 2018. It consists of an inventory of land cover in 44 classes. CLC uses a Minimum Mapping Unit (MMU) of 25 hectares (ha) for areal phenomena and a minimum width of 100 m for linear phenomena. The time series are complemented by change layers, which highlight changes in land cover with an MMU of 5 ha. Different MMUs mean that the change layer has higher resolution than the status layer. Due to differences in MMUs the difference between two status layers will not equal to the corresponding CLC-Changes layer. If you are interested in CLC-Changes between two neighbour surveys always use the CLC-Change layer. ",,"Sentinel-2, LANDSAT, SPOT-4/5, IRS P6 LISS III","S2, L5, L7, L8, SPOT4, SPOT5",,"Land-cover,LCL,CORINE,CLMS",,proprietary,CORINE Land Cover,1986-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +CLMS_GLO_DMP_333M,"Dry matter Productivity (DMP) is an indication of the overall growth rate or dry biomass increase of the vegetation and is directly related to ecosystem Net Primary Productivity (NPP), however its units (kilograms of gross dry matter per hectare per day) are customized for agro-statistical purposes. Compared to the Gross DMP (GDMP), or its equivalent Gross Primary Productivity, the main difference lies in the inclusion of the autotrophic respiration. Like the FAPAR products that are used as input for the GDMP estimation, these GDMP products are provided in Near Real Time, with consolidations in the next periods, or as offline product. 
","OLCI,PROBA-V",Sentinel-3,,,"Land,Dry-matter-productivity,DMP,OLCI,PROBA-V,Sentinel-3",,proprietary,10-daily Dry Matter Productivity 333M,2014-01-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +CLMS_GLO_FAPAR_333M,"The FAPAR quantifies the fraction of the solar radiation absorbed by plants for photosynthesis. It refers only to the green and living elements of the canopy. The FAPAR depends on the canopy structure, vegetation element optical properties, atmospheric conditions and angular configuration. To overcome this latter dependency, a daily integrated FAPAR value is assessed. FAPAR is very useful as input to a number of primary productivity models and is recognized as an Essential Climate Variable (ECV) by the Global Climate Observing System (GCOS). The product at 333m resolution is provided in Near Real Time and consolidated in the next six periods. ","OLCI,PROBA-V",Sentinel-3,,,"Land,Fraction-of-absorbed-PAR,FAPAR,OLCI,PROBA-V,Sentinel-3",,proprietary,Global 10-daily Fraction of Absorbed PAR 333m,2014-01-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +CLMS_GLO_FCOVER_333M,"The Fraction of Vegetation Cover (FCover) corresponds to the fraction of ground covered by green vegetation. Practically, it quantifies the spatial extent of the vegetation. Because it is independent from the illumination direction and it is sensitive to the vegetation amount, FCover is a very good candidate for the replacement of classical vegetation indices for the monitoring of ecosystems. The product at 333m resolution is provided in Near Real Time and consolidated in the next six periods. ","OLCI,PROBA-V",Sentinel-3,,,"Land,Fraction-of-vegetation-cover,OLCI,PROBA-V,Sentinel-3",,proprietary,Global 10-daily Fraction of Vegetation Cover 333m,2014-01-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +CLMS_GLO_GDMP_333M,"Gross dry matter Productivity (GDMP) is an indication of the overall growth rate or dry biomass increase of the vegetation and is directly related to ecosystem Gross Primary Productivity (GPP), that reflects the ecosystem's overall production of organic compounds from atmospheric carbon dioxide, however its units (kilograms of gross dry matter per hectare per day) are customized for agro-statistical purposes. Like the FAPAR products that are used as input for the GDMP estimation, these GDMP products are provided in Near Real Time, with consolidations in the next periods, or as offline product. ","OLCI,PROBA-V",Sentinel-3,,,"Land,Gross-dry-matter-productivity,GDMP,GPP,OLCI,PROBA-V,Sentinel-3",,proprietary,10-daily Gross Dry Matter Productivity 333M,2014-01-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +CLMS_GLO_LAI_333M,"LAI was defined by CEOS as half the developed area of the convex hull wrapping the green canopy elements per unit horizontal ground. This definition allows accounting for elements which are not flat such as needles or stems. LAI is strongly non linearly related to reflectance. Therefore, its estimation from remote sensing observations will be scale dependant over heterogeneous landscapes. When observing a canopy made of different layers of vegetation, it is therefore mandatory to consider all the green layers. This is particularly important for forest canopies where the understory may represent a very significant contribution to the total canopy LAI. The derived LAI corresponds therefore to the total green LAI, including the contribution of the green elements of the understory. The product at 333m resolution is provided in Near Real Time and consolidated in the next six periods. 
","OLCI,PROBA-V",Sentinel-3,,,"Land,Leaf-area-index,LAI,OLCI,PROBA-V,Sentinel-3",,proprietary,Global 10-daily Leaf Area Index 333m,2014-01-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +CLMS_GLO_NDVI_1KM_LTS,"The Normalized Difference Vegetation Index (NDVI) is a proxy to quantify the vegetation amount. It is defined as NDVI=(NIR-Red)/(NIR+Red) where NIR corresponds to the reflectance in the near infrared band, and Red to the reflectance in the red band. The time series of dekadal (10-daily) NDVI 1km version 2 observations over the period 1999-2017 is used to calculate Long Term Statistics (LTS) for each of the 36 10-daily periods (dekads) of the year. The calculated LTS include the minimum, median, maximum, average, standard deviation and the number of observations in the covered time series period. These LTS can be used as a reference for actual NDVI observations, which allows evaluating whether vegetation conditions deviate from a 'normal' situation. ","VEGETATION,PROBA-V",SPOT,,,"Land,NDVI,LTS,SPOT,VEGETATION,PROBA-V",,proprietary,"Normalized Difference Vegetation Index: global Long Term Statistics (raster 1km) - version 2, Apr 2019",1999-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +CLMS_GLO_NDVI_333M,"The Normalized Difference Vegetation Index (NDVI) is a proxy to quantify the vegetation amount. It is defined as NDVI=(NIR-Red)/(NIR+Red) where NIR corresponds to the reflectance in the near infrared band, and Red to the reflectance in the red band. It is closely related to FAPAR and is little scale dependant. ",PROBA-V,,,,"Land,NDVI,PROBA-V",,proprietary,Global 10-daily Normalized Difference Vegetation Index 333M,2014-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +COP_DEM_GLO30_DGED,"Defence Gridded Elevation Data (DGED, 32 Bit floating point) formatted Copernicus DEM GLO-30 data. The Copernicus Digital Elevation Model is a Digital Surface Model (DSM) that represents the surface of the Earth including buildings, infrastructure and vegetation. The Copernicus DEM is provided in 3 different instances: EEA-10, GLO-30 and GLO-90. GLO-30 provides worldwide coverage at 30 meters.Data were acquired through the TanDEM-X mission between 2011 and 2015. The datasets were made available for use in 2019 and will be maintained until 2026. ",,TerraSAR,,,"TerraSAR,TanDEM-X,DEM,surface,GLO-30,DSM,GDGED",ALTIMETRIC,proprietary,Copernicus DEM GLO-30 DGED,2010-06-21T00:00:00Z,,,,,,available,available,available,,,,,,,,,,,,,available +COP_DEM_GLO30_DTED,"Digital Terrain Elevation Data (DTED, 16 Bit signed integer) formatted Copernicus DEM GLO-30 data. The Copernicus Digital Elevation Model is a Digital Surface Model (DSM) that represents the surface of the Earth including buildings, infrastructure and vegetation. The Copernicus DEM is provided in 3 different instances: EEA-10, GLO-30 and GLO-90. GLO-30 provides worldwide coverage at 30 meters.Data were acquired through the TanDEM-X mission between 2011 and 2015. The datasets were made available for use in 2019 and will be maintained until 2026. ",,TerraSAR,,,"TerraSAR,TanDEM-X,DEM,surface,GLO-30,DSM,DTED",ALTIMETRIC,proprietary,Copernicus DEM GLO-30 DTED,2010-06-21T00:00:00Z,,,,,,available,available,,,,,,,,,,,,,, +COP_DEM_GLO90_DGED,"Defence Gridded Elevation Data (DGED, 32 Bit floating point) formatted Copernicus DEM GLO-90 data. The Copernicus Digital Elevation Model is a Digital Surface Model (DSM) that represents the surface of the Earth including buildings, infrastructure and vegetation. 
The Copernicus DEM is provided in 3 different instances: EEA-10, GLO-30 and GLO-90. GLO-90 provides worldwide coverage at 90 meters. Data were acquired through the TanDEM-X mission between 2011 and 2015. The datasets were made available for use in 2019 and will be maintained until 2026. ",,TerraSAR,,,"TerraSAR,TanDEM-X,DEM,surface,GLO-90,DSM,GDGED",ALTIMETRIC,proprietary,Copernicus DEM GLO-90 DGED,2010-06-21T00:00:00Z,,,,,,available,available,available,,,,,,,,,,,,,available +COP_DEM_GLO90_DTED,"Digital Terrain Elevation Data (DTED, 16 Bit signed integer) formatted Copernicus DEM GLO-90 data. The Copernicus Digital Elevation Model is a Digital Surface Model (DSM) that represents the surface of the Earth including buildings, infrastructure and vegetation. The Copernicus DEM is provided in 3 different instances: EEA-10, GLO-30 and GLO-90. GLO-90 provides worldwide coverage at 90 meters. Data were acquired through the TanDEM-X mission between 2011 and 2015. The datasets were made available for use in 2019 and will be maintained until 2026. ",,TerraSAR,,,"TerraSAR,TanDEM-X,DEM,surface,GLO-90,DSM,DTED",ALTIMETRIC,proprietary,Copernicus DEM GLO-90 DTED,2010-06-21T00:00:00Z,,,,,,available,available,,,,,,,,,,,,,, +EEA_DAILY_SSM_1KM,"Surface Soil Moisture (SSM) is the relative water content of the top few centimetres of soil, describing how wet or dry the soil is in its topmost layer, expressed in percent saturation. It is measured by satellite radar sensors and allows insights into local precipitation impacts and soil conditions. SSM is a key driver of water and heat fluxes between the ground and the atmosphere, regulating air temperature and humidity. Moreover, in its role as water supply, it is vital to vegetation health. Conversely, SSM is very sensitive to external forcing in the form of precipitation, temperature, solar irradiation, humidity, and wind. SSM is thus both an integrator of climatic conditions and a driver of local weather and climate, and plays a major role in global water, energy and carbon cycles. Knowledge of the dynamics of soil moisture is important in the understanding of processes in many environmental and socio-economic fields, e.g., its impact on vegetation vitality, crop yield, droughts or exposure to flood threats. ","C-SAR,Metop ASCAT",Sentinel-1,,,"SSM,C-SAR,Metop-ASCAT,Sentinel-1",RADAR,proprietary,"Surface Soil Moisture: continental Europe daily (raster 1km) - version 1, Apr 2019",2015-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +EEA_DAILY_SWI_1KM,"The Soil Water Index (SWI) quantifies the moisture condition at various depths in the soil. It is mainly driven by the precipitation via the process of infiltration. Soil moisture is a very heterogeneous variable and varies on small scales with soil properties and drainage patterns. Satellite measurements integrate over relatively large-scale areas, with the presence of vegetation adding complexity to the interpretation. Soil moisture is a key parameter in numerous environmental studies including hydrology, meteorology and agriculture, and is recognized as an Essential Climate Variable (ECV) by the Global Climate Observing System (GCOS). The SWI product provides daily information about moisture conditions in different soil layers. It includes a quality flag (QFLAG) indicating the availability of SSM measurements for SWI calculations, and a Surface State Flag (SSF) indicating frozen or snow covered soils. 
","C-SAR,Metop ASCAT",Sentinel-1,,,"SWI,QFLAG,SSF,C-SAR,Metop-ASCAT,Sentinel-1",RADAR,proprietary,"Soil Water Index: continental Europe daily (raster 1km) - version 1, Apr 2019",2015-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +EEA_DAILY_VI,"Vegetation Indices (VI) comprises four daily vegetation indices (PPI, NDVI, LAI and FAPAR) and quality information, that are part of the Copernicus Land Monitoring Service (CLMS) HR-VPP product suite. The 10m resolution, daily updated Plant Phenology Index (PPI), Normalized Difference Vegetation Index (NDVI), Leaf Area Index (LAI) and Fraction of Absorbed Photosynthetically Active Radiation (fAPAR) are derived from Copernicus Sentinel-2 satellite observations. They are provided together with a related quality indicator (QFLAG2) that flags clouds, shadows, snow, open water and other areas where the VI retrieval is less reliable. These Vegetation Indices are made available as a set of raster files with 10 x 10m resolution, in UTM/WGS84 projection corresponding to the Sentinel-2 tiling grid, for those tiles that cover the EEA38 countries and the United Kingdom and for the period from 2017 until today, with daily updates. The Vegetation Indices are part of the pan-European High Resolution Vegetation Phenology and Productivity (HR-VPP) component of the Copernicus Land Monitoring Service (CLMS). ",,Sentinel-2,"S2A, S2B",,"Land,Plant-phenology-index,Phenology,Vegetation,Sentinel-2,S2A,S2B",RADAR,proprietary,"Vegetation Indices, daily, UTM projection",,,,,,,,,,,,,,,,,,,,,,available +EFAS_FORECAST,"This dataset provides gridded modelled hydrological time series forced with medium-range meteorological forecasts. The data is a consistent representation of the most important hydrological variables across the European Flood Awareness System (EFAS) domain. The temporal resolution is sub-daily high-resolution and ensemble forecasts of:\n\nRiver discharge\nSoil moisture for three soil layers\nSnow water equivalent\n\nIt also provides static data on soil depth for the three soil layers. Soil moisture and river discharge data are accompanied by ancillary files for interpretation (see related variables and links in the documentation).\nThis data set was produced by forcing the LISFLOOD hydrological model at a 5x5km resolution with meteorological forecasts. The forecasts are initialised twice daily at 00 and 12 UTC with time steps of 6 or 24 hours and lead times between 5 and 15 days depending on the forcing numerical weather prediction model. The forcing meteorological data are high-resolution and ensemble forecasts from the European Centre of Medium-range Weather Forecasts (ECMWF) with 51 ensemble members, high-resolution forecasts from the Deutsches Wetter Dienst (DWD) and the ensemble forecasts from the COSMO Local Ensemble Prediction System (COSMO-LEPS) with 20 ensemble members. The hydrological forecasts are available from 2018-10-10 up until present with a 30-day delay. The real-time data is only available to EFAS partners.\nCompanion datasets, also available through the CDS, are historical simulations which can be used to derive the hydrological climatology and for verification; reforecasts for research, local skill assessment and post-processing; and seasonal forecasts and reforecasts for users looking for longer leadtime forecasts. For users looking for global hydrological data, we refer to the Global Flood Awareness System (GloFAS) forecasts and historical simulations. 
All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours, River discharge in the last 6 hours, Snow depth water equivalent, Soil depth, Volumetric soil moisture\n\nVariables in the dataset/application are:\nOrography, Upstream area ",,CEMS,CEMS,,"ECMWF,CEMS,EFAS,forecast,river,discharge",ATMOSPHERIC,proprietary,River discharge and related forecasted data by the European Flood Awareness System,2018-10-11T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +EFAS_HISTORICAL,"This dataset provides gridded modelled daily hydrological time series forced with meteorological observations. The data set is a consistent representation of the most important hydrological variables across the European Flood Awareness System (EFAS) domain. The temporal resolution is up to 30 years modelled time series of:\n\nRiver discharge\nSoil moisture for three soil layers\nSnow water equivalent\n\nIt also provides static data on soil depth for the three soil layers. Soil moisture and river discharge data are accompanied by ancillary files for interpretation (see related variables and links in the documentation).\nThis dataset was produced by forcing the LISFLOOD hydrological model with gridded observational data of precipitation and temperature at a 5x5 km resolution across the EFAS domain. The most recent version\nuses a 6-hourly time step, whereas older versions use a 24-hour time step. It is available from 1991-01-01 up until near-real time, with a delay of 6 days. The real-time data is only available to EFAS partners.\nCompanion datasets, also available through the CDS, are forecasts for users who are looking for medium-range forecasts, reforecasts for research, local skill assessment and post-processing, and seasonal forecasts and reforecasts for users looking for long-term forecasts. For users looking for global hydrological data, we refer to the Global Flood Awareness System (GloFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours, River discharge in the last 6 hours, Snow depth water equivalent, Soil depth, Volumetric soil moisture\n\nVariables in the dataset/application are:\nOrography, Upstream area ",,CEMS,CEMS,,"ECMWF,CEMS,EFAS,historical,river,discharge",ATMOSPHERIC,proprietary,River discharge and related historical data from the European Flood Awareness System,1992-01-02T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +EFAS_REFORECAST,"This dataset provides gridded modelled hydrological time series forced with medium- to sub-seasonal range meteorological reforecasts. The data is a consistent representation of the most important hydrological variables across the European Flood Awareness System (EFAS) domain. The temporal resolution is 20 years of sub-daily reforecasts initialised twice weekly (Mondays and Thursdays) of:\n\nRiver discharge\nSoil moisture for three soil layers\nSnow water equivalent\n\nIt also provides static data on soil depth for the three soil layers. 
Soil moisture and river discharge data are accompanied by ancillary files for interpretation (see related variables and links in the documentation).\nThis dataset was produced by forcing the LISFLOOD hydrological model at a 5x5km resolution with ensemble meteorological reforecasts from the European Centre for Medium-Range Weather Forecasts (ECMWF). Reforecasts are forecasts run over past dates and are typically used to assess the skill of a forecast system or to develop tools for statistical error correction of the forecasts. The reforecasts are initialised twice weekly with lead times up to 46 days, at 6-hourly time steps for 20 years. For more specific information on how the reforecast dataset is produced we refer to the documentation.\nCompanion datasets, also available through the Climate Data Store (CDS), are the operational forecasts, historical simulations which can be used to derive the hydrological climatology, and seasonal forecasts and reforecasts for users looking for long term forecasts. For users looking for global hydrological data, we refer to the Global Flood Awareness System (GloFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge, Snow depth water equivalent, Soil depth, Volumetric soil moisture\n\nVariables in the dataset/application are:\nOrography, Upstream area ",,CEMS,CEMS,,"ECMWF,CEMS,EFAS,reforecast,river,discharge",ATMOSPHERIC,proprietary,Reforecasts of river discharge and related data by the European Flood Awareness System,2003-03-27T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +EFAS_SEASONAL,"This dataset provides gridded modelled daily hydrological time series forced with seasonal meteorological forecasts. The dataset is a consistent representation of the most important hydrological variables across the European Flood Awareness System (EFAS) domain. The temporal resolution is daily forecasts initialised once a month consisting of:\n\nRiver discharge\nSoil moisture for three soil layers\nSnow water equivalent\n\nIt also provides static data on soil depth for the three soil layers. Soil moisture and river discharge data are accompanied by ancillary files for interpretation (see related variables and links in the documentation).\nThis dataset was produced by forcing the LISFLOOD hydrological model at a 5x5km resolution with seasonal meteorological ensemble forecasts. The forecasts are initialised on the first of each month with a lead time of 215 days at 24-hour time steps. The meteorological data are seasonal forecasts (SEAS5) from the European Centre for Medium-Range Weather Forecasts (ECMWF) with 51 ensemble members. The forecasts are available from November 2020.\nCompanion datasets, also available through the Climate Data Store (CDS), are seasonal reforecasts for research, local skill assessment and post-processing of the seasonal forecasts. There are also medium-range forecasts for users who want to look at shorter time ranges. These are accompanied by historical simulations which can be used to derive the hydrological climatology, and medium-range reforecasts. For users looking for global hydrological data, we refer to the Global Flood Awareness System (GloFAS) forecasts and historical simulations. 
All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours, Snow depth water equivalent, Soil depth, Volumetric soil moisture\n\nVariables in the dataset/application are:\nOrography, Upstream area ",,CEMS,CEMS,,"ECMWF,CEMS,EFAS,seasonal,forecast,river,discharge",ATMOSPHERIC,proprietary,Seasonal forecasts of river discharge and related data by the European Flood Awareness System,2020-11-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +EFAS_SEASONAL_REFORECAST,"This dataset provides modelled daily hydrological time series forced with seasonal meteorological reforecasts. The dataset is a consistent representation of the most important hydrological variables across the European Flood Awareness System (EFAS) domain. The temporal resolution is daily forecasts initialised once a month over the reforecast period 1991-2020 of:\n\nRiver discharge\nSoil moisture for three soil layers\nSnow water equivalent\n\nIt also provides static data on soil depth for the three soil layers. Soil moisture and river discharge data are accompanied by ancillary files for interpretation (see related variables and links in the documentation).\nThis dataset was produced by forcing the LISFLOOD hydrological model at a 5x5km gridded resolution with seasonal meteorological ensemble reforecasts. Reforecasts are forecasts run over past dates and are typically used to assess the skill of a forecast system or to develop tools for statistical error correction of the forecasts. The reforecasts are initialised on the first of each month with a lead time of 215 days at 24-hour time steps. The forcing meteorological data are seasonal reforecasts from the European Centre for Medium-Range Weather Forecasts (ECMWF), consisting of 25 ensemble members up until December 2016, and after that 51 members. Hydrometeorological reforecasts are available from 1991-01-01 up until 2020-10-01. \nCompanion datasets, also available through the Climate Data Store (CDS), are seasonal forecasts, for which the seasonal reforecasts can be useful for local skill assessment and post-processing of the seasonal forecasts. For users looking for shorter time ranges there are medium-range forecasts and reforecasts, as well as historical simulations which can be used to derive the hydrological climatology. For users looking for global hydrological data, we refer to the Global Flood Awareness System (GloFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours, Snow depth water equivalent, Soil depth, Volumetric soil moisture\n\nVariables in the dataset/application are:\nOrography, Upstream area ",,CEMS,CEMS,,"ECMWF,CEMS,EFAS,seasonal,reforecast,river,discharge",ATMOSPHERIC,proprietary,Seasonal reforecasts of river discharge and related data by the European Flood Awareness System,1991-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +ERA5_LAND,"ERA5-Land is a reanalysis dataset providing a consistent view of the evolution of land variables over several decades at an enhanced resolution compared to ERA5. ERA5-Land has been produced by replaying the land component of the ECMWF ERA5 climate reanalysis. 
Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using the laws of physics. Reanalysis produces data that goes several decades back in time, providing an accurate description of the climate of the past. ERA5-Land uses ERA5 atmospheric variables, such as air temperature and air humidity, as input to control the simulated land fields. This is called the atmospheric forcing. Without the constraint of the atmospheric forcing, the model-based estimates can rapidly deviate from reality. Therefore, while observations are not directly used in the production of ERA5-Land, they have an indirect influence through the atmospheric forcing used to run the simulation. In addition, the input air temperature, air humidity and pressure used to run ERA5-Land are corrected to account for the altitude difference between the grid of the forcing and the higher resolution grid of ERA5-Land. This correction is called 'lapse rate correction'. The ERA5-Land dataset, as any other simulation, provides estimates which have some degree of uncertainty. Numerical models can only provide a more or less accurate representation of the real physical processes governing different components of the Earth System. In general, the uncertainty of model estimates grows as we go back in time, because the number of observations available to create a good quality atmospheric forcing is lower. ERA5-Land parameter fields can currently be used in combination with the uncertainty of the equivalent ERA5 fields. The temporal and spatial resolutions of ERA5-Land make this dataset very useful for all kinds of land surface applications such as flood or drought forecasting. The temporal and spatial resolution of this dataset, the period covered in time, as well as the fixed grid used for the data distribution at any period, enable decision makers, businesses and individuals to access and use more accurate information on land states. 
Variables in the dataset/application are: 10m u-component of wind, 10m v-component of wind, 2m dewpoint temperature, 2m temperature, Evaporation from bare soil, Evaporation from open water surfaces excluding oceans, Evaporation from the top of canopy, Evaporation from vegetation transpiration, Forecast albedo, Lake bottom temperature, Lake ice depth, Lake ice temperature, Lake mix-layer depth, Lake mix-layer temperature, Lake shape factor, Lake total layer temperature, Leaf area index, high vegetation, Leaf area index, low vegetation, Potential evaporation, Runoff, Skin reservoir content, Skin temperature, Snow albedo, Snow cover, Snow density, Snow depth, Snow depth water equivalent, Snow evaporation, Snowfall, Snowmelt, Soil temperature level 1, Soil temperature level 2, Soil temperature level 3, Soil temperature level 4, Sub-surface runoff, Surface latent heat flux, Surface net solar radiation, Surface net thermal radiation, Surface pressure, Surface runoff, Surface sensible heat flux, Surface solar radiation downwards, Surface thermal radiation downwards, Temperature of snow layer, Total evaporation, Total precipitation, Volumetric soil water layer 1, Volumetric soil water layer 2, Volumetric soil water layer 3, Volumetric soil water layer 4 ",,ERA5,ERA5,,"ECMWF,Reanalysis,ERA5,CDS,Atmospheric,land,hourly,evolution",ATMOSPHERIC,proprietary,ERA5-Land hourly data from 1950 to present,1950-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +ERA5_LAND_MONTHLY,"ERA5-Land is a reanalysis dataset providing a consistent view of the evolution of land variables over several decades at an enhanced resolution compared to ERA5. ERA5-Land has been produced by replaying the land component of the ECMWF ERA5 climate reanalysis. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using the laws of physics. Reanalysis produces data that goes several decades back in time, providing an accurate description of the climate of the past. ERA5-Land provides a consistent view of the water and energy cycles at surface level during several decades. It contains a detailed record from 1950 onwards, with a temporal resolution of 1 hour. The native spatial resolution of the ERA5-Land reanalysis dataset is 9km on a reduced Gaussian grid (TCo1279). The data in the CDS has been regridded to a regular lat-lon grid of 0.1x0.1 degrees. The data presented here is a post-processed subset of the full ERA5-Land dataset. Monthly-mean averages have been pre-calculated to facilitate many applications requiring easy and fast access to the data, when sub-monthly fields are not required. Hourly fields can be found in the ERA5-Land hourly fields CDS page. Documentation can be found in the online ERA5-Land documentation. 
Variables in the dataset/application are: 10m u-component of wind, 10m v-component of wind, 2m dewpoint temperature, 2m temperature, Evaporation from bare soil, Evaporation from open water surfaces excluding oceans, Evaporation from the top of canopy, Evaporation from vegetation transpiration, Forecast albedo, Lake bottom temperature, Lake ice depth, Lake ice temperature, Lake mix-layer depth, Lake mix-layer temperature, Lake shape factor, Lake total layer temperature, Leaf area index, high vegetation, Leaf area index, low vegetation, Potential evaporation, Runoff, Skin reservoir content, Skin temperature, Snow albedo, Snow cover, Snow density, Snow depth, Snow depth water equivalent, Snow evaporation, Snowfall, Snowmelt, Soil temperature level 1, Soil temperature level 2, Soil temperature level 3, Soil temperature level 4, Sub-surface runoff, Surface latent heat flux, Surface net solar radiation, Surface net thermal radiation, Surface pressure, Surface runoff, Surface sensible heat flux, Surface solar radiation downwards, Surface thermal radiation downwards, Temperature of snow layer, Total evaporation, Total precipitation, Volumetric soil water layer 1, Volumetric soil water layer 2, Volumetric soil water layer 3, Volumetric soil water layer 4 ",,ERA5,ERA5,,"ECMWF,Reanalysis,ERA5,CDS,Atmospheric,land,monthly,evolution",ATMOSPHERIC,proprietary,ERA5-Land monthly averaged data from 1950 to present,1950-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +ERA5_PL,"ERA5 is the fifth generation ECMWF reanalysis for the global climate and weather for the past 4 to 7 decades. Currently data is available from 1950, split into Climate Data Store entries for 1950-1978 (preliminary back extension) and from 1979 onwards (final release plus timely updates, this page). ERA5 replaces the ERA-Interim reanalysis. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using the laws of physics. This principle, called data assimilation, is based on the method used by numerical weather prediction centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. Reanalysis works in the same way, but at reduced resolution to allow for the provision of a dataset spanning back several decades. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. ERA5 provides hourly estimates for a large number of atmospheric, ocean-wave and land-surface quantities. An uncertainty estimate is sampled by an underlying 10-member ensemble at three-hourly intervals. Ensemble mean and spread have been pre-computed for convenience. Such uncertainty estimates are closely related to the information content of the available observing system which has evolved considerably over time. They also indicate flow-dependent sensitive areas. To facilitate many climate applications, monthly-mean averages have been pre-calculated too, though monthly means are not available for the ensemble mean and spread. ERA5 is updated daily with a latency of about 5 days. 
In case that serious flaws are detected in this early release (called ERA5T), this data could be different from the final release 2 to 3 months later. So far this has not been the case and when this does occur users will be notified. The data set presented here is a regridded subset of the full ERA5 data set on native resolution. It is online on spinning disk, which should ensure fast and easy access. It should satisfy the requirements for most common applications. An overview of all ERA5 datasets can be found in this article. Information on access to ERA5 data on native resolution is provided in these guidelines. Data has been regridded to a regular lat-lon grid of 0.25 degrees for the reanalysis and 0.5 degrees for the uncertainty estimate (0.5 and 1 degree respectively for ocean waves). There are four main sub sets: hourly and monthly products, both on pressure levels (upper air fields) and single levels (atmospheric, ocean-wave and land surface quantities). The present entry is ""ERA5 hourly data on pressure levels from 1979 to present"". Variables in the dataset/application are: Divergence, Fraction of cloud cover, Geopotential, Ozone mass mixing ratio, Potential vorticity, Relative humidity, Specific cloud ice water content, Specific cloud liquid water content, Specific humidity, Specific rain water content, Specific snow water content, Temperature, U-component of wind, V-component of wind, Vertical velocity, Vorticity (relative) ",,ERA5,ERA5,,"ECMWF,Reanalysis,ERA5,CDS,Atmospheric,land,sea,hourly,pressure,levels",ATMOSPHERIC,proprietary,ERA5 hourly data on pressure levels from 1940 to present,1940-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +ERA5_PL_MONTHLY,"ERA5 is the fifth generation ECMWF reanalysis for the global climate and weather for the past 8 decades. Data is available from 1940 onwards. ERA5 replaces the ERA-Interim reanalysis. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using the laws of physics. This principle, called data assimilation, is based on the method used by numerical weather prediction centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. Reanalysis works in the same way, but at reduced resolution to allow for the provision of a dataset spanning back several decades. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. ERA5 provides hourly estimates for a large number of atmospheric, ocean-wave and land-surface quantities. An uncertainty estimate is sampled by an underlying 10-member ensemble at three-hourly intervals. Ensemble mean and spread have been pre-computed for convenience. Such uncertainty estimates are closely related to the information content of the available observing system which has evolved considerably over time. They also indicate flow-dependent sensitive areas. To facilitate many climate applications, monthly-mean averages have been pre-calculated too, though monthly means are not available for the ensemble mean and spread. 
ERA5 is updated daily with a latency of about 5 days (monthly means are available around the 6th of each month). In case that serious flaws are detected in this early release (called ERA5T), this data could be different from the final release 2 to 3 months later. So far this has only been the case for the month September 2021, while it will also be the case for October, November and December 2021. For months prior to September 2021 the final release has always been equal to ERA5T, and the goal is to align the two again after December 2021. In case that this occurs users are notified. The data set presented here is a regridded subset of the full ERA5 data set on native resolution. It is online on spinning disk, which should ensure fast and easy access. It should satisfy the requirements for most common applications. Data has been regridded to a regular lat-lon grid of 0.25 degrees for the reanalysis and 0.5 degrees for the uncertainty estimate (0.5 and 1 degree respectively for ocean waves). There are four main sub sets: hourly and monthly products, both on pressure levels (upper air fields) and single levels (atmospheric, ocean-wave and land surface quantities). ",,ERA5,ERA5,,"Climate,ECMWF,Reanalysis,ERA5,CDS,Atmospheric,land,sea,monthly,pressure,levels",ATMOSPHERIC,proprietary,ERA5 monthly averaged data on pressure levels from 1940 to present,1940-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +ERA5_SL,"ERA5 is the fifth generation ECMWF reanalysis for the global climate and weather for the past 8 decades. Data is available from 1940 onwards. ERA5 replaces the ERA-Interim reanalysis. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using the laws of physics. This principle, called data assimilation, is based on the method used by numerical weather prediction centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. Reanalysis works in the same way, but at reduced resolution to allow for the provision of a dataset spanning back several decades. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. ERA5 provides hourly estimates for a large number of atmospheric, ocean-wave and land-surface quantities. An uncertainty estimate is sampled by an underlying 10-member ensemble at three-hourly intervals. Ensemble mean and spread have been pre-computed for convenience. Such uncertainty estimates are closely related to the information content of the available observing system which has evolved considerably over time. They also indicate flow-dependent sensitive areas. To facilitate many climate applications, monthly-mean averages have been pre-calculated too, though monthly means are not available for the ensemble mean and spread. ERA5 is updated daily with a latency of about 5 days. 
In case that serious flaws are detected in this early release (called ERA5T), this data could be different from the final release 2 to 3 months later. In case that this occurs users are notified. The data set presented here is a regridded subset of the full ERA5 data set on native resolution. It is online on spinning disk, which should ensure fast and easy access. It should satisfy the requirements for most common applications. Data has been regridded to a regular lat-lon grid of 0.25 degrees for the reanalysis and 0.5 degrees for the uncertainty estimate (0.5 and 1 degree respectively for ocean waves). There are four main sub sets: hourly and monthly products, both on pressure levels (upper air fields) and single levels (atmospheric, ocean-wave and land surface quantities). ",,ERA5,ERA5,,"ECMWF,Reanalysis,ERA5,CDS,Atmospheric,land,sea,hourly,single,levels",ATMOSPHERIC,proprietary,ERA5 hourly data on single levels from 1940 to present,1940-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +ERA5_SL_MONTHLY,"ERA5 is the fifth generation ECMWF reanalysis for the global climate and weather for the past 8 decades. Data is available from 1940 onwards. ERA5 replaces the ERA-Interim reanalysis. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using the laws of physics. This principle, called data assimilation, is based on the method used by numerical weather prediction centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. Reanalysis works in the same way, but at reduced resolution to allow for the provision of a dataset spanning back several decades. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. ERA5 provides hourly estimates for a large number of atmospheric, ocean-wave and land-surface quantities. An uncertainty estimate is sampled by an underlying 10-member ensemble at three-hourly intervals. Ensemble mean and spread have been pre-computed for convenience. Such uncertainty estimates are closely related to the information content of the available observing system which has evolved considerably over time. They also indicate flow-dependent sensitive areas. To facilitate many climate applications, monthly-mean averages have been pre-calculated too, though monthly means are not available for the ensemble mean and spread. ERA5 is updated daily with a latency of about 5 days (monthly means are available around the 6th of each month). In case that serious flaws are detected in this early release (called ERA5T), this data could be different from the final release 2 to 3 months later. In case that this occurs users are notified. The data set presented here is a regridded subset of the full ERA5 data set on native resolution. It is online on spinning disk, which should ensure fast and easy access. It should satisfy the requirements for most common applications. Data has been regridded to a regular lat-lon grid of 0.25 degrees for the reanalysis and 0.5 degrees for the uncertainty estimate (0.5 and 1 degree respectively for ocean waves). 
There are four main sub sets: hourly and monthly products, both on pressure levels (upper air fields) and single levels (atmospheric, ocean-wave and land surface quantities). ",,ERA5,ERA5,,"Climate,ECMWF,Reanalysis,ERA5,CDS,Atmospheric,land,sea,monthly,single,levels",ATMOSPHERIC,proprietary,ERA5 monthly averaged data on single levels from 1940 to present,1940-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +FIRE_HISTORICAL,"This data set provides a complete historical reconstruction of meteorological conditions favourable to the start, spread and sustainability of fires. The fire danger metrics provided are part of a vast dataset produced by the Copernicus Emergency Management Service for the European Forest Fire Information System (EFFIS). The European Forest Fire Information System incorporates the fire danger indices for three different models developed in Canada, the United States and Australia. In this dataset the fire danger indices are calculated using weather forecasts from historical simulations provided by the ECMWF ERA5 reanalysis. ERA5, by combining model data and a vast set of quality-controlled observations, provides a globally complete and consistent dataset and is regarded as a good proxy for observed atmospheric conditions. The selected data records in this data set are regularly extended with time as ERA5 forcing data become available. This dataset is produced by ECMWF in its role as the computational centre for fire danger forecast of the CEMS, on behalf of the Joint Research Centre which is the managing entity of the service. Variables in the dataset/application are: Build-up index, Burning index, Danger rating, Drought code, Duff moisture code, Energy release component, Fine fuel moisture code, Fire daily severity index, Fire danger index, Fire weather index, Ignition component, Initial spread index, Keetch-Byram drought index, Spread component ",,CEMS,CEMS,,"ECMWF,EFFIS,fire,historical,ERA5,european,sustainability,CEMS,system",ATMOSPHERIC,proprietary,Fire danger indices historical data from the Copernicus Emergency Management Service,1940-01-03T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +GLACIERS_DIST_RANDOLPH,"A glacier is defined as a perennial mass of ice, and possibly firn and snow, originating on the land surface from the recrystallization of snow or other forms of solid precipitation and showing evidence of past or present flow. There are several types of glaciers such as glacierets, mountain glaciers, valley glaciers and ice fields, as well as ice caps. Some glacier tongues reach into lakes or the sea, and can develop floating ice tongues or ice shelves. Glacier changes are recognized as independent and high-confidence natural indicators of climate change. Past, current and future glacier changes affect global sea level, the regional water cycle and local hazards.\nThis dataset is a snapshot of global glacier outlines compiled from\nmaps, aerial photographs and satellite images mostly acquired in the period 2000-2010. ",,,INSITU,,"ECMWF,WGMS,INSITU,CDS,C3S,glacier,randolph,distribution,inventory",ATMOSPHERIC,proprietary,Glaciers distribution data from the Randolph Glacier Inventory for year 2000,2000-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +GLACIERS_ELEVATION_AND_MASS_CHANGE,This dataset provides in situ and remote sensing derived glacier changes from individual glaciers globally. 
The dataset represents the latest homogenized state-of-the-art glacier change data collected by scientists and the national correspondents of each country as provided to the World Glacier Monitoring Service (WGMS). The product is an extract of the WGMS Fluctuations of Glaciers (FoG) database and consists of two data sets providing time series of glacier changes: glacier elevation change series from the geodetic method and glacier mass-balance series from the glaciological method ,,INSITU,INSITU,,"ECMWF,WGMS,INSITU,CDS,C3S,glacier,elevation,mass,change",ATMOSPHERIC,proprietary,Glaciers elevation and mass change data from 1850 to present from the Fluctuations of Glaciers Database,1850-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +GLOFAS_FORECAST,"This dataset contains global modelled daily data of river discharge forced with meteorological forecasts. The data was produced by the Global Flood Awareness System (GloFAS), which is part of the Copernicus Emergency Management Service (CEMS). River discharge, or river flow as it is also known, is defined as the amount of water that flows through a river section at a given time. \nThis dataset is simulated by forcing a hydrological modelling chain with input from the ECMWF ensemble forecast combined with the ECMWF extended-range ensemble forecast up to 30 days. Data availability for the GloFAS forecast is from 2019-11-05 up to near real time.\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours\n\nVariables in the dataset/application are:\nUpstream area ",,CEMS,CEMS,,"ECMWF,CEMS,GloFAS,forecast,river,discharge",ATMOSPHERIC,proprietary,River discharge and related forecasted data by the Global Flood Awareness System,2021-05-26T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +GLOFAS_HISTORICAL,"This dataset contains global modelled daily data of river discharge from the Global Flood Awareness System (GloFAS), which is part of the Copernicus Emergency Management Service (CEMS). River discharge, or river flow as it is also known, is defined as the amount of water that flows through a river section at a given time. \nThis dataset is simulated by forcing a hydrological modelling chain with inputs from a global reanalysis. Data availability for the historical simulation is from 1979-01-01 up to near real time.\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours\n\nVariables in the dataset/application are:\nUpstream area ",,CEMS,CEMS,,"ECMWF,CEMS,GloFAS,historical,river,discharge",ATMOSPHERIC,proprietary,River discharge and related historical data from the Global Flood Awareness System,1979-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +GLOFAS_REFORECAST,"This dataset provides a gridded modelled time series of river discharge, forced with medium- to sub-seasonal range meteorological reforecasts. The data is a consistent representation of a key hydrological variable across the global domain, and is a product of the Global Flood Awareness System (GloFAS). It is accompanied by an ancillary file for interpretation that provides the upstream area (see the related variables table and associated link in the documentation).\nThis dataset was produced by forcing a hydrological modelling chain with input from the European Centre for Medium-Range Weather Forecasts (ECMWF) 11-member ensemble ECMWF-ENS reforecasts. 
Reforecasts are forecasts run over past dates, and those presented here are used for providing a suitably long time period against which the skill of the 30-day real-time operational forecast can be assessed. The reforecasts are initialised twice weekly with lead times up to 46 days, at 24-hour steps for 20 years in the recent history. For more specific information on how the reforecast dataset is produced we refer to the documentation.\nCompanion datasets, also available through the Climate Data Store (CDS), are the operational forecasts, historical simulations that can be used to derive the hydrological climatology, and seasonal forecasts and reforecasts for users looking for long term forecasts. For users looking specifically for European hydrological data, we refer to the European Flood Awareness System (EFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours\n\nVariables in the dataset/application are:\nUpstream area ",,CEMS,CEMS,,"ECMWF,CEMS,GloFAS,reforecast,river,discharge",ATMOSPHERIC,proprietary,Reforecasts of river discharge and related data by the Global Flood Awareness System,2003-03-27T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +GLOFAS_SEASONAL,"This dataset provides a gridded modelled time series of river discharge, forced with seasonal range meteorological forecasts. The data is a consistent representation of a key hydrological variable across the global domain, and is a product of the Global Flood Awareness System (GloFAS). It is accompanied by an ancillary file for interpretation that provides the upstream area (see the related variables table and associated link in the documentation).\nThis dataset was produced by forcing the LISFLOOD hydrological model at a 0.1° (~11 km at the equator) resolution with downscaled runoff forecasts from the European Centre for Medium-Range Weather Forecasts (ECMWF) 51-member ensemble seasonal forecasting system, SEAS5. The forecasts are initialised on the first of each month with a 24-hourly time step, and cover 123 days.\nCompanion datasets, also available through the Climate Data Store (CDS), are the operational forecasts, historical simulations that can be used to derive the hydrological climatology, and medium-range and seasonal reforecasts. The latter dataset enables research, local skill assessment and post-processing of the seasonal forecasts. In addition, the seasonal reforecasts are also used to derive a specific range dependent climatology for the seasonal system. For users looking specifically for European hydrological data, we refer to the European Flood Awareness System (EFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours\n\nVariables in the dataset/application are:\nUpstream area ",,CEMS,CEMS,,"ECMWF,CEMS,GloFAS,seasonal,forecast,river,discharge",ATMOSPHERIC,proprietary,Seasonal forecasts of river discharge and related data by the Global Flood Awareness System,2021-06-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +GLOFAS_SEASONAL_REFORECAST,"This dataset provides a gridded modelled time series of river discharge forced with seasonal range meteorological reforecasts. 
The data is a consistent representation of a key hydrological variable across the global domain, and is a product of the Global Flood Awareness System (GloFAS). It is accompanied by an ancillary file for interpretation that provides the upstream area (see the related variables table and associated link in the documentation).\nThis dataset was produced by forcing a hydrological modelling chain with input from the European Centre for Medium-Range Weather Forecasts (ECMWF) ensemble seasonal forecasting system, SEAS5. For the period of 1981 to 2016 the number of ensemble members is 25, whilst reforecasts produced for 2017 onwards use a 51-member ensemble. Reforecasts are forecasts run over past dates, with those presented here used for producing the seasonal river discharge thresholds. In addition, they provide a suitably long time period against which the skill of the seasonal forecast can be assessed. The reforecasts are initialised monthly and run for 123 days, with a 24-hourly time step. For more specific information on how the seasonal reforecast dataset is produced we refer to the documentation.\nCompanion datasets, also available through the Climate Data Store (CDS), include the seasonal forecasts, for which the dataset provided here can be useful for local skill assessment and post-processing. For users looking for shorter term forecasts there are also medium-range forecasts and reforecasts available, as well as historical simulations that can be used to derive the hydrological climatology. For users looking specifically for European hydrological data, we refer to the European Flood Awareness System (EFAS) forecasts and historical simulations. All these datasets are part of the operational flood forecasting within the Copernicus Emergency Management Service (CEMS).\n\nVariables in the dataset/application are:\nRiver discharge in the last 24 hours\n\nVariables in the dataset/application are:\nUpstream area ",,CEMS,CEMS,,"ECMWF,CEMS,GloFAS,seasonal,forecast,river,discharge",ATMOSPHERIC,proprietary,Seasonal reforecasts of river discharge and related data from the Global Flood Awareness System,1981-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +L57_REFLECTANCE,"Landsat 5,7,8 L2A data (old format) distributed by Theia (2014 to 2017-03-20) using MUSCATE prototype, Lambert 93 projection. ","OLI,TIRS",LANDSAT,"L5,L7,L8",L2A,"OLI,TIRS,LANDSAT,L5,L7,L8,L2,L2A,MUSCATE",OPTICAL,proprietary,"Landsat 5,7,8 Level-2A",2014-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +L8_OLI_TIRS_C1L1,Landsat 8 Operational Land Imager and Thermal Infrared Sensor Collection 1 Level-1 products. Details at https://landsat.usgs.gov/sites/default/files/documents/LSDS-1656_Landsat_Level-1_Product_Collection_Definition.pdf ,"OLI,TIRS",LANDSAT8,L8,L1,"OLI,TIRS,LANDSAT,LANDSAT8,L8,L1,C1,COLLECTION1",OPTICAL,proprietary,Landsat 8 Level-1,2013-02-11T00:00:00Z,,available,,,,,,,,available,,,,available,,,,,,, +L8_REFLECTANCE,"Landsat 8 L2A data distributed by Theia since 2017-03-20 using operational version of MUSCATE, UTM projection, and tiled using Sentinel-2 tiles. ","OLI,TIRS",LANDSAT8,L8,L2A,"OLI,TIRS,LANDSAT,LANDSAT8,L8,L2,L2A,MUSCATE",OPTICAL,proprietary,Landsat 8 Level-2A,2013-02-11T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +LANDSAT_C2L1,The Landsat Level-1 product is a top of atmosphere product distributed as scaled and calibrated digital numbers. 
,"OLI,TIRS",LANDSAT,"L1,L2,L3,L4,L5,L6,L7,L8",L1,"OLI,TIRS,LANDSAT,L1,L2,L3,L4,L5,L6,L7,L8,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-1 Product,1972-07-25T00:00:00Z,available,,,,,,,,,,,,,,,available,,,available,available, +LANDSAT_C2L2,Collection 2 Landsat OLI/TIRS Level-2 Science Products (L2SP) include Surface Reflectance and Surface Temperature scene-based products. ,"OLI,TIRS",LANDSAT,"L8,L9",L1,"OLI,TIRS,LANDSAT,L8,L9,L2,C2,COLLECTION2",OPTICAL,proprietary,Landsat OLI and TIRS Collection 2 Level-2 Science Products 30-meter multispectral data.,2013-02-11T00:00:00Z,,,,,,,,available,,,,,,,,available,,,available,, +LANDSAT_C2L2ALB_BT,"The Landsat Top of Atmosphere Brightness Temperature (BT) product is a top of atmosphere product with radiance calculated 'at-sensor', not atmospherically corrected, and expressed in units of Kelvin. ","OLI,TIRS",LANDSAT,"L4,L5,L7,L8",L2,"OLI,TIRS,LANDSAT,L4,L5,L7,L8,L2,BT,Brightness,Temperature,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-2 Albers Top of Atmosphere Brightness Temperature (BT) Product,1982-08-22T00:00:00Z,,,,,,,,,,,,,,,,,,,,available, +LANDSAT_C2L2ALB_SR,The Landsat Surface Reflectance (SR) product measures the fraction of incoming solar radiation that is reflected from Earth's surface to the Landsat sensor. ,"OLI,TIRS",LANDSAT,"L4,L5,L7,L8",L2,"OLI,TIRS,LANDSAT,L4,L5,L7,L8,L2,L2ALB,SR,Surface,Reflectance,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-2 Albers Surface Reflectance (SR) Product,1982-08-22T00:00:00Z,,,,,,,,,,,,,,,,,,,,available, +LANDSAT_C2L2ALB_ST,The Landsat Surface Temperature (ST) product represents the temperature of the Earth's surface in Kelvin (K). ,"OLI,TIRS",LANDSAT,"L4,L5,L7,L8",L2,"OLI,TIRS,LANDSAT,L4,L5,L7,L8,L2,L2ALB,Surface,Temperature,ST,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-2 Albers Surface Temperature (ST) Product,1982-08-22T00:00:00Z,,,,,,,,,,,,,,,,,,,,available, +LANDSAT_C2L2ALB_TA,The Landsat Top of Atmosphere (TA) Reflectance product applies per pixel angle band corrections to the Level-1 radiance product. ,"OLI,TIRS",LANDSAT,"L4,L5,L7,L8",L2,"OLI,TIRS,LANDSAT,L4,L5,L7,L8,L2,L2ALB,TA,Top,Atmosphere,Reflectance,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-2 Albers Top of Atmosphere (TA) Reflectance Product,1982-08-22T00:00:00Z,,,,,,,,,,,,,,,,,,,,available, +LANDSAT_C2L2_SR,The Landsat Surface Reflectance (SR) product measures the fraction of incoming solar radiation that is reflected from Earth's surface to the Landsat sensor. ,"OLI,TIRS",LANDSAT,"L4,L5,L7,L8",L2,"OLI,TIRS,LANDSAT,L4,L5,L7,L8,L2,SR,surface,reflectance,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-2 UTM Surface Reflectance (SR) Product,1982-08-22T00:00:00Z,,,,,,,,,,,,,,,,,,,,available, +LANDSAT_C2L2_ST,The Landsat Surface Temperature (ST) product represents the temperature of the Earth's surface in Kelvin (K). ,"OLI,TIRS",LANDSAT,"L4,L5,L7,L8",L2,"OLI,TIRS,LANDSAT,L4,L5,L7,L8,L2,ST,surface,temperature,C2,COLLECTION2",OPTICAL,proprietary,Landsat Collection 2 Level-2 UTM Surface Temperature (ST) Product,1982-08-22T00:00:00Z,,,,,,,,,,,,,,,,,,,,available, +MODIS_MCD43A4,"The MODerate-resolution Imaging Spectroradiometer (MODIS) Reflectance product MCD43A4 provides 500 meter reflectance data adjusted using a bidirectional reflectance distribution function (BRDF) to model the values as if they were taken from nadir view. The MCD43A4 product contains 16 days of data provided in a level-3 gridded data set in Sinusoidal projection. 
Both Terra and Aqua data are used in the generation of this product, providing the highest probability for quality assurance input data. It is designated with a shortname beginning with MCD, which is used to refer to 'combined' products, those composed of data from both Terra and Aqua. ",MODIS,Terra+Aqua,EOS AM-1+PM-1,L3,"MODIS,Terra,Aqua,EOS,AM-1+PM-1,L3,MCD43A4",OPTICAL,proprietary,MODIS MCD43A4,2000-03-05T00:00:00Z,available,available,,,,,,,,,,,,,,available,,,,, +NAIP,"The National Agriculture Imagery Program (NAIP) acquires aerial imagery during the agricultural growing seasons in the continental U.S. This ""leaf-on"" imagery typically ranges from 60 centimeters to 100 centimeters in resolution and is available from the naip-analytic Amazon S3 bucket as 4-band (RGB + NIR) imagery in MRF format. NAIP data is delivered at the state level; every year, a number of states receive updates, with an overall update cycle of two or three years. The tiling format of NAIP imagery is based on a 3.75' x 3.75' quarter quadrangle with a 300 meter buffer on all four sides. NAIP imagery is formatted to the UTM coordinate system using NAD83. NAIP imagery may contain as much as 10% cloud cover per tile. ",film and digital cameras,National Agriculture Imagery Program,NAIP,N/A,"film,digital,cameras,Agriculture,NAIP",OPTICAL,proprietary,National Agriculture Imagery Program,2003-01-01T00:00:00Z,available,available,,,,,,available,,,,,,,,available,,,,, +NEMSAUTO_TCDC,Total cloud cover from the NOAA Environment Monitoring System (NEMS) automatic domain switch. NEMSAUTO is the automatic delivery of the highest resolution meteoblue model available for any requested period of time and location. The NEMS model family consists of improved NMM successors (operational since 2013). NEMS is a multi-scale model (used from global down to local domains) and significantly improves cloud-development and precipitation forecasts. Note that automatic domain switching is only supported for multi point queries. Support for polygons may follow later. ,,NEMSAUTO,NEMSAUTO,,"meteoblue,NEMS,NEMSAUTO,CLOUD,COVER,TOTAL,TCDC,DAILY,MEAN",ATMOSPHERIC,proprietary,NEMSAUTO Total Cloud Cover daily mean,1984-01-01T00:00:00Z,,,,,,,,,,,,,available,,,,,,,, +NEMSGLOBAL_TCDC,Total cloud cover from the NOAA Environment Monitoring System (NEMS) global model. NEMSGLOBAL has 30km spatial and 1h temporal resolutions and produces seamless datasets from 1984 to 7 days ahead. 
,,NEMSGLOBAL,NEMSGLOBAL,,"meteoblue,NEMS,NEMSGLOBAL,CLOUD,COVER,TOTAL,TCDC,DAILY,MEAN",ATMOSPHERIC,proprietary,NEMSGLOBAL Total Cloud Cover daily mean,1984-01-01T00:00:00Z,,,,,,,,,,,,,available,,,,,,,, +OSO,An overview of OSO Land Cover data is given on https://www.theia-land.fr/en/ceslist/land-cover-sec/ and the specific description of OSO products is available on https://www.theia-land.fr/product/carte-doccupation-des-sols-de-la-france-metropolitaine/ ,,,,L3B,"L3B,OSO,land,cover",,proprietary,OSO Land Cover,2016-01-01T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +PLD_BUNDLE,"Pleiades Bundle (Pan, XS)",PHR,PLEIADES,"P1A,P1B",PRIMARY,"PHR,PLEIADES,P1A,P1B,PRIMARY,PLD,BUNDLE,Pan,Xs",OPTICAL,proprietary,Pleiades Bundle,2011-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +PLD_PAN,Pleiades Panchromatic (Pan),PHR,PLEIADES,"P1A,P1B",PRIMARY,"PHR,PLEIADES,P1A,P1B,PRIMARY,PLD,PAN,Panchromatic",OPTICAL,proprietary,Pleiades Panchromatic,2011-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +PLD_PANSHARPENED,Pleiades Pansharpened (Pan+XS),PHR,PLEIADES,"P1A,P1B",PRIMARY,"PHR,PLEIADES,P1A,P1B,PRIMARY,PLD,PANSHARPENED,Pan,Xs",OPTICAL,proprietary,Pleiades Pansharpened,2011-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +PLD_XS,Pleiades Multispectral (XS),PHR,PLEIADES,"P1A,P1B",PRIMARY,"PHR,PLEIADES,P1A,P1B,PRIMARY,PLD,XS,Multispectral",OPTICAL,proprietary,Pleiades Multispectral,2011-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +S1_SAR_GRD,"Level-1 Ground Range Detected (GRD) products consist of focused SAR data that has been detected, multi-looked and projected to ground range using an Earth ellipsoid model. Phase information is lost. The resulting product has approximately square spatial resolution pixels and square pixel spacing with reduced speckle at the cost of worse spatial resolution. GRD products can be in one of three resolutions: | Full Resolution (FR), High Resolution (HR), Medium Resolution (MR). The resolution is dependent upon the amount of multi-looking performed. Level-1 GRD products are available in MR and HR for IW and EW modes, MR for WV mode and MR, HR and FR for SM mode. SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/data-formats/safe-specification ",SAR,SENTINEL1,"S1A,S1B",L1,"SAR,SENTINEL,SENTINEL1,S1,S1A,S1B,L1,GRD,SAFE",RADAR,proprietary,SENTINEL1 Level-1 Ground Range Detected,2014-04-03T00:00:00Z,available,available,,,available,available,available,available,,,,,,available,available,available,available,,,,available +S1_SAR_OCN,"Level-2 OCN products include components for Ocean Swell spectra (OSW) providing continuity with ERS and ASAR WV and two new components: Ocean Wind Fields (OWI) and Surface Radial Velocities (RVL). The OSW is a two-dimensional ocean surface swell spectrum and includes an estimate of the wind speed and direction per swell spectrum. The OSW is generated from Stripmap and Wave modes only. For Stripmap mode, there are multiple spectra derived from internally generated Level-1 SLC images. For Wave mode, there is one spectrum per vignette. The OWI is a ground range gridded estimate of the surface wind speed and direction at 10 m above the surface derived from internally generated Level-1 GRD images of SM, IW or EW modes. The RVL is a ground range gridded difference between the measured Level-2 Doppler grid and the Level-1 calculated geometrical Doppler. 
SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/data-formats/safe-specification ",SAR,SENTINEL1,"S1A,S1B",L2,"SAR,SENTINEL,SENTINEL1,S1,S1A,S1B,L2,OCN,SAFE",RADAR,proprietary,SENTINEL1 Level-2 OCN,2014-04-03T00:00:00Z,,,,,available,available,available,,,,,,,available,available,,available,,,,available +S1_SAR_RAW,"The SAR Level-0 products consist of the sequence of Flexible Dynamic Block Adaptive Quantization (FDBAQ) compressed unfocused SAR raw data. For the data to be usable, it will need to be decompressed and processed using a SAR processor. SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/data-formats/safe-specification ",SAR,SENTINEL1,"S1A,S1B",L0,"SAR,SENTINEL,SENTINEL1,S1,S1A,S1B,L0,RAW,SAFE",RADAR,proprietary,SENTINEL1 SAR Level-0,2014-04-03T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,,available +S1_SAR_SLC,"Level-1 Single Look Complex (SLC) products consist of focused SAR data geo-referenced using orbit and attitude data from the satellite and provided in zero-Doppler slant-range geometry. The products include a single look in each dimension using the full transmit signal bandwidth and consist of complex samples preserving the phase information. SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/data-formats/safe-specification ",SAR,SENTINEL1,"S1A,S1B",L1,"SAR,SENTINEL,SENTINEL1,S1,S1A,S1B,L1,SLC,SAFE",RADAR,proprietary,SENTINEL1 Level-1 Single Look Complex,2014-04-03T00:00:00Z,,,,,available,available,available,,,,,,,available,available,,available,,,,available +S2_MSI_L1C,"The Level-1C product is composed of 100x100 km2 tiles (ortho-images in UTM/WGS84 projection). It results from using a Digital Elevation Model (DEM) to project the image in cartographic geometry. Per-pixel radiometric measurements are provided in Top Of Atmosphere (TOA) reflectances along with the parameters to transform them into radiances. Level-1C products are resampled with a constant Ground Sampling Distance (GSD) of 10, 20 and 60 meters depending on the native resolution of the different spectral bands. In Level-1C products, pixel coordinates refer to the upper left corner of the pixel. Level-1C products will additionally include Cloud Masks and ECMWF data (total column of ozone, total column of water vapour and mean sea level pressure). SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-2-msi/data-formats ",MSI,SENTINEL2,"S2A,S2B",L1,"MSI,SENTINEL,SENTINEL2,S2,S2A,S2B,L1,L1C,SAFE",OPTICAL,proprietary,SENTINEL2 Level-1C,2015-06-23T00:00:00Z,available,available,,,available,available,available,available,,available,,,,available,available,,available,,available,,available +S2_MSI_L2A,"The Level-2A product provides Bottom Of Atmosphere (BOA) reflectance images derived from the associated Level-1C products. Each Level-2A product is composed of 100x100 km2 tiles in cartographic geometry (UTM/WGS84 projection). SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-2-msi/data-formats ",MSI,SENTINEL2,"S2A,S2B",L2,"MSI,SENTINEL,SENTINEL2,S2,S2A,S2B,L2,L2A,SAFE",OPTICAL,proprietary,SENTINEL2 Level-2A,2018-03-26T00:00:00Z,available,available,,,available,available,available,,,,,,,available,,available,available,,,,available +S2_MSI_L2AP,"The Level-2A product provides Bottom Of Atmosphere (BOA) reflectance images derived from the associated Level-1C products. 
Each Level-2A product is composed of 100x100 km2 tiles in cartographic geometry (UTM/WGS84 projection). SAFE formatted product, see https://sentinel.esa.int/web/sentinel/user-guides/sentinel-2-msi/data-formats. Level-2AP products are the pilot version of the Level-2A products generated by ESA until March 2018. After March 2018, they are operational products ",MSI,SENTINEL2,"S2A,S2B",L2,"MSI,SENTINEL,SENTINEL2,S2,S2A,S2B,L2,L2A,SAFE, pilot",OPTICAL,proprietary,SENTINEL2 Level-2A pilot,2017-05-23T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S2_MSI_L2A_COG,"The Level-2A product provides Bottom Of Atmosphere (BOA) reflectance images derived from the associated Level-1C products. Each Level-2A product is composed of 100x100 km2 tiles in cartographic geometry (UTM/WGS84 projection). Product containing Cloud Optimized GeoTIFF images, without SAFE formatting. ",MSI,SENTINEL2,"S2A,S2B",L2,"MSI,SENTINEL,SENTINEL2,S2,S2A,S2B,L2,L2A,COG",OPTICAL,proprietary,SENTINEL2 Level-2A,2015-06-23T00:00:00Z,,,,,,,,,available,,,,,,,,,,,, +S2_MSI_L2A_MAJA,"The level 2A products correct the data for atmospheric effects and detect the clouds and their shadows using MAJA. MAJA uses the MUSCATE processing center at CNES, in the framework of the THEIA land data center. Sentinel-2 level 1C data are downloaded from PEPS. The full description of the product format is available at https://theia.cnes.fr/atdistrib/documents/PSC-NT-411-0362-CNES_01_00_SENTINEL-2A_L2A_Products_Description.pdf ",MSI,SENTINEL2,"S2A,S2B",L2,"MSI,SENTINEL,SENTINEL2,S2,S2A,S2B,L2,L2A,MAJA",OPTICAL,proprietary,SENTINEL2 Level-2A,2015-06-23T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +S2_MSI_L2B_MAJA_SNOW,The Theia snow product is derived from Sentinel-2 L2A images generated by Theia. It indicates snow presence or absence on the land surface every fifth day if there is no cloud. The product is distributed by Theia as a raster file (8-bit GeoTIFF) of 20 m resolution and a vector file (Shapefile polygons). More details about the snow product description are available at http://www.cesbio.ups-tlse.fr/multitemp/?page_id=10748#en ,MSI,SENTINEL2,"S2A,S2B",L2,"MSI,MAJA,SENTINEL,sentinel2,S2,S2A,S2B,L2,L2B,SNOW",OPTICAL,proprietary,SENTINEL2 snow product,2015-06-23T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +S2_MSI_L2B_MAJA_WATER,A description of the Land Water Quality data distributed by Theia is available at https://theia.cnes.fr/atdistrib/documents/THEIA-ST-411-0477-CNES_01-03_Format_Specification_of_OBS2CO_WaterColor_Products.pdf ,MSI,SENTINEL2,"S2A,S2B",L2,"MSI,MAJA,SENTINEL,sentinel2,S2,S2A,S2B,L2,L2B,WATER",OPTICAL,proprietary,SENTINEL2 L2B-WATER,2015-06-23T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +S2_MSI_L3A_WASP,"The Level-3A product provides a monthly synthesis of surface reflectances from Theia's L2A products. The synthesis is based on a weighted arithmetic mean of clear observations. The data processing is produced by WASP (Weighted Average Synthesis Processor), by the MUSCATE data center at CNES, in the framework of the THEIA data center. The full description of the product format is available at https://theia.cnes.fr/atdistrib/documents/THEIA-ST-411-0419-CNES_01-04_Format_Specification_of_MUSCATE_Level-3A_Products-signed.pdf ",MSI,SENTINEL2,"S2A,S2B",L3,"MSI,SENTINEL,sentinel2,S2,S2A,S2B,L3,L3A,WASP",OPTICAL,proprietary,SENTINEL2 Level-3A,2015-06-23T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +S3_EFR,"OLCI (Ocean and Land Colour Instrument) Full resolution: 300m at nadir. Level 1 products are calibrated Top Of Atmosphere radiance values at OLCI 21 spectral bands. 
Radiances are computed from the instrument digital counts by applying geo-referencing, radiometric processing (non-linearity correction, smear correction, dark offset correction, absolute gain calibration adjusted for gain evolution with time), and stray-light correction for straylight effects in OLCI camera's spectrometer and ground imager. Additionally, spatial resampling of OLCI pixels to the 'ideal' instrument grid, initial pixel classification, and annotation at tie points with auxiliary meteorological data and acquisition geometry are provided. The radiance products are accompanied by error estimate products, however the error values are currently not available. - All Sentinel-3 NRT products are available at pick-up point in less than 3h. - All Sentinel-3 Non Time Critical (NTC) products are available at pick-up point in less than 30 days. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. ",OLCI,SENTINEL3,"S3A,S3B",L1,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,EFR",OPTICAL,proprietary,SENTINEL3 EFR,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_ERR,"OLCI (Ocean and Land Colour Instrument) Reduced resolution: 1200m at nadir. All Sentinel-3 NRT products are available at pick-up point in less than 3h. Level 1 products are calibrated Top Of Atmosphere radiance values at OLCI 21 spectral bands. Radiances are computed from the instrument digital counts by applying geo-referencing, radiometric processing (non-linearity correction, smear correction, dark offset correction, absolute gain calibration adjusted for gain evolution with time), and stray-light correction for straylight effects in OLCI camera's spectrometer and ground imager. Additionally, spatial resampling of OLCI pixels to the 'ideal' instrument grid, initial pixel classification, and annotation at tie points with auxiliary meteorological data and acquisition geometry are provided. The radiance products are accompanied by error estimate products, however the error values are currently not available. - All Sentinel-3 NRT products are available at pick-up point in less than 3h - All Sentinel-3 Non Time Critical (NTC) products are available at pick-up point in less than 30 days Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. ",OLCI,SENTINEL3,"S3A,S3B",L1,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,ERR",OPTICAL,proprietary,SENTINEL3 ERR,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_LAN,LAN or SR_2_LAN___ (peps),SRAL,SENTINEL3,"S3A,S3B",L2,"SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,LAN",RADAR,proprietary,SENTINEL3 SRAL Level-2 LAN,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_OLCI_L2LFR,"The OLCI Level-2 Land Full Resolution (OL_2_LFR) products contain land and atmospheric geophysical products at Full resolution with a spatial sampling of approximately 300 m. The products are assumed to be computed in Near Real Time (NRT) (i.e. delivered to users less than 3 hours after acquisition), in Non-Time Critical (NTC) (i.e. within 1 month after acquisition) or in re-processed NTC. 
Details at https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-land ",OLCI,SENTINEL3,"S3A,S3B",L2,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2LFR,LFR",OPTICAL,proprietary,SENTINEL3 OLCI Level-2 Land Full Resolution,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_OLCI_L2LRR,"The OLCI Level-2 Land Reduced Resolution (OL_2_LRR) products contain land and atmospheric geophysical products at Reduced resolution with a spatial sampling of approximately 1.2 km. The products are assumed to be computed in Near Real Time (NRT) (i.e. delivered to users less than 3 hours after acquisition), in Non-Time Critical (NTC) (i.e. within 1 month after acquisition) or in re-processed NTC. Details at https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-land ",OLCI,SENTINEL3,"S3A,S3B",L2,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2LRR,LRR",OPTICAL,proprietary,SENTINEL3 OLCI Level-2 Land Reduced Resolution,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_OLCI_L2WFR,"The OLCI Level-2 Water Full Resolution (OL_2_WFR) products contain water and atmospheric geophysical products at Full resolution with a spatial sampling of approximately 300 m. The products are assumed to be computed in Near Real Time (NRT) (i.e. delivered to users less than 3 hours after acquisition), in Non-Time Critical (NTC) (i.e. within 1 month after acquisition) or in re-processed NTC. Details at https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-water ",OLCI,SENTINEL3,"S3A,S3B",L2,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2WFR,WFR",OPTICAL,proprietary,SENTINEL3 OLCI Level-2 Water Full Resolution,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_OLCI_L2WFR_BC003,"OLCI Level 2 Marine products provide spectral information on the colour of the oceans (water reflectances). These radiometric products are used to estimate geophysical parameters e.g. estimates of phytoplankton biomass through determining the Chlorophyll-a (Chl) concentration. In coastal areas, they also allow monitoring of the sediment load via the Total Suspended Matter (TSM) product. Full resolution products are at a nominal 300m resolution. This collection contains reprocessed data from baseline collection 003. Operational data can be found in the corresponding collection. Details at https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-water ",OLCI,SENTINEL3,"S3A,S3B",L2,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2WFR,WFR,REPROCESSED,BC003",OPTICAL,proprietary,SENTINEL3 OLCI Level-2 Water Full Resolution Reprocessed from BC003,2016-02-16T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S3_OLCI_L2WRR,"The OLCI Level-2 Water Reduced Resolution (OL_2_WRR) products contain water and atmospheric geophysical products at Reduced resolution with a spatial sampling of approximately 1.2 km. The products are assumed to be computed in Near Real Time (NRT) (i.e. delivered to users less than 3 hours after acquisition), in Non-Time Critical (NTC) (i.e. within 1 month after acquisition) or in re-processed NTC. 
Details at https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-water ",OLCI,SENTINEL3,"S3A,S3B",L2,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2WRR,WRR",OPTICAL,proprietary,SENTINEL3 OLCI Level-2 Water Reduced Resolution,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_OLCI_L2WRR_BC003,"OLCI Level 2 Marine products provide spectral information on the colour of the oceans (water reflectances). These radiometric products are used to estimate geophysical parameters e.g. estimates of phytoplankton biomass through determining the Chlorophyll-a (Chl) concentration. In coastal areas, they also allow monitoring of the sediment load via the Total Suspended Matter (TSM) product. Reduced resolution products are at a nominal 1km resolution. This collection contains reprocessed data from baseline collection 003. Operational data can be found in the corresponding collection. ",OLCI,SENTINEL3,"S3A,S3B",L2,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2WRR,WRR,REPROCESSED,BC003",OPTICAL,proprietary,SENTINEL3 OLCI Level-2 Water Reduced Resolution Reprocessed from BC003,2016-02-16T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S3_OLCI_L4BALTIC,"Baltic Sea Surface Ocean Colour Plankton from Sentinel-3 OLCI L4 monthly observations. For the Baltic Sea Ocean Satellite Observations, the Italian National Research Council (CNR – Rome, Italy) is providing Bio-Geo-Chemical (BGC) regional datasets: * ''plankton'' with the phytoplankton chlorophyll concentration (CHL) evaluated via region-specific neural network (Brando et al. 2021) Upstreams: OLCI-S3A & S3B Temporal resolution: monthly Spatial resolution: 300 meters To find this product in the catalogue, use the search keyword """"OCEANCOLOUR_BAL_BGC_L4_NRT"""". DOI (product): https://doi.org/10.48670/moi-00295 ",OLCI,SENTINEL3,"S3A,S3B",L4,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L4,BGC,CHL,BALTIC",OPTICAL,proprietary,SENTINEL3 OLCI Baltic Sea Surface Ocean Colour Plankton,2023-04-10T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S3_RAC,Sentinel 3 OLCI products output during Radiometric Calibration mode ,OLCI,SENTINEL3,"S3A,S3B",L1,"OLCI,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,L2,RAC",OPTICAL,proprietary,SENTINEL3 RAC,2016-02-16T00:00:00Z,,,,,,,,,,,,,,,,,available,,,, +S3_SLSTR_L1RBT,"SLSTR Level-1 observation mode products consisting of full resolution, geolocated, co-located nadir and along track view, Top of Atmosphere (TOA) brightness temperatures (in the case of thermal IR channels) or radiances (in the case of visible, NIR and SWIR channels) from all SLSTR channels, and quality flags, pixel classification information and meteorological annotations ",SLSTR,SENTINEL3,"S3A,S3B",L1,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,L1RBT,RBT",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-1,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_SLSTR_L1RBT_BC004,"SLSTR Level 1B Radiances and Brightness Temperatures (version BC004) - Sentinel 3 - Reprocessed The SLSTR level 1 products contain: the radiances of the 6 visible (VIS), Near Infra-Red (NIR) and Short Wave Infra-Red (SWIR) bands (on the A and B stripe grids); the Brightness Temperature (BT) for the 3 Thermal Infra-Red (TIR) bands; the BT for the 2 Fire (FIR) bands. Resolution: 1km at nadir (TIR), 500m (VIS). All are provided for both the oblique and nadir view. These measurements are accompanied with grid and time information, quality flags, error estimates and meteorological auxiliary data. 
Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. This collection contains reprocessed data from baseline collection 004. Operational data can be found in the corresponding collection. ",SLSTR,SENTINEL3,"S3A,S3B",L1,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,L1RBT,RBT,VIS,NIR,SWIR,BT,TIR,FIR,Reprocessed,BC004",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-1 RBT - Reprocessed from BC004,2018-05-09T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S3_SLSTR_L2,"The SLSTR Level-2 products are generated in five different types: 1. SL_2_WCT, including the Sea Surface Temperature for single and dual view, for 2 or 3 channels (internal product only), 2. SL_2_WST, including the Level-2P Sea surface temperature (provided to the users), 3. SL_2_LST, including the Land Surface Temperature parameters (provided to the users), 4. SL_2_FRP, including the Fire Radiative Power parameters (provided to the users), 5. SL_2_AOD, including the Aerosol Optical Depth parameters (provided to the users). The Level-2 products are organized in packages composed of one manifest file and several measurement and annotation data files (between 2 and 21 files depending on the package). The manifest file is in XML format and gathers general information concerning product and processing. The measurement and annotation data files are in netCDF 4 format, and include dimensions, variables and associated attributes. Regarding the measurement files: one measurement file, providing the land surface temperature, associated uncertainties and other supporting fields, is included in the SL_2_LST packet. The annotation data files are generated from the annotation files included in the SL_1RBT package and their format is identical to the files in the Level-1 packet. The SL_2_LST packet contains 10 annotation files, providing the same parameters as in SL_2_WCT and, in addition, some vegetation parameters. ",SLSTR,SENTINEL3,"S3A,S3B",L2,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2FRP,FRP,L2WCT,WCT,L2WST,WST,L2AOD,AOD",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-2,2017-07-05T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S3_SLSTR_L2AOD,"The Copernicus NRT S3 AOD processor quantifies the abundance of aerosol particles and monitors their global distribution and long-range transport, at the scale of 9.5 x 9.5 km2. All observations are made available in less than three hours from the SLSTR observation sensing time. It is only applicable during daytime. NOTE: The SLSTR L2 AOD product is generated by EUMETSAT in NRT only. An offline (NTC) AOD product is generated from SYN data by ESA, exploiting the synergy between the SLSTR and OLCI instruments. ",SLSTR,SENTINEL3,"S3A,S3B",L2,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2AOD,AOD",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-2 AOD,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,,,,available,,,,available +S3_SLSTR_L2FRP,"The SLSTR Level-2 FRP product provides one measurement data file, FRP_in.nc, with Fire Radiative Power (FRP) values and associated parameters generated for each fire detected over land and projected on the SLSTR 1 km grid. The fire detection is based on a mixed thermal band, combining S7 radiometric measurements and, for pixels associated with a saturated value of S7 (i.e. above 311 K), F1 radiometric measurements. 
",SLSTR,SENTINEL3,"S3A,S3B",L2,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2FRP,FRP",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-2 FRP,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_SLSTR_L2LST,The SLSTR Level-2 LST product provides land surface parameters generated on the wide 1 km measurement grid. It contains measurement file with Land Surface Temperature (LST) values with associated parameters (LST parameters are computed and provided for each pixel (re-gridded or orphan) included in the 1 km measurement grid) ,SLSTR,SENTINEL3,"S3A,S3B",L2,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2LST,LST",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-2 LST,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,, +S3_SLSTR_L2WST,The SLSTR Level-2 WST product provides water surface parameters generated on the wide 1 km measurement grid. It contains measurement file with Water Surface Temperature (WST) values with associated parameters (WST parameters are computed and provided for each pixel (re-gridded or orphan) included in the 1 km measurement grid) ,SLSTR,SENTINEL3,"S3A,S3B",L2,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2WST,WST",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-2 WST,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_SLSTR_L2WST_BC003,"The SLSTR SST has a spatial resolution of 1km at nadir. Skin Sea Surface Temperature following the GHRSST L2P GDS2 format specification, see https://www.ghrsst.org/ . Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. This collection contains reprocessed data from baseline collection 003. Operational data can be found in the corresponding collection. ",SLSTR,SENTINEL3,"S3A,S3B",L2,"SLSTR,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,L2WST,WST,REPROCESSED,BC003",ATMOSPHERIC,proprietary,SENTINEL3 SLSTR Level-2 WST Reprocessed from BC003,2016-04-18T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S3_SRA,"SRAL Level 1B: Complex echoes (In-phase (I) and Quadrature (Q)) for the Low Resolution Mode (LRM) and/or Synthetic Aperture Radar (SAR) mode both for C Band and Ku band. When the altimeter is in SAR mode, this product also contains the so-called Pseudo LRM (PLRM) echoes. - All Sentinel-3 Near Real Time (NRT) products are available at pick-up point in less than 3h. - All Sentinel-3 Non Time Critical (NTC) products are available at pick-up point in less than 30 days. - All Sentinel-3 Short Time Critical (STC) products are available at pick-up point in less than 48 hours. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. ",SRAL,SENTINEL3,"S3A,S3B",L1,"SRA,SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L1",RADAR,proprietary,SENTINEL3 SRAL Level-1,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_SRA_1A_BC004,"SRAL Level 1A Unpacked L0 Complex Echoes (version BC004) - Sentinel-3 - Reprocessed Fundamental science and engineering product development supporting operational users. This product is most relevant to SAR processing specialists allowing fundamental studies on SAR processing such as Doppler beam formation and for calibration studies using ground-based Transponders. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. This collection contains reprocessed data from baseline collection 004. Operational data can be found in the corresponding collection. 
",SRAL,SENTINEL3,"S3A,S3B",L1A,"SRA,SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,L1A,REPROCESSED,BC004",RADAR,proprietary,SENTINEL3 SRAL Level-1A Unpacked - Reprocessed from BC004,2016-03-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S3_SRA_1B_BC004,"SRAL Level 1B (version BC004) - Sentinel-3 - Reprocessed SRAL Level 1B: Complex echoes (In-phase (I) and Quadrature (Q)) for the Low Resolution Mode (LRM) and/or Synthetic Aperture Radar (SAR) mode both for C Band and Ku band. When the altimeter is in SAR mode, this product also contains the so-called Pseudo LRM (PLRM) echoes. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. This collection contains reprocessed data from baseline collection 004. Operational data can be found in the corresponding collection. ",SRAL,SENTINEL3,"S3A,S3B",L1B,"SRA,SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,L1B,REPROCESSED,BC004",RADAR,proprietary,SENTINEL3 SRAL Level-1B - Reprocessed from BC004,2016-03-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S3_SRA_A,"A Level 1A SRAL product contains one ""measurement data file"" containing the L1A measurements parameters: ECHO_SAR_Ku: L1A Tracking measurements (sorted and calibrated) in SAR mode - Ku-band (80-Hz) ECHO_PLRM: L1A Tracking measurements (sorted and calibrated) in pseudo-LRM mode - Ku and C bands (80-Hz) ",SRAL,SENTINEL3,"S3A,S3B",L1,"SRA,SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L1",RADAR,proprietary,SENTINEL3 SRAL Level-1 SRA_A,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_SRA_BS,"A Level 1B-S SRAL product contains one ""measurement data file"" containing the L1b measurements parameters: ECHO_SAR_Ku : L1b Tracking measurements in SAR mode - Ku band (20-Hz) as defined in the L1b MEAS product completed with SAR expert information ECHO_PLRM : L1b Tracking measurements in pseudo-LRM mode - Ku and C bands (20-Hz) as defined in the L1b MEAS product ",SRAL,SENTINEL3,"S3A,S3B",L1,"SRA,SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L1",RADAR,proprietary,SENTINEL3 SRAL Level-1 SRA_BS,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_SRA_BS_BC004,"SRAL Level 1B Stack Echoes (version BC004) - Sentinel-3 - Reprocessed SRAL Level 1B: Complex echoes (In-phase (I) and Quadrature (Q)) for the Low Resolution Mode (LRM) and/or Synthetic Aperture Radar (SAR) mode both for C Band and Ku band. When the altimeter is in SAR mode, this product also contains the so-called Pseudo LRM (PLRM) echoes. Complex (In-phase and Quadrature) echoes (I's and Q;s) after slant/Doppler range correction. This product is most relevant to geophysical retrieval algorithm developers (over ocean, land and ice surfaces), surface characterisations studies (e.g. impact of sea state bias, wave directional effects etc) and Quality Control systems. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. This collection contains reprocessed data from baseline collection 004. Operational data can be found in the corresponding collection. 
",SRAL,SENTINEL3,"S3A,S3B",L1B,"SRA,SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L1,L1B,REPROCESSED,STACK,ECHOES,BC004",RADAR,proprietary,SENTINEL3 SRAL Level-1B Stack Echoes - Reprocessed from BC004,2016-03-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S3_SY_AOD,"The Level-2 SYN AOD product (SY_2_AOD) is produced by a dedicated processor including the whole SYN L1 processing module and a global synergy level 2 processing module retrieving, over land and sea, aerosol optical thickness. The resolution of this product is wider than classic S3 products, as the dataset are provided on a 4.5 km² resolution ",SYNERGY,SENTINEL3,"S3A,S3B",L2,"SYNERGY,SY,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,AOD","OPTICAL,RADAR",proprietary,SENTINEL3 SYNERGY Level-2 AOD,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,, +S3_SY_SYN,"The Level-2 SYN product (SY_2_SYN) is produced by the Synergy Level-1/2 SDR software and contains surface reflectance and aerosol parameters over land. All measurement datasets are provided on the OLCI image grid, similar to the one included in the OLCI L1b product. Some sub-sampled annotations and atmospheric datasets are provided on the OLCI tie-points grid. Several associated variables are also provided in annotation data files. ",SYNERGY,SENTINEL3,"S3A,S3B",L2,"SYNERGY,SY,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,SYN","OPTICAL,RADAR",proprietary,SENTINEL3 SYNERGY Level-2 SYN,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_SY_V10,"The Level-2 VG1 and V10 SYN products (SY_2_VG1 and SY_2_V10 respectively) are produced by the SYNERGY Level-2 processor and contain 1 km VEGETATION-like product, 1 and 10 days synthesis surface reflectances and NDVI. The product grid and the four spectral bands are similar to the SYN Level-2 VGP product. ",SYNERGY,SENTINEL3,"S3A,S3B",LEVEL-2W,"SYNERGY,SY,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,V10","OPTICAL,RADAR",proprietary,SENTINEL3 SYNERGY Level-2 V10,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,, +S3_SY_VG1,"The Level-2 VG1 and V10 SYN products (SY_2_VG1 and SY_2_V10 respectively) are produced by the SYNERGY Level-2 processor and contain 1 km VEGETATION-like product, 1 and 10 days synthesis surface reflectances and NDVI. The product grid and the four spectral bands are similar to the SYN Level-2 VGP product. ",SYNERGY,SENTINEL3,"S3A,S3B",LEVEL-2,"SYNERGY,SY,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,VG1","OPTICAL,RADAR",proprietary,SENTINEL3 SYNERGY Level-2 VG1,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,, +S3_SY_VGP,"The Level-2 VGP SYN product (SY_2_VGP) is produced by the Global Synergy Level-1/2 software and contains 1 km VEGETATION-like product TOA reflectances. The ""1 km VEGETATION-like product"" label means that measurements are provided on a regular latitude-longitude grid, with an equatorial sampling distance of approximately 1 km. This product is restricted in longitude, including only filled ones. ",SYNERGY,SENTINEL3,"S3A,S3B",LEVEL-2,"SYNERGY,SY,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,VGP","OPTICAL,RADAR",proprietary,SENTINEL3 SYNERGY Level-2 VGP,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,, +S3_WAT,"The products contain the typical altimetry measurements, like the altimeter range, the sea surface height, the wind speed, significant wave height and all required geophysical corrections and related flags. Also the sea Ice freeboard measurement is included. 
The measurements in the standard data file provide the measurements in low (1 Hz = approx. 7km) and high resolution (20 Hz = approx. 300 m), in LRM mode or in SAR mode, for both C-band and Ku band. The SAR mode is the default mode. The reduced measurement data file contains 1 Hz measurements only. The enhanced measurement data file contains also the waveforms and associated parameters and the pseudo LRM measurements when in SAR mode. This product contains the following datasets: Sea Level Global(NRT) (PDS_MG3_CORE_14_GLONRT), Sea Level Global Reduced(NRT) (PDS_MG3_CORE_14_GLONRT_RD), Sea Level Global Standard(NRT) (PDS_MG3_CORE_14_GLONRT_SD), Sea Level Global Enhanced(NRT) (PDS_MG3_CORE_14_GLONRT_EN) - All Sentinel-3 NRT products are available at pick-up point in less than 3h. - All Sentinel-3 Non Time Critical (NTC) products are available at pick-up point in less than 30 days - All Sentinel-3 Short Time Critical (STC) products are available at pick-up point in less than 48 hours Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. ",SRAL,SENTINEL3,"S3A,S3B",L2,"SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,WAT",RADAR,proprietary,SENTINEL3 SRAL Level-2 WAT,2016-02-16T00:00:00Z,,,,,available,available,available,,,,,,,available,,,available,,,,available +S3_WAT_BC004,"The products contain the typical altimetry measurements, like the altimeter range, the sea surface height, the wind speed, significant wave height and all required geophysical corrections and related flags. Also the sea ice freeboard measurement is included. The measurements in the standard data file provide the measurements in low (1 Hz = approx. 7km) and high resolution (20 Hz = approx. 300 m), in LRM mode or in SAR mode, for both C-band and Ku band. The SAR mode is the default mode. The reduced measurement data file contains 1 Hz measurements only. The enhanced measurement data file contains also the waveforms and associated parameters and the pseudo LRM measurements when in SAR mode. Sentinel-3 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. This collection contains reprocessed data from baseline collection 004. Operational data can be found in the corresponding collection. ",SRAL,SENTINEL3,"S3A,S3B",L2,"SRAL,SENTINEL,SENTINEL3,S3,S3A,S3B,L2,WAT,REPROCESSED,BC004",RADAR,proprietary,SRAL Level 2 Altimetry Global - Reprocessed from BC004,2016-03-01T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S5P_L1B2_IR_ALL,"Solar irradiance spectra for all bands (UV1-6 and SWIR) The TROPOMI instrument is a space-borne, nadir-viewing, imaging spectrometer covering wavelength bands between the ultraviolet and the shortwave infrared. The instrument, the single payload of the Sentinel-5P spacecraft, uses passive remote sensing techniques to attain its objective by measuring, at the Top Of Atmosphere (TOA), the solar radiation reflected by and radiated from the earth. The instrument operates in a push-broom configuration (non-scanning), with a swath width of ~2600 km on the Earth's surface. The typical pixel size (near nadir) will be 7x3.5 km2 for all spectral bands, with the exception of the UV1 band (7x28 km2) and SWIR bands (7x7 km2). 
",TROPOMI,SENTINEL5P,S5P,"L1B, L2","SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,IR,SIR,SWIR,Irradiances,UVN",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B and Level 2 Irradiances for the SWIR and UNV bands,2017-10-13T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S5P_L1B_IR_SIR,"Solar irradiance spectra for the SWIR bands (band 7 and band 8). TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,IR,SIR,SWIR,Irradiances",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Irradiances for the SWIR bands,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,,,,,,,, +S5P_L1B_IR_UVN,"Solar irradiance spectra for the UVN bands (band 1 through band 6). TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,IR,UVN,Irradiances",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Irradiances for the UVN bands,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,,,,,,,, +S5P_L1B_RA_BD1,"Sentinel-5 Precursor Level 1B Radiances for spectral band 1. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. 
",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD1,BAND1,B01",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 1,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L1B_RA_BD2,"Sentinel-5 Precursor Level 1B Radiances for spectral band 2. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD2,BAND2,B02",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 2,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L1B_RA_BD3,"Sentinel-5 Precursor Level 1B Radiances for spectral band 3. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD3,BAND3,B03",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 3,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L1B_RA_BD4,"Sentinel-5 Precursor Level 1B Radiances for spectral band 4. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. 
",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD4,BAND4,B04",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 4,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L1B_RA_BD5,"Sentinel-5 Precursor Level 1B Radiances for spectral band 5. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD5,BAND5,B05",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 5,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L1B_RA_BD6,"Sentinel-5 Precursor Level 1B Radiances for spectral band 6. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD6,BAND6,B06",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 6,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L1B_RA_BD7,"Sentinel-5 Precursor Level 1B Radiances for spectral band 7. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. 
",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD7,BAND7,B07",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 7,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L1B_RA_BD8,"Sentinel-5 Precursor Level 1B Radiances for spectral band 8. TROPOMI utilises a single telescope to form an image of the target area onto a rectangular slit that acts as the entrance slit of the spectrometer system. There are four different spectrometers, each with its own optics and detector: mediumwave ultraviolet (UV), longwave ultraviolet combined with visible (UVIS), near infrared (NIR), and shortwave infrared (SWIR). The spectrometers for UV, UVIS and NIR are jointly referred to as UVN. Radiation for the SWIR spectrometer is transferred by an optical relay part in the UVN system from the telescope to an interface position (the pupil stop) for the SWIR spectrometer. This is done because of the more stringent thermal requirements on the SWIR part of the instrument. Each of the detectors is divided in two halves, which yields a total of eight spectral bands. ",TROPOMI,SENTINEL5P,S5P,L1B,"SENTINEL,SENTINEL5P,S5P,L1,L1B,TROPOMI,RA,Radiances,BD8,BAND8,B08",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 1B Radiances for spectral band 8,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_AER_AI,"TROPOMI aerosol index is referred to as the Ultraviolet Aerosol Index (UVAI). The relatively simple calculation of the Aerosol Index is based on wavelength dependent changes in Rayleigh scattering in the UV spectral range where ozone absorption is very small. UVAI can also be calculated in the presence of clouds so that daily, global coverage is possible. This is ideal for tracking the evolution of episodic aerosol plumes from dust outbreaks, volcanic ash, and biomass burning. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,AER,AI,Ultraviolet,Aerosol,Index",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Ultraviolet Aerosol Index,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_AER_LH,"The TROPOMI Aerosol Layer Height product focuses on retrieval of vertically localised aerosol layers in the free troposphere, such as desert dust, biomass burning aerosol, or volcanic ash plumes. The height of such layers is retrieved for cloud-free conditions. Height information for aerosols in the free troposphere is particularly important for aviation safety. Scientific applications include radiative forcing studies, long-range transport modelling and studies of cloud formation processes. Aerosol height information also helps to interpret the UV Aerosol Index (UVAI) in terms of aerosol absorption as the index is strongly height-dependent. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,AER,LH,Aerosol,Layer,Height",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Aerosol Layer Height,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_CH4,"Methane (CH4) is, after carbon dioxide (CO2), the most important contributor to the anthropogenically enhanced greenhouse effect. Roughly three-quarters of methane emissions are anthropogenic and as such it is important to continue the record of satellite-based measurements. 
TROPOMI aims at providing CH4 column concentrations with high sensitivity to the Earth's surface, good spatio/temporal coverage, and sufficient accuracy to facilitate inverse modelling of sources and sinks. The output product consists of the retrieved methane column and a row vector referred to as the column averaging kernel A. The column averaging kernel describes how the retrieved column relates to the true profile and should be used in validation exercises (when possible) or use of the product in source/sink inverse modelling. The output product also contains altitude levels of the layer interfaces to which the column averaging kernel corresponds. Additional output for Level-2 data products: viewing geometry, precision of retrieved methane, residuals of the fit, quality flags (cloudiness, terrain roughness etc.) and retrieved albedo and aerosol properties. The latter properties are required for a posteriori filtering and for estimation of total retrieval error. The Sentinel-5 Precursor mission flies in loose formation (about 3.5 - 5 minutes behind) with the S-NPP (SUOMI-National Polar-orbiting Partnership) mission to use VIIRS (Visible Infrared Imaging Radiometer Suite) cloud information to select cloud free TROPOMI pixels for high quality methane retrieval. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,CH4,Methane",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Methane,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_CLOUD,"The TROPOMI instrument, single payload onboard Sentinel-5 Precursor, retrieves operationally the most important quantities for cloud correction of satellite trace gas retrievals: cloud fraction, cloud optical thickness (albedo), and cloud-top pressure (height). Cloud parameters from TROPOMI are not only used for enhancing the accuracy of trace gas retrievals, but also to extend the satellite data record of cloud information derived from oxygen A-band measurements initiated with GOME. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,CLOUD",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Cloud,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_CO,"The TROPOMI instrument, single payload onboard Sentinel-5 Precursor, retrieves the CO global abundance exploiting clear-sky and cloudy-sky Earth radiance measurements in the 2.3 µm spectral range of the shortwave infrared (SWIR) part of the solar spectrum. TROPOMI clear sky observations provide CO total columns with sensitivity to the tropospheric boundary layer. For cloudy atmospheres, the column sensitivity changes according to the light path. The TROPOMI CO retrieval uses the same method employed by SCIAMACHY. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,CO,Carbon,Monoxide",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Carbon Monoxide,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_HCHO,"Formaldehyde is an intermediate gas in almost all oxidation chains of Non-Methane Volatile Organic Compounds (NMVOC), leading eventually to CO2. NMVOCs are, together with NOx, CO and CH4, among the most important precursors of tropospheric O3. The major HCHO source in the remote atmosphere is CH4 oxidation. Over the continents, the oxidation of higher NMVOCs emitted from vegetation, fires, traffic and industrial sources results in important and localised enhancements of the HCHO levels. 
In addition to the main product results, such as HCHO slant column, vertical column and air mass factor, the level 2 data files contain several additional parameters and diagnostic information. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,HCHO,Formaldehyde",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Formaldehyde,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_NO2,"The TROPOMI instrument, single payload onboard Sentinel-5 Precursor, retrieves operationally tropospheric and stratospheric NO2 column products. The TROPOMI NO2 data products pose an improvement over previous NO2 data sets, particularly in their unprecedented spatial resolution, but also in the separation of the stratospheric and tropospheric contributions of the retrieved slant columns, and in the calculation of the air-mass factors used to convert slant to total columns. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,NO2,Nitrogen,Dioxide",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Nitrogen Dioxide,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_NP_BD3,"S5P-NPP Cloud for spectral band 3. The S5P level 2 methane product is dependent on having information on cloud occurrence at spatial resolution finer than that achievable from TROPOMI itself. This information is also useful for other purposes, including assessing the influence of cloud on other L2 products and issues related to spatial co-registration. A level 2 auxiliary product was therefore developed to describe cloud in the TROPOMI field of view (FOV), using co-located observations of VIIRS (Visible Infra-red Imaging Radiometer Suite) on the U.S. S-NPP (Suomi - National Polar-orbiting Partnership). S5P flies in a so-called loose formation with the S-NPP with a temporal separation between them of less than 5 minutes. The main information contained in the S5P-NPP product is: 1. A statistical summary for each S5P FOV of the NPP-VIIRS L2 Cloud Mask (VCM). 2. The mean and standard deviation of the sun-normalised radiance in a number of VIIRS moderate resolution bands. This information is provided for three S5P spectral bands (to account for differences in spatial sampling). ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,NP,NPP,Cloud,BD3,B03,BAND3",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 NPP Cloud for band 3,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_NP_BD6,"S5P-NPP Cloud for spectral band 6. The S5P level 2 methane product is dependent on having information on cloud occurrence at spatial resolution finer than that achievable from TROPOMI itself. This information is also useful for other purposes, including assessing the influence of cloud on other L2 products and issues related to spatial co-registration. A level 2 auxiliary product was therefore developed to describe cloud in the TROPOMI field of view (FOV), using co-located observations of VIIRS (Visible Infra-red Imaging Radiometer Suite) on the U.S. S-NPP (Suomi - National Polar-orbiting Partnership). S5P flies in a so-called loose formation with the S-NPP with a temporal separation between them of less than 5 minutes. The main information contained in the S5P-NPP product is: 1. A statistical summary for each S5P FOV of the NPP-VIIRS L2 Cloud Mask (VCM). 2. The mean and standard deviation of the sun-normalised radiance in a number of VIIRS moderate resolution bands. 
This information is provided for three S5P spectral bands (to account for differences in spatial sampling). ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,NP,NPP,Cloud,BD6,B06,BAND6",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 NPP Cloud for band 6,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_NP_BD7,"S5P-NPP Cloud for spectral band 7. The S5P level 2 methane product is dependent on having information on cloud occurrence at spatial resolution finer than that achievable from TROPOMI itself. This information is also useful for other purposes, including assessing the influence of cloud on other L2 products and issues related to spatial co-registration. A level 2 auxiliary product was therefore developed to describe cloud in the TROPOMI field of view (FOV), using co-located observations of VIIRS (Visible Infra-red Imaging Radiometer Suite) on the U.S. S-NPP (Suomi - National Polar-orbiting Partnership). S5P flies in a so-called loose formation with the S-NPP with a temporal separation between them of less than 5 minutes. The main information contained in the S5P-NPP product is: 1. A statistical summary for each S5P FOV of the NPP-VIIRS L2 Cloud Mask (VCM). 2. The mean and standard deviation of the sun-normalised radiance in a number of VIIRS moderate resolution bands. This information is provided for three S5P spectral bands (to account for differences in spatial sampling). ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,NP,NPP,Cloud,BD7,B07,BAND7",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 NPP Cloud for band 7,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_O3,"Ozone (O3) is of crucial importance for the equilibrium of the Earth's atmosphere. In the stratosphere, the ozone layer shields the biosphere from dangerous solar ultraviolet radiation. In the troposphere, it acts as an efficient cleansing agent, but at high concentration it also becomes harmful to the health of humans, animals, and vegetation. Ozone is also an important greenhouse-gas contributor to ongoing climate change. These products are provided in NetCDF-CF format and contain total ozone, ozone temperature, and error information including averaging kernels. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,O3,Ozone",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Ozone,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_O3_PR,"Retrieved ozone profiles are used to monitor the evolution of stratospheric and tropospheric ozone. Such monitoring is important as the ozone layer protects life on Earth against harmful UV radiation. The ozone layer is recovering from depletion due to manmade Chlorofluorocarbons (CFCs). Tropospheric ozone is toxic and it plays an important role in tropospheric chemistry. Also, ozone is a greenhouse gas and is therefore also relevant for climate change. The main parameters in the file are the retrieved ozone profile at 33 levels and the retrieved sub-columns of ozone in 6 layers. In addition, the total ozone column and tropospheric ozone columns are provided. For the ozone profile, the precision and smoothing errors, the a-priori profile and the averaging kernel are also provided. 
",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,O3,PR,Ozone,Profile",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Ozone Profile,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S5P_L2_O3_TCL,"Ozone in the tropical troposphere plays various important roles. The intense UV radiation and high humidity in the tropics stimulate the formation of the hydroxyl radical (OH) by the photolysis of ozone. OH is the most important oxidant in the troposphere because it reacts with virtually all trace gases, such as CO, CH4 and other hydrocarbons. The tropics are also characterized by large emissions of nitrogen oxides (NOx), carbon monoxide (CO) and hydrocarbons, both from natural and anthropogenic sources. Ozone that is formed over regions where large amounts of these ozone precursors are emitted, can be transported over great distances and affects areas far from the source. The TROPOMI tropospheric ozone product is a level-2c product that represents three day averaged tropospheric ozone columns on a 0.5° by 1° latitude-longitude grid for the tropical region between 20°N and 20°S. The TROPOMI tropospheric ozone column product uses the TROPOMI Level-2 total OZONE and CLOUD products as input. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,O3,TCL,Tropospheric,Ozone",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Tropospheric Ozone,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,,,,,,,, +S5P_L2_SO2,"Sulphur dioxide (SO2) enters the Earth's atmosphere through both natural (~30%) and anthropogenic processes (~70%). It plays a role in chemistry on a local and global scale and its impact ranges from short term pollution to effects on climate. Beside the total column of SO2, enhanced levels of SO2 are flagged within the products. The recognition of enhanced SO2 values is essential in order to detect and monitor volcanic eruptions and anthropogenic pollution sources. Volcanic SO2 emissions may also pose a threat to aviation, along with volcanic ash. ",TROPOMI,SENTINEL5P,S5P,L2,"SENTINEL,SENTINEL5P,S5P,L2,TROPOMI,SO2,Sulphur,Dioxide",ATMOSPHERIC,proprietary,Sentinel-5 Precursor Level 2 Sulphur Dioxide,2017-10-13T00:00:00Z,,,,,available,available,available,,,,,,,available,,,,,,, +S6_AMR_L2_F06,"This is a reprocessed dataset at baseline F06, which is continued by the NRT/NTC data stream from 29/April/2022 onwards. AMR-C Level 2 Products as generated by the AMR-C CFI Processor. These products include antenna and brightness temperatures, wet tropospheric correction, water vapour content, and a rain flag. Sentinel-6 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. It is a collaborative Copernicus mission, implemented and co-funded by the European Commission, ESA, EUMETSAT and the USA, through NASA and the National Oceanic and Atmospheric Administration (NOAA). ",AMR-C,SENTINEL6-A,S6A,L2,"SENTINEL,SENTINEL6,S6,S6A,LEO,L2,AMR-C,RADIOMETER,MICROWAVE,F06",RADIOMETER,proprietary,Sentinel 6 - Climate-quality Advanced Microwave Radiometer Level 2 Products Reprocessed at F06,2020-11-28T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S6_P4_L1AHR_F06,"This is a reprocessed dataset at baseline F06, which is continued by the NRT/NTC data stream from 29/April/2022 onwards. The Level-1A product contains Level 1 intermediate output of the HR processor (RAW and RMC). It includes geo-located bursts of Ku echoes (at ~9 kHz) with all instrument calibrations applied. 
It includes the full rate complex waveforms input to the delay/Doppler or SAR processor. This product is most relevant to altimetry specialists, working on fundamental SAR processing techniques and calibration studies. Sentinel-6 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. It is a collaborative Copernicus mission, implemented and co-funded by the European Commission, ESA, EUMETSAT and the USA, through NASA and the National Oceanic and Atmospheric Administration (NOAA). ",Poseidon-4,SENTINEL6-A,S6A,L1A,"SENTINEL,SENTINEL6,S6,S6A,LEO,L1A,ALTIMETRIC,HR,POSEIDON4,P4,F06",ALTIMETRIC,proprietary,Sentinel 6 - Poseidon-4 Altimetry Level 1A High Resolution Reprocessed at F06,2020-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S6_P4_L1BAHR_F06,"This is a reprocessed dataset at baseline F06, which is continued by the NRT/NTC data stream from 29/April/2022 onwards. The Level-1B HR product is output of the HR processor. It includes geo-located and fully calibrated multi-looked high-resolution Ku-band waveforms. This product is most relevant to geophysical retrieval algorithm developers (over ocean, land and ice surfaces), surface characterisation studies (e.g. impact of sea state bias, wave directional effects etc.) and Quality Control systems. Sentinel-6 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. It is a collaborative Copernicus mission, implemented and co-funded by the European Commission, ESA, EUMETSAT and the USA, through NASA and the National Oceanic and Atmospheric Administration (NOAA). ",Poseidon-4,SENTINEL6-A,S6A,L1B,"SENTINEL,SENTINEL6,S6,S6A,LEO,L1B,ALTIMETRIC,HR,POSEIDON4,P4,F06",ALTIMETRIC,proprietary,Sentinel 6 - Poseidon-4 Altimetry Level 1B High Resolution Reprocessed at F06,2020-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S6_P4_L1BLR_F06,"This is a reprocessed dataset at baseline F06, which is continued by the NRT/NTC data stream from 29/April/2022 onwards. The Level-1B LR product is output of the LR processor. It includes geo-located and fully calibrated pulse-limited low-resolution Ku-band and C-band waveforms. This product is most relevant to geophysical retrieval algorithm developers (over ocean, land and ice surfaces), surface characterisation studies (e.g. impact of sea state bias, wave directional effects etc.) and Quality Control systems. Sentinel-6 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. It is a collaborative Copernicus mission, implemented and co-funded by the European Commission, ESA, EUMETSAT and the USA, through NASA and the National Oceanic and Atmospheric Administration (NOAA). ",Poseidon-4,SENTINEL6-A,S6A,L1B,"SENTINEL,SENTINEL6,S6,S6A,LEO,L1B,ALTIMETRIC,LR,POSEIDON4,P4,F06",ALTIMETRIC,proprietary,Sentinel 6 - Poseidon-4 Altimetry Level 1B Low Resolution Reprocessed at F06,2020-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S6_P4_L2HR_F06,"This is a reprocessed dataset at baseline F06, which is continued by the NRT/NTC data stream from 29/April/2022 onwards. The level-2 high resolution products contain the typical altimetry measurements, like the altimeter range, the sea surface height, the wind speed, significant wave height and all required geophysical corrections and related flags derived either from RAW or RMC, or the combination of both. Two measurement data files are available (standard and reduced), each with a different number of variables. 
The standard data file includes 1 Hz and 20 Hz measurements for the Ku-band as well as geophysical corrections at 1 Hz and some at 20 Hz. The reduced data file contains only 1 Hz measurements for the Ku- and C-bands as well as geophysical corrections at 1 Hz. Note that the HR data products only contain Ku-band measurements. These products are suitable for users seeking information on sea state and those creating downstream added value products from multiple altimeters, particularly those seeking the highest resolution measurements. Sentinel-6 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. It is a collaborative Copernicus mission, implemented and co-funded by the European Commission, ESA, EUMETSAT and the USA, through NASA and the National Oceanic and Atmospheric Administration (NOAA). ",Poseidon-4,SENTINEL6-A,S6A,L2,"SENTINEL,SENTINEL6,S6,S6A,LEO,L2,ALTIMETRIC,HR,POSEIDON4,P4,F06",ALTIMETRIC,proprietary,Sentinel 6 - Poseidon-4 Altimetry Level 2 High Resolution Reprocessed at F06,2020-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +S6_P4_L2LR_F06,"This is a reprocessed dataset at baseline F06, which is continued by the NRT/NTC data stream from 29/April/2022 onwards. The product contains the typical altimetry measurements, like the altimeter range, the sea surface height, the wind speed, significant wave height and all required geophysical corrections and related flags derived from LR. Two measurement data files are available (standard and reduced), each with a different number of variables. The standard data file includes 1 Hz and 20 Hz measurements for the Ku- and C-bands as well as geophysical corrections at 1 Hz and some at 20 Hz. The reduced data file contains only 1 Hz measurements for the Ku- and C-bands as well as geophysical corrections at 1 Hz. These products are suitable for users seeking information on sea state and those creating downstream added value products from multiple altimeters. Sentinel-6 is part of a series of Sentinel satellites, under the umbrella of the EU Copernicus programme. It is a collaborative Copernicus mission, implemented and co-funded by the European Commission, ESA, EUMETSAT and the USA, through NASA and the National Oceanic and Atmospheric Administration (NOAA). ",Poseidon-4,SENTINEL6-A,S6A,L2,"SENTINEL,SENTINEL6,S6,S6A,LEO,L2,ALTIMETRIC,LR,POSEIDON4,P4,F06",ALTIMETRIC,proprietary,Sentinel 6 - Poseidon-4 Altimetry Level 2 Low Resolution Reprocessed at F06,2020-12-17T00:00:00Z,,,,,,,,,,,,,,,,,,,,,available +SATELLITE_CARBON_DIOXIDE,"This dataset provides observations of atmospheric carbon dioxide (CO2)\namounts obtained from observations collected by several current and historical \nsatellite instruments. Carbon dioxide is a naturally occurring Greenhouse Gas (GHG), but one whose abundance has been increased substantially above its pre-industrial value of some 280 ppm by human activities, primarily because of emissions from combustion of fossil fuels, deforestation and other land-use change. The annual cycle (especially in the northern hemisphere) is primarily due to seasonal uptake and release of atmospheric CO2 by terrestrial vegetation.\nAtmospheric carbon dioxide abundance is indirectly observed by various satellite instruments. These instruments measure spectrally resolved near-infrared and/or infrared radiation reflected or emitted by the Earth and its atmosphere. In the measured signal, molecular absorption signatures from carbon dioxide and other constituent gasses can be identified. 
It is through analysis of those absorption lines in these radiance observations that the averaged carbon dioxide abundance in the sampled atmospheric column can be determined.\nThe software used to analyse the absorption lines and determine the carbon dioxide concentration in the sampled atmospheric column is referred to as the retrieval algorithm. For this dataset, carbon dioxide abundances have been determined by applying several algorithms to different satellite \ninstruments. Typically, different algorithms have different strengths and weaknesses and therefore, which product to use for a given application typically depends on the application.\nThe data set consists of 2 types of products: (i) column-averaged mixing ratios of CO2, denoted XCO2 and (ii) mid-tropospheric CO2 columns. The XCO2 products have been retrieved from SCIAMACHY/ENVISAT, TANSO-FTS/GOSAT and OCO-2. The mid-tropospheric CO2 product has been retrieved from the IASI instruments on-board the Metop satellite series and from AIRS. \nThe XCO2 products are available as Level 2 (L2) products (satellite orbit tracks) and as Level 3 (L3) product (gridded). The L2 products are available as individual sensor products (SCIAMACHY: BESD and WFMD algorithms; GOSAT: OCFP and SRFP algorithms) and as a multi-sensor merged product (EMMA algorithm). The L3 XCO2 product is provided in OBS4MIPS format. \nThe IASI and AIRS products are available as L2 products generated with the NLIS algorithm.\nThis data set is updated on a yearly basis, with each update cycle adding (if required) a new data version for the entire period, up to one year behind real time.\nThis dataset is produced on behalf of C3S with the exception of the SCIAMACHY and AIRS L2 products that were generated in the framework of the GHG-CCI project of the European Space Agency (ESA) Climate Change Initiative (CCI).\n\nVariables in the dataset/application are:\nColumn-average dry-air mole fraction of atmospheric carbon dioxide (XCO2), Mid-tropospheric columns of atmospheric carbon dioxide (CO2) ",,,,,"ECMWF,CDS,C3S,carbon-dioxide",ATMOSPHERIC,proprietary,Carbon dioxide data from 2002 to present derived from satellite observations,2002-10-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +SATELLITE_METHANE,"This dataset provides observations of atmospheric methane (CH4)\namounts obtained from observations collected by several current and historical \nsatellite instruments. Methane is a naturally occurring Greenhouse Gas (GHG), but one whose abundance has been increased substantially above its pre-industrial value of some 720 ppb by human activities, primarily because of agricultural emissions (e.g., rice production, ruminants) and fossil fuel production and use. A clear annual cycle is largely due to seasonal wetland emissions.\nAtmospheric methane abundance is indirectly observed by various satellite instruments. These instruments measure spectrally resolved near-infrared and infrared radiation reflected or emitted by the Earth and its atmosphere. In the measured signal, molecular absorption signatures from methane and constituent gasses can be identified. It is through analysis of those absorption lines in these radiance observations that the averaged methane abundance in the sampled atmospheric column can be determined.\nThe software used to analyse the absorption lines and determine the methane concentration in the sampled atmospheric column is referred to as the retrieval algorithm. 
For this dataset, methane abundances have been determined by applying several algorithms to different satellite instruments.\nThe data set consists of 2 types of products: (i) column-averaged mixing ratios of CH4, denoted XCH4 and (ii) mid-tropospheric CH4 columns. \nThe XCH4 products have been retrieved from SCIAMACHY/ENVISAT and TANSO-FTS/GOSAT. The mid-tropospheric CH4 product has been retrieved from the IASI instruments onboard the Metop satellite series. The XCH4 products are available as Level 2 (L2) products (satellite orbit tracks) and as Level 3 (L3) product (gridded). The L2 products are available as individual sensor products (SCIAMACHY: WFMD and IMAP algorithms; GOSAT: OCFP, OCPR, SRFP and SRPR algorithms) and as a multi-sensor merged product (EMMA algorithm). The L3 XCH4 product is provided in OBS4MIPS format. The IASI products are available as L2 products generated with the NLIS algorithm.\nThis data set is updated on a yearly basis, with each update cycle adding (if required) a new data version for the entire period, up to one year behind real time.\nThis dataset is produced on behalf of C3S with the exception of the SCIAMACHY L2 products that were generated in the framework of the GHG-CCI project of the European Space Agency (ESA) Climate Change Initiative (CCI).\n\nVariables in the dataset/application are:\nColumn-average dry-air mole fraction of atmospheric methane (XCH4), Mid-tropospheric columns of atmospheric methane (CH4) ",,,,,"ECMWF,CDS,C3S,methane",ATMOSPHERIC,proprietary,Methane data from 2002 to present derived from satellite observations,2002-10-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +SATELLITE_SEA_LEVEL_BLACK_SEA,"Sea level anomaly is the height of water over the mean sea surface in a given time and region. Up-to-date altimeter standards are used to estimate the sea level anomalies with a mapping algorithm dedicated to the Black Sea region. Anomalies are computed with respect to a twenty-year mean reference period (1993-2012). The steady number of reference satellites used in the production of this dataset contributes to the long-term stability of the sea level record. Improvements of the accuracy, sampling of meso-scale processes and of the high-latitude coverage were achieved by using a few additional satellite missions. New data are provided with a delay of about 4-5 months relative to near-real-time or interim sea level products. This delay is mainly due to the timeliness of the input data, the centred processing temporal window and the validation process. However, this processing and validation adds stability and accuracy to the sea level variables and makes them suitable for climate applications. This dataset includes uncertainties for each grid cell. More details about the sea level retrieval, additional filters, optimisation procedures, and the error estimation are given in the Documentation section. Variables in the dataset/application are: Absolute dynamic topography, Absolute geostrophic velocity meridian component, Absolute geostrophic velocity zonal component, Geostrophic velocity anomalies meridian component, Geostrophic velocity anomalies zonal component, Sea level anomaly ",,,,,"Climate,ECMWF,CDS,C3S,methane,sea",HYDROLOGICAL,proprietary,Sea level daily gridded data from satellite observations for the Black Sea from 1993 to 2020,1993-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +SEASONAL_MONTHLY_PL,"This entry covers pressure-level data aggregated on a monthly time resolution. 
\nSeasonal forecasts provide a long-range outlook of changes in the Earth system over periods of a few weeks or months, as a result of predictable changes in some of the slow-varying components of the system. For example, ocean temperatures typically vary slowly, on timescales of weeks or months; as the ocean has an impact on the overlaying atmosphere, the variability of its properties (e.g. temperature) can modify both local and remote atmospheric conditions. Such modifications of the 'usual' atmospheric conditions are the essence of all long-range (e.g. seasonal) forecasts. This is different from a weather forecast, which gives a lot more precise detail - both in time and space - of the evolution of the state of the atmosphere over a few days into the future. Beyond a few days, the chaotic nature of the atmosphere limits the possibility to predict precise changes at local scales. This is one of the reasons long-range forecasts of atmospheric conditions have large uncertainties. To quantify such uncertainties, long-range forecasts use ensembles, and meaningful forecast products reflect a distribution of outcomes.\nGiven the complex, non-linear interactions between the individual components of the Earth system, the best tools for long-range forecasting are climate models which include as many of the key components of the system as possible; typically, such models include representations of the atmosphere, ocean and land surface. These models are initialised with data describing the state of the system at the starting point of the forecast, and used to predict the evolution of this state in time.\nWhile uncertainties coming from imperfect knowledge of the initial conditions of the components of the Earth system can be described with the use of ensembles, uncertainty arising from approximations made in the models is very much dependent on the choice of model. A convenient way to quantify the effect of these approximations is to combine outputs from several models, independently developed, initialised and operated.\nTo this effect, the C3S provides a multi-system seasonal forecast service, where data produced by state-of-the-art seasonal forecast systems developed, implemented and operated at forecast centres in several European countries is collected, processed and combined to enable user-relevant applications. The composition of the C3S seasonal multi-system and the full content of the database underpinning the service are described in the documentation. The data is grouped in several catalogue entries (CDS datasets), currently defined by the type of variable (single-level or multi-level, on pressure surfaces) and the level of post-processing applied (data at original time resolution, processing on temporal aggregation and post-processing related to bias adjustment).\nThe variables available in this data set are listed in the table below. The data includes forecasts created in real-time (since 2017) and retrospective forecasts (hindcasts) initialised at equivalent intervals during the period 1993-2016.\n\nVariables in the dataset/application are:\nGeopotential, Specific humidity, Temperature, U-component of wind, V-component of wind ",,,,,"ECMWF,CDS,C3S,seasonal,forecast,monthly,pressure,levels",ATMOSPHERIC,proprietary,Seasonal forecast monthly statistics on pressure levels,1981-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +SEASONAL_MONTHLY_SL,"This entry covers single-level data aggregated on a monthly time resolution. 
\nSeasonal forecasts provide a long-range outlook of changes in the Earth system over periods of a few weeks or months, as a result of predictable changes in some of the slow-varying components of the system. For example, ocean temperatures typically vary slowly, on timescales of weeks or months; as the ocean has an impact on the overlaying atmosphere, the variability of its properties (e.g. temperature) can modify both local and remote atmospheric conditions. Such modifications of the 'usual' atmospheric conditions are the essence of all long-range (e.g. seasonal) forecasts. This is different from a weather forecast, which gives a lot more precise detail - both in time and space - of the evolution of the state of the atmosphere over a few days into the future. Beyond a few days, the chaotic nature of the atmosphere limits the possibility to predict precise changes at local scales. This is one of the reasons long-range forecasts of atmospheric conditions have large uncertainties. To quantify such uncertainties, long-range forecasts use ensembles, and meaningful forecast products reflect a distribution of outcomes.\nGiven the complex, non-linear interactions between the individual components of the Earth system, the best tools for long-range forecasting are climate models which include as many of the key components of the system as possible; typically, such models include representations of the atmosphere, ocean and land surface. These models are initialised with data describing the state of the system at the starting point of the forecast, and used to predict the evolution of this state in time.\nWhile uncertainties coming from imperfect knowledge of the initial conditions of the components of the Earth system can be described with the use of ensembles, uncertainty arising from approximations made in the models is very much dependent on the choice of model. A convenient way to quantify the effect of these approximations is to combine outputs from several models, independently developed, initialised and operated.\nTo this effect, the C3S provides a multi-system seasonal forecast service, where data produced by state-of-the-art seasonal forecast systems developed, implemented and operated at forecast centres in several European countries is collected, processed and combined to enable user-relevant applications. The composition of the C3S seasonal multi-system and the full content of the database underpinning the service are described in the documentation. The data is grouped in several catalogue entries (CDS datasets), currently defined by the type of variable (single-level or multi-level, on pressure surfaces) and the level of post-processing applied (data at original time resolution, processing on temporal aggregation and post-processing related to bias adjustment).\nThe variables available in this data set are listed in the table below. 
The data includes forecasts created in real-time (since 2017) and retrospective forecasts (hindcasts) initialised at equivalent intervals during the period 1993-2016.\n\nVariables in the dataset/application are:\n10m u-component of wind, 10m v-component of wind, 10m wind gust since previous post-processing, 10m wind speed, 2m dewpoint temperature, 2m temperature, East-west surface stress rate of accumulation, Evaporation, Maximum 2m temperature in the last 24 hours, Mean sea level pressure, Mean sub-surface runoff rate, Mean surface runoff rate, Minimum 2m temperature in the last 24 hours, North-south surface stress rate of accumulation, Runoff, Sea surface temperature, Sea-ice cover, Snow density, Snow depth, Snowfall, Soil temperature level 1, Solar insolation rate of accumulation, Surface latent heat flux, Surface sensible heat flux, Surface solar radiation, Surface solar radiation downwards, Surface thermal radiation, Surface thermal radiation downwards, Top solar radiation, Top thermal radiation, Total cloud cover, Total precipitation ",,,,,"ECMWF,CDS,C3S,seasonal,forecast,monthly,single,levels",ATMOSPHERIC,proprietary,Seasonal forecast monthly statistics on single levels,1981-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +SEASONAL_ORIGINAL_PL,"This entry covers pressure-level data at the original time resolution (once every 12 hours). \nSeasonal forecasts provide a long-range outlook of changes in the Earth system over periods of a few weeks or months, as a result of predictable changes in some of the slow-varying components of the system. For example, ocean temperatures typically vary slowly, on timescales of weeks or months; as the ocean has an impact on the overlaying atmosphere, the variability of its properties (e.g. temperature) can modify both local and remote atmospheric conditions. Such modifications of the 'usual' atmospheric conditions are the essence of all long-range (e.g. seasonal) forecasts. This is different from a weather forecast, which gives a lot more precise detail - both in time and space - of the evolution of the state of the atmosphere over a few days into the future. Beyond a few days, the chaotic nature of the atmosphere limits the possibility to predict precise changes at local scales. This is one of the reasons long-range forecasts of atmospheric conditions have large uncertainties. To quantify such uncertainties, long-range forecasts use ensembles, and meaningful forecast products reflect a distribution of outcomes.\nGiven the complex, non-linear interactions between the individual components of the Earth system, the best tools for long-range forecasting are climate models which include as many of the key components of the system as possible; typically, such models include representations of the atmosphere, ocean and land surface. These models are initialised with data describing the state of the system at the starting point of the forecast, and used to predict the evolution of this state in time.\nWhile uncertainties coming from imperfect knowledge of the initial conditions of the components of the Earth system can be described with the use of ensembles, uncertainty arising from approximations made in the models is very much dependent on the choice of model. 
A convenient way to quantify the effect of these approximations is to combine outputs from several models, independently developed, initialised and operated.\nTo this effect, the C3S provides a multi-system seasonal forecast service, where data produced by state-of-the-art seasonal forecast systems developed, implemented and operated at forecast centres in several European countries is collected, processed and combined to enable user-relevant applications. The composition of the C3S seasonal multi-system and the full content of the database underpinning the service are described in the documentation. The data is grouped in several catalogue entries (CDS datasets), currently defined by the type of variable (single-level or multi-level, on pressure surfaces) and the level of post-processing applied (data at original time resolution, processing on temporal aggregation and post-processing related to bias adjustment).\nThe variables available in this data set are listed in the table below. The data includes forecasts created in real-time (since 2017) and retrospective forecasts (hindcasts) initialised at equivalent intervals during the period 1993-2016.\n\nVariables in the dataset/application are:\nGeopotential, Specific humidity, Temperature, U-component of wind, V-component of wind ",,,,,"ECMWF,CDS,C3S,seasonal,forecast,subdaily,pressure,levels",ATMOSPHERIC,proprietary,Seasonal forecast subdaily data on pressure levels,1981-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +SEASONAL_ORIGINAL_SL,"This entry covers single-level data at the original time resolution (once a day, or once every 6 hours, depending on the variable). \nSeasonal forecasts provide a long-range outlook of changes in the Earth system over periods of a few weeks or months, as a result of predictable changes in some of the slow-varying components of the system. For example, ocean temperatures typically vary slowly, on timescales of weeks or months; as the ocean has an impact on the overlaying atmosphere, the variability of its properties (e.g. temperature) can modify both local and remote atmospheric conditions. Such modifications of the 'usual' atmospheric conditions are the essence of all long-range (e.g. seasonal) forecasts. This is different from a weather forecast, which gives a lot more precise detail - both in time and space - of the evolution of the state of the atmosphere over a few days into the future. Beyond a few days, the chaotic nature of the atmosphere limits the possibility to predict precise changes at local scales. This is one of the reasons long-range forecasts of atmospheric conditions have large uncertainties. To quantify such uncertainties, long-range forecasts use ensembles, and meaningful forecast products reflect a distribution of outcomes.\nGiven the complex, non-linear interactions between the individual components of the Earth system, the best tools for long-range forecasting are climate models which include as many of the key components of the system as possible; typically, such models include representations of the atmosphere, ocean and land surface. These models are initialised with data describing the state of the system at the starting point of the forecast, and used to predict the evolution of this state in time.\nWhile uncertainties coming from imperfect knowledge of the initial conditions of the components of the Earth system can be described with the use of ensembles, uncertainty arising from approximations made in the models is very much dependent on the choice of model. 
A convenient way to quantify the effect of these approximations is to combine outputs from several models, independently developed, initialised and operated.\nTo this effect, the C3S provides a multi-system seasonal forecast service, where data produced by state-of-the-art seasonal forecast systems developed, implemented and operated at forecast centres in several European countries is collected, processed and combined to enable user-relevant applications. The composition of the C3S seasonal multi-system and the full content of the database underpinning the service are described in the documentation. The data is grouped in several catalogue entries (CDS datasets), currently defined by the type of variable (single-level or multi-level, on pressure surfaces) and the level of post-processing applied (data at original time resolution, processing on temporal aggregation and post-processing related to bias adjustment).\nThe variables available in this data set are listed in the table below. The data includes forecasts created in real-time (since 2017) and retrospective forecasts (hindcasts) initialised at equivalent intervals during the period 1993-2016.\n\nVariables in the dataset/application are:\n10m u-component of wind, 10m v-component of wind, 10m wind gust since previous post-processing, 2m dewpoint temperature, 2m temperature, Eastward turbulent surface stress, Evaporation, Land-sea mask, Maximum 2m temperature in the last 24 hours, Mean sea level pressure, Minimum 2m temperature in the last 24 hours, Northward turbulent surface stress, Orography, Runoff, Sea surface temperature, Sea-ice cover, Snow density, Snow depth, Snowfall, Soil temperature level 1, Sub-surface runoff, Surface latent heat flux, Surface net solar radiation, Surface net thermal radiation, Surface runoff, Surface sensible heat flux, Surface solar radiation downwards, Surface thermal radiation downwards, TOA incident solar radiation, Top net solar radiation, Top net thermal radiation, Total cloud cover, Total precipitation ",,,,,"ECMWF,CDS,C3S,seasonal,forecast,daily,single,levels",ATMOSPHERIC,proprietary,Seasonal forecast daily and subdaily data on single levels,1981-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +SEASONAL_POSTPROCESSED_PL,"This entry covers pressure-level data post-processed for bias adjustment on a monthly time resolution. \nSeasonal forecasts provide a long-range outlook of changes in the Earth system over periods of a few weeks or months, as a result of predictable changes in some of the slow-varying components of the system. For example, ocean temperatures typically vary slowly, on timescales of weeks or months; as the ocean has an impact on the overlaying atmosphere, the variability of its properties (e.g. temperature) can modify both local and remote atmospheric conditions. Such modifications of the 'usual' atmospheric conditions are the essence of all long-range (e.g. seasonal) forecasts. This is different from a weather forecast, which gives a lot more precise detail - both in time and space - of the evolution of the state of the atmosphere over a few days into the future. Beyond a few days, the chaotic nature of the atmosphere limits the possibility to predict precise changes at local scales. This is one of the reasons long-range forecasts of atmospheric conditions have large uncertainties. 
To quantify such uncertainties, long-range forecasts use ensembles, and meaningful forecast products reflect a distribution of outcomes.\nGiven the complex, non-linear interactions between the individual components of the Earth system, the best tools for long-range forecasting are climate models which include as many of the key components of the system as possible; typically, such models include representations of the atmosphere, ocean and land surface. These models are initialised with data describing the state of the system at the starting point of the forecast, and used to predict the evolution of this state in time.\nWhile uncertainties coming from imperfect knowledge of the initial conditions of the components of the Earth system can be described with the use of ensembles, uncertainty arising from approximations made in the models is very much dependent on the choice of model. A convenient way to quantify the effect of these approximations is to combine outputs from several models, independently developed, initialised and operated.\nTo this effect, the C3S provides a multi-system seasonal forecast service, where data produced by state-of-the-art seasonal forecast systems developed, implemented and operated at forecast centres in several European countries is collected, processed and combined to enable user-relevant applications. The composition of the C3S seasonal multi-system and the full content of the database underpinning the service are described in the documentation. The data is grouped in several catalogue entries (CDS datasets), currently defined by the type of variable (single-level or multi-level, on pressure surfaces) and the level of post-processing applied (data at original time resolution, processing on temporal aggregation and post-processing related to bias adjustment).\nThe variables available in this data set are listed in the table below. The data includes forecasts created in real-time since 2017.\n\nVariables in the dataset/application are:\nGeopotential anomaly, Specific humidity anomaly, Temperature anomaly, U-component of wind anomaly, V-component of wind anomaly ",,,,,"ECMWF,CDS,C3S,seasonal,forecast,anomalies,pressure,levels",ATMOSPHERIC,proprietary,Seasonal forecast anomalies on pressure levels,2017-09-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +SEASONAL_POSTPROCESSED_SL,"This entry covers single-level data post-processed for bias adjustment on a monthly time resolution. \nSeasonal forecasts provide a long-range outlook of changes in the Earth system over periods of a few weeks or months, as a result of predictable changes in some of the slow-varying components of the system. For example, ocean temperatures typically vary slowly, on timescales of weeks or months; as the ocean has an impact on the overlaying atmosphere, the variability of its properties (e.g. temperature) can modify both local and remote atmospheric conditions. Such modifications of the 'usual' atmospheric conditions are the essence of all long-range (e.g. seasonal) forecasts. This is different from a weather forecast, which gives a lot more precise detail - both in time and space - of the evolution of the state of the atmosphere over a few days into the future. Beyond a few days, the chaotic nature of the atmosphere limits the possibility to predict precise changes at local scales. This is one of the reasons long-range forecasts of atmospheric conditions have large uncertainties. 
To quantify such uncertainties, long-range forecasts use ensembles, and meaningful forecast products reflect a distribution of outcomes.\nGiven the complex, non-linear interactions between the individual components of the Earth system, the best tools for long-range forecasting are climate models which include as many of the key components of the system as possible; typically, such models include representations of the atmosphere, ocean and land surface. These models are initialised with data describing the state of the system at the starting point of the forecast, and used to predict the evolution of this state in time.\nWhile uncertainties coming from imperfect knowledge of the initial conditions of the components of the Earth system can be described with the use of ensembles, uncertainty arising from approximations made in the models is very much dependent on the choice of model. A convenient way to quantify the effect of these approximations is to combine outputs from several models, independently developed, initialised and operated.\nTo this effect, the C3S provides a multi-system seasonal forecast service, where data produced by state-of-the-art seasonal forecast systems developed, implemented and operated at forecast centres in several European countries is collected, processed and combined to enable user-relevant applications. The composition of the C3S seasonal multi-system and the full content of the database underpinning the service are described in the documentation. The data is grouped in several catalogue entries (CDS datasets), currently defined by the type of variable (single-level or multi-level, on pressure surfaces) and the level of post-processing applied (data at original time resolution, processing on temporal aggregation and post-processing related to bias adjustment).\nThe variables available in this data set are listed in the table below. 
The data includes forecasts created in real-time since 2017.\n\nVariables in the dataset/application are:\n10m u-component of wind anomaly, 10m v-component of wind anomaly, 10m wind gust anomaly, 10m wind speed anomaly, 2m dewpoint temperature anomaly, 2m temperature anomaly, East-west surface stress anomalous rate of accumulation, Evaporation anomalous rate of accumulation, Maximum 2m temperature in the last 24 hours anomaly, Mean sea level pressure anomaly, Mean sub-surface runoff rate anomaly, Mean surface runoff rate anomaly, Minimum 2m temperature in the last 24 hours anomaly, North-south surface stress anomalous rate of accumulation, Runoff anomalous rate of accumulation, Sea surface temperature anomaly, Sea-ice cover anomaly, Snow density anomaly, Snow depth anomaly, Snowfall anomalous rate of accumulation, Soil temperature anomaly level 1, Solar insolation anomalous rate of accumulation, Surface latent heat flux anomalous rate of accumulation, Surface sensible heat flux anomalous rate of accumulation, Surface solar radiation anomalous rate of accumulation, Surface solar radiation downwards anomalous rate of accumulation, Surface thermal radiation anomalous rate of accumulation, Surface thermal radiation downwards anomalous rate of accumulation, Top solar radiation anomalous rate of accumulation, Top thermal radiation anomalous rate of accumulation, Total cloud cover anomaly, Total precipitation anomalous rate of accumulation ",,,,,"ECMWF,CDS,C3S,seasonal,forecast,anomalies,single,levels",ATMOSPHERIC,proprietary,Seasonal forecast anomalies on single levels,2017-09-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +SIS_HYDRO_MET_PROJ,"This dataset provides precipitation and near surface air temperature for Europe as Essential Climate Variables (ECVs) and as a set of Climate Impact Indicators (CIIs) based on the ECVs. \nECV datasets provide the empirical evidence needed to understand the current climate and predict future changes. \nCIIs contain condensed climate information which facilitates relatively quick and efficient subsequent analysis. Therefore, CIIs make climate information accessible to application-focussed users within a sector.\nThe ECVs and CIIs provided here were derived within the water management sectoral information service to address questions specific to the water sector. However, the products are provided in a generic form and are relevant for a range of sectors, for example agriculture and energy.\nThe data represent the current state-of-the-art in Europe for regional climate modelling and indicator production. Data from eight model simulations included in the Coordinated Regional Climate Downscaling Experiment (CORDEX) were used to calculate a total of two ECVs and five CIIs at a spatial resolution of 0.11° x 0.11° and 5km x 5km.\nThe ECV data meet the technical specification set by the Global Climate Observing System (GCOS); as such, they are provided on a daily time step. They are bias adjusted using the EFAS gridded observations as a reference dataset. Note these are model output data, not observation data as is the general case for ECVs.\nThe CIIs are provided as mean values over a 30-year time period. For the reference period (1971-2000) data is provided as absolute values; for the future periods, the data is provided as absolute values and as the relative or absolute change from the reference period. 
The future periods cover 3 fixed time periods (2011-2040, 2041-2070 and 2071-2100) and 3 \""degree scenario\"" periods defined by when global warming exceeds a given threshold (1.5 °C, 2.0 °C or 3.0 °C). The global warming is calculated from the global climate model (GCM) used; therefore, the actual time period of the degree scenarios will be different for each GCM.\nThis dataset is produced and quality assured by the Swedish Meteorological and Hydrological Institute on behalf of the Copernicus Climate Change Service. \n\nVariables in the dataset/application are:\n2m air temperature, Highest 5-day precipitation amount, Longest dry spells, Number of dry spells, Precipitation ",,,,,"ECMWF,CDS,C3S,hydrology,meterology,water,precipitation,temperature",ATMOSPHERIC,proprietary,Temperature and precipitation climate impact indicators from 1970 to 2100 derived from European climate projections,1970-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +SPOT5_SPIRIT,SPOT 5 stereoscopic survey of Polar Ice. ,,SPOT5,SPOT5,L1A,"SPOT,SPOT5,L1A",OPTICAL,proprietary,Spot 5 SPIRIT,2002-05-04T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +SPOT_SWH,The Spot World Heritage (SWH) programme objective is the free availability for non-commercial use of orthorectified products derived from multispectral images more than 5 years old from the Spot 1-5 satellite family. More information at https://www.theia-land.fr/en/product/spot-world-heritage/ ,,SPOT1-5,SPOT1-5,L1C,"SPOT,SPOT1,SPOT2,SPOT3,SPOT4,SPOT5,L1C",OPTICAL,proprietary,Spot World Heritage,1986-02-22T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +SPOT_SWH_OLD,Spot World Heritage old format. ,,SPOT1-5,SPOT1-5,L1C,"SPOT,SPOT1,SPOT2,SPOT3,SPOT4,SPOT5,L1C",OPTICAL,proprietary,Spot World Heritage,1986-02-22T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +TIGGE_CF_SFC,TIGGE (THORPEX Interactive Grand Global Ensemble) Surface Control forecast from ECMWF ,,TIGGE,TIGGE,,"THORPEX,TIGGE,CF,SFC,ECMWF",ATMOSPHERIC,proprietary,TIGGE ECMWF Surface Control forecast,2003-01-01T00:00:00Z,,,,,,,,,,,available,,,,,,,,,, +UERRA_EUROPE_SL,"This UERRA dataset contains analyses of surface and near-surface essential climate variables from UERRA-HARMONIE and MESCAN-SURFEX systems. Forecasts up to 30 hours initialised from the analyses at 00 and 12 UTC are available only through the CDS-API (see Documentation). UERRA-HARMONIE is a 3-dimensional variational data assimilation system, while MESCAN-SURFEX is a complementary surface analysis system. Using the Optimal Interpolation method, MESCAN provides the best estimate of daily accumulated precipitation and six-hourly air temperature and relative humidity at 2 meters above the model topography. The land surface platform SURFEX is forced with downscaled forecast fields from UERRA-HARMONIE as well as MESCAN analyses. It is run offline, i.e. without feedback to the atmospheric analysis performed in MESCAN or the UERRA-HARMONIE data assimilation cycles. Using SURFEX offline allows taking full benefit of the precipitation analysis and using the more advanced physics options to better represent surface variables such as surface temperature and surface fluxes, and soil processes related to water and heat transfer in the soil and snow. In general, the assimilation systems are able to estimate biases between observations and to sift good-quality data from poor data. The laws of physics allow for estimates at locations where data coverage is low. 
The provision of estimates at each grid point in Europe for each regular output time, over a long period, always using the same format, makes reanalysis a very convenient and popular dataset to work with. The observing system has changed drastically over time, and although the assimilation system can resolve data holes, the much sparser observational networks, e.g. in the 1960s, will have an impact on the quality of analyses leading to less accurate estimates. The improvement over global reanalysis products comes with the higher horizontal resolution that allows incorporating more regional details (e.g. topography). Moreover, it even enables the system to use more observations at places with dense observation networks. Variables in the dataset/application are: 10m wind direction, 10m wind speed, 2m relative humidity, 2m temperature, Albedo, High cloud cover, Land sea mask, Low cloud cover, Mean sea level pressure, Medium cloud cover, Orography, Skin temperature, Snow density, Snow depth water equivalent, Surface pressure, Surface roughness, Total cloud cover, Total column integrated water vapour, Total precipitation ",,SURFEX,SURFEX,,"Climate,ECMWF,Reanalysis,Regional,Europe,UERRA,UERRA-HARMONIE,SURFEX,MESCAN-SURFEX,CDS,Atmospheric,single,levels",ATMOSPHERIC,proprietary,UERRA regional reanalysis for Europe on single levels from 1961 to 2019,1961-01-01T00:00:00Z,,,,available,,,,,,,,,,,,,,,,,available +VENUS_L1C,A brief description of Venus L1 data is available at http://www.cesbio.ups-tlse.fr/multitemp/?page_id=12984 ,,VENUS,VENUS,L1C,"VENUS,L1,L1C",OPTICAL,proprietary,Venus Level1-C,2017-08-02T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +VENUS_L2A_MAJA,"Level2 products provide surface reflectances after atmospheric correction, along with masks of clouds and their shadows. Data is processed by MAJA (formerly called MACCS) for the THEIA land data center. ",,VENUS,VENUS,L2A,"VENUS,L2,L2A",OPTICAL,proprietary,Venus Level2-A,2017-08-02T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, +VENUS_L3A_MAJA,,,VENUS,VENUS,L3A,"VENUS,L3,L3A",OPTICAL,proprietary,Venus Level3-A,2017-08-02T00:00:00Z,,,,,,,,,,,,,,,,,,available,,, diff --git a/docs/api_reference/core.rst b/docs/api_reference/core.rst index 6a5f8551e..106fe39dd 100644 --- a/docs/api_reference/core.rst +++ b/docs/api_reference/core.rst @@ -80,9 +80,10 @@ Misc EODataAccessGateway.group_by_extent EODataAccessGateway.guess_product_type + EODataAccessGateway.list_queryables .. autoclass:: eodag.api.core.EODataAccessGateway :members: set_preferred_provider, get_preferred_provider, update_providers_config, list_product_types, available_providers, search, search_all, search_iter_page, crunch, download, download_all, serialize, deserialize, deserialize_and_register, load_stac_items, group_by_extent, guess_product_type, get_cruncher, - update_product_types_list, fetch_product_types_list, discover_product_types + update_product_types_list, fetch_product_types_list, discover_product_types, list_queryables diff --git a/docs/api_reference/index.rst b/docs/api_reference/index.rst index 23163af07..945d91346 100644 --- a/docs/api_reference/index.rst +++ b/docs/api_reference/index.rst @@ -14,4 +14,5 @@ The Python API Reference provides an overview of all public objects, functions a eoproduct utils exceptions + types call_graphs diff --git a/docs/api_reference/types.rst b/docs/api_reference/types.rst new file mode 100644 index 000000000..0850b0ac3 --- /dev/null +++ b/docs/api_reference/types.rst @@ -0,0 +1,6 @@ +===== +Types +===== + +.. 
automodule:: eodag.types.__init__ + :members: diff --git a/docs/api_reference/utils.rst b/docs/api_reference/utils.rst index 315bf26f1..1c1dc1bd1 100644 --- a/docs/api_reference/utils.rst +++ b/docs/api_reference/utils.rst @@ -13,7 +13,7 @@ Logging Callbacks ----------------- -.. autofunction:: eodag.utils.DownloadedCallback +.. autofunction:: eodag.api.product.DownloadedCallback .. autofunction:: eodag.utils.ProgressCallback Notebook diff --git a/docs/cli_user_guide.rst b/docs/cli_user_guide.rst index d36035d7a..50763fc06 100644 --- a/docs/cli_user_guide.rst +++ b/docs/cli_user_guide.rst @@ -28,7 +28,6 @@ Then you can start playing with it: list List supported product types search Search satellite images by their product types,... serve-rest Start eodag HTTP server - serve-rpc Start eodag rpc server version Print eodag version and exit * Each command has its own help, see for instance the help of the ``list`` command with ``eodag list --help``: diff --git a/docs/conf.py b/docs/conf.py index 657510baa..bf5ca4a4d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -17,11 +17,7 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # -try: - from importlib.metadata import metadata # type: ignore -except ImportError: # pragma: no cover - # for python < 3.8 - from importlib_metadata import metadata # type: ignore +from importlib.metadata import metadata from typing import Dict # -- General configuration ------------------------------------------------ diff --git a/docs/getting_started_guide/configure.rst b/docs/getting_started_guide/configure.rst index ac0559c59..7b98933c5 100644 --- a/docs/getting_started_guide/configure.rst +++ b/docs/getting_started_guide/configure.rst @@ -58,6 +58,11 @@ one (e.g. credentials). *PEPS*'s configuration template is shown below: +.. note:: + + Please write settings values as plain text, without quotes, to avoid ``PyYAML`` interpreting potential special + characters. See https://pyyaml.org/wiki/PyYAMLDocumentation for more information. 
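As a quick illustration of the note above (an editor's minimal sketch, not part of the changeset; the provider block and credential values are only examples), PyYAML keeps plain, unquoted scalars verbatim, so values containing characters such as ``@``, ``.`` or ``-`` can be written as-is:

.. code-block:: python

    import yaml

    # A hypothetical eodag.yml fragment written as plain, unquoted text.
    snippet = """
    peps:
        priority: 1
        auth:
            credentials:
                username: user@example.com
                password: p@ss.w0rd-2024
    """

    # Plain scalars are kept verbatim by the parser, special characters included.
    print(yaml.safe_load(snippet))
    # {'peps': {'priority': 1, 'auth': {'credentials':
    #     {'username': 'user@example.com', 'password': 'p@ss.w0rd-2024'}}}}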
+ | Users can directly modify the default file, which is then loaded automatically: @@ -137,6 +142,7 @@ Core configuration using environment variables Some EODAG core settings can be overridden using environment variables: +* ``EODAG_CFG_DIR`` for defining a customized configuration directory to be used in place of ``~/.config/eodag`` * ``EODAG_CFG_FILE`` for defining the desired path to the `user configuration file\ `_ * ``EODAG_LOCS_CFG_FILE`` for defining the desired path to the diff --git a/docs/getting_started_guide/providers.rst b/docs/getting_started_guide/providers.rst index ff07d3e40..377917eb1 100644 --- a/docs/getting_started_guide/providers.rst +++ b/docs/getting_started_guide/providers.rst @@ -13,6 +13,7 @@ Products from the following providers are made available through ``eodag``: * `peps `_: French National Space Agency (CNES) catalog for Sentinel products * `aws_eos `_: EOS search for Amazon public datasets * `creodias `_: CloudFerro DIAS +* `creodias_s3 `_: CloudFerro DIAS data through S3 protocol * `onda `_: Serco DIAS * `astraea_eod `_: Astraea Earth OnDemand STAC API * `usgs_satapi_aws `_: USGS Landsatlook SAT API diff --git a/docs/getting_started_guide/register.rst b/docs/getting_started_guide/register.rst index d4397c391..ab9540628 100644 --- a/docs/getting_started_guide/register.rst +++ b/docs/getting_started_guide/register.rst @@ -22,6 +22,9 @@ to each provider supported by ``eodag``: `Authenticate using an OTP `__ to see how to proceed. +* ``creodias_s3``: Create an account on `creodias `__, then go to `keymanager `__ and + click `Add credential` to generate the S3 access key and secret key. Add those credentials to the user configuration file (variables `aws_access_key_id` and `aws_secret_access_key`). + * ``onda``: create an account `here: `__ * ``ecmwf``: create an account `here `__. @@ -116,7 +119,7 @@ to each provider supported by ``eodag``: .. code-block:: bash - curl -X GET --header "Authorization: Basic $(echo USERNAME:PASSWORD | base64)" "https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/gettoken" + curl -X GET --header "Authorization: Basic $(echo USERNAME:PASSWORD | base64)" "https://wekeo-broker.prod.wekeo2.eu/databroker/gettoken" The WEkEO API will respond with a token: @@ -128,4 +131,4 @@ to each provider supported by ``eodag``: .. code-block:: bash - curl --request PUT --header 'accept: application/json' --header 'authorization: ' --data 'accepted=true' https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/termsaccepted/Copernicus_General_License + curl --request PUT --header 'accept: application/json' --header 'authorization: ' --data 'accepted=true' https://wekeo-broker.prod.wekeo2.eu/databroker/termsaccepted/Copernicus_General_License diff --git a/docs/index.rst b/docs/index.rst index 03ffebfa4..51e24aa19 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -30,7 +30,8 @@ types (Sentinel 1, Sentinel 2, Sentinel 3, Landsat, etc.) 
that can be searched a `usgs_satapi_aws `_, `earth_search `_, `earth_search_gcs `_, `usgs `_, `theia `_, - `peps `_, `aws_eos `_, `creodias `_, + `peps `_, `aws_eos `_, + `creodias `_, `creodias_s3 `_, `onda `_, `ecmwf `_, `cop_ads `_, `cop_cds `_, `sara `_, diff --git a/docs/notebooks/api_user_guide/4_search.ipynb b/docs/notebooks/api_user_guide/4_search.ipynb index 9e0049489..86c6967b7 100644 --- a/docs/notebooks/api_user_guide/4_search.ipynb +++ b/docs/notebooks/api_user_guide/4_search.ipynb @@ -33,8 +33,8 @@ "name": "stderr", "output_type": "stream", "text": [ - "2023-01-13 11:03:19,171 eodag.config [INFO ] Loading user configuration from: /home/sylvain/.config/eodag/eodag.yml\n", - "2023-01-13 11:03:19,388 eodag.core [INFO ] Locations configuration loaded from /home/sylvain/.config/eodag/locations.yml\n" + "2024-02-19 12:00:29,455 eodag.config [INFO ] Loading user configuration from: /home/sylvain/.config/eodag/eodag.yml\n", + "2024-02-19 12:00:29,545 eodag.core [INFO ] Locations configuration loaded from /home/sylvain/.config/eodag/locations.yml\n" ] } ], @@ -2494,6 +2494,284 @@ "2023-07-03 13:38:24,041 eodag.core [INFO] (core) No result from provider 'peps' due to an error during search. Raise verbosity of log messages for details\n", "2023-07-03 13:38:24,041 eodag.core [WARNING] (core) No result could be obtained from provider peps, we will try to get the data from another provider" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Queryables" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To know which query parameters can be used to filter the search result, the `list_queryables` method can be used. It has two optional arguments, `provider` and `productType`; additional keyword arguments can be added to filter the queryable values.\n", + "\n", + "If the function is called without any arguments, the basic queryables that are available for all providers and product types will be returned:" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'productType': typing.Annotated[str, FieldInfo(annotation=NoneType, required=True)],\n", + " 'id': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'start': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='startTimeFromAscendingNode', alias_priority=2)],\n", + " 'end': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='completionTimeFromAscendingNode', alias_priority=2)],\n", + " 'geom': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='geometry', alias_priority=2)]}" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dag.list_queryables()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If the function is called for a specific product type, the queryables available for this product type, i.e., the basic queryables and the product-type-specific queryables, are shown. Only parameters available for all providers offering the product are returned." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'sky_type': typing.Annotated[typing.Literal['clear', 'observed_cloud'], FieldInfo(annotation=NoneType, required=False, default='clear')],\n", + " 'time_step': typing.Annotated[typing.Literal['15minute', '1day', '1hour', '1minute', '1month'], FieldInfo(annotation=NoneType, required=False, default='1minute')],\n", + " 'time_reference': typing.Annotated[typing.Literal['true_solar_time', 'universal_time'], FieldInfo(annotation=NoneType, required=False, default='true_solar_time')],\n", + " 'location': typing.Annotated[dict, FieldInfo(annotation=NoneType, required=False, default={'latitude': 0, 'longitude': 0})],\n", + " 'altitude': typing.Annotated[int, FieldInfo(annotation=NoneType, required=False, default=-999)],\n", + " 'format': typing.Annotated[typing.Literal['csv', 'netcdf'], FieldInfo(annotation=NoneType, required=False, default='csv')],\n", + " 'end': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='completionTimeFromAscendingNode', alias_priority=2)],\n", + " 'geom': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='geometry', alias_priority=2)],\n", + " 'productType': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, default='CAMS_SOLAR_RADIATION')],\n", + " 'id': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'start': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='startTimeFromAscendingNode', alias_priority=2)]}" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dag.list_queryables(productType=\"CAMS_SOLAR_RADIATION\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The queryables can also be retrieved for a specific provider. These queryables are taken either from the provider's queryables endpoint (if available), from the constraints defined by the provider for the query parameter values (if available), or from the provider configuration in EODAG. In the example below, we use a provider offering a queryables endpoint."
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2024-02-19 12:01:07,071 eodag.search.qssearch [INFO ] Fetching queryables: https://planetarycomputer.microsoft.com/api/stac/v1/search/../collections/sentinel-1-grd/queryables\n" + ] + }, + { + "data": { + "text/plain": [ + "{'id': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'end': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='completionTimeFromAscendingNode', alias_priority=2)],\n", + " 'geom': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='geometry', alias_priority=2)],\n", + " 'platform': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 's1:shape': typing.Annotated[list, FieldInfo(annotation=NoneType, required=False)],\n", + " 'end_datetime': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, title='End datetime', description='End datetime', metadata=[PydanticGeneralMetadata(pattern='(\\\\+00:00|Z)$')])],\n", + " 's1:resolution': typing.Annotated[typing.Literal['full', 'high', 'medium'], FieldInfo(annotation=NoneType, required=False, title='Resolution')],\n", + " 's1:datatake_id': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, title='Datatake ID')],\n", + " 'start_datetime': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, title='Start datetime', description='Start datetime', metadata=[PydanticGeneralMetadata(pattern='(\\\\+00:00|Z)$')])],\n", + " 's1:orbit_source': typing.Annotated[typing.Literal['DOWNLINK', 'POEORB', 'PREORB', 'RESORB'], FieldInfo(annotation=NoneType, required=False, title='Orbit Source')],\n", + " 's1:slice_number': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, title='Slice Number')],\n", + " 's1:total_slices': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, title='Total Slices')],\n", + " 'sar:looks_range': typing.Annotated[int, FieldInfo(annotation=NoneType, required=False, title='Looks range')],\n", + " 'orbitDirection': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'sar:product_type': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, title='Product type')],\n", + " 'sar:looks_azimuth': typing.Annotated[int, FieldInfo(annotation=NoneType, required=False, title='Looks azimuth')],\n", + " 'polarizationChannels': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'dopplerFrequency': typing.Annotated[typing.Optional[float], FieldInfo(annotation=NoneType, required=False)],\n", + " 'sat:absolute_orbit': typing.Annotated[NoneType, FieldInfo(annotation=NoneType, required=False, title='Absolute Orbit', json_schema_extra={'$ref': 'https://stac-extensions.github.io/sat/v1.0.0/schema.json#/definitions/fields/properties/sat:absolute_orbit'})],\n", + " 'orbitNumber': typing.Annotated[typing.Optional[int], FieldInfo(annotation=NoneType, required=False)],\n", + " 's1:processing_level': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False)],\n", + " 'sensorMode': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'sar:center_frequency': typing.Annotated[float, FieldInfo(annotation=NoneType, required=False, title='Center Frequency (GHz)')],\n", + " 
'sar:resolution_range': typing.Annotated[float, FieldInfo(annotation=NoneType, required=False, title='Resolution range (m)')],\n", + " 's1:product_timeliness': typing.Annotated[typing.Literal['Fast-24h', 'NRT-10m', 'NRT-1h', 'NRT-3h', 'Off-line', 'Reprocessing'], FieldInfo(annotation=NoneType, required=False, title='Product Timeliness')],\n", + " 'sar:resolution_azimuth': typing.Annotated[float, FieldInfo(annotation=NoneType, required=False, title='Resolution azimuth (m)')],\n", + " 'sar:pixel_spacing_range': typing.Annotated[float, FieldInfo(annotation=NoneType, required=False, title='Pixel spacing range (m)')],\n", + " 'sar:observation_direction': typing.Annotated[typing.Literal['left', 'right'], FieldInfo(annotation=NoneType, required=False, title='Antenna pointing direction')],\n", + " 'sar:pixel_spacing_azimuth': typing.Annotated[float, FieldInfo(annotation=NoneType, required=False, title='Pixel spacing azimuth (m)')],\n", + " 'sar:looks_equivalent_number': typing.Annotated[float, FieldInfo(annotation=NoneType, required=False, title='Equivalent number of looks (ENL)')],\n", + " 's1:instrument_configuration_ID': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False)],\n", + " 'sat:platform_international_designator': typing.Annotated[typing.Literal['0000-000A', '2014-016A', '2016-025A'], FieldInfo(annotation=NoneType, required=False, title='Platform Designation')],\n", + " 'productType': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, default='S1_SAR_GRD')],\n", + " 'doi': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'platformSerialIdentifier': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'instrument': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'processingLevel': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'resolution': typing.Annotated[typing.Optional[int], FieldInfo(annotation=NoneType, required=False)],\n", + " 'publicationDate': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'cloudCover': typing.Annotated[typing.Optional[typing.Annotated[int, Gt(gt=0), Lt(lt=100)]], FieldInfo(annotation=NoneType, required=False)],\n", + " 'productVersion': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'creationDate': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'modificationDate': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'availabilityTime': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'acquisitionStation': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'acquisitionSubType': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'illuminationAzimuthAngle': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'illuminationElevationAngle': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'start': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='startTimeFromAscendingNode', alias_priority=2)]}" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + 
"source": [ + "dag.list_queryables(productType=\"S1_SAR_GRD\", provider=\"planetary_computer\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For each queryable the possible values will be included in the result if they are available from either the provider queryables or the constraints. If constraints are available for a provider, the queryables can be filtered by the value of a specific parameter. E.g. if the queryables month and day are available, we can set the month to a specific value to get only the days available for that month." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'api_product_type': typing.Annotated[typing.Literal['reanalysis'], FieldInfo(annotation=NoneType, required=False, default='reanalysis')],\n", + " 'time': typing.Annotated[typing.Literal['00:00', '01:00', '02:00', '03:00', '04:00', '05:00', '06:00', '07:00', '08:00', '09:00', '10:00', '11:00', '12:00', '13:00', '14:00', '15:00', '16:00', '17:00', '18:00', '19:00', '20:00', '21:00', '22:00', '23:00'], FieldInfo(annotation=NoneType, required=False, default='00:00')],\n", + " 'format': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, default='grib')],\n", + " 'month': typing.Annotated[typing.Literal['01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12'], FieldInfo(annotation=NoneType, required=False, default='02')],\n", + " 'day': typing.Annotated[typing.Literal['01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29'], FieldInfo(annotation=NoneType, required=True)],\n", + " 'variable': typing.Annotated[typing.Literal['100m_u_component_of_wind', '100m_v_component_of_wind', '10m_u_component_of_neutral_wind', '10m_u_component_of_wind', '10m_v_component_of_neutral_wind', '10m_v_component_of_wind', '10m_wind_gust_since_previous_post_processing', '2m_dewpoint_temperature', '2m_temperature', 'air_density_over_the_oceans', 'angle_of_sub_gridscale_orography', 'anisotropy_of_sub_gridscale_orography', 'benjamin_feir_index', 'boundary_layer_dissipation', 'boundary_layer_height', 'charnock', 'clear_sky_direct_solar_radiation_at_surface', 'cloud_base_height', 'coefficient_of_drag_with_waves', 'convective_available_potential_energy', 'convective_inhibition', 'convective_precipitation', 'convective_rain_rate', 'convective_snowfall', 'convective_snowfall_rate_water_equivalent', 'downward_uv_radiation_at_the_surface', 'duct_base_height', 'eastward_gravity_wave_surface_stress', 'eastward_turbulent_surface_stress', 'evaporation', 'forecast_albedo', 'forecast_logarithm_of_surface_roughness_for_heat', 'forecast_surface_roughness', 'free_convective_velocity_over_the_oceans', 'friction_velocity', 'geopotential', 'gravity_wave_dissipation', 'high_cloud_cover', 'high_vegetation_cover', 'ice_temperature_layer_1', 'ice_temperature_layer_2', 'ice_temperature_layer_3', 'ice_temperature_layer_4', 'instantaneous_10m_wind_gust', 'instantaneous_eastward_turbulent_surface_stress', 'instantaneous_large_scale_surface_precipitation_fraction', 'instantaneous_moisture_flux', 'instantaneous_northward_turbulent_surface_stress', 'instantaneous_surface_sensible_heat_flux', 'k_index', 'lake_bottom_temperature', 'lake_cover', 'lake_depth', 'lake_ice_depth', 'lake_ice_temperature', 'lake_mix_layer_depth', 'lake_mix_layer_temperature', 'lake_shape_factor', 'lake_total_layer_temperature', 
'land_sea_mask', 'large_scale_precipitation', 'large_scale_precipitation_fraction', 'large_scale_rain_rate', 'large_scale_snowfall', 'large_scale_snowfall_rate_water_equivalent', 'leaf_area_index_high_vegetation', 'leaf_area_index_low_vegetation', 'low_cloud_cover', 'low_vegetation_cover', 'maximum_2m_temperature_since_previous_post_processing', 'maximum_individual_wave_height', 'maximum_total_precipitation_rate_since_previous_post_processing', 'mean_boundary_layer_dissipation', 'mean_convective_precipitation_rate', 'mean_convective_snowfall_rate', 'mean_direction_of_total_swell', 'mean_direction_of_wind_waves', 'mean_eastward_gravity_wave_surface_stress', 'mean_eastward_turbulent_surface_stress', 'mean_evaporation_rate', 'mean_gravity_wave_dissipation', 'mean_large_scale_precipitation_fraction', 'mean_large_scale_precipitation_rate', 'mean_large_scale_snowfall_rate', 'mean_northward_gravity_wave_surface_stress', 'mean_northward_turbulent_surface_stress', 'mean_period_of_total_swell', 'mean_period_of_wind_waves', 'mean_potential_evaporation_rate', 'mean_runoff_rate', 'mean_sea_level_pressure', 'mean_snow_evaporation_rate', 'mean_snowfall_rate', 'mean_snowmelt_rate', 'mean_square_slope_of_waves', 'mean_sub_surface_runoff_rate', 'mean_surface_direct_short_wave_radiation_flux', 'mean_surface_direct_short_wave_radiation_flux_clear_sky', 'mean_surface_downward_long_wave_radiation_flux', 'mean_surface_downward_long_wave_radiation_flux_clear_sky', 'mean_surface_downward_short_wave_radiation_flux', 'mean_surface_downward_short_wave_radiation_flux_clear_sky', 'mean_surface_downward_uv_radiation_flux', 'mean_surface_latent_heat_flux', 'mean_surface_net_long_wave_radiation_flux', 'mean_surface_net_long_wave_radiation_flux_clear_sky', 'mean_surface_net_short_wave_radiation_flux', 'mean_surface_net_short_wave_radiation_flux_clear_sky', 'mean_surface_runoff_rate', 'mean_surface_sensible_heat_flux', 'mean_top_downward_short_wave_radiation_flux', 'mean_top_net_long_wave_radiation_flux', 'mean_top_net_long_wave_radiation_flux_clear_sky', 'mean_top_net_short_wave_radiation_flux', 'mean_top_net_short_wave_radiation_flux_clear_sky', 'mean_total_precipitation_rate', 'mean_vertical_gradient_of_refractivity_inside_trapping_layer', 'mean_vertically_integrated_moisture_divergence', 'mean_wave_direction', 'mean_wave_direction_of_first_swell_partition', 'mean_wave_direction_of_second_swell_partition', 'mean_wave_direction_of_third_swell_partition', 'mean_wave_period', 'mean_wave_period_based_on_first_moment', 'mean_wave_period_based_on_first_moment_for_swell', 'mean_wave_period_based_on_first_moment_for_wind_waves', 'mean_wave_period_based_on_second_moment_for_swell', 'mean_wave_period_based_on_second_moment_for_wind_waves', 'mean_wave_period_of_first_swell_partition', 'mean_wave_period_of_second_swell_partition', 'mean_wave_period_of_third_swell_partition', 'mean_zero_crossing_wave_period', 'medium_cloud_cover', 'minimum_2m_temperature_since_previous_post_processing', 'minimum_total_precipitation_rate_since_previous_post_processing', 'minimum_vertical_gradient_of_refractivity_inside_trapping_layer', 'model_bathymetry', 'near_ir_albedo_for_diffuse_radiation', 'near_ir_albedo_for_direct_radiation', 'normalized_energy_flux_into_ocean', 'normalized_energy_flux_into_waves', 'normalized_stress_into_ocean', 'northward_gravity_wave_surface_stress', 'northward_turbulent_surface_stress', 'ocean_surface_stress_equivalent_10m_neutral_wind_direction', 'ocean_surface_stress_equivalent_10m_neutral_wind_speed', 'peak_wave_period', 
'period_corresponding_to_maximum_individual_wave_height', 'potential_evaporation', 'precipitation_type', 'runoff', 'sea_ice_cover', 'sea_surface_temperature', 'significant_height_of_combined_wind_waves_and_swell', 'significant_height_of_total_swell', 'significant_height_of_wind_waves', 'significant_wave_height_of_first_swell_partition', 'significant_wave_height_of_second_swell_partition', 'significant_wave_height_of_third_swell_partition', 'skin_reservoir_content', 'skin_temperature', 'slope_of_sub_gridscale_orography', 'snow_albedo', 'snow_density', 'snow_depth', 'snow_evaporation', 'snowfall', 'snowmelt', 'soil_temperature_level_1', 'soil_temperature_level_2', 'soil_temperature_level_3', 'soil_temperature_level_4', 'soil_type', 'standard_deviation_of_filtered_subgrid_orography', 'standard_deviation_of_orography', 'sub_surface_runoff', 'surface_latent_heat_flux', 'surface_net_solar_radiation', 'surface_net_solar_radiation_clear_sky', 'surface_net_thermal_radiation', 'surface_net_thermal_radiation_clear_sky', 'surface_pressure', 'surface_runoff', 'surface_sensible_heat_flux', 'surface_solar_radiation_downward_clear_sky', 'surface_solar_radiation_downwards', 'surface_thermal_radiation_downward_clear_sky', 'surface_thermal_radiation_downwards', 'temperature_of_snow_layer', 'toa_incident_solar_radiation', 'top_net_solar_radiation', 'top_net_solar_radiation_clear_sky', 'top_net_thermal_radiation', 'top_net_thermal_radiation_clear_sky', 'total_cloud_cover', 'total_column_cloud_ice_water', 'total_column_cloud_liquid_water', 'total_column_ozone', 'total_column_rain_water', 'total_column_snow_water', 'total_column_supercooled_liquid_water', 'total_column_water', 'total_column_water_vapour', 'total_precipitation', 'total_sky_direct_solar_radiation_at_surface', 'total_totals_index', 'trapping_layer_base_height', 'trapping_layer_top_height', 'type_of_high_vegetation', 'type_of_low_vegetation', 'u_component_stokes_drift', 'uv_visible_albedo_for_diffuse_radiation', 'uv_visible_albedo_for_direct_radiation', 'v_component_stokes_drift', 'vertical_integral_of_divergence_of_cloud_frozen_water_flux', 'vertical_integral_of_divergence_of_cloud_liquid_water_flux', 'vertical_integral_of_divergence_of_geopotential_flux', 'vertical_integral_of_divergence_of_kinetic_energy_flux', 'vertical_integral_of_divergence_of_mass_flux', 'vertical_integral_of_divergence_of_moisture_flux', 'vertical_integral_of_divergence_of_ozone_flux', 'vertical_integral_of_divergence_of_thermal_energy_flux', 'vertical_integral_of_divergence_of_total_energy_flux', 'vertical_integral_of_eastward_cloud_frozen_water_flux', 'vertical_integral_of_eastward_cloud_liquid_water_flux', 'vertical_integral_of_eastward_geopotential_flux', 'vertical_integral_of_eastward_heat_flux', 'vertical_integral_of_eastward_kinetic_energy_flux', 'vertical_integral_of_eastward_mass_flux', 'vertical_integral_of_eastward_ozone_flux', 'vertical_integral_of_eastward_total_energy_flux', 'vertical_integral_of_eastward_water_vapour_flux', 'vertical_integral_of_energy_conversion', 'vertical_integral_of_kinetic_energy', 'vertical_integral_of_mass_of_atmosphere', 'vertical_integral_of_mass_tendency', 'vertical_integral_of_northward_cloud_frozen_water_flux', 'vertical_integral_of_northward_cloud_liquid_water_flux', 'vertical_integral_of_northward_geopotential_flux', 'vertical_integral_of_northward_heat_flux', 'vertical_integral_of_northward_kinetic_energy_flux', 'vertical_integral_of_northward_mass_flux', 'vertical_integral_of_northward_ozone_flux', 
'vertical_integral_of_northward_total_energy_flux', 'vertical_integral_of_northward_water_vapour_flux', 'vertical_integral_of_potential_and_internal_energy', 'vertical_integral_of_potential_internal_and_latent_energy', 'vertical_integral_of_temperature', 'vertical_integral_of_thermal_energy', 'vertical_integral_of_total_energy', 'vertically_integrated_moisture_divergence', 'volumetric_soil_water_layer_1', 'volumetric_soil_water_layer_2', 'volumetric_soil_water_layer_3', 'volumetric_soil_water_layer_4', 'wave_spectral_directional_width', 'wave_spectral_directional_width_for_swell', 'wave_spectral_directional_width_for_wind_waves', 'wave_spectral_kurtosis', 'wave_spectral_peakedness', 'wave_spectral_skewness', 'zero_degree_level'], FieldInfo(annotation=NoneType, required=True)],\n", + " 'year': typing.Annotated[typing.Literal['1940', '1941', '1942', '1943', '1944', '1945', '1946', '1947', '1948', '1949', '1950', '1951', '1952', '1953', '1954', '1955', '1956', '1957', '1958', '1959', '1960', '1961', '1962', '1963', '1964', '1965', '1966', '1967', '1968', '1969', '1970', '1971', '1972', '1973', '1974', '1975', '1976', '1977', '1978', '1979', '1980', '1981', '1982', '1983', '1984', '1985', '1986', '1987', '1988', '1989', '1990', '1991', '1992', '1993', '1994', '1995', '1996', '1997', '1998', '1999', '2000', '2001', '2002', '2003', '2004', '2005', '2006', '2007', '2008', '2009', '2010', '2011', '2012', '2013', '2014', '2015', '2016', '2017', '2018', '2019', '2020', '2021', '2022', '2023', '2024'], FieldInfo(annotation=NoneType, required=True)],\n", + " 'end': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='completionTimeFromAscendingNode', alias_priority=2)],\n", + " 'geom': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='geometry', alias_priority=2)],\n", + " 'productType': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, default='ERA5_SL')],\n", + " 'id': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'start': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='startTimeFromAscendingNode', alias_priority=2)]}" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dag.list_queryables(\n", + " productType=\"ERA5_SL\", \n", + " provider=\"cop_cds\", \n", + " month=\"02\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If default values are defined for a parameter in the configuration, the values of the other parameters will be filtered by this default value if constraints are available. If all possible configurations should be shown, the default can be removed by setting the parameter to an empty value, e.g. `None`. The examples below show one request in which all default values are used and one where the default for api_product_type is removed."
+ ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'variable': typing.Annotated[typing.Literal['glacier_area'], FieldInfo(annotation=NoneType, required=False, default='glacier_area')],\n", + " 'api_product_type': typing.Annotated[typing.Literal['gridded', 'hypsometry', 'vector'], FieldInfo(annotation=NoneType, required=False, default='gridded')],\n", + " 'version': typing.Annotated[typing.Literal['6_0'], FieldInfo(annotation=NoneType, required=False, default='6_0')],\n", + " 'format': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, default='zip')],\n", + " 'end': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='completionTimeFromAscendingNode', alias_priority=2)],\n", + " 'geom': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='geometry', alias_priority=2)],\n", + " 'productType': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, default='GLACIERS_DIST_RANDOLPH')],\n", + " 'id': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'start': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='startTimeFromAscendingNode', alias_priority=2)]}" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dag.list_queryables(\n", + " productType=\"GLACIERS_DIST_RANDOLPH\", \n", + " provider=\"cop_cds\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'variable': typing.Annotated[typing.Literal['glacier_area'], FieldInfo(annotation=NoneType, required=False, default='glacier_area')],\n", + " 'api_product_type': typing.Annotated[typing.Literal['gridded', 'hypsometry', 'vector'], FieldInfo(annotation=NoneType, required=True)],\n", + " 'version': typing.Annotated[typing.Literal['5_0', '6_0'], FieldInfo(annotation=NoneType, required=False, default='6_0')],\n", + " 'format': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, default='zip')],\n", + " 'end': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='completionTimeFromAscendingNode', alias_priority=2)],\n", + " 'geom': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='geometry', alias_priority=2)],\n", + " 'productType': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False, default='GLACIERS_DIST_RANDOLPH')],\n", + " 'id': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False)],\n", + " 'start': typing.Annotated[typing.Optional[str], FieldInfo(annotation=NoneType, required=False, alias='startTimeFromAscendingNode', alias_priority=2)]}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "dag.list_queryables(\n", + " productType=\"GLACIERS_DIST_RANDOLPH\", \n", + " provider=\"cop_cds\", \n", + " api_product_type=None\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { @@ -2512,7 +2790,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.10" + "version": "3.10.12" }, "widgets": { "application/vnd.jupyter.widget-state+json": { diff --git 
a/docs/notebooks/api_user_guide/7_download.ipynb b/docs/notebooks/api_user_guide/7_download.ipynb index 4317884d9..d8a31eea0 100644 --- a/docs/notebooks/api_user_guide/7_download.ipynb +++ b/docs/notebooks/api_user_guide/7_download.ipynb @@ -1111,17 +1111,243 @@ "offline_product.location" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Download assets" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Searching on some providers (mainly STAC providers) returns products having an `assets` attribute listing single files that can be individually downloaded." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "search_results, total_count = dag.search(\n", + " productType=\"S2_MSI_L2A\", \n", + " provider=\"planetary_computer\"\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['AOT',\n", + " 'B01',\n", + " 'B02',\n", + " 'B03',\n", + " 'B04',\n", + " 'B05',\n", + " 'B06',\n", + " 'B07',\n", + " 'B08',\n", + " 'B09',\n", + " 'B11',\n", + " 'B12',\n", + " 'B8A',\n", + " 'SCL',\n", + " 'WVP',\n", + " 'visual',\n", + " 'preview',\n", + " 'safe-manifest',\n", + " 'granule-metadata',\n", + " 'inspire-metadata',\n", + " 'product-metadata',\n", + " 'datastrip-metadata',\n", + " 'tilejson',\n", + " 'rendered_preview']" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# view assets keys\n", + "[*search_results[0].assets]" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'href': 'https://sentinel2l2a01.blob.core.windows.net/sentinel2-l2/04/V/DM/2023/11/30/S2B_MSIL2A_20231130T221859_N0509_R115_T04VDM_20231130T231119.SAFE/GRANULE/L2A_T04VDM_A035176_20231130T221857/IMG_DATA/R20m/T04VDM_20231130T221859_B05_20m.tif', 'proj:bbox': [399960.0, 6590220.0, 509760.0, 6700020.0], 'proj:shape': [5490, 5490], 'proj:transform': [20.0, 0.0, 399960.0, 0.0, -20.0, 6700020.0], 'gsd': 20.0, 'type': 'image/tiff; application=geotiff; profile=cloud-optimized', 'roles': ['data'], 'title': 'Band 5 - Vegetation red edge 1 - 20m', 'eo:bands': [{'name': 'B05', 'common_name': 'rededge', 'description': 'Band 5 - Vegetation red edge 1', 'center_wavelength': 0.704, 'full_width_half_max': 0.019}]}" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# view single asset content\n", + "search_results[0].assets[\"B05\"]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Configure the provider for download, if not already done" + ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "dag.update_providers_config(f\"\"\"\n", + " planetary_computer:\n", + " auth:\n", + " credentials:\n", + " apikey: PLEASE_CHANGE_ME\n", + " download:\n", + " outputs_prefix: {os.path.abspath(workspace)}\n", + "\"\"\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Download several assets using the core `download()` method, which allows a regex in the `asset` parameter to identify the assets to download" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "2b3b72362500471a90132f82de014fe8", + "version_major": 2, 
"version_minor": 0 + }, + "text/plain": [ + "0.00B [00:00, ?B/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "'/home/sylvain/workspace/eodag/docs/notebooks/api_user_guide/eodag_workspace_download/S2B_MSIL2A_20231130T221859_R115_T04VDM_20231130T231119'" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "path = dag.download(search_results[0], asset=r\"B0[23]\")\n", + "path" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "T04VDM_20231130T221859_B02_10m.tif T04VDM_20231130T221859_B03_10m.tif\n" + ] + } + ], + "source": [ + "! ls {path}" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Download a single asset using `asset.download()` method" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "dbfd4d0a51d64b6b978aa1629e21b0f8", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "0.00B [00:00, ?B/s]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "'/home/sylvain/workspace/eodag/docs/notebooks/api_user_guide/eodag_workspace_download/S2B_MSIL2A_20231130T221859_R115_T04VDM_20231130T231119'" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "path = search_results[0].assets[\"B05\"].download()\n", + "path" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "T04VDM_20231130T221859_B02_10m.tif T04VDM_20231130T221859_B05_20m.tif\n", + "T04VDM_20231130T221859_B03_10m.tif\n" + ] + } + ], + "source": [ + "! 
ls {path}" + ] } ], "metadata": { "kernelspec": { - "display_name": "Python 3.8.10 64-bit", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -1135,7 +1361,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.10" + "version": "3.10.12" }, "vscode": { "interpreter": { @@ -1145,204 +1371,164 @@ "widgets": { "application/vnd.jupyter.widget-state+json": { "state": { - "00f74c9afcee41f781080736e4b1b2bb": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "01fe9acbf32c4e9f85fe4808dc168a7a": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "02f22b40afa244f58e3af27e9fc77886": { + "01cda13104c04e9987f5fc3fb3d12e3c": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", "state": {} }, - "03e06789065b4af2af8cbeebaf8fcd8d": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", + "0e3c2a8e89fe4af4b6fcb07db3036276": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", "state": { - "layout": "IPY_MODEL_4cef95ea6f3c4b57966f14bfd05443ac", - "style": "IPY_MODEL_5e08ebacc2e547869cc52977b723cc0c", - "value": "Extracting files from S2A_MSIL1C_20201226T105451_N0209_R051_T31TCK_20201226T130209.zip: 100%" + "flex": "2" } }, - "04a03a9173fb4bbfbc23410f3adce072": { + "12bd52f10fcd4a1681b2c2ce9f0ab728": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", "state": { - "description_width": "" + "layout": "IPY_MODEL_70d76df867bf4026bbc93ce1b4d6cac9", + "style": "IPY_MODEL_5a907a533bf44c68bcc5978db19a34f5", + "value": "S2B_MSIL2A_20231130T221859_R115_T04VDM_20231130T231119: " } }, - "0655c8b70af64a26b25bf7326b78d82e": { + "1876178fca154473b30e125beeb2b8fe": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", "state": { - "layout": "IPY_MODEL_17ae13e6a8a6446db465bcc94c8bc39b", - "style": "IPY_MODEL_763143b8898945969c8effe0a64b09a4", - "value": " 34.8k/34.8k [00:00<00:00, 293kB/s]" + "description_width": "", + "font_size": null, + "text_color": null } }, - "07909752099f404eb16272abde2c49de": { + "1e862fc0cfd1445bad4592a15f2efbad": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", + "model_module_version": "2.0.0", + "model_name": "FloatProgressModel", "state": { - "layout": "IPY_MODEL_0f77844e8f274624a3a444b59dc647a0", - "style": "IPY_MODEL_bb193001221f4c609dfe808ac234ed48", - "value": "quicklooks/S2B_MSIL1C_20201111T105259_N0209_R051_T31TCK_20201111T130651: 100%" + "bar_style": "success", + "layout": "IPY_MODEL_b4db254a24894672b72b84ba384fed32", + "max": 27396501, + "style": "IPY_MODEL_c02892dee90c44f4865ab1c89d141d50", + "value": 27396501 } }, - "09ed254fab5948e788dbf109df540e47": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "0d2e58c337ba419a8f57cba7ee7edaaa": { - "model_module": 
"@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "0e4a22a56c35441787891e03a4d82f84": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", + "229601dc5e0743d8b13e0f5d8f7548c4": { + "model_module": "@jupyter-widgets/controls", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", "state": { - "flex": "2" + "description_width": "" } }, - "0f77844e8f274624a3a444b59dc647a0": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "0ff348ae46564d968020db92ca112bda": { + "259ba31dfb6d443ab8daa1237188de5f": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", "state": { - "layout": "IPY_MODEL_d7d98c90410e405fb06ca685a4e110d7", - "style": "IPY_MODEL_5a8509581a6043e68f68fa66e43727e2", - "value": "quicklooks/S2A_MSIL1C_20201229T110451_N0209_R094_T31TCL_20201229T131620: 100%" + "description_width": "", + "font_size": null, + "text_color": null } }, - "10090630957349eab6a987405d9aa8c5": { + "2b3b72362500471a90132f82de014fe8": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "HBoxModel", "state": { "children": [ - "IPY_MODEL_22d94bd1aec24f3da650e0cc29dac906", - "IPY_MODEL_e79bd3627dc54cf8ad30dc99ed906dd4", - "IPY_MODEL_d09a2f4728cc4550b1a54f8805d9b599" + "IPY_MODEL_c571bee4657d4dffa8b63e216c5a675e", + "IPY_MODEL_1e862fc0cfd1445bad4592a15f2efbad", + "IPY_MODEL_a7a3065894874ab99725a3cc2117a5a9" ], - "layout": "IPY_MODEL_fb659e31419940758f6495224b0c556b" + "layout": "IPY_MODEL_c0537c8d38804e2b927492e42b95d2c6" } }, - "1030ffa146c54cafb45d8f44bbee952a": { + "3068616e2e454125884c3361e9d95019": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "117ca44508454e2d8688ec994a1e4de0": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "HTMLModel", "state": { - "layout": "IPY_MODEL_d680ca0387114bec8b7d8db3fc69ce75", - "style": "IPY_MODEL_04a03a9173fb4bbfbc23410f3adce072", - "value": " 0.00/? [00:00<?, ?B/s]" + "layout": "IPY_MODEL_eef129ae4a464b9aa082df0c3f13eaad", + "style": "IPY_MODEL_1876178fca154473b30e125beeb2b8fe", + "value": " 7.77M/? 
[00:01<00:00, 7.96MB/s]" } }, - "1429f1fdbbe84d9b993b910e600516ff": { + "5a907a533bf44c68bcc5978db19a34f5": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", "state": { - "description_width": "" + "description_width": "", + "font_size": null, + "text_color": null } }, - "16de0fec1e8e428eb2d447371233bbea": { + "64886108403149bd8cb2b69b16e91794": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", "state": { - "flex": "2" + "display": "inline-flex", + "flex_flow": "row wrap", + "width": "100%" } }, - "1772512a86e547dabd6e04d4efd3c34e": { + "6939b84ef0c046b6aefe5511c58c20e6": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", + "model_module_version": "2.0.0", + "model_name": "HTMLStyleModel", "state": { - "layout": "IPY_MODEL_8c2d666a4f514e4ab9d27d1f75fa76d1", - "style": "IPY_MODEL_423d59f141b1415aae4239116e4080a0", - "value": "quicklooks/S2A_MSIL1C_20201226T105451_N0209_R051_T31TCK_20201226T130209: 100%" + "description_width": "", + "font_size": null, + "text_color": null } }, - "17ae13e6a8a6446db465bcc94c8bc39b": { + "70d76df867bf4026bbc93ce1b4d6cac9": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", "state": {} }, - "17b2ffad2f654ad58d6252635ac48dcb": { + "a5434e9264a7431fb8f89119d71677ee": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": { - "flex": "2" - } + "state": {} }, - "17bf04fa819b40219e9beadabb29b7c5": { + "a7a3065894874ab99725a3cc2117a5a9": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "HTMLModel", "state": { - "layout": "IPY_MODEL_ff22fadc6c5f45b0b0a3b5f66675972e", - "style": "IPY_MODEL_2f57d7b1ff074bf5b3b38f85935a8385", - "value": "quicklooks/S2A_MSIL1C_20201116T105331_N0209_R051_T31TCL_20201116T130215: 100%" + "layout": "IPY_MODEL_01cda13104c04e9987f5fc3fb3d12e3c", + "style": "IPY_MODEL_259ba31dfb6d443ab8daa1237188de5f", + "value": " 54.8M/? 
[00:07<00:00, 10.1MB/s]" } }, - "183a21f5ddfb4dc38c02884e5304b958": { + "b4db254a24894672b72b84ba384fed32": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", - "state": {} + "state": { + "flex": "2" + } }, - "18db9b4f07684178aa762e848300efdd": { + "c02892dee90c44f4865ab1c89d141d50": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", + "model_module_version": "2.0.0", + "model_name": "ProgressStyleModel", "state": { - "bar_style": "success", - "layout": "IPY_MODEL_e1af015f87604c1da38e9b239e2cb245", - "max": 31027, - "style": "IPY_MODEL_467f80b29a6649778f074cc9415d13fe", - "value": 31027 + "description_width": "" } }, - "196653d86ddb488690b2ac1c22e9ad62": { + "c0537c8d38804e2b927492e42b95d2c6": { "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", + "model_module_version": "2.0.0", "model_name": "LayoutModel", "state": { "display": "inline-flex", @@ -1350,1876 +1536,45 @@ "width": "100%" } }, - "19822930690b497893863b9e99f3a241": { + "c571bee4657d4dffa8b63e216c5a675e": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", + "model_module_version": "2.0.0", + "model_name": "HTMLModel", "state": { - "children": [ - "IPY_MODEL_03e06789065b4af2af8cbeebaf8fcd8d", - "IPY_MODEL_647afe974789478d80398973b73e85af", - "IPY_MODEL_d0a814b63c6142b9b719fe15eddbff08" - ], - "layout": "IPY_MODEL_e40f15bb04334d218372168c6cca289a" + "layout": "IPY_MODEL_a5434e9264a7431fb8f89119d71677ee", + "style": "IPY_MODEL_6939b84ef0c046b6aefe5511c58c20e6", + "value": "S2B_MSIL2A_20231130T221859_R115_T04VDM_20231130T231119: " } }, - "1aedf149d7024a77983ffa6f98273a77": { + "dbfd4d0a51d64b6b978aa1629e21b0f8": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", + "model_module_version": "2.0.0", + "model_name": "HBoxModel", "state": { - "description_width": "" + "children": [ + "IPY_MODEL_12bd52f10fcd4a1681b2c2ce9f0ab728", + "IPY_MODEL_f2662afd51ce4245a1d22f1bf9fa7840", + "IPY_MODEL_3068616e2e454125884c3361e9d95019" + ], + "layout": "IPY_MODEL_64886108403149bd8cb2b69b16e91794" } }, - "1b4dc95217cf4b4f8b2504330cb59737": { + "eef129ae4a464b9aa082df0c3f13eaad": { + "model_module": "@jupyter-widgets/base", + "model_module_version": "2.0.0", + "model_name": "LayoutModel", + "state": {} + }, + "f2662afd51ce4245a1d22f1bf9fa7840": { "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", + "model_module_version": "2.0.0", "model_name": "FloatProgressModel", "state": { "bar_style": "success", - "layout": "IPY_MODEL_7dca97d9546e42659b1c9c5d39c5c538", - "max": 59709, - "style": "IPY_MODEL_6721216acd9943cb8db0e9a90556e429", - "value": 59709 - } - }, - "1cef912b3ce946719ce39005deeaf6b1": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_469f00f2dabc488ba416390c34ffeec4", - "style": "IPY_MODEL_8b6e207b90d64b2e930880d7126fe7da", - "value": "Downloaded products: 100%" - } - }, - "1d069dea16364e8e90ee3d0e0c7545f3": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_69813a07faed41a193b488f17a39652a", - "style": "IPY_MODEL_00f74c9afcee41f781080736e4b1b2bb", - "value": 
"quicklooks/S2A_MSIL1C_20201119T110341_N0209_R094_T31TCL_20201119T131255: 100%" - } - }, - "1f42625a6b974229a7904fbc6dff7b90": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "1fa040c68d4c485aaa765e8b75b3b1fa": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_262cee5dc0714675aa4b96a10eec5d69", - "IPY_MODEL_cb510d4f2af346b49443e75d5594f645", - "IPY_MODEL_34e4abd13c064f37891a10e6c7478c83" - ], - "layout": "IPY_MODEL_633e812d87144aa2a1b9ad445d8bc19b" - } - }, - "219820c194b843e78067877129a0c3a1": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "21b2328036e946929a68e99d432251d9": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_07909752099f404eb16272abde2c49de", - "IPY_MODEL_9966a4cf14d4446ca4ede9c0ed788343", - "IPY_MODEL_568e21db9e87479697943fe919ec3013" - ], - "layout": "IPY_MODEL_6c7a04f5d93c4f928997b285d31f5a8f" - } - }, - "22d94bd1aec24f3da650e0cc29dac906": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_85508941196c49d29cce45b5331cbe56", - "style": "IPY_MODEL_c3a6159390414ae98dfa57c983bccc8b" - } - }, - "244bae2d8b5340cb8af3f6f39d9e92ce": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "24aada7c6196411988ccc56032f72f50": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "262cee5dc0714675aa4b96a10eec5d69": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_beb1dd42c7534718814f736fb88e27a9", - "style": "IPY_MODEL_5e6a11e680454c6a98c412df1a40cd13", - "value": "This will be displayed: 50%" - } - }, - "271171c6018d468ab8ed7b10aa40a590": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "291884d7015249aa8c319ffeebbab550": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_9a9113ea4e3746f7a271fe941a2d06d8", - "max": 2, - "style": "IPY_MODEL_c51adc32d6014f3a8364637c5db3b460", - "value": 2 - } - }, - "29af2523e95c4d5dbaa9adb76b492817": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "2a46ccd2f87649e6899a26d07aedcad8": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_1cef912b3ce946719ce39005deeaf6b1", - "IPY_MODEL_453fc9821906409fadeb299d4e019e9e", - "IPY_MODEL_5d3ccd320d2a4ec09478f4a9b6f2f641" - ], - "layout": "IPY_MODEL_6d4c95edeb814b6f889c64d2ffccb3f4" - } - }, - "2a8c51f87edd4f5390baccd3c3598b0b": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": 
"ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "2ac768193be742dcbb21d451eb315444": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "2aee8cada4644f5b999c901717bd8738": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_0e4a22a56c35441787891e03a4d82f84", - "max": 42572, - "style": "IPY_MODEL_653e24b71d3d413584c6fcfe53c6491d", - "value": 42572 - } - }, - "2ea68e5c5f9c43078d92d6c6f647af0e": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "2f57d7b1ff074bf5b3b38f85935a8385": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "305f43107aa14767acbf1dbb13346807": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "30f9dc5197754e75a8876bbb71d8f5e8": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "327873754896413ab5405ed2b7806929": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_17b2ffad2f654ad58d6252635ac48dcb", - "max": 74912, - "style": "IPY_MODEL_e29ee39192ff46f88283e71ff431a230", - "value": 74912 - } - }, - "32dafad1ddd3406d9bd5b63401f8c5da": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "34e4abd13c064f37891a10e6c7478c83": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_df40ca64039946c8a5b4ffe39368b108", - "style": "IPY_MODEL_39e9af7de34641838c6c8697ad250f33", - "value": " 1.00/2.00 [00:00<00:00, 21.0B/s]" - } - }, - "3796a6c56004488695fd34f1afb0e7a9": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "380cbc0470cf47cd821b014c1cd6217c": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "38edd85961d74c818defdcb64537bc96": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "3992b9075025477fb04563159c7c7bd6": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "39e9af7de34641838c6c8697ad250f33": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "3d99bda929054df2b820476f0c82d4e9": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": 
"HTMLModel", - "state": { - "layout": "IPY_MODEL_d8a77bdbda5040ee9622c50eda3cb028", - "style": "IPY_MODEL_bc05430de9944e8598b226b573283653", - "value": " 115/115 [00:03<00:00, 30.89file/s]" - } - }, - "3e868360b7374cc9989c323a0fd11dd2": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "3f4a4c1b6411425fac29c4cee5a60017": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_d1ad41f5d8c745daa87fa01040f39f81", - "style": "IPY_MODEL_a6b4a137291f4a47a1c7a60510cb93bd", - "value": " 52.9k/52.9k [00:00<00:00, 367kB/s]" - } - }, - "40d77e0163074d4888606cd4e0bfe001": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_1d069dea16364e8e90ee3d0e0c7545f3", - "IPY_MODEL_b5341318f45a401693961d59df314378", - "IPY_MODEL_b108c9a755cf45918f1c52794bc87a00" - ], - "layout": "IPY_MODEL_8b52c95044fb4166befc4129abf7d9f7" - } - }, - "423d59f141b1415aae4239116e4080a0": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "43136fd323c940dea9c1ae793e46ad18": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "43ff9d824e6d400b833c8b00b80e1abb": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "453fc9821906409fadeb299d4e019e9e": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_95f30579e5e048a6be63dac3dd30ef08", - "max": 2, - "style": "IPY_MODEL_fc3e5a300d8b4889930d2bb0e16d95c9", - "value": 2 - } - }, - "467f80b29a6649778f074cc9415d13fe": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "469f00f2dabc488ba416390c34ffeec4": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "4a1f7023a808454d8b773ba80d5af471": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_f3eb570e7b594a4f9e8d9ef35fe64cf8", - "style": "IPY_MODEL_923a18a82c8c4a89b96ab1efafa0066b", - "value": "S2A_MSIL1C_20201229T110451_N0209_R094_T31TCL_20201229T131620: " - } - }, - "4a85e182af6c407b824cc9d4858ae610": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "4aafddc26bc148e08383e043cc377f05": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_8aa1f40dda0542d9853e50f7aefd36fa", - "style": "IPY_MODEL_e1462a2b6de348cc8f33d8a89899638f", - "value": "quicklooks/S2A_MSIL1C_20201226T105451_N0209_R051_T31TCL_20201226T130209: 100%" - } - }, - "4cef95ea6f3c4b57966f14bfd05443ac": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - 
"4de63a52aaf648c884a6a64b3579f511": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "4e0c490a8eb74c86a4256f5969a965ae": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "537aff289e3e42af89557bd5604b555f": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "53e42af2060247eea722a9f51c31e1f2": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_3992b9075025477fb04563159c7c7bd6", - "style": "IPY_MODEL_1aedf149d7024a77983ffa6f98273a77", - "value": "S2A_MSIL1C_20201226T105451_N0209_R051_T31TCK_20201226T130209: " - } - }, - "568e21db9e87479697943fe919ec3013": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_d1b8e9aa118c434baea4ce3fed2fd41e", - "style": "IPY_MODEL_ed73c4698dc14453b87fc7969bf2dc2f", - "value": " 44.5k/44.5k [00:00<00:00, 624kB/s]" - } - }, - "5711172ea3ed4d1e85191a0239a9b591": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "57741e62b5c7426292ad04776bc517fd": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "5a8509581a6043e68f68fa66e43727e2": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "5d02707ed9dd4e04abe112d64a628b32": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "5d3ccd320d2a4ec09478f4a9b6f2f641": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_4de63a52aaf648c884a6a64b3579f511", - "style": "IPY_MODEL_1429f1fdbbe84d9b993b910e600516ff", - "value": " 2/2 [06:52<00:00, 218.54s/product]" - } - }, - "5e08ebacc2e547869cc52977b723cc0c": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "5e6a11e680454c6a98c412df1a40cd13": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "5e91084fed10434faa41c5f526a112c0": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "6057fe1d2b4f4a63ac381bdb00f9f3ac": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "6108654b209c4914b3def12ccd3ab861": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "61bdd92fb2334ac58ad23d70fade3968": { - 
"model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "633e812d87144aa2a1b9ad445d8bc19b": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "647afe974789478d80398973b73e85af": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_a243ca5f69584f4589b61bc9ae7cdaf8", - "max": 115, - "style": "IPY_MODEL_7b9e16093a8f4cb69422d655fd3fe4db", - "value": 115 - } - }, - "653e24b71d3d413584c6fcfe53c6491d": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "6721216acd9943cb8db0e9a90556e429": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "6946468fcf3d4f5db4445e83fe2c758b": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_4aafddc26bc148e08383e043cc377f05", - "IPY_MODEL_1b4dc95217cf4b4f8b2504330cb59737", - "IPY_MODEL_9d382c1b2f6e4cfbbcdce072a7c77b49" - ], - "layout": "IPY_MODEL_57741e62b5c7426292ad04776bc517fd" - } - }, - "69813a07faed41a193b488f17a39652a": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "69b6401afcfc4489b652038bb1e658c2": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "6a986a0d90e2472cbc303acfbf30e5e0": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_db7c123e3cd142e2b4813c6e3a960fa4", - "style": "IPY_MODEL_b53fb4232fe7496891ef2765c9328a8d", - "value": " 74.9k/74.9k [00:00<00:00, 602kB/s]" - } - }, - "6be28f31ff9745108093212ce6dedfdc": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_fb52dc0b1568450fa88389e4962955ab", - "IPY_MODEL_18db9b4f07684178aa762e848300efdd", - "IPY_MODEL_7e835bce4cb64430a1b7998a1fc9eb1d" - ], - "layout": "IPY_MODEL_196653d86ddb488690b2ac1c22e9ad62" - } - }, - "6c05528d741e41f5adb4a90353b5ef53": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_afe75eb3f8954c14aaa6f8bedcb255b0", - "max": 25159, - "style": "IPY_MODEL_3e868360b7374cc9989c323a0fd11dd2", - "value": 25159 - } - }, - "6c7a04f5d93c4f928997b285d31f5a8f": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "6cab70ca773d492f91e28b04cfaeacc0": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_93250f4992a447b882fa86981875f801", - "IPY_MODEL_dbaf5b346f674c0a858ef6a848bec165", - 
"IPY_MODEL_3d99bda929054df2b820476f0c82d4e9" - ], - "layout": "IPY_MODEL_e09230db12fc49c4aa9b1cc4132579ff" - } - }, - "6cb651aedc51425fbec47f27a6be9ae7": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "6d4c95edeb814b6f889c64d2ffccb3f4": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "6da46c359b144e90873b48f934030125": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "6eb037657e4e47cfa08d92ef1aff5b3d": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "6f46281c8a0f4de1a0c77d763556ee86": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_6108654b209c4914b3def12ccd3ab861", - "max": 1, - "style": "IPY_MODEL_2a8c51f87edd4f5390baccd3c3598b0b", - "value": 1 - } - }, - "70f39c06c0934426a3e978eadbe01ccf": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_4a1f7023a808454d8b773ba80d5af471", - "IPY_MODEL_dde500cb7b8242fab6e440a9396831d9", - "IPY_MODEL_e563fc133b25432bb2c2adf5fe702eb6" - ], - "layout": "IPY_MODEL_8373ec08012c4b8ca3656fd644eb51af" - } - }, - "759a7904c1764b55811cc6b9389af4ac": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "763143b8898945969c8effe0a64b09a4": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "79c9f30f27cf4ce59dbfd6586d89d0b7": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_16de0fec1e8e428eb2d447371233bbea", - "max": 34820, - "style": "IPY_MODEL_9db50f66074e4a06bc02552d9e17fc9a", - "value": 34820 - } - }, - "7ad860a453894c48bb11adee1c6b87a1": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_5e91084fed10434faa41c5f526a112c0", - "max": 69185, - "style": "IPY_MODEL_83a9ef2bfc0c48a7b920d0fe0274373f", - "value": 69185 - } - }, - "7b52af387bf645948ebdf25e04af668a": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "7b9e16093a8f4cb69422d655fd3fe4db": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "7dca97d9546e42659b1c9c5d39c5c538": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "7e835bce4cb64430a1b7998a1fc9eb1d": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": 
"IPY_MODEL_09ed254fab5948e788dbf109df540e47", - "style": "IPY_MODEL_5711172ea3ed4d1e85191a0239a9b591", - "value": " 31.0k/31.0k [00:00<00:00, 198kB/s]" - } - }, - "81f3d64165f14f66baf8a9e86386bf02": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "820742df10c54b21b2051bdb2c7a9bae": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "8373ec08012c4b8ca3656fd644eb51af": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "83a9ef2bfc0c48a7b920d0fe0274373f": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "83d277cb7bb04b97b12313b01f7ed2e6": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "8468c2a9b6114623aa155b2c59ceeaa5": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "85508941196c49d29cce45b5331cbe56": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "8942010860f947888d5965798ee45afc": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "89cd99569f774d3b825a5318beca64b7": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "8a61e9f8cd4d4d6592946cae74efbad0": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_f8d37b2d5b3e4173b2948c1733e079de", - "style": "IPY_MODEL_1f42625a6b974229a7904fbc6dff7b90", - "value": "quicklooks/S2B_MSIL1C_20201114T110319_N0209_R094_T31TCL_20201114T120840: 100%" - } - }, - "8aa1f40dda0542d9853e50f7aefd36fa": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "8b52c95044fb4166befc4129abf7d9f7": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "8b6e207b90d64b2e930880d7126fe7da": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "8c193e75fb464c47b1214ebd94ebbaff": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_bcc14a89d764416386ec6d59b5505a46", - "style": "IPY_MODEL_83d277cb7bb04b97b12313b01f7ed2e6", - "value": " 25.2k/25.2k [00:00<00:00, 1.56MB/s]" - } - }, - "8c2d666a4f514e4ab9d27d1f75fa76d1": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - 
"model_name": "LayoutModel", - "state": {} - }, - "8c93479c548e422f80ece4ab82230c60": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "8ddead9fd363442399080031ff27a935": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_53e42af2060247eea722a9f51c31e1f2", - "IPY_MODEL_ccb1fcd06dcc4d6db3f76f6de45d7a57", - "IPY_MODEL_117ca44508454e2d8688ec994a1e4de0" - ], - "layout": "IPY_MODEL_89cd99569f774d3b825a5318beca64b7" - } - }, - "902870969f25419dbcd664d9e8436096": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_4a85e182af6c407b824cc9d4858ae610", - "style": "IPY_MODEL_d78f00ee13ea477a8ab73eec4d28431c", - "value": "Extracting files from S2A_MSIL1C_20201229T110451_N0209_R094_T31TCL_20201229T131620.zip: 100%" - } - }, - "9229a49e42494e979d82b203ecf1791f": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "923a18a82c8c4a89b96ab1efafa0066b": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "93250f4992a447b882fa86981875f801": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_a9056f384e7d45b5b677e2de78c98dec", - "style": "IPY_MODEL_38edd85961d74c818defdcb64537bc96", - "value": "Extracting files from S2A_MSIL1C_20201116T105331_N0209_R051_T31TCK_20201116T130215.zip: 100%" - } - }, - "937e9ea300ab4447b78a13effec6c492": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "95b0991836ff42e3b094b13ffdfaffae": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_c574e7d53d034842b4f7a7ceee91d0fd", - "style": "IPY_MODEL_6057fe1d2b4f4a63ac381bdb00f9f3ac", - "value": " 2/2 [00:00<00:00, 16.06product/s]" - } - }, - "95f30579e5e048a6be63dac3dd30ef08": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "9966a4cf14d4446ca4ede9c0ed788343": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_9c34f40fb5a947f0be3bf9b7346a8a56", - "max": 44538, - "style": "IPY_MODEL_d8f1ca3388a24da1b11ca6c3a92d4b5b", - "value": 44538 - } - }, - "9a5ba9ed24b846cb9cc1c0a0421ba9a0": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "9a9113ea4e3746f7a271fe941a2d06d8": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "9b57cb5be33146a2bd4000e377f3883c": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_ac4ba49caaae4f1d8374c4408e57cd72", - 
"style": "IPY_MODEL_820742df10c54b21b2051bdb2c7a9bae", - "value": " 69.2k/69.2k [00:00<00:00, 497kB/s]" - } - }, - "9bf4a38a541242fea78eefc7fa72de07": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "9c02540b8038464094375e39f05d6369": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "9c34f40fb5a947f0be3bf9b7346a8a56": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "9c52e67acb594733928a16672fb66c3a": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "bar_color": "purple", - "description_width": "" - } - }, - "9d382c1b2f6e4cfbbcdce072a7c77b49": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_24aada7c6196411988ccc56032f72f50", - "style": "IPY_MODEL_43136fd323c940dea9c1ae793e46ad18", - "value": " 59.7k/59.7k [00:00<00:00, 441kB/s]" - } - }, - "9db50f66074e4a06bc02552d9e17fc9a": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "9db842c9e7d54eb1b85831fa450aa367": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_43ff9d824e6d400b833c8b00b80e1abb", - "style": "IPY_MODEL_b5d1a593f9934df88a65e374560a4d4e", - "value": " 115/115 [00:02<00:00, 57.15file/s]" - } - }, - "9f26eea696724aedb1b4ca0594efc65c": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "a1336165c66b4c2aa434c1a3fec3f64c": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "a22e380414424faa81abc918334decf4": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "a243ca5f69584f4589b61bc9ae7cdaf8": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "a36cc750caed485da743a68e269f73e8": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "a626306a00ca4710a9044d2f697fcae3": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "a6b4a137291f4a47a1c7a60510cb93bd": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "a9056f384e7d45b5b677e2de78c98dec": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "aafa0a3dbe8b415ebafbaed2c8947a3b": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "ac4ba49caaae4f1d8374c4408e57cd72": { - 
"model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "ae684630e6744654b7e470539dde03b2": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "afe75eb3f8954c14aaa6f8bedcb255b0": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "b108c9a755cf45918f1c52794bc87a00": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_3796a6c56004488695fd34f1afb0e7a9", - "style": "IPY_MODEL_ceba04b837714aeaac109a633fbe3277", - "value": " 26.2k/26.2k [00:00<00:00, 609kB/s]" - } - }, - "b17b293c653148ffafdcf1cca9b8c820": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_b535ae02f7274b7d98f1e000a952b6c2", - "IPY_MODEL_79c9f30f27cf4ce59dbfd6586d89d0b7", - "IPY_MODEL_0655c8b70af64a26b25bf7326b78d82e" - ], - "layout": "IPY_MODEL_feff37ac49ab455db2692829d288f2bd" - } - }, - "b453a291ecb6475796dacd159268dbb8": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "b5341318f45a401693961d59df314378": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_a22e380414424faa81abc918334decf4", - "max": 26177, - "style": "IPY_MODEL_2ea68e5c5f9c43078d92d6c6f647af0e", - "value": 26177 - } - }, - "b535ae02f7274b7d98f1e000a952b6c2": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_69b6401afcfc4489b652038bb1e658c2", - "style": "IPY_MODEL_bd31817f2f5d46fe93f9d0c65408d4df", - "value": "quicklooks/S2B_MSIL1C_20201231T105349_N0209_R051_T31TCL_20201231T120402: 100%" - } - }, - "b53fb4232fe7496891ef2765c9328a8d": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "b5d1a593f9934df88a65e374560a4d4e": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "b80a0b834b0141b6ad4d0b107e2711ce": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "bb193001221f4c609dfe808ac234ed48": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "bc05430de9944e8598b226b573283653": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "bcc14a89d764416386ec6d59b5505a46": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "bd31817f2f5d46fe93f9d0c65408d4df": { - "model_module": "@jupyter-widgets/controls", - 
"model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "beb1dd42c7534718814f736fb88e27a9": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "bff2e9acb0f14a2b9b861637fcabca23": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "c3a6159390414ae98dfa57c983bccc8b": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "c4d2e6939a674cc6893de18514a4e708": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "c51adc32d6014f3a8364637c5db3b460": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "c574e7d53d034842b4f7a7ceee91d0fd": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "c7f9c5c92ea0496481f279da544b6d0b": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "c84044d9182c4a079b5e6544984f275e": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_183a21f5ddfb4dc38c02884e5304b958", - "style": "IPY_MODEL_2ac768193be742dcbb21d451eb315444", - "value": " 42.6k/42.6k [00:00<00:00, 269kB/s]" - } - }, - "c8c953e4721f4c5ba4c5b887a0677040": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_8468c2a9b6114623aa155b2c59ceeaa5", - "max": 115, - "style": "IPY_MODEL_32dafad1ddd3406d9bd5b63401f8c5da", - "value": 115 - } - }, - "c94db8bcfcec4543aacf46bb9bb8b2b8": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_f71b8ecf5faa40598267a71a222b731e", - "IPY_MODEL_6f46281c8a0f4de1a0c77d763556ee86", - "IPY_MODEL_d9e1075a05034b2daf5ae4af1022aa94" - ], - "layout": "IPY_MODEL_c7f9c5c92ea0496481f279da544b6d0b" - } - }, - "cb510d4f2af346b49443e75d5594f645": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "danger", - "layout": "IPY_MODEL_7b52af387bf645948ebdf25e04af668a", - "max": 2, - "style": "IPY_MODEL_9a5ba9ed24b846cb9cc1c0a0421ba9a0", - "value": 1 - } - }, - "cbc05aeb9102482fb012e888d154cdab": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "ccb1fcd06dcc4d6db3f76f6de45d7a57": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_9f26eea696724aedb1b4ca0594efc65c", - "max": 1, - "style": "IPY_MODEL_b453a291ecb6475796dacd159268dbb8" - } - }, - "ccf12fa726af4e32abfd15d20fb1e0b1": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", 
- "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_902870969f25419dbcd664d9e8436096", - "IPY_MODEL_c8c953e4721f4c5ba4c5b887a0677040", - "IPY_MODEL_9db842c9e7d54eb1b85831fa450aa367" - ], - "layout": "IPY_MODEL_305f43107aa14767acbf1dbb13346807" - } - }, - "ceba04b837714aeaac109a633fbe3277": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "d09a2f4728cc4550b1a54f8805d9b599": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_759a7904c1764b55811cc6b9389af4ac", - "style": "IPY_MODEL_1030ffa146c54cafb45d8f44bbee952a", - "value": " 1.00/? [00:01<00:00, 1.37s/B]" - } - }, - "d0a814b63c6142b9b719fe15eddbff08": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_a36cc750caed485da743a68e269f73e8", - "style": "IPY_MODEL_bff2e9acb0f14a2b9b861637fcabca23", - "value": " 115/115 [00:03<00:00, 31.10file/s]" - } - }, - "d1ad41f5d8c745daa87fa01040f39f81": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "d1b8e9aa118c434baea4ce3fed2fd41e": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "d644b0b5219743c38fbc79ec5f2a413e": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "d680ca0387114bec8b7d8db3fc69ce75": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "d712e591f1c841d29e07865dfe94bc05": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_fd45fe078ae142b7becc36f21ab7b455", - "IPY_MODEL_291884d7015249aa8c319ffeebbab550", - "IPY_MODEL_95b0991836ff42e3b094b13ffdfaffae" - ], - "layout": "IPY_MODEL_8c93479c548e422f80ece4ab82230c60" - } - }, - "d78f00ee13ea477a8ab73eec4d28431c": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "d7d98c90410e405fb06ca685a4e110d7": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "d8a77bdbda5040ee9622c50eda3cb028": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "d8f1ca3388a24da1b11ca6c3a92d4b5b": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "d9e1075a05034b2daf5ae4af1022aa94": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_29af2523e95c4d5dbaa9adb76b492817", - "style": "IPY_MODEL_271171c6018d468ab8ed7b10aa40a590", - "value": " 1.00/? 
[00:00<00:00, 24.9B/s]" - } - }, - "db7c123e3cd142e2b4813c6e3a960fa4": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "db9a46b90c7640ee906189a5981cde64": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_8a61e9f8cd4d4d6592946cae74efbad0", - "IPY_MODEL_6c05528d741e41f5adb4a90353b5ef53", - "IPY_MODEL_8c193e75fb464c47b1214ebd94ebbaff" - ], - "layout": "IPY_MODEL_8942010860f947888d5965798ee45afc" - } - }, - "dbaf5b346f674c0a858ef6a848bec165": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_6cb651aedc51425fbec47f27a6be9ae7", - "max": 115, - "style": "IPY_MODEL_937e9ea300ab4447b78a13effec6c492", - "value": 115 - } - }, - "dde500cb7b8242fab6e440a9396831d9": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_a1336165c66b4c2aa434c1a3fec3f64c", - "max": 1, - "style": "IPY_MODEL_6da46c359b144e90873b48f934030125" - } - }, - "df40ca64039946c8a5b4ffe39368b108": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "e09230db12fc49c4aa9b1cc4132579ff": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "e1310103b37240b3b96f00504aed1fb6": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_0ff348ae46564d968020db92ca112bda", - "IPY_MODEL_2aee8cada4644f5b999c901717bd8738", - "IPY_MODEL_c84044d9182c4a079b5e6544984f275e" - ], - "layout": "IPY_MODEL_30f9dc5197754e75a8876bbb71d8f5e8" - } - }, - "e1462a2b6de348cc8f33d8a89899638f": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "e1462a75fc5746f8b2665fb669861945": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_17bf04fa819b40219e9beadabb29b7c5", - "IPY_MODEL_327873754896413ab5405ed2b7806929", - "IPY_MODEL_6a986a0d90e2472cbc303acfbf30e5e0" - ], - "layout": "IPY_MODEL_61bdd92fb2334ac58ad23d70fade3968" - } - }, - "e1af015f87604c1da38e9b239e2cb245": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "flex": "2" - } - }, - "e29ee39192ff46f88283e71ff431a230": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "e40f15bb04334d218372168c6cca289a": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "e563fc133b25432bb2c2adf5fe702eb6": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_aafa0a3dbe8b415ebafbaed2c8947a3b", - 
"style": "IPY_MODEL_01fe9acbf32c4e9f85fe4808dc168a7a", - "value": " 0.00/? [00:00<?, ?B/s]" - } - }, - "e566de2185874613b49505c7bfa726d5": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_d644b0b5219743c38fbc79ec5f2a413e", - "max": 52886, - "style": "IPY_MODEL_b80a0b834b0141b6ad4d0b107e2711ce", - "value": 52886 - } - }, - "e6f0b799d36e46a6bb529a17984ef7a1": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_1772512a86e547dabd6e04d4efd3c34e", - "IPY_MODEL_e566de2185874613b49505c7bfa726d5", - "IPY_MODEL_3f4a4c1b6411425fac29c4cee5a60017" - ], - "layout": "IPY_MODEL_4e0c490a8eb74c86a4256f5969a965ae" - } - }, - "e79bd3627dc54cf8ad30dc99ed906dd4": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "info", - "layout": "IPY_MODEL_a626306a00ca4710a9044d2f697fcae3", - "max": 1, - "style": "IPY_MODEL_9c52e67acb594733928a16672fb66c3a", - "value": 1 - } - }, - "ebe893f80b9d47819bf914a4ba064a53": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_02f22b40afa244f58e3af27e9fc77886", - "style": "IPY_MODEL_81f3d64165f14f66baf8a9e86386bf02", - "value": " 3/3 [00:00<00:00, 69.06carrots/s]" - } - }, - "ed73c4698dc14453b87fc7969bf2dc2f": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "DescriptionStyleModel", - "state": { - "description_width": "" - } - }, - "ef0b8dcba1224b96909517a948065519": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "f07dea83351848c990a11322e82a5478": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "FloatProgressModel", - "state": { - "bar_style": "success", - "layout": "IPY_MODEL_537aff289e3e42af89557bd5604b555f", - "max": 3, - "style": "IPY_MODEL_6eb037657e4e47cfa08d92ef1aff5b3d", - "value": 3 - } - }, - "f1fbb8e667ea4f85a22303514788c234": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_ff7272062b0d44f6a7f6481d2164f25e", - "IPY_MODEL_f07dea83351848c990a11322e82a5478", - "IPY_MODEL_ebe893f80b9d47819bf914a4ba064a53" - ], - "layout": "IPY_MODEL_ae684630e6744654b7e470539dde03b2" - } - }, - "f2523b74e59e4205b0ec0ebd2e765940": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_c4d2e6939a674cc6893de18514a4e708", - "style": "IPY_MODEL_9bf4a38a541242fea78eefc7fa72de07", - "value": "quicklooks/S2A_MSIL1C_20201116T105331_N0209_R051_T31TCK_20201116T130215: 100%" - } - }, - "f3eb570e7b594a4f9e8d9ef35fe64cf8": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "f71b8ecf5faa40598267a71a222b731e": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_cbc05aeb9102482fb012e888d154cdab", - "style": "IPY_MODEL_244bae2d8b5340cb8af3f6f39d9e92ce", - "value": "This will be also displayed: " - } - }, - 
"f8d37b2d5b3e4173b2948c1733e079de": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "f9006c9cb3914908a0822081edcf9d3d": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HBoxModel", - "state": { - "children": [ - "IPY_MODEL_f2523b74e59e4205b0ec0ebd2e765940", - "IPY_MODEL_7ad860a453894c48bb11adee1c6b87a1", - "IPY_MODEL_9b57cb5be33146a2bd4000e377f3883c" - ], - "layout": "IPY_MODEL_9c02540b8038464094375e39f05d6369" - } - }, - "fb52dc0b1568450fa88389e4962955ab": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_ef0b8dcba1224b96909517a948065519", - "style": "IPY_MODEL_219820c194b843e78067877129a0c3a1", - "value": "quicklooks/S2B_MSIL1C_20201231T105349_N0209_R051_T31TCK_20201231T120402: 100%" - } - }, - "fb659e31419940758f6495224b0c556b": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "300px" - } - }, - "fc3e5a300d8b4889930d2bb0e16d95c9": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "ProgressStyleModel", - "state": { - "description_width": "" - } - }, - "fd45fe078ae142b7becc36f21ab7b455": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_0d2e58c337ba419a8f57cba7ee7edaaa", - "style": "IPY_MODEL_5d02707ed9dd4e04abe112d64a628b32", - "value": "Downloaded products: 100%" - } - }, - "feff37ac49ab455db2692829d288f2bd": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": { - "display": "inline-flex", - "flex_flow": "row wrap", - "width": "100%" - } - }, - "ff22fadc6c5f45b0b0a3b5f66675972e": { - "model_module": "@jupyter-widgets/base", - "model_module_version": "1.2.0", - "model_name": "LayoutModel", - "state": {} - }, - "ff7272062b0d44f6a7f6481d2164f25e": { - "model_module": "@jupyter-widgets/controls", - "model_module_version": "1.5.0", - "model_name": "HTMLModel", - "state": { - "layout": "IPY_MODEL_9229a49e42494e979d82b203ecf1791f", - "style": "IPY_MODEL_380cbc0470cf47cd821b014c1cd6217c", - "value": "Eating carrots: 100%" + "layout": "IPY_MODEL_0e3c2a8e89fe4af4b6fcb07db3036276", + "max": 3887409, + "style": "IPY_MODEL_229601dc5e0743d8b13e0f5d8f7548c4", + "value": 3887409 } } }, diff --git a/docs/notebooks/tutos/tuto_wekeo.ipynb b/docs/notebooks/tutos/tuto_wekeo.ipynb index 0767374b1..104353047 100644 --- a/docs/notebooks/tutos/tuto_wekeo.ipynb +++ b/docs/notebooks/tutos/tuto_wekeo.ipynb @@ -36,7 +36,7 @@ "# Use your authentication to accept Copernicus Terms and Conditions\n", "# (needed only once)\n", "response = requests.put(\n", - " \"https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/termsaccepted/Copernicus_General_License\",\n", + " \"https://wekeo-broker.prod.wekeo2.eu/databroker/termsaccepted/Copernicus_General_License\",\n", " data={\"accepted\": \"true\"},\n", " auth=auth,\n", ")" diff --git a/docs/stac_rest.rst b/docs/stac_rest.rst index f7cd8c9b7..4a66fda39 100644 --- a/docs/stac_rest.rst +++ b/docs/stac_rest.rst @@ -120,7 +120,7 @@ available on `https://hub.docker.com/r/csspace/eodag-server None: product_types_config_path = resource_filename( "eodag", os.path.join("resources/", 
"product_types.yml") ) @@ -98,8 +131,27 @@ def __init__(self, user_conf_file_path=None, locations_conf_path=None): self.product_types_config_md5 = obj_md5sum(self.product_types_config.source) self.providers_config = load_default_config() - self.conf_dir = os.path.join(os.path.expanduser("~"), ".config", "eodag") - makedirs(self.conf_dir) + env_var_cfg_dir = "EODAG_CFG_DIR" + self.conf_dir = os.getenv( + env_var_cfg_dir, + default=os.path.join(os.path.expanduser("~"), ".config", "eodag"), + ) + try: + makedirs(self.conf_dir) + except OSError as e: + logger.debug(e) + tmp_conf_dir = os.path.join(tempfile.gettempdir(), ".config", "eodag") + logger.warning( + f"Cannot create configuration directory {self.conf_dir}. " + + f"Falling back to temporary directory {tmp_conf_dir}." + ) + if os.getenv(env_var_cfg_dir) is None: + logger.warning( + "You can set the path of the configuration directory " + + f"with the environment variable {env_var_cfg_dir}" + ) + self.conf_dir = tmp_conf_dir + makedirs(self.conf_dir) self._plugins_manager = PluginManager(self.providers_config) # use updated providers_config @@ -133,7 +185,7 @@ def __init__(self, user_conf_file_path=None, locations_conf_path=None): self._plugins_manager.rebuild(self.providers_config) # store pruned providers configs - self._pruned_providers_config = {} + self._pruned_providers_config: Dict[str, Any] = {} # filter out providers needing auth that have no credentials set self._prune_providers_list() @@ -141,7 +193,7 @@ def __init__(self, user_conf_file_path=None, locations_conf_path=None): self._plugins_manager.sort_providers() # Build a search index for product types - self._product_types_index = None + self._product_types_index: Optional[Index] = None self.build_index() # set locations configuration @@ -173,12 +225,13 @@ def __init__(self, user_conf_file_path=None, locations_conf_path=None): os.path.join(self.conf_dir, "shp"), ) self.set_locations_conf(locations_conf_path) + self.search_errors: Set = set() - def get_version(self): + def get_version(self) -> str: """Get eodag package version""" return pkg_resources.get_distribution("eodag").version - def build_index(self): + def build_index(self) -> None: """Build a `Whoosh `_ index for product types searches. """ @@ -232,6 +285,7 @@ def build_index(self): product_types_schema = Schema( ID=fields.STORED, + alias=fields.ID, abstract=fields.STORED, instrument=fields.IDLIST, platform=fields.ID, @@ -245,7 +299,6 @@ def build_index(self): missionEndDate=fields.ID, keywords=fields.KEYWORD(analyzer=kw_analyzer), ) - non_indexable_fields = [] self._product_types_index = create_in(index_dir, product_types_schema) ix_writer = self._product_types_index.writer() for product_type in self.list_product_types(fetch_providers=False): @@ -257,12 +310,12 @@ def build_index(self): **{ k: v for k, v in versioned_product_type.items() - if k not in non_indexable_fields + if k in product_types_schema.names() } ) ix_writer.commit() - def set_preferred_provider(self, provider): + def set_preferred_provider(self, provider: str) -> None: """Set max priority for the given provider. :param provider: The name of the provider that should be considered as the @@ -278,7 +331,7 @@ def set_preferred_provider(self, provider): new_priority = max_priority + 1 self._plugins_manager.set_priority(provider, new_priority) - def get_preferred_provider(self): + def get_preferred_provider(self) -> Tuple[str, int]: """Get the provider currently set as the preferred one for searching products, along with its priority. 
@@ -292,7 +345,7 @@ def get_preferred_provider(self):
         preferred, priority = max(providers_with_priority, key=itemgetter(1))
         return preferred, priority

-    def update_providers_config(self, yaml_conf):
+    def update_providers_config(self, yaml_conf: str) -> None:
         """Update providers configuration with given input. Can be used to add a provider
         to existing configuration or update an existing one.
@@ -354,7 +407,7 @@ def update_providers_config(self, yaml_conf):
         # re-create _plugins_manager using up-to-date providers_config
         self._plugins_manager.build_product_type_to_provider_config_map()

-    def _prune_providers_list(self):
+    def _prune_providers_list(self) -> None:
         """Removes from config providers needing auth that have no credentials set."""
         update_needed = False
         for provider in list(self.providers_config.keys()):
@@ -420,7 +473,7 @@ def _prune_providers_list(self):
         # rebuild _plugins_manager with updated providers list
         self._plugins_manager.rebuild(self.providers_config)

-    def set_locations_conf(self, locations_conf_path):
+    def set_locations_conf(self, locations_conf_path: str) -> None:
         """Set locations configuration. This configuration (YML format) will contain
         a shapefile list associated to a name and attribute parameters needed to
         identify the needed geometry.
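For context, `update_providers_config` takes a YAML snippet; a hedged sketch of
how it is typically called (the provider name and all values below are
illustrative, not part of the patch):

dag = EODataAccessGateway()
dag.update_providers_config(
    """
    my_provider:                  # hypothetical provider entry
        search:
            type: StacSearch
            api_endpoint: https://example.org/search
        products:
            GENERIC_PRODUCT_TYPE:
                productType: '{productType}'
    """
)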
@@ -451,14 +504,16 @@ def set_locations_conf(self, locations_conf_path):
                 locations_config = locations_config[main_key]

             logger.info("Locations configuration loaded from %s" % locations_conf_path)
-            self.locations_config = locations_config
+            self.locations_config: List[Dict[str, Any]] = locations_config
         else:
             logger.info(
                 "Could not load locations configuration from %s" % locations_conf_path
             )
             self.locations_config = []

-    def list_product_types(self, provider=None, fetch_providers=True):
+    def list_product_types(
+        self, provider: Optional[str] = None, fetch_providers: bool = True
+    ) -> List[Dict[str, Any]]:
         """Lists supported product types.

         :param provider: (optional) The name of a provider that must support the product
@@ -475,16 +530,18 @@ def list_product_types(self, provider=None, fetch_providers=True):
         # First, update product types list if possible
         self.fetch_product_types_list(provider=provider)

-        product_types = []
+        product_types: List[Dict[str, Any]] = []
         if provider is not None:
             if provider in self.providers_config:
                 provider_supported_products = self.providers_config[provider].products
                 for product_type_id in provider_supported_products:
                     if product_type_id == GENERIC_PRODUCT_TYPE:
                         continue
-                    product_type = dict(
-                        ID=product_type_id, **self.product_types_config[product_type_id]
-                    )
+                    config = self.product_types_config[product_type_id]
+                    if "alias" in config:
+                        config["_id"] = product_type_id
+                        product_type_id = config["alias"]
+                    product_type = dict(ID=product_type_id, **config)
                     if product_type_id not in product_types:
                         product_types.append(product_type)
             return sorted(product_types, key=itemgetter("ID"))
@@ -506,7 +563,7 @@
         # Return the product_types sorted in lexicographic order of their ID
         return sorted(product_types, key=itemgetter("ID"))

-    def fetch_product_types_list(self, provider=None):
+    def fetch_product_types_list(self, provider: Optional[str] = None) -> None:
         """Fetch product types list and update if needed

         :param provider: (optional) The name of a provider for which product types list
@@ -517,7 +574,7 @@
             return

         # providers discovery confs that are fetchable
-        providers_discovery_configs_fetchable = {}
+        providers_discovery_configs_fetchable: Dict[str, Any] = {}
         # check if any provider has not already been fetched for product types
         already_fetched = True
         for provider_to_fetch, provider_config in (
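The alias handling above means a product type declared with an `alias` is
listed under that alias while its internal name is kept in `_id`. A rough
sketch of the effect (the product-type names in the comment are illustrative):

dag = EODataAccessGateway()
for pt in dag.list_product_types(fetch_providers=False):
    # e.g. {"ID": "COP_DEM_GLO30", "_id": "COP_DEM_GLO-30-DGED", ...} when an
    # alias is configured; plain entries keep ID equal to the internal name.
    print(pt["ID"], pt.get("_id", pt["ID"]))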
@@ -642,7 +699,9 @@
         # update eodag product types list with new conf
         self.update_product_types_list(provider_ext_product_types_conf)

-    def discover_product_types(self, provider=None):
+    def discover_product_types(
+        self, provider: Optional[str] = None
+    ) -> Optional[Dict[str, Any]]:
         """Fetch providers for product types

         :param provider: (optional) The name of a provider to fetch. Defaults to all
@@ -651,7 +710,7 @@
         :returns: external product types configuration
         :rtype: dict
         """
-        ext_product_types_conf = {}
+        ext_product_types_conf: Dict[str, Any] = {}
         providers_to_fetch = [
             p
             for p in (
@@ -668,9 +727,9 @@
             elif hasattr(self.providers_config[provider], "api"):
                 search_plugin_config = self.providers_config[provider].api
             else:
-                return
+                return None
             if getattr(search_plugin_config, "discover_product_types", None):
-                search_plugin = next(
+                search_plugin: Union[Search, Api] = next(
                     self._plugins_manager.get_search_plugins(provider=provider)
                 )
                 # append auth to search plugin if needed
@@ -700,7 +759,9 @@
         return ext_product_types_conf

-    def update_product_types_list(self, ext_product_types_conf):
+    def update_product_types_list(
+        self, ext_product_types_conf: Dict[str, Optional[Dict[str, Dict[str, Any]]]]
+    ) -> None:
         """Update eodag product types list

         :param ext_product_types_conf: external product types configuration
@@ -709,23 +770,24 @@
         for provider, new_product_types_conf in ext_product_types_conf.items():
             if new_product_types_conf and provider in self.providers_config:
                 try:
-                    if hasattr(self.providers_config[provider], "search"):
-                        search_plugin_config = self.providers_config[provider].search
-                    elif hasattr(self.providers_config[provider], "api"):
-                        search_plugin_config = self.providers_config[provider].api
-                    else:
+                    search_plugin_config = getattr(
+                        self.providers_config[provider], "search", None
+                    ) or getattr(self.providers_config[provider], "api", None)
+                    if search_plugin_config is None:
                         continue
                     if not hasattr(search_plugin_config, "discover_product_types"):
                         # conf has been updated and provider product types are no more discoverable
                         continue
-                    provider_products_config = self.providers_config[provider].products
+                    provider_products_config = (
+                        self.providers_config[provider].products or {}
+                    )
                 except UnsupportedProvider:
                     logger.debug(
                         "Ignoring external product types for unknown provider %s",
                         provider,
                     )
                     continue
-                new_product_types = []
+                new_product_types: List[str] = []
                 for (
                     new_product_type,
                     new_product_type_conf,
@@ -786,7 +848,7 @@
         # rebuild index after product types list update
         self.build_index()

-    def available_providers(self, product_type=None):
+    def available_providers(self, product_type: Optional[str] = None) -> List[str]:
         """Gives the sorted list of the available providers

         :param product_type: (optional) Only list providers configured for this product_type
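Taken together, the discovery methods above can be chained; a hedged sketch
(the provider name is illustrative, and `discover_product_types` may return
None):

dag = EODataAccessGateway()
ext_conf = dag.discover_product_types(provider="earth_search")
if ext_conf is not None:
    # merge the externally discovered product types into eodag's own list
    dag.update_product_types_list(ext_conf)
# providers supporting a given product type, sorted
print(dag.available_providers("S2_MSI_L1C"))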
@@ -804,8 +866,52 @@
         else:
             return sorted(tuple(self.providers_config.keys()))

-    def guess_product_type(self, **kwargs):
-        """Find the eodag product type code that best matches a set of search params
+    def get_product_type_from_alias(self, alias_or_id: str) -> str:
+        """Return the ID of a product type by either its ID or alias
+
+        :param alias_or_id: Alias of the product type. If an existing ID is given, this
+                            method will directly return the given value.
+        :type alias_or_id: str
+        :returns: Internal name of the product type.
+        :rtype: str
+        """
+        product_types = [
+            k
+            for k, v in self.product_types_config.items()
+            if v.get("alias", None) == alias_or_id
+        ]
+
+        if len(product_types) > 1:
+            raise NoMatchingProductType(
+                f"Too many matching product types for alias {alias_or_id}: {product_types}"
+            )
+
+        if len(product_types) == 0:
+            if alias_or_id in self.product_types_config:
+                return alias_or_id
+            else:
+                raise NoMatchingProductType(
+                    f"Could not find product type from alias or ID {alias_or_id}"
+                )
+
+        return product_types[0]
+
+    def get_alias_from_product_type(self, product_type: str) -> str:
+        """Return the alias of a product type by its ID. If no alias was defined for the
+        given product type, its ID is returned instead.
+
+        :param product_type: product type ID
+        :type product_type: str
+        :returns: Alias of the product type or its ID if no alias has been defined for it.
+        :rtype: str
+        """
+        if product_type not in self.product_types_config:
+            raise NoMatchingProductType(product_type)
+
+        return self.product_types_config[product_type].get("alias", product_type)
+
+    def guess_product_type(self, **kwargs: Any) -> List[str]:
+        """Find eodag product types codes that best match a set of search params

         :param kwargs: A set of search parameters as keywords arguments
         :returns: The best match for the given parameters
@@ -842,23 +948,23 @@
                     results = searcher.search(query, limit=None)
                 else:
                     results.upgrade_and_extend(searcher.search(query, limit=None))
-            guesses = [r["ID"] for r in results or []]
+            guesses: List[str] = [r["ID"] for r in results or []]
         if guesses:
             return guesses
         raise NoMatchingProductType()

     def search(
         self,
-        page=DEFAULT_PAGE,
-        items_per_page=DEFAULT_ITEMS_PER_PAGE,
-        raise_errors=False,
-        start=None,
-        end=None,
-        geom=None,
-        locations=None,
-        provider=None,
-        **kwargs,
-    ):
+        page: int = DEFAULT_PAGE,
+        items_per_page: int = DEFAULT_ITEMS_PER_PAGE,
+        raise_errors: bool = False,
+        start: Optional[str] = None,
+        end: Optional[str] = None,
+        geom: Optional[Union[str, Dict[str, float], BaseGeometry]] = None,
+        locations: Optional[Dict[str, str]] = None,
+        provider: Optional[str] = None,
+        **kwargs: Any,
+    ) -> Tuple[SearchResult, int]:
         """Look for products matching criteria on known providers.

         The default behaviour is to look for products on the provider with the
@@ -930,14 +1036,20 @@
             search_kwargs.update(
                 page=1,
                 items_per_page=2,
+                raise_errors=raise_errors,
             )
             return self._search_by_id(
                 search_kwargs.pop("id"), provider=provider, **search_kwargs
             )
+        # remove datacube query string from kwargs which was only needed for search-by-id
+        search_kwargs.pop("_dc_qs", None)
+
         search_kwargs.update(
             page=page,
             items_per_page=items_per_page,
         )
+
+        self.search_errors = set()
         # Loop over available providers and return the first non-empty results
         for i, search_plugin in enumerate(search_plugins):
             search_plugin.clear()
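A short sketch of the two new alias helpers (illustration only; the alias
value below is hypothetical):

dag = EODataAccessGateway()
# resolve an alias (or an already-internal ID) to the internal product type ID
internal_id = dag.get_product_type_from_alias("S2_L1C")  # hypothetical alias
# and back: the alias if one is configured, otherwise the ID itself
alias = dag.get_alias_from_product_type(internal_id)
# both raise NoMatchingProductType for unknown names or ambiguous aliases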
@@ -960,13 +1072,13 @@
     def search_iter_page(
         self,
-        items_per_page=DEFAULT_ITEMS_PER_PAGE,
-        start=None,
-        end=None,
-        geom=None,
-        locations=None,
-        **kwargs,
-    ):
+        items_per_page: int = DEFAULT_ITEMS_PER_PAGE,
+        start: Optional[str] = None,
+        end: Optional[str] = None,
+        geom: Optional[Union[str, Dict[str, float], BaseGeometry]] = None,
+        locations: Optional[Dict[str, str]] = None,
+        **kwargs: Any,
+    ) -> Iterator[SearchResult]:
         """Iterate over the pages of a products search.

         :param items_per_page: (optional) The number of results requested per page
@@ -1021,13 +1133,17 @@
                 )
             else:
                 logger.error(
-                    "No result could be obtained from any available " "provider"
+                    "No result could be obtained from any available provider"
                 )
                 raise
+        raise RequestError("No result could be obtained from any available provider")

     def search_iter_page_plugin(
-        self, items_per_page=DEFAULT_ITEMS_PER_PAGE, search_plugin=None, **kwargs
-    ):
+        self,
+        search_plugin: Union[Search, Api],
+        items_per_page: int = DEFAULT_ITEMS_PER_PAGE,
+        **kwargs: Any,
+    ) -> Iterator[SearchResult]:
         """Iterate over the pages of a products search using a given search plugin.

         :param items_per_page: (optional) The number of results requested per page
@@ -1143,13 +1259,13 @@
     def search_all(
         self,
-        items_per_page=None,
-        start=None,
-        end=None,
-        geom=None,
-        locations=None,
-        **kwargs,
-    ):
+        items_per_page: Optional[int] = None,
+        start: Optional[str] = None,
+        end: Optional[str] = None,
+        geom: Optional[Union[str, Dict[str, float], BaseGeometry]] = None,
+        locations: Optional[Dict[str, str]] = None,
+        **kwargs: Any,
+    ) -> SearchResult:
         """Search and return all the products matching the search criteria.

         It iterates over the pages of a search query and collects all the returned
@@ -1202,7 +1318,7 @@
         # Get the search plugin and the maximized value
         # of items_per_page if defined for the provider used.
         try:
-            product_type = (
+            product_type = self.get_product_type_from_alias(
                 kwargs.get("productType", None) or self.guess_product_type(**kwargs)[0]
             )
         except NoMatchingProductType:
@@ -1222,21 +1338,19 @@
             start=start, end=end, geom=geom, locations=locations, **kwargs
         )
         for i, search_plugin in enumerate(search_plugins):
-            if items_per_page is None:
-                items_per_page = search_plugin.config.pagination.get(
-                    "max_items_per_page", DEFAULT_MAX_ITEMS_PER_PAGE
-                )
-
+            itp = items_per_page or search_plugin.config.pagination.get(
+                "max_items_per_page", DEFAULT_MAX_ITEMS_PER_PAGE
+            )
             logger.debug(
                 "Searching for all the products with provider %s and a maximum of %s "
                 "items per page.",
                 search_plugin.provider,
-                items_per_page,
+                itp,
             )
             all_results = SearchResult([])
             try:
                 for page_results in self.search_iter_page_plugin(
-                    items_per_page=items_per_page,
+                    items_per_page=itp,
                     search_plugin=search_plugin,
                     **search_kwargs,
                 ):
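How these search entry points are meant to be used, as a hedged sketch (the
product type, dates and bounding box are illustrative):

dag = EODataAccessGateway()
criteria = dict(
    productType="S2_MSI_L1C",
    start="2021-01-01",
    end="2021-01-31",
    geom={"lonmin": 1, "latmin": 43, "lonmax": 2, "latmax": 44},
)
# collect everything at once ...
all_products = dag.search_all(**criteria)
# ... or page by page, stopping early if needed
for page in dag.search_iter_page(items_per_page=50, **criteria):
    print(len(page))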
@@ -1267,8 +1381,11 @@
                     search_plugin.provider,
                 )
                 return all_results
+        raise RequestError("No result could be obtained from any available provider")

-    def _search_by_id(self, uid, provider=None, **kwargs):
+    def _search_by_id(
+        self, uid: str, provider: Optional[str] = None, **kwargs: Any
+    ) -> Tuple[SearchResult, int]:
         """Internal method that enables searching a product by its id.

         Keeps requesting providers until a result matching the id is supplied. The
@@ -1292,12 +1409,18 @@
                   of EO products retrieved (0 or 1)
         :rtype: tuple(:class:`~eodag.api.search_result.SearchResult`, int)
         """
-        get_search_plugins_kwargs = dict(
-            provider=provider, product_type=kwargs.get("productType", None)
-        )
+        product_type = kwargs.get("productType", None)
+        if product_type is not None:
+            try:
+                product_type = self.get_product_type_from_alias(product_type)
+            except NoMatchingProductType:
+                logger.warning("product type %s not found", product_type)
+        get_search_plugins_kwargs = dict(provider=provider, product_type=product_type)
         search_plugins = self._plugins_manager.get_search_plugins(
             **get_search_plugins_kwargs
         )
+        # datacube query string
+        _dc_qs = kwargs.pop("_dc_qs", None)

         for plugin in search_plugins:
             logger.info(
@@ -1305,7 +1428,10 @@
             )
             logger.debug("Using plugin class for search: %s", plugin.__class__.__name__)
             plugin.clear()
-            results, _ = self._do_search(plugin, id=uid, **kwargs)
+            if isinstance(plugin, BuildPostSearchResult):
+                results, _ = self._do_search(plugin, id=uid, _dc_qs=_dc_qs, **kwargs)
+            else:
+                results, _ = self._do_search(plugin, id=uid, **kwargs)
             if len(results) == 1:
                 if not results[0].product_type:
                     # guess product type from properties
@@ -1329,8 +1455,14 @@
         return SearchResult([]), 0

     def _prepare_search(
-        self, start=None, end=None, geom=None, locations=None, provider=None, **kwargs
-    ):
+        self,
+        start: Optional[str] = None,
+        end: Optional[str] = None,
+        geom: Optional[Union[str, Dict[str, float], BaseGeometry]] = None,
+        locations: Optional[Dict[str, str]] = None,
+        provider: Optional[str] = None,
+        **kwargs: Any,
+    ) -> Tuple[List[Union[Search, Api]], Dict[str, Any]]:
         """Internal method to prepare the search kwargs and get the search plugins.

         Product query:
@@ -1396,7 +1528,13 @@
             else:
                 return [], kwargs

+        if product_type is not None:
+            try:
+                product_type = self.get_product_type_from_alias(product_type)
+            except NoMatchingProductType:
+                logger.warning("unknown product type " + product_type)
         kwargs["productType"] = product_type
+
         if start is not None:
             kwargs["startTimeFromAscendingNode"] = start
         if end is not None:
@@ -1433,7 +1571,7 @@
         preferred_provider = self.get_preferred_provider()[0]

-        search_plugins: List[Search] = []
+        search_plugins: List[Union[Search, Api]] = []
         for plugin in self._plugins_manager.get_search_plugins(
             product_type=product_type, provider=provider
         ):
@@ -1480,6 +1618,7 @@
                             search_plugin.provider, fetch_providers=False
                         )
                         if p["ID"] == product_type
+                        or ("_id" in p and p["_id"] == product_type)
                     ][0],
                     **{"productType": product_type},
                 )
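The `_search_by_id` path is what a plain ID search goes through; a hedged
usage sketch (the provider choice is illustrative):

dag = EODataAccessGateway()
results, count = dag.search(
    id="S2B_MSIL1C_20201114T110319_N0209_R094_T31TCL_20201114T120840",
    provider="creodias",
)
# at most one product comes back; an alias passed as `productType` is resolved
# through get_product_type_from_alias() before search plugins are selected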
:param search_plugin: A search plugin @@ -1530,7 +1675,7 @@ def _do_search(self, search_plugin, count=True, raise_errors=False, **kwargs): auth_plugin = self._plugins_manager.get_auth_plugin(search_plugin.provider) can_authenticate = callable(getattr(auth_plugin, "authenticate", None)) - results = SearchResult([]) + results: List[EOProduct] = [] total_results = 0 try: @@ -1619,6 +1764,14 @@ def _do_search(self, search_plugin, count=True, raise_errors=False, **kwargs): else: eo_product.product_type = guesses[0] + try: + if eo_product.product_type is not None: + eo_product.product_type = self.get_product_type_from_alias( + eo_product.product_type + ) + except NoMatchingProductType: + logger.warning("product type %s not found", eo_product.product_type) + if eo_product.search_intersection is not None: download_plugin = self._plugins_manager.get_download_plugin( eo_product @@ -1647,7 +1800,7 @@ def _do_search(self, search_plugin, count=True, raise_errors=False, **kwargs): "available in the searched collection (e.g. SENTINEL2) instead of " "the total number of products matching the search criteria" ) - except Exception: + except Exception as e: log_msg = f"No result from provider '{search_plugin.provider}' due to an error during search." if not raise_errors: log_msg += " Raise verbosity of log messages for details" @@ -1661,9 +1814,10 @@ def _do_search(self, search_plugin, count=True, raise_errors=False, **kwargs): "Error while searching on provider %s (ignored):", search_plugin.provider, ) + self.search_errors.add((search_plugin.provider, e)) return SearchResult(results), total_results - def crunch(self, results, **kwargs): + def crunch(self, results: SearchResult, **kwargs: Any) -> SearchResult: """Apply the filters given through the keyword arguments to the results :param results: The results of an eodag search request @@ -1680,7 +1834,7 @@ def crunch(self, results, **kwargs): return results @staticmethod - def group_by_extent(searches): + def group_by_extent(searches: List[SearchResult]) -> List[SearchResult]: """Combines multiple SearchResults and returns a list of SearchResults grouped by extent (i.e. bounding box). @@ -1690,7 +1844,7 @@ def group_by_extent(searches): """ # Dict with extents as keys, each extent being defined by a str # "{minx}{miny}{maxx}{maxy}" (each float rounded to 2 dec). - products_grouped_by_extent = {} + products_grouped_by_extent: Dict[str, Any] = {} for search in searches: for product in search: @@ -1706,20 +1860,20 @@ def group_by_extent(searches): def download_all( self, - search_result, - downloaded_callback=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + search_result: SearchResult, + downloaded_callback: Optional[DownloadedCallback] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Any, + ) -> List[str]: """Download all products resulting from a search. :param search_result: A collection of EO products resulting from a search :type search_result: :class:`~eodag.api.search_result.SearchResult` :param downloaded_callback: (optional) A method or a callable object which takes as parameter the ``product``. You can use the base class - :class:`~eodag.utils.DownloadedCallback` and override + :class:`~eodag.api.product.DownloadedCallback` and override its ``__call__`` method.
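A short sketch of the callback hook documented above: download_all() invokes the callable once per finished product. The import path follows the corrected cross-reference in the hunk; the body is illustrative:

from eodag.api.product import DownloadedCallback, EOProduct

class LogDownloads(DownloadedCallback):
    """Print each product id as soon as its download completes."""
    def __call__(self, product: EOProduct) -> None:
        print(f"downloaded {product.properties.get('id')}")

# paths = dag.download_all(results, downloaded_callback=LogDownloads())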
Will be called each time a product finishes downloading :type downloaded_callback: Callable[[:class:`~eodag.api.product._product.EOProduct`], None] @@ -1765,7 +1919,9 @@ def download_all( return paths @staticmethod - def serialize(search_result, filename="search_results.geojson"): + def serialize( + search_result: SearchResult, filename: str = "search_results.geojson" + ) -> str: """Registers results of a search into a geojson file. :param search_result: A collection of EO products resulting from a search @@ -1780,7 +1936,7 @@ def serialize( return filename @staticmethod - def deserialize(filename): + def deserialize(filename: str) -> SearchResult: """Loads results of a search from a geojson file. :param filename: A filename containing a search result encoded as a geojson @@ -1791,7 +1947,7 @@ def deserialize( with open(filename, "r") as fh: return SearchResult.from_geojson(geojson.load(fh)) - def deserialize_and_register(self, filename): + def deserialize_and_register(self, filename: str) -> SearchResult: """Loads results of a search from a geojson file and registers products with the information needed to download themselves @@ -1816,14 +1972,14 @@ ) def load_stac_items( self, - filename, - recursive=False, - max_connections=100, - provider=None, - productType=None, - timeout=HTTP_REQ_TIMEOUT, - **kwargs, - ): + filename: str, + recursive: bool = False, + max_connections: int = 100, + provider: Optional[str] = None, + productType: Optional[str] = None, + timeout: int = HTTP_REQ_TIMEOUT, + **kwargs: Any, + ) -> SearchResult: """Loads STAC items from a geojson file / STAC catalog or collection, and converts them to a SearchResult. Features are parsed using eodag provider configuration, as if they were @@ -1856,13 +2012,7 @@ def load_stac_items( max_connections=max_connections, timeout=timeout, ) - nb_features = len(features) feature_collection = geojson.FeatureCollection(features) - feature_collection["context"] = { - "limit": nb_features, - "matched": nb_features, - "returned": nb_features, - } plugin = next( self._plugins_manager.get_search_plugins( @@ -1886,12 +2036,12 @@ def download( self, - product, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + product: EOProduct, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Any, + ) -> str: """Download a single product.
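A round-trip sketch for the (de)serialization helpers typed above, reusing the hypothetical `dag` and `results` from the earlier sketches; deserialize_and_register() re-attaches the download plugins, so restored products remain downloadable:

geojson_path = dag.serialize(results, filename="search_results.geojson")
restored = dag.deserialize_and_register(geojson_path)
product_path = dag.download(restored[0])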
This is an alias to the method of the same name on @@ -1946,7 +2096,7 @@ def download( return path - def _setup_downloader(self, product): + def _setup_downloader(self, product: EOProduct) -> None: if product.downloader is None: auth = product.downloader_auth if auth is None: @@ -1955,7 +2105,7 @@ def _setup_downloader(self, product): self._plugins_manager.get_download_plugin(product), auth ) - def get_cruncher(self, name, **options): + def get_cruncher(self, name: str, **options: Any) -> Crunch: """Build a crunch plugin from a configuration :param name: The name of the cruncher to build @@ -1968,3 +2118,133 @@ def get_cruncher(self, name, **options): plugin_conf = {"name": name} plugin_conf.update({key.replace("-", "_"): val for key, val in options.items()}) return self._plugins_manager.get_crunch_plugin(name, **plugin_conf) + + def list_queryables( + self, + provider: Optional[str] = None, + **kwargs: Any, + ) -> Dict[str, Annotated[Any, FieldInfo]]: + """Fetch the queryable properties for a given product type and/or provider. + + :param provider: (optional) The provider. + :type provider: str + :param kwargs: additional filters for queryables (`productType` or other search + arguments) + :type kwargs: Any + :returns: A dict containing the EODAG queryable properties, associating + parameters to their annotated type + :rtype: Dict[str, Annotated[Any, FieldInfo]] + """ + # unknown product type + available_product_types = [ + pt["ID"] for pt in self.list_product_types(fetch_providers=False) + ] + product_type = kwargs.get("productType", None) + if product_type is not None and product_type not in available_product_types: + self.fetch_product_types_list() + + # dictionary of the queryable properties of the providers supporting the given product type + providers_available_queryables: Dict[ + str, Dict[str, Annotated[Any, FieldInfo]] + ] = dict() + + if provider is None and product_type is None: + return model_fields_to_annotated(CommonQueryables.model_fields) + elif provider is None: + for plugin in self._plugins_manager.get_search_plugins( + product_type, provider + ): + providers_available_queryables[plugin.provider] = self.list_queryables( + provider=plugin.provider, **kwargs + ) + + # return providers queryables intersection + queryables_keys: AbstractSet[str] = set() + for queryables in providers_available_queryables.values(): + queryables_keys = ( + queryables_keys & queryables.keys() + if queryables_keys + else queryables.keys() + ) + return { + k: v + for k, v in providers_available_queryables.popitem()[1].items() + if k in queryables_keys + } + + all_queryables = copy_deepcopy( + model_fields_to_annotated(Queryables.model_fields) + ) + + try: + plugin = next( + self._plugins_manager.get_search_plugins(product_type, provider) + ) + except StopIteration: + # return default queryables if no plugin is found + return model_fields_to_annotated(CommonQueryables.model_fields) + + providers_available_queryables[plugin.provider] = dict() + + # unknown product type: try again after fetch_product_types_list() + if ( + product_type + and product_type not in plugin.config.products.keys() + and provider is None + ): + raise UnsupportedProductType(product_type) + elif product_type and product_type not in plugin.config.products.keys(): + raise UnsupportedProductType( + f"{product_type} is not available for provider {provider}" + ) + + metadata_mapping = deepcopy(getattr(plugin.config, "metadata_mapping", {})) + + # product_type-specific metadata-mapping + metadata_mapping.update( + 
getattr(plugin.config, "products", {}) + .get(product_type, {}) + .get("metadata_mapping", {}) + ) + + # default values + default_values = deepcopy( + getattr(plugin.config, "products", {}).get(product_type, {}) + ) + default_values.pop("metadata_mapping", None) + kwargs = dict(default_values, **kwargs) + + # remove not mapped parameters or non-queryables + for param in list(metadata_mapping.keys()): + if NOT_MAPPED in metadata_mapping[param] or not isinstance( + metadata_mapping[param], list + ): + del metadata_mapping[param] + + for key, value in all_queryables.items(): + annotated_args = get_args(value) + if len(annotated_args) < 1: + continue + field_info = annotated_args[1] + if not isinstance(field_info, FieldInfo): + continue + if key in kwargs: + field_info.default = kwargs[key] + if field_info.is_required() or ( + (field_info.alias or key) in metadata_mapping + ): + providers_available_queryables[plugin.provider][key] = value + + provider_queryables = plugin.discover_queryables(**kwargs) or dict() + # use EODAG configured queryables by default + provider_queryables.update(providers_available_queryables[provider]) + + # always keep at least CommonQueryables + common_queryables = copy_deepcopy(CommonQueryables.model_fields) + for key, queryable in common_queryables.items(): + if key in kwargs: + queryable.default = kwargs[key] + + provider_queryables.update(model_fields_to_annotated(common_queryables)) + + return provider_queryables diff --git a/eodag/api/product/_assets.py b/eodag/api/product/_assets.py new file mode 100644 index 000000000..d1897b5d9 --- /dev/null +++ b/eodag/api/product/_assets.py @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- +# Copyright 2023, CS GROUP - France, https://www.csgroup.eu/ +# +# This file is part of EODAG project +# https://www.github.com/CS-SI/EODAG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import re +from collections import UserDict +from typing import TYPE_CHECKING, Any, Dict, List + +from eodag.utils.exceptions import NotAvailableError + +if TYPE_CHECKING: + from eodag.api.product import EOProduct + + +class AssetsDict(UserDict): + """A UserDict object listing assets contained in a + :class:`~eodag.api.product._product.EOProduct` resulting from a search. 
+ + :param product: Product resulting from a search + :type product: :class:`~eodag.api.product._product.EOProduct` + :param args: (optional) Arguments used to init the dictionary + :type args: Any + :param kwargs: (optional) Additional named-arguments used to init the dictionary + :type kwargs: Any + """ + + product: EOProduct + + def __init__(self, product: EOProduct, *args: Any, **kwargs: Any) -> None: + self.product = product + super(AssetsDict, self).__init__(*args, **kwargs) + + def __setitem__(self, key: str, value: Dict[str, Any]) -> None: + super().__setitem__(key, Asset(self.product, key, value)) + + def as_dict(self) -> Dict[str, Any]: + """Builds a representation of AssetsDict to enable its serialization + + :returns: The representation of a :class:`~eodag.api.product._assets.AssetsDict` + as a Python dict + :rtype: dict + """ + return {k: v.as_dict() for k, v in self.data.items()} + + def get_values(self, asset_filter: str = "") -> List[Asset]: + """ + Retrieves the assets matching the given filter + :param asset_filter: filter with which the assets should be matched + :type asset_filter: str + :return: list of assets + :rtype: List[Asset] + """ + if asset_filter: + filter_regex = re.compile(asset_filter) + assets_keys = list(self.keys()) + assets_keys = list(filter(filter_regex.fullmatch, assets_keys)) + filtered_assets = {} + if len(assets_keys) > 0: + filtered_assets = {a_key: self.get(a_key) for a_key in assets_keys} + assets_values = [a for a in filtered_assets.values() if a and "href" in a] + if not assets_values: + raise NotAvailableError( + rf"No asset key matching re.fullmatch(r'{asset_filter}') was found in {self.product}" + ) + else: + return assets_values + else: + return [a for a in self.values() if "href" in a] + + +class Asset(UserDict): + """A UserDict object containing one of the assets of a + :class:`~eodag.api.product._product.EOProduct` resulting from a search. + + :param product: Product resulting from a search + :type product: :class:`~eodag.api.product._product.EOProduct` + :param key: asset key + :type key: str + :param args: (optional) Arguments used to init the dictionary + :type args: Any + :param kwargs: (optional) Additional named-arguments used to init the dictionary + :type kwargs: Any + """ + + product: EOProduct + + def __init__(self, product: EOProduct, key: str, *args: Any, **kwargs: Any) -> None: + self.product = product + self.key = key + super(Asset, self).__init__(*args, **kwargs) + + def as_dict(self) -> Dict[str, Any]: + """Builds a representation of Asset to enable its serialization + + :returns: The representation of a :class:`~eodag.api.product._assets.Asset` as a + Python dict + :rtype: dict + """ + return self.data + + def download(self, **kwargs: Any) -> str: + """Downloads a single asset + + :param kwargs: (optional) Additional named-arguments passed to `plugin.download()` + :type kwargs: Any + :returns: The absolute path to the downloaded asset on the local filesystem + :rtype: str + """ + return self.product.download(asset=self.key, **kwargs) diff --git a/eodag/api/product/_product.py b/eodag/api/product/_product.py index ef5db84a8..c48affb11 100644 --- a/eodag/api/product/_product.py +++ b/eodag/api/product/_product.py @@ -15,21 +15,26 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
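A usage sketch for the AssetsDict/Asset API that the new module above introduces: assets are attached to each EOProduct by the search plugins, and get_values() filters keys with re.fullmatch(). The asset keys and the regex below are provider-dependent assumptions:

product = results[0]                      # an EOProduct from a search
print(list(product.assets))               # available asset keys
# Download every asset whose key fully matches the regex:
band_paths = [asset.download() for asset in product.assets.get_values(r"B0[48]")]
# Single asset, equivalent to product.download(asset="B04"):
red_band_path = product.assets["B04"].download()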
+from __future__ import annotations + import base64 import logging import os import re import urllib.parse +from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union import requests from requests import RequestException from shapely import geometry, wkb, wkt from shapely.errors import ShapelyError +from eodag.api.product._assets import AssetsDict from eodag.api.product.drivers import DRIVERS, NoDriver from eodag.api.product.metadata_mapping import NOT_AVAILABLE, NOT_MAPPED -from eodag.plugins.download.base import DEFAULT_DOWNLOAD_TIMEOUT, DEFAULT_DOWNLOAD_WAIT from eodag.utils import ( + DEFAULT_DOWNLOAD_TIMEOUT, + DEFAULT_DOWNLOAD_WAIT, DEFAULT_STREAM_REQUESTS_TIMEOUT, USER_AGENT, ProgressCallback, @@ -37,6 +42,14 @@ ) from eodag.utils.exceptions import DownloadError, MisconfiguredError +if TYPE_CHECKING: + from shapely.geometry.base import BaseGeometry + + from eodag.api.product.drivers.base import DatasetDriver + from eodag.plugins.apis.base import Api + from eodag.plugins.authentication.base import Authentication + from eodag.plugins.download.base import Download + try: from shapely.errors import GEOSException except ImportError: @@ -86,10 +99,23 @@ class EOProduct: mentioned CRS. """ - def __init__(self, provider, properties, **kwargs): + provider: str + properties: Dict[str, Any] + product_type: Optional[str] + location: str + remote_location: str + search_kwargs: Any + geometry: BaseGeometry + search_intersection: Optional[BaseGeometry] + assets: AssetsDict + + def __init__( + self, provider: str, properties: Dict[str, Any], **kwargs: Any + ) -> None: self.provider = provider self.product_type = kwargs.get("productType") self.location = self.remote_location = properties.get("downloadLink", "") + self.assets = AssetsDict(self) self.properties = { key: value for key, value in properties.items() @@ -108,7 +134,7 @@ def __init__(self, provider, properties, **kwargs): f"No geometry available to build EOProduct(id={properties.get('id', None)}, provider={provider})" ) elif properties["geometry"] == NOT_AVAILABLE: - product_geometry = properties["defaultGeometry"] + product_geometry = properties.pop("defaultGeometry") else: product_geometry = properties["geometry"] # Let's try 'latmin lonmin latmax lonmax' @@ -157,10 +183,10 @@ def __init__(self, provider, properties, **kwargs): ) self.search_intersection = None self.driver = self.get_driver() - self.downloader = None - self.downloader_auth = None + self.downloader: Optional[Union[Api, Download]] = None + self.downloader_auth: Optional[Authentication] = None - def as_dict(self): + def as_dict(self) -> Dict[str, Any]: """Builds a representation of EOProduct as a dictionary to enable its geojson serialization @@ -171,27 +197,28 @@ def as_dict(self): search_intersection = None if self.search_intersection is not None: search_intersection = geometry.mapping(self.search_intersection) - geojson_repr = { + + geojson_repr: Dict[str, Any] = { "type": "Feature", "geometry": geometry.mapping(self.geometry), "id": self.properties["id"], + "assets": self.assets.as_dict(), "properties": { "eodag_product_type": self.product_type, "eodag_provider": self.provider, "eodag_search_intersection": search_intersection, + **{ + key: value + for key, value in self.properties.items() + if key not in ("geometry", "id") + }, }, } - geojson_repr["properties"].update( - { - key: value - for key, value in self.properties.items() - if key not in ("geometry", "id") - } - ) + return geojson_repr @classmethod - def from_geojson(cls, feature): + def from_geojson(cls, 
feature: Dict[str, Any]) -> EOProduct: """Builds an :class:`~eodag.api.product._product.EOProduct` object from its representation as geojson @@ -210,13 +237,14 @@ def from_geojson(cls, feature): obj.search_intersection = geometry.shape( feature["properties"]["eodag_search_intersection"] ) + obj.assets = AssetsDict(obj, feature.get("assets", {})) return obj # Implementation of geo-interface protocol (See # https://gist.github.com/sgillies/2217756) __geo_interface__ = property(as_dict) - def __repr__(self): + def __repr__(self) -> str: try: return "{}(id={}, provider={})".format( self.__class__.__name__, self.properties["id"], self.provider @@ -226,7 +254,9 @@ def __repr__(self): f"Unable to get {e.args[0]} key from EOProduct.properties" ) - def register_downloader(self, downloader, authenticator): + def register_downloader( + self, downloader: Union[Api, Download], authenticator: Optional[Authentication] + ) -> None: """Give the product the information needed to download itself. :param downloader: The download method that it can use @@ -275,11 +305,11 @@ def register_downloader( def download( self, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Any, + ) -> str: """Download the EO product using the provided download plugin and the authenticator if necessary. @@ -360,7 +390,9 @@ def download( return fs_path - def _init_progress_bar(self, progress_callback): + def _init_progress_bar( + self, progress_callback: Optional[ProgressCallback] + ) -> Tuple[ProgressCallback, bool]: # progress bar init if progress_callback is None: progress_callback = ProgressCallback(position=1) @@ -373,9 +405,14 @@ def _init_progress_bar(self, progress_callback): progress_callback.unit_scale = True progress_callback.desc = str(self.properties.get("id", "")) progress_callback.refresh() - return [progress_callback, close_progress_callback] + return (progress_callback, close_progress_callback) - def get_quicklook(self, filename=None, base_dir=None, progress_callback=None): + def get_quicklook( + self, + filename: Optional[str] = None, + base_dir: Optional[str] = None, + progress_callback: Optional[ProgressCallback] = None, + ) -> str: """Download the quicklook image of a given EOProduct from its provider if it exists. @@ -395,7 +432,7 @@ def get_quicklook(self, filename=None, base_dir=None, progress_callback=None): :rtype: str """ - def format_quicklook_address(): + def format_quicklook_address() -> None: """If the quicklook address is a Python format string, resolve the formatting with the properties of the product.""" fstrmatch = re.match(r".*{.+}*.*", self.properties["quicklook"]) @@ -490,7 +527,7 @@ def format_quicklook_address(): return quicklook_file - def get_driver(self): + def get_driver(self) -> DatasetDriver: """Get the most appropriate driver""" try: for driver_conf in DRIVERS: diff --git a/eodag/api/product/drivers/__init__.py b/eodag/api/product/drivers/__init__.py index cac383fac..168d57367 100644 --- a/eodag/api/product/drivers/__init__.py +++ b/eodag/api/product/drivers/__init__.py @@ -16,7 +16,7 @@ # See the License for the specific language governing permissions and # limitations under the License.
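Building on the typed EOProduct API above, a small sketch (still with the hypothetical `product` from the earlier examples): the footprint is a plain shapely geometry, and get_quicklook() fetches the provider's preview image when one is published:

print(product.geometry.bounds)            # shapely footprint of the product
print(product.properties.get("title"))    # mapped provider metadata
preview_path = product.get_quicklook(filename="preview")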
"""EODAG drivers package""" -from eodag.api.product.drivers.base import NoDriver # noqa +from eodag.api.product.drivers.base import DatasetDriver, NoDriver # noqa try: from eodag_cube.api.product.drivers import DRIVERS diff --git a/eodag/api/product/drivers/base.py b/eodag/api/product/drivers/base.py index 0c03a2abd..746caf9cc 100644 --- a/eodag/api/product/drivers/base.py +++ b/eodag/api/product/drivers/base.py @@ -15,12 +15,18 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from eodag.api.product import EOProduct class DatasetDriver(metaclass=type): """Dataset driver""" - def get_data_address(self, eo_product, band): + def get_data_address(self, eo_product: EOProduct, band: str) -> str: """Retrieve the address of the dataset represented by `eo_product`. :param eo_product: The product whom underlying dataset address is to be retrieved diff --git a/eodag/api/product/metadata_mapping.py b/eodag/api/product/metadata_mapping.py index b369ddc8d..f63f3a0d6 100644 --- a/eodag/api/product/metadata_mapping.py +++ b/eodag/api/product/metadata_mapping.py @@ -15,12 +15,25 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import ast import json import logging import re from datetime import datetime, timedelta from string import Formatter +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterator, + List, + Optional, + Tuple, + Union, + cast, +) import geojson import orjson @@ -34,6 +47,7 @@ from shapely.geometry import MultiPolygon, Polygon from shapely.ops import transform +from eodag.types.queryables import Queryables from eodag.utils import ( DEFAULT_PROJ, deepcopy, @@ -46,6 +60,11 @@ update_nested_dict, ) +if TYPE_CHECKING: + from shapely.geometry.base import BaseGeometry + + from eodag.config import PluginConfig + logger = logging.getLogger("eodag.product.metadata_mapping") SEP = r"#" @@ -62,7 +81,9 @@ COMPLEX_QS_REGEX = re.compile(r"^(.+=)?([^=]*)({.+})+([^=&]*)$") -def get_metadata_path(map_value): +def get_metadata_path( + map_value: Union[str, List[str]] +) -> Tuple[Union[List[str], None], str]: """Return the jsonpath or xpath to the value of a EO product metadata in a provider search result. 
@@ -112,12 +133,12 @@ def get_metadata_path(map_value): return None, path -def get_metadata_path_value(map_value): +def get_metadata_path_value(map_value: Union[str, List[str]]) -> str: """Get raw metadata path without converter""" return map_value[1] if isinstance(map_value, list) else map_value -def get_search_param(map_value): +def get_search_param(map_value: List[str]) -> str: """See :func:`~eodag.api.product.metadata_mapping.get_metadata_path` :param map_value: The value originating from the definition of `metadata_mapping` @@ -130,7 +151,7 @@ def get_search_param(map_value): return map_value[0] -def format_metadata(search_param, *args, **kwargs): +def format_metadata(search_param: str, *args: Tuple[Any], **kwargs: Any) -> str: """Format a string of form {<field_name>#<converter>} The currently understood converters are: @@ -181,11 +202,11 @@ class MetadataFormatter(Formatter): + r"(?P<converter>[^\d\W]\w*)(\((?P<args>.*)\))*$" ) - def __init__(self): + def __init__(self) -> None: self.custom_converter = None self.custom_args = None - def get_field(self, field_name, args, kwargs): + def get_field(self, field_name: str, args: Any, kwargs: Any) -> Any: conversion_func_spec = self.CONVERSION_REGEX.match(field_name) # Register a custom converter if any for later use (see convert_field) # This is done because we don't have the value associated to field_name at @@ -198,7 +219,7 @@ def get_field(self, field_name, args, kwargs): return super(MetadataFormatter, self).get_field(field_name, args, kwargs) - def convert_field(self, value, conversion): + def convert_field(self, value: Any, conversion: Any) -> Any: # Do custom conversion if any (see get_field) if self.custom_converter is not None: if self.custom_args is not None and value is not None: @@ -215,7 +236,7 @@ def convert_field(self, value, conversion): return super(MetadataFormatter, self).convert_field(value, conversion) @staticmethod - def convert_datetime_to_timestamp_milliseconds(date_time): + def convert_datetime_to_timestamp_milliseconds(date_time: str) -> int: """Convert a date_time (str) to a Unix timestamp in milliseconds "2021-04-21T18:27:19.123Z" => "1619029639123" @@ -225,7 +246,9 @@ def convert_datetime_to_timestamp_milliseconds(date_time): return int(1e3 * get_timestamp(date_time)) @staticmethod - def convert_to_iso_utc_datetime_from_milliseconds(timestamp): + def convert_to_iso_utc_datetime_from_milliseconds( + timestamp: int, + ) -> Union[str, int]: """Convert a timestamp in milliseconds (int) to its ISO8601 UTC format 1619029639123 => "2021-04-21T18:27:19.123Z" @@ -240,7 +263,9 @@ def convert_to_iso_utc_datetime_from_milliseconds(timestamp): return timestamp @staticmethod - def convert_to_iso_utc_datetime(date_time: str, timespec="milliseconds") -> str: + def convert_to_iso_utc_datetime( + date_time: str, timespec: str = "milliseconds" + ) -> str: """Convert a date_time (str) to its ISO 8601 representation in UTC "2021-04-21" => "2021-04-21T00:00:00.000Z" @@ -261,7 +286,9 @@ def convert_to_iso_utc_datetime(date_time: str, timespec="milliseconds") -> str: return dt.isoformat(timespec=timespec).replace("+00:00", "Z") @staticmethod - def convert_to_iso_date(datetime_string, time_delta_args_str="0,0,0,0,0,0,0"): + def convert_to_iso_date( + datetime_string: str, time_delta_args_str: str = "0,0,0,0,0,0,0" + ) -> str: """Convert an ISO8601 datetime (str) to its ISO8601 date format "2021-04-21T18:27:19.123Z" => "2021-04-21" @@ -278,8 +305,10 @@ def convert_to_iso_date(datetime_string, time_delta_args_str="0,0,0,0,0,0,0"): return dt.isoformat()[:10] @staticmethod -
def convert_to_rounded_wkt(value): - wkt_value = wkt.dumps(value, rounding_precision=COORDS_ROUNDING_PRECISION) + def convert_to_rounded_wkt(value: BaseGeometry) -> str: + wkt_value = cast( + str, wkt.dumps(value, rounding_precision=COORDS_ROUNDING_PRECISION) + ) # If needed, simplify WKT to prevent too long request failure tolerance = 0.1 while len(wkt_value) > WKT_MAX_LEN and tolerance <= 1: @@ -288,9 +317,12 @@ def convert_to_rounded_wkt(value): len(wkt_value), tolerance, ) - wkt_value = wkt.dumps( - value.simplify(tolerance), - rounding_precision=COORDS_ROUNDING_PRECISION, + wkt_value = cast( + str, + wkt.dumps( + value.simplify(tolerance), + rounding_precision=COORDS_ROUNDING_PRECISION, + ), ) tolerance += 0.1 if len(wkt_value) > WKT_MAX_LEN and tolerance > 1: @@ -298,7 +330,7 @@ def convert_to_rounded_wkt(value): return wkt_value @staticmethod - def convert_to_bounds_lists(input_geom): + def convert_to_bounds_lists(input_geom: BaseGeometry) -> List[List[float]]: if isinstance(input_geom, MultiPolygon): geoms = [geom for geom in input_geom.geoms] # sort with larger one at first (stac-browser only plots first one) @@ -308,7 +340,7 @@ def convert_to_bounds_lists(input_geom): return [list(input_geom.bounds[0:4])] @staticmethod - def convert_to_bounds(input_geom_unformatted): + def convert_to_bounds(input_geom_unformatted: Any) -> List[float]: input_geom = get_geometry_from_various(geometry=input_geom_unformatted) if isinstance(input_geom, MultiPolygon): geoms = [geom for geom in input_geom.geoms] @@ -328,21 +360,23 @@ def convert_to_bounds(input_geom_unformatted): return list(input_geom.bounds[0:4]) @staticmethod - def convert_to_nwse_bounds(input_geom): + def convert_to_nwse_bounds(input_geom: BaseGeometry) -> List[float]: return list(input_geom.bounds[-1:] + input_geom.bounds[:-1]) @staticmethod - def convert_to_nwse_bounds_str(input_geom, separator=","): + def convert_to_nwse_bounds_str( + input_geom: BaseGeometry, separator: str = "," + ) -> str: return separator.join( str(x) for x in MetadataFormatter.convert_to_nwse_bounds(input_geom) ) @staticmethod - def convert_to_geojson(string): + def convert_to_geojson(string: str) -> str: return geojson.dumps(string) @staticmethod - def convert_from_ewkt(ewkt_string): + def convert_from_ewkt(ewkt_string: str) -> Union[BaseGeometry, str]: """Convert EWKT (Extended Well-Known text) to shapely geometry""" ewkt_regex = re.compile(r"^(?P[A-Za-z]+=[0-9]+);(?P.*)$") @@ -368,7 +402,7 @@ def convert_from_ewkt(ewkt_string): return ewkt_string @staticmethod - def convert_to_ewkt(input_geom): + def convert_to_ewkt(input_geom: BaseGeometry) -> str: """Convert shapely geometry to EWKT (Extended Well-Known text)""" proj = DEFAULT_PROJ.upper().replace("EPSG", "SRID").replace(":", "=") @@ -377,7 +411,7 @@ def convert_to_ewkt(input_geom): return f"{proj};{wkt_geom}" @staticmethod - def convert_from_georss(georss): + def convert_from_georss(georss: Any) -> Union[BaseGeometry, Any]: """Convert GeoRSS to shapely geometry""" if "polygon" in georss.tag: @@ -399,15 +433,14 @@ def convert_from_georss(georss): ).transform # function to get deepest elements - def flatten_elements(nested): - + def flatten_elements(nested) -> Iterator[Any]: for e in nested: if len(e) > 0: yield from flatten_elements(e) else: yield e - polygons_list = [] + polygons_list: List[Polygon] = [] for elem in flatten_elements(georss[0]): coords_list = elem.text.split() polygon_args = [ @@ -430,21 +463,21 @@ def flatten_elements(nested): return georss @staticmethod - def 
convert_csv_list(values_list): + def convert_csv_list(values_list: Any) -> Any: if isinstance(values_list, list): return ",".join([str(x) for x in values_list]) else: return values_list @staticmethod - def convert_remove_extension(string): + def convert_remove_extension(string: str) -> str: parts = string.split(".") if parts: return parts[0] return "" @staticmethod - def convert_get_group_name(string, pattern): + def convert_get_group_name(string: str, pattern: str) -> str: try: return re.search(pattern, str(string)).lastgroup except AttributeError: @@ -454,12 +487,14 @@ def convert_get_group_name(string, pattern): return NOT_AVAILABLE @staticmethod - def convert_replace_str(string, args): + def convert_replace_str(string: str, args: str) -> str: old, new = ast.literal_eval(args) return re.sub(old, new, string) @staticmethod - def convert_recursive_sub_str(input_obj, args): + def convert_recursive_sub_str( + input_obj: Union[Dict[Any, Any], List[Any]], args: str + ) -> Union[Dict[Any, Any], List[Any]]: old, new = ast.literal_eval(args) return items_recursive_apply( input_obj, @@ -468,7 +503,9 @@ def convert_recursive_sub_str(input_obj, args): ) @staticmethod - def convert_dict_update(input_dict, args): + def convert_dict_update( + input_dict: Dict[Any, Any], args: str + ) -> Dict[Any, Any]: """Converts""" new_items_list = ast.literal_eval(args) @@ -477,12 +514,12 @@ def convert_dict_update(input_dict, args): return dict(input_dict, **new_items_dict) @staticmethod - def convert_slice_str(string, args): + def convert_slice_str(string: str, args: str) -> str: cmin, cmax, cstep = [x.strip() for x in args.split(",")] return string[int(cmin) : int(cmax) : int(cstep)] @staticmethod - def convert_fake_l2a_title_from_l1c(string): + def convert_fake_l2a_title_from_l1c(string: str) -> str: id_regex = re.compile( r"^(?P\w+)_(?P\w+)_(?P\w+)_(?P\w+)_(?P\w+)_(?P\w+)_(?P\w+)$" ) @@ -499,7 +536,7 @@ def convert_fake_l2a_title_from_l1c(string): return NOT_AVAILABLE @staticmethod - def convert_s2msil2a_title_to_aws_productinfo(string): + def convert_s2msil2a_title_to_aws_productinfo(string: str) -> str: id_regex = re.compile( r"^(?P\w+)_(?P\w+)_(?P[0-9]{4})(?P[0-9]{2})(?P[0-9]{2})T[0-9]+_" + r"(?P[A-Z0-9_]+)_(?P[A-Z0-9_]+)_T(?P[0-9]{2})(?P[A-Z])(?P[A-Z]{2})_" @@ -524,8 +561,8 @@ def convert_s2msil2a_title_to_aws_productinfo(string): return NOT_AVAILABLE @staticmethod - def convert_split_id_into_s1_params(product_id): - parts = re.split(r"_(?!_)", product_id) + def convert_split_id_into_s1_params(product_id: str) -> Dict[str, str]: + parts: List[str] = re.split(r"_(?!_)", product_id) if len(parts) < 9: logger.error( "id %s does not match expected Sentinel-1 id format", product_id @@ -559,25 +596,25 @@ def convert_split_id_into_s1_params(product_id): return params @staticmethod - def convert_get_processing_level_from_s1_id(product_id): - parts = re.split(r"_(?!_)", product_id) + def convert_get_processing_level_from_s1_id(product_id: str) -> str: + parts: List[str] = re.split(r"_(?!_)", product_id) level = "LEVEL" + parts[3][0] return level @staticmethod - def convert_get_sensor_mode_from_s1_id(product_id): - parts = re.split(r"_(?!_)", product_id) + def convert_get_sensor_mode_from_s1_id(product_id: str) -> str: + parts: List[str] = re.split(r"_(?!_)", product_id) return parts[1] @staticmethod - def convert_get_processing_level_from_s2_id(product_id): - parts = re.split(r"_(?!_)", product_id) + def convert_get_processing_level_from_s2_id(product_id: str) -> str: + parts: List[str] = re.split(r"_(?!_)", 
product_id) processing_level = "S2" + parts[1] return processing_level @staticmethod - def convert_split_id_into_s3_params(product_id): - parts = re.split(r"_(?!_)", product_id) + def convert_split_id_into_s3_params(product_id: str) -> Dict[str, str]: + parts: List[str] = re.split(r"_(?!_)", product_id) params = {"productType": product_id[4:15]} dates = re.findall("[0-9]{8}T[0-9]{6}", product_id) start_date = datetime.strptime(dates[0], "%Y%m%dT%H%M%S") - timedelta( @@ -593,8 +630,8 @@ def convert_split_id_into_s3_params(product_id): return params @staticmethod - def convert_split_id_into_s5p_params(product_id): - parts = re.split(r"_(?!_)", product_id) + def convert_split_id_into_s5p_params(product_id: str) -> Dict[str, str]: + parts: List[str] = re.split(r"_(?!_)", product_id) params = { "productType": product_id[9:19], "processingMode": parts[1], @@ -611,13 +648,13 @@ def convert_split_id_into_s5p_params(product_id): return params @staticmethod - def convert_get_processing_level_from_s5p_id(product_id): - parts = re.split(r"_(?!_)", product_id) + def convert_get_processing_level_from_s5p_id(product_id: str) -> str: + parts: List[str] = re.split(r"_(?!_)", product_id) processing_level = parts[2].replace("_", "") return processing_level @staticmethod - def convert_split_cop_dem_id(product_id): + def convert_split_cop_dem_id(product_id: str) -> List[int]: parts = product_id.split("_") lattitude = parts[3] longitude = parts[5] @@ -633,7 +670,7 @@ def convert_split_cop_dem_id(product_id): return bbox @staticmethod - def convert_split_corine_id(product_id): + def convert_split_corine_id(product_id: str) -> str: if "clc" in product_id: year = product_id.split("_")[1][3:] product_type = "Corine Land Cover " + year @@ -646,7 +683,9 @@ def convert_split_corine_id(product_id): return product_type @staticmethod - def convert_to_datetime_dict(date: str, format: str) -> dict: + def convert_to_datetime_dict( + date: str, format: str + ) -> Dict[str, Union[List[str], str]]: """Convert a date (str) to a dictionary where values are in the format given in argument date == "2021-04-21T18:27:19.123Z" and format == "list" => { @@ -696,14 +735,57 @@ def convert_to_datetime_dict(date: str, format: str) -> dict: } @staticmethod - def convert_get_ecmwf_time(date: str) -> list: + def convert_interval_to_datetime_dict( + date: str, separator: str = "/" + ) -> Dict[str, List[str]]: + """Convert a date interval ('/' separated str) to a dictionary where values are lists + + date == "2021-04-21/2021-04-22" => { + "year": ["2021"], + "month": ["04"], + "day": ["21", "22"], + } + """ + if separator not in date: + raise ValueError( + f"Could not format {date} using convert_interval_to_datetime_dict: {separator} separator missing" + ) + start, end = date.split(separator) + start_utc_date = MetadataFormatter.convert_to_iso_utc_datetime(start) + end_utc_date = MetadataFormatter.convert_to_iso_utc_datetime(end) + start_date_object = datetime.strptime( + start_utc_date, "%Y-%m-%dT%H:%M:%S.%fZ" + ) + end_date_object = datetime.strptime(end_utc_date, "%Y-%m-%dT%H:%M:%S.%fZ") + + delta_utc_date = end_date_object - start_date_object + + years = set() + months = set() + days = set() + + for i in range(delta_utc_date.days + 1): + date_object = start_date_object + timedelta(days=i) + years.add(date_object.strftime("%Y")) + months.add(date_object.strftime("%m")) + days.add(date_object.strftime("%d")) + + return { + "year": list(years), + "month": list(months), + "day": list(days), + } + + @staticmethod + def 
convert_get_ecmwf_time(date: str) -> List[str]: """Get the time of a date (str) in the ECMWF format (["HH:00"]) "2021-04-21T18:27:19.123Z" => ["18:00"] "2021-04-21" => ["00:00"] """ return [ - MetadataFormatter.convert_to_datetime_dict(date, "str")["hour"] + ":00" + str(MetadataFormatter.convert_to_datetime_dict(date, "str")["hour"]) + + ":00" ] @staticmethod @@ -728,7 +810,11 @@ def convert_get_dates_from_string(text: str, split_param="-"): return MetadataFormatter().vformat(search_param, args, kwargs) -def properties_from_json(json, mapping, discovery_config=None): +def properties_from_json( + json: Dict[str, Any], + mapping: Dict[str, Any], + discovery_config: Optional[Dict[str, Any]] = None, +) -> Dict[str, Any]: """Extract properties from a provider json result. :param json: The representation of a provider result as a json object @@ -744,7 +830,7 @@ def properties_from_json(json, mapping, discovery_config=None): :returns: The metadata of the :class:`~eodag.api.product._product.EOProduct` :rtype: dict """ - properties = {} + properties: Dict[str, Any] = {} templates = {} used_jsonpaths = [] for metadata, value in mapping.items(): @@ -759,7 +845,10 @@ def properties_from_json(json, mapping, discovery_config=None): else: properties[metadata] = path_or_text else: - match = path_or_text.find(json) + try: + match = path_or_text.find(json) + except KeyError: + match = [] if len(match) == 1: extracted_value = match[0].value used_jsonpaths.append(match[0].full_path) @@ -861,11 +950,11 @@ def properties_from_json(json, mapping, discovery_config=None): def properties_from_xml( - xml_as_text, - mapping, - empty_ns_prefix="ns", - discovery_config=None, -): + xml_as_text: str, + mapping: Any, + empty_ns_prefix: str = "ns", + discovery_config: Optional[Dict[str, Any]] = None, +) -> Dict[str, Any]: """Extract properties from a provider xml result. 
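A sketch of the converter mechanism the MetadataFormatter static methods above implement: a "{<field>#<converter>}" template resolves the convert_<converter> method against the keyword value. The expected output follows the convert_to_iso_utc_datetime docstring; the field name is an illustrative assumption:

from eodag.api.product.metadata_mapping import format_metadata

iso = format_metadata(
    "{startTimeFromAscendingNode#to_iso_utc_datetime}",
    startTimeFromAscendingNode="2021-04-21",
)
# iso == "2021-04-21T00:00:00.000Z"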
:param xml_as_text: The representation of a provider result as xml @@ -887,7 +976,7 @@ def properties_from_xml( :returns: the metadata of the :class:`~eodag.api.product._product.EOProduct` :rtype: dict """ - properties = {} + properties: Dict[str, Any] = {} templates = {} used_xpaths = [] root = etree.XML(xml_as_text) @@ -1014,7 +1103,11 @@ def properties_from_xml( return properties -def mtd_cfg_as_conversion_and_querypath(src_dict, dest_dict={}, result_type="json"): +def mtd_cfg_as_conversion_and_querypath( + src_dict: Dict[str, Any], + dest_dict: Dict[str, Any] = {}, + result_type: str = "json", +) -> Dict[str, Any]: """Metadata configuration dictionary to querypath with conversion dictionary Transform every src_dict value from jsonpath_str to tuple `(conversion, jsonpath_object)` or from xpath_str to tuple `(conversion, xpath_str)` @@ -1064,7 +1157,9 @@ def mtd_cfg_as_conversion_and_querypath(src_dict, dest_dict={}, result_type="jso return dest_dict -def format_query_params(product_type, config, **kwargs): +def format_query_params( + product_type: str, config: PluginConfig, **kwargs: Any +) -> Dict[str, Any]: """Format the search parameters into query parameters""" if "raise_errors" in kwargs.keys(): del kwargs["raise_errors"] @@ -1076,7 +1171,7 @@ def format_query_params(product_type, config, **kwargs): **config.products.get(product_type, {}).get("metadata_mapping", {}), ) - query_params = {} + query_params: Dict[str, Any] = {} # Get all the search parameters that are recognised as queryables by the # provider (they appear in the queryables dictionary) queryables = _get_queryables(kwargs, config, product_type_metadata_mapping) @@ -1136,7 +1231,7 @@ def format_query_params(product_type, config, **kwargs): return query_params -def _resolve_hashes(formatted_query_param): +def _resolve_hashes(formatted_query_param: str) -> str: """ Resolves structures of the format {"a": "abc", "b": "cde"}["a"] given in the formatted_query_param; the structure is replaced by the value corresponding to the given key in the hash @@ -1165,9 +1260,11 @@ def _resolve_hashes(formatted_query_param): return formatted_query_param -def _format_free_text_search(config, metadata_mapping, **kwargs): +def _format_free_text_search( + config: PluginConfig, metadata_mapping: Dict[str, Any], **kwargs: Any +) -> Dict[str, Any]: """Build the free text search parameter using the search parameters""" - query_params = {} + query_params: Dict[str, Any] = {} if not getattr(config, "free_text_search_operations", None): return query_params for param, operations_config in config.free_text_search_operations.items(): @@ -1205,10 +1302,14 @@ def _format_free_text_search(config, metadata_mapping, **kwargs): return query_params -def _get_queryables(search_params, config, metadata_mapping): +def _get_queryables( + search_params: Dict[str, Any], + config: PluginConfig, + metadata_mapping: Dict[str, Any], +) -> Dict[str, Any]: """Retrieve the metadata mappings that are query-able""" logger.debug("Retrieving queryable metadata from metadata_mapping") - queryables = {} + queryables: Dict[str, Any] = {} for eodag_search_key, user_input in search_params.items(): if user_input is not None: md_mapping = metadata_mapping.get(eodag_search_key, (None, NOT_MAPPED)) @@ -1254,6 +1355,73 @@ def _get_queryables(search_params, config, metadata_mapping): return queryables +def get_queryable_from_provider( + provider_queryable: str, metadata_mapping: Dict[str, Union[str, List[str]]] +) -> Optional[str]: + """Get EODAG configured queryable parameter from
provider queryable parameter + + :param provider_queryable: provider queryable parameter + :type provider_queryable: str + :param metadata_mapping: metadata-mapping configuration + :type metadata_mapping: Dict[str, Union[str, List[str]]] + :returns: EODAG configured queryable parameter or None + :rtype: Optional[str] + """ + pattern = rf"\b{provider_queryable}\b" + for param, param_conf in metadata_mapping.items(): + if isinstance(param_conf, list) and re.search(pattern, param_conf[0]): + return Queryables.get_queryable_from_alias(param) + return None + + +def get_provider_queryable_path( + queryable: str, metadata_mapping: Dict[str, Union[str, List[str]]] +) -> Optional[str]: + """Get EODAG configured queryable path from its parameter + + :param queryable: eodag queryable parameter + :type queryable: str + :param metadata_mapping: metadata-mapping configuration + :type metadata_mapping: Dict[str, Union[str, List[str]]] + :returns: EODAG configured queryable path or None + :rtype: Optional[str] + """ + parameter_conf = metadata_mapping.get(queryable, None) + if isinstance(parameter_conf, list): + return parameter_conf[0] + else: + return None + + +def get_provider_queryable_key( + eodag_key: str, + provider_queryables: Dict[str, Any], + metadata_mapping: Dict[str, Union[List[Any], str]], +) -> str: + """Finds the provider queryable corresponding to the given eodag key based on the metadata mapping + :param eodag_key: key in eodag + :type eodag_key: str + :param provider_queryables: queryables returned from the provider + :type provider_queryables: dict + :param metadata_mapping: metadata mapping from which the keys are retrieved + :type metadata_mapping: Dict[str, Union[List[Any], str]] + :returns: provider queryable key + :rtype: str + """ + if eodag_key not in metadata_mapping: + return "" + + mapping_key = metadata_mapping[eodag_key] + if isinstance(mapping_key, list): + for queryable in provider_queryables: + pattern = rf"\b{queryable}\b" + if re.search(pattern, mapping_key[0]): + return queryable + return "" + else: + return eodag_key + + # Keys taken from OpenSearch extension for Earth Observation http://docs.opengeospatial.org/is/13-026r9/13-026r9.html # For a metadata to be queryable, the way to query it must be specified in the # provider metadata_mapping configuration parameter. It will be automatically diff --git a/eodag/api/search_result.py b/eodag/api/search_result.py index eaea06538..6961ee551 100644 --- a/eodag/api/search_result.py +++ b/eodag/api/search_result.py @@ -15,7 +15,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + from collections import UserList +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union from shapely.geometry import GeometryCollection, shape @@ -26,6 +29,11 @@ from eodag.plugins.crunch.filter_overlap import FilterOverlap from eodag.plugins.crunch.filter_property import FilterProperty +if TYPE_CHECKING: + from shapely.geometry.base import BaseGeometry + + from eodag.plugins.crunch.base import Crunch + class SearchResult(UserList): """An object representing a collection of :class:`~eodag.api.product._product.EOProduct` resulting from a search.
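A chaining sketch for the crunch shortcuts defined in the hunks that follow: each filter_* helper wraps a crunch plugin and returns a new SearchResult, so the calls compose. The dates and the cloud-cover threshold are illustrative assumptions:

online_recent = (
    results.filter_date(start="2021-06-01", end="2021-06-30")
    .filter_online()
    .filter_property(cloudCover=20, operator="lt")
)
print(len(online_recent), "products kept")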
@@ -34,10 +42,12 @@ class SearchResult(UserList): :type products: list(:class:`~eodag.api.product._product.EOProduct`) """ - def __init__(self, products): + data: List[EOProduct] + + def __init__(self, products: List[EOProduct]) -> None: super(SearchResult, self).__init__(products) - def crunch(self, cruncher, **search_params): + def crunch(self, cruncher: Crunch, **search_params: Any) -> SearchResult: """Do some crunching with the underlying EO products. :param cruncher: The plugin instance to use to work on the products @@ -47,24 +57,28 @@ def crunch(self, cruncher, **search_params): :returns: The result of the application of the crunching method to the EO products :rtype: :class:`~eodag.api.search_result.SearchResult` """ - crunched_results = cruncher.proceed(self, **search_params) + crunched_results = cruncher.proceed(self.data, **search_params) return SearchResult(crunched_results) - def filter_date(self, start=None, end=None): + def filter_date( + self, start: Optional[str] = None, end: Optional[str] = None + ) -> SearchResult: """ Apply :class:`~eodag.plugins.crunch.filter_date.FilterDate` crunch, check its documentation to know more. """ return self.crunch(FilterDate(dict(start=start, end=end))) - def filter_latest_intersect(self, geometry): + def filter_latest_intersect( + self, geometry: Union[Dict[str, Any], BaseGeometry, Any] + ) -> SearchResult: """ Apply :class:`~eodag.plugins.crunch.filter_latest_intersect.FilterLatestIntersect` crunch, check its documentation to know more. """ return self.crunch(FilterLatestIntersect({}), geometry=geometry) - def filter_latest_by_name(self, name_pattern): + def filter_latest_by_name(self, name_pattern: str) -> SearchResult: """ Apply :class:`~eodag.plugins.crunch.filter_latest_tpl_name.FilterLatestByName` crunch, check its documentation to know more. @@ -73,12 +87,12 @@ def filter_latest_by_name(self, name_pattern): def filter_overlap( self, - geometry, - minimum_overlap=0, - contains=False, - intersects=False, - within=False, - ): + geometry: Any, + minimum_overlap: int = 0, + contains: bool = False, + intersects: bool = False, + within: bool = False, + ) -> SearchResult: """ Apply :class:`~eodag.plugins.crunch.filter_overlap.FilterOverlap` crunch, check its documentation to know more. @@ -95,14 +109,16 @@ def filter_overlap( geometry=geometry, ) - def filter_property(self, operator="eq", **search_property): + def filter_property( + self, operator: str = "eq", **search_property: Any + ) -> SearchResult: """ Apply :class:`~eodag.plugins.crunch.filter_property.FilterProperty` crunch, check its documentation to know more. """ return self.crunch(FilterProperty(dict(operator=operator, **search_property))) - def filter_online(self): + def filter_online(self) -> SearchResult: """ Use cruncher :class:`~eodag.plugins.crunch.filter_property.FilterProperty`, filter for online products. @@ -110,7 +126,7 @@ def filter_online(self): return self.filter_property(storageStatus="ONLINE") @staticmethod - def from_geojson(feature_collection): + def from_geojson(feature_collection: Dict[str, Any]) -> SearchResult: """Builds an :class:`~eodag.api.search_result.SearchResult` object from its representation as geojson :param feature_collection: A collection representing a search result. 
@@ -119,18 +135,20 @@ def from_geojson(feature_collection): :rtype: :class:`~eodag.api.search_result.SearchResult` """ return SearchResult( - EOProduct.from_geojson(feature) - for feature in feature_collection["features"] + [ + EOProduct.from_geojson(feature) + for feature in feature_collection["features"] + ] ) - def as_geojson_object(self): + def as_geojson_object(self) -> Dict[str, Any]: """GeoJSON representation of SearchResult""" return { "type": "FeatureCollection", "features": [product.as_dict() for product in self], } - def as_shapely_geometry_object(self): + def as_shapely_geometry_object(self) -> GeometryCollection: """:class:`shapely.geometry.GeometryCollection` representation of SearchResult""" return GeometryCollection( [ @@ -139,12 +157,12 @@ def as_shapely_geometry_object(self): ] ) - def as_wkt_object(self): + def as_wkt_object(self) -> str: """WKT representation of SearchResult""" return self.as_shapely_geometry_object().wkt @property - def __geo_interface__(self): + def __geo_interface__(self) -> Dict[str, Any]: """Implements the geo-interface protocol. See https://gist.github.com/sgillies/2217756 diff --git a/eodag/cli.py b/eodag/cli.py index 02274a8f0..c135f5fd3 100755 --- a/eodag/cli.py +++ b/eodag/cli.py @@ -35,31 +35,31 @@ list List supported product types search Search satellite images by their product types,... serve-rest Start eodag HTTP server - serve-rpc Start eodag rpc server version Print eodag version and exit noqa: D103 """ +from __future__ import annotations + import json import os import shutil import sys import textwrap - -try: - from importlib.metadata import metadata # type: ignore -except ImportError: # pragma: no cover - # for python < 3.8 - from importlib_metadata import metadata # type: ignore +from importlib.metadata import metadata +from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Set import click import uvicorn -from eodag.api.core import DEFAULT_ITEMS_PER_PAGE, DEFAULT_PAGE, EODataAccessGateway -from eodag.utils import parse_qs +from eodag.api.core import EODataAccessGateway +from eodag.utils import DEFAULT_ITEMS_PER_PAGE, DEFAULT_PAGE, parse_qs from eodag.utils.exceptions import NoMatchingProductType, UnsupportedProvider from eodag.utils.logging import setup_logging +if TYPE_CHECKING: + from click import Context + # A list of supported crunchers that the user can choose (see --cruncher option below) CRUNCHERS = [ "FilterLatestByName", @@ -75,7 +75,7 @@ class MutuallyExclusiveOption(click.Option): from https://gist.github.com/jacobtolar/fb80d5552a9a9dfc32b12a829fa21c0c """ - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: self.mutually_exclusive = set(kwargs.pop("mutually_exclusive", [])) help = kwargs.get("help", "") if self.mutually_exclusive: @@ -86,7 +86,9 @@ def __init__(self, *args, **kwargs): ) super(MutuallyExclusiveOption, self).__init__(*args, **kwargs) - def handle_parse_result(self, ctx, opts, args): + def handle_parse_result( + self, ctx: Context, opts: Mapping[str, Any], args: List[str] + ): """Raise error or use parent handle_parse_result()""" if self.mutually_exclusive.intersection(opts) and self.name in opts: raise click.UsageError( @@ -105,7 +107,7 @@ def handle_parse_result(self, ctx, opts, args): help="Control the verbosity of the logs. 
For maximum verbosity, type -vvv", ) @click.pass_context -def eodag(ctx, verbose): +def eodag(ctx: Context, verbose: int) -> None: """Earth Observation Data Access Gateway: work on EO products from any provider""" if ctx.obj is None: ctx.obj = {} @@ -113,7 +115,7 @@ def eodag(ctx, verbose): @eodag.command(name="version", help="Print eodag version and exit") -def version(): +def version() -> None: """Print eodag version and exit""" click.echo( "{__title__} ({__description__}): version {__version__}".format( @@ -253,7 +255,7 @@ def version(): help="Custom query-string argument(s). Format :'key1=value1&key2=value2'", ) @click.pass_context -def search_crunch(ctx, **kwargs): +def search_crunch(ctx: Context, **kwargs: Any) -> None: """Search product types and optionally apply crunchers to search results""" # Process inputs for search product_type = kwargs.pop("producttype") @@ -317,8 +319,7 @@ def search_crunch(ctx, **kwargs): else: criteria[k] = v if locations_qs is not None: - locations = parse_qs(locations_qs) - locations = {key: val[0] for key, val in locations.items()} + locations = {key: val[0] for key, val in parse_qs(locations_qs).items()} else: locations = None criteria["locations"] = locations @@ -334,9 +335,9 @@ def search_crunch(ctx, **kwargs): locs_file = click.format_filename(locs_file) # Process inputs for crunch - cruncher_names = set(kwargs.pop("cruncher") or []) + cruncher_names: Set[Any] = set(kwargs.pop("cruncher") or []) cruncher_args = kwargs.pop("cruncher_args") - cruncher_args_dict = {} + cruncher_args_dict: Dict[str, Dict[str, Any]] = {} if cruncher_args: for cruncher, argname, argval in cruncher_args: cruncher_args_dict.setdefault(cruncher, {}).setdefault(argname, argval) @@ -402,7 +403,7 @@ def search_crunch(ctx, **kwargs): "--no-fetch", is_flag=True, help="Do not fetch providers for new product types" ) @click.pass_context -def list_pt(ctx, **kwargs): +def list_pt(ctx: Context, **kwargs: Any) -> None: """Print the list of supported product types""" setup_logging(verbose=ctx.obj["verbosity"]) dag = EODataAccessGateway() @@ -445,6 +446,8 @@ def list_pt(ctx, **kwargs): provider=provider, fetch_providers=fetch_providers ) if pt["ID"] in guessed_product_types + or "alias" in pt + and pt["alias"] in guessed_product_types ] else: product_types = dag.list_product_types( @@ -480,7 +483,7 @@ def list_pt(ctx, **kwargs): "DEFAULT: ext_product_types.json", ) @click.pass_context -def discover_pt(ctx, **kwargs): +def discover_pt(ctx: Context, **kwargs: Any) -> None: """Fetch external product types configuration and save result""" setup_logging(verbose=ctx.obj["verbosity"]) dag = EODataAccessGateway() @@ -519,7 +522,7 @@ def discover_pt(ctx, **kwargs): help="Download only quicklooks of products instead of full set of files", ) @click.pass_context -def download(ctx, **kwargs): +def download(ctx: Context, **kwargs: Any) -> None: """Download a bunch of products from a serialized search result""" search_result_path = kwargs.pop("search_results") if not search_result_path: @@ -585,42 +588,6 @@ def download(ctx, **kwargs): ) -@eodag.command(help="Start eodag rpc server") -@click.option( - "-h", - "--host", - type=click.STRING, - default="localhost", - help="Interface where to listen for requests", ) -@click.option( - "-p", - "--port", - type=click.INT, - default=50051, - help="The port where to listen for requests", ) -@click.option( - "-f", - "--conf", - type=click.Path(exists=True), - help="File path to the user configuration file with its credentials", ) -@click.pass_context -def
serve_rpc(ctx, host, port, conf): - """Serve EODAG functionalities through a RPC interface""" - setup_logging(verbose=ctx.obj["verbosity"]) - try: - from eodag_cube.rpc.server import EODAGRPCServer - except ImportError: - raise NotImplementedError( - "eodag-cube needed for this functionnality, install using `pip install eodag-cube`" - ) - - server = EODAGRPCServer(host, port, conf) - server.serve() - - @eodag.command( help="Start eodag HTTP server\n\n" "Set EODAG_CORS_ALLOWED_ORIGINS environment variable to configure Cross-Origin Resource Sharing allowed origins as " @@ -667,7 +634,15 @@ def serve_rpc(ctx, host, port, conf): help="Run in debug mode (for development purpose)", ) @click.pass_context -def serve_rest(ctx, daemon, world, port, config, locs, debug): +def serve_rest( + ctx: Context, + daemon: bool, + world: bool, + port: int, + config: str, + locs: str, + debug: bool, +) -> None: """Serve EODAG functionalities through a WEB interface""" setup_logging(verbose=ctx.obj["verbosity"]) # Set the settings of the app @@ -797,18 +772,18 @@ def serve_rest(ctx, daemon, world, port, config, locs, debug): ) @click.pass_context def deploy_wsgi_app( - ctx, - root, - config, - webserver, - threads, - user, - group, - server_name, - wsgi_process_group, - wsgi_daemon_process, - name, -): + ctx: Context, + root: str, + config: str, + webserver: str, + threads: int, + user: str, + group: str, + server_name: str, + wsgi_process_group: str, + wsgi_daemon_process: str, + name: str, +) -> None: """Deploy the WEB interface of eodag behind a web server""" setup_logging(verbose=ctx.obj["verbosity"]) import eodag as eodag_package diff --git a/eodag/config.py b/eodag/config.py index d47787c5a..9f437fd48 100644 --- a/eodag/config.py +++ b/eodag/config.py @@ -15,22 +15,41 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
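An aside on the cli.py changes above: a minimal, hypothetical sketch of how the typed MutuallyExclusiveOption helper is meant to be wired into a Click command. The option names are illustrative and not taken from this diff; supplying both options together makes handle_parse_result raise a click.UsageError.

import click

from eodag.cli import MutuallyExclusiveOption


@click.command()
@click.option("--box", cls=MutuallyExclusiveOption, mutually_exclusive=["geom"])
@click.option("--geom", cls=MutuallyExclusiveOption, mutually_exclusive=["box"])
def demo(box: str, geom: str) -> None:
    # passing --box and --geom together raises click.UsageError
    click.echo(f"box={box} geom={geom}")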
+from __future__ import annotations + import logging import os import tempfile +from inspect import isclass +from typing import ( + Any, + Dict, + ItemsView, + Iterator, + List, + Optional, + Tuple, + TypedDict, + Union, + ValuesView, + get_type_hints, +) import orjson import requests import yaml import yaml.constructor import yaml.parser +from jsonpath_ng import JSONPath from pkg_resources import resource_filename +from requests.auth import AuthBase from eodag.utils import ( HTTP_REQ_TIMEOUT, USER_AGENT, cached_yaml_load, cached_yaml_load_all, + cast_scalar_value, deepcopy, dict_items_recursive_apply, merge_mappings, @@ -52,31 +71,31 @@ class SimpleYamlProxyConfig: """A simple configuration class acting as a proxy to an underlying dict object as returned by yaml.load""" - def __init__(self, conf_file_path): + def __init__(self, conf_file_path: str) -> None: try: - self.source = cached_yaml_load(conf_file_path) + self.source: Dict[str, Any] = cached_yaml_load(conf_file_path) except yaml.parser.ParserError as e: print("Unable to load user configuration file") raise e - def __getitem__(self, item): + def __getitem__(self, item: Any) -> Any: return self.source[item] - def __contains__(self, item): + def __contains__(self, item: Any) -> Any: return item in self.source - def __iter__(self): + def __iter__(self) -> Iterator[str]: return iter(self.source) - def items(self): + def items(self) -> ItemsView[str, Any]: """Iterate over keys and values of source""" return self.source.items() - def values(self): + def values(self) -> ValuesView[Any]: """Iterate over values of source""" return self.source.values() - def update(self, other): + def update(self, other: "SimpleYamlProxyConfig") -> None: """Update a :class:`~eodag.config.SimpleYamlProxyConfig`""" if not isinstance(other, self.__class__): raise ValueError("'{}' must be of type {}".format(other, self.__class__)) @@ -105,12 +124,21 @@ class ProviderConfig(yaml.YAMLObject): :type kwargs: Any """ + name: str + priority: int = 0 # Set default priority to 0 + api: PluginConfig + search: PluginConfig + products: Dict[str, Any] + download: PluginConfig + auth: PluginConfig + product_types_fetched: bool # set in core.update_product_types_list + yaml_loader = yaml.Loader yaml_dumper = yaml.SafeDumper yaml_tag = "!provider" @classmethod - def from_yaml(cls, loader, node): + def from_yaml(cls, loader: yaml.Loader, node: Any) -> ProviderConfig: """Build a :class:`~eodag.config.ProviderConfig` from Yaml""" cls.validate(tuple(node_key.value for node_key, _ in node.value)) for node_key, node_value in node.value: @@ -120,7 +148,7 @@ def from_yaml(cls, loader, node): return loader.construct_yaml_object(node, cls) @classmethod - def from_mapping(cls, mapping): + def from_mapping(cls, mapping: Dict[str, Any]) -> ProviderConfig: """Build a :class:`~eodag.config.ProviderConfig` from a mapping""" cls.validate(mapping) for key in ("api", "search", "download", "auth"): @@ -131,7 +159,7 @@ def from_mapping(cls, mapping): return c @staticmethod - def validate(config_keys): + def validate(config_keys: Union[Tuple[str, ...], Dict[str, Any]]) -> None: """Validate a :class:`~eodag.config.ProviderConfig` :param config_keys: The configurations keys to validate @@ -149,7 +177,7 @@ def validate(config_keys): "type of plugin" ) - def update(self, mapping): + def update(self, mapping: Optional[Dict[str, Any]]) -> None: """Update the configuration parameters with values from `mapping` :param mapping: The mapping from which to override configuration parameters @@ -167,7 +195,7 @@ 
def update(self, mapping): }, ) for key in ("api", "search", "download", "auth"): - current_value = getattr(self, key, None) + current_value: Optional[Dict[str, Any]] = getattr(self, key, None) mapping_value = mapping.get(key, {}) if current_value is not None: current_value.update(mapping_value) @@ -187,32 +215,118 @@ class PluginConfig(yaml.YAMLObject): :type free_params: dict """ + class Pagination(TypedDict): + """Search pagination configuration""" + + max_items_per_page: int + total_items_nb_key_path: Union[str, JSONPath] + next_page_url_key_path: Union[str, JSONPath] + next_page_query_obj_key_path: Union[str, JSONPath] + next_page_merge_key_path: Union[str, JSONPath] + next_page_url_tpl: str + next_page_query_obj: str + count_endpoint: str + start_page: int + + class OrderStatusOnSuccess(TypedDict): + """Configuration for order on-success during download""" + + need_search: bool + result_type: str + results_entry: str + metadata_mapping: Dict[str, Union[str, List[str]]] + + name: str + type: str + + # search & api --------------------------------------------------------------------- + priority: int # copied from ProviderConfig in PluginManager.get_search_plugins() + products: Dict[ + str, Any + ] # copied from ProviderConfig in PluginManager.get_search_plugins() + product_type_config: Dict[str, Any] # set in core._prepare_search + auth: Union[AuthBase, Dict[str, str]] # set in core._do_search + api_endpoint: str + need_auth: bool + result_type: str + results_entry: str + pagination: PluginConfig.Pagination + query_params_key: str + discover_metadata: Dict[str, str] + discover_product_types: Dict[str, Any] + discover_queryables: Dict[str, Any] + metadata_mapping: Dict[str, Union[str, List[str]]] + free_params: Dict[Any, Any] + free_text_search_operations: Dict[str, Any] # ODataV4Search + metadata_pre_mapping: Dict[str, Any] # ODataV4Search + data_request_url: str # DataRequestSearch + status_url: str # DataRequestSearch + result_url: str # DataRequestSearch + search_definition: Dict[str, Any] # CSWSearch + merge_responses: bool # PostJsonSearch for aws_eos + collection: bool # PostJsonSearch for aws_eos + max_connections: int # StaticStacSearch + timeout: float # StaticStacSearch + s3_bucket: str # CreodiasS3Search + + # download ------------------------------------------------------------------------- + base_uri: str + outputs_prefix: str + extract: bool + order_enabled: bool # HTTPDownload + order_method: str # HTTPDownload + order_headers: Dict[str, str] # HTTPDownload + order_status_on_success: PluginConfig.OrderStatusOnSuccess + bucket_path_level: int # S3RestDownload + + # auth ----------------------------------------------------------------------------- + credentials: Dict[str, str] + auth_uri: str + auth_base_uri: str + auth_error_code: int + headers: Dict[str, str] + token_provision: str # KeycloakOIDCPasswordAuth + client_id: str # KeycloakOIDCPasswordAuth + client_secret: str # KeycloakOIDCPasswordAuth + realm: str # KeycloakOIDCPasswordAuth + user_consent_needed: str # OIDCAuthorizationCodeFlowAuth + authentication_uri_source: str # OIDCAuthorizationCodeFlowAuth + redirect_uri: str # OIDCAuthorizationCodeFlowAuth + authorization_uri: str # OIDCAuthorizationCodeFlowAuth + login_form_xpath: str # OIDCAuthorizationCodeFlowAuth + user_consent_form_xpath: str # OIDCAuthorizationCodeFlowAuth + user_consent_form_data: Dict[str, str] # OIDCAuthorizationCodeFlowAuth + token_exchange_post_data_method: str # OIDCAuthorizationCodeFlowAuth + token_uri: str # 
OIDCAuthorizationCodeFlowAuth + token_key: str # OIDCAuthorizationCodeFlowAuth + signed_url_key: str # SASAuth + yaml_loader = yaml.Loader yaml_dumper = yaml.SafeDumper yaml_tag = "!plugin" @classmethod - def from_yaml(cls, loader, node): + def from_yaml(cls, loader: yaml.Loader, node: Any) -> PluginConfig: """Build a :class:`~eodag.config.PluginConfig` from Yaml""" cls.validate(tuple(node_key.value for node_key, _ in node.value)) return loader.construct_yaml_object(node, cls) @classmethod - def from_mapping(cls, mapping): + def from_mapping(cls, mapping: Dict[str, Any]) -> PluginConfig: """Build a :class:`~eodag.config.PluginConfig` from a mapping""" c = cls() c.__dict__.update(mapping) return c @staticmethod - def validate(config_keys): + def validate(config_keys: Tuple[Any, ...]) -> None: """Validate a :class:`~eodag.config.PluginConfig`""" if "type" not in config_keys: raise ValidationError( "A Plugin config must specify the Plugin it configures" ) - def update(self, mapping): + def update(self, mapping: Optional[Dict[Any, Any]]) -> None: """Update the configuration parameters with values from `mapping` :param mapping: The mapping from which to override configuration parameters @@ -225,7 +339,7 @@ def update(self, mapping): ) -def load_default_config(): +def load_default_config() -> Dict[str, ProviderConfig]: """Load the providers configuration into a dictionnary. Load from eodag `resources/providers.yml` or `EODAG_PROVIDERS_CFG_FILE` environment @@ -240,7 +354,7 @@ def load_default_config(): return load_config(eodag_providers_cfg_file) -def load_config(config_path): +def load_config(config_path: str) -> Dict[str, ProviderConfig]: """Load the providers configuration into a dictionnary from a given file :param config_path: The path to the provider config file @@ -249,11 +363,11 @@ def load_config(config_path): :rtype: dict """ logger.debug(f"Loading configuration from {config_path}") - config = {} + config: Dict[str, ProviderConfig] = {} try: # Providers configs are stored in this file as separated yaml documents # Load all of it - providers_configs = cached_yaml_load_all(config_path) + providers_configs: List[ProviderConfig] = cached_yaml_load_all(config_path) except yaml.parser.ParserError as e: logger.error("Unable to load configuration") raise e @@ -265,7 +379,10 @@ def load_config(config_path): return config -def provider_config_init(provider_config, stac_search_default_conf=None): +def provider_config_init( + provider_config: ProviderConfig, + stac_search_default_conf: Optional[Dict[str, Any]] = None, +) -> None: """Applies some default values to provider config :param provider_config: An eodag provider configuration @@ -282,14 +399,17 @@ def provider_config_init(provider_config, stac_search_default_conf=None): param_value.outputs_prefix = tempfile.gettempdir() if not getattr(param_value, "delete_archive", None): param_value.delete_archive = True - # Set default priority to 0 - provider_config.__dict__.setdefault("priority", 0) try: - if stac_search_default_conf is not None and provider_config.search.type in [ - "StacSearch", - "StaticStacSearch", - ]: + if ( + stac_search_default_conf is not None + and provider_config.search + and provider_config.search.type + in [ + "StacSearch", + "StaticStacSearch", + ] + ): # search config set to stac defaults overriden with provider config per_provider_stac_provider_config = deepcopy(stac_search_default_conf) provider_config.search.__dict__ = update_nested_dict( @@ -301,7 +421,7 @@ def provider_config_init(provider_config, 
stac_search_default_conf=None): pass -def override_config_from_file(config, file_path): +def override_config_from_file(config: Dict[str, Any], file_path: str) -> None: """Override a configuration with the values in a file :param config: An eodag providers configuration dictionary @@ -321,14 +441,16 @@ def override_config_from_file(config, file_path): override_config_from_mapping(config, config_in_file) -def override_config_from_env(config): +def override_config_from_env(config: Dict[str, Any]) -> None: """Override a configuration with environment variables values :param config: An eodag providers configuration dictionary :type config: dict """ - def build_mapping_from_env(env_var, env_value, mapping): + def build_mapping_from_env( + env_var: str, env_value: str, mapping: Dict[str, Any] + ) -> None: """Recursively build a dictionary from an environment variable. The environment variable must respect the pattern: KEY1__KEY2__[...]__KEYN. @@ -350,13 +472,38 @@ def build_mapping_from_env(env_var, env_value, mapping): :type mapping: dict """ parts = env_var.split("__") - if len(parts) == 1: + iter_parts = iter(parts) + env_type = get_type_hints(PluginConfig).get(next(iter_parts, ""), str) + child_env_type = ( + get_type_hints(env_type).get(next(iter_parts, ""), None) + if isclass(env_type) + else None + ) + if len(parts) == 2 and child_env_type: + # for nested config (pagination, ...) + # try converting env_value type from type hints + try: + env_value = cast_scalar_value(env_value, child_env_type) + except TypeError: + logger.warning( + f"Could not convert {parts} value {env_value} to {child_env_type}" + ) + mapping.setdefault(parts[0], {}) + mapping[parts[0]][parts[1]] = env_value + elif len(parts) == 1: + # try converting env_value type from type hints + try: + env_value = cast_scalar_value(env_value, env_type) + except TypeError: + logger.warning( + f"Could not convert {parts[0]} value {env_value} to {env_type}" + ) mapping[parts[0]] = env_value else: new_map = mapping.setdefault(parts[0], {}) build_mapping_from_env("__".join(parts[1:]), env_value, new_map) - mapping_from_env = {} + mapping_from_env: Dict[str, Any] = {} for env_var in os.environ: if env_var.startswith("EODAG__"): build_mapping_from_env( @@ -368,7 +515,9 @@ def build_mapping_from_env(env_var, env_value, mapping): override_config_from_mapping(config, mapping_from_env) -def override_config_from_mapping(config, mapping): +def override_config_from_mapping( + config: Dict[str, Any], mapping: Dict[str, Any] +) -> None: """Override a configuration with the values in a mapping :param config: An eodag providers configuration dictionary @@ -377,7 +526,7 @@ def override_config_from_mapping(config, mapping): :type mapping: dict """ for provider, new_conf in mapping.items(): - old_conf = config.get(provider) + old_conf: Optional[Dict[str, Any]] = config.get(provider) if old_conf is not None: old_conf.update(new_conf) else: @@ -398,7 +547,7 @@ def override_config_from_mapping(config, mapping): logger.debug(tb.format_exc()) -def merge_configs(config, other_config): +def merge_configs(config: Dict[str, Any], other_config: Dict[str, Any]) -> None: """Override a configuration with the values of another configuration :param config: An eodag providers configuration dictionary @@ -432,7 +581,7 @@ def merge_configs(config, other_config): config[provider] = new_conf -def load_yml_config(yml_path): +def load_yml_config(yml_path: str) -> Dict[Any, Any]: """Load a conf dictionnary from given yml absolute path :returns: The yml configuration file @@ 
-442,7 +591,7 @@ def load_yml_config(yml_path): return dict_items_recursive_apply(config.source, string_to_jsonpath) -def load_stac_config(): +def load_stac_config() -> Dict[str, Any]: """Load the stac configuration into a dictionnary :returns: The stac configuration @@ -453,7 +602,7 @@ def load_stac_config(): ) -def load_stac_api_config(): +def load_stac_api_config() -> Dict[str, Any]: """Load the stac API configuration into a dictionnary :returns: The stac API configuration @@ -464,7 +613,7 @@ def load_stac_api_config(): ) -def load_stac_provider_config(): +def load_stac_provider_config() -> Dict[str, Any]: """Load the stac provider configuration into a dictionnary :returns: The stac provider configuration @@ -475,7 +624,9 @@ def load_stac_provider_config(): ).source -def get_ext_product_types_conf(conf_uri=EXT_PRODUCT_TYPES_CONF_URI): +def get_ext_product_types_conf( + conf_uri: str = EXT_PRODUCT_TYPES_CONF_URI, +) -> Dict[str, Any]: """Read external product types conf :param conf_uri: URI to local or remote configuration file diff --git a/eodag/plugins/apis/base.py b/eodag/plugins/apis/base.py index f6ac01eea..902e06e6b 100644 --- a/eodag/plugins/apis/base.py +++ b/eodag/plugins/apis/base.py @@ -15,10 +15,27 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import logging +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple + +from pydantic.fields import Field, FieldInfo + +if TYPE_CHECKING: + from eodag.api.product import EOProduct + from eodag.api.search_result import SearchResult + from eodag.config import PluginConfig + from eodag.utils import DownloadedCallback, ProgressCallback from eodag.plugins.base import PluginTopic -from eodag.plugins.download.base import DEFAULT_DOWNLOAD_TIMEOUT, DEFAULT_DOWNLOAD_WAIT +from eodag.utils import ( + DEFAULT_DOWNLOAD_TIMEOUT, + DEFAULT_DOWNLOAD_WAIT, + DEFAULT_ITEMS_PER_PAGE, + DEFAULT_PAGE, + Annotated, +) logger = logging.getLogger("eodag.apis.base") @@ -55,11 +72,18 @@ class Api(PluginTopic): (it certainly indicates that the download didn't complete) """ - def clear(self): + def clear(self) -> None: """Method used to clear a search context between two searches.""" pass - def query(self, *args, count=True, **kwargs): + def query( + self, + product_type: Optional[str] = None, + items_per_page: int = DEFAULT_ITEMS_PER_PAGE, + page: int = DEFAULT_PAGE, + count: bool = True, + **kwargs: Any, + ) -> Tuple[List[EOProduct], Optional[int]]: """Implementation of how the products must be searched goes here. 
This method must return a tuple with (1) a list of EOProduct instances (see eodag.api.product module) @@ -68,16 +92,50 @@ def query(self, *args, count=True, **kwargs): """ raise NotImplementedError("A Api plugin must implement a method named query") + def discover_product_types(self) -> Optional[Dict[str, Any]]: + """Fetch product types list from provider using `discover_product_types` conf""" + return None + + def discover_queryables( + self, **kwargs: Any + ) -> Optional[Dict[str, Annotated[Any, FieldInfo]]]: + """Fetch queryables list from provider using `discover_queryables` conf + + :param kwargs: additional filters for queryables (`productType` and other search + arguments) + :type kwargs: Any + :returns: fetched queryable parameters dict + :rtype: Optional[Dict[str, Annotated[Any, FieldInfo]]] + """ + return None + + def get_defaults_as_queryables( + self, product_type: str + ) -> Dict[str, Annotated[Any, FieldInfo]]: + """ + Return given product type default settings as queryables + + :param product_type: given product type + :type product_type: str + :returns: queryable parameters dict + :rtype: Dict[str, Annotated[Any, FieldInfo]] + """ + defaults = self.config.products.get(product_type, {}) + queryables = {} + for parameter, value in defaults.items(): + queryables[parameter] = Annotated[type(value), Field(default=value)] + return queryables + def download( self, - product, - auth=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): - r""" + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Any, + ) -> Optional[str]: + """ Base download method. Not available, it must be defined for each plugin. :param product: The EO product to download @@ -107,14 +165,14 @@ def download( def download_all( self, - products, - auth=None, - downloaded_callback=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + products: SearchResult, + auth: Optional[PluginConfig] = None, + downloaded_callback: Optional[DownloadedCallback] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Any, + ) -> List[str]: """ Base download_all method. @@ -124,7 +182,7 @@ def download_all( :type auth: :class:`~eodag.config.PluginConfig` :param downloaded_callback: (optional) A method or a callable object which takes as parameter the ``product``. You can use the base class - :class:`~eodag.utils.DownloadedCallback` and override + :class:`~eodag.api.product.DownloadedCallback` and override its ``__call__`` method. Will be called each time a product finishes downloading :type downloaded_callback: Callable[[:class:`~eodag.api.product._product.EOProduct`], None] diff --git a/eodag/plugins/apis/cds.py b/eodag/plugins/apis/cds.py index abfc56afc..c3f71127f 100644 --- a/eodag/plugins/apis/cds.py +++ b/eodag/plugins/apis/cds.py @@ -15,32 +15,70 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
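An aside on the new Api.get_defaults_as_queryables method above: a sketch of the mapping it builds, assuming a hypothetical set of product-type defaults. Annotated is re-exported by eodag.utils and Field comes from pydantic, as in the imports above.

from pydantic.fields import Field

from eodag.utils import Annotated

# hypothetical per-product-type defaults, as stored in self.config.products
defaults = {"format": "grib", "variable": "2m_temperature"}

queryables = {
    parameter: Annotated[type(value), Field(default=value)]
    for parameter, value in defaults.items()
}
# queryables["format"] is Annotated[str, FieldInfo(default="grib")]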
+from __future__ import annotations + import logging -from datetime import datetime +from datetime import datetime, timedelta +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, Union, cast +from urllib.parse import unquote_plus import cdsapi import geojson import requests - -from eodag.plugins.apis.base import Api -from eodag.plugins.download.base import ( - DEFAULT_DOWNLOAD_TIMEOUT, - DEFAULT_DOWNLOAD_WAIT, - Download, +from dateutil.parser import isoparse +from pydantic import create_model +from pydantic.fields import FieldInfo +from typing_extensions import get_args + +from eodag.api.product._assets import Asset +from eodag.api.product.metadata_mapping import ( + get_queryable_from_provider, + mtd_cfg_as_conversion_and_querypath, ) +from eodag.plugins.apis.base import Api +from eodag.plugins.download.http import HTTPDownload from eodag.plugins.search.base import Search from eodag.plugins.search.build_search_result import BuildPostSearchResult from eodag.rest.stac import DEFAULT_MISSION_START_DATE -from eodag.utils import datetime_range, get_geometry_from_various, path_to_uri, urlsplit -from eodag.utils.exceptions import AuthenticationError, DownloadError, RequestError +from eodag.types import json_field_definition_to_python, model_fields_to_annotated +from eodag.types.queryables import CommonQueryables +from eodag.utils import ( + DEFAULT_DOWNLOAD_TIMEOUT, + DEFAULT_DOWNLOAD_WAIT, + DEFAULT_ITEMS_PER_PAGE, + DEFAULT_PAGE, + Annotated, + datetime_range, + deepcopy, + get_geometry_from_various, + path_to_uri, + urlencode, + urlsplit, +) +from eodag.utils.constraints import ( + fetch_constraints, + get_constraint_queryables_with_additional_params, +) +from eodag.utils.exceptions import ( + AuthenticationError, + DownloadError, + RequestError, + ValidationError, +) from eodag.utils.logging import get_logging_verbose +if TYPE_CHECKING: + from eodag.api.product import EOProduct + from eodag.api.search_result import SearchResult + from eodag.config import PluginConfig + from eodag.utils import DownloadedCallback, ProgressCallback + logger = logging.getLogger("eodag.apis.cds") CDS_KNOWN_FORMATS = {"grib": "grib", "netcdf": "nc"} -class CdsApi(Download, Api, BuildPostSearchResult): +class CdsApi(HTTPDownload, Api, BuildPostSearchResult): """A plugin that enables to build download-request and download data on CDS API. Builds a single ready-to-download :class:`~eodag.api.product._product.EOProduct` @@ -52,7 +90,7 @@ class CdsApi(Download, Api, BuildPostSearchResult): query build methods. 
""" - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: # init self.config.metadata_mapping using Search Base plugin Search.__init__(self, provider, config) @@ -61,49 +99,169 @@ def __init__(self, provider, config): # needed for compatibility self.config.__dict__.setdefault("pagination", {"next_page_query_obj": "{{}}"}) - def do_search(self, *args, **kwargs): + # parse jsonpath on init: product type specific metadata-mapping + for product_type in self.config.products.keys(): + if "metadata_mapping" in self.config.products[product_type].keys(): + self.config.products[product_type][ + "metadata_mapping" + ] = mtd_cfg_as_conversion_and_querypath( + self.config.products[product_type]["metadata_mapping"] + ) + # Complete and ready to use product type specific metadata-mapping + product_type_metadata_mapping = deepcopy(self.config.metadata_mapping) + + # update config using provider product type definition metadata_mapping + # from another product + other_product_for_mapping = cast( + str, + self.config.products[product_type].get( + "metadata_mapping_from_product", "" + ), + ) + if other_product_for_mapping: + other_product_type_def_params = self.get_product_type_def_params( + other_product_for_mapping, # **kwargs + ) + product_type_metadata_mapping.update( + other_product_type_def_params.get("metadata_mapping", {}) + ) + # from current product + product_type_metadata_mapping.update( + self.config.products[product_type]["metadata_mapping"] + ) + + self.config.products[product_type][ + "metadata_mapping" + ] = product_type_metadata_mapping + + def get_product_type_cfg(self, key: str, default: Any = None) -> Any: + """ + Get the value of a configuration option specific to the current product type. + + This method retrieves the value of a configuration option from the + `_product_type_config` attribute. If the option is not found, the provided + default value is returned. + + :param key: The configuration option key. + :type key: str + :param default: The default value to be returned if the option is not found (default is None). + :type default: Any + + :return: The value of the specified configuration option or the default value. + :rtype: Any + """ + product_type_cfg = getattr(self.config, "product_type_config", {}) + non_none_cfg = {k: v for k, v in product_type_cfg.items() if v} + + return non_none_cfg.get(key, default) + + def _preprocess_search_params(self, params: Dict[Any]) -> None: + """Preprocess search parameters before making a request to the CDS API. + + This method is responsible for checking and updating the provided search parameters + to ensure that required parameters like 'productType', 'startTimeFromAscendingNode', + 'completionTimeFromAscendingNode', and 'geometry' are properly set. If not specified + in the input parameters, default values or values from the configuration are used. + + :param params: Search parameters to be preprocessed. 
+ :type params: dict + """ + _dc_qs = params.get("_dc_qs", None) + if _dc_qs is not None: + # if available, update search params using datacube query-string + _dc_qp = geojson.loads(unquote_plus(unquote_plus(_dc_qs))) + if "/" in _dc_qp.get("date", ""): + ( + params["startTimeFromAscendingNode"], + params["completionTimeFromAscendingNode"], + ) = _dc_qp["date"].split("/") + elif _dc_qp.get("date", None): + params["startTimeFromAscendingNode"] = params[ + "completionTimeFromAscendingNode" + ] = _dc_qp["date"] + + if "/" in _dc_qp.get("area", ""): + params["geometry"] = _dc_qp["area"].split("/") + + non_none_params = {k: v for k, v in params.items() if v} + + # productType + dataset = params.get("dataset", None) + params["productType"] = non_none_params.get("productType", dataset) + + # dates + mission_start_dt = datetime.fromisoformat( + self.get_product_type_cfg( + "missionStartDate", DEFAULT_MISSION_START_DATE + ).replace( + "Z", "+00:00" + ) # before 3.11 + ) + + default_end_from_cfg = self.config.products.get(params["productType"], {}).get( + "_default_end_date", None + ) + default_end_str = ( + default_end_from_cfg + or ( + datetime.utcnow() + if params.get("startTimeFromAscendingNode") + else mission_start_dt + timedelta(days=1) + ).isoformat() + ) + + params["startTimeFromAscendingNode"] = non_none_params.get( + "startTimeFromAscendingNode", mission_start_dt.isoformat() + ) + params["completionTimeFromAscendingNode"] = non_none_params.get( + "completionTimeFromAscendingNode", default_end_str + ) + + # temporary _date parameter mixing start & end + end_date = isoparse(params["completionTimeFromAscendingNode"]) + timedelta( + days=-1 + ) + params[ + "_date" + ] = f"{params['startTimeFromAscendingNode']}/{end_date.isoformat()}" + + # geometry + if "geometry" in params: + params["geometry"] = get_geometry_from_various(geometry=params["geometry"]) + + def build_query_string( + self, product_type: str, **kwargs: Any + ) -> Tuple[Dict[str, Any], str]: + """Build The query string using the search parameters""" + qp, _ = BuildPostSearchResult.build_query_string( + self, product_type=product_type, **kwargs + ) + if "_date" in qp: + qp.update(qp.pop("_date", {})) + + return qp, urlencode(qp, doseq=True, quote_via=lambda x, *_args, **_kwargs: x) + + def do_search(self, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]: """Should perform the actual search request.""" return [{}] def query( - self, product_type=None, items_per_page=None, page=None, count=True, **kwargs - ): + self, + product_type: Optional[str] = None, + items_per_page: int = DEFAULT_ITEMS_PER_PAGE, + page: int = DEFAULT_PAGE, + count: bool = True, + **kwargs: Any, + ) -> Tuple[List[EOProduct], Optional[int]]: """Build ready-to-download SearchResult""" - # check productType, dates, geometry, use defaults if not specified - # productType - if not kwargs.get("productType"): - kwargs["productType"] = kwargs.get("dataset", None) - # start date - if "startTimeFromAscendingNode" not in kwargs: - kwargs["startTimeFromAscendingNode"] = ( - getattr(self.config, "product_type_config", {}).get( - "missionStartDate", None - ) - or DEFAULT_MISSION_START_DATE - ) - # end date - if "completionTimeFromAscendingNode" not in kwargs: - kwargs["completionTimeFromAscendingNode"] = getattr( - self.config, "product_type_config", {} - ).get("missionEndDate", None) or datetime.utcnow().isoformat( - timespec="seconds" - ) - # geometry - if not kwargs.get("geometry", None): - kwargs["geometry"] = [ - -180, - -90, - 180, - 90, - ] - kwargs["geometry"] = 
get_geometry_from_various(geometry=kwargs["geometry"]) + self._preprocess_search_params(kwargs) return BuildPostSearchResult.query( self, items_per_page=items_per_page, page=page, count=count, **kwargs ) - def _get_cds_client(self, **auth_dict): + def _get_cds_client(self, **auth_dict: Any) -> cdsapi.Client: """Returns cdsapi client.""" # eodag logging info eodag_verbosity = get_logging_verbose() @@ -131,7 +289,7 @@ def _get_cds_client(self, **auth_dict): return client - def authenticate(self): + def authenticate(self) -> Dict[str, str]: """Returns information needed for auth :returns: {key, url} dictionary @@ -144,9 +302,9 @@ def authenticate(self): api_key = getattr(self.config, "credentials", {}).get("password", None) url = getattr(self.config, "api_endpoint", None) if not all([uid, api_key, url]): - raise AuthenticationError("Missing authentication informations") + raise AuthenticationError("Missing authentication information") - auth_dict = {"key": f"{uid}:{api_key}", "url": url} + auth_dict: Dict[str, str] = {"key": f"{uid}:{api_key}", "url": url} client = self._get_cds_client(**auth_dict) try: @@ -161,24 +319,8 @@ def authenticate(self): return auth_dict - def download(self, product, auth=None, progress_callback=None, **kwargs): - """Download data from providers using CDS API""" - - product_extension = CDS_KNOWN_FORMATS[product.properties.get("format", "grib")] - - # Prepare download - fs_path, record_filename = self._prepare_download( - product, - progress_callback=progress_callback, - outputs_extension=f".{product_extension}", - **kwargs, - ) - - if not fs_path or not record_filename: - if fs_path: - product.location = path_to_uri(fs_path) - return fs_path - + def _prepare_download_link(self, product): + """Update product download link with http url obtained from cds api""" # get download request dict from product.location/downloadLink url query string # separate url & parameters query_str = "".join(urlsplit(product.location).fragment.split("?", 1)[1:]) @@ -206,36 +348,84 @@ def download(self, product, auth=None, progress_callback=None, **kwargs): ) try: client = self._get_cds_client(**auth_dict) - client.retrieve(name=dataset_name, request=download_request, target=fs_path) + result = client._api( + "%s/resources/%s" % (client.url, dataset_name), download_request, "POST" + ) + # update product download link through a new asset + product.assets["data"] = Asset(product, "data", {"href": result.location}) except Exception as e: logger.error(e) raise DownloadError(e) - with open(record_filename, "w") as fh: - fh.write(product.properties["downloadLink"]) - logger.debug("Download recorded in %s", record_filename) - - # do not try to extract or delete grib/netcdf - kwargs["extract"] = False + def download( + self, + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Any, + ) -> Optional[str]: + """Download data from providers using CDS API""" + product_format = product.properties.get("format", "grib") + product_extension = CDS_KNOWN_FORMATS.get(product_format, product_format) - product_path = self._finalize( - fs_path, + # Prepare download + fs_path, record_filename = self._prepare_download( + product, progress_callback=progress_callback, outputs_extension=f".{product_extension}", **kwargs, ) - product.location = path_to_uri(product_path) - return product_path + + if not fs_path or not record_filename: + if fs_path: + product.location 
= path_to_uri(fs_path) + return fs_path + + self._prepare_download_link(product) + + try: + return super(CdsApi, self).download( + product, + progress_callback=progress_callback, + **kwargs, + ) + except Exception as e: + logger.error(e) + raise DownloadError(e) + + def _stream_download_dict( + self, + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> Dict[str, Any]: + """Returns a dictionary of :class:`~fastapi.responses.StreamingResponse` keyword arguments. + It contains a generator of streamed download chunks and the response headers.""" + + self._prepare_download_link(product) + return super(CdsApi, self)._stream_download_dict( + product, + auth=auth, + progress_callback=progress_callback, + wait=wait, + timeout=timeout, + **kwargs, + ) def download_all( self, - products, - auth=None, - downloaded_callback=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, + products: SearchResult, + auth: Optional[PluginConfig] = None, + downloaded_callback: Optional[DownloadedCallback] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Any, ): """ Download all using parent (base plugin) method @@ -249,3 +439,102 @@ def download_all( timeout=timeout, **kwargs, ) + + def discover_queryables( + self, **kwargs: Any + ) -> Optional[Dict[str, Annotated[Any, FieldInfo]]]: + """Fetch queryables list from provider using `discover_queryables` conf + + :param kwargs: additional filters for queryables (`productType` and other search + arguments) + :type kwargs: Any + :returns: fetched queryable parameters dict + :rtype: Optional[Dict[str, Annotated[Any, FieldInfo]]] + """ + constraints_file_url = getattr(self.config, "constraints_file_url", "") + if not constraints_file_url: + return {} + product_type = kwargs.pop("productType", None) + if not product_type: + return {} + + provider_product_type = self.config.products.get(product_type, {}).get( + "dataset", None + ) + user_provider_product_type = kwargs.pop("dataset", None) + if ( + user_provider_product_type + and user_provider_product_type != provider_product_type + ): + raise ValidationError( + f"Cannot change dataset from {provider_product_type} to {user_provider_product_type}" + ) + + non_empty_kwargs = {k: v for k, v in kwargs.items() if v} + + if "{" in constraints_file_url: + constraints_file_url = constraints_file_url.format( + dataset=provider_product_type + ) + constraints = fetch_constraints(constraints_file_url, self) + if not constraints: + return {} + + # defaults + default_queryables = self.get_defaults_as_queryables(product_type) + # remove dataset from queryables + default_queryables.pop("dataset", None) + + constraint_params: Dict[str, Dict[str, Set[Any]]] = {} + if len(kwargs) == 0: + # get values from constraints without additional filters + for constraint in constraints: + for key in constraint.keys(): + if key in constraint_params: + constraint_params[key]["enum"].update(constraint[key]) + else: + constraint_params[key] = {} + constraint_params[key]["enum"] = set(constraint[key]) + else: + # get values from constraints with additional filters + constraints_input_params = {k: v for k, v in non_empty_kwargs.items()} + constraint_params = get_constraint_queryables_with_additional_params(
constraints, constraints_input_params, self, product_type + ) + # query params that are not in constraints but might be default queryables + if len(constraint_params) == 1 and "not_available" in constraint_params: + not_queryables = set() + for constraint_param in constraint_params["not_available"]["enum"]: + param = CommonQueryables.get_queryable_from_alias(constraint_param) + if param in dict( + CommonQueryables.model_fields, **default_queryables + ): + non_empty_kwargs.pop(constraint_param) + else: + not_queryables.add(constraint_param) + if not_queryables: + raise ValidationError( + f"parameter(s) {str(not_queryables)} not queryable" + ) + else: + # get constraints again without common queryables + constraint_params = ( + get_constraint_queryables_with_additional_params( + constraints, non_empty_kwargs, self, product_type + ) + ) + + field_definitions = dict() + for json_param, json_mtd in constraint_params.items(): + param = ( + get_queryable_from_provider(json_param, self.config.metadata_mapping) + or json_param + ) + default = kwargs.get(param, None) + annotated_def = json_field_definition_to_python( + json_mtd, default_value=default, required=True + ) + field_definitions[param] = get_args(annotated_def) + + python_queryables = create_model("m", **field_definitions).model_fields + return dict(default_queryables, **model_fields_to_annotated(python_queryables)) diff --git a/eodag/plugins/apis/ecmwf.py b/eodag/plugins/apis/ecmwf.py index 452043081..acc9146d5 100644 --- a/eodag/plugins/apis/ecmwf.py +++ b/eodag/plugins/apis/ecmwf.py @@ -15,26 +15,39 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import logging from datetime import datetime +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple import geojson from ecmwfapi import ECMWFDataServer, ECMWFService from ecmwfapi.api import APIException, Connection, get_apikey_values from eodag.plugins.apis.base import Api -from eodag.plugins.download.base import ( - DEFAULT_DOWNLOAD_TIMEOUT, - DEFAULT_DOWNLOAD_WAIT, - Download, -) +from eodag.plugins.download.base import Download from eodag.plugins.search.base import Search from eodag.plugins.search.build_search_result import BuildPostSearchResult from eodag.rest.stac import DEFAULT_MISSION_START_DATE -from eodag.utils import get_geometry_from_various, path_to_uri, urlsplit +from eodag.utils import ( + DEFAULT_DOWNLOAD_TIMEOUT, + DEFAULT_DOWNLOAD_WAIT, + DEFAULT_ITEMS_PER_PAGE, + DEFAULT_PAGE, + get_geometry_from_various, + path_to_uri, + urlsplit, +) from eodag.utils.exceptions import AuthenticationError, DownloadError from eodag.utils.logging import get_logging_verbose +if TYPE_CHECKING: + from eodag.api.product import EOProduct + from eodag.api.search_result import SearchResult + from eodag.config import PluginConfig + from eodag.utils import DownloadedCallback, ProgressCallback + logger = logging.getLogger("eodag.apis.ecmwf") ECMWF_MARS_KNOWN_FORMATS = {"grib": "grib", "netcdf": "nc"} @@ -56,7 +69,7 @@ class EcmwfApi(Download, Api, BuildPostSearchResult): query build methods. 
""" - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: # init self.config.metadata_mapping using Search Base plugin Search.__init__(self, provider, config) @@ -65,13 +78,18 @@ def __init__(self, provider, config): # needed for compatibility self.config.__dict__.setdefault("pagination", {"next_page_query_obj": "{{}}"}) - def do_search(self, *args, **kwargs): + def do_search(self, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]: """Should perform the actual search request.""" return [{}] def query( - self, product_type=None, items_per_page=None, page=None, count=True, **kwargs - ): + self, + product_type: Optional[str] = None, + items_per_page: int = DEFAULT_ITEMS_PER_PAGE, + page: int = DEFAULT_PAGE, + count: bool = True, + **kwargs: Any, + ) -> Tuple[List[EOProduct], Optional[int]]: """Build ready-to-download SearchResult""" # check productType, dates, geometry, use defaults if not specified @@ -97,21 +115,16 @@ def query( ).get("missionEndDate", None) or datetime.utcnow().isoformat( timespec="seconds" ) + # geometry - if not kwargs.get("geometry", None): - kwargs["geometry"] = [ - -180, - -90, - 180, - 90, - ] - kwargs["geometry"] = get_geometry_from_various(geometry=kwargs["geometry"]) + if "geometry" in kwargs: + kwargs["geometry"] = get_geometry_from_various(geometry=kwargs["geometry"]) return BuildPostSearchResult.query( self, items_per_page=items_per_page, page=page, count=count, **kwargs ) - def authenticate(self): + def authenticate(self) -> Dict[str, Optional[str]]: """Check credentials and returns information needed for auth :returns: {key, url, email} dictionary @@ -140,12 +153,18 @@ def authenticate(self): return {"key": key, "url": url, "email": email} - def download(self, product, auth=None, progress_callback=None, **kwargs): + def download( + self, + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Any, + ) -> Optional[str]: """Download data from ECMWF MARS""" - - product_extension = ECMWF_MARS_KNOWN_FORMATS[ - product.properties.get("format", "grib") - ] + product_format = product.properties.get("format", "grib") + product_extension = ECMWF_MARS_KNOWN_FORMATS.get(product_format, product_format) # Prepare download fs_path, record_filename = self._prepare_download( @@ -217,14 +236,14 @@ def download(self, product, auth=None, progress_callback=None, **kwargs): def download_all( self, - products, - auth=None, - downloaded_callback=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + products: SearchResult, + auth: Optional[PluginConfig] = None, + downloaded_callback: Optional[DownloadedCallback] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Any, + ) -> List[str]: """ Download all using parent (base plugin) method """ diff --git a/eodag/plugins/apis/usgs.py b/eodag/plugins/apis/usgs.py index 83b878514..b306f2e34 100644 --- a/eodag/plugins/apis/usgs.py +++ b/eodag/plugins/apis/usgs.py @@ -15,10 +15,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import logging import shutil import tarfile import zipfile +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, cast import requests from jsonpath_ng.ext import parse @@ -32,12 +35,12 @@ properties_from_json, ) from eodag.plugins.apis.base import Api -from eodag.plugins.download.base import ( +from eodag.plugins.download.base import Download +from eodag.utils import ( DEFAULT_DOWNLOAD_TIMEOUT, DEFAULT_DOWNLOAD_WAIT, - Download, -) -from eodag.utils import ( + DEFAULT_ITEMS_PER_PAGE, + DEFAULT_PAGE, GENERIC_PRODUCT_TYPE, USER_AGENT, ProgressCallback, @@ -51,13 +54,18 @@ RequestError, ) +if TYPE_CHECKING: + from eodag.api.search_result import SearchResult + from eodag.config import PluginConfig + from eodag.utils import DownloadedCallback + logger = logging.getLogger("eodag.apis.usgs") class UsgsApi(Download, Api): """A plugin that enables to query and download data on the USGS catalogues""" - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(UsgsApi, self).__init__(provider, config) # Same method as in base.py, Search.__init__() @@ -73,7 +81,7 @@ def __init__(self, provider, config): result_type=getattr(self.config, "result_type", "json"), ) - def authenticate(self): + def authenticate(self) -> None: """Login to usgs api :raises: :class:`~eodag.utils.exceptions.AuthenticationError` @@ -96,8 +104,13 @@ def authenticate(self): ) from None def query( - self, product_type=None, items_per_page=None, page=None, count=True, **kwargs - ): + self, + product_type: Optional[str] = None, + items_per_page: int = DEFAULT_ITEMS_PER_PAGE, + page: int = DEFAULT_PAGE, + count: bool = True, + **kwargs: Any, + ) -> Tuple[List[EOProduct], Optional[int]]: """Search for data on USGS catalogues""" product_type = kwargs.get("productType") if product_type is None: @@ -107,14 +120,14 @@ def query( self.authenticate() - product_type_def_params = self.config.products.get( - product_type, self.config.products[GENERIC_PRODUCT_TYPE] + product_type_def_params = self.config.products.get( # type: ignore + product_type, self.config.products[GENERIC_PRODUCT_TYPE] # type: ignore ) usgs_dataset = format_dict_items(product_type_def_params, **kwargs)["dataset"] start_date = kwargs.pop("startTimeFromAscendingNode", None) end_date = kwargs.pop("completionTimeFromAscendingNode", None) geom = kwargs.pop("geometry", None) - footprint = {} + footprint: Dict[str, str] = {} if hasattr(geom, "bounds"): ( footprint["lonmin"], @@ -125,7 +138,7 @@ def query( else: footprint = geom - final = [] + final: List[EOProduct] = [] if footprint and len(footprint.keys()) == 4: # a rectangle (or bbox) lower_left = { "longitude": footprint["lonmin"], @@ -182,7 +195,6 @@ def query( results["data"]["results"] = list(results_by_entity_id.values()) for result in results["data"]["results"]: - result["productType"] = usgs_dataset product_properties = properties_from_json( @@ -208,7 +220,7 @@ def query( if final: # parse total_results - path_parsed = parse(self.config.pagination["total_items_nb_key_path"]) + path_parsed = parse(self.config.pagination["total_items_nb_key_path"]) # type: ignore total_results = path_parsed.find(results["data"])[0].value else: total_results = 0 @@ -217,13 +229,13 @@ def query( def download( self, - product, - auth=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: 
Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Any, + ) -> Optional[str]: """Download data from USGS catalogues""" if progress_callback is None: @@ -232,9 +244,12 @@ def download( ) progress_callback = ProgressCallback(disable=True) - outputs_extension = self.config.products.get( - product.product_type, self.config.products[GENERIC_PRODUCT_TYPE] - ).get("outputs_extension", ".tar.gz") + outputs_extension = cast( + str, + self.config.products.get( # type: ignore + product.product_type, self.config.products[GENERIC_PRODUCT_TYPE] # type: ignore + ).get("outputs_extension", ".tar.gz"), + ) fs_path, record_filename = self._prepare_download( product, @@ -254,21 +269,27 @@ def download( f"No USGS products found for {product.properties['id']}" ) - download_request = api.download_request( + download_request_results = api.download_request( product.properties["productType"], product.properties["entityId"], product.properties["productId"], ) - req_urls = [] + req_urls: List[str] = [] try: - if len(download_request["data"]["preparingDownloads"]) > 0: + if len(download_request_results["data"]["preparingDownloads"]) > 0: req_urls.extend( - [x["url"] for x in download_request["data"]["preparingDownloads"]] + [ + x["url"] + for x in download_request_results["data"]["preparingDownloads"] + ] ) else: req_urls.extend( - [x["url"] for x in download_request["data"]["availableDownloads"]] + [ + x["url"] + for x in download_request_results["data"]["availableDownloads"] + ] ) except KeyError as e: raise NotAvailableError( @@ -289,7 +310,12 @@ def download( logger.debug(f"Downloading {req_url}") @self._download_retry(product, wait, timeout) - def download_request(product, fs_path, progress_callback, **kwargs): + def download_request( + product: EOProduct, + fs_path: str, + progress_callback: ProgressCallback, + **kwargs: Any, + ) -> None: try: with requests.get( req_url, @@ -300,8 +326,10 @@ def download_request(product, fs_path, progress_callback, **kwargs): try: stream.raise_for_status() except RequestException as e: - if hasattr(e, "response") and hasattr(e.response, "content"): - error_message = f"{e.response.content} - {e}" + if e.response and hasattr(e.response, "content"): + error_message = ( + f"{e.response.content.decode('utf-8')} - {e}" + ) else: error_message = str(e) raise NotAvailableError(error_message) @@ -314,8 +342,8 @@ def download_request(product, fs_path, progress_callback, **kwargs): fhandle.write(chunk) progress_callback(len(chunk)) except requests.exceptions.Timeout as e: - if hasattr(e, "response") and hasattr(e.response, "content"): - error_message = f"{e.response.content} - {e}" + if e.response and hasattr(e.response, "content"): + error_message = f"{e.response.content.decode('utf-8')} - {e}" else: error_message = str(e) raise NotAvailableError(error_message) @@ -367,14 +395,14 @@ def download_request(product, fs_path, progress_callback, **kwargs): def download_all( self, - products, - auth=None, - downloaded_callback=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + products: SearchResult, + auth: Optional[PluginConfig] = None, + downloaded_callback: Optional[DownloadedCallback] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Any, + ) -> List[str]: """ Download all using parent (base plugin) method """ diff --git 
a/eodag/plugins/authentication/__init__.py b/eodag/plugins/authentication/__init__.py index f6117cb61..b84235b3e 100644 --- a/eodag/plugins/authentication/__init__.py +++ b/eodag/plugins/authentication/__init__.py @@ -16,12 +16,16 @@ # See the License for the specific language governing permissions and # limitations under the License. """EODAG authentication package""" +from __future__ import annotations + +from requests.auth import AuthBase + from eodag.plugins.authentication.base import Authentication class DummyAuth(Authentication): """Dummy authentication""" - def authenticate(self): + def authenticate(self) -> AuthBase: """authenticate""" - return self + return AuthBase() diff --git a/eodag/plugins/authentication/aws_auth.py b/eodag/plugins/authentication/aws_auth.py index 1ec6d9b0a..d1b9cf48e 100644 --- a/eodag/plugins/authentication/aws_auth.py +++ b/eodag/plugins/authentication/aws_auth.py @@ -15,9 +15,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + +from typing import TYPE_CHECKING, Dict from eodag.plugins.authentication.base import Authentication +if TYPE_CHECKING: + from botocore.client import S3 + + from eodag.config import PluginConfig + class AwsAuth(Authentication): """AWS authentication plugin @@ -31,13 +39,15 @@ class AwsAuth(Authentication): will be skipped if AWS credentials are filled in eodag conf """ - def __init__(self, provider, config): + s3_client: S3 + + def __init__(self, provider: str, config: PluginConfig) -> None: super(AwsAuth, self).__init__(provider, config) self.aws_access_key_id = None self.aws_secret_access_key = None self.profile_name = None - def authenticate(self): + def authenticate(self) -> Dict[str, str]: """Authenticate :returns: dict containing AWS/boto3 non-empty credentials diff --git a/eodag/plugins/authentication/base.py b/eodag/plugins/authentication/base.py index 44dabe7cb..4e87540e9 100644 --- a/eodag/plugins/authentication/base.py +++ b/eodag/plugins/authentication/base.py @@ -15,18 +15,25 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + +from typing import TYPE_CHECKING, Dict, Union + from eodag.plugins.base import PluginTopic from eodag.utils.exceptions import MisconfiguredError +if TYPE_CHECKING: + from requests.auth import AuthBase + class Authentication(PluginTopic): """Plugins authentication Base plugin""" - def authenticate(self): + def authenticate(self) -> Union[AuthBase, Dict[str, str]]: """Authenticate""" raise NotImplementedError - def validate_config_credentials(self): + def validate_config_credentials(self) -> None: """Validate configured credentials""" # No credentials dict in the config try: diff --git a/eodag/plugins/authentication/generic.py b/eodag/plugins/authentication/generic.py index d1cb01ad5..5d0271e24 100644 --- a/eodag/plugins/authentication/generic.py +++ b/eodag/plugins/authentication/generic.py @@ -15,28 +15,39 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
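An aside on the typed Authentication contract above: authenticate() now advertises Union[AuthBase, Dict[str, str]]. A minimal, hypothetical plugin satisfying the AuthBase branch, along the lines of the GenericAuth refactor that follows:

from requests.auth import AuthBase, HTTPBasicAuth

from eodag.plugins.authentication.base import Authentication


class MyBasicAuth(Authentication):  # hypothetical plugin, not part of this diff
    def authenticate(self) -> AuthBase:
        self.validate_config_credentials()
        return HTTPBasicAuth(
            self.config.credentials["username"],
            self.config.credentials["password"],
        )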
+from __future__ import annotations + +from typing import TYPE_CHECKING from requests.auth import HTTPBasicAuth, HTTPDigestAuth from eodag.plugins.authentication.base import Authentication +from eodag.utils.exceptions import MisconfiguredError + +if TYPE_CHECKING: + from requests.auth import AuthBase class GenericAuth(Authentication): """GenericAuth authentication plugin""" - def authenticate(self): + def authenticate(self) -> AuthBase: """Authenticate""" self.validate_config_credentials() - method = getattr(self.config, "method", None) - if not method: - method = "basic" - if method == "basic": - return HTTPBasicAuth( + method = getattr(self.config, "method", "basic") + + if method == "digest": + return HTTPDigestAuth( self.config.credentials["username"], self.config.credentials["password"], ) - if method == "digest": - return HTTPDigestAuth( + elif method == "basic": + return HTTPBasicAuth( self.config.credentials["username"], self.config.credentials["password"], ) + else: + raise MisconfiguredError( + f"Cannot authenticate with {self.provider}:", + f"method {method} is not supported. Must be one of digest or basic", + ) diff --git a/eodag/plugins/authentication/header.py b/eodag/plugins/authentication/header.py index 0913296d2..fd263f670 100644 --- a/eodag/plugins/authentication/header.py +++ b/eodag/plugins/authentication/header.py @@ -15,11 +15,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations -import requests.auth +from typing import TYPE_CHECKING, Dict + +from requests.auth import AuthBase from eodag.plugins.authentication import Authentication +if TYPE_CHECKING: + from requests import PreparedRequest + class HTTPHeaderAuth(Authentication): """HTTPHeaderAuth Authentication plugin. @@ -54,7 +60,7 @@ class HTTPHeaderAuth(Authentication): Expect an undefined behaviour if you use empty braces in header value strings. """ - def authenticate(self): + def authenticate(self) -> AuthBase: """Authenticate""" self.validate_config_credentials() headers = { @@ -64,13 +70,13 @@ def authenticate(self): return HeaderAuth(headers) -class HeaderAuth(requests.auth.AuthBase): +class HeaderAuth(AuthBase): """HeaderAuth custom authentication class to be used with requests module""" - def __init__(self, authentication_headers): + def __init__(self, authentication_headers: Dict[str, str]) -> None: self.auth_headers = authentication_headers - def __call__(self, request): + def __call__(self, request: PreparedRequest) -> PreparedRequest: """Perform the actual authentication""" request.headers.update(self.auth_headers) return request diff --git a/eodag/plugins/authentication/keycloak.py b/eodag/plugins/authentication/keycloak.py index 7ec370b9a..c0b77d45b 100644 --- a/eodag/plugins/authentication/keycloak.py +++ b/eodag/plugins/authentication/keycloak.py @@ -15,9 +15,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
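An aside on the HeaderAuth callable above: a sketch of how it plugs into requests. The endpoint and header value are placeholders; requests invokes auth(request) while preparing each request, so the configured headers are merged in.

import requests

from eodag.plugins.authentication.header import HeaderAuth

auth = HeaderAuth({"Authorization": "Bearer <token>"})  # placeholder value
response = requests.get("https://example.com/protected", auth=auth)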
+from __future__ import annotations + import logging from datetime import datetime -from typing import Optional +from typing import TYPE_CHECKING, Dict, Union import requests @@ -26,6 +28,12 @@ from eodag.utils import HTTP_REQ_TIMEOUT, USER_AGENT from eodag.utils.exceptions import AuthenticationError, MisconfiguredError +if TYPE_CHECKING: + from requests.auth import AuthBase + + from eodag.config import PluginConfig + + logger = logging.getLogger("eodag.auth.keycloak") @@ -73,14 +81,14 @@ class KeycloakOIDCPasswordAuth(Authentication): TOKEN_URL_TEMPLATE = "{auth_base_uri}/realms/{realm}/protocol/openid-connect/token" REQUIRED_PARAMS = ["auth_base_uri", "client_id", "client_secret", "token_provision"] # already retrieved token store, to be used if authenticate() fails (OTP use-case) - retrieved_token: Optional[str] = None - token_info: Optional[dict] = {} + retrieved_token: str = "" + token_info: Dict[str, Union[str, datetime]] = {} - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(KeycloakOIDCPasswordAuth, self).__init__(provider, config) self.session = requests.Session() - def validate_config_credentials(self): + def validate_config_credentials(self) -> None: """Validate configured credentials""" super(KeycloakOIDCPasswordAuth, self).validate_config_credentials() @@ -91,7 +99,7 @@ def validate_config_credentials(self): f"{self.provider}: {param}", ) - def authenticate(self): + def authenticate(self) -> AuthBase: """ Makes authentication request """ @@ -104,7 +112,7 @@ def authenticate(self): key=getattr(self.config, "token_qs_key", None), ) - def _get_access_token(self): + def _get_access_token(self) -> str: current_time = datetime.now() if ( not self.token_info @@ -141,7 +149,7 @@ def _get_access_token(self): logger.debug("using already retrieved access token") return self.retrieved_token - def _request_new_token(self): + def _request_new_token(self) -> Dict[str, str]: logger.debug("fetching new access token") req_data = { "client_id": self.config.client_id, @@ -197,7 +205,7 @@ def _request_new_token(self): ) return response.json() - def _get_token_with_refresh_token(self): + def _get_token_with_refresh_token(self) -> Dict[str, str]: logger.debug("fetching access token with refresh token") req_data = { "client_id": self.config.client_id, diff --git a/eodag/plugins/authentication/oauth.py b/eodag/plugins/authentication/oauth.py index e04ca8c50..2687796b1 100644 --- a/eodag/plugins/authentication/oauth.py +++ b/eodag/plugins/authentication/oauth.py @@ -15,21 +15,27 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
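`_request_new_token()` above posts a password grant to the endpoint built from `TOKEN_URL_TEMPLATE`. A hedged sketch of that request; realm, client and user credentials are placeholders, and the JSON keys follow the standard OpenID Connect token response:

import requests

# endpoint shaped like TOKEN_URL_TEMPLATE; all values are placeholders
token_url = "https://auth.example.com/realms/myrealm/protocol/openid-connect/token"
req_data = {
    "client_id": "my-client",
    "client_secret": "my-secret",
    "grant_type": "password",
    "username": "jane",
    "password": "s3cret",
}
resp = requests.post(token_url, data=req_data, timeout=5)
resp.raise_for_status()
token_info = resp.json()  # access_token, refresh_token, expires_in, ...
access_token = token_info["access_token"]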
+from __future__ import annotations + +from typing import TYPE_CHECKING, Dict from eodag.plugins.authentication.base import Authentication +if TYPE_CHECKING: + from eodag.config import PluginConfig + class OAuth(Authentication): """OAuth authentication plugin""" - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(OAuth, self).__init__(provider, config) self.access_key = None self.secret_key = None - def authenticate(self): + def authenticate(self) -> Dict[str, str]: """Authenticate""" self.validate_config_credentials() self.access_key = self.config.credentials["aws_access_key_id"] self.secret_key = self.config.credentials["aws_secret_access_key"] - return self.access_key, self.secret_key + return {"access_key": self.access_key, "secret_key": self.secret_key} diff --git a/eodag/plugins/authentication/openid_connect.py b/eodag/plugins/authentication/openid_connect.py index 0d985c40c..c6a7acf9e 100644 --- a/eodag/plugins/authentication/openid_connect.py +++ b/eodag/plugins/authentication/openid_connect.py @@ -15,10 +15,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations import re import string from random import SystemRandom +from typing import TYPE_CHECKING, Any, Dict, Optional import requests from lxml import etree @@ -26,7 +28,12 @@ from eodag.plugins.authentication import Authentication from eodag.utils import HTTP_REQ_TIMEOUT, USER_AGENT, parse_qs, repeatfunc, urlparse -from eodag.utils.exceptions import AuthenticationError, MisconfiguredError +from eodag.utils.exceptions import AuthenticationError, MisconfiguredError, TimeOutError + +if TYPE_CHECKING: + from requests import PreparedRequest, Response + + from eodag.config import PluginConfig class OIDCAuthorizationCodeFlowAuth(Authentication): @@ -122,7 +129,7 @@ class OIDCAuthorizationCodeFlowAuth(Authentication): RESPONSE_TYPE = "code" CONFIG_XPATH_REGEX = re.compile(r"^xpath\((?P.+)\)$") - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(OIDCAuthorizationCodeFlowAuth, self).__init__(provider, config) if getattr(self.config, "token_provision", None) not in ("qs", "header"): raise MisconfiguredError( @@ -137,7 +144,7 @@ def __init__(self, provider, config): ) self.session = requests.Session() - def authenticate(self): + def authenticate(self) -> AuthBase: """Authenticate""" state = self.compute_state() authentication_response = self.authenticate_user(state) @@ -147,6 +154,8 @@ def authenticate(self): exchange_url = user_consent_response.url try: token = self.exchange_code_for_token(exchange_url, state) + except requests.exceptions.Timeout as exc: + raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc except Exception: import traceback as tb @@ -161,7 +170,7 @@ def authenticate(self): key=getattr(self.config, "token_qs_key", None), ) - def authenticate_user(self, state): + def authenticate_user(self, state: str) -> Response: """Authenticate user""" self.validate_config_credentials() params = { @@ -201,7 +210,7 @@ def authenticate_user(self, state): auth_uri, data=login_data, headers=USER_AGENT, timeout=HTTP_REQ_TIMEOUT ) - def grant_user_consent(self, authentication_response): + def grant_user_consent(self, authentication_response: Response) -> Response: """Grant user consent""" user_consent_document = etree.HTML(authentication_response.text) 
user_consent_form = user_consent_document.xpath( @@ -219,7 +228,7 @@ def grant_user_consent(self, authentication_response): timeout=HTTP_REQ_TIMEOUT, ) - def exchange_code_for_token(self, authorized_url, state): + def exchange_code_for_token(self, authorized_url: str, state: str) -> str: """Get exchange code for token""" qs = parse_qs(urlparse(authorized_url).query) if qs["state"][0] != state: @@ -227,7 +236,7 @@ def exchange_code_for_token(self, authorized_url, state): "The state received in the authorized url does not match initially computed state" ) code = qs["code"][0] - token_exchange_data = { + token_exchange_data: Dict[str, Any] = { "redirect_uri": self.config.redirect_uri, "client_id": self.config.client_id, "code": code, @@ -259,11 +268,13 @@ def exchange_code_for_token(self, authorized_url, state): self.config.token_uri, headers=USER_AGENT, timeout=HTTP_REQ_TIMEOUT, - **post_request_kwargs + **post_request_kwargs, ) return r.json()[self.config.token_key] - def _constant_or_xpath_extracted(self, value, form_element): + def _constant_or_xpath_extracted( + self, value: str, form_element: Any + ) -> Optional[str]: match = self.CONFIG_XPATH_REGEX.match(value) if not match: return value @@ -272,9 +283,10 @@ def _constant_or_xpath_extracted(self, value, form_element): ) if len(value_from_xpath) == 1: return value_from_xpath[0] + return None @staticmethod - def compute_state(): + def compute_state() -> str: """Compute state""" rand = SystemRandom() return "".join( @@ -289,12 +301,12 @@ def compute_state(): class CodeAuthorizedAuth(AuthBase): """CodeAuthorizedAuth custom authentication class to be used with requests module""" - def __init__(self, token, where, key=None): + def __init__(self, token: str, where: str, key: Optional[str] = None) -> None: self.token = token self.where = where self.key = key - def __call__(self, request): + def __call__(self, request: PreparedRequest) -> PreparedRequest: """Perform the actual authentication""" if self.where == "qs": parts = urlparse(request.url) diff --git a/eodag/plugins/authentication/qsauth.py b/eodag/plugins/authentication/qsauth.py index 8fa17d8b2..724fdf64e 100644 --- a/eodag/plugins/authentication/qsauth.py +++ b/eodag/plugins/authentication/qsauth.py @@ -15,15 +15,21 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
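The last step of the flow, `exchange_code_for_token()`, checks the `state` echoed back in the redirect URL and posts the authorization code to the token endpoint. A standalone sketch under those assumptions (URLs, client id and token key are placeholders; eodag reads the token key from the provider config):

import requests
from urllib.parse import parse_qs, urlparse

# redirect received from the provider; state and code values are placeholders
authorized_url = "https://app.example.com/cb?state=abc123&code=XYZ"
expected_state = "abc123"

qs = parse_qs(urlparse(authorized_url).query)
if qs["state"][0] != expected_state:
    raise RuntimeError("state mismatch: possible CSRF")

token_exchange_data = {
    "redirect_uri": "https://app.example.com/cb",
    "client_id": "my-client",
    "code": qs["code"][0],
    "grant_type": "authorization_code",
}
r = requests.post("https://auth.example.com/token", data=token_exchange_data, timeout=5)
token = r.json()["access_token"]  # the actual token key is provider-specific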
+from __future__ import annotations +from typing import TYPE_CHECKING, Any from urllib.parse import parse_qs, urlparse -import requests.auth +import requests +from requests.auth import AuthBase from requests.exceptions import RequestException from eodag.plugins.authentication import Authentication from eodag.utils import HTTP_REQ_TIMEOUT, USER_AGENT -from eodag.utils.exceptions import AuthenticationError +from eodag.utils.exceptions import AuthenticationError, TimeOutError + +if TYPE_CHECKING: + from requests import PreparedRequest class HttpQueryStringAuth(Authentication): @@ -54,7 +60,7 @@ class HttpQueryStringAuth(Authentication): :meth:`~eodag.plugins.authentication.query_string.HttpQueryStringAuth.authenticate` """ - def authenticate(self): + def authenticate(self) -> AuthBase: """Authenticate""" self.validate_config_credentials() @@ -70,24 +76,26 @@ def authenticate(self): auth=auth, ) response.raise_for_status() + except requests.exceptions.Timeout as exc: + raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc except RequestException as e: raise AuthenticationError(f"Could not authenticate: {str(e)}") return auth -class QueryStringAuth(requests.auth.AuthBase): +class QueryStringAuth(AuthBase): """QueryStringAuth custom authentication class to be used with requests module""" - def __init__(self, **parse_args): + def __init__(self, **parse_args: Any) -> None: self.parse_args = parse_args - def __call__(self, request): + def __call__(self, request: PreparedRequest) -> PreparedRequest: """Perform the actual authentication""" - parts = urlparse(request.url) + parts = urlparse(str(request.url)) query_dict = parse_qs(parts.query) query_dict.update(self.parse_args) - url_without_args = parts._replace(query=None).geturl() + url_without_args = parts._replace(query="").geturl() request.prepare_url(url_without_args, query_dict) return request diff --git a/eodag/plugins/authentication/sas_auth.py b/eodag/plugins/authentication/sas_auth.py index 3187d639e..c0d0e61b0 100644 --- a/eodag/plugins/authentication/sas_auth.py +++ b/eodag/plugins/authentication/sas_auth.py @@ -15,15 +15,22 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
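`QueryStringAuth.__call__()` relies on `PreparedRequest.prepare_url()`, which re-encodes extra parameters and merges them into the query string already present on the URL; the switch from `query=None` to `query=""` only satisfies the stricter typing. A minimal sketch of the same trick (the parameter names are illustrative):

from requests import PreparedRequest
from requests.auth import AuthBase


class ApiKeyQueryAuth(AuthBase):
    # Hypothetical auth that appends e.g. ?apikey=... to every request
    def __init__(self, **params: str) -> None:
        self.params = params

    def __call__(self, request: PreparedRequest) -> PreparedRequest:
        # prepare_url() encodes self.params and merges them into the
        # existing query string of the URL
        request.prepare_url(str(request.url), self.params)
        return request


# usage: requests.get("https://example.com/data?foo=1", auth=ApiKeyQueryAuth(apikey="k"))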
+from __future__ import annotations + import logging from json import JSONDecodeError +from typing import TYPE_CHECKING, Dict, Optional import requests from requests.auth import AuthBase from eodag.plugins.authentication.base import Authentication from eodag.utils import HTTP_REQ_TIMEOUT, USER_AGENT, deepcopy, format_dict_items -from eodag.utils.exceptions import AuthenticationError +from eodag.utils.exceptions import AuthenticationError, TimeOutError + +if TYPE_CHECKING: + from requests import PreparedRequest + logger = logging.getLogger("eodag.auth.sas_auth") @@ -31,13 +38,18 @@ class RequestsSASAuth(AuthBase): """A custom authentication class to be used with requests module""" - def __init__(self, auth_uri, signed_url_key, headers=None): + def __init__( + self, + auth_uri: str, + signed_url_key: str, + headers: Optional[Dict[str, str]] = None, + ) -> None: self.auth_uri = auth_uri self.signed_url_key = signed_url_key self.headers = headers - self.signed_urls = {} + self.signed_urls: Dict[str, str] = {} - def __call__(self, request): + def __call__(self, request: PreparedRequest) -> PreparedRequest: """Perform the actual authentication""" # update headers @@ -55,6 +67,8 @@ def __call__(self, request): ) response.raise_for_status() signed_url = response.json().get(self.signed_url_key) + except requests.exceptions.Timeout as exc: + raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc except (requests.RequestException, JSONDecodeError, KeyError) as e: raise AuthenticationError(f"Could not get signed url: {str(e)}") else: @@ -68,12 +82,12 @@ def __call__(self, request): class SASAuth(Authentication): """SASAuth authentication plugin""" - def validate_config_credentials(self): + def validate_config_credentials(self) -> None: """Validate configured credentials""" # credentials are optional pass - def authenticate(self): + def authenticate(self) -> AuthBase: """Authenticate""" self.validate_config_credentials() diff --git a/eodag/plugins/authentication/token.py b/eodag/plugins/authentication/token.py index 11e9e2aa3..12250f448 100644 --- a/eodag/plugins/authentication/token.py +++ b/eodag/plugins/authentication/token.py @@ -15,16 +15,27 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
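`RequestsSASAuth` above asks a signing endpoint for a signed URL, caches it per original URL, and substitutes it on the outgoing request. A hedged sketch of that flow; the endpoint shape and the JSON key (configured through `signed_url_key` in the plugin) are placeholders:

from typing import Dict

import requests

signed_urls: Dict[str, str] = {}  # per-URL cache, as in RequestsSASAuth


def sign(url: str) -> str:
    # hypothetical signing endpoint and response key
    if url not in signed_urls:
        resp = requests.get(
            "https://signing.example.com/sign", params={"href": url}, timeout=5
        )
        resp.raise_for_status()
        signed_urls[url] = resp.json()["href"]
    return signed_urls[url]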
+from __future__ import annotations + import logging +from typing import TYPE_CHECKING, Any, Dict, Optional +from urllib.parse import parse_qs, urlencode, urlparse, urlunparse import requests from requests import RequestException from requests.adapters import HTTPAdapter +from requests.auth import AuthBase from urllib3 import Retry from eodag.plugins.authentication.base import Authentication -from eodag.utils import HTTP_REQ_TIMEOUT, USER_AGENT, RequestsTokenAuth -from eodag.utils.exceptions import AuthenticationError, MisconfiguredError +from eodag.utils import HTTP_REQ_TIMEOUT, USER_AGENT +from eodag.utils.exceptions import AuthenticationError, MisconfiguredError, TimeOutError + +if TYPE_CHECKING: + from requests import PreparedRequest + + from eodag.config import PluginConfig + logger = logging.getLogger("eodag.authentication.token") @@ -32,11 +43,11 @@ class TokenAuth(Authentication): """TokenAuth authentication plugin""" - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(TokenAuth, self).__init__(provider, config) self.token = "" - def validate_config_credentials(self): + def validate_config_credentials(self) -> None: """Validate configured credentials""" super(TokenAuth, self).validate_config_credentials() try: @@ -54,12 +65,12 @@ def validate_config_credentials(self): f"Missing credentials inputs for provider {self.provider}: {e}" ) - def authenticate(self): + def authenticate(self) -> AuthBase: """Authenticate""" self.validate_config_credentials() # append headers to req if some are specified in config - req_kwargs = ( + req_kwargs: Dict[str, Any] = ( {"headers": dict(self.config.headers, **USER_AGENT)} if hasattr(self.config, "headers") else {"headers": USER_AGENT} @@ -87,6 +98,8 @@ def authenticate(self): **req_kwargs, ) response.raise_for_status() + except requests.exceptions.Timeout as exc: + raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc except RequestException as e: response_text = getattr(e.response, "text", "").strip() raise AuthenticationError( @@ -102,7 +115,7 @@ def authenticate(self): # Return auth class set with obtained token return RequestsTokenAuth(token, "header", headers=headers) - def _get_headers(self, token): + def _get_headers(self, token: str) -> Dict[str, str]: headers = self.config.headers if "Authorization" in headers and "$" in headers["Authorization"]: headers["Authorization"] = headers["Authorization"].replace("$token", token) @@ -115,3 +128,42 @@ def _get_headers(self, token): self.token, token ) return headers + + +class RequestsTokenAuth(AuthBase): + """A custom authentication class to be used with requests module""" + + def __init__( + self, + token: str, + where: str, + qs_key: Optional[str] = None, + headers: Optional[Dict[str, str]] = None, + ) -> None: + self.token = token + self.where = where + self.qs_key = qs_key + self.headers = headers + + def __call__(self, request: PreparedRequest) -> PreparedRequest: + """Perform the actual authentication""" + if self.headers and isinstance(self.headers, dict): + for k, v in self.headers.items(): + request.headers[k] = v + if self.where == "qs": + parts = urlparse(str(request.url)) + qs = parse_qs(parts.query) + qs[self.qs_key] = self.token # type: ignore + request.url = urlunparse( + ( + parts.scheme, + parts.netloc, + parts.path, + parts.params, + urlencode(qs), + parts.fragment, + ) + ) + elif self.where == "header": + request.headers["Authorization"] = "Bearer {}".format(self.token) + return request diff --git 
a/eodag/plugins/base.py b/eodag/plugins/base.py index 279a90715..e3ce4edcc 100644 --- a/eodag/plugins/base.py +++ b/eodag/plugins/base.py @@ -15,30 +15,39 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, List, Tuple + from eodag.utils.exceptions import PluginNotFoundError +if TYPE_CHECKING: + from eodag.config import PluginConfig + class EODAGPluginMount(type): """Plugin mount""" - def __init__(cls, name, bases, attrs): + def __init__( + cls, name: str, bases: Tuple[type, ...], attrs: Dict[str, Any] + ) -> None: if not hasattr(cls, "plugins"): # This branch only executes when processing the mount point itself. # So, since this is a new plugin type, not an implementation, this # class shouldn't be registered as a plugin. Instead, it sets up a # list where plugins can be registered later. - cls.plugins = [] + cls.plugins: List[EODAGPluginMount] = [] else: # This must be a plugin implementation, which should be registered. # Simply appending it to the list is all that's needed to keep # track of it later. cls.plugins.append(cls) - def get_plugins(cls, *args, **kwargs): + def get_plugins(cls, *args: Any, **kwargs: Any) -> List[EODAGPluginMount]: """Get plugins""" return [plugin(*args, **kwargs) for plugin in cls.plugins] - def get_plugin_by_class_name(cls, name): + def get_plugin_by_class_name(cls, name: str) -> EODAGPluginMount: """Get plugin by class_name""" for plugin in cls.plugins: if name == plugin.__name__: @@ -51,11 +60,11 @@ def get_plugin_by_class_name(cls, name): class PluginTopic(metaclass=EODAGPluginMount): """Base of all plugin topics in eodag""" - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: self.config = config self.provider = provider - def __repr__(self): + def __repr__(self) -> str: return "{}(provider={}, priority={}, topic={})".format( self.__class__.__name__, self.provider, diff --git a/eodag/plugins/crunch/base.py b/eodag/plugins/crunch/base.py index 93e6b8476..2d74e10a4 100644 --- a/eodag/plugins/crunch/base.py +++ b/eodag/plugins/crunch/base.py @@ -14,17 +14,25 @@ # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and -# limitations under the License. +# limitations under the License +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Dict, List, Optional from eodag.plugins.base import PluginTopic +if TYPE_CHECKING: + from eodag.api.product import EOProduct + class Crunch(PluginTopic): """Base cruncher""" - def __init__(self, config): + def __init__(self, config: Optional[Dict[str, Any]]) -> None: self.config = config if config is not None else {} - def proceed(self, product_list, **search_params): + def proceed( + self, products: List[EOProduct], **search_params: Any + ) -> List[EOProduct]: """Implementation of how the results must be crunched""" raise NotImplementedError diff --git a/eodag/plugins/crunch/filter_date.py b/eodag/plugins/crunch/filter_date.py index 0f0a625d9..303361216 100644 --- a/eodag/plugins/crunch/filter_date.py +++ b/eodag/plugins/crunch/filter_date.py @@ -15,14 +15,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations import datetime import logging import time +from datetime import datetime as dt +from typing import TYPE_CHECKING, Any, Dict, List import dateutil.parser from dateutil import tz +if TYPE_CHECKING: + from eodag.api.product import EOProduct + from eodag.plugins.crunch.base import Crunch logger = logging.getLogger("eodag.crunch.date") @@ -39,8 +45,10 @@ class FilterDate(Crunch): :type config: dict """ + config: Dict[str, str] + @staticmethod - def sort_product_by_start_date(product): + def sort_product_by_start_date(product: EOProduct) -> dt: """Get product start date""" start_date = product.properties.get("startTimeFromAscendingNode") if not start_date: @@ -49,7 +57,9 @@ def sort_product_by_start_date(product): start_date = datetime.datetime(*epoch).isoformat() return dateutil.parser.parse(start_date) - def proceed(self, products, **search_params): + def proceed( + self, products: List[EOProduct], **search_params: Any + ) -> List[EOProduct]: """Execute crunch: Filter products between start and end dates. :param products: A list of products resulting from a search @@ -82,7 +92,7 @@ def proceed(self, products, **search_params): if not filter_start and not filter_end: return products - filtered = [] + filtered: List[EOProduct] = [] for product in products: # product start date diff --git a/eodag/plugins/crunch/filter_latest_intersect.py b/eodag/plugins/crunch/filter_latest_intersect.py index a5c479d8b..06456a203 100644 --- a/eodag/plugins/crunch/filter_latest_intersect.py +++ b/eodag/plugins/crunch/filter_latest_intersect.py @@ -15,16 +15,24 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations import datetime import logging import time +from typing import TYPE_CHECKING, Any, Dict, List, Union import dateutil.parser from shapely import geometry +from shapely.geometry.base import BaseGeometry from eodag.plugins.crunch.base import Crunch +if TYPE_CHECKING: + from datetime import datetime as dt + + from eodag.api.product import EOProduct + logger = logging.getLogger("eodag.crunch.latest_intersect") @@ -35,7 +43,7 @@ class FilterLatestIntersect(Crunch): """ @staticmethod - def sort_product_by_start_date(product): + def sort_product_by_start_date(product: EOProduct) -> dt: """Get product start date""" start_date = product.properties.get("startTimeFromAscendingNode") if not start_date: @@ -44,7 +52,9 @@ def sort_product_by_start_date(product): start_date = datetime.datetime(*epoch).isoformat() return dateutil.parser.parse(start_date) - def proceed(self, products, **search_params): + def proceed( + self, products: List[EOProduct], **search_params: Any + ) -> List[EOProduct]: """Execute crunch: Filter latest products (the ones with the highest start date) that intersect search extent. 
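`FilterDate.proceed()` compares timezone-aware datetimes, pinning naive timestamps to UTC before comparing. A standalone sketch of that filtering with a stand-in product type instead of `EOProduct`:

from dataclasses import dataclass
from typing import List, Optional

import dateutil.parser
from dateutil import tz


@dataclass
class FakeProduct:
    start: str  # stand-in for properties["startTimeFromAscendingNode"]


def to_utc(value: str):
    d = dateutil.parser.parse(value)
    return d if d.tzinfo else d.replace(tzinfo=tz.UTC)


def filter_date(products: List[FakeProduct], start: Optional[str], end: Optional[str]):
    f_start = to_utc(start) if start else None
    f_end = to_utc(end) if end else None
    return [
        p
        for p in products
        if (f_start is None or to_utc(p.start) >= f_start)
        and (f_end is None or to_utc(p.start) <= f_end)
    ]


kept = filter_date([FakeProduct("2024-01-15T10:00:00Z")], "2024-01-01", "2024-02-01")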
@@ -61,9 +71,11 @@ def proceed(self, products, **search_params): return [] # Warning: May crash if startTimeFromAscendingNode is not in the appropriate format products.sort(key=self.sort_product_by_start_date, reverse=True) - filtered = [] + filtered: List[EOProduct] = [] add_to_filtered = filtered.append - footprint = search_params.get("geometry") or search_params.get("geom") + footprint: Union[Dict[str, Any], BaseGeometry, Any] = search_params.get( + "geometry" + ) or search_params.get("geom") if not footprint: logger.warning( "geometry not found in cruncher arguments, filtering disabled." @@ -76,7 +88,7 @@ def proceed(self, products, **search_params): footprint["lonmax"], footprint["latmax"], ) - elif not isinstance(footprint, geometry.base.BaseGeometry): + elif not isinstance(footprint, BaseGeometry): logger.warning( "geometry found in cruncher arguments did not match the expected format." ) diff --git a/eodag/plugins/crunch/filter_latest_tpl_name.py b/eodag/plugins/crunch/filter_latest_tpl_name.py index 96a6677be..6fdb781f2 100644 --- a/eodag/plugins/crunch/filter_latest_tpl_name.py +++ b/eodag/plugins/crunch/filter_latest_tpl_name.py @@ -15,13 +15,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations import logging import re +from typing import TYPE_CHECKING, Any, Dict, List, Match, Optional, cast from eodag.plugins.crunch.base import Crunch from eodag.utils.exceptions import ValidationError +if TYPE_CHECKING: + from eodag.api.product import EOProduct logger = logging.getLogger("eodag.crunch.latest_tpl_name") @@ -39,7 +43,7 @@ class FilterLatestByName(Crunch): NAME_PATTERN_CONSTRAINT = re.compile(r"\(\?P\\d\{6\}\)") - def __init__(self, config): + def __init__(self, config: Dict[str, Any]) -> None: super(FilterLatestByName, self).__init__(config) name_pattern = config.pop("name_pattern") if not self.NAME_PATTERN_CONSTRAINT.search(name_pattern): @@ -50,7 +54,9 @@ def __init__(self, config): ) self.name_pattern = re.compile(name_pattern) - def proceed(self, product_list, **search_params): + def proceed( + self, products: List[EOProduct], **search_params: Any + ) -> List[EOProduct]: """Execute crunch: Filter Search results to get only the latest product, based on the name of the product :param products: A list of products resulting from a search @@ -59,12 +65,15 @@ def proceed(self, product_list, **search_params): :rtype: list(:class:`~eodag.api.product._product.EOProduct`) """ logger.debug("Starting products filtering") - processed = [] - filtered = [] - for product in product_list: - match = self.name_pattern.match(product.properties["title"]) + processed: List[str] = [] + filtered: List[EOProduct] = [] + for product in products: + match = cast( + Optional[Match[Any]], + self.name_pattern.match(product.properties["title"]), + ) if match: - tileid = match.group("tileid") + tileid: str = match.group("tileid") if tileid not in processed: logger.debug( "Latest product found for tileid=%s: date=%s", diff --git a/eodag/plugins/crunch/filter_overlap.py b/eodag/plugins/crunch/filter_overlap.py index 72c71223a..160191d4d 100644 --- a/eodag/plugins/crunch/filter_overlap.py +++ b/eodag/plugins/crunch/filter_overlap.py @@ -15,8 +15,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
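`FilterLatestIntersect.proceed()` accepts the search footprint either as a lonmin/latmin/lonmax/latmax dict or as a ready-made shapely geometry, and disables filtering otherwise. A sketch of that normalization and the intersection test (coordinates are arbitrary):

from shapely import geometry
from shapely.geometry.base import BaseGeometry

footprint = {"lonmin": 1.0, "latmin": 43.0, "lonmax": 2.0, "latmax": 44.0}

if isinstance(footprint, dict) and "lonmin" in footprint:
    search_extent = geometry.box(
        footprint["lonmin"], footprint["latmin"],
        footprint["lonmax"], footprint["latmax"],
    )
elif isinstance(footprint, BaseGeometry):
    search_extent = footprint
else:
    search_extent = None  # filtering disabled, as in the cruncher

product_geom = geometry.box(1.2, 43.2, 1.8, 43.8)  # stand-in for product.geometry
print(search_extent.intersects(product_geom))  # True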
+from __future__ import annotations import logging +from typing import TYPE_CHECKING, Any, List from eodag.plugins.crunch.base import Crunch from eodag.utils import get_geometry_from_various @@ -27,6 +29,8 @@ # shapely < 2.0 compatibility from shapely.errors import TopologicalError as GEOSException +if TYPE_CHECKING: + from eodag.api.product import EOProduct logger = logging.getLogger("eodag.crunch.overlap") @@ -47,7 +51,9 @@ class FilterOverlap(Crunch): :type config: dict """ - def proceed(self, products, **search_params): + def proceed( + self, products: List[EOProduct], **search_params: Any + ) -> List[EOProduct]: """Execute crunch: Filter products, retaining only those that are overlapping with the search_extent :param products: A list of products resulting from a search @@ -58,7 +64,7 @@ def proceed(self, products, **search_params): :rtype: list(:class:`~eodag.api.product._product.EOProduct`) """ logger.debug("Start filtering for overlapping products") - filtered = [] + filtered: List[EOProduct] = [] add_to_filtered = filtered.append search_geom = get_geometry_from_various(**search_params) diff --git a/eodag/plugins/crunch/filter_property.py b/eodag/plugins/crunch/filter_property.py index 593de7850..54e876d4f 100644 --- a/eodag/plugins/crunch/filter_property.py +++ b/eodag/plugins/crunch/filter_property.py @@ -15,14 +15,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations import logging import operator +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union from eodag.plugins.crunch.base import Crunch logger = logging.getLogger("eodag.crunch.property") +if TYPE_CHECKING: + from eodag.api.product import EOProduct + class FilterProperty(Crunch): """FilterProperty cruncher @@ -37,7 +42,11 @@ class FilterProperty(Crunch): :type config: dict """ - def proceed(self, products, **search_params): + config: Dict[str, Union[str, Optional[str]]] + + def proceed( + self, products: List[EOProduct], **search_params: Any + ) -> List[EOProduct]: """Execute crunch: Filter products, retaining only those that match property filtering :param products: A list of products resulting from a search @@ -45,7 +54,7 @@ def proceed(self, products, **search_params): :returns: The filtered products :rtype: list(:class:`~eodag.api.product._product.EOProduct`) """ - operator_name = self.config.pop("operator", "eq") + operator_name = self.config.pop("operator", "eq") or "eq" try: operator_method = getattr(operator, operator_name) except AttributeError: @@ -68,7 +77,7 @@ def proceed(self, products, **search_params): property_key, property_value, ) - filtered = [] + filtered: List[EOProduct] = [] add_to_filtered = filtered.append for product in products: diff --git a/eodag/plugins/download/aws.py b/eodag/plugins/download/aws.py index a4c98b549..a7a42bf04 100644 --- a/eodag/plugins/download/aws.py +++ b/eodag/plugins/download/aws.py @@ -15,17 +15,35 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
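`FilterProperty` resolves the configured comparison by name from the stdlib `operator` module, so any of `eq`, `ne`, `lt`, `le`, `gt`, `ge` can be used. A sketch with plain dicts standing in for `EOProduct` properties:

import operator
from typing import Any, Dict, List

products: List[Dict[str, Any]] = [{"cloudCover": 10}, {"cloudCover": 80}]

operator_name = "lt"  # from the cruncher config; the plugin defaults to "eq"
op = getattr(operator, operator_name)  # raises AttributeError if unknown
filtered = [p for p in products if "cloudCover" in p and op(p["cloudCover"], 50)]
print(filtered)  # [{'cloudCover': 10}]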
+from __future__ import annotations import logging import os import re +from datetime import datetime +from itertools import chain from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterator, + List, + Match, + Optional, + Set, + Tuple, + Union, + cast, +) import boto3 import requests from botocore.exceptions import ClientError, ProfileNotFound from botocore.handlers import disable_signing from lxml import etree +from stream_zip import ZIP_AUTO, stream_zip from eodag.api.product.metadata_mapping import ( mtd_cfg_as_conversion_and_querypath, @@ -34,6 +52,8 @@ ) from eodag.plugins.download.base import Download from eodag.utils import ( + DEFAULT_DOWNLOAD_TIMEOUT, + DEFAULT_DOWNLOAD_WAIT, HTTP_REQ_TIMEOUT, USER_AGENT, ProgressCallback, @@ -41,8 +61,23 @@ get_bucket_name_and_prefix, path_to_uri, rename_subfolder, + sanitize, +) +from eodag.utils.exceptions import ( + AuthenticationError, + DownloadError, + NotAvailableError, + TimeOutError, ) -from eodag.utils.exceptions import AuthenticationError, DownloadError, NotAvailableError + +if TYPE_CHECKING: + from boto3.resources.collection import ResourceCollection + + from eodag.api.product import EOProduct + from eodag.api.search_result import SearchResult + from eodag.config import PluginConfig + from eodag.utils import DownloadedCallback + logger = logging.getLogger("eodag.download.aws") @@ -163,6 +198,12 @@ "VH": {"VH": 1}, } +AWS_AUTH_ERROR_MESSAGES = [ + "AccessDenied", + "InvalidAccessKeyId", + "SignatureDoesNotMatch", +] + class AwsDownload(Download): """Download on AWS using S3 protocol. @@ -181,12 +222,20 @@ class AwsDownload(Download): :type config: :class:`~eodag.config.PluginConfig` """ - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(AwsDownload, self).__init__(provider, config) self.requester_pays = getattr(self.config, "requester_pays", False) self.s3_session = None - def download(self, product, auth=None, progress_callback=None, **kwargs): + def download( + self, + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> str: """Download method for AWS S3 API. The product can be downloaded as it is, or as SAFE-formatted product. 
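`download()` walks the product's S3 chunks and fetches each with boto3's `Bucket.download_file`, which reports transferred byte counts to a `Callback`; that is how the progress bar is driven. A hedged standalone sketch (bucket, key and local path are placeholders, and credentials are assumed to come from the environment):

import boto3


def progress(num_bytes: int) -> None:
    # boto3 calls this repeatedly with the size of each transferred part
    print(f"+{num_bytes} bytes")


s3 = boto3.resource("s3")
s3.Bucket("my-bucket").download_file(
    "path/to/object",                         # key within the bucket
    "/tmp/object",                            # local destination
    ExtraArgs={"RequestPayer": "requester"},  # as for requester-pays buckets
    Callback=progress,
)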
@@ -198,7 +247,7 @@ def download(self, product, auth=None, progress_callback=None, **kwargs): :param product: The EO product to download :type product: :class:`~eodag.api.product._product.EOProduct` :param auth: (optional) The configuration of a plugin of type Authentication - :type auth: :class:`~eodag.config.PluginConfig` + :type auth: Union[AuthBase, Dict[str, str]] :param progress_callback: (optional) A method or a callable object which takes a current size and a maximum size as inputs and handle progress bar @@ -220,13 +269,144 @@ def download(self, product, auth=None, progress_callback=None, **kwargs): progress_callback = ProgressCallback(disable=True) # prepare download & create dirs (before updating metadata) + product_local_path, record_filename = self._download_preparation( + product, progress_callback=progress_callback, **kwargs + ) + if not record_filename: + return product_local_path + + product_conf = getattr(self.config, "products", {}).get( + product.product_type, {} + ) + + # do not try to build SAFE if asset filter is used + asset_filter = kwargs.get("asset", None) + if asset_filter: + build_safe = False + else: + build_safe = product_conf.get("build_safe", False) + + ignore_assets = getattr(self.config, "ignore_assets", False) + + # product conf overrides provider conf for "flatten_top_dirs" + flatten_top_dirs = product_conf.get( + "flatten_top_dirs", getattr(self.config, "flatten_top_dirs", False) + ) + + # xtra metadata needed for SAFE product + self._configure_safe_build(build_safe, product) + # bucket names and prefixes + bucket_names_and_prefixes = self._get_bucket_names_and_prefixes( + product, asset_filter, ignore_assets + ) + + # add complementary urls + try: + for complementary_url_key in product_conf.get("complementary_url_key", []): + bucket_names_and_prefixes.append( + self.get_product_bucket_name_and_prefix( + product, product.properties[complementary_url_key] + ) + ) + except KeyError: + logger.warning( + "complementary_url_key %s is missing in %s properties" + % (complementary_url_key, product.properties["id"]) + ) + + # authenticate + authenticated_objects, s3_objects = self._do_authentication( + bucket_names_and_prefixes, auth + ) + + # downloadable files + unique_product_chunks = self._get_unique_products( + bucket_names_and_prefixes, + authenticated_objects, + asset_filter, + ignore_assets, + product, + ) + + total_size = sum([p.size for p in unique_product_chunks]) + + # download + progress_callback.reset(total=total_size) + try: + for product_chunk in unique_product_chunks: + try: + chunk_rel_path = self.get_chunk_dest_path( + product, + product_chunk, + build_safe=build_safe, + ) + except NotAvailableError as e: + # out of SAFE format chunk + logger.warning(e) + continue + chunk_abs_path = os.path.join(product_local_path, chunk_rel_path) + chunk_abs_path_dir = os.path.dirname(chunk_abs_path) + if not os.path.isdir(chunk_abs_path_dir): + os.makedirs(chunk_abs_path_dir) + + if not os.path.isfile(chunk_abs_path): + product_chunk.Bucket().download_file( + product_chunk.key, + chunk_abs_path, + ExtraArgs=getattr(s3_objects, "_params", {}), + Callback=progress_callback, + ) + + except AuthenticationError as e: + logger.warning("Unexpected error: %s" % e) + except ClientError as e: + self._raise_if_auth_error(e) + logger.warning("Unexpected error: %s" % e) + + # finalize safe product + if build_safe and "S2_MSI" in product.product_type: + self.finalize_s2_safe_product(product_local_path) + # flatten directory structure + elif flatten_top_dirs: + 
flatten_top_directories(product_local_path) + + if build_safe: + self.check_manifest_file_list(product_local_path) + + if asset_filter is None: + # save hash/record file + with open(record_filename, "w") as fh: + fh.write(product.remote_location) + logger.debug("Download recorded in %s", record_filename) + + product.location = path_to_uri(product_local_path) + + return product_local_path + + def _download_preparation( + self, product: EOProduct, progress_callback: ProgressCallback, **kwargs: Any + ) -> Tuple[str, Optional[str]]: + """ + preparation for the download: + - check if file was already downloaded + - get file path + - create directories + :param product: product to be downloaded + :type product: EOProduct + :param progress_callback: progress callback to be used + :type progress_callback: ProgressCallback + :param kwargs: additional arguments + :type kwargs: Any + :return: local path and file name + :rtype: Tuple[str, Optional[str]] + """ product_local_path, record_filename = self._prepare_download( product, progress_callback=progress_callback, **kwargs ) if not product_local_path or not record_filename: if product_local_path: product.location = path_to_uri(product_local_path) - return product_local_path + return product_local_path, None product_local_path = product_local_path.replace(".zip", "") # remove existing incomplete file if os.path.isfile(product_local_path): @@ -234,19 +414,19 @@ def download(self, product, auth=None, progress_callback=None, **kwargs): # create product dest dir if not os.path.isdir(product_local_path): os.makedirs(product_local_path) + return product_local_path, record_filename + def _configure_safe_build(self, build_safe: bool, product: EOProduct): + """ + updates the product properties with fetch metadata if safe build is enabled + :param build_safe: if safe build is enabled + :type build_safe: bool + :param product: product to be updated + :type product: EOProduct + """ product_conf = getattr(self.config, "products", {}).get( product.product_type, {} ) - - build_safe = product_conf.get("build_safe", False) - - # product conf overrides provider conf for "flatten_top_dirs" - flatten_top_dirs = product_conf.get( - "flatten_top_dirs", getattr(self.config, "flatten_top_dirs", False) - ) - - # xtra metadata needed for SAFE product if build_safe and "fetch_metadata" in product_conf.keys(): fetch_format = product_conf["fetch_metadata"]["fetch_format"] update_metadata = product_conf["fetch_metadata"]["update_metadata"] @@ -254,7 +434,12 @@ def download(self, product, auth=None, progress_callback=None, **kwargs): **product.properties ) logger.info("Fetching extra metadata from %s" % fetch_url) - resp = requests.get(fetch_url, headers=USER_AGENT, timeout=HTTP_REQ_TIMEOUT) + try: + resp = requests.get( + fetch_url, headers=USER_AGENT, timeout=HTTP_REQ_TIMEOUT + ) + except requests.exceptions.Timeout as exc: + raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc update_metadata = mtd_cfg_as_conversion_and_querypath(update_metadata) if fetch_format == "json": json_resp = resp.json() @@ -267,12 +452,41 @@ def download(self, product, auth=None, progress_callback=None, **kwargs): logger.warning( "SAFE metadata fetch format %s not implemented" % fetch_format ) + + def _get_bucket_names_and_prefixes( + self, product: EOProduct, asset_filter: str, ignore_assets: bool + ) -> List[Tuple[str, Optional[str]]]: + """ + retrieves the bucket names and path prefixes for the assets + :param product: product for which the assets shall be downloaded + :type product: EOProduct 
+ :param asset_filter: text for which the assets should be filtered + :type asset_filter: str + :param ignore_assets: if product instead of individual assets should be used + :type ignore_assets: bool + :return: tuples of bucket names and prefixes + :rtype: List[Tuple[str, Optional[str]]] + """ # if assets are defined, use them instead of scanning product.location - if hasattr(product, "assets") and not getattr( - self.config, "ignore_assets", False - ): + if len(product.assets) > 0 and not ignore_assets: + if asset_filter: + filter_regex = re.compile(asset_filter) + assets_keys = getattr(product, "assets", {}).keys() + assets_keys = list(filter(filter_regex.fullmatch, assets_keys)) + filtered_assets = { + a_key: getattr(product, "assets", {})[a_key] + for a_key in assets_keys + } + assets_values = [a for a in filtered_assets.values() if "href" in a] + if not assets_values: + raise NotAvailableError( + rf"No asset key matching re.fullmatch(r'{asset_filter}') was found in {product}" + ) + else: + assets_values = getattr(product, "assets", {}).values() + bucket_names_and_prefixes = [] - for complementary_url in getattr(product, "assets", {}).values(): + for complementary_url in assets_values: bucket_names_and_prefixes.append( self.get_product_bucket_name_and_prefix( product, complementary_url.get("href", "") @@ -282,25 +496,26 @@ def download(self, product, auth=None, progress_callback=None, **kwargs): bucket_names_and_prefixes = [ self.get_product_bucket_name_and_prefix(product) ] + return bucket_names_and_prefixes - # add complementary urls - try: - for complementary_url_key in product_conf.get("complementary_url_key", []): - bucket_names_and_prefixes.append( - self.get_product_bucket_name_and_prefix( - product, product.properties[complementary_url_key] - ) - ) - except KeyError: - logger.warning( - "complementary_url_key %s is missing in %s properties" - % (complementary_url_key, product.properties["id"]) - ) - - # authenticate - authenticated_objects = {} - auth_error_messages = set() - for idx, pack in enumerate(bucket_names_and_prefixes): + def _do_authentication( + self, + bucket_names_and_prefixes: List[Tuple[str, Optional[str]]], + auth: Dict[str, str], + ) -> Tuple[Dict[str, Any], ResourceCollection[Any]]: + """ + authenticates with s3 and retrieves the available objects + raises an error when authentication is not possible + :param bucket_names_and_prefixes: list of bucket names and corresponding path prefixes + :type bucket_names_and_prefixes: List[Tuple[str, Optional[str]]] + :param auth: authentication information + :type auth: Dict[str, str] + :return: authenticated objects per bucket, list of available objects + :rtype: Tuple[Dict[str, Any], ResourceCollection[Any]] + """ + authenticated_objects: Dict[str, Any] = {} + auth_error_messages: Set[str] = set() + for _, pack in enumerate(bucket_names_and_prefixes): try: bucket_name, prefix = pack if bucket_name not in authenticated_objects: @@ -337,21 +552,7 @@ def download(self, product, auth=None, progress_callback=None, **kwargs): logger.warning("Skipping %s/%s" % (bucket_name, prefix)) auth_error_messages.add(str(e)) except ClientError as e: - err = e.response["Error"] - auth_messages = [ - "AccessDenied", - "InvalidAccessKeyId", - "SignatureDoesNotMatch", - ] - if err["Code"] in auth_messages and "key" in err["Message"].lower(): - raise AuthenticationError( - "HTTP error {} returned\n{}: {}\nPlease check your credentials for {}".format( - e.response["ResponseMetadata"]["HTTPStatusCode"], - err["Code"], - err["Message"], - 
self.provider, - ) - ) + self._raise_if_auth_error(e) logger.warning("Unexpected error: %s" % e) logger.warning("Skipping %s/%s" % (bucket_name, prefix)) auth_error_messages.add(str(e)) @@ -359,9 +560,32 @@ def download(self, product, auth=None, progress_callback=None, **kwargs): # could not auth on any bucket if not authenticated_objects: raise AuthenticationError(", ".join(auth_error_messages)) + return authenticated_objects, s3_objects - # downloadable files - product_chunks = [] + def _get_unique_products( + self, + bucket_names_and_prefixes: List[Tuple[str, Optional[str]]], + authenticated_objects: Dict[str, Any], + asset_filter: str, + ignore_assets: bool, + product: EOProduct, + ) -> Set[Any]: + """ + retrieve unique product chunks based on authenticated objects and asset filters + :param bucket_names_and_prefixes: list of bucket names and corresponding path prefixes + :type bucket_names_and_prefixes: List[Tuple[str, Optional[str]]] + :param authenticated_objects: available objects per bucket + :type authenticated_objects: Dict[str, Any] + :param asset_filter: text for which assets should be filtered + :type asset_filter: str + :param ignore_assets: if product instead of individual assets should be used + :type ignore_assets: bool + :param product: product that shall be downloaded + :type product: EOProduct + :return: set of product chunks that can be downloaded + :rtype: Set[Any] + """ + product_chunks: List[Any] = [] for bucket_name, prefix in bucket_names_and_prefixes: # unauthenticated items filtered out if bucket_name in authenticated_objects.keys(): @@ -371,76 +595,240 @@ def download(self, product, auth=None, progress_callback=None, **kwargs): unique_product_chunks = set(product_chunks) - total_size = sum([p.size for p in unique_product_chunks]) + # if asset_filter is used with ignore_assets, apply filtering on listed prefixes + if asset_filter and ignore_assets: + filter_regex = re.compile(asset_filter) + unique_product_chunks = set( + filter( + lambda c: filter_regex.search(os.path.basename(c.key)), + unique_product_chunks, + ) + ) + if not unique_product_chunks: + raise NotAvailableError( + rf"No file basename matching re.fullmatch(r'{asset_filter}') was found in {product.remote_location}" + ) + return unique_product_chunks + + def _raise_if_auth_error(self, exception: ClientError) -> None: + """Raises an error if given exception is an authentication error""" + err = exception.response["Error"] + if err["Code"] in AWS_AUTH_ERROR_MESSAGES and "key" in err["Message"].lower(): + raise AuthenticationError( + "HTTP error {} returned\n{}: {}\nPlease check your credentials for {}".format( + exception.response["ResponseMetadata"]["HTTPStatusCode"], + err["Code"], + err["Message"], + self.provider, + ) + ) - # download - progress_callback.reset(total=total_size) + def _stream_download_dict( + self, + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> Dict[str, Any]: + r""" + Returns dictionary of :class:`~fastapi.responses.StreamingResponse` keyword-arguments. + It contains a generator of streamed download chunks and the response headers. 
+ + :param product: The EO product to download + :type product: :class:`~eodag.api.product._product.EOProduct` + :param auth: (optional) The configuration of a plugin of type Authentication + :type auth: :class:`~eodag.config.PluginConfig` + :param progress_callback: (optional) A progress callback + :type progress_callback: :class:`~eodag.utils.ProgressCallback` + :param wait: (optional) If download fails, wait time in minutes between two download tries + :type wait: int + :param timeout: (optional) If download fails, maximum time in minutes before it stops retrying + to download + :type timeout: int + :param kwargs: `outputs_prefix` (str), `extract` (bool), `delete_archive` (bool) + and `dl_url_params` (dict) can be provided as additional kwargs + and will override any other values defined in a configuration + file or with environment variables. + :type kwargs: Union[str, bool, dict] + :returns: Dictionary of :class:`~fastapi.responses.StreamingResponse` keyword-arguments + :rtype: dict + """ + if progress_callback is None: + logger.info( + "Progress bar unavailable, please call product.download() instead of plugin.download()" + ) + progress_callback = ProgressCallback(disable=True) + + product_conf = getattr(self.config, "products", {}).get( + product.product_type, {} + ) + # do not try to build SAFE if asset filter is used + asset_filter = kwargs.get("asset", None) + if asset_filter: + build_safe = False + else: + build_safe = product_conf.get("build_safe", False) + + ignore_assets = getattr(self.config, "ignore_assets", False) + + # xtra metadata needed for SAFE product + self._configure_safe_build(build_safe, product) + # bucket names and prefixes + bucket_names_and_prefixes = self._get_bucket_names_and_prefixes( + product, asset_filter, ignore_assets + ) + + # add complementary urls try: - for product_chunk in unique_product_chunks: - try: - chunk_rel_path = self.get_chunk_dest_path( - product, - product_chunk, - build_safe=build_safe, + for complementary_url_key in product_conf.get("complementary_url_key", []): + bucket_names_and_prefixes.append( + self.get_product_bucket_name_and_prefix( + product, product.properties[complementary_url_key] ) - except NotAvailableError as e: - # out of SAFE format chunk - logger.warning(e) - continue - chunk_abs_path = os.path.join(product_local_path, chunk_rel_path) - chunk_abs_path_dir = os.path.dirname(chunk_abs_path) - if not os.path.isdir(chunk_abs_path_dir): - os.makedirs(chunk_abs_path_dir) + ) + except KeyError: + logger.warning( + "complementary_url_key %s is missing in %s properties" + % (complementary_url_key, product.properties["id"]) + ) - if not os.path.isfile(chunk_abs_path): - product_chunk.Bucket().download_file( - product_chunk.key, - chunk_abs_path, - ExtraArgs=getattr(s3_objects, "_params", {}), - Callback=progress_callback, - ) + # authenticate + authenticated_objects, s3_objects = self._do_authentication( + bucket_names_and_prefixes, auth + ) - except AuthenticationError as e: - logger.warning("Unexpected error: %s" % e) - logger.warning("Skipping %s/%s" % (bucket_name, prefix)) - except ClientError as e: - err = e.response["Error"] - auth_messages = [ - "AccessDenied", - "InvalidAccessKeyId", - "SignatureDoesNotMatch", - ] - if err["Code"] in auth_messages and "key" in err["Message"].lower(): - raise AuthenticationError( - "HTTP error {} returned\n{}: {}\nPlease check your credentials for {}".format( - e.response["ResponseMetadata"]["HTTPStatusCode"], - err["Code"], - err["Message"], - self.provider, + # downloadable files + 
unique_product_chunks = self._get_unique_products( + bucket_names_and_prefixes, + authenticated_objects, + asset_filter, + ignore_assets, + product, + ) + assets_values = product.assets.get_values(asset_filter) + chunks_tuples = self._stream_download( + unique_product_chunks, product, build_safe, progress_callback, assets_values + ) + outputs_filename = ( + sanitize(product.properties["title"]) + if "title" in product.properties + else sanitize(product.properties.get("id", "download")) + ) + + if len(assets_values) == 1: + first_chunks_tuple = next(chunks_tuples) + # update headers + filename = os.path.basename(list(unique_product_chunks)[0].key) + headers = {"content-disposition": f"attachment; filename={filename}"} + if assets_values[0].get("type", None): + headers["content-type"] = assets_values[0]["type"] + + return dict( + content=chain(iter([first_chunks_tuple]), chunks_tuples), + headers=headers, + ) + return dict( + content=stream_zip(chunks_tuples), + media_type="application/zip", + headers={ + "content-disposition": f"attachment; filename={outputs_filename}.zip", + }, + ) + + def _stream_download( + self, + unique_product_chunks: Set[Any], + product: EOProduct, + build_safe: bool, + progress_callback: ProgressCallback, + assets_values: List[Dict[str, Any]], + ) -> Iterator[Tuple[str, datetime, int, Any, Iterator[Any]]]: + """Yield product data chunks""" + + chunk_size = 4096 * 1024 + modified_at = datetime.now() + perms = 0o600 + + def get_chunk_parts( + product_chunk: Any, progress_callback: ProgressCallback + ) -> Any: + try: + + chunk_start = 0 + chunk_end = chunk_start + chunk_size - 1 + + while chunk_start <= product_chunk.size: + get_kwargs = ( + dict(RequestPayer="requester") if self.requester_pays else {} ) - ) - logger.warning("Unexpected error: %s" % e) - logger.warning("Skipping %s/%s" % (bucket_name, prefix)) + chunk_part = product_chunk.get( + Range=f"bytes={chunk_start}-{chunk_end}", **get_kwargs + )["Body"].read() + progress_callback(len(chunk_part)) + chunk_start += chunk_size + chunk_end += chunk_size + yield chunk_part - # finalize safe product - if build_safe and "S2_MSI" in product.product_type: - self.finalize_s2_safe_product(product_local_path) - # flatten directory structure - elif flatten_top_dirs: - flatten_top_directories(product_local_path) + except ClientError as e: + self._raise_if_auth_error(e) + raise DownloadError("Unexpected error: %s" % e) from e - if build_safe: - self.check_manifest_file_list(product_local_path) + product_conf = getattr(self.config, "products", {}).get( + product.product_type, {} + ) + flatten_top_dirs = product_conf.get( + "flatten_top_dirs", getattr(self.config, "flatten_top_dirs", False) + ) + common_path = "" + if flatten_top_dirs: + common_path = self._get_commonpath( + product, unique_product_chunks, build_safe + ) - # save hash/record file - with open(record_filename, "w") as fh: - fh.write(product.remote_location) - logger.debug("Download recorded in %s", record_filename) + for product_chunk in unique_product_chunks: + try: + chunk_rel_path = self.get_chunk_dest_path( + product, + product_chunk, + build_safe=build_safe, + ) + if flatten_top_dirs: + chunk_rel_path = os.path.join( + product.properties["title"], + re.sub(rf"^{common_path}/?", "", chunk_rel_path), + ) - product.location = path_to_uri(product_local_path) - return product_local_path + except NotAvailableError as e: + # out of SAFE format chunk + logger.warning(e) + continue - def get_rio_env(self, bucket_name, prefix, auth_dict): + if len(assets_values) == 1: 
+ yield from get_chunk_parts(product_chunk, progress_callback) + else: + yield ( + chunk_rel_path, + modified_at, + perms, + ZIP_AUTO(product_chunk.size), + get_chunk_parts(product_chunk, progress_callback), + ) + + def _get_commonpath( + self, product: EOProduct, product_chunks: Set[Any], build_safe: bool + ) -> str: + chunk_paths = [] + for product_chunk in product_chunks: + chunk_paths.append( + self.get_chunk_dest_path(product, product_chunk, build_safe=build_safe) + ) + return os.path.commonpath(chunk_paths) + + def get_rio_env( + self, bucket_name: str, prefix: str, auth_dict: Dict[str, str] + ) -> Dict[str, Any]: """Get rasterio environment variables needed for data access authentication. :param bucket_name: Bucket containing objects @@ -467,7 +855,9 @@ def get_rio_env(self, bucket_name, prefix, auth_dict): else: return {"aws_unsigned": True} - def get_authenticated_objects(self, bucket_name, prefix, auth_dict): + def get_authenticated_objects( + self, bucket_name: str, prefix: str, auth_dict: Dict[str, str] + ) -> ResourceCollection: """Get boto3 authenticated objects for the given bucket using the most adapted auth strategy. Also expose ``s3_session`` as class variable if available. @@ -482,7 +872,9 @@ def get_authenticated_objects(self, bucket_name, prefix, auth_dict): :returns: The boto3 authenticated objects :rtype: :class:`~boto3.resources.collection.s3.Bucket.objectsCollection` """ - auth_methods = [ + auth_methods: List[ + Callable[[str, str, Dict[str, str]], Optional[ResourceCollection]] + ] = [ self._get_authenticated_objects_unsigned, self._get_authenticated_objects_from_auth_profile, self._get_authenticated_objects_from_auth_keys, @@ -499,11 +891,10 @@ def get_authenticated_objects(self, bucket_name, prefix, auth_dict): logger.debug("Auth using %s succeeded", try_auth_method.__name__) return s3_objects except ClientError as e: - if e.response.get("Error", {}).get("Code", {}) in [ - "AccessDenied", - "InvalidAccessKeyId", - "SignatureDoesNotMatch", - ]: + if ( + e.response.get("Error", {}).get("Code", {}) + in AWS_AUTH_ERROR_MESSAGES + ): pass else: raise e @@ -516,85 +907,93 @@ % bucket_name ) - def _get_authenticated_objects_unsigned(self, bucket_name, prefix, auth_dict): + def _get_authenticated_objects_unsigned( + self, bucket_name: str, prefix: str, auth_dict: Dict[str, str] + ) -> Optional[ResourceCollection]: """Auth strategy using no-sign-request""" - s3_resource = boto3.resource( + s3_resource = boto3.resource( # type: ignore service_name="s3", endpoint_url=getattr(self.config, "base_uri", None) ) - s3_resource.meta.client.meta.events.register( + s3_resource.meta.client.meta.events.register( # type: ignore "choose-signer.s3.*", disable_signing ) - objects = s3_resource.Bucket(bucket_name).objects - list(objects.filter(Prefix=prefix).limit(1)) - return objects + objects = s3_resource.Bucket(bucket_name).objects # type: ignore + list(objects.filter(Prefix=prefix).limit(1)) # type: ignore + return objects # type: ignore def _get_authenticated_objects_from_auth_profile( - self, bucket_name, prefix, auth_dict - ): + self, bucket_name: str, prefix: str, auth_dict: Dict[str, str] + ) -> Optional[ResourceCollection]: """Auth strategy using RequestPayer=requester and ``aws_profile`` from provided credentials""" if "profile_name" in auth_dict.keys(): - s3_session = boto3.session.Session(profile_name=auth_dict["profile_name"]) - s3_resource = s3_session.resource( + s3_session = 
boto3.session.Session(profile_name=auth_dict["profile_name"]) # type: ignore + s3_resource = s3_session.resource( # type: ignore service_name="s3", endpoint_url=getattr(self.config, "base_uri", None), ) if self.requester_pays: - objects = s3_resource.Bucket(bucket_name).objects.filter( + objects = s3_resource.Bucket(bucket_name).objects.filter( # type: ignore RequestPayer="requester" ) else: - objects = s3_resource.Bucket(bucket_name).objects - list(objects.filter(Prefix=prefix).limit(1)) - self.s3_session = s3_session - return objects + objects = s3_resource.Bucket(bucket_name).objects # type: ignore + list(objects.filter(Prefix=prefix).limit(1)) # type: ignore + self.s3_session = s3_session # type: ignore + return objects # type: ignore else: return None - def _get_authenticated_objects_from_auth_keys(self, bucket_name, prefix, auth_dict): + def _get_authenticated_objects_from_auth_keys( + self, bucket_name: str, prefix: str, auth_dict: Dict[str, str] + ) -> Optional[ResourceCollection]: """Auth strategy using RequestPayer=requester and ``aws_access_key_id``/``aws_secret_access_key`` from provided credentials""" if all(k in auth_dict for k in ("aws_access_key_id", "aws_secret_access_key")): - s3_session = boto3.session.Session( + s3_session = boto3.session.Session( # type: ignore aws_access_key_id=auth_dict["aws_access_key_id"], aws_secret_access_key=auth_dict["aws_secret_access_key"], ) - s3_resource = s3_session.resource( + s3_resource = s3_session.resource( # type: ignore service_name="s3", endpoint_url=getattr(self.config, "base_uri", None), ) if self.requester_pays: - objects = s3_resource.Bucket(bucket_name).objects.filter( + objects = s3_resource.Bucket(bucket_name).objects.filter( # type: ignore RequestPayer="requester" ) else: - objects = s3_resource.Bucket(bucket_name).objects - list(objects.filter(Prefix=prefix).limit(1)) - self.s3_session = s3_session - return objects + objects = s3_resource.Bucket(bucket_name).objects # type: ignore + list(objects.filter(Prefix=prefix).limit(1)) # type: ignore + self.s3_session = s3_session # type: ignore + return objects # type: ignore else: return None - def _get_authenticated_objects_from_env(self, bucket_name, prefix, auth_dict): + def _get_authenticated_objects_from_env( + self, bucket_name: str, prefix: str, auth_dict: Dict[str, str] + ) -> Optional[ResourceCollection]: """Auth strategy using RequestPayer=requester and current environment""" - s3_session = boto3.session.Session() - s3_resource = s3_session.resource( + s3_session = boto3.session.Session() # type: ignore + s3_resource = s3_session.resource( # type: ignore service_name="s3", endpoint_url=getattr(self.config, "base_uri", None) ) if self.requester_pays: - objects = s3_resource.Bucket(bucket_name).objects.filter( + objects = s3_resource.Bucket(bucket_name).objects.filter( # type: ignore RequestPayer="requester" ) else: - objects = s3_resource.Bucket(bucket_name).objects - list(objects.filter(Prefix=prefix).limit(1)) - self.s3_session = s3_session - return objects - - def get_product_bucket_name_and_prefix(self, product, url=None): + objects = s3_resource.Bucket(bucket_name).objects # type: ignore + list(objects.filter(Prefix=prefix).limit(1)) # type: ignore + self.s3_session = s3_session # type: ignore + return objects # type: ignore + + def get_product_bucket_name_and_prefix( + self, product: EOProduct, url: Optional[str] = None + ) -> Tuple[str, Optional[str]]: """Extract bucket name and prefix from product URL :param product: The EO product to download @@ -604,7 +1003,7 
@@ def get_product_bucket_name_and_prefix(self, product, url=None): :returns: bucket_name and prefix as str :rtype: tuple """ - if not url: + if url is None: url = product.location bucket_path_level = getattr(self.config, "bucket_path_level", None) @@ -622,7 +1021,7 @@ def get_product_bucket_name_and_prefix(self, product, url=None): return bucket, prefix - def check_manifest_file_list(self, product_path): + def check_manifest_file_list(self, product_path: str) -> None: """Checks if products listed in manifest.safe exist""" manifest_path_list = [ os.path.join(d, x) @@ -646,7 +1045,7 @@ def check_manifest_file_list(self, product_path): elif not os.path.isfile(safe_file_path): logger.warning("SAFE build: %s is missing" % safe_file.get("href")) - def finalize_s2_safe_product(self, product_path): + def finalize_s2_safe_product(self, product_path: str) -> None: """Add missing dirs to downloaded product""" try: logger.debug("Finalize SAFE product") @@ -676,23 +1075,29 @@ def finalize_s2_safe_product(self, product_path): # granule tile dirname root = etree.parse(os.path.join(safe_path, "manifest.safe")).getroot() - tile_id = os.path.basename( - os.path.dirname( - root.xpath("//fileLocation[contains(@href,'MTD_TL.xml')]")[0].get( - "href" + tile_id = cast( + str, + os.path.basename( + os.path.dirname( + root.xpath("//fileLocation[contains(@href,'MTD_TL.xml')]")[ + 0 + ].get("href") ) - ) + ), ) granule_folder = os.path.join(safe_path, "GRANULE") rename_subfolder(granule_folder, tile_id) # datastrip scene dirname - scene_id = os.path.basename( - os.path.dirname( - root.xpath("//fileLocation[contains(@href,'MTD_DS.xml')]")[0].get( - "href" + scene_id = cast( + str, + os.path.basename( + os.path.dirname( + root.xpath("//fileLocation[contains(@href,'MTD_DS.xml')]")[ + 0 + ].get("href") ) - ) + ), ) datastrip_folder = os.path.join(safe_path, "DATASTRIP") rename_subfolder(datastrip_folder, scene_id) @@ -700,201 +1105,190 @@ def finalize_s2_safe_product(self, product_path): logger.exception("Could not finalize SAFE product from downloaded data") raise DownloadError(e) - def get_chunk_dest_path(self, product, chunk, dir_prefix=None, build_safe=False): + def get_chunk_dest_path( + self, + product: EOProduct, + chunk: Any, + dir_prefix: Optional[str] = None, + build_safe: bool = False, + ) -> str: """Get chunk SAFE destination path""" - if build_safe: - # S2 common - if "S2_MSI" in product.product_type: - title_search = re.search( - r"^\w+_\w+_(\w+)_(\w+)_(\w+)_(\w+)_(\w+)$", - product.properties["title"], - ) - title_date1 = title_search.group(1) if title_search else None - title_part3 = title_search.group(4) if title_search else None - ds_dir_search = re.search( - r"^.+_(DS_\w+_+\w+_\w+)_\w+.\w+$", - product.properties.get("originalSceneID", ""), - ) - ds_dir = ds_dir_search.group(1) if ds_dir_search else 0 - s2_processing_level = product.product_type.split("_")[-1] - # S1 common - elif product.product_type == "S1_SAR_GRD": - s1_title_suffix_search = re.search( - r"^.+_([A-Z0-9_]+_[A-Z0-9_]+_[A-Z0-9_]+_[A-Z0-9_]+)_\w+$", - product.properties["title"], - ) - s1_title_suffix = ( - s1_title_suffix_search.group(1).lower().replace("_", "-") - if s1_title_suffix_search - else None - ) + if not build_safe: + if dir_prefix is None: + dir_prefix = chunk.key + product_path: str = chunk.key.split(dir_prefix.strip("/") + "/")[-1] + logger.debug(f"Downloading {chunk.key} to {product_path}") + return product_path + + title_date1: Optional[str] = None + title_part3: Optional[str] = None + ds_dir: Any = 0 + 
s2_processing_level: str = "" + s1_title_suffix: Optional[str] = None + # S2 common + if product.product_type and "S2_MSI" in product.product_type: + title_search: Optional[Match[str]] = re.search( + r"^\w+_\w+_(\w+)_(\w+)_(\w+)_(\w+)_(\w+)$", + product.properties["title"], + ) + title_date1 = title_search.group(1) if title_search else None + title_part3 = title_search.group(4) if title_search else None + ds_dir_search = re.search( + r"^.+_(DS_\w+_+\w+_\w+)_\w+.\w+$", + product.properties.get("originalSceneID", ""), + ) + ds_dir = ds_dir_search.group(1) if ds_dir_search else 0 + s2_processing_level = product.product_type.split("_")[-1] + # S1 common + elif product.product_type == "S1_SAR_GRD": + s1_title_suffix_search = re.search( + r"^.+_([A-Z0-9_]+_[A-Z0-9_]+_[A-Z0-9_]+_[A-Z0-9_]+)_\w+$", + product.properties["title"], + ) + s1_title_suffix = ( + s1_title_suffix_search.group(1).lower().replace("_", "-") + if s1_title_suffix_search + else None + ) - # S2 L2A Tile files ----------------------------------------------- - if S2L2A_TILE_IMG_REGEX.match(chunk.key): - found_dict = S2L2A_TILE_IMG_REGEX.match(chunk.key).groupdict() - product_path = ( - "%s.SAFE/GRANULE/%s/IMG_DATA/R%s/T%s%s%s_%s_%s_%s.jp2" - % ( - product.properties["title"], - found_dict["num"], - found_dict["res"], - found_dict["tile1"], - found_dict["tile2"], - found_dict["tile3"], - title_date1, - found_dict["file"], - found_dict["res"], - ) - ) - elif S2L2A_TILE_AUX_DIR_REGEX.match(chunk.key): - found_dict = S2L2A_TILE_AUX_DIR_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/GRANULE/%s/AUX_DATA/%s" % ( - product.properties["title"], - found_dict["num"], - found_dict["file"], - ) - # S2 L2A QI Masks - elif S2_TILE_QI_MSK_REGEX.match(chunk.key): - found_dict = S2_TILE_QI_MSK_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/GRANULE/%s/QI_DATA/MSK_%sPRB_%s" % ( - product.properties["title"], - found_dict["num"], - found_dict["file_base"], - found_dict["file_suffix"], - ) - # S2 L2A QI PVI - elif S2_TILE_QI_PVI_REGEX.match(chunk.key): - found_dict = S2_TILE_QI_PVI_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/GRANULE/%s/QI_DATA/%s_%s_PVI.jp2" % ( - product.properties["title"], - found_dict["num"], - title_part3, - title_date1, - ) - # S2 Tile files --------------------------------------------------- - elif S2_TILE_PREVIEW_DIR_REGEX.match(chunk.key): - found_dict = S2_TILE_PREVIEW_DIR_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/GRANULE/%s/preview/%s" % ( - product.properties["title"], - found_dict["num"], - found_dict["file"], - ) - elif S2_TILE_IMG_REGEX.match(chunk.key): - found_dict = S2_TILE_IMG_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/GRANULE/%s/IMG_DATA/T%s%s%s_%s_%s" % ( - product.properties["title"], - found_dict["num"], - found_dict["tile1"], - found_dict["tile2"], - found_dict["tile3"], - title_date1, - found_dict["file"], - ) - elif S2_TILE_THUMBNAIL_REGEX.match(chunk.key): - found_dict = S2_TILE_THUMBNAIL_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/GRANULE/%s/%s" % ( - product.properties["title"], - found_dict["num"], - found_dict["file"], - ) - elif S2_TILE_MTD_REGEX.match(chunk.key): - found_dict = S2_TILE_MTD_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/GRANULE/%s/MTD_TL.xml" % ( - product.properties["title"], - found_dict["num"], - ) - elif S2_TILE_AUX_DIR_REGEX.match(chunk.key): - found_dict = S2_TILE_AUX_DIR_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/GRANULE/%s/AUX_DATA/AUX_%s" % ( - 
product.properties["title"], - found_dict["num"], - found_dict["file"], - ) - elif S2_TILE_QI_DIR_REGEX.match(chunk.key): - found_dict = S2_TILE_QI_DIR_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/GRANULE/%s/QI_DATA/%s" % ( - product.properties["title"], - found_dict["num"], - found_dict["file"], - ) - # S2 Tiles generic - elif S2_TILE_REGEX.match(chunk.key): - found_dict = S2_TILE_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/GRANULE/%s/%s" % ( - product.properties["title"], - found_dict["num"], - found_dict["file"], - ) - # S2 Product files - elif S2_PROD_DS_MTD_REGEX.match(chunk.key): - found_dict = S2_PROD_DS_MTD_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/DATASTRIP/%s/MTD_DS.xml" % ( - product.properties["title"], - ds_dir, - ) - elif S2_PROD_DS_QI_REPORT_REGEX.match(chunk.key): - found_dict = S2_PROD_DS_QI_REPORT_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/DATASTRIP/%s/QI_DATA/%s.xml" % ( - product.properties["title"], - ds_dir, - found_dict["filename"], - ) - elif S2_PROD_DS_QI_REGEX.match(chunk.key): - found_dict = S2_PROD_DS_QI_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/DATASTRIP/%s/QI_DATA/%s" % ( - product.properties["title"], - ds_dir, - found_dict["file"], - ) - elif S2_PROD_INSPIRE_REGEX.match(chunk.key): - found_dict = S2_PROD_INSPIRE_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/INSPIRE.xml" % (product.properties["title"],) - elif S2_PROD_MTD_REGEX.match(chunk.key): - found_dict = S2_PROD_MTD_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/MTD_MSI%s.xml" % ( - product.properties["title"], - s2_processing_level, - ) - # S2 Product generic - elif S2_PROD_REGEX.match(chunk.key): - found_dict = S2_PROD_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/%s" % ( - product.properties["title"], - found_dict["file"], - ) - # S1 -------------------------------------------------------------- - elif S1_CALIB_REGEX.match(chunk.key): - found_dict = S1_CALIB_REGEX.match(chunk.key).groupdict() - product_path = ( - "%s.SAFE/annotation/calibration/%s-%s-%s-grd-%s-%s-%03d.xml" - % ( - product.properties["title"], - found_dict["file_prefix"], - product.properties["platformSerialIdentifier"].lower(), - found_dict["file_beam"], - found_dict["file_pol"], - s1_title_suffix, - S1_IMG_NB_PER_POLAR.get( - product.properties["polarizationMode"], {} - ).get(found_dict["file_pol"].upper(), 1), - ) - ) - elif S1_ANNOT_REGEX.match(chunk.key): - found_dict = S1_ANNOT_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/annotation/%s-%s-grd-%s-%s-%03d.xml" % ( - product.properties["title"], - product.properties["platformSerialIdentifier"].lower(), - found_dict["file_beam"], - found_dict["file_pol"], - s1_title_suffix, - S1_IMG_NB_PER_POLAR.get( - product.properties["polarizationMode"], {} - ).get(found_dict["file_pol"].upper(), 1), - ) - elif S1_MEAS_REGEX.match(chunk.key): - found_dict = S1_MEAS_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/measurement/%s-%s-grd-%s-%s-%03d.%s" % ( + # S2 L2A Tile files ----------------------------------------------- + if matched := S2L2A_TILE_IMG_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/GRANULE/%s/IMG_DATA/R%s/T%s%s%s_%s_%s_%s.jp2" % ( + product.properties["title"], + found_dict["num"], + found_dict["res"], + found_dict["tile1"], + found_dict["tile2"], + found_dict["tile3"], + title_date1, + found_dict["file"], + found_dict["res"], + ) + elif matched := 
S2L2A_TILE_AUX_DIR_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/GRANULE/%s/AUX_DATA/%s" % ( + product.properties["title"], + found_dict["num"], + found_dict["file"], + ) + # S2 L2A QI Masks + elif matched := S2_TILE_QI_MSK_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/GRANULE/%s/QI_DATA/MSK_%sPRB_%s" % ( + product.properties["title"], + found_dict["num"], + found_dict["file_base"], + found_dict["file_suffix"], + ) + # S2 L2A QI PVI + elif matched := S2_TILE_QI_PVI_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/GRANULE/%s/QI_DATA/%s_%s_PVI.jp2" % ( + product.properties["title"], + found_dict["num"], + title_part3, + title_date1, + ) + # S2 Tile files --------------------------------------------------- + elif matched := S2_TILE_PREVIEW_DIR_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/GRANULE/%s/preview/%s" % ( + product.properties["title"], + found_dict["num"], + found_dict["file"], + ) + elif matched := S2_TILE_IMG_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/GRANULE/%s/IMG_DATA/T%s%s%s_%s_%s" % ( + product.properties["title"], + found_dict["num"], + found_dict["tile1"], + found_dict["tile2"], + found_dict["tile3"], + title_date1, + found_dict["file"], + ) + elif matched := S2_TILE_THUMBNAIL_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/GRANULE/%s/%s" % ( + product.properties["title"], + found_dict["num"], + found_dict["file"], + ) + elif matched := S2_TILE_MTD_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/GRANULE/%s/MTD_TL.xml" % ( + product.properties["title"], + found_dict["num"], + ) + elif matched := S2_TILE_AUX_DIR_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/GRANULE/%s/AUX_DATA/AUX_%s" % ( + product.properties["title"], + found_dict["num"], + found_dict["file"], + ) + elif matched := S2_TILE_QI_DIR_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/GRANULE/%s/QI_DATA/%s" % ( + product.properties["title"], + found_dict["num"], + found_dict["file"], + ) + # S2 Tiles generic + elif matched := S2_TILE_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/GRANULE/%s/%s" % ( + product.properties["title"], + found_dict["num"], + found_dict["file"], + ) + # S2 Product files + elif matched := S2_PROD_DS_MTD_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/DATASTRIP/%s/MTD_DS.xml" % ( + product.properties["title"], + ds_dir, + ) + elif matched := S2_PROD_DS_QI_REPORT_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/DATASTRIP/%s/QI_DATA/%s.xml" % ( + product.properties["title"], + ds_dir, + found_dict["filename"], + ) + elif matched := S2_PROD_DS_QI_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/DATASTRIP/%s/QI_DATA/%s" % ( + product.properties["title"], + ds_dir, + found_dict["file"], + ) + elif matched := S2_PROD_INSPIRE_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/INSPIRE.xml" % (product.properties["title"],) + elif matched := S2_PROD_MTD_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/MTD_MSI%s.xml" % ( + product.properties["title"], + s2_processing_level, + ) + # S2 Product generic + elif matched := S2_PROD_REGEX.match(chunk.key): + 
found_dict = matched.groupdict() + product_path = "%s.SAFE/%s" % ( + product.properties["title"], + found_dict["file"], + ) + # S1 -------------------------------------------------------------- + elif matched := S1_CALIB_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = ( + "%s.SAFE/annotation/calibration/%s-%s-%s-grd-%s-%s-%03d.xml" + % ( product.properties["title"], + found_dict["file_prefix"], product.properties["platformSerialIdentifier"].lower(), found_dict["file_beam"], found_dict["file_pol"], @@ -902,43 +1296,64 @@ def get_chunk_dest_path(self, product, chunk, dir_prefix=None, build_safe=False) S1_IMG_NB_PER_POLAR.get( product.properties["polarizationMode"], {} ).get(found_dict["file_pol"].upper(), 1), - found_dict["file_ext"], - ) - elif S1_REPORT_REGEX.match(chunk.key): - found_dict = S1_REPORT_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/%s.SAFE-%s" % ( - product.properties["title"], - product.properties["title"], - found_dict["file"], - ) - # S1 generic - elif S1_REGEX.match(chunk.key): - found_dict = S1_REGEX.match(chunk.key).groupdict() - product_path = "%s.SAFE/%s" % ( - product.properties["title"], - found_dict["file"], ) - # out of SAFE format - else: - raise NotAvailableError( - f"Ignored {chunk.key} out of SAFE matching pattern" - ) - # no SAFE format + ) + elif matched := S1_ANNOT_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/annotation/%s-%s-grd-%s-%s-%03d.xml" % ( + product.properties["title"], + product.properties["platformSerialIdentifier"].lower(), + found_dict["file_beam"], + found_dict["file_pol"], + s1_title_suffix, + S1_IMG_NB_PER_POLAR.get(product.properties["polarizationMode"], {}).get( + found_dict["file_pol"].upper(), 1 + ), + ) + elif matched := S1_MEAS_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/measurement/%s-%s-grd-%s-%s-%03d.%s" % ( + product.properties["title"], + product.properties["platformSerialIdentifier"].lower(), + found_dict["file_beam"], + found_dict["file_pol"], + s1_title_suffix, + S1_IMG_NB_PER_POLAR.get(product.properties["polarizationMode"], {}).get( + found_dict["file_pol"].upper(), 1 + ), + found_dict["file_ext"], + ) + elif matched := S1_REPORT_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/%s.SAFE-%s" % ( + product.properties["title"], + product.properties["title"], + found_dict["file"], + ) + # S1 generic + elif matched := S1_REGEX.match(chunk.key): + found_dict = matched.groupdict() + product_path = "%s.SAFE/%s" % ( + product.properties["title"], + found_dict["file"], + ) + # out of SAFE format else: - if not dir_prefix: - dir_prefix = chunk.key - product_path = chunk.key.split(dir_prefix.strip("/") + "/")[-1] + raise NotAvailableError(f"Ignored {chunk.key} out of SAFE matching pattern") + logger.debug(f"Downloading {chunk.key} to {product_path}") return product_path def download_all( self, - products, - auth=None, - downloaded_callback=None, - progress_callback=None, - **kwargs, - ): + products: SearchResult, + auth: Optional[PluginConfig] = None, + downloaded_callback: Optional[DownloadedCallback] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> List[str]: """ download_all using parent (base plugin) method """ @@ -947,5 +1362,7 @@ def download_all( auth=auth, downloaded_callback=downloaded_callback, 
progress_callback=progress_callback, + wait=wait, + timeout=timeout, **kwargs, ) diff --git a/eodag/plugins/download/base.py b/eodag/plugins/download/base.py index 3be2f992e..cca131ab4 100644 --- a/eodag/plugins/download/base.py +++ b/eodag/plugins/download/base.py @@ -15,6 +15,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations import hashlib import logging @@ -25,9 +26,26 @@ import zipfile from datetime import datetime, timedelta from time import sleep +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + List, + Optional, + Tuple, + TypeVar, + Union, +) from eodag.plugins.base import PluginTopic -from eodag.utils import ProgressCallback, sanitize, uri_to_path +from eodag.utils import ( + DEFAULT_DOWNLOAD_TIMEOUT, + DEFAULT_DOWNLOAD_WAIT, + ProgressCallback, + sanitize, + uri_to_path, +) from eodag.utils.exceptions import ( AuthenticationError, MisconfiguredError, @@ -35,11 +53,16 @@ ) from eodag.utils.notebook import NotebookWidgets +if TYPE_CHECKING: + from eodag.api.product import EOProduct + from eodag.api.search_result import SearchResult + from eodag.config import PluginConfig + from eodag.utils import DownloadedCallback + + logger = logging.getLogger("eodag.download.base") -# default wait times in minutes -DEFAULT_DOWNLOAD_WAIT = 2 # in minutes -DEFAULT_DOWNLOAD_TIMEOUT = 20 # in minutes +T = TypeVar("T") class Download(PluginTopic): @@ -78,19 +101,19 @@ class Download(PluginTopic): :type config: str """ - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(Download, self).__init__(provider, config) self._authenticate = bool(getattr(self.config, "authenticate", False)) def download( self, - product, - auth=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> Optional[str]: r""" Base download method. Not available, it must be defined for each plugin. @@ -121,13 +144,13 @@ def download( def _stream_download_dict( self, - product, - auth=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> Dict[str, Any]: r""" Base _stream_download_dict method. Not available, it must be defined for each plugin. 
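The ``base.py`` changes above all follow one typing pattern: ``from __future__ import annotations`` postpones annotation evaluation, and the ``TYPE_CHECKING`` guard keeps imports such as ``EOProduct`` and ``PluginConfig`` visible to type checkers only, avoiding runtime import cycles. A minimal sketch of that pattern, with a hypothetical ``ToyDownload`` standing in for the real base plugin:

from __future__ import annotations

from typing import TYPE_CHECKING, Any, Dict, Optional, Union

if TYPE_CHECKING:
    # evaluated by type checkers only, never imported at runtime
    from eodag.api.product import EOProduct
    from eodag.config import PluginConfig


class ToyDownload:
    """Illustrative stand-in for the Download base plugin, not the eodag class."""

    def __init__(self, provider: str, config: PluginConfig) -> None:
        self.provider = provider
        self.config = config

    def download(
        self,
        product: EOProduct,
        auth: Optional[PluginConfig] = None,
        **kwargs: Union[str, bool, Dict[str, Any]],
    ) -> Optional[str]:
        # concrete plugins (HTTPDownload, AwsDownload, ...) override this
        raise NotImplementedError("must be implemented by each plugin")

Because the annotations stay strings at runtime, this module imports cleanly even though the guarded eodag modules are never actually loaded.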
@@ -154,7 +177,12 @@ def _stream_download_dict( "Download streaming must be implemented using a method named _stream_download_dict" ) - def _prepare_download(self, product, progress_callback=None, **kwargs): + def _prepare_download( + self, + product: EOProduct, + progress_callback: Optional[ProgressCallback] = None, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> Tuple[Optional[str], Optional[str]]: """Check if file has already been downloaded, and prepare product download :param product: The EO product to download @@ -162,7 +190,7 @@ def _prepare_download(self, product, progress_callback=None, **kwargs): :param progress_callback: (optional) A progress callback :type progress_callback: :class:`~eodag.utils.ProgressCallback` or None :returns: fs_path, record_filename - :rtype: tuple + :rtype: Tuple[Optional[str], Optional[str]] """ if product.location != product.remote_location: fs_path = uri_to_path(product.location) @@ -250,7 +278,7 @@ def _prepare_download(self, product, progress_callback=None, **kwargs): return fs_path, record_filename - def _resolve_archive_depth(self, product_path): + def _resolve_archive_depth(self, product_path: str) -> str: """Update product_path using archive_depth from provider configuration. Handle depth levels in the product archive. For example, if the downloaded archive was @@ -270,7 +298,12 @@ def _resolve_archive_depth(self, product_path): count += 1 return product_path - def _finalize(self, fs_path, progress_callback=None, **kwargs): + def _finalize( + self, + fs_path: str, + progress_callback: Optional[ProgressCallback] = None, + **kwargs: Any, + ) -> str: """Finalize the download process. :param fs_path: The path to the local zip archive downloaded or already present @@ -402,14 +435,14 @@ def _finalize(self, fs_path, progress_callback=None, **kwargs): def download_all( self, - products, - auth=None, - downloaded_callback=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + products: SearchResult, + auth: Optional[PluginConfig] = None, + downloaded_callback: Optional[DownloadedCallback] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> List[str]: """ Base download_all method. @@ -422,7 +455,7 @@ def download_all( :type auth: :class:`~eodag.config.PluginConfig` :param downloaded_callback: (optional) A method or a callable object which takes as parameter the ``product``. You can use the base class - :class:`~eodag.utils.DownloadedCallback` and override + :class:`~eodag.api.product.DownloadedCallback` and override its ``__call__`` method. Will be called each time a product finishes downloading :type downloaded_callback: Callable[[:class:`~eodag.api.product._product.EOProduct`], None] @@ -447,7 +480,7 @@ def download_all( # Products are going to be removed one by one from this sequence once # downloaded. products = products[:] - paths = [] + paths: List[str] = [] # initiate retry loop start_time = datetime.now() stop_time = start_time + timedelta(minutes=timeout) @@ -556,7 +589,9 @@ def download_all( return paths - def _download_retry(self, product, wait, timeout): + def _download_retry( + self, product: EOProduct, wait: int, timeout: int + ) -> Callable[[Callable[..., T]], Callable[..., T]]: """ Download retry decorator. 
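``_download_retry`` is the decorator behind the wait/timeout loop used by ``download_request`` in the HTTP and S3 REST plugins; ``wait`` and ``timeout`` are in minutes, matching ``DEFAULT_DOWNLOAD_WAIT`` and ``DEFAULT_DOWNLOAD_TIMEOUT`` now exported by ``eodag.utils``. A stripped-down sketch of that decorator shape (the real one also tracks ``product.next_try``, handles ``NotAvailableError`` specifically, and reports through notebook widgets):

from datetime import datetime, timedelta
from time import sleep
from typing import Any, Callable, TypeVar

T = TypeVar("T")


def download_retry(
    wait: int, timeout: int
) -> Callable[[Callable[..., T]], Callable[..., T]]:
    """Retry the wrapped callable every `wait` minutes until `timeout` minutes pass."""

    def decorator(download: Callable[..., T]) -> Callable[..., T]:
        def download_and_retry(*args: Any, **kwargs: Any) -> T:
            stop_time = datetime.now() + timedelta(minutes=timeout)
            while True:
                try:
                    return download(*args, **kwargs)
                except Exception:
                    if datetime.now() >= stop_time:
                        raise  # timeout reached, give up with the last error
                    sleep(wait * 60)  # wait before the next try

        return download_and_retry

    return decorator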
@@ -571,11 +606,11 @@ def _download_retry(self, product, wait, timeout): to download :type timeout: int :returns: decorator - :rtype: :class:`typing.Any` + :rtype: Callable[[Callable[..., T]], Callable[..., T]] """ - def decorator(download): - def download_and_retry(*args, **kwargs): + def decorator(download: Callable[..., T]) -> Callable[..., T]: + def download_and_retry(*args: Any, **kwargs: Any) -> T: # initiate retry loop start_time = datetime.now() stop_time = start_time + timedelta(minutes=timeout) @@ -586,7 +621,6 @@ def download_and_retry(*args, **kwargs): nb_info = NotebookWidgets() while "Loop until products download succeeds or timeout is reached": - datetime_now = datetime.now() if datetime_now >= product.next_try: diff --git a/eodag/plugins/download/creodias_s3.py b/eodag/plugins/download/creodias_s3.py new file mode 100644 index 000000000..89108878a --- /dev/null +++ b/eodag/plugins/download/creodias_s3.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2024, CS GROUP - France, https://www.csgroup.eu/ +# +# This file is part of EODAG project +# https://www.github.com/CS-SI/EODAG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import boto3 +from botocore.exceptions import ClientError + +from eodag.plugins.download.aws import AwsDownload +from eodag.utils.exceptions import MisconfiguredError + + +class CreodiasS3Download(AwsDownload): + """ + Download on creodias s3 from their VMs + """ + + def _get_authenticated_objects_unsigned(self, bucket_name, prefix, auth_dict): + """Auth strategy using no-sign-request""" + + raise ClientError( + {"Error": {"Code": "AccessDenied", "Message": "skip unsigned"}}, + "_get_authenticated_objects_unsigned", + ) + + def _get_authenticated_objects_from_auth_keys(self, bucket_name, prefix, auth_dict): + """Auth strategy using RequestPayer=requester and ``aws_access_key_id``/``aws_secret_access_key`` + from provided credentials""" + + # check if credentials are missing + required_creds = ["aws_access_key_id", "aws_secret_access_key"] + if not all(auth_dict.get(x, None) for x in required_creds): + raise MisconfiguredError( + f"Incomplete credentials for {self.provider}, missing " + f"{[x for x in required_creds if not auth_dict.get(x, None)]}" + ) + + s3_session = boto3.session.Session(**auth_dict) + s3_resource = s3_session.resource( + "s3", endpoint_url=getattr(self.config, "base_uri", None) + ) + objects = s3_resource.Bucket(bucket_name).objects.filter() + list(objects.filter(Prefix=prefix).limit(1)) + self.s3_session = s3_session + return objects diff --git a/eodag/plugins/download/http.py b/eodag/plugins/download/http.py index b0647e766..e64ee0b01 100644 --- a/eodag/plugins/download/http.py +++ b/eodag/plugins/download/http.py @@ -15,13 +15,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
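``CreodiasS3Download`` works because ``AwsDownload.get_authenticated_objects`` (earlier in this diff) walks an ordered list of auth strategies: each one returns objects, returns ``None`` when its credentials are not supplied, or raises a ``ClientError`` carrying an auth-related code to hand over to the next strategy, which is exactly what the overridden ``_get_authenticated_objects_unsigned`` above does. A simplified stand-in for that chain, with toy strategies returning strings instead of boto3 collections:

from typing import Callable, Dict, List, Optional

from botocore.exceptions import ClientError

# mirrors the codes behind the AWS_AUTH_ERROR_MESSAGES constant referenced above
AUTH_ERROR_CODES = ["AccessDenied", "InvalidAccessKeyId", "SignatureDoesNotMatch"]


def first_working_strategy(
    strategies: List[Callable[[Dict[str, str]], Optional[str]]],
    auth_dict: Dict[str, str],
) -> str:
    for strategy in strategies:
        try:
            objects = strategy(auth_dict)
        except ClientError as e:
            if e.response.get("Error", {}).get("Code") in AUTH_ERROR_CODES:
                continue  # auth-related failure: try the next strategy
            raise
        if objects is not None:
            return objects
    raise RuntimeError("no auth strategy succeeded")


def skip_unsigned(auth_dict: Dict[str, str]) -> Optional[str]:
    # same trick as CreodiasS3Download: disable a strategy by raising AccessDenied
    raise ClientError(
        {"Error": {"Code": "AccessDenied", "Message": "skip unsigned"}}, "unsigned"
    )


def from_auth_keys(auth_dict: Dict[str, str]) -> Optional[str]:
    if "aws_access_key_id" in auth_dict:
        return "objects-authenticated-with-keys"
    return None


print(first_working_strategy([skip_unsigned, from_auth_keys], {"aws_access_key_id": "k"}))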
+from __future__ import annotations import logging import os import shutil import zipfile from datetime import datetime +from email.message import Message from itertools import chain +from typing import TYPE_CHECKING, Any, Dict, Iterator, List, Optional, Tuple, Union from urllib.parse import parse_qs, urlparse import geojson @@ -29,7 +32,7 @@ import requests_ftp from lxml import etree from requests import RequestException -from stream_zip import NO_COMPRESSION_64, stream_zip +from stream_zip import ZIP_AUTO, stream_zip from eodag.api.product.metadata_mapping import ( OFFLINE_STATUS, @@ -38,12 +41,10 @@ properties_from_json, properties_from_xml, ) -from eodag.plugins.download.base import ( +from eodag.plugins.download.base import Download +from eodag.utils import ( DEFAULT_DOWNLOAD_TIMEOUT, DEFAULT_DOWNLOAD_WAIT, - Download, -) -from eodag.utils import ( DEFAULT_STREAM_REQUESTS_TIMEOUT, HTTP_REQ_TIMEOUT, USER_AGENT, @@ -59,8 +60,17 @@ DownloadError, MisconfiguredError, NotAvailableError, + TimeOutError, ) +if TYPE_CHECKING: + from requests import Response + + from eodag.api.product import EOProduct + from eodag.api.search_result import SearchResult + from eodag.config import PluginConfig + from eodag.utils import DownloadedCallback + logger = logging.getLogger("eodag.download.http") @@ -90,19 +100,21 @@ class HTTPDownload(Download): """ - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(HTTPDownload, self).__init__(provider, config) if not hasattr(self.config, "base_uri"): raise MisconfiguredError( - "{} plugin require a base_uri configuration key".format(self.__name__) + "{} plugin requires a base_uri configuration key".format( + type(self).__name__ + ) ) def orderDownload( self, - product, - auth=None, - **kwargs, - ): + product: EOProduct, + auth: Optional[PluginConfig] = None, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> None: """Send product order request. 
It will be executed once before the download retry loop, if the product is OFFLINE @@ -132,7 +144,7 @@ order_method = getattr(self.config, "order_method", "GET").lower() if order_method == "post": # separate url & parameters - parts = urlparse(product.properties["orderLink"]) + parts = urlparse(str(product.properties["orderLink"])) query_dict = parse_qs(parts.query) if not query_dict and parts.query: query_dict = geojson.loads(parts.query) @@ -155,11 +167,17 @@ ordered_message = response.text logger.debug(ordered_message) logger.info("%s was ordered", product.properties["title"]) + except requests.exceptions.Timeout as exc: + raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc except RequestException as e: + if e.response and hasattr(e.response, "content"): + error_message = f"{e.response.content.decode('utf-8')} - {e}" + else: + error_message = str(e) logger.warning( "%s could not be ordered, request returned %s", product.properties["title"], - f"{e.response.content} - {e}", + error_message, ) order_metadata_mapping = getattr(self.config, "order_on_response", {}).get( @@ -167,9 +185,8 @@ ) if order_metadata_mapping: logger.debug("Parsing order response to update product metadata-mapping") - order_metadata_mapping_jsonpath = {} order_metadata_mapping_jsonpath = mtd_cfg_as_conversion_and_querypath( - order_metadata_mapping, order_metadata_mapping_jsonpath + order_metadata_mapping, ) properties_update = properties_from_json( response.json(), @@ -184,10 +201,10 @@ def orderDownloadStatus( self, - product, - auth=None, - **kwargs, - ): + product: EOProduct, + auth: Optional[PluginConfig] = None, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> None: """Send product order status request. It will be executed before each download retry. @@ -213,7 +230,7 @@ status_method = getattr(self.config, "order_status_method", "GET").lower() if status_method == "post": # separate url & parameters - parts = urlparse(product.properties["orderStatusLink"]) + parts = urlparse(str(product.properties["orderStatusLink"])) query_dict = parse_qs(parts.query) if not query_dict and parts.query: query_dict = geojson.loads(parts.query) @@ -344,6 +361,8 @@ f"after order success. Please search and download {product} again" ) + except requests.exceptions.Timeout as exc: + raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc except RequestException as e: logger.warning( "%s order status could not be checked, request returned %s", @@ -353,13 +372,13 @@ def download( self, - product, - auth=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> Optional[str]: """Download a product using HTTP protocol. The downloaded product is assumed to be a Zip file. 
If it is not, @@ -381,9 +400,7 @@ def download( return fs_path # download assets if exist instead of remote_location - if hasattr(product, "assets") and not getattr( - self.config, "ignore_assets", False - ): + if len(product.assets) > 0 and not getattr(self.config, "ignore_assets", False): try: fs_path = self._download_assets( product, @@ -393,17 +410,27 @@ progress_callback, **kwargs, ) + if kwargs.get("asset", None) is None: + product.location = path_to_uri(fs_path) return fs_path - except NotAvailableError: - pass + except NotAvailableError as e: + if kwargs.get("asset", None) is not None: + raise NotAvailableError(e).with_traceback(e.__traceback__) + else: + pass url = product.remote_location @self._download_retry(product, wait, timeout) - def download_request(product, auth, progress_callback, wait, timeout, **kwargs): - chunks = self._stream_download( - product, auth, progress_callback, wait, timeout, **kwargs - ) + def download_request( + product: EOProduct, + auth: PluginConfig, + progress_callback: ProgressCallback, + wait: int, + timeout: int, + **kwargs: Dict[str, Any], + ) -> None: + chunks = self._stream_download(product, auth, progress_callback, **kwargs) with open(fs_path, "wb") as fhandle: for chunk in chunks: @@ -433,7 +460,7 @@ def download_request(product, auth, progress_callback, wait, timeout, **kwargs): product.location = path_to_uri(product_path) return product_path - def _check_stream_size(self, product): + def _check_stream_size(self, product: EOProduct) -> int: stream_size = int(self.stream.headers.get("content-length", 0)) if ( stream_size == 0 @@ -452,13 +479,13 @@ def _check_stream_size(self, product): def _stream_download_dict( self, - product, - auth=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> Dict[str, Any]: r""" Returns dictionary of :class:`~fastapi.responses.StreamingResponse` keyword-arguments. It contains a generator of streamed download chunks and the response headers. 
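In the hunk below, a single requested asset is passed through as raw chunks with its own headers, while several assets are zipped on the fly with ``stream_zip``; the import switch from ``NO_COMPRESSION_64`` to ``ZIP_AUTO`` lets stream-zip choose per-member compression from the declared uncompressed size. A self-contained sketch of that zipping path, with made-up member names and payloads:

from datetime import datetime

from stream_zip import ZIP_AUTO, stream_zip  # pip install stream-zip


def member_tuples():
    # one (name, modified_at, mode, method, chunk iterator) tuple per member,
    # the same tuple shape yielded by _stream_download_assets
    for name, payload in [("B04.jp2", b"red band"), ("B08.jp2", b"nir band")]:
        yield name, datetime.now(), 0o600, ZIP_AUTO(len(payload)), iter([payload])


with open("download.zip", "wb") as out:
    for zipped_chunk in stream_zip(member_tuples()):  # yields zip bytes lazily
        out.write(zipped_chunk)

Nothing is buffered: each member's chunks are consumed exactly once, which is why the single-asset branch has to re-chain the first chunk it reads before returning the generator to the caller.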
@@ -483,32 +510,55 @@ def _stream_download_dict( :rtype: dict """ # download assets if exist instead of remote_location - if hasattr(product, "assets") and not getattr( - self.config, "ignore_assets", False - ): + if len(product.assets) > 0 and not getattr(self.config, "ignore_assets", False): try: + assets_values = product.assets.get_values(kwargs.get("asset", None)) chunks_tuples = self._stream_download_assets( - product, auth, progress_callback, **kwargs + product, + auth, + progress_callback, + assets_values=assets_values, + **kwargs, ) - outputs_filename = ( - sanitize(product.properties["title"]) - if "title" in product.properties - else sanitize(product.properties.get("id", "download")) - ) - return dict( - content=stream_zip(chunks_tuples), - media_type="application/zip", - headers={ - "content-disposition": f"attachment; filename={outputs_filename}.zip", - }, - ) - except NotAvailableError: - pass + if len(assets_values) == 1: + # start reading chunks to set asset.headers + first_chunks_tuple = next(chunks_tuples) + + # update headers + assets_values[0].headers[ + "content-disposition" + ] = f"attachment; filename={assets_values[0].filename}" + if assets_values[0].get("type", None): + assets_values[0].headers["content-type"] = assets_values[0][ + "type" + ] - chunks = self._stream_download( - product, auth, progress_callback, wait, timeout, **kwargs - ) + return dict( + content=chain(iter([first_chunks_tuple]), chunks_tuples), + headers=assets_values[0].headers, + ) + + else: + outputs_filename = ( + sanitize(product.properties["title"]) + if "title" in product.properties + else sanitize(product.properties.get("id", "download")) + ) + return dict( + content=stream_zip(chunks_tuples), + media_type="application/zip", + headers={ + "content-disposition": f"attachment; filename={outputs_filename}.zip", + }, + ) + except NotAvailableError as e: + if kwargs.get("asset", None) is not None: + raise NotAvailableError(e).with_traceback(e.__traceback__) + else: + pass + + chunks = self._stream_download(product, auth, progress_callback, **kwargs) # start reading chunks to set product.headers first_chunk = next(chunks) @@ -517,12 +567,14 @@ def _stream_download_dict( headers=product.headers, ) - def _process_exception(self, e, product, ordered_message): + def _process_exception( + self, e: RequestException, product: EOProduct, ordered_message: str + ) -> None: # check if error is identified as auth_error in provider conf auth_errors = getattr(self.config, "auth_error_code", [None]) if not isinstance(auth_errors, list): auth_errors = [auth_errors] - if e.response.status_code in auth_errors: + if e.response and e.response.status_code in auth_errors: raise AuthenticationError( "HTTP Error %s returned, %s\nPlease check your credentials for %s" % ( @@ -557,13 +609,11 @@ def _process_exception(self, e, product, ordered_message): def _stream_download( self, - product, - auth=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + **kwargs: Dict[str, Any], + ) -> Iterator[Any]: """ fetches a zip file containing the assets of a given product as a stream and returns a generator yielding the chunks of the file @@ -577,9 +627,6 @@ def _stream_download( creation and update to give the user a feedback on the download progress :type progress_callback: :class:`~eodag.utils.ProgressCallback` - :param ordered_message: message to be used 
in case of error because - the product is unavailable - :type ordered_message: str :param kwargs: additional arguments :type kwargs: dict """ @@ -614,7 +661,7 @@ def _stream_download( if not query_dict and parts.query: query_dict = geojson.loads(parts.query) req_url = parts._replace(query=None).geturl() - req_kwargs = {"json": query_dict} if query_dict else {} + req_kwargs: Dict[str, Any] = {"json": query_dict} if query_dict else {} else: req_url = url req_kwargs = {} @@ -635,6 +682,10 @@ def _stream_download( try: self.stream.raise_for_status() + except requests.exceptions.Timeout as exc: + raise TimeOutError( + exc, timeout=DEFAULT_STREAM_REQUESTS_TIMEOUT + ) from exc except RequestException as e: self._process_exception(e, product, ordered_message) else: @@ -648,11 +699,11 @@ def _stream_download( def _stream_download_assets( self, - product, - auth=None, - progress_callback=None, - **kwargs, - ): + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> Iterator[Tuple[str, datetime, int, Any, Iterator[Any]]]: if progress_callback is None: logger.info("Progress bar unavailable, please call product.download()") progress_callback = ProgressCallback(disable=True) @@ -660,13 +711,12 @@ def _stream_download_assets( assets_urls = [ a["href"] for a in getattr(product, "assets", {}).values() if "href" in a ] - assets_values = [ - a for a in getattr(product, "assets", {}).values() if "href" in a - ] if not assets_urls: raise NotAvailableError("No assets available for %s" % product) + assets_values = kwargs.get("assets_values", []) + # get extra parameters to pass to the query params = kwargs.pop("dl_url_params", None) or getattr( self.config, "dl_url_params", {} @@ -676,7 +726,7 @@ def _stream_download_assets( progress_callback.reset(total=total_size) - def get_chunks(stream): + def get_chunks(stream: Response) -> Any: for chunk in stream.iter_content(chunk_size=64 * 1024): if chunk: progress_callback(len(chunk)) @@ -693,9 +743,10 @@ def get_chunks(stream): asset_rel_path_parts_sanitized = [ sanitize(part) for part in asset_rel_path_parts ] - asset["rel_path"] = os.path.join(*asset_rel_path_parts_sanitized) - asset_rel_paths_list.append(asset["rel_path"]) - assets_common_subdir = os.path.commonpath(asset_rel_paths_list) + asset.rel_path = os.path.join(*asset_rel_path_parts_sanitized) + asset_rel_paths_list.append(asset.rel_path) + if asset_rel_paths_list: + assets_common_subdir = os.path.commonpath(asset_rel_paths_list) # product conf overrides provider conf for "flatten_top_dirs" product_conf = getattr(self.config, "products", {}).get( @@ -707,7 +758,6 @@ def get_chunks(stream): # loop for assets download for asset in assets_values: - if asset["href"].startswith("file:"): logger.info( f"Local asset detected. 
Download skipped for {asset['href']}" @@ -724,62 +774,75 @@ def get_chunks(stream): ) as stream: try: stream.raise_for_status() + except requests.exceptions.Timeout as exc: + raise TimeOutError( + exc, timeout=DEFAULT_STREAM_REQUESTS_TIMEOUT + ) from exc except RequestException as e: - self._handle_asset_exception(e, asset) + raise_errors = True if len(assets_values) == 1 else False + self._handle_asset_exception(e, asset, raise_errors=raise_errors) else: asset_rel_path = ( - asset["rel_path"] - .replace(assets_common_subdir, "") - .strip(os.sep) + asset.rel_path.replace(assets_common_subdir, "").strip(os.sep) if flatten_top_dirs - else asset["rel_path"] + else asset.rel_path ) asset_rel_dir = os.path.dirname(asset_rel_path) - if not asset.get("filename", None): + if not getattr(asset, "filename", None): # try getting filename in GET header if was not found in HEAD result asset_content_disposition = stream.headers.get( "content-disposition", None ) if asset_content_disposition: - asset["filename"] = parse_header( + asset.filename = parse_header( asset_content_disposition ).get_param("filename", None) - if not asset.get("filename", None): + if not getattr(asset, "filename", None): # default filename extracted from path - asset["filename"] = os.path.basename(asset["rel_path"]) - - yield ( - os.path.join(asset_rel_dir, asset["filename"]), - modified_at, - perms, - NO_COMPRESSION_64, - get_chunks(stream), - ) + asset.filename = os.path.basename(asset.rel_path) + + asset.rel_path = os.path.join(asset_rel_dir, asset.filename) + + if len(assets_values) == 1: + # apply headers to asset + product.assets[assets_values[0].key].headers = stream.headers + yield from get_chunks(stream) + else: + # several assets to zip + yield ( + asset.rel_path, + modified_at, + perms, + ZIP_AUTO(asset.size), + get_chunks(stream), + ) def _download_assets( self, - product, - fs_dir_path, - record_filename, - auth=None, - progress_callback=None, - **kwargs, - ): - # """Download product assets if they exist""" + product: EOProduct, + fs_dir_path: str, + record_filename: str, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> str: + """Download product assets if they exist""" + if progress_callback is None: + logger.info("Progress bar unavailable, please call product.download()") + progress_callback = ProgressCallback(disable=True) + assets_urls = [ a["href"] for a in getattr(product, "assets", {}).values() if "href" in a ] - assets_values = [ - a for a in getattr(product, "assets", {}).values() if "href" in a - ] - if not assets_urls: raise NotAvailableError("No assets available for %s" % product) + assets_values = product.assets.get_values(kwargs.get("asset", None)) + chunks_tuples = self._stream_download_assets( - product, auth, progress_callback, **kwargs + product, auth, progress_callback, assets_values=assets_values, **kwargs ) # remove existing incomplete file @@ -804,6 +867,12 @@ def _download_assets( local_assets_count += 1 continue + if len(assets_values) == 1 and local_assets_count == 0: + # start reading chunks to set asset.rel_path + first_chunks_tuple = next(chunks_tuples) + chunks = chain(iter([first_chunks_tuple]), chunks_tuples) + chunks_tuples = [(assets_values[0].rel_path, None, None, None, chunks)] + for chunk_tuple in chunks_tuples: asset_path = chunk_tuple[0] asset_chunks = chunk_tuple[4] @@ -840,19 +909,22 @@ def _download_assets( if flatten_top_dirs: flatten_top_directories(fs_dir_path) - # save 
hash/record file - with open(record_filename, "w") as fh: - fh.write(product.remote_location) - logger.debug("Download recorded in %s", record_filename) + if kwargs.get("asset", None) is None: + # save hash/record file + with open(record_filename, "w") as fh: + fh.write(product.remote_location) + logger.debug("Download recorded in %s", record_filename) return fs_dir_path - def _handle_asset_exception(self, e, asset): + def _handle_asset_exception( + self, e: RequestException, asset: Dict[str, Any], raise_errors: bool = False + ) -> None: # check if error is identified as auth_error in provider conf auth_errors = getattr(self.config, "auth_error_code", [None]) if not isinstance(auth_errors, list): auth_errors = [auth_errors] - if e.response.status_code in auth_errors: + if e.response and e.response.status_code in auth_errors: raise AuthenticationError( "HTTP Error %s returned, %s\nPlease check your credentials for %s" % ( @@ -861,11 +933,19 @@ def _handle_asset_exception(self, e, asset): self.provider, ) ) + elif raise_errors: + raise DownloadError(e) else: logger.warning("Unexpected error: %s" % e) logger.warning("Skipping %s" % asset["href"]) - def _get_asset_sizes(self, assets_values, auth, params, zipped=False): + def _get_asset_sizes( + self, + assets_values: List[Dict[str, Any]], + auth: Optional[PluginConfig], + params: Optional[Dict[str, str]], + zipped: bool = False, + ) -> int: total_size = 0 # loop for assets size & filename @@ -879,25 +959,26 @@ def _get_asset_sizes(self, assets_values, auth, params, zipped=False): timeout=HTTP_REQ_TIMEOUT, ).headers - if not asset.get("size", 0): + if not getattr(asset, "size", 0): # size from HEAD header / Content-length - asset["size"] = int(asset_headers.get("Content-length", 0)) + asset.size = int(asset_headers.get("Content-length", 0)) - if not asset.get("size", 0) or not asset.get("filename", 0): + header_content_disposition = Message() + if not getattr(asset, "size", 0) or not getattr(asset, "filename", 0): # header content-disposition header_content_disposition = parse_header( asset_headers.get("content-disposition", "") ) - if not asset.get("size", 0): + if not getattr(asset, "size", 0): # size from HEAD header / content-disposition / size - asset["size"] = int(header_content_disposition.get_param("size", 0)) - if not asset.get("filename", 0): + asset.size = int(header_content_disposition.get_param("size", 0)) + if not getattr(asset, "filename", 0): # filename from HEAD header / content-disposition / size - asset["filename"] = header_content_disposition.get_param( + asset.filename = header_content_disposition.get_param( "filename", None ) - if not asset.get("size", 0): + if not getattr(asset, "size", 0): # GET request for size with requests.get( asset["href"], @@ -908,88 +989,27 @@ def _get_asset_sizes(self, assets_values, auth, params, zipped=False): timeout=DEFAULT_STREAM_REQUESTS_TIMEOUT, ) as stream: # size from GET header / Content-length - asset["size"] = int(stream.headers.get("Content-length", 0)) - if not asset.get("size", 0): + asset.size = int(stream.headers.get("Content-length", 0)) + if not getattr(asset, "size", 0): # size from GET header / content-disposition / size - asset["size"] = int( + asset.size = int( parse_header( stream.headers.get("content-disposition", "") ).get_param("size", 0) ) - total_size += asset["size"] + total_size += asset.size return total_size - def _stream_assets(self, product, auth=None, progress_callback=None, **kwargs): - assets_values = [ - a for a in getattr(product, "assets", {}).values() if 
"href" in a - ] - - # get extra parameters to pass to the query - params = kwargs.pop("dl_url_params", None) or getattr( - self.config, "dl_url_params", {} - ) - - total_size = self._get_asset_sizes(assets_values, auth, params) - progress_callback.reset(total_size) - - # zipped files properties - modified_at = datetime.now() - perms = 0o600 - - def get_chunks(stream): - for chunk in stream.iter_content(chunk_size=64 * 1024): - if chunk: - progress_callback(len(chunk)) - yield chunk - - for asset in assets_values: - with requests.get( - asset["href"], - stream=True, - auth=auth, - params=params, - headers=USER_AGENT, - timeout=DEFAULT_STREAM_REQUESTS_TIMEOUT, - ) as stream: - try: - stream.raise_for_status() - except RequestException as e: - self._handle_asset_exception(e, asset) - else: - asset_rel_path = urlparse(asset["href"]).path.strip("/") - - if not asset.get("filename", None): - # try getting filename in GET header if was not found in HEAD result - asset_content_disposition = stream.headers.get( - "content-disposition", None - ) - if asset_content_disposition: - asset["filename"] = parse_header( - asset_content_disposition - ).get_param("filename", None) - - if not asset.get("filename", None): - # default filename extracted from path - asset["filename"] = os.path.basename(asset_rel_path) - - yield ( - asset["filename"], - modified_at, - perms, - NO_COMPRESSION_64, - get_chunks(stream), - ) - def download_all( self, - products, - auth=None, - downloaded_callback=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, + products: SearchResult, + auth: Optional[PluginConfig] = None, + downloaded_callback: Optional[DownloadedCallback] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Union[str, bool, Dict[str, Any]], ): """ Download all using parent (base plugin) method diff --git a/eodag/plugins/download/s3rest.py b/eodag/plugins/download/s3rest.py index 4899d022c..ff00c04ef 100644 --- a/eodag/plugins/download/s3rest.py +++ b/eodag/plugins/download/s3rest.py @@ -15,11 +15,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations import hashlib import logging import os import os.path +from typing import TYPE_CHECKING, Any, Dict, Optional, Union from xml.dom import minidom from xml.parsers.expat import ExpatError @@ -27,13 +29,11 @@ from requests import RequestException from eodag.api.product.metadata_mapping import OFFLINE_STATUS, ONLINE_STATUS -from eodag.plugins.download.base import ( - DEFAULT_DOWNLOAD_TIMEOUT, - DEFAULT_DOWNLOAD_WAIT, - Download, -) +from eodag.plugins.download.base import Download from eodag.plugins.download.http import HTTPDownload from eodag.utils import ( + DEFAULT_DOWNLOAD_TIMEOUT, + DEFAULT_DOWNLOAD_WAIT, DEFAULT_STREAM_REQUESTS_TIMEOUT, HTTP_REQ_TIMEOUT, USER_AGENT, @@ -50,6 +50,10 @@ RequestError, ) +if TYPE_CHECKING: + from eodag.api.product import EOProduct + from eodag.config import PluginConfig + logger = logging.getLogger("eodag.download.s3rest") @@ -80,19 +84,19 @@ class S3RestDownload(Download): :type config: :class:`~eodag.config.PluginConfig` """ - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(S3RestDownload, self).__init__(provider, config) self.http_download_plugin = HTTPDownload(self.provider, self.config) def download( self, - product, - auth=None, - progress_callback=None, - wait=DEFAULT_DOWNLOAD_WAIT, - timeout=DEFAULT_DOWNLOAD_TIMEOUT, - **kwargs, - ): + product: EOProduct, + auth: Optional[PluginConfig] = None, + progress_callback: Optional[ProgressCallback] = None, + wait: int = DEFAULT_DOWNLOAD_WAIT, + timeout: int = DEFAULT_DOWNLOAD_TIMEOUT, + **kwargs: Union[str, bool, Dict[str, Any]], + ) -> Optional[str]: """Download method for S3 REST API. :param product: The EO product to download @@ -130,11 +134,11 @@ def download( @self._download_retry(product, wait, timeout) def download_request( - product, - auth, - progress_callback, - ordered_message, - **kwargs, + product: EOProduct, + auth: PluginConfig, + progress_callback: ProgressCallback, + ordered_message: str, + **kwargs: Any, ): # check order status if product.properties.get("orderStatusLink", None): @@ -178,7 +182,7 @@ def download_request( auth_errors = getattr(self.config, "auth_error_code", [None]) if not isinstance(auth_errors, list): auth_errors = [auth_errors] - if err.response.status_code in auth_errors: + if err.response and err.response.status_code in auth_errors: raise AuthenticationError( "HTTP Error %s returned, %s\nPlease check your credentials for %s" % ( @@ -194,8 +198,10 @@ def download_request( ): msg = ( ordered_message - if ordered_message and not err.response.text.strip() - else err.response.text.strip() + if ordered_message + and err.response + and not err.response.text.strip() + else err.response and err.response.text.strip() ) raise NotAvailableError( "%s(initially %s) requested, returned: %s" diff --git a/eodag/plugins/manager.py b/eodag/plugins/manager.py index b48043723..518142465 100644 --- a/eodag/plugins/manager.py +++ b/eodag/plugins/manager.py @@ -15,10 +15,23 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import logging from operator import attrgetter from pathlib import Path -from typing import Optional +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterator, + List, + Optional, + Tuple, + Type, + Union, + cast, +) import pkg_resources @@ -32,7 +45,13 @@ from eodag.utils import GENERIC_PRODUCT_TYPE from eodag.utils.exceptions import UnsupportedProvider -logger = logging.getLogger("eodag.manager") +if TYPE_CHECKING: + from eodag.api.product import EOProduct + from eodag.config import PluginConfig, ProviderConfig + from eodag.plugins.base import PluginTopic + + +logger = logging.getLogger("eodag.plugins.manager") class PluginManager: @@ -49,12 +68,12 @@ class PluginManager: :param providers_config: The configuration with all information about the providers supported by ``eodag`` - :type providers_config: dict + :type providers_config: dict[str, :class:`~eodag.config.ProviderConfig`] """ supported_topics = {"search", "download", "crunch", "auth", "api"} - def __init__(self, providers_config): + def __init__(self, providers_config: Dict[str, ProviderConfig]) -> None: self.providers_config = providers_config # Load all the plugins. This will make all plugin classes of a particular # type to be available in the base plugin class's 'plugins' attribute. @@ -80,10 +99,10 @@ def __init__(self, providers_config): "Check that the plugin module (%s) is importable", entry_point.module_name, ) - if entry_point.dist.key != "eodag": + if entry_point.dist and entry_point.dist.key != "eodag": # use plugin providers if any plugin_providers_config_path = [ - x + str(x) for x in Path( entry_point.dist.location, pkg_resources.to_filename(entry_point.dist.key), @@ -103,14 +122,14 @@ def rebuild(self, providers_config=None): self.providers_config = providers_config self.build_product_type_to_provider_config_map() - self._built_plugins_cache = {} + self._built_plugins_cache: Dict[Tuple[str, str], Any] = {} - def build_product_type_to_provider_config_map(self): + def build_product_type_to_provider_config_map(self) -> None: """Build mapping conf between product types and providers""" - self.product_type_to_provider_config_map = {} + self.product_type_to_provider_config_map: Dict[str, List[ProviderConfig]] = {} for provider in list(self.providers_config): provider_config = self.providers_config[provider] - if not hasattr(provider_config, "products"): + if not hasattr(provider_config, "products") or not provider_config.products: logger.info( "%s: provider has no product configured and will be skipped" % provider @@ -131,7 +150,9 @@ def build_product_type_to_provider_config_map(self): product_type_providers.append(provider_config) product_type_providers.sort(key=attrgetter("priority"), reverse=True) - def get_search_plugins(self, product_type=None, provider=None): + def get_search_plugins( + self, product_type: Optional[str] = None, provider: Optional[str] = None + ) -> Iterator[Union[Search, Api]]: """Build and return all the search plugins supporting the given product type, ordered by highest priority, or the search plugin of the given provider @@ -142,20 +163,23 @@ def get_search_plugins(self, product_type=None, provider=None): :type provider: str :returns: All the plugins supporting the product type, one by one (a generator object) - :rtype: types.GeneratorType(:class:`~eodag.plugins.search.Search`) + :rtype: types.GeneratorType(:class:`~eodag.plugins.search.Search` or :class:`~eodag.plugins.download.Api`) :raises: :class:`~eodag.utils.exceptions.UnsupportedProvider` 
:raises: :class:`~eodag.utils.exceptions.UnsupportedProductType` """ - def get_plugin(): + def get_plugin() -> Union[Search, Api]: + plugin: Union[Search, Api] try: config.search.products = config.products config.search.priority = config.priority - plugin = self._build_plugin(config.name, config.search, Search) + plugin = cast( + Search, self._build_plugin(config.name, config.search, Search) + ) except AttributeError: config.api.products = config.products config.api.priority = config.priority - plugin = self._build_plugin(config.name, config.api, Api) + plugin = cast(Api, self._build_plugin(config.name, config.api, Api)) return plugin if provider is not None: @@ -186,25 +210,28 @@ def get_plugin(): ]: yield get_plugin() - def get_download_plugin(self, product): + def get_download_plugin(self, product: EOProduct) -> Union[Download, Api]: """Build and return the download plugin capable of downloading the given product. :param product: The product to get a download plugin for :type product: :class:`~eodag.api.product._product.EOProduct` :returns: The download plugin capable of downloading the product - :rtype: :class:`~eodag.plugins.download.Download` + :rtype: :class:`~eodag.plugins.download.Download` or :class:`~eodag.plugins.download.Api` """ plugin_conf = self.providers_config[product.provider] try: plugin_conf.download.priority = plugin_conf.priority - plugin = self._build_plugin( - product.provider, plugin_conf.download, Download + plugin = cast( + Download, + self._build_plugin(product.provider, plugin_conf.download, Download), ) return plugin except AttributeError: plugin_conf.api.priority = plugin_conf.priority - plugin = self._build_plugin(product.provider, plugin_conf.api, Api) + plugin = cast( + Api, self._build_plugin(product.provider, plugin_conf.api, Api) + ) return plugin def get_auth_plugin(self, provider: str) -> Optional[Authentication]: @@ -219,7 +246,10 @@ def get_auth_plugin(self, provider: str) -> Optional[Authentication]: plugin_conf = self.providers_config[provider] try: plugin_conf.auth.priority = plugin_conf.priority - plugin = self._build_plugin(provider, plugin_conf.auth, Authentication) + plugin = cast( + Authentication, + self._build_plugin(provider, plugin_conf.auth, Authentication), + ) return plugin except AttributeError: # We guess the plugin being built is of type Api, therefore no need @@ -227,7 +257,7 @@ def get_auth_plugin(self, provider: str) -> Optional[Authentication]: return None @staticmethod - def get_crunch_plugin(name, **options): + def get_crunch_plugin(name: str, **options: Any) -> Crunch: """Instantiate a eodag Crunch plugin whom class name is `name`, and configure it with the `options` @@ -241,12 +271,12 @@ def get_crunch_plugin(name, **options): Klass = Crunch.get_plugin_by_class_name(name) return Klass(options) - def sort_providers(self): + def sort_providers(self) -> None: """Sort providers taking into account current priority order""" for provider_configs in self.product_type_to_provider_config_map.values(): provider_configs.sort(key=attrgetter("priority"), reverse=True) - def set_priority(self, provider, priority): + def set_priority(self, provider: str, priority: int) -> None: """Set the priority of the given provider :param provider: The provider which is assigned the priority @@ -257,7 +287,7 @@ def set_priority(self, provider, priority): # Update the priority in the configurations so that it is taken into account # when a plugin of this provider is latterly built for ( - product_type, + _, provider_configs, ) in 
self.product_type_to_provider_config_map.items(): for config in provider_configs: @@ -270,7 +300,12 @@ def set_priority(self, provider, priority): if provider_name == provider: self._built_plugins_cache[(provider, topic_class)].priority = priority - def _build_plugin(self, provider, plugin_conf, topic_class): + def _build_plugin( + self, + provider: str, + plugin_conf: PluginConfig, + topic_class: Type[PluginTopic], + ) -> Union[Api, Search, Download, Authentication, Crunch]: """Build the plugin of the given topic with the given plugin configuration and registered as the given provider @@ -294,6 +329,8 @@ def _build_plugin(self, provider, plugin_conf, topic_class): plugin_class = EODAGPluginMount.get_plugin_by_class_name( topic_class, getattr(plugin_conf, "type") ) - plugin = plugin_class(provider, plugin_conf) + plugin: Union[Api, Search, Download, Authentication, Crunch] = plugin_class( + provider, plugin_conf + ) self._built_plugins_cache[(provider, topic_class.__name__)] = plugin return plugin diff --git a/eodag/plugins/search/base.py b/eodag/plugins/search/base.py index 50bb4d3fe..b84d2e490 100644 --- a/eodag/plugins/search/base.py +++ b/eodag/plugins/search/base.py @@ -15,15 +15,29 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations import logging +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple + +from pydantic.fields import Field, FieldInfo from eodag.api.product.metadata_mapping import ( DEFAULT_METADATA_MAPPING, mtd_cfg_as_conversion_and_querypath, ) from eodag.plugins.base import PluginTopic -from eodag.utils import GENERIC_PRODUCT_TYPE, format_dict_items +from eodag.utils import ( + DEFAULT_ITEMS_PER_PAGE, + DEFAULT_PAGE, + GENERIC_PRODUCT_TYPE, + Annotated, + format_dict_items, +) + +if TYPE_CHECKING: + from eodag.api.product import EOProduct + from eodag.config import PluginConfig logger = logging.getLogger("eodag.search.base") @@ -31,31 +45,39 @@ class Search(PluginTopic): """Base Search Plugin. - :param provider: An eodag providers configuration dictionary - :type provider: dict - :param config: Path to the user configuration file - :type config: str + :param provider: An EODAG provider name + :type provider: str + :param config: An EODAG plugin configuration + :type config: :class:`~eodag.config.PluginConfig` """ - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(Search, self).__init__(provider, config) # Prepare the metadata mapping # Do a shallow copy, the structure is flat enough for this to be sufficient metas = DEFAULT_METADATA_MAPPING.copy() # Update the defaults with the mapping value. 
This will add any new key # added by the provider mapping that is not in the default metadata - metas.update(self.config.metadata_mapping) + if self.config.metadata_mapping: + metas.update(self.config.metadata_mapping) self.config.metadata_mapping = mtd_cfg_as_conversion_and_querypath( metas, self.config.metadata_mapping, result_type=getattr(self.config, "result_type", "json"), ) - def clear(self): + def clear(self) -> None: """Method used to clear a search context between two searches.""" pass - def query(self, *args, count=True, **kwargs): + def query( + self, + product_type: Optional[str] = None, + items_per_page: int = DEFAULT_ITEMS_PER_PAGE, + page: int = DEFAULT_PAGE, + count: bool = True, + **kwargs: Any, + ) -> Tuple[List[EOProduct], Optional[int]]: """Implementation of how the products must be searched goes here. This method must return a tuple with (1) a list of EOProduct instances (see eodag.api.product module) @@ -64,11 +86,43 @@ def query(self, *args, count=True, **kwargs): """ raise NotImplementedError("A Search plugin must implement a method named query") - def discover_product_types(self): + def discover_product_types(self) -> Optional[Dict[str, Any]]: """Fetch product types list from provider using `discover_product_types` conf""" - return + return None + + def discover_queryables( + self, **kwargs: Any + ) -> Optional[Dict[str, Annotated[Any, FieldInfo]]]: + """Fetch queryables list from provider using `discover_queryables` conf + + :param kwargs: additional filters for queryables (`productType` and other search + arguments) + :type kwargs: Any + :returns: fetched queryable parameters dict + :rtype: Optional[Dict[str, Annotated[Any, FieldInfo]]] + """ + return None - def map_product_type(self, product_type, **kwargs): + def get_defaults_as_queryables( + self, product_type: str + ) -> Dict[str, Annotated[Any, FieldInfo]]: + """ + Return given product type default settings as queryables + + :param product_type: given product type + :type product_type: str + :returns: queryable parameters dict + :rtype: Dict[str, Annotated[Any, FieldInfo]] + """ + defaults = self.config.products.get(product_type, {}) + queryables = {} + for parameter, value in defaults.items(): + queryables[parameter] = Annotated[type(value), Field(default=value)] + return queryables + + def map_product_type( + self, product_type: Optional[str], **kwargs: Any + ) -> Optional[str]: """Get the provider product type from eodag product type :param product_type: eodag product type @@ -77,13 +131,15 @@ def map_product_type(self, product_type, **kwargs): :rtype: str """ if product_type is None: - return + return None logger.debug("Mapping eodag product type to provider product type") return self.config.products.get(product_type, {}).get( "productType", GENERIC_PRODUCT_TYPE ) - def get_product_type_def_params(self, product_type, **kwargs): + def get_product_type_def_params( + self, product_type: str, **kwargs: Any + ) -> Dict[str, Any]: """Get the provider product type definition parameters and specific settings :param product_type: the desired product type @@ -112,7 +168,9 @@ def get_product_type_def_params(self, product_type, **kwargs): else: return {} - def get_metadata_mapping(self, product_type=None): + def get_metadata_mapping( + self, product_type: Optional[str] = None + ) -> Dict[str, str]: """Get the plugin metadata mapping configuration (product type specific if exists) :param product_type: the desired product type diff --git a/eodag/plugins/search/build_search_result.py
b/eodag/plugins/search/build_search_result.py index 545e38f20..04fc14c05 100644 --- a/eodag/plugins/search/build_search_result.py +++ b/eodag/plugins/search/build_search_result.py @@ -15,20 +15,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations import hashlib import logging +from typing import Any, Dict, List, Optional, Tuple +from urllib.parse import quote_plus, unquote_plus import geojson import orjson from jsonpath_ng import Fields from eodag.api.product import EOProduct -from eodag.api.product.metadata_mapping import ( - NOT_AVAILABLE, - NOT_MAPPED, - properties_from_json, -) +from eodag.api.product.metadata_mapping import properties_from_json from eodag.plugins.search.qssearch import PostJsonSearch from eodag.utils import dict_items_recursive_sort @@ -61,18 +60,26 @@ class BuildPostSearchResult(PostJsonSearch): :type config: str """ - def count_hits(self, count_url=None, result_type=None): + def count_hits( + self, count_url: Optional[str] = None, result_type: Optional[str] = None + ) -> int: """Count method that will always return 1.""" return 1 - def collect_search_urls(self, page=None, items_per_page=None, count=True, **kwargs): + def collect_search_urls( + self, + page: Optional[int] = None, + items_per_page: Optional[int] = None, + count: bool = True, + **kwargs: Any, + ) -> Tuple[List[str], int]: """Wraps PostJsonSearch.collect_search_urls to force product count to 1""" urls, _ = super(BuildPostSearchResult, self).collect_search_urls( page=page, items_per_page=items_per_page, count=count, **kwargs ) return urls, 1 - def do_search(self, *args, **kwargs): + def do_search(self, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]: """Perform the actual search request, and return result in a single element.""" search_url = self.search_urls[0] response = self._request( @@ -83,7 +90,9 @@ def do_search(self, *args, **kwargs): ) return [response.json()] - def normalize_results(self, results, **kwargs): + def normalize_results( + self, results: List[Dict[str, Any]], **kwargs: Any + ) -> List[EOProduct]: """Build :class:`~eodag.api.product._product.EOProduct` from provider result :param results: Raw provider result as single dict in list @@ -97,22 +106,39 @@ def normalize_results(self, results, **kwargs): result = results[0] - # update result with query parameters without pagination (or search-only params) - if isinstance(self.config.pagination["next_page_query_obj"], str) and hasattr( - self, "query_params_unpaginated" - ): - unpaginated_query_params = self.query_params_unpaginated - elif isinstance(self.config.pagination["next_page_query_obj"], str): - next_page_query_obj = orjson.loads( - self.config.pagination["next_page_query_obj"].format() + # datacube query string got from previous search + _dc_qs = kwargs.pop("_dc_qs", None) + if _dc_qs is not None: + qs = unquote_plus(unquote_plus(_dc_qs)) + unpaginated_query_params = sorted_unpaginated_query_params = geojson.loads( + qs ) - unpaginated_query_params = { - k: v - for k, v in self.query_params.items() - if (k, v) not in next_page_query_obj.items() - } else: - unpaginated_query_params = self.query_params + # update result with query parameters without pagination (or search-only params) + if isinstance( + self.config.pagination["next_page_query_obj"], str + ) and hasattr(self, "query_params_unpaginated"): + unpaginated_query_params = self.query_params_unpaginated + elif 
isinstance(self.config.pagination["next_page_query_obj"], str): + next_page_query_obj = orjson.loads( + self.config.pagination["next_page_query_obj"].format() + ) + unpaginated_query_params = { + k: v[0] if (isinstance(v, list) and len(v) == 1) else v + for k, v in self.query_params.items() + if (k, v) not in next_page_query_obj.items() + } + else: + unpaginated_query_params = self.query_params + + # query hash, will be used to build a product id + sorted_unpaginated_query_params = dict_items_recursive_sort( + unpaginated_query_params + ) + qs = geojson.dumps(sorted_unpaginated_query_params) + + query_hash = hashlib.sha1(str(qs).encode("UTF-8")).hexdigest() + result = dict(result, **unpaginated_query_params) # update result with search args if not None (and not auth) @@ -129,20 +155,6 @@ def normalize_results(self, results, **kwargs): if not product_type: product_type = parsed_properties.get("productType", None) - # filter available mapped properties - product_available_properties = { - k: v - for (k, v) in parsed_properties.items() - if v not in (NOT_AVAILABLE, NOT_MAPPED) - } - - # query hash, will be used to build a product id - sorted_unpaginated_query_params = dict_items_recursive_sort( - unpaginated_query_params - ) - qs = geojson.dumps(sorted_unpaginated_query_params) - query_hash = hashlib.sha1(str(qs).encode("UTF-8")).hexdigest() - # build product id id_prefix = (product_type or self.provider).upper() product_id = "%s_%s_%s" % ( @@ -152,30 +164,29 @@ def normalize_results(self, results, **kwargs): .replace("-", ""), query_hash, ) - product_available_properties["id"] = product_available_properties[ - "title" - ] = product_id + parsed_properties["id"] = parsed_properties["title"] = product_id # update downloadLink - product_available_properties["downloadLink"] += f"?{qs}" + parsed_properties["downloadLink"] += f"?{qs}" + parsed_properties["_dc_qs"] = quote_plus(qs) # parse metadata needing downloadLink for param, mapping in self.config.metadata_mapping.items(): if Fields("downloadLink") in mapping: - product_available_properties.update( - properties_from_json(product_available_properties, {param: mapping}) + parsed_properties.update( + properties_from_json(parsed_properties, {param: mapping}) ) # use product_type_config as default properties - product_available_properties = dict( + parsed_properties = dict( getattr(self.config, "product_type_config", {}), - **product_available_properties, + **parsed_properties, ) product = EOProduct( provider=self.provider, productType=product_type, - properties=product_available_properties, + properties=parsed_properties, ) return [ diff --git a/eodag/plugins/search/creodias_s3.py b/eodag/plugins/search/creodias_s3.py new file mode 100644 index 000000000..8bec50932 --- /dev/null +++ b/eodag/plugins/search/creodias_s3.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +# Copyright 2024, CS GROUP - France, https://www.csgroup.eu/ +# +# This file is part of EODAG project +# https://www.github.com/CS-SI/EODAG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
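In `normalize_results` above, the sorted, unpaginated query parameters are serialized once, hashed into the product id, and stored percent-encoded in the new `_dc_qs` property; on the read path `unquote_plus` is applied twice since the value may have been encoded again when carried inside a URL. A sketch of that round trip (parameter values are illustrative):

import hashlib
from urllib.parse import quote_plus, unquote_plus

import geojson

params = {"variable": "temperature", "year": "2023"}
qs = geojson.dumps(params)  # serialized query parameters, sorted on write
query_hash = hashlib.sha1(str(qs).encode("UTF-8")).hexdigest()  # feeds the product id
stored = quote_plus(qs)  # value kept in the product's _dc_qs property
restored = geojson.loads(unquote_plus(unquote_plus(stored)))
assert restored == params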
+import logging +from types import MethodType +from typing import Any, Dict, List + +import boto3 +import botocore +from botocore.exceptions import BotoCoreError + +from eodag import EOProduct +from eodag.api.product._assets import AssetsDict +from eodag.config import PluginConfig +from eodag.plugins.authentication.aws_auth import AwsAuth +from eodag.plugins.search.qssearch import QueryStringSearch +from eodag.utils.exceptions import AuthenticationError, MisconfiguredError, RequestError + +DATA_EXTENSIONS = ["jp2", "tiff", "nc", "grib"] +logger = logging.getLogger("eodag.search.creodiass3") + + +def patched_register_downloader(self, downloader, authenticator): + """Add the download information to the product. + :param self: product to which information should be added + :type self: EOProduct + :param downloader: The download method that it can use + :type downloader: Concrete subclass of + :class:`~eodag.plugins.download.base.Download` or + :class:`~eodag.plugins.api.base.Api` + :param authenticator: The authentication method needed to perform the download + :type authenticator: Concrete subclass of + :class:`~eodag.plugins.authentication.base.Authentication` + """ + # register downloader + self.register_downloader_only(downloader, authenticator) + # and also update assets + try: + _update_assets(self, downloader.config, authenticator) + except BotoCoreError as e: + raise RequestError(f"could not update assets: {str(e)}") from e + + +def _update_assets(product: EOProduct, config: PluginConfig, auth: AwsAuth): + product.assets = {} + prefix = ( + product.properties.get("productIdentifier", None).replace("/eodata/", "") + "/" + ) + if prefix: + try: + auth_dict = auth.authenticate() + required_creds = ["aws_access_key_id", "aws_secret_access_key"] + if not all(getattr(auth, x) for x in required_creds): + raise MisconfiguredError( + f"Incomplete credentials for {product.provider}, missing " + f"{[x for x in required_creds if not getattr(auth, x)]}" + ) + if not getattr(auth, "s3_client", None): + auth.s3_client = boto3.client( + "s3", + endpoint_url=config.base_uri, + **auth_dict, + ) + logger.debug(f"Listing assets in {prefix}") + product.assets = AssetsDict(product) + for asset in auth.s3_client.list_objects( + Bucket=config.s3_bucket, Prefix=prefix, MaxKeys=300 + )["Contents"]: + asset_basename = ( + asset["Key"].split("/")[-1] if "/" in asset["Key"] else asset["Key"] + ) + + if len(asset_basename) > 0 and asset_basename not in product.assets: + role = ( + "data" + if asset_basename.split(".")[-1] in DATA_EXTENSIONS + else "metadata" + ) + + product.assets[asset_basename] = { + "title": asset_basename, + "roles": [role], + "href": f"s3://{config.s3_bucket}/{asset['Key']}", + } + # update driver + product.driver = product.get_driver() + + except botocore.exceptions.ClientError as e: + if str(auth.config.auth_error_code) in str(e): + raise AuthenticationError( + f"Authentication failed on {config.base_uri} s3" + ) from e + else: + raise RequestError( + f"assets for product {prefix} could not be found" + ) from e + + +class CreodiasS3Search(QueryStringSearch): + """ + Search on creodias and adapt results to s3 + """ + + def __init__(self, provider: str, config: PluginConfig) -> None: + super(CreodiasS3Search, self).__init__(provider, config) + + def normalize_results( + self, results: List[Dict[str, Any]], **kwargs: Any + ) -> List[EOProduct]: + """Build EOProducts from provider results""" + + products = super(CreodiasS3Search, self).normalize_results(results, **kwargs) + + for product in products: + # backup original
register_downloader to register_downloader_only + product.register_downloader_only = product.register_downloader + # patched register_downloader that will also update assets + product.register_downloader = MethodType( + patched_register_downloader, product + ) + + return products diff --git a/eodag/plugins/search/csw.py b/eodag/plugins/search/csw.py index 1d93c096d..316daa58c 100644 --- a/eodag/plugins/search/csw.py +++ b/eodag/plugins/search/csw.py @@ -15,9 +15,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations import logging import re +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union import pyproj from owslib.csw import CatalogueServiceWeb @@ -34,9 +36,15 @@ from eodag.api.product import EOProduct from eodag.api.product.metadata_mapping import properties_from_xml from eodag.plugins.search.base import Search -from eodag.utils import DEFAULT_PROJ +from eodag.utils import DEFAULT_ITEMS_PER_PAGE, DEFAULT_PAGE, DEFAULT_PROJ from eodag.utils.import_system import patch_owslib_requests +if TYPE_CHECKING: + from owslib.fes import OgcExpression + + from eodag.config import PluginConfig + + logger = logging.getLogger("eodag.search.csw") SUPPORTED_REFERENCE_SCHEMES = ["WWW:DOWNLOAD-1.0-http--download"] @@ -45,25 +53,33 @@ class CSWSearch(Search): """A plugin for implementing search based on OGC CSW""" - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(CSWSearch, self).__init__(provider, config) self.catalog = None - def clear(self): + def clear(self) -> None: """Clear search context""" super().clear() self.catalog = None - def query(self, product_type=None, auth=None, count=True, **kwargs): + def query( + self, + product_type: Optional[str] = None, + items_per_page: int = DEFAULT_ITEMS_PER_PAGE, + page: int = DEFAULT_PAGE, + count: bool = True, + **kwargs: Any, + ) -> Tuple[List[EOProduct], Optional[int]]: """Perform a search on a OGC/CSW-like interface""" product_type = kwargs.get("productType") if product_type is None: return [], 0 - if auth is not None: + auth = kwargs.get("auth") + if auth: self.__init_catalog(**getattr(auth.config, "credentials", {})) else: self.__init_catalog() - results = [] + results: List[EOProduct] = [] if self.catalog: provider_product_type = self.config.products[product_type]["productType"] for product_type_def in self.config.search_definition["product_type_tags"]: @@ -105,7 +121,9 @@ def query(self, product_type=None, auth=None, count=True, **kwargs): total_results = len(results) if count else None return results, total_results - def __init_catalog(self, username=None, password=None): + def __init_catalog( + self, username: Optional[str] = None, password: Optional[str] = None + ) -> None: """Initializes a catalogue by performing a GetCapabilities request on the url""" if not self.catalog: api_endpoint = self.config.api_endpoint @@ -126,7 +144,7 @@ def __init_catalog(self, username=None, password=None): e, ) - def __build_product(self, rec, product_type, **kwargs): + def __build_product(self, rec: Any, product_type: str, **kwargs: Any) -> EOProduct: """Enable search results to be handled by http download plugin""" download_url = "" resource_filter = re.compile( @@ -171,9 +189,14 @@ def __build_product(self, rec, product_type, **kwargs): searched_bbox=kwargs.get("footprints"), ) - def __convert_query_params(self, 
product_type_def, product_type, params): + def __convert_query_params( + self, + product_type_def: Dict[str, Any], + product_type: str, + params: Dict[str, Any], + ) -> Union[List[OgcExpression], List[List[OgcExpression]]]: """Translates eodag search to CSW constraints using owslib constraint classes""" - constraints = [] + constraints: List[OgcExpression] = [] # How the match should be performed (fuzzy, prefix, postfix or exact). # defaults to fuzzy pt_tag, matching = ( @@ -213,4 +236,5 @@ def __convert_query_params(self, product_type_def, product_type, params): self.config.search_definition["date_tags"]["end"], end ) ) + # [[a, b]] is interpreted as a && b while [a, b] is interpreted as a || b return [constraints] if len(constraints) > 1 else constraints diff --git a/eodag/plugins/search/data_request_search.py b/eodag/plugins/search/data_request_search.py index 7a09c6a2d..1dbfeec2e 100644 --- a/eodag/plugins/search/data_request_search.py +++ b/eodag/plugins/search/data_request_search.py @@ -1,11 +1,30 @@ +# -*- coding: utf-8 -*- +# Copyright 2023, CS GROUP - France, https://www.csgroup.eu/ +# +# This file is part of EODAG project +# https://www.github.com/CS-SI/EODAG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + import logging import time from datetime import datetime, timedelta +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple import requests from eodag import EOProduct -from eodag.api.core import DEFAULT_ITEMS_PER_PAGE, DEFAULT_PAGE from eodag.api.product.metadata_mapping import ( format_query_params, mtd_cfg_as_conversion_and_querypath, @@ -14,13 +33,18 @@ from eodag.plugins.search.base import Search from eodag.rest.stac import DEFAULT_MISSION_START_DATE from eodag.utils import ( + DEFAULT_ITEMS_PER_PAGE, + DEFAULT_PAGE, GENERIC_PRODUCT_TYPE, HTTP_REQ_TIMEOUT, USER_AGENT, deepcopy, string_to_jsonpath, ) -from eodag.utils.exceptions import NotAvailableError, RequestError +from eodag.utils.exceptions import NotAvailableError, RequestError, TimeOutError + +if TYPE_CHECKING: + from eodag.config import PluginConfig logger = logging.getLogger("eodag.search.data_request_search") @@ -33,7 +57,7 @@ class DataRequestSearch(Search): - if finished - fetch the result of the job """ - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(DataRequestSearch, self).__init__(provider, config) self.config.__dict__.setdefault("result_type", "json") self.config.__dict__.setdefault("results_entry", "content") @@ -80,20 +104,27 @@ def __init__(self, provider, config): self.download_info = {} self.data_request_id = None - def discover_product_types(self): + def discover_product_types(self) -> Optional[Dict[str, Any]]: """Fetch product types is disabled for `DataRequestSearch` :returns: empty dict - :rtype: dict + :rtype: (optional) dict """ - return {} + return None - def clear(self): + def clear(self) -> None: """Clear search context""" super().clear() self.data_request_id = None - def query(self, 
*args, count=True, **kwargs): + def query( + self, + product_type: Optional[str] = None, + items_per_page: int = DEFAULT_ITEMS_PER_PAGE, + page: int = DEFAULT_PAGE, + count: bool = True, + **kwargs: Any, + ) -> Tuple[List[EOProduct], Optional[int]]: """ performs the search for a provider where several steps are required to fetch the data """ @@ -102,7 +133,7 @@ def query(self, *args, count=True, **kwargs): # for compatibility with DataRequestSearch method if kwargs.get("product_type"): kwargs["providerProductType"] = kwargs.pop("product_type", None) - provider_product_type = self._map_product_type(product_type) + provider_product_type = self._map_product_type(product_type or "") keywords = {k: v for k, v in kwargs.items() if k != "auth" and v is not None} if provider_product_type and provider_product_type != GENERIC_PRODUCT_TYPE: @@ -208,10 +239,12 @@ def query(self, *args, count=True, **kwargs): result, self.config.products[product_type]["custom_filters"] ) return self._convert_result_data( - result, data_request_id, product_type, **kwargs + result, data_request_id, product_type or "", **kwargs ) - def _create_data_request(self, product_type, eodag_product_type, **kwargs): + def _create_data_request( + self, product_type: str, eodag_product_type: str, **kwargs: Any + ) -> str: headers = getattr(self.auth, "headers", USER_AGENT) try: url = self.config.data_request_url @@ -225,6 +258,8 @@ def _create_data_request(self, product_type, eodag_product_type, **kwargs): url, json=request_body, headers=headers, timeout=HTTP_REQ_TIMEOUT ) request_job.raise_for_status() + except requests.exceptions.Timeout as exc: + raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc except requests.RequestException as e: raise RequestError( f"search job for product_type {product_type} could not be created: {str(e)}, {request_job.text}" @@ -233,7 +268,7 @@ def _create_data_request(self, product_type, eodag_product_type, **kwargs): logger.info("search job for product_type %s created", product_type) return request_job.json()["jobId"] - def _cancel_request(self, data_request_id): + def _cancel_request(self, data_request_id: str) -> None: logger.info("deleting request job %s", data_request_id) delete_url = f"{self.config.data_request_url}/{data_request_id}" try: @@ -241,10 +276,12 @@ def _cancel_request(self, data_request_id): delete_url, headers=self.auth.headers, timeout=HTTP_REQ_TIMEOUT ) delete_resp.raise_for_status() + except requests.exceptions.Timeout as exc: + raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc except requests.RequestException as e: raise RequestError(f"_cancel_request failed: {str(e)}") - def _check_request_status(self, data_request_id): + def _check_request_status(self, data_request_id: str) -> bool: logger.debug("checking status of request job %s", data_request_id) status_url = self.config.status_url + data_request_id try: @@ -252,6 +289,8 @@ def _check_request_status(self, data_request_id): status_url, headers=self.auth.headers, timeout=HTTP_REQ_TIMEOUT ) status_resp.raise_for_status() + except requests.exceptions.Timeout as exc: + raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc except requests.RequestException as e: raise RequestError(f"_check_request_status failed: {str(e)}") else: @@ -269,7 +308,9 @@ def _check_request_status(self, data_request_id): ) return status_data["status"] == "completed" - def _get_result_data(self, data_request_id, items_per_page, page): + def _get_result_data( + self, data_request_id: str, items_per_page: int, page: int + ) -> Dict[str, 
Any]: page = page - 1 + self.config.pagination.get("start_page", 1) url = self.config.result_url.format( jobId=data_request_id, items_per_page=items_per_page, page=page @@ -278,19 +319,26 @@ def _get_result_data(self, data_request_id, items_per_page, page): return requests.get( url, headers=self.auth.headers, timeout=HTTP_REQ_TIMEOUT ).json() + except requests.exceptions.Timeout as exc: + raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc except requests.RequestException: logger.error(f"Result could not be retrieved for {url}") + return {} def _convert_result_data( - self, result_data, data_request_id, product_type, **kwargs - ): + self, + result_data: Dict[str, Any], + data_request_id: str, + product_type: str, + **kwargs: Any, + ) -> Tuple[List[EOProduct], int]: """Build EOProducts from provider results""" results_entry = self.config.results_entry results = result_data[results_entry] logger.debug( "Adapting %s plugin results to eodag product representation" % len(results) ) - products = [] + products: List[EOProduct] = [] for result in results: product = EOProduct( self.provider, @@ -334,7 +382,7 @@ def _convert_result_data( } return products, total_items_nb - def _check_uses_custom_filters(self, product_type): + def _check_uses_custom_filters(self, product_type: str) -> bool: if ( product_type in self.config.products and "custom_filters" in self.config.products[product_type] @@ -342,7 +390,9 @@ def _check_uses_custom_filters(self, product_type): return True return False - def _apply_additional_filters(self, result, custom_filters): + def _apply_additional_filters( + self, result: Dict[str, Any], custom_filters: Dict[str, str] + ) -> Dict[str, Any]: filtered_result = [] results_entry = self.config.results_entry results = result[results_entry] @@ -357,10 +407,10 @@ def _apply_additional_filters(self, result, custom_filters): result[results_entry] = filtered_result return result - def _map_product_type(self, product_type): + def _map_product_type(self, product_type: Optional[str]) -> Optional[str]: """Map the eodag product type to the provider product type""" if product_type is None: - return + return None logger.debug("Mapping eodag product type to provider product type") return self.config.products.get(product_type, {}).get( "productType", GENERIC_PRODUCT_TYPE diff --git a/eodag/plugins/search/qssearch.py b/eodag/plugins/search/qssearch.py index 769562ffa..11f1103e6 100644 --- a/eodag/plugins/search/qssearch.py +++ b/eodag/plugins/search/qssearch.py @@ -15,9 +15,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
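A pattern repeated across the plugins in this diff converts `requests.exceptions.Timeout` into the new `TimeOutError`, so callers get one exception type whatever the provider plugin. Condensed into a standalone helper (`checked_get` is a hypothetical name; the imports mirror those added above):

import requests

from eodag.utils import HTTP_REQ_TIMEOUT, USER_AGENT
from eodag.utils.exceptions import RequestError, TimeOutError

def checked_get(url: str) -> requests.Response:
    """GET url, normalizing timeouts and failures to eodag exceptions."""
    try:
        response = requests.get(url, headers=USER_AGENT, timeout=HTTP_REQ_TIMEOUT)
        response.raise_for_status()
    except requests.exceptions.Timeout as exc:
        raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc
    except requests.RequestException as exc:
        raise RequestError(f"request failed: {str(exc)}") from exc
    return response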
+from __future__ import annotations import logging import re +from collections.abc import Iterable +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple, cast from urllib.error import URLError from urllib.request import Request, urlopen @@ -25,30 +28,48 @@ import requests import yaml from lxml import etree +from pydantic import create_model +from pydantic.fields import FieldInfo +from requests import Response +from requests.adapters import HTTPAdapter from eodag.api.product import EOProduct from eodag.api.product.metadata_mapping import ( NOT_AVAILABLE, format_query_params, + get_queryable_from_provider, mtd_cfg_as_conversion_and_querypath, properties_from_json, properties_from_xml, ) from eodag.plugins.search.base import Search +from eodag.types import json_field_definition_to_python, model_fields_to_annotated from eodag.utils import ( + DEFAULT_ITEMS_PER_PAGE, + DEFAULT_PAGE, GENERIC_PRODUCT_TYPE, HTTP_REQ_TIMEOUT, USER_AGENT, + Annotated, _deprecated, deepcopy, dict_items_recursive_apply, format_dict_items, + get_args, quote, string_to_jsonpath, update_nested_dict, urlencode, ) -from eodag.utils.exceptions import AuthenticationError, MisconfiguredError, RequestError +from eodag.utils.exceptions import ( + AuthenticationError, + MisconfiguredError, + RequestError, + TimeOutError, +) + +if TYPE_CHECKING: + from eodag.config import PluginConfig logger = logging.getLogger("eodag.search.qssearch") @@ -154,14 +175,14 @@ class QueryStringSearch(Search): DEFAULT_ITEMS_PER_PAGE = 10 extract_properties = {"xml": properties_from_xml, "json": properties_from_json} - def __init__(self, provider, config): + def __init__(self, provider: str, config: PluginConfig) -> None: super(QueryStringSearch, self).__init__(provider, config) self.config.__dict__.setdefault("result_type", "json") self.config.__dict__.setdefault("results_entry", "features") self.config.__dict__.setdefault("pagination", {}) self.config.__dict__.setdefault("free_text_search_operations", {}) - self.search_urls = [] - self.query_params = dict() + self.search_urls: List[str] = [] + self.query_params: Dict[str, str] = dict() self.query_string = "" self.next_page_url = None self.next_page_query_obj = None @@ -231,6 +252,16 @@ def __init__(self, provider, config): ] ) + # parse jsonpath on init: queryables discovery + if ( + getattr(self.config, "discover_queryables", {}).get("results_entry", None) + and getattr(self.config, "discover_queryables", {}).get("result_type", None) + == "json" + ): + self.config.discover_queryables["results_entry"] = string_to_jsonpath( + self.config.discover_queryables["results_entry"], force=True + ) + # parse jsonpath on init: product type specific metadata-mapping for product_type in self.config.products.keys(): if "metadata_mapping" in self.config.products[product_type].keys(): @@ -244,26 +275,38 @@ def __init__(self, provider, config): # update config using provider product type definition metadata_mapping # from another product - other_product_for_mapping = self.config.products[product_type].get( - "metadata_mapping_from_product", "" + other_product_for_mapping = cast( + str, + self.config.products[product_type].get( + "metadata_mapping_from_product", "" + ), ) if other_product_for_mapping: other_product_type_def_params = self.get_product_type_def_params( - other_product_for_mapping, # **kwargs + other_product_for_mapping, ) - product_type_metadata_mapping.update( - other_product_type_def_params.get("metadata_mapping", {}) + other_product_type_mtd_mapping = ( + 
mtd_cfg_as_conversion_and_querypath( + other_product_type_def_params.get("metadata_mapping", {}) + ) ) - # from current product - product_type_metadata_mapping.update( - self.config.products[product_type]["metadata_mapping"] - ) + # updated mapping at the end + for metadata, mapping in other_product_type_mtd_mapping.items(): + product_type_metadata_mapping.pop(metadata, None) + product_type_metadata_mapping[metadata] = mapping + + # from current product, updated mapping at the end + for metadata, mapping in self.config.products[product_type][ + "metadata_mapping" + ].items(): + product_type_metadata_mapping.pop(metadata, None) + product_type_metadata_mapping[metadata] = mapping self.config.products[product_type][ "metadata_mapping" ] = product_type_metadata_mapping - def clear(self): + def clear(self) -> None: """Clear search context""" super().clear() self.search_urls.clear() @@ -273,15 +316,18 @@ def clear(self): self.next_page_query_obj = None self.next_page_merge = None - def discover_product_types(self): + def discover_product_types(self) -> Optional[Dict[str, Any]]: """Fetch product types list from provider using `discover_product_types` conf :returns: configuration dict containing fetched product types information - :rtype: dict + :rtype: (optional) dict """ try: - fetch_url = self.config.discover_product_types["fetch_url"].format( - **self.config.__dict__ + fetch_url = cast( + str, + self.config.discover_product_types["fetch_url"].format( + **self.config.__dict__ + ), ) response = QueryStringSearch._request( self, @@ -291,9 +337,14 @@ def discover_product_types(self): "{} {} instance:".format(self.provider, self.__class__.__name__), ) except (RequestError, KeyError, AttributeError): - return + return None else: try: + conf_update_dict = { + "providers_config": {}, + "product_types_config": {}, + } + if self.config.discover_product_types["result_type"] == "json": resp_as_json = response.json() # extract results from response json @@ -304,11 +355,6 @@ def discover_product_types(self): ].find(resp_as_json) ] - conf_update_dict = { - "providers_config": {}, - "product_types_config": {}, - } - for product_type_result in result: # providers_config extraction extracted_mapping = properties_from_json( @@ -391,14 +437,21 @@ def discover_product_types(self): self.provider, e, ) - return + return None conf_update_dict["product_types_config"] = dict_items_recursive_apply( conf_update_dict["product_types_config"], lambda k, v: v if v != NOT_AVAILABLE else None, ) return conf_update_dict - def query(self, items_per_page=None, page=None, count=True, **kwargs): + def query( + self, + product_type: Optional[str] = None, + items_per_page: int = DEFAULT_ITEMS_PER_PAGE, + page: int = DEFAULT_PAGE, + count: bool = True, + **kwargs: Any, + ) -> Tuple[List[EOProduct], Optional[int]]: """Perform a search on an OpenSearch-like interface :param items_per_page: (optional) The number of results that must appear in one @@ -427,27 +480,30 @@ def query(self, items_per_page=None, page=None, count=True, **kwargs): ) # provider product type specific conf - self.product_type_def_params = self.get_product_type_def_params( - product_type, **kwargs + self.product_type_def_params = ( + self.get_product_type_def_params(product_type, **kwargs) + if product_type is not None + else {} ) # if product_type_def_params is set, remove product_type as it may conflict with this conf if self.product_type_def_params: keywords.pop("productType", None) - product_type_metadata_mapping = dict( - self.config.metadata_mapping, - 
**self.product_type_def_params.get("metadata_mapping", {}), - ) - keywords.update( - { - k: v - for k, v in self.product_type_def_params.items() - if k not in keywords.keys() - and k in product_type_metadata_mapping.keys() - and isinstance(product_type_metadata_mapping[k], list) - } - ) + if self.config.metadata_mapping: + product_type_metadata_mapping = dict( + self.config.metadata_mapping, + **self.product_type_def_params.get("metadata_mapping", {}), + ) + keywords.update( + { + k: v + for k, v in self.product_type_def_params.items() + if k not in keywords.keys() + and k in product_type_metadata_mapping.keys() + and isinstance(product_type_metadata_mapping[k], list) + } + ) qp, qs = self.build_query_string(product_type, **keywords) @@ -472,25 +528,33 @@ def query(self, items_per_page=None, page=None, count=True, **kwargs): reason="Simply run `self.config.metadata_mapping.update(metadata_mapping)` instead", version="2.10.0", ) - def update_metadata_mapping(self, metadata_mapping): + def update_metadata_mapping(self, metadata_mapping: Dict[str, Any]) -> None: """Update plugin metadata_mapping with input metadata_mapping configuration""" - self.config.metadata_mapping.update(metadata_mapping) + if self.config.metadata_mapping: + self.config.metadata_mapping.update(metadata_mapping) - def build_query_string(self, product_type, **kwargs): + def build_query_string( + self, product_type: str, **kwargs: Any + ) -> Tuple[Dict[str, Any], str]: """Build The query string using the search parameters""" logger.debug("Building the query string that will be used for search") query_params = format_query_params(product_type, self.config, **kwargs) # Build the final query string, in one go without quoting it # (some providers do not operate well with urlencoded and quoted query strings) + quote_via: Callable[[Any], str] = lambda x, *_args, **_kwargs: x return ( query_params, - urlencode( - query_params, doseq=True, quote_via=lambda x, *_args, **_kwargs: x - ), + urlencode(query_params, doseq=True, quote_via=quote_via), ) - def collect_search_urls(self, page=None, items_per_page=None, count=True, **kwargs): + def collect_search_urls( + self, + page: Optional[int] = None, + items_per_page: Optional[int] = None, + count: bool = True, + **kwargs: Any, + ) -> Tuple[List[str], Optional[int]]: """Build paginated urls""" urls = [] total_results = 0 if count else None @@ -515,12 +579,18 @@ def collect_search_urls(self, page=None, items_per_page=None, count=True, **kwar ).format(collection=collection) if count_endpoint: count_url = "{}?{}".format(count_endpoint, self.query_string) - _total_results = self.count_hits( - count_url, result_type=self.config.result_type + _total_results = ( + self.count_hits( + count_url, result_type=self.config.result_type + ) + or 0 ) if getattr(self.config, "merge_responses", False): - total_results = _total_results or 0 + total_results = _total_results else: + total_results = ( + 0 if total_results is None else total_results + ) total_results += _total_results or 0 next_url = self.config.pagination["next_page_url_tpl"].format( url=search_endpoint, @@ -535,7 +605,9 @@ def collect_search_urls(self, page=None, items_per_page=None, count=True, **kwar urls.append(next_url) return urls, total_results - def do_search(self, items_per_page=None, **kwargs): + def do_search( + self, items_per_page: Optional[int] = None, **kwargs: Any + ) -> List[Any]: """Perform the actual search request. 
If there is a specified number of items per page, return the results as soon @@ -544,15 +616,15 @@ def do_search(self, items_per_page=None, **kwargs): :param items_per_page: (optional) The number of items to return for one page :type items_per_page: int """ + total_items_nb = 0 if getattr(self, "need_count", False): # extract total_items_nb from search results - total_items_nb = 0 if self.config.result_type == "json": total_items_nb_key_path_parsed = self.config.pagination[ "total_items_nb_key_path" ] - results = [] + results: List[Any] = [] for search_url in self.search_urls: response = self._request( search_url, @@ -572,12 +644,15 @@ def do_search(self, items_per_page=None, **kwargs): if self.config.result_type == "xml": root_node = etree.fromstring(response.content) namespaces = {k or "ns": v for k, v in root_node.nsmap.items()} - result = [ - etree.tostring(entry) - for entry in root_node.xpath( - self.config.results_entry, namespaces=namespaces - ) - ] + results_xpath = root_node.xpath( + self.config.results_entry or "//ns:entry", namespaces=namespaces + ) + result = ( + [etree.tostring(element_or_tree=entry) for entry in results_xpath] + if isinstance(results_xpath, Iterable) + else [] + ) + if next_page_url_key_path or next_page_query_obj_key_path: raise NotImplementedError( "Setting the next page url from an XML response has not " @@ -586,11 +661,16 @@ def do_search(self, items_per_page=None, **kwargs): if getattr(self, "need_count", False): # extract total_items_nb from search results try: - total_nb_results = root_node.xpath( - self.config.pagination["total_items_nb_key_path"], + total_nb_results_xpath = root_node.xpath( + str(self.config.pagination["total_items_nb_key_path"]), namespaces={ k or "ns": v for k, v in root_node.nsmap.items() }, + ) + total_nb_results = ( + total_nb_results_xpath + if isinstance(total_nb_results_xpath, Iterable) + else [] )[0] _total_items_nb = int(total_nb_results) @@ -673,14 +753,16 @@ def do_search(self, items_per_page=None, **kwargs): return results return results - def normalize_results(self, results, **kwargs): + def normalize_results( + self, results: List[Dict[str, Any]], **kwargs: Any + ) -> List[EOProduct]: """Build EOProducts from provider results""" normalize_remaining_count = len(results) logger.debug( "Adapting %s plugin results to eodag product representation" % normalize_remaining_count ) - products = [] + products: List[EOProduct] = [] for result in results: product = EOProduct( self.provider, @@ -698,7 +780,7 @@ def normalize_results(self, results, **kwargs): products.append(product) return products - def count_hits(self, count_url, result_type="json"): + def count_hits(self, count_url: str, result_type: Optional[str] = "json") -> int: """Count the number of results satisfying some criteria""" # Handle a very annoying special case :'( url = count_url.replace("$format=json&", "") @@ -724,15 +806,15 @@ def count_hits(self, count_url, result_type="json"): total_results = int(count_results) return total_results - def get_collections(self, **kwargs): + def get_collections(self, **kwargs: Any) -> Tuple[Set[Dict[str, Any]], ...]: """Get the collection to which the product belongs""" # See https://earth.esa.int/web/sentinel/missions/sentinel-2/news/- # /asset_publisher/Ac0d/content/change-of # -format-for-new-sentinel-2-level-1c-products-starting-on-6-december - product_type = kwargs.get("productType") + product_type: Optional[str] = kwargs.get("productType") if product_type is None and not self.product_type_def_params: - collections = 
set()
-        collection = getattr(self.config, "collection", None)
+        collections: Set[Dict[str, Any]] = set()
+        collection: Optional[str] = getattr(self.config, "collection", None)
         if collection is None:
             try:
                 for product_type, product_config in self.config.products.items():
@@ -750,21 +832,23 @@ def get_collections(self, **kwargs):
                 collections.add(collection)
             return tuple(collections)

-        collection = getattr(self.config, "collection", None)
+        collection: Optional[str] = getattr(self.config, "collection", None)
         if collection is None:
             collection = (
                 self.product_type_def_params.get("collection", None) or product_type
             )
-        collections = (
-            (collection,) if not isinstance(collection, list) else tuple(collection)
-        )
-        return collections
-
-    def _request(self, url, info_message=None, exception_message=None):
+        return (collection,) if not isinstance(collection, list) else tuple(collection)
+
+    def _request(
+        self,
+        url: str,
+        info_message: Optional[str] = None,
+        exception_message: Optional[str] = None,
+    ) -> Response:
         try:
             timeout = getattr(self.config, "timeout", HTTP_REQ_TIMEOUT)
             # auth if needed
-            kwargs = {}
+            kwargs: Dict[str, Any] = {}
             if (
                 getattr(self.config, "need_auth", False)
                 and hasattr(self, "auth")
@@ -792,8 +876,8 @@ def _request(self, url, info_message=None, exception_message=None):
                 urllib_req = Request(prep.url, headers=USER_AGENT)
                 urllib_response = urlopen(urllib_req, timeout=timeout)
                 # build Response
-                adapter = requests.adapters.HTTPAdapter()
-                response = adapter.build_response(prep, urllib_response)
+                adapter = HTTPAdapter()
+                response = cast(Response, adapter.build_response(prep, urllib_response))
             else:
                 if info_message:
                     logger.info(info_message)
@@ -801,6 +885,8 @@
                 url, timeout=timeout, headers=USER_AGENT, **kwargs
             )
             response.raise_for_status()
+        except requests.exceptions.Timeout as exc:
+            raise TimeOutError(exc, timeout=timeout) from exc
         except (requests.RequestException, URLError) as err:
             err_msg = err.readlines() if hasattr(err, "readlines") else ""
             if exception_message:
@@ -821,9 +907,11 @@ class AwsSearch(QueryStringSearch):
     """A specialisation of RestoSearch that modifies the way the EOProducts
     are built from the search results"""

-    def normalize_results(self, results, **kwargs):
+    def normalize_results(
+        self, results: List[Dict[str, Any]], **kwargs: Any
+    ) -> List[EOProduct]:
         """Transform metadata from provider representation to eodag representation"""
-        normalized = []
+        normalized: List[EOProduct] = []
         logger.debug("Adapting plugin results to eodag product representation")
         for result in results:
             ref = result["properties"]["title"].split("_")[5]
@@ -847,7 +935,7 @@ class ODataV4Search(QueryStringSearch):
     """A specialisation of a QueryStringSearch that does a two step search to
     retrieve all products metadata"""

-    def __init__(self, provider, config):
+    def __init__(self, provider: str, config: PluginConfig) -> None:
         super(ODataV4Search, self).__init__(provider, config)

         # parse jsonpath on init
@@ -860,7 +948,7 @@ def __init__(self, provider, config):
             metadata_path
         )

-    def do_search(self, *args, **kwargs):
+    def do_search(self, *args: Any, **kwargs: Any) -> List[Any]:
         """A two step search can be performed if the metadata are not given into the search result"""

         if getattr(self.config, "per_product_metadata_query", False):
@@ -874,6 +962,8 @@
                     metadata_url, headers=USER_AGENT, timeout=HTTP_REQ_TIMEOUT
                 )
                 response.raise_for_status()
+            except requests.exceptions.Timeout as exc:
+                raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc
             except requests.RequestException:
                 logger.exception(
                     "Skipping error while searching for %s %s instance:",
@@ -889,13 +979,15 @@ def do_search(self, *args, **kwargs):
         else:
             return super(ODataV4Search, self).do_search(*args, **kwargs)

-    def get_metadata_search_url(self, entity):
+    def get_metadata_search_url(self, entity: Dict[str, Any]) -> str:
         """Build the metadata link for the given entity"""
         return "{}({})/Metadata".format(
             self.config.api_endpoint.rstrip("/"), entity["id"]
         )

-    def normalize_results(self, results, **kwargs):
+    def normalize_results(
+        self, results: List[Dict[str, Any]], **kwargs: Any
+    ) -> List[EOProduct]:
         """Build EOProducts from provider results

         If configured, a metadata pre-mapping can be applied to simplify
         further metadata extraction.
@@ -928,7 +1020,14 @@ def normalize_results(self, results, **kwargs):
 class PostJsonSearch(QueryStringSearch):
     """A specialisation of a QueryStringSearch that uses POST method"""

-    def query(self, items_per_page=None, page=None, count=True, **kwargs):
+    def query(
+        self,
+        product_type: Optional[str] = None,
+        items_per_page: int = DEFAULT_ITEMS_PER_PAGE,
+        page: int = DEFAULT_PAGE,
+        count: bool = True,
+        **kwargs: Any,
+    ) -> Tuple[List[EOProduct], Optional[int]]:
         """Perform a search on an OpenSearch-like interface"""
         product_type = kwargs.get("productType", None)
         # remove "product_type" from search args if exists for compatibility with QueryStringSearch methods
@@ -1032,9 +1131,15 @@ def query(self, items_per_page=None, page=None, count=True, **kwargs):
             total_items = len(eo_products) if total_items == 0 else total_items
         return eo_products, total_items

-    def collect_search_urls(self, page=None, items_per_page=None, count=True, **kwargs):
+    def collect_search_urls(
+        self,
+        page: Optional[int] = None,
+        items_per_page: Optional[int] = None,
+        count: bool = True,
+        **kwargs: Any,
+    ) -> Tuple[List[str], Optional[int]]:
         """Adds pagination to query parameters, and auth to url"""
-        urls = []
+        urls: List[str] = []
         total_results = 0 if count else None

         if "count_endpoint" not in self.config.pagination:
@@ -1049,7 +1154,7 @@ def collect_search_urls(self, page=None, items_per_page=None, count=True, **kwar
             auth_conf_dict = {}
         for collection in self.get_collections(**kwargs):
             try:
-                search_endpoint = self.config.api_endpoint.rstrip("/").format(
+                search_endpoint: str = self.config.api_endpoint.rstrip("/").format(
                     **dict(collection=collection, **auth_conf_dict)
                 )
             except KeyError as e:
@@ -1087,7 +1192,13 @@ def collect_search_urls(self, page=None, items_per_page=None, count=True, **kwar
             urls.append(search_endpoint)
         return urls, total_results

-    def _request(self, url, info_message=None, exception_message=None):
+    def _request(
+        self,
+        url: str,
+        info_message: Optional[str] = None,
+        exception_message: Optional[str] = None,
+    ) -> Response:
+        timeout = getattr(self.config, "timeout", HTTP_REQ_TIMEOUT)
         try:
             # auth if needed
             kwargs = {}
@@ -1108,10 +1219,12 @@ def _request(self, url, info_message=None, exception_message=None):
                 url,
                 json=self.query_params,
                 headers=USER_AGENT,
-                timeout=getattr(self.config, "timeout", HTTP_REQ_TIMEOUT),
+                timeout=timeout,
                 **kwargs,
             )
             response.raise_for_status()
+        except requests.exceptions.Timeout as exc:
+            raise TimeOutError(exc, timeout=timeout) from exc
         except (requests.RequestException, URLError) as err:
             # check if error is identified as auth_error in provider conf
             auth_errors = getattr(self.config, "auth_error_code", [None])
@@ -1146,7 +1259,7 @@ def _request(self, url, info_message=None, exception_message=None):
 class StacSearch(PostJsonSearch):
     """A specialisation of a QueryStringSearch that uses generic STAC configuration"""

-    def __init__(self, provider, config):
+    def __init__(self, provider: str, config: PluginConfig) -> None:
         # backup results_entry overwritten by init
         results_entry = config.results_entry

@@ -1155,13 +1268,97 @@ def __init__(self, provider, config):
         # restore results_entry overwritten by init
         self.config.results_entry = results_entry

-    def normalize_results(self, results, **kwargs):
+    def normalize_results(
+        self, results: List[Dict[str, Any]], **kwargs: Any
+    ) -> List[EOProduct]:
         """Build EOProducts from provider results"""
         products = super(StacSearch, self).normalize_results(results, **kwargs)
         # move assets from properties to product's attr
         for product in products:
-            product.assets = product.properties.pop("assets", [])
+            product.assets.update(product.properties.pop("assets", {}))
         return products
+
+    def discover_queryables(
+        self, **kwargs: Any
+    ) -> Optional[Dict[str, Annotated[Any, FieldInfo]]]:
+        """Fetch queryables list from provider using `discover_queryables` conf
+
+        :param kwargs: additional filters for queryables (`productType` and other search
+            arguments)
+        :type kwargs: Any
+        :returns: fetched queryable parameters dict
+        :rtype: Optional[Dict[str, Annotated[Any, FieldInfo]]]
+        """
+        product_type = kwargs.get("productType", None)
+        provider_product_type = (
+            self.config.products.get(product_type, {}).get("productType", product_type)
+            if product_type
+            else None
+        )
+
+        try:
+            unparsed_fetch_url = (
+                self.config.discover_queryables["product_type_fetch_url"]
+                if provider_product_type
+                else self.config.discover_queryables["fetch_url"]
+            )
+
+            fetch_url = unparsed_fetch_url.format(
+                provider_product_type=provider_product_type, **self.config.__dict__
+            )
+            response = QueryStringSearch._request(
+                self,
+                fetch_url,
+                info_message="Fetching queryables: {}".format(fetch_url),
+                exception_message="Skipping error while fetching queryables for "
+                "{} {} instance:".format(self.provider, self.__class__.__name__),
+            )
+        except (RequestError, KeyError, AttributeError):
+            return None
+        else:
+            json_queryables = dict()
+            try:
+                resp_as_json = response.json()
+
+                # extract results from response json
+                json_queryables = [
+                    match.value
+                    for match in self.config.discover_queryables["results_entry"].find(
+                        resp_as_json
+                    )
+                ][0]
+
+            except KeyError as e:
+                logger.warning(
+                    "Incomplete %s discover_queryables configuration: %s",
+                    self.provider,
+                    e,
+                )
+            except IndexError:
+                logger.info(
+                    "No queryable found for %s on %s", product_type, self.provider
+                )
+                return None
+
+            # convert json results to pydantic model fields
+            field_definitions: Dict[str, Any] = dict()
+            for json_param, json_mtd in json_queryables.items():
+                param = (
+                    get_queryable_from_provider(
+                        json_param, self.config.metadata_mapping
+                    )
+                    or json_param
+                )
+
+                default = kwargs.get(param, None)
+                annotated_def = json_field_definition_to_python(
+                    json_mtd, default_value=default
+                )
+                field_definitions[param] = get_args(annotated_def)
+
+            python_queryables = create_model("m", **field_definitions).model_fields
+
+            return model_fields_to_annotated(python_queryables)
diff --git a/eodag/plugins/search/static_stac_search.py b/eodag/plugins/search/static_stac_search.py
index 4535bbe9b..502b50169 100644
--- a/eodag/plugins/search/static_stac_search.py
+++ b/eodag/plugins/search/static_stac_search.py
@@ -15,8 +15,10 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from __future__ import annotations

 import logging
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple

 import geojson

@@ -25,9 +27,19 @@
 from eodag.plugins.crunch.filter_overlap import FilterOverlap
 from eodag.plugins.crunch.filter_property import FilterProperty
 from eodag.plugins.search.qssearch import StacSearch
-from eodag.utils import HTTP_REQ_TIMEOUT, MockResponse
+from eodag.utils import (
+    DEFAULT_ITEMS_PER_PAGE,
+    DEFAULT_PAGE,
+    HTTP_REQ_TIMEOUT,
+    MockResponse,
+)
 from eodag.utils.stac_reader import fetch_stac_items

+if TYPE_CHECKING:
+    from eodag.api.product import EOProduct
+    from eodag.config import PluginConfig
+
+
 logger = logging.getLogger("eodag.search.static_stac_search")

@@ -55,12 +67,12 @@ class StaticStacSearch(StacSearch):
     :type config: str
     """

-    def __init__(self, provider, config):
+    def __init__(self, provider: str, config: PluginConfig) -> None:
         super(StaticStacSearch, self).__init__(provider, config)
         self.config.__dict__.setdefault("max_connections", 100)
         self.config.__dict__.setdefault("timeout", HTTP_REQ_TIMEOUT)

-    def discover_product_types(self):
+    def discover_product_types(self) -> Dict[str, Any]:
         """Fetch product types is disabled for `StaticStacSearch`

         :returns: empty dict
@@ -68,7 +80,14 @@
         """
         return {}

-    def query(self, items_per_page=None, page=None, count=True, **kwargs):
+    def query(
+        self,
+        product_type: Optional[str] = None,
+        items_per_page: int = DEFAULT_ITEMS_PER_PAGE,
+        page: int = DEFAULT_PAGE,
+        count: bool = True,
+        **kwargs: Any,
+    ) -> Tuple[List[EOProduct], Optional[int]]:
         """Perform a search on a static STAC Catalog"""

         features = fetch_stac_items(
@@ -79,11 +98,6 @@ def query(self, items_per_page=None, page=None, count=True, **kwargs):
         )
         nb_features = len(features)
         feature_collection = geojson.FeatureCollection(features)
-        feature_collection["context"] = {
-            "limit": nb_features,
-            "matched": nb_features,
-            "returned": nb_features,
-        }

         # save StaticStacSearch._request and mock it to make return loaded static results
         stacapi_request = self._request
@@ -97,10 +111,8 @@ def query(self, items_per_page=None, page=None, count=True, **kwargs):
         eo_products, _ = super(StaticStacSearch, self).query(
             items_per_page=nb_features, page=1, count=True, **kwargs
         )
-        # filter using query params
         search_result = SearchResult(eo_products)
-
         # Filter by date
         if "startTimeFromAscendingNode" in kwargs:
             kwargs["start"] = kwargs.pop("startTimeFromAscendingNode")
diff --git a/eodag/resources/ext_product_types.json b/eodag/resources/ext_product_types.json
index cfeacf0bc..989fd46a5 100644
--- a/eodag/resources/ext_product_types.json
+++ b/eodag/resources/ext_product_types.json
@@ -1 +1 @@
-{"astraea_eod": {"providers_config": {"landsat8_c2l1t1": {"productType": "landsat8_c2l1t1"}, "mcd43a4": {"productType": "mcd43a4"}, "mod11a1": {"productType": "mod11a1"}, "mod13a1": {"productType": "mod13a1"}, "myd11a1": {"productType": "myd11a1"}, "myd13a1": {"productType": "myd13a1"}, "maxar_open_data": {"productType": "maxar_open_data"}, "naip": {"productType": "naip"}, "sentinel1_l1c_grd": {"productType": "sentinel1_l1c_grd"}, "sentinel2_l1c": {"productType": "sentinel2_l1c"}, "sentinel2_l2a": {"productType": "sentinel2_l2a"}, "spacenet7": {"productType": "spacenet7"},
"umbra_open_data": {"productType": "umbra_open_data"}}, "product_types_config": {"landsat8_c2l1t1": {"abstract": "Landsat 8 Collection 2 Tier 1 Precision Terrain from Landsat 8 Operational Land Imager (OLI) and Thermal Infrared Sensor (TIRS) data", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "landsat8-c2l1t1", "license": "PDDL-1.0", "title": "Landsat 8 - Level 1", "missionStartDate": "2013-03-18T15:59:02.333Z"}, "mcd43a4": {"abstract": "MCD43A4: MODIS/Terra and Aqua Nadir BRDF-Adjusted Reflectance Daily L3 Global 500 m SIN Grid V006", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "mcd43a4", "license": "CC-PDDC", "title": "MCD43A4 NBAR", "missionStartDate": "2000-02-16T00:00:00.000Z"}, "mod11a1": {"abstract": "MOD11A1: MODIS/Terra Land Surface Temperature/Emissivity Daily L3 Global 1 km SIN Grid V006", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "mod11a1", "license": "CC-PDDC", "title": "MOD11A1 LST", "missionStartDate": "2000-02-24T00:00:00.000Z"}, "mod13a1": {"abstract": "MOD13A1: MODIS/Terra Vegetation Indices 16-Day L3 Global 500 m SIN Grid V006", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "mod13a1", "license": "CC-PDDC", "title": "MOD13A1 VI", "missionStartDate": "2000-02-18T00:00:00.000Z"}, "myd11a1": {"abstract": "MYD11A1: MODIS/Aqua Land Surface Temperature/Emissivity Daily L3 Global 1 km SIN Grid V006", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "myd11a1", "license": "CC-PDDC", "title": "MYD11A1 LST", "missionStartDate": "2002-07-04T00:00:00.000Z"}, "myd13a1": {"abstract": "MYD13A1: MODIS/Aqua Vegetation Indices 16-Day L3 Global 500 m SIN Grid V006", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "myd13a1", "license": "CC-PDDC", "title": "MYD13A1 VI", "missionStartDate": "2002-07-04T00:00:00.000Z"}, "maxar_open_data": {"abstract": "Maxar Open Data", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "maxar-open-data", "license": "CC-BY-NC-4.0", "title": "Maxar Open Data", "missionStartDate": "2008-01-15T00:00:00.000Z"}, "naip": {"abstract": "National Agriculture Imagery Program aerial imagery", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "naip", "license": "CC-PDDC", "title": "NAIP", "missionStartDate": "2012-04-23T12:00:00.000Z"}, "sentinel1_l1c_grd": {"abstract": "Sentinel-1 Level-1 Ground Range Detected data", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel1-l1c-grd", "license": "CC-BY-SA-3.0", "title": "Sentinel-1 L1C GRD", "missionStartDate": "2017-09-27T14:19:16.000"}, "sentinel2_l1c": {"abstract": "Sentinel-2 Level-1C top of atmosphere", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel2-l1c", "license": "CC-BY-SA-3.0", "title": "Sentinel-2 L1C", "missionStartDate": "2015-06-27T10:25:31.456Z"}, "sentinel2_l2a": {"abstract": "Sentinel-2 Level-2A atmospherically corrected data", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel2-l2a", "license": "CC-BY-SA-3.0", "title": 
"Sentinel-2 L2A", "missionStartDate": "2018-04-01T07:02:22.463Z"}, "spacenet7": {"abstract": "SpaceNet 7 Imagery and Labels", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "spacenet7", "license": "CC-BY-SA-4.0", "title": "SpaceNet 7", "missionStartDate": "2018-01-01T00:00:00.000Z"}, "umbra_open_data": {"abstract": "Umbra Open Data", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "umbra-open-data", "license": "proprietary", "title": "Umbra Open Data", "missionStartDate": null}}}, "creodias": {"providers_config": {"Sentinel1": {"collection": "Sentinel1"}, "Sentinel1RTC": {"collection": "Sentinel1RTC"}, "Sentinel2": {"collection": "Sentinel2"}, "Sentinel3": {"collection": "Sentinel3"}, "Sentinel5P": {"collection": "Sentinel5P"}, "Sentinel6": {"collection": "Sentinel6"}, "Landsat5": {"collection": "Landsat5"}, "Landsat7": {"collection": "Landsat7"}, "Landsat8": {"collection": "Landsat8"}, "Envisat": {"collection": "Envisat"}, "SMOS": {"collection": "SMOS"}, "S2GLC": {"collection": "S2GLC"}, "CopDem": {"collection": "CopDem"}}, "product_types_config": {"Sentinel1": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel1", "license": null, "title": null, "missionStartDate": null}, "Sentinel1RTC": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel1rtc", "license": null, "title": null, "missionStartDate": null}, "Sentinel2": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel2", "license": null, "title": null, "missionStartDate": null}, "Sentinel3": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel3", "license": null, "title": null, "missionStartDate": null}, "Sentinel5P": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel5p", "license": null, "title": null, "missionStartDate": null}, "Sentinel6": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel6", "license": null, "title": null, "missionStartDate": null}, "Landsat5": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "landsat5", "license": null, "title": null, "missionStartDate": null}, "Landsat7": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "landsat7", "license": null, "title": null, "missionStartDate": null}, "Landsat8": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "landsat8", "license": null, "title": null, "missionStartDate": null}, "Envisat": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "envisat", "license": null, "title": null, "missionStartDate": null}, "SMOS": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "smos", "license": null, "title": null, "missionStartDate": null}, "S2GLC": {"abstract": null, 
"instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "s2glc", "license": null, "title": null, "missionStartDate": null}, "CopDem": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "copdem", "license": null, "title": null, "missionStartDate": null}}}, "earth_search": {"providers_config": {"sentinel-s2-l2a": {"productType": "sentinel-s2-l2a"}, "sentinel-s2-l1c": {"productType": "sentinel-s2-l1c"}, "landsat-8-l1-c1": {"productType": "landsat-8-l1-c1"}}, "product_types_config": {"sentinel-s2-l2a": {"abstract": "Sentinel-2a and Sentinel-2b imagery, processed to Level 2A (Surface Reflectance)", "instrument": "msi", "platform": "sentinel-2", "platformSerialIdentifier": "sentinel-2a,sentinel-2b", "processingLevel": null, "keywords": "earth-observation,esa,msi,sentinel,sentinel-2,sentinel-2a,sentinel-2b,sentinel-s2-l2a", "license": "proprietary", "title": "Sentinel 2 L2A", "missionStartDate": "2015-06-27T10:25:31.456000Z"}, "sentinel-s2-l1c": {"abstract": "Sentinel-2a and Sentinel-2b imagery, processed to Level 1C (Top-Of-Atmosphere Geometrically Corrected)", "instrument": "msi", "platform": "sentinel-2", "platformSerialIdentifier": "sentinel-2a,sentinel-2b", "processingLevel": null, "keywords": "earth-observation,esa,msi,sentinel,sentinel-2,sentinel-2a,sentinel-2b,sentinel-s2-l1c", "license": "proprietary", "title": "Sentinel 2 L1C", "missionStartDate": "2015-06-27T10:25:31.456000Z"}, "landsat-8-l1-c1": {"abstract": "Landat-8 L1 Collection-1 imagery radiometrically calibrated and orthorectified using gound points and Digital Elevation Model (DEM) data to correct relief displacement.", "instrument": "oli,tirs", "platform": null, "platformSerialIdentifier": "landsat-8", "processingLevel": null, "keywords": "earth-observation,landsat,landsat-8,landsat-8-l1-c1,oli,tirs,usgs", "license": "PDDL-1.0", "title": "Landsat-8 L1 Collection-1", "missionStartDate": "2013-06-01T00:00:00Z"}}}, "earth_search_cog": null, "earth_search_gcs": null, "planetary_computer": {"providers_config": {"daymet-annual-pr": {"productType": "daymet-annual-pr"}, "daymet-daily-hi": {"productType": "daymet-daily-hi"}, "3dep-seamless": {"productType": "3dep-seamless"}, "3dep-lidar-dsm": {"productType": "3dep-lidar-dsm"}, "fia": {"productType": "fia"}, "sentinel-1-rtc": {"productType": "sentinel-1-rtc"}, "gridmet": {"productType": "gridmet"}, "daymet-annual-na": {"productType": "daymet-annual-na"}, "daymet-monthly-na": {"productType": "daymet-monthly-na"}, "daymet-annual-hi": {"productType": "daymet-annual-hi"}, "daymet-monthly-hi": {"productType": "daymet-monthly-hi"}, "daymet-monthly-pr": {"productType": "daymet-monthly-pr"}, "gnatsgo-tables": {"productType": "gnatsgo-tables"}, "hgb": {"productType": "hgb"}, "cop-dem-glo-30": {"productType": "cop-dem-glo-30"}, "cop-dem-glo-90": {"productType": "cop-dem-glo-90"}, "goes-cmi": {"productType": "goes-cmi"}, "terraclimate": {"productType": "terraclimate"}, "nasa-nex-gddp-cmip6": {"productType": "nasa-nex-gddp-cmip6"}, "gpm-imerg-hhr": {"productType": "gpm-imerg-hhr"}, "gnatsgo-rasters": {"productType": "gnatsgo-rasters"}, "3dep-lidar-hag": {"productType": "3dep-lidar-hag"}, "3dep-lidar-intensity": {"productType": "3dep-lidar-intensity"}, "3dep-lidar-pointsourceid": {"productType": "3dep-lidar-pointsourceid"}, "mtbs": {"productType": "mtbs"}, "noaa-c-cap": {"productType": "noaa-c-cap"}, "3dep-lidar-copc": {"productType": "3dep-lidar-copc"}, "modis-64A1-061": 
{"productType": "modis-64A1-061"}, "alos-fnf-mosaic": {"productType": "alos-fnf-mosaic"}, "3dep-lidar-returns": {"productType": "3dep-lidar-returns"}, "mobi": {"productType": "mobi"}, "landsat-c2-l2": {"productType": "landsat-c2-l2"}, "era5-pds": {"productType": "era5-pds"}, "chloris-biomass": {"productType": "chloris-biomass"}, "kaza-hydroforecast": {"productType": "kaza-hydroforecast"}, "planet-nicfi-analytic": {"productType": "planet-nicfi-analytic"}, "modis-17A2H-061": {"productType": "modis-17A2H-061"}, "modis-11A2-061": {"productType": "modis-11A2-061"}, "daymet-daily-pr": {"productType": "daymet-daily-pr"}, "3dep-lidar-dtm-native": {"productType": "3dep-lidar-dtm-native"}, "3dep-lidar-classification": {"productType": "3dep-lidar-classification"}, "3dep-lidar-dtm": {"productType": "3dep-lidar-dtm"}, "gap": {"productType": "gap"}, "modis-17A2HGF-061": {"productType": "modis-17A2HGF-061"}, "planet-nicfi-visual": {"productType": "planet-nicfi-visual"}, "gbif": {"productType": "gbif"}, "modis-17A3HGF-061": {"productType": "modis-17A3HGF-061"}, "modis-09A1-061": {"productType": "modis-09A1-061"}, "alos-dem": {"productType": "alos-dem"}, "alos-palsar-mosaic": {"productType": "alos-palsar-mosaic"}, "deltares-water-availability": {"productType": "deltares-water-availability"}, "modis-16A3GF-061": {"productType": "modis-16A3GF-061"}, "modis-21A2-061": {"productType": "modis-21A2-061"}, "us-census": {"productType": "us-census"}, "jrc-gsw": {"productType": "jrc-gsw"}, "deltares-floods": {"productType": "deltares-floods"}, "modis-43A4-061": {"productType": "modis-43A4-061"}, "modis-09Q1-061": {"productType": "modis-09Q1-061"}, "modis-14A1-061": {"productType": "modis-14A1-061"}, "hrea": {"productType": "hrea"}, "modis-13Q1-061": {"productType": "modis-13Q1-061"}, "modis-14A2-061": {"productType": "modis-14A2-061"}, "sentinel-2-l2a": {"productType": "sentinel-2-l2a"}, "modis-15A2H-061": {"productType": "modis-15A2H-061"}, "modis-11A1-061": {"productType": "modis-11A1-061"}, "modis-15A3H-061": {"productType": "modis-15A3H-061"}, "modis-13A1-061": {"productType": "modis-13A1-061"}, "daymet-daily-na": {"productType": "daymet-daily-na"}, "nrcan-landcover": {"productType": "nrcan-landcover"}, "modis-10A2-061": {"productType": "modis-10A2-061"}, "ecmwf-forecast": {"productType": "ecmwf-forecast"}, "noaa-mrms-qpe-24h-pass2": {"productType": "noaa-mrms-qpe-24h-pass2"}, "sentinel-1-grd": {"productType": "sentinel-1-grd"}, "nasadem": {"productType": "nasadem"}, "io-lulc": {"productType": "io-lulc"}, "landsat-c2-l1": {"productType": "landsat-c2-l1"}, "drcog-lulc": {"productType": "drcog-lulc"}, "chesapeake-lc-7": {"productType": "chesapeake-lc-7"}, "chesapeake-lc-13": {"productType": "chesapeake-lc-13"}, "chesapeake-lu": {"productType": "chesapeake-lu"}, "noaa-mrms-qpe-1h-pass1": {"productType": "noaa-mrms-qpe-1h-pass1"}, "noaa-mrms-qpe-1h-pass2": {"productType": "noaa-mrms-qpe-1h-pass2"}, "noaa-nclimgrid-monthly": {"productType": "noaa-nclimgrid-monthly"}, "goes-glm": {"productType": "goes-glm"}, "usda-cdl": {"productType": "usda-cdl"}, "eclipse": {"productType": "eclipse"}, "esa-cci-lc": {"productType": "esa-cci-lc"}, "esa-cci-lc-netcdf": {"productType": "esa-cci-lc-netcdf"}, "fws-nwi": {"productType": "fws-nwi"}, "usgs-lcmap-conus-v13": {"productType": "usgs-lcmap-conus-v13"}, "usgs-lcmap-hawaii-v10": {"productType": "usgs-lcmap-hawaii-v10"}, "noaa-climate-normals-tabular": {"productType": "noaa-climate-normals-tabular"}, "noaa-climate-normals-netcdf": {"productType": "noaa-climate-normals-netcdf"}, 
"noaa-climate-normals-gridded": {"productType": "noaa-climate-normals-gridded"}, "aster-l1t": {"productType": "aster-l1t"}, "cil-gdpcir-cc-by-sa": {"productType": "cil-gdpcir-cc-by-sa"}, "io-lulc-9-class": {"productType": "io-lulc-9-class"}, "io-biodiversity": {"productType": "io-biodiversity"}, "naip": {"productType": "naip"}, "noaa-cdr-sea-surface-temperature-whoi": {"productType": "noaa-cdr-sea-surface-temperature-whoi"}, "noaa-cdr-ocean-heat-content": {"productType": "noaa-cdr-ocean-heat-content"}, "cil-gdpcir-cc0": {"productType": "cil-gdpcir-cc0"}, "cil-gdpcir-cc-by": {"productType": "cil-gdpcir-cc-by"}, "noaa-cdr-sea-surface-temperature-whoi-netcdf": {"productType": "noaa-cdr-sea-surface-temperature-whoi-netcdf"}, "noaa-cdr-sea-surface-temperature-optimum-interpolation": {"productType": "noaa-cdr-sea-surface-temperature-optimum-interpolation"}, "modis-10A1-061": {"productType": "modis-10A1-061"}, "sentinel-5p-l2-netcdf": {"productType": "sentinel-5p-l2-netcdf"}, "sentinel-3-olci-wfr-l2-netcdf": {"productType": "sentinel-3-olci-wfr-l2-netcdf"}, "noaa-cdr-ocean-heat-content-netcdf": {"productType": "noaa-cdr-ocean-heat-content-netcdf"}, "sentinel-3-synergy-aod-l2-netcdf": {"productType": "sentinel-3-synergy-aod-l2-netcdf"}, "sentinel-3-synergy-v10-l2-netcdf": {"productType": "sentinel-3-synergy-v10-l2-netcdf"}, "sentinel-3-olci-lfr-l2-netcdf": {"productType": "sentinel-3-olci-lfr-l2-netcdf"}, "sentinel-3-sral-lan-l2-netcdf": {"productType": "sentinel-3-sral-lan-l2-netcdf"}, "sentinel-3-slstr-lst-l2-netcdf": {"productType": "sentinel-3-slstr-lst-l2-netcdf"}, "sentinel-3-slstr-wst-l2-netcdf": {"productType": "sentinel-3-slstr-wst-l2-netcdf"}, "sentinel-3-sral-wat-l2-netcdf": {"productType": "sentinel-3-sral-wat-l2-netcdf"}, "ms-buildings": {"productType": "ms-buildings"}, "sentinel-3-slstr-frp-l2-netcdf": {"productType": "sentinel-3-slstr-frp-l2-netcdf"}, "sentinel-3-synergy-syn-l2-netcdf": {"productType": "sentinel-3-synergy-syn-l2-netcdf"}, "sentinel-3-synergy-vgp-l2-netcdf": {"productType": "sentinel-3-synergy-vgp-l2-netcdf"}, "sentinel-3-synergy-vg1-l2-netcdf": {"productType": "sentinel-3-synergy-vg1-l2-netcdf"}, "esa-worldcover": {"productType": "esa-worldcover"}}, "product_types_config": {"daymet-annual-pr": {"abstract": "Annual climate summaries derived from [Daymet](https://daymet.ornl.gov) Version 4 daily data at a 1 km x 1 km spatial resolution for five variables: minimum and maximum temperature, precipitation, vapor pressure, and snow water equivalent. Annual averages are provided for minimum and maximum temperature, vapor pressure, and snow water equivalent, and annual totals are provided for the precipitation variable.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1852](https://doi.org/10.3334/ORNLDAAC/1852) to cite your usage of the data.\n\nThis dataset provides coverage for Hawaii; North America and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#annual). 
\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,daymet,daymet-annual-pr,precipitation,puerto-rico,temperature,vapor-pressure", "license": "proprietary", "title": "Daymet Annual Puerto Rico", "missionStartDate": "1980-07-01T12:00:00Z"}, "daymet-daily-hi": {"abstract": "Gridded estimates of daily weather parameters. [Daymet](https://daymet.ornl.gov) Version 4 variables include the following parameters: minimum temperature, maximum temperature, precipitation, shortwave radiation, vapor pressure, snow water equivalent, and day length.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1840](https://doi.org/10.3334/ORNLDAAC/1840) to cite your usage of the data.\n\nThis dataset provides coverage for Hawaii; North America and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#daily).\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "daymet,daymet-daily-hi,hawaii,precipitation,temperature,vapor-pressure,weather", "license": "proprietary", "title": "Daymet Daily Hawaii", "missionStartDate": "1980-01-01T12:00:00Z"}, "3dep-seamless": {"abstract": "U.S.-wide digital elevation data at horizontal resolutions ranging from one to sixty meters.\n\nThe [USGS 3D Elevation Program (3DEP) Datasets](https://www.usgs.gov/core-science-systems/ngp/3dep) from the [National Map](https://www.usgs.gov/core-science-systems/national-geospatial-program/national-map) are the primary elevation data product produced and distributed by the USGS. The 3DEP program provides raster elevation data for the conterminous United States, Alaska, Hawaii, and the island territories, at a variety of spatial resolutions. The seamless DEM layers produced by the 3DEP program are updated frequently to integrate newly available, improved elevation source data. \n\nDEM layers are available nationally at grid spacings of 1 arc-second (approximately 30 meters) for the conterminous United States, and at approximately 1, 3, and 9 meters for parts of the United States. Most seamless DEM data for Alaska is available at a resolution of approximately 60 meters, where only lower resolution source data exist.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-seamless,dem,elevation,ned,usgs", "license": "PDDL-1.0", "title": "USGS 3DEP Seamless DEMs", "missionStartDate": "1925-01-01T00:00:00Z"}, "3dep-lidar-dsm": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc). 
It creates a Digital Surface Model (DSM) using [`pdal.filters.range`](https://pdal.io/stages/filters.range.html#filters-range) to output a collection of Cloud Optimized GeoTIFFs, removing all points that have been classified as noise.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-dsm,cog,dsm,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Digital Surface Model", "missionStartDate": "2012-01-01T00:00:00Z"}, "fia": {"abstract": "Status and trends on U.S. forest location, health, growth, mortality, and production, from the U.S. Forest Service's [Forest Inventory and Analysis](https://www.fia.fs.fed.us/) (FIA) program.\n\nThe Forest Inventory and Analysis (FIA) dataset is a nationwide survey of the forest assets of the United States. The FIA research program has been in existence since 1928. FIA's primary objective is to determine the extent, condition, volume, growth, and use of trees on the nation's forest land.\n\nDomain: continental U.S., 1928-2018\n\nResolution: plot-level (irregular polygon)\n\nThis dataset was curated and brought to Azure by [CarbonPlan](https://carbonplan.org/).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "biomass,carbon,fia,forest,forest-service,species,usda", "license": "CC0-1.0", "title": "Forest Inventory and Analysis", "missionStartDate": "2020-06-01T00:00:00Z"}, "sentinel-1-rtc": {"abstract": "The [Sentinel-1](https://sentinel.esa.int/web/sentinel/missions/sentinel-1) mission is a constellation of two polar-orbiting satellites, operating day and night performing C-band synthetic aperture radar imaging. The Sentinel-1 Radiometrically Terrain Corrected (RTC) data in this collection is a radiometrically terrain corrected product derived from the [Ground Range Detected (GRD) Level-1](https://planetarycomputer.microsoft.com/dataset/sentinel-1-grd) products produced by the European Space Agency. The RTC processing is performed by [Catalyst](https://catalyst.earth/).\n\nRadiometric Terrain Correction accounts for terrain variations that affect both the position of a given point on the Earth's surface and the brightness of the radar return, as expressed in radar geometry. Without treatment, the hill-slope modulations of the radiometry threaten to overwhelm weaker thematic land cover-induced backscatter differences. Additionally, comparison of backscatter from multiple satellites, modes, or tracks loses meaning.\n\nA Planetary Computer account is required to retrieve SAS tokens to read the RTC data. See the [documentation](http://planetarycomputer.microsoft.com/docs/concepts/sas/#when-an-account-is-needed) for more information.\n\n### Methodology\n\nThe Sentinel-1 GRD product is converted to calibrated intensity using the conversion algorithm described in the ESA technical note ESA-EOPG-CSCOP-TN-0002, [Radiometric Calibration of S-1 Level-1 Products Generated by the S-1 IPF](https://ai4edatasetspublicassets.blob.core.windows.net/assets/pdfs/sentinel-1/S1-Radiometric-Calibration-V1.0.pdf). The flat earth calibration values for gamma correction (i.e. perpendicular to the radar line of sight) are extracted from the GRD metadata. The calibration coefficients are applied as a two-dimensional correction in range (by sample number) and azimuth (by time). All available polarizations are calibrated and written as separate layers of a single file. 
The calibrated SAR output is reprojected to nominal map orientation with north at the top and west to the left.\n\nThe data is then radiometrically terrain corrected using PlanetDEM as the elevation source. The correction algorithm is nominally based upon D. Small, [\u201cFlattening Gamma: Radiometric Terrain Correction for SAR Imagery\u201d](https://ai4edatasetspublicassets.blob.core.windows.net/assets/pdfs/sentinel-1/2011_Flattening_Gamma.pdf), IEEE Transactions on Geoscience and Remote Sensing, Vol 49, No 8., August 2011, pp 3081-3093. For each image scan line, the digital elevation model is interpolated to determine the elevation corresponding to the position associated with the known near slant range distance and arc length for each input pixel. The elevations at the four corners of each pixel are estimated using bilinear resampling. The four elevations are divided into two triangular facets and reprojected onto the plane perpendicular to the radar line of sight to provide an estimate of the area illuminated by the radar for each earth flattened pixel. The uncalibrated sum at each earth flattened pixel is normalized by dividing by the flat earth surface area. The adjustment for gamma intensity is given by dividing the normalized result by the cosine of the incident angle. Pixels which are not illuminated by the radar due to the viewing geometry are flagged as shadow.\n\nCalibrated data is then orthorectified to the appropriate UTM projection. The orthorectified output maintains the original sample sizes (in range and azimuth) and was not shifted to any specific grid.\n\nRTC data is processed only for the Interferometric Wide Swath (IW) mode, which is the main acquisition mode over land and satisfies the majority of service requirements.\n", "instrument": null, "platform": "Sentinel-1", "platformSerialIdentifier": "SENTINEL-1A,SENTINEL-1B", "processingLevel": null, "keywords": "c-band,copernicus,esa,rtc,sar,sentinel,sentinel-1,sentinel-1-rtc,sentinel-1a,sentinel-1b", "license": "CC-BY-4.0", "title": "Sentinel 1 Radiometrically Terrain Corrected (RTC)", "missionStartDate": "2014-10-10T00:28:21Z"}, "gridmet": {"abstract": "gridMET is a dataset of daily surface meteorological data at approximately four-kilometer resolution, covering the contiguous U.S. from 1979 to the present. These data can provide important inputs for ecological, agricultural, and hydrological models.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,gridmet,precipitation,temperature,vapor-pressure,water", "license": "CC0-1.0", "title": "gridMET", "missionStartDate": "1979-01-01T00:00:00Z"}, "daymet-annual-na": {"abstract": "Annual climate summaries derived from [Daymet](https://daymet.ornl.gov) Version 4 daily data at a 1 km x 1 km spatial resolution for five variables: minimum and maximum temperature, precipitation, vapor pressure, and snow water equivalent. Annual averages are provided for minimum and maximum temperature, vapor pressure, and snow water equivalent, and annual totals are provided for the precipitation variable.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. 
Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1852](https://doi.org/10.3334/ORNLDAAC/1852) to cite your usage of the data.\n\nThis dataset provides coverage for Hawaii; North America and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#annual). \n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,daymet,daymet-annual-na,north-america,precipitation,temperature,vapor-pressure", "license": "proprietary", "title": "Daymet Annual North America", "missionStartDate": "1980-07-01T12:00:00Z"}, "daymet-monthly-na": {"abstract": "Monthly climate summaries derived from [Daymet](https://daymet.ornl.gov) Version 4 daily data at a 1 km x 1 km spatial resolution for five variables: minimum and maximum temperature, precipitation, vapor pressure, and snow water equivalent. Annual averages are provided for minimum and maximum temperature, vapor pressure, and snow water equivalent, and annual totals are provided for the precipitation variable.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1855](https://doi.org/10.3334/ORNLDAAC/1855) to cite your usage of the data.\n\nThis dataset provides coverage for Hawaii; North America and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#monthly).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,daymet,daymet-monthly-na,north-america,precipitation,temperature,vapor-pressure", "license": "proprietary", "title": "Daymet Monthly North America", "missionStartDate": "1980-01-16T12:00:00Z"}, "daymet-annual-hi": {"abstract": "Annual climate summaries derived from [Daymet](https://daymet.ornl.gov) Version 4 daily data at a 1 km x 1 km spatial resolution for five variables: minimum and maximum temperature, precipitation, vapor pressure, and snow water equivalent. Annual averages are provided for minimum and maximum temperature, vapor pressure, and snow water equivalent, and annual totals are provided for the precipitation variable.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. 
Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1852](https://doi.org/10.3334/ORNLDAAC/1852) to cite your usage of the data.\n\nThis dataset provides coverage for Hawaii; North America and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#annual). \n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,daymet,daymet-annual-hi,hawaii,precipitation,temperature,vapor-pressure", "license": "proprietary", "title": "Daymet Annual Hawaii", "missionStartDate": "1980-07-01T12:00:00Z"}, "daymet-monthly-hi": {"abstract": "Monthly climate summaries derived from [Daymet](https://daymet.ornl.gov) Version 4 daily data at a 1 km x 1 km spatial resolution for five variables: minimum and maximum temperature, precipitation, vapor pressure, and snow water equivalent. Annual averages are provided for minimum and maximum temperature, vapor pressure, and snow water equivalent, and annual totals are provided for the precipitation variable.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1855](https://doi.org/10.3334/ORNLDAAC/1855) to cite your usage of the data.\n\nThis dataset provides coverage for Hawaii; North America and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#monthly).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,daymet,daymet-monthly-hi,hawaii,precipitation,temperature,vapor-pressure", "license": "proprietary", "title": "Daymet Monthly Hawaii", "missionStartDate": "1980-01-16T12:00:00Z"}, "daymet-monthly-pr": {"abstract": "Monthly climate summaries derived from [Daymet](https://daymet.ornl.gov) Version 4 daily data at a 1 km x 1 km spatial resolution for five variables: minimum and maximum temperature, precipitation, vapor pressure, and snow water equivalent. Annual averages are provided for minimum and maximum temperature, vapor pressure, and snow water equivalent, and annual totals are provided for the precipitation variable.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. 
Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1855](https://doi.org/10.3334/ORNLDAAC/1855) to cite your usage of the data.\n\nThis dataset provides coverage for Hawaii; North America and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#monthly).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,daymet,daymet-monthly-pr,precipitation,puerto-rico,temperature,vapor-pressure", "license": "proprietary", "title": "Daymet Monthly Puerto Rico", "missionStartDate": "1980-01-16T12:00:00Z"}, "gnatsgo-tables": {"abstract": "This collection contains the table data for gNATSGO. This table data can be used to determine the values of raster data cells for Items in the [gNATSGO Rasters](https://planetarycomputer.microsoft.com/dataset/gnatsgo-rasters) Collection.\n\nThe gridded National Soil Survey Geographic Database (gNATSGO) is a USDA-NRCS Soil & Plant Science Division (SPSD) composite database that provides complete coverage of the best available soils information for all areas of the United States and Island Territories. It was created by combining data from the Soil Survey Geographic Database (SSURGO), State Soil Geographic Database (STATSGO2), and Raster Soil Survey Databases (RSS) into a single seamless ESRI file geodatabase.\n\nSSURGO is the SPSD flagship soils database that has over 100 years of field-validated detailed soil mapping data. SSURGO contains soils information for more than 90 percent of the United States and island territories, but unmapped land remains. STATSGO2 is a general soil map that has soils data for all of the United States and island territories, but the data is not as detailed as the SSURGO data. The Raster Soil Surveys (RSSs) are the next generation soil survey databases developed using advanced digital soil mapping methods.\n\nThe gNATSGO database is composed primarily of SSURGO data, but STATSGO2 data was used to fill in the gaps. The RSSs are newer product with relatively limited spatial extent. These RSSs were merged into the gNATSGO after combining the SSURGO and STATSGO2 data. The extent of RSS is expected to increase in the coming years.\n\nSee the [official documentation](https://www.nrcs.usda.gov/wps/portal/nrcs/detail/soils/survey/geo/?cid=nrcseprd1464625)", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "gnatsgo-tables,natsgo,rss,soils,ssurgo,statsgo2,united-states,usda", "license": "CC0-1.0", "title": "gNATSGO Soil Database - Tables", "missionStartDate": "2020-07-01T00:00:00Z"}, "hgb": {"abstract": "This dataset provides temporally consistent and harmonized global maps of aboveground and belowground biomass carbon density for the year 2010 at 300m resolution. The aboveground biomass map integrates land-cover-specific, remotely sensed maps of woody, grassland, cropland, and tundra biomass. Input maps were amassed from the published literature and, where necessary, updated to cover the focal extent or time period. The belowground biomass map similarly integrates matching maps derived from each aboveground biomass map and land-cover-specific empirical models. 
Aboveground and belowground maps were then integrated separately using ancillary maps of percent tree/land cover and a rule-based decision tree. Maps reporting the accumulated uncertainty of pixel-level estimates are also provided.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "biomass,carbon,hgb,ornl", "license": "proprietary", "title": "HGB: Harmonized Global Biomass for 2010", "missionStartDate": "2010-12-31T00:00:00Z"}, "cop-dem-glo-30": {"abstract": "The Copernicus DEM is a digital surface model (DSM), which represents the surface of the Earth including buildings, infrastructure, and vegetation. This DSM is based on radar satellite data acquired during the TanDEM-X Mission, which was funded by a public-private partnership between the German Aerospace Centre (DLR) and Airbus Defence and Space.\n\nCopernicus DEM is available at both 30-meter and 90-meter resolution; this dataset has a horizontal resolution of approximately 30 meters.\n\nSee the [Product Handbook](https://object.cloud.sdsc.edu/v1/AUTH_opentopography/www/metadata/Copernicus_metadata.pdf) for more information.\n\nSee the dataset page on OpenTopography: \n\n", "instrument": null, "platform": null, "platformSerialIdentifier": "tandem-x", "processingLevel": null, "keywords": "cop-dem-glo-30,copernicus,dem,dsm,elevation,tandem-x", "license": "proprietary", "title": "Copernicus DEM GLO-30", "missionStartDate": "2021-04-22T00:00:00Z"}, "cop-dem-glo-90": {"abstract": "The Copernicus DEM is a digital surface model (DSM), which represents the surface of the Earth including buildings, infrastructure, and vegetation. This DSM is based on radar satellite data acquired during the TanDEM-X Mission, which was funded by a public-private partnership between the German Aerospace Centre (DLR) and Airbus Defence and Space.\n\nCopernicus DEM is available at both 30-meter and 90-meter resolution; this dataset has a horizontal resolution of approximately 90 meters.\n\nSee the [Product Handbook](https://object.cloud.sdsc.edu/v1/AUTH_opentopography/www/metadata/Copernicus_metadata.pdf) for more information.\n\nSee the dataset page on OpenTopography: \n\n", "instrument": null, "platform": null, "platformSerialIdentifier": "tandem-x", "processingLevel": null, "keywords": "cop-dem-glo-90,copernicus,dem,elevation,tandem-x", "license": "proprietary", "title": "Copernicus DEM GLO-90", "missionStartDate": "2021-04-22T00:00:00Z"}, "goes-cmi": {"abstract": "The GOES-R Advanced Baseline Imager (ABI) L2 Cloud and Moisture Imagery product provides 16 reflective and emissive bands at high temporal cadence over the Western Hemisphere.\n\nThe GOES-R series is the latest in the Geostationary Operational Environmental Satellites (GOES) program, which has been operated in a collaborative effort by NOAA and NASA since 1975. The operational GOES-R Satellites, GOES-16, GOES-17, and GOES-18, capture 16-band imagery from geostationary orbits over the Western Hemisphere via the Advance Baseline Imager (ABI) radiometer. The ABI captures 2 visible, 4 near-infrared, and 10 infrared channels at resolutions between 0.5km and 2km.\n\n### Geographic coverage\n\nThe ABI captures three levels of coverage, each at a different temporal cadence depending on the modes described below. 
The geographic coverage for each image is described by the `goes:image-type` STAC Item property.\n\n- _FULL DISK_: a circular image depicting nearly full coverage of the Western Hemisphere.\n- _CONUS_: a 3,000 (lat) by 5,000 (lon) km rectangular image depicting the Continental U.S. (GOES-16) or the Pacific Ocean including Hawaii (GOES-17).\n- _MESOSCALE_: a 1,000 by 1,000 km rectangular image. GOES-16 and 17 both alternate between two different mesoscale geographic regions.\n\n### Modes\n\nThere are three standard scanning modes for the ABI instrument: Mode 3, Mode 4, and Mode 6.\n\n- Mode _3_ consists of one observation of the full disk scene of the Earth, three observations of the continental United States (CONUS), and thirty observations for each of two distinct mesoscale views every fifteen minutes.\n- Mode _4_ consists of the observation of the full disk scene every five minutes.\n- Mode _6_ consists of one observation of the full disk scene of the Earth, two observations of the continental United States (CONUS), and twenty observations for each of two distinct mesoscale views every ten minutes.\n\nThe mode that each image was captured with is described by the `goes:mode` STAC Item property.\n\nSee this [ABI Scan Mode Demonstration](https://youtu.be/_c5H6R-M0s8) video for an idea of how the ABI scans multiple geographic regions over time.\n\n### Cloud and Moisture Imagery\n\nThe Cloud and Moisture Imagery product contains one or more images with pixel values identifying \"brightness values\" that are scaled to support visual analysis. Cloud and Moisture Imagery product (CMIP) files are generated for each of the sixteen ABI reflective and emissive bands. In addition, there is a multi-band product file that includes the imagery at all bands (MCMIP).\n\nThe Planetary Computer STAC Collection `goes-cmi` captures both the CMIP and MCMIP product files into individual STAC Items for each observation from a GOES-R satellite. It contains the original CMIP and MCMIP NetCDF files, as well as cloud-optimized GeoTIFF (COG) exports of the data from each MCMIP band (2km); the full-resolution CMIP band for bands 1, 2, 3, and 5; and a Web Mercator COG of bands 1, 2 and 3, which are useful for rendering.\n\nThis product is not in a standard coordinate reference system (CRS), which can cause issues with some tooling that does not handle non-standard large geographic regions.\n\n### For more information\n- [Beginner\u2019s Guide to GOES-R Series Data](https://www.goes-r.gov/downloads/resources/documents/Beginners_Guide_to_GOES-R_Series_Data.pdf)\n- [GOES-R Series Product Definition and Users\u2019 Guide: Volume 5 (Level 2A+ Products)](https://www.goes-r.gov/products/docs/PUG-L2+-vol5.pdf) ([Spanish verison](https://github.com/NOAA-Big-Data-Program/bdp-data-docs/raw/main/GOES/QuickGuides/Spanish/Guia%20introductoria%20para%20datos%20de%20la%20serie%20GOES-R%20V1.1%20FINAL2%20-%20Copy.pdf))\n\n", "instrument": "ABI", "platform": null, "platformSerialIdentifier": "GOES-16,GOES-17,GOES-18", "processingLevel": null, "keywords": "abi,cloud,goes,goes-16,goes-17,goes-18,goes-cmi,moisture,nasa,noaa,satellite", "license": "proprietary", "title": "GOES-R Cloud & Moisture Imagery", "missionStartDate": "2017-02-28T00:16:52Z"}, "terraclimate": {"abstract": "[TerraClimate](http://www.climatologylab.org/terraclimate.html) is a dataset of monthly climate and climatic water balance for global terrestrial surfaces from 1958 to the present. 
These data provide important inputs for ecological and hydrological studies at global scales that require high spatial resolution and time-varying data. All data have monthly temporal resolution and a ~4-km (1/24th degree) spatial resolution. This dataset is provided in [Zarr](https://zarr.readthedocs.io/) format.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,precipitation,temperature,terraclimate,vapor-pressure,water", "license": "CC0-1.0", "title": "TerraClimate", "missionStartDate": "1958-01-01T00:00:00Z"}, "nasa-nex-gddp-cmip6": {"abstract": "The NEX-GDDP-CMIP6 dataset is comprised of global downscaled climate scenarios derived from the General Circulation Model (GCM) runs conducted under the Coupled Model Intercomparison Project Phase 6 (CMIP6) and across two of the four \u201cTier 1\u201d greenhouse gas emissions scenarios known as Shared Socioeconomic Pathways (SSPs). The CMIP6 GCM runs were developed in support of the Sixth Assessment Report of the Intergovernmental Panel on Climate Change (IPCC AR6). This dataset includes downscaled projections from ScenarioMIP model runs for which daily scenarios were produced and distributed through the Earth System Grid Federation. The purpose of this dataset is to provide a set of global, high resolution, bias-corrected climate change projections that can be used to evaluate climate change impacts on processes that are sensitive to finer-scale climate gradients and the effects of local topography on climate conditions.\n\nThe [NASA Center for Climate Simulation](https://www.nccs.nasa.gov/) maintains the [next-gddp-cmip6 product page](https://www.nccs.nasa.gov/services/data-collections/land-based-products/nex-gddp-cmip6) where you can find more information about these datasets. Users are encouraged to review the [technote](https://www.nccs.nasa.gov/sites/default/files/NEX-GDDP-CMIP6-Tech_Note.pdf), provided alongside the data set, where more detailed information, references and acknowledgements can be found.\n\nThis collection contains many NetCDF files. There is one NetCDF file per `(model, scenario, variable, year)` tuple.\n\n- **model** is the name of a modeling group (e.g. \"ACCESS-CM-2\"). See the `cmip6:model` summary in the STAC collection for a full list of models.\n- **scenario** is one of \"historical\", \"ssp245\" or \"ssp585\".\n- **variable** is one of \"hurs\", \"huss\", \"pr\", \"rlds\", \"rsds\", \"sfcWind\", \"tas\", \"tasmax\", \"tasmin\".\n- **year** depends on the value of *scenario*. For \"historical\", the values range from 1950 to 2014 (inclusive). For \"ssp245\" and \"ssp585\", the years range from 2015 to 2100 (inclusive).\n\nIn addition to the NetCDF files, we provide some *experimental* **reference files** as collection-level dataset assets. These are JSON files implementing the [references specification](https://fsspec.github.io/kerchunk/spec.html).\nThese files include the positions of data variables within the binary NetCDF files, which can speed up reading the metadata. 
See the example notebook for more.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,cmip6,humidity,nasa,nasa-nex-gddp-cmip6,precipitation,temperature", "license": "proprietary", "title": "Earth Exchange Global Daily Downscaled Projections (NEX-GDDP-CMIP6)", "missionStartDate": "1950-01-01T00:00:00Z"}, "gpm-imerg-hhr": {"abstract": "The Integrated Multi-satellitE Retrievals for GPM (IMERG) algorithm combines information from the [GPM satellite constellation](https://gpm.nasa.gov/missions/gpm/constellation) to estimate precipitation over the majority of the Earth's surface. This algorithm is particularly valuable over the majority of the Earth's surface that lacks precipitation-measuring instruments on the ground. Now in the latest Version 06 release of IMERG the algorithm fuses the early precipitation estimates collected during the operation of the TRMM satellite (2000 - 2015) with more recent precipitation estimates collected during operation of the GPM satellite (2014 - present). The longer the record, the more valuable it is, as researchers and application developers will attest. By being able to compare and contrast past and present data, researchers are better informed to make climate and weather models more accurate, better understand normal and extreme rain and snowfall around the world, and strengthen applications for current and future disasters, disease, resource management, energy production and food security.\n\nFor more, see the [IMERG homepage](https://gpm.nasa.gov/data/imerg) The [IMERG Technical documentation](https://gpm.nasa.gov/sites/default/files/2020-10/IMERG_doc_201006.pdf) provides more information on the algorithm, input datasets, and output products.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "gpm,gpm-imerg-hhr,imerg,precipitation", "license": "proprietary", "title": "GPM IMERG", "missionStartDate": "2000-06-01T00:00:00Z"}, "gnatsgo-rasters": {"abstract": "This collection contains the raster data for gNATSGO. In order to use the map unit values contained in the `mukey` raster asset, you'll need to join to tables represented as Items in the [gNATSGO Tables](https://planetarycomputer.microsoft.com/dataset/gnatsgo-tables) Collection. Many items have commonly used values encoded in additional raster assets.\n\nThe gridded National Soil Survey Geographic Database (gNATSGO) is a USDA-NRCS Soil & Plant Science Division (SPSD) composite database that provides complete coverage of the best available soils information for all areas of the United States and Island Territories. It was created by combining data from the Soil Survey Geographic Database (SSURGO), State Soil Geographic Database (STATSGO2), and Raster Soil Survey Databases (RSS) into a single seamless ESRI file geodatabase.\n\nSSURGO is the SPSD flagship soils database that has over 100 years of field-validated detailed soil mapping data. SSURGO contains soils information for more than 90 percent of the United States and island territories, but unmapped land remains. STATSGO2 is a general soil map that has soils data for all of the United States and island territories, but the data is not as detailed as the SSURGO data. The Raster Soil Surveys (RSSs) are the next generation soil survey databases developed using advanced digital soil mapping methods.\n\nThe gNATSGO database is composed primarily of SSURGO data, but STATSGO2 data was used to fill in the gaps. 
The RSSs are a newer product with relatively limited spatial extent. These RSSs were merged into the gNATSGO after combining the SSURGO and STATSGO2 data. The extent of RSS is expected to increase in the coming years.\n\nSee the [official documentation](https://www.nrcs.usda.gov/wps/portal/nrcs/detail/soils/survey/geo/?cid=nrcseprd1464625)", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "gnatsgo-rasters,natsgo,rss,soils,ssurgo,statsgo2,united-states,usda", "license": "CC0-1.0", "title": "gNATSGO Soil Database - Rasters", "missionStartDate": "2020-07-01T00:00:00Z"}, "3dep-lidar-hag": {"abstract": "This COG type is generated using the Z dimension of the [COPC data](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc), removing noise and water, using [`pdal.filters.smrf`](https://pdal.io/stages/filters.smrf.html#filters-smrf) followed by [`pdal.filters.hag_nn`](https://pdal.io/stages/filters.hag_nn.html#filters-hag-nn).\n\nThe Height Above Ground Nearest Neighbor filter takes as input a point cloud with Classification set to 2 for ground points. It creates a new dimension, HeightAboveGround, that contains the normalized height values.\n\nGround points may be generated with [`pdal.filters.pmf`](https://pdal.io/stages/filters.pmf.html#filters-pmf) or [`pdal.filters.smrf`](https://pdal.io/stages/filters.smrf.html#filters-smrf), but you can use any method you choose, as long as the ground returns are marked.\n\nNormalized heights are a commonly used attribute of point cloud data. This can also be referred to as height above ground (HAG) or above ground level (AGL) heights. In the end, it is simply a measure of a point's relative height as opposed to its raw elevation value.\n\nThe filter finds the number of ground points nearest to the non-ground point under consideration. It calculates an average ground height weighted by the distance of each ground point from the non-ground point. The HeightAboveGround is the difference between the Z value of the non-ground point and the interpolated ground height.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-hag,cog,elevation,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Height above Ground", "missionStartDate": "2012-01-01T00:00:00Z"}, "3dep-lidar-intensity": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc). It is a collection of Cloud Optimized GeoTIFFs representing the pulse return magnitude.\n\nThe values are based on the Intensity [PDAL dimension](https://pdal.io/dimensions.html) and use [`pdal.filters.outlier`](https://pdal.io/stages/filters.outlier.html#filters-outlier) and [`pdal.filters.range`](https://pdal.io/stages/filters.range.html#filters-range) to remove outliers and noise.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-intensity,cog,intensity,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Intensity", "missionStartDate": "2012-01-01T00:00:00Z"}, "3dep-lidar-pointsourceid": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc). It is a collection of Cloud Optimized GeoTIFFs representing the file source ID from which the point originated.
Zero indicates that the point originated in the current file.\n\nThese values are based on the PointSourceId [PDAL dimension](https://pdal.io/dimensions.html) and use [`pdal.filters.outlier`](https://pdal.io/stages/filters.outlier.html#filters-outlier) and [`pdal.filters.range`](https://pdal.io/stages/filters.range.html#filters-range) to remove outliers and noise.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-pointsourceid,cog,pointsourceid,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Point Source", "missionStartDate": "2012-01-01T00:00:00Z"}, "mtbs": {"abstract": "[Monitoring Trends in Burn Severity](https://www.mtbs.gov/) (MTBS) is an inter-agency program whose goal is to consistently map the burn severity and extent of large fires across the United States from 1984 to the present. This includes all fires 1000 acres or greater in the Western United States and 500 acres or greater in the Eastern United States. The burn severity mosaics in this dataset consist of thematic raster images of MTBS burn severity classes for all currently completed MTBS fires for the continental United States and Alaska.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "fire,forest,mtbs,usda,usfs,usgs", "license": "proprietary", "title": "MTBS: Monitoring Trends in Burn Severity", "missionStartDate": "1984-12-31T00:00:00Z"}, "noaa-c-cap": {"abstract": "Nationally standardized, raster-based inventories of land cover for the coastal areas of the U.S. Data are derived, through the Coastal Change Analysis Program, from the analysis of multiple dates of remotely sensed imagery. Two file types are available: individual dates that supply a wall-to-wall map, and change files that compare one date to another. The use of standardized data and procedures assures consistency through time and across geographies. C-CAP data forms the coastal expression of the National Land Cover Database (NLCD) and the A-16 land cover theme of the National Spatial Data Infrastructure. The data are updated every 5 years.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "coastal,land-cover,land-use,noaa,noaa-c-cap", "license": "proprietary", "title": "C-CAP Regional Land Cover and Change", "missionStartDate": "1975-01-01T00:00:00Z"}, "3dep-lidar-copc": {"abstract": "This collection contains source data from the [USGS 3DEP program](https://www.usgs.gov/3d-elevation-program) reformatted into the [COPC](https://copc.io) format. A COPC file is a LAZ 1.4 file that stores point data organized in a clustered octree. It contains a VLR that describes the octree organization of data that are stored in LAZ 1.4 chunks. The end product is a one-to-one mapping of LAZ to UTM-reprojected COPC files.\n\nLAZ data is geospatial [LiDAR point cloud](https://en.wikipedia.org/wiki/Point_cloud) (LPC) content stored in the compressed [LASzip](https://laszip.org?) format. Data were reorganized and stored in LAZ-compatible [COPC](https://copc.io) organization for use in Planetary Computer, which supports incremental spatial access and cloud streaming.\n\nLPC can be summarized for construction of digital terrain models (DTM), filtered for extraction of features like vegetation and buildings, and visualized to provide a point cloud map of the physical spaces the laser scanner interacted with.
LPC content from 3DEP is used to compute and extract a variety of landscape characterization products, and some of them are provided by Planetary Computer, including Height Above Ground, Relative Intensity Image, and DTM and Digital Surface Models.\n\nThe LAZ tiles represent a one-to-one mapping of original tiled content as provided by the [USGS 3DEP program](https://www.usgs.gov/3d-elevation-program), with the exception that the data were reprojected and normalized into appropriate UTM zones for their location without adjustment to the vertical datum. In some cases, vertical datum description may not match actual data values, especially for pre-2010 USGS 3DEP point cloud data.\n\nIn addition to these COPC files, various higher-level derived products are available as Cloud Optimized GeoTIFFs in [other collections](https://planetarycomputer.microsoft.com/dataset/group/3dep-lidar).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-copc,cog,point-cloud,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Point Cloud", "missionStartDate": "2012-01-01T00:00:00Z"}, "modis-64A1-061": {"abstract": "The Terra and Aqua combined MCD64A1 Version 6.1 Burned Area data product is a monthly, global gridded 500 meter (m) product containing per-pixel burned-area and quality information. The MCD64A1 burned-area mapping approach employs 500 m Moderate Resolution Imaging Spectroradiometer (MODIS) Surface Reflectance imagery coupled with 1 kilometer (km) MODIS active fire observations. The algorithm uses a burn sensitive Vegetation Index (VI) to create dynamic thresholds that are applied to the composite data. The VI is derived from MODIS shortwave infrared atmospherically corrected surface reflectance bands 5 and 7 with a measure of temporal texture. The algorithm identifies the date of burn for the 500 m grid cells within each individual MODIS tile. The date is encoded in a single data layer as the ordinal day of the calendar year on which the burn occurred with values assigned to unburned land pixels and additional special values reserved for missing data and water grid cells. The data layers provided in the MCD64A1 product include Burn Date, Burn Data Uncertainty, Quality Assurance, along with First Day and Last Day of reliable change detection of the year.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,fire,global,imagery,mcd64a1,modis,modis-64a1-061,nasa,satellite,terra", "license": "proprietary", "title": "MODIS Burned Area Monthly", "missionStartDate": "2000-11-01T00:00:00Z"}, "alos-fnf-mosaic": {"abstract": "The global 25m resolution SAR mosaics and forest/non-forest maps are free and open annual datasets generated by [JAXA](https://www.eorc.jaxa.jp/ALOS/en/dataset/fnf_e.htm) using the L-band Synthetic Aperture Radar sensors on the Advanced Land Observing Satellite-2 (ALOS-2 PALSAR-2), the Advanced Land Observing Satellite (ALOS PALSAR) and the Japanese Earth Resources Satellite-1 (JERS-1 SAR).\n\nThe global forest/non-forest maps (FNF) were generated by a Random Forest machine learning-based classification method, with the re-processed global 25m resolution [PALSAR-2 mosaic dataset](https://planetarycomputer.microsoft.com/dataset/alos-palsar-mosaic) (Ver. 2.0.0) as input. Here, the \"forest\" is defined as the tree covered land with an area larger than 0.5 ha and a canopy cover of over 10 %, in accordance with the FAO definition of forest. 
The classification results are presented in four categories, with two categories of forest areas: forests with a canopy cover of 90 % or more and forests with a canopy cover of 10 % to 90 %, depending on the density of the forest area.\n\nSee the [Product Description](https://www.eorc.jaxa.jp/ALOS/en/dataset/pdf/DatasetDescription_PALSAR2_FNF_V200.pdf) for more details.\n", "instrument": "PALSAR,PALSAR-2", "platform": null, "platformSerialIdentifier": "ALOS,ALOS-2", "processingLevel": null, "keywords": "alos,alos-2,alos-fnf-mosaic,forest,global,jaxa,land-cover,palsar,palsar-2", "license": "proprietary", "title": "ALOS Forest/Non-Forest Annual Mosaic", "missionStartDate": "2015-01-01T00:00:00Z"}, "3dep-lidar-returns": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc). It is a collection of Cloud Optimized GeoTIFFs representing the number of returns for a given pulse.\n\nThe values are based on the NumberOfReturns [PDAL dimension](https://pdal.io/dimensions.html) and use [`pdal.filters.outlier`](https://pdal.io/stages/filters.outlier.html#filters-outlier) and [`pdal.filters.range`](https://pdal.io/stages/filters.range.html#filters-range) to remove outliers and noise.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-returns,cog,numberofreturns,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Returns", "missionStartDate": "2012-01-01T00:00:00Z"}, "mobi": {"abstract": "The [Map of Biodiversity Importance](https://www.natureserve.org/conservation-tools/projects/map-biodiversity-importance) (MoBI) consists of raster maps that combine habitat information for 2,216 imperiled species occurring in the conterminous United States, using weightings based on range size and degree of protection to identify areas of high importance for biodiversity conservation. Species included in the project are those which, as of September 2018, had a global conservation status of G1 (critically imperiled) or G2 (imperiled) or which are listed as threatened or endangered at the full species level under the United States Endangered Species Act. Taxonomic groups included in the project are vertebrates (birds, mammals, amphibians, reptiles, turtles, crocodilians, and freshwater and anadromous fishes), vascular plants, selected aquatic invertebrates (freshwater mussels and crayfish) and selected pollinators (bumblebees, butterflies, and skippers).\n\nThere are three types of spatial data provided, described in more detail below: species richness, range-size rarity, and protection-weighted range-size rarity. For each type, this data set includes five different layers – one for all species combined, and four additional layers that break the data down by taxonomic group (vertebrates, plants, freshwater invertebrates, and pollinators) – for a total of fifteen layers.\n\nThese data layers are intended to identify areas of high potential value for on-the-ground biodiversity protection efforts. As a synthesis of predictive models, they cannot guarantee either the presence or absence of imperiled species at a given location.
For site-specific decision-making, these data should be used in conjunction with field surveys and/or documented occurrence data, such as is available from the [NatureServe Network](https://www.natureserve.org/natureserve-network).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "biodiversity,mobi,natureserve,united-states", "license": "proprietary", "title": "MoBI: Map of Biodiversity Importance", "missionStartDate": "2020-04-14T00:00:00Z"}, "landsat-c2-l2": {"abstract": "Landsat Collection 2 Level-2 [Science Products](https://www.usgs.gov/landsat-missions/landsat-collection-2-level-2-science-products), consisting of atmospherically corrected [surface reflectance](https://www.usgs.gov/landsat-missions/landsat-collection-2-surface-reflectance) and [surface temperature](https://www.usgs.gov/landsat-missions/landsat-collection-2-surface-temperature) image data. Collection 2 Level-2 Science Products are available from August 22, 1982 to present.\n\nThis dataset represents the global archive of Level-2 data from [Landsat Collection 2](https://www.usgs.gov/core-science-systems/nli/landsat/landsat-collection-2) acquired by the [Thematic Mapper](https://landsat.gsfc.nasa.gov/thematic-mapper/) onboard Landsat 4 and 5, the [Enhanced Thematic Mapper](https://landsat.gsfc.nasa.gov/the-enhanced-thematic-mapper-plus-etm/) onboard Landsat 7, and the [Operational Land Imager](https://landsat.gsfc.nasa.gov/satellites/landsat-8/spacecraft-instruments/operational-land-imager/) and [Thermal Infrared Sensor](https://landsat.gsfc.nasa.gov/satellites/landsat-8/spacecraft-instruments/thermal-infrared-sensor/) onboard Landsat 8 and 9. Images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\n", "instrument": "tm,etm+,oli,tirs", "platform": null, "platformSerialIdentifier": "landsat-4,landsat-5,landsat-7,landsat-8,landsat-9", "processingLevel": null, "keywords": "etm+,global,imagery,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2-l2,nasa,oli,reflectance,satellite,temperature,tirs,tm,usgs", "license": "proprietary", "title": "Landsat Collection 2 Level-2", "missionStartDate": "1982-08-22T00:00:00Z"}, "era5-pds": {"abstract": "ERA5 is the fifth generation ECMWF atmospheric reanalysis of the global climate\ncovering the period from January 1950 to present. ERA5 is produced by the\nCopernicus Climate Change Service (C3S) at ECMWF.\n\nReanalysis combines model data with observations from across the world into a\nglobally complete and consistent dataset using the laws of physics. This\nprinciple, called data assimilation, is based on the method used by numerical\nweather prediction centres, where every so many hours (12 hours at ECMWF) a\nprevious forecast is combined with newly available observations in an optimal\nway to produce a new best estimate of the state of the atmosphere, called\nanalysis, from which an updated, improved forecast is issued. Reanalysis works\nin the same way, but at reduced resolution to allow for the provision of a\ndataset spanning back several decades.
Reanalysis does not have the constraint\nof issuing timely forecasts, so there is more time to collect observations, and\nwhen going further back in time, to allow for the ingestion of improved versions\nof the original observations, which all benefit the quality of the reanalysis\nproduct.\n\nThis dataset was converted to Zarr by [Planet OS](https://planetos.com/).\nSee [their documentation](https://github.com/planet-os/notebooks/blob/master/aws/era5-pds.md)\nfor more.\n\n## STAC Metadata\n\nTwo types of data variables are provided: \"forecast\" (`fc`) and \"analysis\" (`an`).\n\n* An **analysis**, of the atmospheric conditions, is a blend of observations\n with a previous forecast. An analysis can only provide\n [instantaneous](https://confluence.ecmwf.int/display/CKB/Model+grid+box+and+time+step)\n parameters (parameters valid at a specific time, e.g. temperature at 12:00),\n but not accumulated parameters, mean rates or min/max parameters.\n* A **forecast** starts with an analysis at a specific time (the 'initialization\n time'), and a model computes the atmospheric conditions for a number of\n 'forecast steps', at increasing 'validity times', into the future. A forecast\n can provide\n [instantaneous](https://confluence.ecmwf.int/display/CKB/Model+grid+box+and+time+step)\n parameters, accumulated parameters, mean rates, and min/max parameters.\n\nEach [STAC](https://stacspec.org/) item in this collection covers a single month\nand the entire globe. There are two STAC items per month, one for each type of data\nvariable (`fc` and `an`). The STAC items include an `ecmwf:kind` property to\nindicate which kind of variables that STAC item catalogs.\n\n## How to acknowledge, cite and refer to ERA5\n\nAll users of data on the Climate Data Store (CDS) disks (using either the web interface or the CDS API) must provide clear and visible attribution to the Copernicus programme and are asked to cite and reference the dataset provider:\n\nAcknowledge according to the [licence to use Copernicus Products](https://cds.climate.copernicus.eu/api/v2/terms/static/licence-to-use-copernicus-products.pdf).\n\nCite each dataset used as indicated on the relevant CDS entries (see link to \"Citation\" under References on the Overview page of the dataset entry).\n\nThroughout the content of your publication, the dataset used is referred to as Author (YYYY).\n\nThe 3-step procedure above is illustrated with this example: [Use Case 2: ERA5 hourly data on single levels from 1979 to present](https://confluence.ecmwf.int/display/CKB/Use+Case+2%3A+ERA5+hourly+data+on+single+levels+from+1979+to+present).\n\nFor complete details, please refer to [How to acknowledge and cite a Climate Data Store (CDS) catalogue entry and the data published as part of it](https://confluence.ecmwf.int/display/CKB/How+to+acknowledge+and+cite+a+Climate+Data+Store+%28CDS%29+catalogue+entry+and+the+data+published+as+part+of+it).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "ecmwf,era5,era5-pds,precipitation,reanalysis,temperature,weather", "license": "proprietary", "title": "ERA5 - PDS", "missionStartDate": "1979-01-01T00:00:00Z"}, "chloris-biomass": {"abstract": "The Chloris Global Biomass 2003 - 2019 dataset provides estimates of stock and change in aboveground biomass for Earth's terrestrial woody vegetation ecosystems. It covers the period 2003 - 2019, at annual time steps.
The global dataset has a circa 4.6 km spatial resolution.\n\nThe maps and data sets were generated by combining multiple remote sensing measurements from space borne satellites, processed using state-of-the-art machine learning and statistical methods, validated with field data from multiple countries. The dataset provides direct estimates of aboveground stock and change that are not based on land use or land cover area change, and as such they include gains and losses of carbon stock in all types of woody vegetation - whether natural or plantations.\n\nAnnual stocks are expressed in units of tons of biomass. Annual changes in stocks are expressed in units of CO2 equivalent, i.e., the amount of CO2 released from or taken up by terrestrial ecosystems for that specific pixel.\n\nThe spatial data sets are available on [Microsoft\u2019s Planetary Computer](https://planetarycomputer.microsoft.com/dataset/chloris-biomass) under a Creative Commons license of the type Attribution-NonCommercial-ShareAlike [CC BY-NC-SA](https://spdx.org/licenses/CC-BY-NC-SA-4.0.html).\n\n[Chloris Geospatial](https://chloris.earth/) is a mission-driven technology company that develops software and data products on the state of natural capital for use by business, governments, and the social sector.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "biomass,carbon,chloris,chloris-biomass,modis", "license": "CC-BY-NC-SA-4.0", "title": "Chloris Biomass", "missionStartDate": "2003-07-31T00:00:00Z"}, "kaza-hydroforecast": {"abstract": "This dataset is a daily updated set of HydroForecast seasonal river flow forecasts at six locations in the Kwando and Upper Zambezi river basins. For more details about the locations and project context, and to interactively view current and previous forecasts, visit our [public website](https://dashboard.hydroforecast.com/public/wwf-kaza).\n\n## Flow forecast dataset and model description\n\n[HydroForecast](https://www.upstream.tech/hydroforecast) is a theory-guided machine learning hydrologic model that predicts streamflow in basins across the world. For the Kwando and Upper Zambezi, HydroForecast makes daily predictions of streamflow rates using a [seasonal analog approach](https://support.upstream.tech/article/125-seasonal-analog-model-a-technical-overview). The model's output is probabilistic and the mean, median and a range of quantiles are available at each forecast step.\n\nThe underlying model has the following attributes: \n\n* Timestep: 10 days\n* Horizon: 10 to 180 days \n* Update frequency: daily\n* Units: cubic meters per second (m\u00b3/s)\n \n## Site details\n\nThe model produces output for six locations in the Kwando and Upper Zambezi river basins.\n\n* Upper Zambezi sites\n * Zambezi at Chavuma\n * Luanginga at Kalabo\n* Kwando basin sites\n * Kwando at Kongola -- total basin flows\n * Kwando Sub-basin 1\n * Kwando Sub-basin 2 \n * Kwando Sub-basin 3\n * Kwando Sub-basin 4\n * Kwando Kongola Sub-basin\n\n## STAC metadata\n\nThere is one STAC item per location.
Each STAC item has a single asset linking to a Parquet file in Azure Blob Storage.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "hydroforecast,hydrology,kaza-hydroforecast,streamflow,upstream-tech,water", "license": "CDLA-Sharing-1.0", "title": "HydroForecast - Kwando & Upper Zambezi Rivers", "missionStartDate": "2022-01-01T00:00:00Z"}, "planet-nicfi-analytic": {"abstract": "*Note: Assets in this collection are only available to winners of the [GEO-Microsoft Planetary Computer RFP](https://www.earthobservations.org/geo_blog_obs.php?id=528). Others wishing to use the data can sign up and access it from Planet at [https://www.planet.com/nicfi/](https://www.planet.com/nicfi/) and email [planetarycomputer@microsoft.com](mailto:planetarycomputer@microsoft.com).*\n\nThrough Norway\u2019s International Climate & Forests Initiative (NICFI), users can access Planet\u2019s high-resolution, analysis-ready mosaics of the world\u2019s tropics in order to help reduce and reverse the loss of tropical forests, combat climate change, conserve biodiversity, and facilitate sustainable development.\n\nIn support of NICFI\u2019s mission, you can use this data for a number of projects including, but not limited to:\n\n* Advance scientific research about the world\u2019s tropical forests and the critical services they provide.\n* Implement and improve policies for sustainable forest management and land use in developing tropical forest countries and jurisdictions.\n* Increase transparency and accountability in the tropics.\n* Protect and improve the rights of indigenous peoples and local communities in tropical forest countries.\n* Innovate solutions towards reducing pressure on forests from global commodities and financial markets.\n* In short, the primary purpose of the NICFI Program is to support reducing and reversing the loss of tropical forests, contributing to combating climate change, conserving biodiversity, contributing to forest regrowth, restoration, and enhancement, and facilitating sustainable development, all of which must be Non-Commercial Use.\n\nTo learn more about the NICFI program, including streaming and downloading basemaps, please read the [NICFI Data Program User Guide](https://assets.planet.com/docs/NICFI_UserGuidesFAQ.pdf).\n\nThis collection contains both monthly and biannual mosaics. Biannual mosaics are available from December 2015 - August 2020. Monthly mosaics are available from September 2020. The STAC items include a `planet-nicfi:cadence` field indicating the type of mosaic.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "imagery,nicfi,planet,planet-nicfi-analytic,satellite,tropics", "license": "proprietary", "title": "Planet-NICFI Basemaps (Analytic)", "missionStartDate": "2015-12-01T00:00:00Z"}, "modis-17A2H-061": {"abstract": "The Version 6.1 Gross Primary Productivity (GPP) product is a cumulative 8-day composite of values with 500 meter (m) pixel size based on the radiation use efficiency concept that can be potentially used as inputs to data models to calculate terrestrial energy, carbon, water cycle processes, and biogeochemistry of vegetation. The Moderate Resolution Imaging Spectroradiometer (MODIS) data product includes information about GPP and Net Photosynthesis (PSN). The PSN band values are the GPP less the Maintenance Respiration (MR). The data product also contains a PSN Quality Control (QC) layer.
The quality layer contains quality information for both the GPP and the PSN.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod17a2h,modis,modis-17a2h-061,myd17a2h,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Gross Primary Productivity 8-Day", "missionStartDate": "2000-02-18T00:00:00Z"}, "modis-11A2-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) Land Surface Temperature/Emissivity 8-Day Version 6.1 product provides an average 8-day per-pixel Land Surface Temperature and Emissivity (LST&E) with a 1 kilometer (km) spatial resolution in a 1,200 by 1,200 km grid. Each pixel value in the MOD11A2 is a simple average of all the corresponding MOD11A1 LST pixels collected within that 8-day period. The 8-day compositing period was chosen because twice that period is the exact ground track repeat period of the Terra and Aqua platforms. Provided along with the daytime and nighttime surface temperature bands are associated quality control assessments, observation times, view zenith angles, and clear-sky coverages along with bands 31 and 32 emissivities from land cover types.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod11a2,modis,modis-11a2-061,myd11a2,nasa,satellite,temperature,terra", "license": "proprietary", "title": "MODIS Land Surface Temperature/Emissivity 8-Day", "missionStartDate": "2000-02-18T00:00:00Z"}, "daymet-daily-pr": {"abstract": "Gridded estimates of daily weather parameters. [Daymet](https://daymet.ornl.gov) Version 4 variables include the following parameters: minimum temperature, maximum temperature, precipitation, shortwave radiation, vapor pressure, snow water equivalent, and day length.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1840](https://doi.org/10.3334/ORNLDAAC/1840) to cite your usage of the data.\n\nThis dataset provides coverage for Puerto Rico; North America and Hawaii are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#daily).\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "daymet,daymet-daily-pr,precipitation,puerto-rico,temperature,vapor-pressure,weather", "license": "proprietary", "title": "Daymet Daily Puerto Rico", "missionStartDate": "1980-01-01T12:00:00Z"}, "3dep-lidar-dtm-native": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc).
It creates a Digital Terrain Model (DTM) using the vendor-provided (native) ground classification and [`pdal.filters.range`](https://pdal.io/stages/filters.range.html#filters-range) to output a collection of Cloud Optimized GeoTIFFs, removing all points that have been classified as noise.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-dtm-native,cog,dtm,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Digital Terrain Model (Native)", "missionStartDate": "2012-01-01T00:00:00Z"}, "3dep-lidar-classification": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc). It uses the [ASPRS](https://www.asprs.org/) (American Society for Photogrammetry and Remote Sensing) [Lidar point classification](https://desktop.arcgis.com/en/arcmap/latest/manage-data/las-dataset/lidar-point-classification.htm). See [LAS specification](https://www.ogc.org/standards/LAS) for details.\n\nThis COG type is based on the Classification [PDAL dimension](https://pdal.io/dimensions.html) and uses [`pdal.filters.range`](https://pdal.io/stages/filters.range.html) to select a subset of interesting classifications. Do note that not all LiDAR collections contain a full complement of classification labels.\nTo remove outliers, the PDAL pipeline uses a noise filter and then outputs the Classification dimension.\n\nThe STAC collection implements the [`item_assets`](https://github.com/stac-extensions/item-assets) and [`classification`](https://github.com/stac-extensions/classification) extensions. These classes are displayed in the \"Item assets\" below. You can programmatically access the full list of class values and descriptions using the `classification:classes` field from the `data` asset on the STAC collection.\n\nClassification rasters were produced as a subset of LiDAR classification categories:\n\n```\n0, Never Classified\n1, Unclassified\n2, Ground\n3, Low Vegetation\n4, Medium Vegetation\n5, High Vegetation\n6, Building\n9, Water\n10, Rail\n11, Road\n17, Bridge Deck\n```\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-classification,classification,cog,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Classification", "missionStartDate": "2012-01-01T00:00:00Z"}, "3dep-lidar-dtm": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc).
It creates a Digital Terrain Model (DTM) using [`pdal.filters.smrf`](https://pdal.io/stages/filters.smrf.html#filters-smrf) to output a collection of Cloud Optimized GeoTIFFs.\n\nThe Simple Morphological Filter (SMRF) classifies ground points based on the approach outlined in [Pingel2013](https://pdal.io/references.html#pingel2013).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-dtm,cog,dtm,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Digital Terrain Model", "missionStartDate": "2012-01-01T00:00:00Z"}, "gap": {"abstract": "The [USGS GAP/LANDFIRE National Terrestrial Ecosystems data](https://www.sciencebase.gov/catalog/item/573cc51be4b0dae0d5e4b0c5), based on the [NatureServe Terrestrial Ecological Systems](https://www.natureserve.org/products/terrestrial-ecological-systems-united-states), are the foundation of the most detailed, consistent map of vegetation available for the United States. These data facilitate planning and management for biological diversity on a regional and national scale.\n\nThis dataset includes the [land cover](https://www.usgs.gov/core-science-systems/science-analytics-and-synthesis/gap/science/land-cover) component of the GAP/LANDFIRE project.\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "gap,land-cover,landfire,united-states,usgs", "license": "proprietary", "title": "USGS Gap Land Cover", "missionStartDate": "1999-01-01T00:00:00Z"}, "modis-17A2HGF-061": {"abstract": "The Version 6.1 Gross Primary Productivity (GPP) product is a cumulative 8-day composite of values with 500 meter (m) pixel size based on the radiation use efficiency concept that can be potentially used as inputs to data models to calculate terrestrial energy, carbon, water cycle processes, and biogeochemistry of vegetation. The Moderate Resolution Imaging Spectroradiometer (MODIS) data product includes information about GPP and Net Photosynthesis (PSN). The PSN band values are the GPP less the Maintenance Respiration (MR). The data product also contains a PSN Quality Control (QC) layer. The quality layer contains quality information for both the GPP and the PSN. This product will be generated at the end of each year when the entire yearly 8-day 15A2H is available. Hence, the gap-filled product (17A2HGF) is an improved version that has cleaned the poor-quality inputs from the 8-day Leaf Area Index and Fraction of Photosynthetically Active Radiation (FPAR/LAI) based on the Quality Control (QC) label for every pixel. If any LAI/FPAR pixel did not meet the quality screening criteria, its value is determined through linear interpolation. However, users cannot get this product in near-real time because it will be generated only at the end of a given year.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod17a2hgf,modis,modis-17a2hgf-061,myd17a2hgf,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Gross Primary Productivity 8-Day Gap-Filled", "missionStartDate": "2000-02-18T00:00:00Z"}, "planet-nicfi-visual": {"abstract": "*Note: Assets in this collection are only available to winners of the [GEO-Microsoft Planetary Computer RFP](https://www.earthobservations.org/geo_blog_obs.php?id=528).
Others wishing to use the data can sign up and access it from Planet at [https://www.planet.com/nicfi/](https://www.planet.com/nicfi/) and email [planetarycomputer@microsoft.com](mailto:planetarycomputer@microsoft.com).*\n\nThrough Norway\u2019s International Climate & Forests Initiative (NICFI), users can access Planet\u2019s high-resolution, analysis-ready mosaics of the world\u2019s tropics in order to help reduce and reverse the loss of tropical forests, combat climate change, conserve biodiversity, and facilitate sustainable development.\n\nIn support of NICFI\u2019s mission, you can use this data for a number of projects including, but not limited to:\n\n* Advance scientific research about the world\u2019s tropical forests and the critical services they provide.\n* Implement and improve policies for sustainable forest management and land use in developing tropical forest countries and jurisdictions.\n* Increase transparency and accountability in the tropics.\n* Protect and improve the rights of indigenous peoples and local communities in tropical forest countries.\n* Innovate solutions towards reducing pressure on forests from global commodities and financial markets.\n* In short, the primary purpose of the NICFI Program is to support reducing and reversing the loss of tropical forests, contributing to combating climate change, conserving biodiversity, contributing to forest regrowth, restoration, and enhancement, and facilitating sustainable development, all of which must be Non-Commercial Use.\n\nTo learn more about the NICFI program, including streaming and downloading basemaps, please read the [NICFI Data Program User Guide](https://assets.planet.com/docs/NICFI_UserGuidesFAQ.pdf).\n\nThis collection contains both monthly and biannual mosaics. Biannual mosaics are available from December 2015 - August 2020. Monthly mosaics are available from September 2020. The STAC items include a `planet-nicfi:cadence` field indicating the type of mosaic.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "imagery,nicfi,planet,planet-nicfi-visual,satellite,tropics", "license": "proprietary", "title": "Planet-NICFI Basemaps (Visual)", "missionStartDate": "2015-12-01T00:00:00Z"}, "gbif": {"abstract": "The [Global Biodiversity Information Facility](https://www.gbif.org) (GBIF) is an international network and data infrastructure funded by the world's governments, providing global data that document the occurrence of species. GBIF currently integrates datasets documenting over 1.6 billion species occurrences.\n\nThe GBIF occurrence dataset combines data from a wide array of sources, including specimen-related data from natural history museums, observations from citizen science networks, and automated environmental surveys. While these data are constantly changing at [GBIF.org](https://www.gbif.org), periodic snapshots are taken and made available here. \n\nData are stored in [Parquet](https://parquet.apache.org/) format; the Parquet file schema is described below. Most field names correspond to [terms from the Darwin Core standard](https://dwc.tdwg.org/terms/), and have been interpreted by GBIF's systems to align taxonomy, location, dates, etc. Additional information may be retrieved using the [GBIF API](https://www.gbif.org/developer/summary).\n\nPlease refer to the GBIF [citation guidelines](https://www.gbif.org/citation-guidelines) for information about how to cite GBIF data in publications.
For analyses using the whole dataset, please use the following citation:\n\n> GBIF.org ([Date]) GBIF Occurrence Data [DOI of dataset]\n\nFor analyses where data are significantly filtered, please track the datasetKeys used and use a \"[derived dataset](https://www.gbif.org/citation-guidelines#derivedDatasets)\" record for citing the data.\n\nThe [GBIF data blog](https://data-blog.gbif.org/categories/gbif/) contains a number of articles that can help you analyze GBIF data.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "biodiversity,gbif,species", "license": "proprietary", "title": "Global Biodiversity Information Facility (GBIF)", "missionStartDate": "2021-04-13T00:00:00Z"}, "modis-17A3HGF-061": {"abstract": "The Version 6.1 product provides information about annual Net Primary Production (NPP) at 500 meter (m) pixel resolution. Annual Moderate Resolution Imaging Spectroradiometer (MODIS) NPP is derived from the sum of all 8-day Net Photosynthesis (PSN) products (MOD17A2H) from the given year. The PSN value is the difference of the Gross Primary Productivity (GPP) and the Maintenance Respiration (MR). The product will be generated at the end of each year when the entire yearly 8-day 15A2H is available. Hence, the gap-filled product is an improved version that has cleaned the poor-quality inputs from the 8-day Leaf Area Index and Fraction of Photosynthetically Active Radiation (LAI/FPAR) based on the Quality Control (QC) label for every pixel. If any LAI/FPAR pixel did not meet the quality screening criteria, its value is determined through linear interpolation. However, users cannot get this product in near-real time because it will be generated only at the end of a given year.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod17a3hgf,modis,modis-17a3hgf-061,myd17a3hgf,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Net Primary Production Yearly Gap-Filled", "missionStartDate": "2000-02-18T00:00:00Z"}, "modis-09A1-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) 09A1 Version 6.1 product provides an estimate of the surface spectral reflectance of MODIS Bands 1 through 7 corrected for atmospheric conditions such as gasses, aerosols, and Rayleigh scattering. Along with the seven 500 meter (m) reflectance bands are two quality layers and four observation bands. For each pixel, a value is selected from all the acquisitions within the 8-day composite period. The criteria for the pixel choice include cloud and solar zenith. When several acquisitions meet the criteria the pixel with the minimum channel 3 (blue) value is used.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,imagery,mod09a1,modis,modis-09a1-061,myd09a1,nasa,reflectance,satellite,terra", "license": "proprietary", "title": "MODIS Surface Reflectance 8-Day (500m)", "missionStartDate": "2000-02-18T00:00:00Z"}, "alos-dem": {"abstract": "The \"ALOS World 3D-30m\" (AW3D30) dataset is a 30 meter resolution global digital surface model (DSM), developed by the Japan Aerospace Exploration Agency (JAXA).
AW3D30 was constructed from the Panchromatic Remote-sensing Instrument for Stereo Mapping (PRISM) on board the Advanced Land Observing Satellite (ALOS), which operated from 2006 to 2011.\n\nSee the [Product Description](https://www.eorc.jaxa.jp/ALOS/en/aw3d30/aw3d30v3.2_product_e_e1.2.pdf) for more details.\n", "instrument": "prism", "platform": null, "platformSerialIdentifier": "alos", "processingLevel": null, "keywords": "alos,alos-dem,dem,dsm,elevation,jaxa,prism", "license": "proprietary", "title": "ALOS World 3D-30m", "missionStartDate": "2016-12-07T00:00:00Z"}, "alos-palsar-mosaic": {"abstract": "Global 25 m Resolution PALSAR-2/PALSAR Mosaic (MOS)", "instrument": "PALSAR,PALSAR-2", "platform": null, "platformSerialIdentifier": "ALOS,ALOS-2", "processingLevel": null, "keywords": "alos,alos-2,alos-palsar-mosaic,global,jaxa,palsar,palsar-2,remote-sensing", "license": "proprietary", "title": "ALOS PALSAR Annual Mosaic", "missionStartDate": "2015-01-01T00:00:00Z"}, "deltares-water-availability": {"abstract": "[Deltares](https://www.deltares.nl/en/) has produced a hydrological model approach to simulate historical daily reservoir variations for 3,236 locations across the globe for the period 1970-2020 using the distributed [wflow_sbm](https://deltares.github.io/Wflow.jl/stable/model_docs/model_configurations/) model. The model outputs long-term daily information on reservoir volume, inflow and outflow dynamics, as well as information on upstream hydrological forcing.\n\nThe hydrological model was forced with 5 different precipitation products. Two products (ERA5 and CHIRPS) are available at the global scale, while for Europe, USA and Australia a regional product was used (i.e. EOBS, NLDAS and BOM, respectively). Using these different precipitation products, it becomes possible to assess the impact of uncertainty in the model forcing. A different number of basins upstream of reservoirs is simulated, depending on the spatial coverage of each precipitation product.\n\nSee the complete [methodology documentation](https://ai4edatasetspublicassets.blob.core.windows.net/assets/aod_docs/pc-deltares-water-availability-documentation.pdf) for more information.\n\n## Dataset coverages\n\n| Name | Scale | Period | Number of basins |\n|--------|--------------------------|-----------|------------------|\n| ERA5 | Global | 1967-2020 | 3236 |\n| CHIRPS | Global (+/- 50 latitude) | 1981-2020 | 2951 |\n| EOBS | Europe/North Africa | 1979-2020 | 682 |\n| NLDAS | USA | 1979-2020 | 1090 |\n| BOM | Australia | 1979-2020 | 116 |\n\n## STAC Metadata\n\nThis STAC collection includes one STAC item per dataset. The item includes a `deltares:reservoir` property that can be used to query for the URL of a specific dataset.\n\n## Contact\n\nFor questions about this dataset, contact [`aiforearthdatasets@microsoft.com`](mailto:aiforearthdatasets@microsoft.com?subject=deltares-floods%20question).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "deltares,deltares-water-availability,precipitation,reservoir,water,water-availability", "license": "CDLA-Permissive-1.0", "title": "Deltares Global Water Availability", "missionStartDate": "1970-01-01T00:00:00Z"}, "modis-16A3GF-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) MOD16A3GF Version 6.1 Evapotranspiration/Latent Heat Flux (ET/LE) product is a year-end gap-filled yearly composite dataset produced at 500 meter (m) pixel resolution.
The algorithm used for the MOD16 data product collection is based on the logic of the Penman-Monteith equation, which includes inputs of daily meteorological reanalysis data along with MODIS remotely sensed data products such as vegetation property dynamics, albedo, and land cover. The product will be generated at the end of each year when the entire yearly 8-day MOD15A2H/MYD15A2H is available. Hence, the gap-filled product is the improved 16, which has cleaned the poor-quality inputs from yearly Leaf Area Index and Fraction of Photosynthetically Active Radiation (LAI/FPAR) based on the Quality Control (QC) label for every pixel. If any LAI/FPAR pixel did not meet the quality screening criteria, its value is determined through linear interpolation. However, users cannot get this product in near-real time because it will be generated only at the end of a given year. Provided in the product are layers for composited ET, LE, Potential ET (PET), and Potential LE (PLE) along with a quality control layer. Two low resolution browse images, ET and LE, are also available for each granule. The pixel values for the two Evapotranspiration layers (ET and PET) are the sum for all days within the defined year, and the pixel values for the two Latent Heat layers (LE and PLE) are the average of all days within the defined year.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod16a3gf,modis,modis-16a3gf-061,myd16a3gf,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Net Evapotranspiration Yearly Gap-Filled", "missionStartDate": "2001-01-01T00:00:00Z"}, "modis-21A2-061": {"abstract": "A suite of Moderate Resolution Imaging Spectroradiometer (MODIS) Land Surface Temperature and Emissivity (LST&E) products are available in Collection 6.1. The MOD21 Land Surface Temperature (LST) algorithm differs from the algorithm of the MOD11 LST products, in that the MOD21 algorithm is based on the ASTER Temperature/Emissivity Separation (TES) technique, whereas the MOD11 uses the split-window technique. The MOD21 TES algorithm uses a physics-based algorithm to dynamically retrieve both the LST and spectral emissivity simultaneously from the MODIS thermal infrared bands 29, 31, and 32. The TES algorithm is combined with an improved Water Vapor Scaling (WVS) atmospheric correction scheme to stabilize the retrieval during very warm and humid conditions. This dataset is an 8-day composite LST product at 1,000 meter spatial resolution that uses an algorithm based on a simple averaging method. The algorithm calculates the average from all the cloud-free 21A1D and 21A1N daily acquisitions from the 8-day period. Unlike the 21A1 data sets where the daytime and nighttime acquisitions are separate products, the 21A2 contains both daytime and nighttime acquisitions as separate Science Dataset (SDS) layers within a single Hierarchical Data Format (HDF) file. The LST, Quality Control (QC), view zenith angle, and viewing time have separate day and night SDS layers, while the values for the MODIS emissivity bands 29, 31, and 32 are the average of both the nighttime and daytime acquisitions.
Additional details regarding the method used to create this Level 3 (L3) product are available in the Algorithm Theoretical Basis Document (ATBD).", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod21a2,modis,modis-21a2-061,myd21a2,nasa,satellite,temperature,terra", "license": "proprietary", "title": "MODIS Land Surface Temperature/3-Band Emissivity 8-Day", "missionStartDate": "2000-02-16T00:00:00Z"}, "us-census": {"abstract": "The [2020 Census](https://www.census.gov/programs-surveys/decennial-census/decade/2020/2020-census-main.html) counted every person living in the United States and the five U.S. territories. It marked the 24th census in U.S. history and the first time that households were invited to respond to the census online.\n\nThe tables included on the Planetary Computer provide information on population and geographic boundaries at various levels of cartographic aggregation.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "administrative-boundaries,demographics,population,us-census,us-census-bureau", "license": "proprietary", "title": "US Census", "missionStartDate": "2021-08-01T00:00:00Z"}, "jrc-gsw": {"abstract": "Global surface water products from the European Commission Joint Research Centre, based on Landsat 5, 7, and 8 imagery. Layers in this collection describe the occurrence, change, and seasonality of surface water from 1984-2020. Complete documentation for each layer is available in the [Data Users Guide](https://storage.cloud.google.com/global-surface-water/downloads_ancillary/DataUsersGuidev2020.pdf).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "global,jrc-gsw,landsat,water", "license": "proprietary", "title": "JRC Global Surface Water", "missionStartDate": "1984-03-01T00:00:00Z"}, "deltares-floods": {"abstract": "[Deltares](https://www.deltares.nl/en/) has produced inundation maps of flood depth using a model that takes into account water level attenuation and is forced by sea level. At the coastline, the model is forced by extreme water levels containing surge and tide from GTSMip6. The water level at the coastline is extended landwards to all areas that are hydrodynamically connected to the coast following a \u2018bathtub\u2019 like approach and calculates the flood depth as the difference between the water level and the topography. Unlike a simple 'bathtub' model, this model attenuates the water level over land with a maximum attenuation factor of 0.5\u2009m\u2009km-1. The attenuation factor simulates the dampening of the flood levels due to the roughness over land.\n\nIn its current version, the model does not account for varying roughness over land and permanent water bodies such as rivers and lakes, and it does not account for the compound effects of waves, rainfall, and river discharge on coastal flooding. It also does not include the mitigating effect of coastal flood protection. 
Flood extents must thus be interpreted as the area that is potentially exposed to flooding without coastal protection.\n\nSee the complete [methodology documentation](https://ai4edatasetspublicassets.blob.core.windows.net/assets/aod_docs/11206409-003-ZWS-0003_v0.1-Planetary-Computer-Deltares-global-flood-docs.pdf) for more information.\n\n## Digital elevation models (DEMs)\n\nThis documentation will refer to three DEMs:\n\n* `NASADEM` is the SRTM-derived [NASADEM](https://planetarycomputer.microsoft.com/dataset/nasadem) product.\n* `MERITDEM` is the [Multi-Error-Removed Improved Terrain DEM](http://hydro.iis.u-tokyo.ac.jp/~yamadai/MERIT_DEM/), derived from SRTM and AW3D.\n* `LIDAR` is the [Global LiDAR Lowland DTM (GLL_DTM_v1)](https://data.mendeley.com/datasets/v5x4vpnzds/1).\n\n## Global datasets\n\nThis collection includes multiple global flood datasets derived from three different DEMs (`NASA`, `MERIT`, and `LIDAR`) and at different resolutions. Not all DEMs have all resolutions:\n\n* `NASADEM` and `MERITDEM` are available at `90m` and `1km` resolutions\n* `LIDAR` is available at `5km` resolution\n\n## Historic event datasets\n\nThis collection also includes historical storm event data files that follow similar DEM and resolution conventions. Not all storm events are available for each DEM and resolution combination, but generally follow the format of:\n\n`events/[DEM]_[resolution]-wm_final/[storm_name]_[event_year]_masked.nc`\n\nFor example, a flood map for the MERITDEM-derived 90m flood data for the \"Omar\" storm in 2008 is available at:\n\n\n\n## Contact\n\nFor questions about this dataset, contact [`aiforearthdatasets@microsoft.com`](mailto:aiforearthdatasets@microsoft.com?subject=deltares-floods%20question).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "deltares,deltares-floods,flood,global,sea-level-rise,water", "license": "CDLA-Permissive-1.0", "title": "Deltares Global Flood Maps", "missionStartDate": "2018-01-01T00:00:00Z"}, "modis-43A4-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) MCD43A4 Version 6.1 Nadir Bidirectional Reflectance Distribution Function (BRDF)-Adjusted Reflectance (NBAR) dataset is produced daily using 16 days of Terra and Aqua MODIS data at 500 meter (m) resolution. The view angle effects are removed from the directional reflectances, resulting in a stable and consistent NBAR product. Data are temporally weighted to the ninth day which is reflected in the Julian date in the file name. Users are urged to use the band specific quality flags to isolate the highest quality full inversion results for their own science applications as described in the User Guide. The MCD43A4 provides NBAR and simplified mandatory quality layers for MODIS bands 1 through 7.
Essential quality information provided in the corresponding MCD43A2 data file should be consulted when using this product.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,imagery,mcd43a4,modis,modis-43a4-061,nasa,reflectance,satellite,terra", "license": "proprietary", "title": "MODIS Nadir BRDF-Adjusted Reflectance (NBAR) Daily", "missionStartDate": "2000-02-16T00:00:00Z"}, "modis-09Q1-061": {"abstract": "The 09Q1 Version 6.1 product provides an estimate of the surface spectral reflectance of Moderate Resolution Imaging Spectroradiometer (MODIS) Bands 1 and 2, corrected for atmospheric conditions such as gasses, aerosols, and Rayleigh scattering. Provided along with the 250 meter (m) surface reflectance bands are two quality layers. For each pixel, a value is selected from all the acquisitions within the 8-day composite period. The criteria for the pixel choice include cloud and solar zenith. When several acquisitions meet the criteria the pixel with the minimum channel 3 (blue) value is used.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,imagery,mod09q1,modis,modis-09q1-061,myd09q1,nasa,reflectance,satellite,terra", "license": "proprietary", "title": "MODIS Surface Reflectance 8-Day (250m)", "missionStartDate": "2000-02-18T00:00:00Z"}, "modis-14A1-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) Thermal Anomalies and Fire Daily Version 6.1 data are generated every eight days at 1 kilometer (km) spatial resolution as a Level 3 product. MOD14A1 contains eight consecutive days of fire data conveniently packaged into a single file. The Science Dataset (SDS) layers include the fire mask, pixel quality indicators, maximum fire radiative power (MaxFRP), and the position of the fire pixel within the scan. Each layer consists of daily per pixel information for each of the eight days of data acquisition.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,fire,global,mod14a1,modis,modis-14a1-061,myd14a1,nasa,satellite,terra", "license": "proprietary", "title": "MODIS Thermal Anomalies/Fire Daily", "missionStartDate": "2000-02-18T00:00:00Z"}, "hrea": {"abstract": "The [HREA](http://www-personal.umich.edu/~brianmin/HREA/index.html) project aims to provide open access to new indicators of electricity access and reliability across the world. Leveraging satellite imagery with computational methods, these high-resolution data provide new tools to track progress toward reliable and sustainable energy access across the world.\n\nThis dataset includes settlement-level measures of electricity access, reliability, and usage for 89 nations, derived from nightly VIIRS satellite imagery. Specifically, this dataset provides the following annual values at country-level granularity:\n\n1. **Access**: Predicted likelihood that a settlement is electrified, based on night-by-night comparisons of each settlement against matched uninhabited areas over a calendar year.\n\n2. **Reliability**: Proportion of nights a settlement is statistically brighter than matched uninhabited areas. Areas with more frequent power outages or service interruptions have lower rates.\n\n3. **Usage**: Higher levels of brightness indicate more robust usage of outdoor lighting, which is highly correlated with overall energy consumption.\n\n4. 
**Nighttime Lights**: Annual composites of VIIRS nighttime light output.\n\nFor more information and methodology, please visit the [HREA website](http://www-personal.umich.edu/~brianmin/HREA/index.html).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "electricity,hrea,viirs", "license": "CC-BY-4.0", "title": "HREA: High Resolution Electricity Access", "missionStartDate": "2012-12-31T00:00:00Z"}, "modis-13Q1-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) Vegetation Indices Version 6.1 data are generated every 16 days at 250 meter (m) spatial resolution as a Level 3 product. The MOD13Q1 product provides two primary vegetation layers. The first is the Normalized Difference Vegetation Index (NDVI) which is referred to as the continuity index to the existing National Oceanic and Atmospheric Administration-Advanced Very High Resolution Radiometer (NOAA-AVHRR) derived NDVI. The second vegetation layer is the Enhanced Vegetation Index (EVI), which has improved sensitivity over high biomass regions. The algorithm chooses the best available pixel value from all the acquisitions from the 16 day period. The criteria used are low clouds, low view angle, and the highest NDVI/EVI value. Along with the vegetation layers and the two quality layers, the HDF file will have MODIS reflectance bands 1 (red), 2 (near-infrared), 3 (blue), and 7 (mid-infrared), as well as four observation layers.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod13q1,modis,modis-13q1-061,myd13q1,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Vegetation Indices 16-Day (250m)", "missionStartDate": "2000-02-18T00:00:00Z"}, "modis-14A2-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) Thermal Anomalies and Fire 8-Day Version 6.1 data are generated at 1 kilometer (km) spatial resolution as a Level 3 product. The MOD14A2 gridded composite contains the maximum value of the individual fire pixel classes detected during the eight days of acquisition. The Science Dataset (SDS) layers include the fire mask and pixel quality indicators.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,fire,global,mod14a2,modis,modis-14a2-061,myd14a2,nasa,satellite,terra", "license": "proprietary", "title": "MODIS Thermal Anomalies/Fire 8-Day", "missionStartDate": "2000-02-18T00:00:00Z"}, "sentinel-2-l2a": {"abstract": "The [Sentinel-2](https://sentinel.esa.int/web/sentinel/missions/sentinel-2) program provides global imagery in thirteen spectral bands at 10m-60m resolution and a revisit time of approximately five days.
This dataset represents the global Sentinel-2 archive, from 2016 to the present, processed to L2A (bottom-of-atmosphere) using [Sen2Cor](https://step.esa.int/main/snap-supported-plugins/sen2cor/) and converted to [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.", "instrument": "msi", "platform": "sentinel-2", "platformSerialIdentifier": "Sentinel-2A,Sentinel-2B", "processingLevel": null, "keywords": "copernicus,esa,global,imagery,msi,reflectance,satellite,sentinel,sentinel-2,sentinel-2-l2a,sentinel-2a,sentinel-2b", "license": "proprietary", "title": "Sentinel-2 Level-2A", "missionStartDate": "2015-06-27T10:25:31Z"}, "modis-15A2H-061": {"abstract": "The Version 6.1 Moderate Resolution Imaging Spectroradiometer (MODIS) Level 4, Combined Fraction of Photosynthetically Active Radiation (FPAR), and Leaf Area Index (LAI) product is an 8-day composite dataset with 500 meter pixel size. The algorithm chooses the best pixel available from within the 8-day period. LAI is defined as the one-sided green leaf area per unit ground area in broadleaf canopies and as one-half the total needle surface area per unit ground area in coniferous canopies. FPAR is defined as the fraction of incident photosynthetically active radiation (400-700 nm) absorbed by the green elements of a vegetation canopy.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mcd15a2h,mod15a2h,modis,modis-15a2h-061,myd15a2h,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Leaf Area Index/FPAR 8-Day", "missionStartDate": "2002-07-04T00:00:00Z"}, "modis-11A1-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) Land Surface Temperature/Emissivity Daily Version 6.1 product provides daily per-pixel Land Surface Temperature and Emissivity (LST&E) with 1 kilometer (km) spatial resolution in a 1,200 by 1,200 km grid. The pixel temperature value is derived from the MOD11_L2 swath product. Above 30 degrees latitude, some pixels may have multiple observations where the criteria for clear-sky are met. When this occurs, the pixel value is a result of the average of all qualifying observations. Provided along with the daytime and nighttime surface temperature bands are associated quality control assessments, observation times, view zenith angles, and clear-sky coverages along with bands 31 and 32 emissivities from land cover types", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod11a1,modis,modis-11a1-061,myd11a1,nasa,satellite,temperature,terra", "license": "proprietary", "title": "MODIS Land Surface Temperature/Emissivity Daily", "missionStartDate": "2000-02-24T00:00:00Z"}, "modis-15A3H-061": {"abstract": "The MCD15A3H Version 6.1 Moderate Resolution Imaging Spectroradiometer (MODIS) Level 4, Combined Fraction of Photosynthetically Active Radiation (FPAR), and Leaf Area Index (LAI) product is a 4-day composite data set with 500 meter pixel size. The algorithm chooses the best pixel available from all the acquisitions of both MODIS sensors located on NASA's Terra and Aqua satellites from within the 4-day period. LAI is defined as the one-sided green leaf area per unit ground area in broadleaf canopies and as one-half the total needle surface area per unit ground area in coniferous canopies. 
FPAR is defined as the fraction of incident photosynthetically active radiation (400-700 nm) absorbed by the green elements of a vegetation canopy.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mcd15a3h,modis,modis-15a3h-061,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Leaf Area Index/FPAR 4-Day", "missionStartDate": "2002-07-04T00:00:00Z"}, "modis-13A1-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) Vegetation Indices 16-Day Version 6.1 product provides Vegetation Index (VI) values at a per pixel basis at 500 meter (m) spatial resolution. There are two primary vegetation layers. The first is the Normalized Difference Vegetation Index (NDVI), which is referred to as the continuity index to the existing National Oceanic and Atmospheric Administration-Advanced Very High Resolution Radiometer (NOAA-AVHRR) derived NDVI. The second vegetation layer is the Enhanced Vegetation Index (EVI), which has improved sensitivity over high biomass regions. The algorithm for this product chooses the best available pixel value from all the acquisitions from the 16 day period. The criteria used are low clouds, low view angle, and the highest NDVI/EVI value. Provided along with the vegetation layers and two quality assurance (QA) layers are reflectance bands 1 (red), 2 (near-infrared), 3 (blue), and 7 (mid-infrared), as well as four observation layers.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod13a1,modis,modis-13a1-061,myd13a1,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Vegetation Indices 16-Day (500m)", "missionStartDate": "2000-02-18T00:00:00Z"}, "daymet-daily-na": {"abstract": "Gridded estimates of daily weather parameters. [Daymet](https://daymet.ornl.gov) Version 4 variables include the following parameters: minimum temperature, maximum temperature, precipitation, shortwave radiation, vapor pressure, snow water equivalent, and day length.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1840](https://doi.org/10.3334/ORNLDAAC/1840) to cite your usage of the data.\n\nThis dataset provides coverage for North America; Hawaii and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#daily).\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "daymet,daymet-daily-na,north-america,precipitation,temperature,vapor-pressure,weather", "license": "proprietary", "title": "Daymet Daily North America", "missionStartDate": "1980-01-01T12:00:00Z"}, "nrcan-landcover": {"abstract": "Collection of Land Cover products for Canada as produced by Natural Resources Canada using Landsat satellite imagery.
This collection of cartographic products offers classified Land Cover of Canada at a 30 metre scale, updated on a 5-year basis.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "canada,land-cover,landsat,north-america,nrcan-landcover,remote-sensing", "license": "OGL-Canada-2.0", "title": "Land Cover of Canada", "missionStartDate": "2015-01-01T00:00:00Z"}, "modis-10A2-061": {"abstract": "This global Level-3 (L3) data set provides the maximum snow cover extent observed over an eight-day period within 10degx10deg MODIS sinusoidal grid tiles. Tiles are generated by compositing 500 m observations from the 'MODIS Snow Cover Daily L3 Global 500m Grid' data set. A bit flag index is used to track the eight-day snow/no-snow chronology for each 500 m cell.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod10a2,modis,modis-10a2-061,myd10a2,nasa,satellite,snow,terra", "license": "proprietary", "title": "MODIS Snow Cover 8-day", "missionStartDate": "2000-02-18T00:00:00Z"}, "ecmwf-forecast": {"abstract": "The [ECMWF catalog of real-time products](https://www.ecmwf.int/en/forecasts/datasets/catalogue-ecmwf-real-time-products) offers real-time meteorological and oceanographic products from the ECMWF forecast system. Users should consult the [ECMWF Forecast User Guide](https://confluence.ecmwf.int/display/FUG/1+Introduction) for detailed information on each of the products.\n\n## Overview of products\n\nThe following diagram shows the publishing schedule of the various products.\n\nThe vertical axis shows the various products, defined below, which are grouped by combinations of `stream`, `forecast type`, and `reference time`. The horizontal axis shows *forecast times* in 3-hour intervals out from the reference time. A black square over a particular forecast time, or step, indicates that a forecast is made for that forecast time, for that particular `stream`, `forecast type`, `reference time` combination.\n\n* **stream** is the forecasting system that produced the data. The values are available in the `ecmwf:stream` summary of the STAC collection. They are:\n * `enfo`: [ensemble forecast](https://confluence.ecmwf.int/display/FUG/ENS+-+Ensemble+Forecasts), atmospheric fields\n * `mmsf`: [multi-model seasonal forecasts](https://confluence.ecmwf.int/display/FUG/Long-Range+%28Seasonal%29+Forecast) fields from the ECMWF model only.\n * `oper`: [high-resolution forecast](https://confluence.ecmwf.int/display/FUG/HRES+-+High-Resolution+Forecast), atmospheric fields\n * `scda`: short cut-off high-resolution forecast, atmospheric fields (also known as \"high-frequency products\")\n * `scwv`: short cut-off high-resolution forecast, ocean wave fields (also known as \"high-frequency products\")\n * `waef`: [ensemble forecast](https://confluence.ecmwf.int/display/FUG/ENS+-+Ensemble+Forecasts), ocean wave fields\n * `wave`: wave model\n* **type** is the forecast type. The values are available in the `ecmwf:type` summary of the STAC collection. They are:\n * `fc`: forecast\n * `ef`: ensemble forecast\n * `ep`: ensemble probabilities\n * `tf`: trajectory forecast for tropical cyclone tracks\n* **reference time** is the hours after midnight when the model was run.
Each stream / type will produce assets for different forecast times (steps from the reference datetime) depending on the reference time.\n\nVisit the [ECMWF's User Guide](https://confluence.ecmwf.int/display/UDOC/ECMWF+Open+Data+-+Real+Time) for more details on each of the various products.\n\nAssets are available for the previous 30 days.\n\n## Asset overview\n\nThe data are provided as [GRIB2 files](https://confluence.ecmwf.int/display/CKB/What+are+GRIB+files+and+how+can+I+read+them).\nAdditionally, [index files](https://confluence.ecmwf.int/display/UDOC/ECMWF+Open+Data+-+Real+Time#ECMWFOpenDataRealTime-IndexFilesIndexfiles) are provided, which can be used to read subsets of the data from Azure Blob Storage.\n\nWithin each `stream`, `forecast type`, `reference time`, the structure of the data is mostly consistent. Each GRIB2 file will have the\nsame data variables and coordinates (aside from `time` as the *reference time* changes and `step` as the *forecast time* changes). The exception\nis the `enfo-ep` and `waef-ep` products, which have more `step`s in the 240-hour forecast than in the 360-hour forecast. \n\nSee the example notebook for more on how to access the data.\n\n## STAC metadata\n\nThe Planetary Computer provides a single STAC item per GRIB2 file. Each GRIB2 file is global in extent, so every item has the same\n`bbox` and `geometry`.\n\nA few custom properties are available on each STAC item, which can be used in searches to narrow down the data to items of interest:\n\n* `ecmwf:stream`: The forecasting system (see above for definitions). The full set of values is available in the Collection's summaries.\n* `ecmwf:type`: The forecast type (see above for definitions). The full set of values is available in the Collection's summaries.\n* `ecmwf:step`: The offset from the reference datetime, expressed as `<hours>h`, for example `\"3h\"` means \"3 hours from the reference datetime\". \n* `ecmwf:reference_datetime`: The datetime when the model was run. This indicates when the forecast *was made*, rather than the time it's valid for.\n* `ecmwf:forecast_datetime`: The datetime for which the forecast is valid. This is also set as the item's `datetime`.\n\nSee the example notebook for more on how to use the STAC metadata to query for particular data.\n\n## Attribution\n\nThe products listed and described on this page are available to the public and their use is governed by the [Creative Commons CC-4.0-BY license and the ECMWF Terms of Use](https://apps.ecmwf.int/datasets/licences/general/). This means that the data may be redistributed and used commercially, subject to appropriate attribution.\n\nThe following wording should be attached to the use of this ECMWF dataset: \n\n1. Copyright statement: Copyright \"\u00a9 [year] European Centre for Medium-Range Weather Forecasts (ECMWF)\".\n2. Source [www.ecmwf.int](http://www.ecmwf.int/)\n3. License Statement: This data is published under a Creative Commons Attribution 4.0 International (CC BY 4.0). [https://creativecommons.org/licenses/by/4.0/](https://creativecommons.org/licenses/by/4.0/)\n4. Disclaimer: ECMWF does not accept any liability whatsoever for any error or omission in the data, their availability, or for any loss or damage arising from their use.\n5. Where applicable, an indication if the material has been modified and an indication of previous modifications.\n\nThe following wording shall be attached to services created with this ECMWF dataset:\n\n1.
Copyright statement: Copyright \"This service is based on data and products of the European Centre for Medium-Range Weather Forecasts (ECMWF)\".\n2. Source www.ecmwf.int\n3. License Statement: This ECMWF data is published under a Creative Commons Attribution 4.0 International (CC BY 4.0). [https://creativecommons.org/licenses/by/4.0/](https://creativecommons.org/licenses/by/4.0/)\n4. Disclaimer: ECMWF does not accept any liability whatsoever for any error or omission in the data, their availability, or for any loss or damage arising from their use.\n5. Where applicable, an indication if the material has been modified and an indication of previous modifications\n\n## More information\n\nFor more, see the [ECMWF's User Guide](https://confluence.ecmwf.int/display/UDOC/ECMWF+Open+Data+-+Real+Time) and [example notebooks](https://github.com/ecmwf/notebook-examples/tree/master/opencharts).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "ecmwf,ecmwf-forecast,forecast,weather", "license": "CC-BY-4.0", "title": "ECMWF Open Data (real-time)", "missionStartDate": null}, "noaa-mrms-qpe-24h-pass2": {"abstract": "The [Multi-Radar Multi-Sensor (MRMS) Quantitative Precipitation Estimation (QPE)](https://www.nssl.noaa.gov/projects/mrms/) products are seamless 1-km mosaics of precipitation accumulation covering the continental United States, Alaska, Hawaii, the Caribbean, and Guam. The products are automatically generated through integration of data from multiple radars and radar networks, surface and satellite observations, numerical weather prediction (NWP) models, and climatology. The products are updated hourly at the top of the hour.\n\nMRMS QPE is available as a \"Pass 1\" or \"Pass 2\" product. The Pass 1 product is available with a 60-minute latency and includes 60-65% of gauges. The Pass 2 product has a higher latency of 120 minutes, but includes 99% of gauges. The Pass 1 and Pass 2 products are broken into 1-, 3-, 6-, 12-, 24-, 48-, and 72-hour accumulation sub-products.\n\nThis Collection contains the **24-Hour Pass 2** sub-product, i.e., 24-hour cumulative precipitation accumulation with a 2-hour latency. The data are available in [Cloud Optimized GeoTIFF](https://www.cogeo.org/) format as well as the original source GRIB2 format files. The GRIB2 files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "caribbean,guam,mrms,noaa,noaa-mrms-qpe-24h-pass2,precipitation,qpe,united-states,weather", "license": "proprietary", "title": "NOAA MRMS QPE 24-Hour Pass 2", "missionStartDate": "2022-07-21T20:00:00Z"}, "sentinel-1-grd": {"abstract": "The [Sentinel-1](https://sentinel.esa.int/web/sentinel/missions/sentinel-1) mission is a constellation of two polar-orbiting satellites, operating day and night performing C-band synthetic aperture radar imaging. The Level-1 Ground Range Detected (GRD) products in this Collection consist of focused SAR data that has been detected, multi-looked and projected to ground range using the Earth ellipsoid model WGS84. The ellipsoid projection of the GRD products is corrected using the terrain height specified in the product general annotation. 
The terrain height used varies in azimuth but is constant in range (but can be different for each IW/EW sub-swath).\n\nGround range coordinates are the slant range coordinates projected onto the ellipsoid of the Earth. Pixel values represent detected amplitude. Phase information is lost. The resulting product has approximately square resolution pixels and square pixel spacing with reduced speckle at a cost of reduced spatial resolution.\n\nFor the IW and EW GRD products, multi-looking is performed on each burst individually. All bursts in all sub-swaths are then seamlessly merged to form a single, contiguous, ground range, detected image per polarization.\n\nFor more information see the [ESA documentation](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/product-types-processing-levels/level-1).\n\n### Terrain Correction\n\nUsers might want to geometrically or radiometrically terrain correct the Sentinel-1 GRD data from this collection. The [Sentinel-1-RTC](https://planetarycomputer.microsoft.com/dataset/sentinel-1-rtc) collection is a global radiometrically terrain corrected dataset derived from Sentinel-1 GRD. Additionally, users can terrain-correct on the fly using [any DEM available on the Planetary Computer](https://planetarycomputer.microsoft.com/catalog?tags=DEM). See [Customizable radiometric terrain correction](https://planetarycomputer.microsoft.com/docs/tutorials/customizable-rtc-sentinel1/) for more.", "instrument": null, "platform": "Sentinel-1", "platformSerialIdentifier": "SENTINEL-1A,SENTINEL-1B", "processingLevel": null, "keywords": "c-band,copernicus,esa,grd,sar,sentinel,sentinel-1,sentinel-1-grd,sentinel-1a,sentinel-1b", "license": "proprietary", "title": "Sentinel 1 Level-1 Ground Range Detected (GRD)", "missionStartDate": "2014-10-10T00:28:21Z"}, "nasadem": {"abstract": "[NASADEM](https://earthdata.nasa.gov/esds/competitive-programs/measures/nasadem) provides global topographic data at 1 arc-second (~30m) horizontal resolution, derived primarily from data captured via the [Shuttle Radar Topography Mission](https://www2.jpl.nasa.gov/srtm/) (SRTM).\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "dem,elevation,jpl,nasa,nasadem,nga,srtm,usgs", "license": "proprietary", "title": "NASADEM HGT v001", "missionStartDate": "2000-02-20T00:00:00Z"}, "io-lulc": {"abstract": "__Note__: _A new version of this item is available for your use. This mature version of the map remains available for use in existing applications. This item will be retired in December 2024. There is 2020 data available in the newer [9-class dataset](https://planetarycomputer.microsoft.com/dataset/io-lulc-9-class)._\n\nGlobal estimates of 10-class land use/land cover (LULC) for 2020, derived from ESA Sentinel-2 imagery at 10m resolution. This dataset was generated by [Impact Observatory](http://impactobservatory.com/), who used billions of human-labeled pixels (curated by the National Geographic Society) to train a deep learning model for land classification.
The global map was produced by applying this model to the relevant yearly Sentinel-2 scenes on the Planetary Computer.\n\nThis dataset is also available on the [ArcGIS Living Atlas of the World](https://livingatlas.arcgis.com/landcover/).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "global,io-lulc,land-cover,land-use,sentinel", "license": "CC-BY-4.0", "title": "Esri 10-Meter Land Cover (10-class)", "missionStartDate": "2017-01-01T00:00:00Z"}, "landsat-c2-l1": {"abstract": "Landsat Collection 2 Level-1 data, consisting of quantized and calibrated scaled Digital Numbers (DN) representing the multispectral image data. These [Level-1](https://www.usgs.gov/landsat-missions/landsat-collection-2-level-1-data) data can be [rescaled](https://www.usgs.gov/landsat-missions/using-usgs-landsat-level-1-data-product) to top of atmosphere (TOA) reflectance and/or radiance. Thermal band data can be rescaled to TOA brightness temperature.\n\nThis dataset represents the global archive of Level-1 data from [Landsat Collection 2](https://www.usgs.gov/core-science-systems/nli/landsat/landsat-collection-2) acquired by the [Multispectral Scanner System](https://landsat.gsfc.nasa.gov/multispectral-scanner-system/) onboard Landsat 1 through Landsat 5 from July 7, 1972 to January 7, 2013. Images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\n", "instrument": "mss", "platform": null, "platformSerialIdentifier": "landsat-1,landsat-2,landsat-3,landsat-4,landsat-5", "processingLevel": null, "keywords": "global,imagery,landsat,landsat-1,landsat-2,landsat-3,landsat-4,landsat-5,landsat-c2-l1,mss,nasa,satellite,usgs", "license": "proprietary", "title": "Landsat Collection 2 Level-1", "missionStartDate": "1972-07-25T00:00:00Z"}, "drcog-lulc": {"abstract": "The [Denver Regional Council of Governments (DRCOG) Land Use/Land Cover (LULC)](https://drcog.org/services-and-resources/data-maps-and-modeling/regional-land-use-land-cover-project) datasets are developed in partnership with the [Babbitt Center for Land and Water Policy](https://www.lincolninst.edu/our-work/babbitt-center-land-water-policy) and the [Chesapeake Conservancy](https://www.chesapeakeconservancy.org/)'s Conservation Innovation Center (CIC). DRCOG LULC includes 2018 data at 3.28ft (1m) resolution covering 1,000 square miles and 2020 data at 1ft resolution covering 6,000 square miles of the Denver, Colorado region. The classification data is derived from the USDA's 1m National Agricultural Imagery Program (NAIP) aerial imagery and leaf-off aerial ortho-imagery captured as part of the [Denver Regional Aerial Photography Project](https://drcog.org/services-and-resources/data-maps-and-modeling/denver-regional-aerial-photography-project) (6in resolution everywhere except the mountainous regions to the west, which are 1ft resolution).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "drcog-lulc,land-cover,land-use,naip,usda", "license": "proprietary", "title": "Denver Regional Council of Governments Land Use Land Cover", "missionStartDate": "2018-01-01T00:00:00Z"}, "chesapeake-lc-7": {"abstract": "A high-resolution 1-meter [land cover data product](https://www.chesapeakeconservancy.org/conservation-innovation-center/high-resolution-data/land-cover-data-project/) in raster format for the entire Chesapeake Bay watershed based on 2013-2014 imagery from the National Agriculture Imagery Program (NAIP).
The product area encompasses over 250,000 square kilometers in New York, Pennsylvania, Maryland, Delaware, West Virginia, Virginia, and the District of Columbia. The dataset was created by the [Chesapeake Conservancy](https://www.chesapeakeconservancy.org/) [Conservation Innovation Center](https://www.chesapeakeconservancy.org/conservation-innovation-center/) for the [Chesapeake Bay Program](https://www.chesapeakebay.net/), which is a regional partnership of EPA, other federal, state, and local agencies and governments, nonprofits, and academic institutions, that leads and directs Chesapeake Bay restoration efforts. \n\nThe dataset is composed of a uniform set of 7 land cover classes. Additional information is available in a [User Guide](https://www.chesapeakeconservancy.org/wp-content/uploads/2020/06/Chesapeake_Conservancy_LandCover101Guide_June2020.pdf). Images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "chesapeake-bay-watershed,chesapeake-conservancy,chesapeake-lc-7,land-cover", "license": "proprietary", "title": "Chesapeake Land Cover (7-class)", "missionStartDate": "2013-01-01T00:00:00Z"}, "chesapeake-lc-13": {"abstract": "A high-resolution 1-meter [land cover data product](https://www.chesapeakeconservancy.org/conservation-innovation-center/high-resolution-data/land-cover-data-project/) in raster format for the entire Chesapeake Bay watershed based on 2013-2014 imagery from the National Agriculture Imagery Program (NAIP). The product area encompasses over 250,000 square kilometers in New York, Pennsylvania, Maryland, Delaware, West Virginia, Virginia, and the District of Columbia. The dataset was created by the [Chesapeake Conservancy](https://www.chesapeakeconservancy.org/) [Conservation Innovation Center](https://www.chesapeakeconservancy.org/conservation-innovation-center/) for the [Chesapeake Bay Program](https://www.chesapeakebay.net/), which is a regional partnership of EPA, other federal, state, and local agencies and governments, nonprofits, and academic institutions, that leads and directs Chesapeake Bay restoration efforts. \n\nThe dataset is composed of 13 land cover classes, although not all classes are used in all areas. Additional information is available in a [User Guide](https://www.chesapeakeconservancy.org/wp-content/uploads/2020/06/Chesapeake_Conservancy_LandCover101Guide_June2020.pdf) and [Class Description](https://www.chesapeakeconservancy.org/wp-content/uploads/2020/03/LC_Class_Descriptions.pdf) document. Images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "chesapeake-bay-watershed,chesapeake-conservancy,chesapeake-lc-13,land-cover", "license": "proprietary", "title": "Chesapeake Land Cover (13-class)", "missionStartDate": "2013-01-01T00:00:00Z"}, "chesapeake-lu": {"abstract": "A high-resolution 1-meter [land use data product](https://www.chesapeakeconservancy.org/conservation-innovation-center/high-resolution-data/land-use-data-project/) in raster format for the entire Chesapeake Bay watershed. 
The dataset was created by modifying the 2013-2014 high-resolution [land cover dataset](https://www.chesapeakeconservancy.org/conservation-innovation-center/high-resolution-data/land-cover-data-project/) using 13 ancillary datasets including data on zoning, land use, parcel boundaries, landfills, floodplains, and wetlands. The product area encompasses over 250,000 square kilometers in New York, Pennsylvania, Maryland, Delaware, West Virginia, Virginia, and the District of Columbia. The dataset was created by the [Chesapeake Conservancy](https://www.chesapeakeconservancy.org/) [Conservation Innovation Center](https://www.chesapeakeconservancy.org/conservation-innovation-center/) for the [Chesapeake Bay Program](https://www.chesapeakebay.net/), which is a regional partnership of EPA, other federal, state, and local agencies and governments, nonprofits, and academic institutions that leads and directs Chesapeake Bay restoration efforts.\n\nThe dataset is composed of 17 land use classes in Virginia and 16 classes in all other jurisdictions. Additional information is available in a land use [Class Description](https://www.chesapeakeconservancy.org/wp-content/uploads/2018/11/2013-Phase-6-Mapped-Land-Use-Definitions-Updated-PC-11302018.pdf) document. Images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "chesapeake-bay-watershed,chesapeake-conservancy,chesapeake-lu,land-use", "license": "proprietary", "title": "Chesapeake Land Use", "missionStartDate": "2013-01-01T00:00:00Z"}, "noaa-mrms-qpe-1h-pass1": {"abstract": "The [Multi-Radar Multi-Sensor (MRMS) Quantitative Precipitation Estimation (QPE)](https://www.nssl.noaa.gov/projects/mrms/) products are seamless 1-km mosaics of precipitation accumulation covering the continental United States, Alaska, Hawaii, the Caribbean, and Guam. The products are automatically generated through integration of data from multiple radars and radar networks, surface and satellite observations, numerical weather prediction (NWP) models, and climatology. The products are updated hourly at the top of the hour.\n\nMRMS QPE is available as a \"Pass 1\" or \"Pass 2\" product. The Pass 1 product is available with a 60-minute latency and includes 60-65% of gauges. The Pass 2 product has a higher latency of 120 minutes, but includes 99% of gauges. The Pass 1 and Pass 2 products are broken into 1-, 3-, 6-, 12-, 24-, 48-, and 72-hour accumulation sub-products.\n\nThis Collection contains the **1-Hour Pass 1** sub-product, i.e., 1-hour cumulative precipitation accumulation with a 1-hour latency. The data are available in [Cloud Optimized GeoTIFF](https://www.cogeo.org/) format as well as the original source GRIB2 format files. 
The GRIB2 files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "caribbean,guam,mrms,noaa,noaa-mrms-qpe-1h-pass1,precipitation,qpe,united-states,weather", "license": "proprietary", "title": "NOAA MRMS QPE 1-Hour Pass 1", "missionStartDate": "2022-07-21T20:00:00Z"}, "noaa-mrms-qpe-1h-pass2": {"abstract": "The [Multi-Radar Multi-Sensor (MRMS) Quantitative Precipitation Estimation (QPE)](https://www.nssl.noaa.gov/projects/mrms/) products are seamless 1-km mosaics of precipitation accumulation covering the continental United States, Alaska, Hawaii, the Caribbean, and Guam. The products are automatically generated through integration of data from multiple radars and radar networks, surface and satellite observations, numerical weather prediction (NWP) models, and climatology. The products are updated hourly at the top of the hour.\n\nMRMS QPE is available as a \"Pass 1\" or \"Pass 2\" product. The Pass 1 product is available with a 60-minute latency and includes 60-65% of gauges. The Pass 2 product has a higher latency of 120 minutes, but includes 99% of gauges. The Pass 1 and Pass 2 products are broken into 1-, 3-, 6-, 12-, 24-, 48-, and 72-hour accumulation sub-products.\n\nThis Collection contains the **1-Hour Pass 2** sub-product, i.e., 1-hour cumulative precipitation accumulation with a 2-hour latency. The data are available in [Cloud Optimized GeoTIFF](https://www.cogeo.org/) format as well as the original source GRIB2 format files. The GRIB2 files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "caribbean,guam,mrms,noaa,noaa-mrms-qpe-1h-pass2,precipitation,qpe,united-states,weather", "license": "proprietary", "title": "NOAA MRMS QPE 1-Hour Pass 2", "missionStartDate": "2022-07-21T20:00:00Z"}, "noaa-nclimgrid-monthly": {"abstract": "The [NOAA U.S. Climate Gridded Dataset (NClimGrid)](https://www.ncei.noaa.gov/access/metadata/landing-page/bin/iso?id=gov.noaa.ncdc:C00332) consists of four climate variables derived from the [Global Historical Climatology Network daily (GHCNd)](https://www.ncei.noaa.gov/products/land-based-station/global-historical-climatology-network-daily) dataset: maximum temperature, minimum temperature, average temperature, and precipitation. The data is provided in 1/24 degree lat/lon (nominal 5x5 kilometer) grids for the Continental United States (CONUS). \n\nNClimGrid data is available in monthly and daily temporal intervals, with the daily data further differentiated as \"prelim\" (preliminary) or \"scaled\". Preliminary daily data is available within approximately three days of collection. Once a calendar month of preliminary daily data has been collected, it is scaled to match the corresponding monthly value. Monthly data is available from 1895 to the present. Daily preliminary and daily scaled data is available from 1951 to the present. \n\nThis Collection contains **Monthly** data. See the journal publication [\"Improved Historical Temperature and Precipitation Time Series for U.S. 
Climate Divisions\"](https://journals.ametsoc.org/view/journals/apme/53/5/jamc-d-13-0248.1.xml) for more information about monthly gridded data.\n\nUsers of all NClimGrid data product should be aware that [NOAA advertises](https://www.ncei.noaa.gov/access/metadata/landing-page/bin/iso?id=gov.noaa.ncdc:C00332) that:\n>\"On an annual basis, approximately one year of 'final' NClimGrid data is submitted to replace the initially supplied 'preliminary' data for the same time period. Users should be sure to ascertain which level of data is required for their research.\"\n\nThe source NetCDF files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n\n*Note*: The Planetary Computer currently has STAC metadata for just the monthly collection. We'll have STAC metadata for daily data in our next release. In the meantime, you can access the daily NetCDF data directly from Blob Storage using the storage container at `https://nclimgridwesteurope.blob.core.windows.net/nclimgrid`. See https://planetarycomputer.microsoft.com/docs/concepts/data-catalog/#access-patterns for more.*\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,nclimgrid,noaa,noaa-nclimgrid-monthly,precipitation,temperature,united-states", "license": "proprietary", "title": "Monthly NOAA U.S. Climate Gridded Dataset (NClimGrid)", "missionStartDate": "1895-01-01T00:00:00Z"}, "goes-glm": {"abstract": "The [Geostationary Lightning Mapper (GLM)](https://www.goes-r.gov/spacesegment/glm.html) is a single-channel, near-infrared optical transient detector that can detect the momentary changes in an optical scene, indicating the presence of lightning. GLM measures total lightning (in-cloud, cloud-to-cloud and cloud-to-ground) activity continuously over the Americas and adjacent ocean regions with near-uniform spatial resolution of approximately 10 km. GLM collects information such as the frequency, location and extent of lightning discharges to identify intensifying thunderstorms and tropical cyclones. Trends in total lightning available from the GLM provide critical information to forecasters, allowing them to focus on developing severe storms much earlier and before these storms produce damaging winds, hail or even tornadoes.\n\nThe GLM data product consists of a hierarchy of earth-located lightning radiant energy measures including events, groups, and flashes:\n\n- Lightning events are detected by the instrument.\n- Lightning groups are a collection of one or more lightning events that satisfy temporal and spatial coincidence thresholds.\n- Similarly, lightning flashes are a collection of one or more lightning groups that satisfy temporal and spatial coincidence thresholds.\n\nThe product includes the relationship among lightning events, groups, and flashes, and the area coverage of lightning groups and flashes. The product also includes processing and data quality metadata, and satellite state and location information. \n\nThis Collection contains GLM L2 data in tabular ([GeoParquet](https://github.com/opengeospatial/geoparquet)) format and the original source NetCDF format. 
The NetCDF files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).", "instrument": "FM1,FM2", "platform": "GOES", "platformSerialIdentifier": "GOES-16,GOES-17", "processingLevel": ["L2"], "keywords": "fm1,fm2,goes,goes-16,goes-17,goes-glm,l2,lightning,nasa,noaa,satellite,weather", "license": "proprietary", "title": "GOES-R Lightning Detection", "missionStartDate": "2018-02-13T16:10:00Z"}, "usda-cdl": {"abstract": "The Cropland Data Layer (CDL) is a product of the USDA National Agricultural Statistics Service (NASS) with the mission \"to provide timely, accurate and useful statistics in service to U.S. agriculture\" (Johnson and Mueller, 2010, p. 1204). The CDL is a crop-specific land cover classification product of more than 100 crop categories grown in the United States. CDLs are derived using a supervised land cover classification of satellite imagery. The supervised classification relies on first manually identifying pixels within certain images, often called training sites, which represent the same crop or land cover type. Using these training sites, a spectral signature is developed for each crop type that is then used by the analysis software to identify all other pixels in the satellite image representing the same crop. Using this method, a new CDL is compiled annually and released to the public a few months after the end of the growing season.\n\nThis collection includes Cropland, Confidence, Cultivated, and Frequency products.\n\n- Cropland: Crop-specific land cover data created annually.\n- Confidence: The predicted confidence associated with an output pixel. A value of zero indicates low confidence, while a value of 100 indicates high confidence.\n- Cultivated: Cultivated and non-cultivated land cover for CONUS based on land cover information derived from the 2017 through 2021 Cropland products.\n- Frequency: Crop-specific planting frequency based on land cover information derived from the 2008 through 2021 Cropland products. There are currently four individual crop frequency data layers that represent four major crops: corn, cotton, soybeans, and wheat.\n\nFor more, visit the [Cropland Data Layer homepage](https://www.nass.usda.gov/Research_and_Science/Cropland/SARS1a.php).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "agriculture,land-cover,land-use,united-states,usda,usda-cdl", "license": "proprietary", "title": "USDA Cropland Data Layers (CDLs)", "missionStartDate": "2008-01-01T00:00:00Z"}, "eclipse": {"abstract": "The [Project Eclipse](https://www.microsoft.com/en-us/research/project/project-eclipse/) Network is a low-cost air quality sensing network for cities and a research project led by the [Urban Innovation Group](https://www.microsoft.com/en-us/research/urban-innovation-research/) at Microsoft Research.\n\nProject Eclipse currently includes over 100 locations in Chicago, Illinois, USA.\n\nThis network was deployed starting in July 2021, through a collaboration with the City of Chicago, the Array of Things Project, JCDecaux Chicago, and the Environmental Law and Policy Center as well as local environmental justice organizations in the city.
[This talk](https://www.microsoft.com/en-us/research/video/technology-demo-project-eclipse-hyperlocal-air-quality-monitoring-for-cities/) documents the network design and data calibration strategy.\n\n## Storage resources\n\nData are stored in [Parquet](https://parquet.apache.org/) files in Azure Blob Storage in the West Europe Azure region, in the following blob container:\n\n`https://ai4edataeuwest.blob.core.windows.net/eclipse`\n\nWithin that container, the periodic occurrence snapshots are stored in `Chicago/YYYY-MM-DD`, where `YYYY-MM-DD` corresponds to the date of the snapshot.\nEach snapshot contains sensor readings for the next 7 days in Parquet format, starting from the date in the folder name (YYYY-MM-DD).\nTherefore, the data files for the first snapshot are at\n\n`https://ai4edataeuwest.blob.core.windows.net/eclipse/chicago/2022-01-01/data_*.parquet`\n\nThe Parquet file schema is as described below. \n\n## Additional Documentation\n\nFor details on the calibration of PM2.5, O3 and NO2, please see [this PDF](https://ai4edatasetspublicassets.blob.core.windows.net/assets/aod_docs/Calibration_Doc_v1.1.pdf).\n\n## License and attribution\n\nPlease cite: Daepp, Cabral, Ranganathan et al. (2022) [Eclipse: An End-to-End Platform for Low-Cost, Hyperlocal Environmental Sensing in Cities. ACM/IEEE Information Processing in Sensor Networks. Milan, Italy.](https://www.microsoft.com/en-us/research/uploads/prod/2022/05/ACM_2022-IPSN_FINAL_Eclipse.pdf)\n\n## Contact\n\nFor questions about this dataset, contact [`msrurbanops@microsoft.com`](mailto:msrurbanops@microsoft.com?subject=eclipse%20question)\n\n## Learn more\n\nThe [Eclipse Project](https://www.microsoft.com/en-us/research/urban-innovation-research/) contains an overview of Project Eclipse at Microsoft Research.\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "air-pollution,eclipse,pm25", "license": "proprietary", "title": "Urban Innovation Eclipse Sensor Data", "missionStartDate": "2021-01-01T00:00:00Z"}, "esa-cci-lc": {"abstract": "The ESA Climate Change Initiative (CCI) [Land Cover dataset](https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-land-cover?tab=overview) provides consistent global annual land cover maps at 300m spatial resolution from 1992 to 2020. The land cover classes are defined using the United Nations Food and Agriculture Organization's (UN FAO) [Land Cover Classification System](https://www.fao.org/land-water/land/land-governance/land-resources-planning-toolbox/category/details/en/c/1036361/) (LCCS). In addition to the land cover maps, four quality flags are produced to document the reliability of the classification and change detection. \n\nThe data in this Collection have been converted from the [original NetCDF data](https://planetarycomputer.microsoft.com/dataset/esa-cci-lc-netcdf) to a set of tiled [Cloud Optimized GeoTIFFs](https://www.cogeo.org/) (COGs).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "cci,esa,esa-cci-lc,global,land-cover", "license": "proprietary", "title": "ESA Climate Change Initiative Land Cover Maps (Cloud Optimized GeoTIFF)", "missionStartDate": "1992-01-01T00:00:00Z"}, "esa-cci-lc-netcdf": {"abstract": "The ESA Climate Change Initiative (CCI) [Land Cover dataset](https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-land-cover?tab=overview) provides consistent global annual land cover maps at 300m spatial resolution from 1992 to 2020.
The land cover classes are defined using the United Nations Food and Agriculture Organization's (UN FAO) [Land Cover Classification System](https://www.fao.org/land-water/land/land-governance/land-resources-planning-toolbox/category/details/en/c/1036361/) (LCCS). In addition to the land cover maps, four quality flags are produced to document the reliability of the classification and change detection. \n\nThe data in this Collection are the original NetCDF files accessed from the [Copernicus Climate Data Store](https://cds.climate.copernicus.eu/#!/home). We recommend users use the [`esa-cci-lc` Collection](https://planetarycomputer.microsoft.com/dataset/esa-cci-lc), which provides the data as Cloud Optimized GeoTIFFs.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "cci,esa,esa-cci-lc-netcdf,global,land-cover", "license": "proprietary", "title": "ESA Climate Change Initiative Land Cover Maps (NetCDF)", "missionStartDate": "1992-01-01T00:00:00Z"}, "fws-nwi": {"abstract": "The Wetlands Data Layer is the product of over 45 years of work by the National Wetlands Inventory (NWI) and its collaborators and currently contains more than 35 million wetland and deepwater features. This dataset, covering the conterminous United States, Hawaii, Puerto Rico, the Virgin Islands, Guam, the major Northern Mariana Islands and Alaska, continues to grow at a rate of 50 to 100 million acres annually as data are updated.\n\n**NOTE:** Due to the variation in use and analysis of this data by the end user, each state's wetlands data extends beyond the state boundary. Each state includes wetlands data that intersect the 1:24,000 quadrangles that contain part of that state (1:2,000,000 source data). This allows the user to clip the data to their specific analysis datasets. Beware that two adjacent states will contain some of the same data along their borders.\n\nFor more information, visit the National Wetlands Inventory [homepage](https://www.fws.gov/program/national-wetlands-inventory).\n\n## STAC Metadata\n\nIn addition to the `zip` asset in every STAC item, each item has its own assets unique to its wetlands. In general, each item will have several assets, each linking to a [geoparquet](https://github.com/opengeospatial/geoparquet) asset with data for the entire region or a sub-region within that state. Use the `cloud-optimized` [role](https://github.com/radiantearth/stac-spec/blob/master/item-spec/item-spec.md#asset-roles) to select just the geoparquet assets. See the Example Notebook for more.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "fws-nwi,united-states,usfws,wetlands", "license": "proprietary", "title": "FWS National Wetlands Inventory", "missionStartDate": "2022-10-01T00:00:00Z"}, "usgs-lcmap-conus-v13": {"abstract": "The [Land Change Monitoring, Assessment, and Projection](https://www.usgs.gov/special-topics/lcmap) (LCMAP) product provides land cover mapping and change monitoring from the U.S. Geological Survey's [Earth Resources Observation and Science](https://www.usgs.gov/centers/eros) (EROS) Center. LCMAP's Science Products are developed by applying time-series modeling on a per-pixel basis to [Landsat Analysis Ready Data](https://www.usgs.gov/landsat-missions/landsat-us-analysis-ready-data) (ARD) using an implementation of the [Continuous Change Detection and Classification](https://doi.org/10.1016/j.rse.2014.01.011) (CCDC) algorithm. All available clear (non-cloudy) U.S.
Landsat ARD observations are fit to a harmonic model to predict future Landsat-like surface reflectance. Where Landsat surface reflectance observations differ significantly from those predictions, a change is identified. Attributes of the resulting model sequences (e.g., start/end dates, residuals, model coefficients) are then used to produce a set of land surface change products and as inputs to the subsequent classification to thematic land cover. \n\nThis [STAC](https://stacspec.org/en) Collection contains [LCMAP CONUS Collection 1.3](https://www.usgs.gov/special-topics/lcmap/collection-13-conus-science-products), which was released in August 2022 for years 1985-2021. The data are tiled according to the Landsat ARD tile grid and consist of [Cloud Optimized GeoTIFFs](https://www.cogeo.org/) (COGs) and corresponding metadata files. Note that the provided COGs differ slightly from those in the USGS source data. They have been reprocessed to add overviews, \"nodata\" values where appropriate, and an updated projection definition.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "conus,land-cover,land-cover-change,lcmap,usgs,usgs-lcmap-conus-v13", "license": "proprietary", "title": "USGS LCMAP CONUS Collection 1.3", "missionStartDate": "1985-01-01T00:00:00Z"}, "usgs-lcmap-hawaii-v10": {"abstract": "The [Land Change Monitoring, Assessment, and Projection](https://www.usgs.gov/special-topics/lcmap) (LCMAP) product provides land cover mapping and change monitoring from the U.S. Geological Survey's [Earth Resources Observation and Science](https://www.usgs.gov/centers/eros) (EROS) Center. LCMAP's Science Products are developed by applying time-series modeling on a per-pixel basis to [Landsat Analysis Ready Data](https://www.usgs.gov/landsat-missions/landsat-us-analysis-ready-data) (ARD) using an implementation of the [Continuous Change Detection and Classification](https://doi.org/10.1016/j.rse.2014.01.011) (CCDC) algorithm. All available clear (non-cloudy) U.S. Landsat ARD observations are fit to a harmonic model to predict future Landsat-like surface reflectance. Where Landsat surface reflectance observations differ significantly from those predictions, a change is identified. Attributes of the resulting model sequences (e.g., start/end dates, residuals, model coefficients) are then used to produce a set of land surface change products and as inputs to the subsequent classification to thematic land cover. \n\nThis [STAC](https://stacspec.org/en) Collection contains [LCMAP Hawaii Collection 1.0](https://www.usgs.gov/special-topics/lcmap/collection-1-hawaii-science-products), which was released in January 2022 for years 2000-2020. The data are tiled according to the Landsat ARD tile grid and consist of [Cloud Optimized GeoTIFFs](https://www.cogeo.org/) (COGs) and corresponding metadata files. Note that the provided COGs differ slightly from those in the USGS source data. 
They have been reprocessed to add overviews, \"nodata\" values where appropriate, and an updated projection definition.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "hawaii,land-cover,land-cover-change,lcmap,usgs,usgs-lcmap-hawaii-v10", "license": "proprietary", "title": "USGS LCMAP Hawaii Collection 1.0", "missionStartDate": "2000-01-01T00:00:00Z"}, "noaa-climate-normals-tabular": {"abstract": "The [NOAA United States Climate Normals](https://www.ncei.noaa.gov/products/land-based-station/us-climate-normals) provide information about typical climate conditions for thousands of weather station locations across the United States. Normals act both as a ruler to compare current weather and as a predictor of conditions in the near future. The official normals are calculated for a uniform 30-year period, and consist of annual/seasonal, monthly, daily, and hourly averages and statistics of temperature, precipitation, and other climatological variables for each weather station. \n\nNOAA produces Climate Normals in accordance with the [World Meteorological Organization](https://public.wmo.int/en) (WMO), of which the United States is a member. The WMO requires each member nation to compute 30-year meteorological quantity averages at least every 30 years, and recommends an update each decade, in part to incorporate newer weather stations. The 1991\u20132020 U.S. Climate Normals are the latest in a series of decadal normals first produced in the 1950s. \n\nThis Collection contains tabular weather variable data at weather station locations in GeoParquet format, converted from the source CSV files. The source CSV files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n\nData are provided for annual/seasonal, monthly, daily, and hourly frequencies for the following time periods:\n\n- Legacy 30-year normals (1981\u20132010)\n- Supplemental 15-year normals (2006\u20132020)\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate-normals,climatology,conus,noaa,noaa-climate-normals-tabular,surface-observations,weather", "license": "proprietary", "title": "NOAA US Tabular Climate Normals", "missionStartDate": "1981-01-01T00:00:00Z"}, "noaa-climate-normals-netcdf": {"abstract": "The [NOAA Gridded United States Climate Normals](https://www.ncei.noaa.gov/products/land-based-station/us-climate-normals#tab-1027) provide a continuous grid of temperature and precipitation data across the contiguous United States (CONUS). The grids are derived from NOAA's [NClimGrid dataset](https://planetarycomputer.microsoft.com/dataset/group/noaa-nclimgrid), and resolutions (nominal 5x5 kilometer) and spatial extents (CONUS) therefore match that of NClimGrid. Monthly, seasonal, and annual gridded normals are computed from simple averages of the NClimGrid data and are provided for three time-periods: 1901\u20132000, 1991\u20132020, and 2006\u20132020. Daily gridded normals are smoothed for a smooth transition from one day to another and are provided for two time-periods: 1991\u20132020, and 2006\u20132020.\n\nNOAA produces Climate Normals in accordance with the [World Meteorological Organization](https://public.wmo.int/en) (WMO), of which the United States is a member.
The WMO requires each member nation to compute 30-year meteorological quantity averages at least every 30 years, and recommends an update each decade, in part to incorporate newer weather stations. The 1991\u20132020 U.S. Climate Normals are the latest in a series of decadal normals first produced in the 1950s. \n\nThe data in this Collection are the original NetCDF files provided by NOAA's National Centers for Environmental Information. This Collection contains gridded data for the following frequencies and time periods:\n\n- Annual, seasonal, and monthly normals\n - 100-year (1901\u20132000)\n - 30-year (1991\u20132020)\n - 15-year (2006\u20132020)\n- Daily normals\n - 30-year (1991\u20132020)\n - 15-year (2006\u20132020)\n\nFor most use-cases, we recommend using the [`noaa-climate-normals-gridded`](https://planetarycomputer.microsoft.com/dataset/noaa-climate-normals-gridded) collection, which contains the same data in Cloud Optimized GeoTIFF format. The NetCDF files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate-normals,climatology,conus,noaa,noaa-climate-normals-netcdf,surface-observations,weather", "license": "proprietary", "title": "NOAA US Gridded Climate Normals (NetCDF)", "missionStartDate": "1901-01-01T00:00:00Z"}, "noaa-climate-normals-gridded": {"abstract": "The [NOAA Gridded United States Climate Normals](https://www.ncei.noaa.gov/products/land-based-station/us-climate-normals#tab-1027) provide a continuous grid of temperature and precipitation data across the contiguous United States (CONUS). The grids are derived from NOAA's [NClimGrid dataset](https://planetarycomputer.microsoft.com/dataset/group/noaa-nclimgrid), and resolutions (nominal 5x5 kilometer) and spatial extents (CONUS) therefore match that of NClimGrid. Monthly, seasonal, and annual gridded normals are computed from simple averages of the NClimGrid data and are provided for three time-periods: 1901\u20132000, 1991\u20132020, and 2006\u20132020. Daily gridded normals are smoothed for a smooth transition from one day to another and are provided for two time-periods: 1991\u20132020, and 2006\u20132020.\n\nNOAA produces Climate Normals in accordance with the [World Meteorological Organization](https://public.wmo.int/en) (WMO), of which the United States is a member. The WMO requires each member nation to compute 30-year meteorological quantity averages at least every 30 years, and recommends an update each decade, in part to incorporate newer weather stations. The 1991\u20132020 U.S. Climate Normals are the latest in a series of decadal normals first produced in the 1950s. \n\nThis Collection contains gridded data for the following frequencies and time periods:\n\n- Annual, seasonal, and monthly normals\n - 100-year (1901\u20132000)\n - 30-year (1991\u20132020)\n - 15-year (2006\u20132020)\n- Daily normals\n - 30-year (1991\u20132020)\n - 15-year (2006\u20132020)\n\nThe data in this Collection have been converted from the original NetCDF format to Cloud Optimized GeoTIFFs (COGs).
The source NetCDF files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n\n## STAC Metadata\n\nThe STAC items in this collection contain several custom fields that can be used to further filter the data.\n\n* `noaa_climate_normals:period`: Climate normal time period. This can be \"1901-2000\", \"1991-2020\", or \"2006-2020\".\n* `noaa_climate_normals:frequency`: Climate normal temporal interval (frequency). This can be \"daily\", \"monthly\", \"seasonal\", or \"annual\".\n* `noaa_climate_normals:time_index`: Time step index, e.g., month of year (1-12).\n\nThe `description` field of the assets varies by frequency. Using `prcp_norm` as an example, the descriptions are\n\n* annual: \"Annual precipitation normals from monthly precipitation normal values\"\n* seasonal: \"Seasonal precipitation normals (WSSF) from monthly normals\"\n* monthly: \"Monthly precipitation normals from monthly precipitation values\"\n* daily: \"Precipitation normals from daily averages\"\n\nCheck the assets on individual items for the appropriate description.\n\nThe STAC keys for most assets consist of two abbreviations. A \"variable\":\n\n| Abbreviation | Description |\n| ------------ | ---------------------------------------- |\n| prcp | Precipitation over the time period |\n| tavg | Mean temperature over the time period |\n| tmax | Maximum temperature over the time period |\n| tmin | Minimum temperature over the time period |\n\nAnd an \"aggregation\":\n\n| Abbreviation | Description |\n| ------------ | ------------------------------------------------------------------------------ |\n| max | Maximum of the variable over the time period |\n| min | Minimum of the variable over the time period |\n| std | Standard deviation of the value over the time period |\n| flag | A count of the number of inputs (months, years, etc.) used to calculate the normal |\n| norm | The normal for the variable over the time period |\n\nSo, for example, `prcp_max` for monthly data is the \"Maximum values of all input monthly precipitation normal values\".\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate-normals,climatology,conus,noaa,noaa-climate-normals-gridded,surface-observations,weather", "license": "proprietary", "title": "NOAA US Gridded Climate Normals (Cloud-Optimized GeoTIFF)", "missionStartDate": "1901-01-01T00:00:00Z"}, "aster-l1t": {"abstract": "The [ASTER](https://terra.nasa.gov/about/terra-instruments/aster) instrument, launched on-board NASA's [Terra](https://terra.nasa.gov/) satellite in 1999, provides multispectral images of the Earth at 15m-90m resolution. ASTER images provide information about land surface temperature, color, elevation, and mineral composition.\n\nThis dataset represents ASTER [L1T](https://lpdaac.usgs.gov/products/ast_l1tv003/) data from 2000-2006. L1T images have been terrain-corrected and rotated to a north-up UTM projection. 
Images are in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\n", "instrument": "aster", "platform": null, "platformSerialIdentifier": "terra", "processingLevel": null, "keywords": "aster,aster-l1t,global,nasa,satellite,terra,usgs", "license": "proprietary", "title": "ASTER L1T", "missionStartDate": "2000-03-04T12:00:00Z"}, "cil-gdpcir-cc-by-sa": {"abstract": "The World Climate Research Programme's [6th Coupled Model Intercomparison Project (CMIP6)](https://www.wcrp-climate.org/wgcm-cmip/wgcm-cmip6) represents an enormous advance in the quality, detail, and scope of climate modeling.\n\nThe [Global Downscaled Projections for Climate Impacts Research](https://github.com/ClimateImpactLab/downscaleCMIP6) dataset makes this modeling more applicable to understanding the impacts of changes in the climate on humans and society with two key developments: trend-preserving bias correction and downscaling. In this dataset, the [Climate Impact Lab](https://impactlab.org) provides global, daily minimum and maximum air temperature at the surface (`tasmin` and `tasmax`) and daily cumulative surface precipitation (`pr`) corresponding to the CMIP6 historical, ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 scenarios for 25 global climate models on a 1/4-degree regular global grid.\n\n## Accessing the data\n\nGDPCIR data can be accessed on the Microsoft Planetary Computer. The dataset is made up of three collections, distinguished by data license:\n* [Public domain (CC0-1.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc0)\n* [Attribution (CC BY 4.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by)\n* [Attribution-ShareAlike (CC BY SA 4.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by-sa)\n\nEach modeling center with bias corrected and downscaled data in this collection falls into one of these license categories - see the [table below](/dataset/cil-gdpcir-cc-by-sa#available-institutions-models-and-scenarios-by-license-collection) to see which model is in each collection, and see the section below on [Citing, Licensing, and using data produced by this project](/dataset/cil-gdpcir-cc-by-sa#citing-licensing-and-using-data-produced-by-this-project) for citations and additional information about each license.\n\n## Data format & contents\n\nThe data is stored as partitioned zarr stores (see [https://zarr.readthedocs.io](https://zarr.readthedocs.io)), each of which includes thousands of data and metadata files covering the full time span of the experiment. Historical zarr stores contain just over 50 GB, while SSP zarr stores contain nearly 70 GB. Values are stored as 32-bit floats, with dimensions time (daily datetime), lat (float latitude), and lon (float longitude). The data is chunked at each 365-day interval and each 90-degree interval of latitude and longitude. Therefore, each chunk is `(365, 360, 360)`, with each chunk occupying approximately 180 MB in memory (365 \u00d7 360 \u00d7 360 values \u00d7 4 bytes).\n\nHistorical data is daily, excluding leap days, from Jan 1, 1950 to Dec 31, 2014; SSP data is daily, excluding leap days, from Jan 1, 2015 to either Dec 31, 2099 or Dec 31, 2100, depending on data availability in the source GCM.\n\nThe spatial domain covers all 0.25-degree grid cells, indexed by the grid center, with grid edges on the quarter-degree, using a -180 to 180 longitude convention. 
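To see this grid for yourself (a minimal sketch; the store URL below is a placeholder rather than a real asset href, which are listed in each item's STAC assets), a store can be opened lazily with xarray and its coordinates inspected:\n\n```python\nimport xarray as xr\n\n# Placeholder URL: substitute a real zarr asset href from the collection's STAC items.\nstore_url = \"https://example.blob.core.windows.net/gdpcir/tasmax.zarr\"\nds = xr.open_zarr(store_url)  # lazy: only metadata is read at this point\nprint(ds[\"tasmax\"].dims)      # ('time', 'lat', 'lon')\nprint(ds[\"lon\"].values[:3])   # grid-center coordinates on the 0.25-degree grid\n```\n\n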
The \u201clon\u201d coordinate thus extends from -179.875 to 179.875, and the \u201clat\u201d coordinate extends from -89.875 to 89.875, with intermediate values at each 0.25-degree increment between (e.g. -179.875, -179.625, -179.375, etc.).\n\n## Available institutions, models, and scenarios by license collection\n\n| Modeling institution | Source model | Available experiments | License collection |\n| -------------------- | ----------------- | ------------------------------------------ | ---------------------- |\n| CAS | FGOALS-g3 [^1] | SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| INM | INM-CM4-8 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| INM | INM-CM5-0 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| BCC | BCC-CSM2-MR | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| CMCC | CMCC-CM2-SR5 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0 |\n| CMCC | CMCC-ESM2 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0 |\n| CSIRO-ARCCSS | ACCESS-CM2 | SSP2-4.5 and SSP3-7.0 | CC-BY-4.0 |\n| CSIRO | ACCESS-ESM1-5 | SSP1-2.6, SSP2-4.5, and SSP3-7.0 | CC-BY-4.0 |\n| MIROC | MIROC-ES2L | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MIROC | MIROC6 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MOHC | HadGEM3-GC31-LL | SSP1-2.6, SSP2-4.5, and SSP5-8.5 | CC-BY-4.0 |\n| MOHC | UKESM1-0-LL | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MPI-M | MPI-ESM1-2-LR | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MPI-M/DKRZ [^2] | MPI-ESM1-2-HR | SSP1-2.6 and SSP5-8.5 | CC-BY-4.0 |\n| NCC | NorESM2-LM | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NCC | NorESM2-MM | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NOAA-GFDL | GFDL-CM4 | SSP2-4.5 and SSP5-8.5 | CC-BY-4.0 |\n| NOAA-GFDL | GFDL-ESM4 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NUIST | NESM3 | SSP1-2.6, SSP2-4.5, and SSP5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3 | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-AerChem | ssp370 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-CC | ssp245 and ssp585 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-Veg | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-Veg-LR | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| CCCma | CanESM5 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-SA-4.0 |\n\n*Notes:*\n\n[^1]: At the time of running, no ssp1-2.6 precipitation data was available. Therefore, we provide `tasmin` and `tasmax` for this model and experiment, but not `pr`. All other model/experiment combinations in the above table include all three variables.\n\n[^2]: The institution which ran MPI-ESM1-2-HR\u2019s historical (CMIP) simulations is `MPI-M`, while the future (ScenarioMIP) simulations were run by `DKRZ`. Therefore, the institution component of `MPI-ESM1-2-HR` filepaths differs between `historical` and `SSP` scenarios.\n\n## Project methods\n\nThis project makes use of statistical bias correction and downscaling algorithms, which are specifically designed to accurately represent changes in the extremes. For this reason, we selected Quantile Delta Mapping (QDM), following the method introduced by [Cannon et al. 
(2015)](https://doi.org/10.1175/JCLI-D-14-00754.1), which preserves quantile-specific trends from the GCM while fitting the full distribution for a given day-of-year to a reference dataset (ERA5).\n\nWe then introduce a similar method tailored to increase spatial resolution while preserving extreme behavior, Quantile-Preserving Localized-Analog Downscaling (QPLAD).\n\nTogether, these methods provide a robust means to handle both the central and tail behavior seen in climate model output, while aligning the full distribution to a state-of-the-art reanalysis dataset and providing the spatial granularity needed to study surface impacts.\n\nFor further documentation, see [Global downscaled projections for climate impacts research (GDPCIR): preserving extremes for modeling future climate impacts](https://egusphere.copernicus.org/preprints/2023/egusphere-2022-1513/) (EGUsphere, 2022 [preprint]).\n\n## Citing, licensing, and using data produced by this project\n\nProjects making use of the data produced as part of the Climate Impact Lab Global Downscaled Projections for Climate Impacts Research (CIL GDPCIR) project are requested to cite both this project and the source datasets from which these results are derived. Additionally, the use of data derived from some GCMs *requires* citations, and some modeling centers impose licensing restrictions & requirements on derived works. See each GCM's license info in the links below for more information.\n\n### CIL GDPCIR\n\nUsers are requested to cite this project in derived works. Our method documentation paper may be cited using the following:\n\n> Gergel, D. R., Malevich, S. B., McCusker, K. E., Tenezakis, E., Delgado, M. T., Fish, M. A., and Kopp, R. E.: Global downscaled projections for climate impacts research (GDPCIR): preserving extremes for modeling future climate impacts, EGUsphere [preprint], https://doi.org/10.5194/egusphere-2022-1513, 2023. \n\nThe code repository may be cited using the following:\n\n> Diana Gergel, Kelly McCusker, Brewster Malevich, Emile Tenezakis, Meredith Fish, Michael Delgado (2022). ClimateImpactLab/downscaleCMIP6: (v1.0.0). Zenodo. https://doi.org/10.5281/zenodo.6403794\n\n### ERA5\n\nAdditionally, we request you cite the historical dataset used in bias correction and downscaling, ERA5. See the [ECMWF guide to citing a dataset on the Climate Data Store](https://confluence.ecmwf.int/display/CKB/How+to+acknowledge+and+cite+a+Climate+Data+Store+%28CDS%29+catalogue+entry+and+the+data+published+as+part+of+it):\n\n> Hersbach, H., et al. The ERA5 global reanalysis. Q J R Meteorol Soc. 2020; 146: 1999\u20132049. DOI: [10.1002/qj.3803](https://doi.org/10.1002/qj.3803)\n>\n> Mu\u00f1oz Sabater, J., (2019): ERA5-Land hourly data from 1981 to present. Copernicus Climate Change Service (C3S) Climate Data Store (CDS). (Accessed on June 4, 2021), DOI: [10.24381/cds.e2161bac](https://doi.org/10.24381/cds.e2161bac)\n>\n> Mu\u00f1oz Sabater, J., (2021): ERA5-Land hourly data from 1950 to 1980. Copernicus Climate Change Service (C3S) Climate Data Store (CDS). (Accessed on June 4, 2021), DOI: [10.24381/cds.e2161bac](https://doi.org/10.24381/cds.e2161bac)\n\n### GCM-specific citations & licenses\n\nThe CMIP6 simulation data made available through the Earth System Grid Federation (ESGF) are subject to Creative Commons [BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/) or [BY-NC-SA 4.0](https://creativecommons.org/licenses/by-nc-sa/4.0/) licenses. 
The Climate Impact Lab has reached out to each of the modeling institutions to request waivers from these terms so the outputs of this project may be used with fewer restrictions, and has been granted permission to release the data using the licenses listed here.\n\n#### Public Domain Datasets\n\nThe following bias corrected and downscaled model simulations are available in the public domain using a [CC0 1.0 Universal Public Domain Declaration](https://creativecommons.org/publicdomain/zero/1.0/). Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc0.\n\n* **FGOALS-g3**\n\n License description: [data_licenses/FGOALS-g3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/FGOALS-g3.txt)\n\n CMIP Citation:\n\n > Li, Lijuan **(2019)**. *CAS FGOALS-g3 model output prepared for CMIP6 CMIP*. Version 20190826. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1783\n\n ScenarioMIP Citation:\n\n > Li, Lijuan **(2019)**. *CAS FGOALS-g3 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190818; SSP2-4.5 version 20190818; SSP3-7.0 version 20190820; SSP5-8.5 tasmax version 20190819; SSP5-8.5 tasmin version 20190819; SSP5-8.5 pr version 20190818. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2056\n\n\n* **INM-CM4-8**\n\n License description: [data_licenses/INM-CM4-8.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/INM-CM4-8.txt)\n\n CMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM4-8 model output prepared for CMIP6 CMIP*. Version 20190530. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1422\n\n ScenarioMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM4-8 model output prepared for CMIP6 ScenarioMIP*. Version 20190603. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.12321\n\n\n* **INM-CM5-0**\n\n License description: [data_licenses/INM-CM5-0.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/INM-CM5-0.txt)\n\n CMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM5-0 model output prepared for CMIP6 CMIP*. Version 20190610. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1423\n\n ScenarioMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM5-0 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190619; SSP2-4.5 version 20190619; SSP3-7.0 version 20190618; SSP5-8.5 version 20190724. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.12322\n\n\n#### CC-BY-4.0\n\nThe following bias corrected and downscaled model simulations are licensed under a [Creative Commons Attribution 4.0 International License](https://creativecommons.org/licenses/by/4.0/). 
Note that this license requires citation of the source model output (included here). Please see https://creativecommons.org/licenses/by/4.0/ for more information. Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by.\n\n* **ACCESS-CM2**\n\n License description: [data_licenses/ACCESS-CM2.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/ACCESS-CM2.txt)\n\n CMIP Citation:\n\n > Dix, Martin; Bi, Doahua; Dobrohotoff, Peter; Fiedler, Russell; Harman, Ian; Law, Rachel; Mackallah, Chloe; Marsland, Simon; O'Farrell, Siobhan; Rashid, Harun; Srbinovsky, Jhan; Sullivan, Arnold; Trenham, Claire; Vohralik, Peter; Watterson, Ian; Williams, Gareth; Woodhouse, Matthew; Bodman, Roger; Dias, Fabio Boeira; Domingues, Catia; Hannah, Nicholas; Heerdegen, Aidan; Savita, Abhishek; Wales, Scott; Allen, Chris; Druken, Kelsey; Evans, Ben; Richards, Clare; Ridzwan, Syazwan Mohamed; Roberts, Dale; Smillie, Jon; Snow, Kate; Ward, Marshall; Yang, Rui **(2019)**. *CSIRO-ARCCSS ACCESS-CM2 model output prepared for CMIP6 CMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2281\n\n ScenarioMIP Citation:\n\n > Dix, Martin; Bi, Doahua; Dobrohotoff, Peter; Fiedler, Russell; Harman, Ian; Law, Rachel; Mackallah, Chloe; Marsland, Simon; O'Farrell, Siobhan; Rashid, Harun; Srbinovsky, Jhan; Sullivan, Arnold; Trenham, Claire; Vohralik, Peter; Watterson, Ian; Williams, Gareth; Woodhouse, Matthew; Bodman, Roger; Dias, Fabio Boeira; Domingues, Catia; Hannah, Nicholas; Heerdegen, Aidan; Savita, Abhishek; Wales, Scott; Allen, Chris; Druken, Kelsey; Evans, Ben; Richards, Clare; Ridzwan, Syazwan Mohamed; Roberts, Dale; Smillie, Jon; Snow, Kate; Ward, Marshall; Yang, Rui **(2019)**. *CSIRO-ARCCSS ACCESS-CM2 model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2285\n\n\n* **ACCESS-ESM1-5**\n\n License description: [data_licenses/ACCESS-ESM1-5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/ACCESS-ESM1-5.txt)\n\n CMIP Citation:\n\n > Ziehn, Tilo; Chamberlain, Matthew; Lenton, Andrew; Law, Rachel; Bodman, Roger; Dix, Martin; Wang, Yingping; Dobrohotoff, Peter; Srbinovsky, Jhan; Stevens, Lauren; Vohralik, Peter; Mackallah, Chloe; Sullivan, Arnold; O'Farrell, Siobhan; Druken, Kelsey **(2019)**. *CSIRO ACCESS-ESM1.5 model output prepared for CMIP6 CMIP*. Version 20191115. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2288\n\n ScenarioMIP Citation:\n\n > Ziehn, Tilo; Chamberlain, Matthew; Lenton, Andrew; Law, Rachel; Bodman, Roger; Dix, Martin; Wang, Yingping; Dobrohotoff, Peter; Srbinovsky, Jhan; Stevens, Lauren; Vohralik, Peter; Mackallah, Chloe; Sullivan, Arnold; O'Farrell, Siobhan; Druken, Kelsey **(2019)**. *CSIRO ACCESS-ESM1.5 model output prepared for CMIP6 ScenarioMIP*. Version 20191115. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2291\n\n\n* **BCC-CSM2-MR**\n\n License description: [data_licenses/BCC-CSM2-MR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/BCC-CSM2-MR.txt)\n\n CMIP Citation:\n\n > Xin, Xiaoge; Zhang, Jie; Zhang, Fang; Wu, Tongwen; Shi, Xueli; Li, Jianglong; Chu, Min; Liu, Qianxia; Yan, Jinghui; Ma, Qiang; Wei, Min **(2018)**. *BCC BCC-CSM2MR model output prepared for CMIP6 CMIP*. Version 20181126. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1725\n\n ScenarioMIP Citation:\n\n > Xin, Xiaoge; Wu, Tongwen; Shi, Xueli; Zhang, Fang; Li, Jianglong; Chu, Min; Liu, Qianxia; Yan, Jinghui; Ma, Qiang; Wei, Min **(2019)**. *BCC BCC-CSM2MR model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190315; SSP2-4.5 version 20190318; SSP3-7.0 version 20190318; SSP5-8.5 version 20190318. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1732\n\n\n* **CMCC-CM2-SR5**\n\n License description: [data_licenses/CMCC-CM2-SR5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CMCC-CM2-SR5.txt)\n\n CMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele **(2020)**. *CMCC CMCC-CM2-SR5 model output prepared for CMIP6 CMIP*. Version 20200616. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1362\n\n ScenarioMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele **(2020)**. *CMCC CMCC-CM2-SR5 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20200717; SSP2-4.5 version 20200617; SSP3-7.0 version 20200622; SSP5-8.5 version 20200622. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1365\n\n\n* **CMCC-ESM2**\n\n License description: [data_licenses/CMCC-ESM2.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CMCC-ESM2.txt)\n\n CMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele; Butensch\u00f6n, Momme **(2021)**. *CMCC CMCC-ESM2 model output prepared for CMIP6 CMIP*. Version 20210114. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.13164\n\n ScenarioMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele; Butensch\u00f6n, Momme **(2021)**. *CMCC CMCC-ESM2 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20210126; SSP2-4.5 version 20210129; SSP3-7.0 version 20210202; SSP5-8.5 version 20210126. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.13168\n\n\n* **EC-Earth3-AerChem**\n\n License description: [data_licenses/EC-Earth3-AerChem.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-AerChem.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-AerChem model output prepared for CMIP6 CMIP*. Version 20200624. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.639\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-AerChem model output prepared for CMIP6 ScenarioMIP*. Version 20200827. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.724\n\n\n* **EC-Earth3-CC**\n\n License description: [data_licenses/EC-Earth3-CC.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-CC.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth-3-CC model output prepared for CMIP6 CMIP*. Version 20210113. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.640\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2021)**. *EC-Earth-Consortium EC-Earth3-CC model output prepared for CMIP6 ScenarioMIP*. Version 20210113. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.15327\n\n\n* **EC-Earth3-Veg-LR**\n\n License description: [data_licenses/EC-Earth3-Veg-LR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-Veg-LR.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. 
*EC-Earth-Consortium EC-Earth3-Veg-LR model output prepared for CMIP6 CMIP*. Version 20200217. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.643\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-Veg-LR model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20201201; SSP2-4.5 version 20201123; SSP3-7.0 version 20201123; SSP5-8.5 version 20201201. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.728\n\n\n* **EC-Earth3-Veg**\n\n License description: [data_licenses/EC-Earth3-Veg.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-Veg.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3-Veg model output prepared for CMIP6 CMIP*. Version 20200225. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.642\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3-Veg model output prepared for CMIP6 ScenarioMIP*. Version 20200225. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.727\n\n\n* **EC-Earth3**\n\n License description: [data_licenses/EC-Earth3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3 model output prepared for CMIP6 CMIP*. Version 20200310. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.181\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3 model output prepared for CMIP6 ScenarioMIP*. Version 20200310. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.251\n\n\n* **GFDL-CM4**\n\n License description: [data_licenses/GFDL-CM4.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/GFDL-CM4.txt)\n\n CMIP Citation:\n\n > Guo, Huan; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Bushuk, Mitchell; Dunne, Krista A.; Dussin, Raphael; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Reichl, Brandon G; Schwarzkopf, Daniel M; Seman, Charles J; Shao, Andrew; Silvers, Levi; Wyman, Bruce; Yan, Xiaoqin; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Held, Isaac M; Krasting, John P.; Horowitz, Larry W.; Milly, P.C.D; Shevliakova, Elena; Winton, Michael; Zhao, Ming; Zhang, Rong **(2018)**. *NOAA-GFDL GFDL-CM4 model output*. Version 20180701. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1402\n\n ScenarioMIP Citation:\n\n > Guo, Huan; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Dunne, Krista A.; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Schwarzkopf, Daniel M; Seman, Charles J; Shao, Andrew; Silvers, Levi; Wyman, Bruce; Yan, Xiaoqin; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Held, Isaac M; Krasting, John P.; Horowitz, Larry W.; Milly, Chris; Shevliakova, Elena; Winton, Michael; Zhao, Ming; Zhang, Rong **(2018)**. *NOAA-GFDL GFDL-CM4 model output prepared for CMIP6 ScenarioMIP*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.9242\n\n\n* **GFDL-ESM4**\n\n License description: [data_licenses/GFDL-ESM4.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/GFDL-ESM4.txt)\n\n CMIP Citation:\n\n > Krasting, John P.; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Dunne, Krista A.; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Reichl, Brandon G; Schwarzkopf, Daniel M; Seman, Charles J; Silvers, Levi; Wyman, Bruce; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Dussin, Raphael; Guo, Huan; He, Jian; Held, Isaac M; Horowitz, Larry W.; Lin, Pu; Milly, P.C.D; Shevliakova, Elena; Stock, Charles; Winton, Michael; Wittenberg, Andrew T.; Xie, Yuanyu; Zhao, Ming **(2018)**. *NOAA-GFDL GFDL-ESM4 model output prepared for CMIP6 CMIP*. Version 20190726. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1407\n\n ScenarioMIP Citation:\n\n > John, Jasmin G; Blanton, Chris; McHugh, Colleen; Radhakrishnan, Aparna; Rand, Kristopher; Vahlenkamp, Hans; Wilson, Chandin; Zadeh, Niki T.; Dunne, John P.; Dussin, Raphael; Horowitz, Larry W.; Krasting, John P.; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Ploshay, Jeffrey; Shevliakova, Elena; Silvers, Levi; Stock, Charles; Winton, Michael; Zeng, Yujin **(2018)**. *NOAA-GFDL GFDL-ESM4 model output prepared for CMIP6 ScenarioMIP*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1414\n\n\n* **HadGEM3-GC31-LL**\n\n License description: [data_licenses/HadGEM3-GC31-LL.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/HadGEM3-GC31-LL.txt)\n\n CMIP Citation:\n\n > Ridley, Jeff; Menary, Matthew; Kuhlbrodt, Till; Andrews, Martin; Andrews, Tim **(2018)**. *MOHC HadGEM3-GC31-LL model output prepared for CMIP6 CMIP*. Version 20190624. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.419\n\n ScenarioMIP Citation:\n\n > Good, Peter **(2019)**. *MOHC HadGEM3-GC31-LL model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20200114; SSP2-4.5 version 20190908; SSP5-8.5 version 20200114. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.10845\n\n\n* **MIROC-ES2L**\n\n License description: [data_licenses/MIROC-ES2L.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MIROC-ES2L.txt)\n\n CMIP Citation:\n\n > Hajima, Tomohiro; Abe, Manabu; Arakawa, Osamu; Suzuki, Tatsuo; Komuro, Yoshiki; Ogura, Tomoo; Ogochi, Koji; Watanabe, Michio; Yamamoto, Akitomo; Tatebe, Hiroaki; Noguchi, Maki A.; Ohgaito, Rumi; Ito, Akinori; Yamazaki, Dai; Ito, Akihiko; Takata, Kumiko; Watanabe, Shingo; Kawamiya, Michio; Tachiiri, Kaoru **(2019)**. *MIROC MIROC-ES2L model output prepared for CMIP6 CMIP*. Version 20191129. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.902\n\n ScenarioMIP Citation:\n\n > Tachiiri, Kaoru; Abe, Manabu; Hajima, Tomohiro; Arakawa, Osamu; Suzuki, Tatsuo; Komuro, Yoshiki; Ogochi, Koji; Watanabe, Michio; Yamamoto, Akitomo; Tatebe, Hiroaki; Noguchi, Maki A.; Ohgaito, Rumi; Ito, Akinori; Yamazaki, Dai; Ito, Akihiko; Takata, Kumiko; Watanabe, Shingo; Kawamiya, Michio **(2019)**. *MIROC MIROC-ES2L model output prepared for CMIP6 ScenarioMIP*. Version 20200318. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.936\n\n\n* **MIROC6**\n\n License description: [data_licenses/MIROC6.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MIROC6.txt)\n\n CMIP Citation:\n\n > Tatebe, Hiroaki; Watanabe, Masahiro **(2018)**. *MIROC MIROC6 model output prepared for CMIP6 CMIP*. Version 20191016. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.881\n\n ScenarioMIP Citation:\n\n > Shiogama, Hideo; Abe, Manabu; Tatebe, Hiroaki **(2019)**. *MIROC MIROC6 model output prepared for CMIP6 ScenarioMIP*. Version 20191016. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.898\n\n\n* **MPI-ESM1-2-HR**\n\n License description: [data_licenses/MPI-ESM1-2-HR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MPI-ESM1-2-HR.txt)\n\n CMIP Citation:\n\n > Jungclaus, Johann; Bittner, Matthias; Wieners, Karl-Hermann; Wachsmann, Fabian; Schupfner, Martin; Legutke, Stephanie; Giorgetta, Marco; Reick, Christian; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Esch, Monika; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-HR model output prepared for CMIP6 CMIP*. Version 20190710. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.741\n\n ScenarioMIP Citation:\n\n > Schupfner, Martin; Wieners, Karl-Hermann; Wachsmann, Fabian; Steger, Christian; Bittner, Matthias; Jungclaus, Johann; Fr\u00fch, Barbara; Pankatz, Klaus; Giorgetta, Marco; Reick, Christian; Legutke, Stephanie; Esch, Monika; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *DKRZ MPI-ESM1.2-HR model output prepared for CMIP6 ScenarioMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2450\n\n\n* **MPI-ESM1-2-LR**\n\n License description: [data_licenses/MPI-ESM1-2-LR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MPI-ESM1-2-LR.txt)\n\n CMIP Citation:\n\n > Wieners, Karl-Hermann; Giorgetta, Marco; Jungclaus, Johann; Reick, Christian; Esch, Monika; Bittner, Matthias; Legutke, Stephanie; Schupfner, Martin; Wachsmann, Fabian; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-LR model output prepared for CMIP6 CMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.742\n\n ScenarioMIP Citation:\n\n > Wieners, Karl-Hermann; Giorgetta, Marco; Jungclaus, Johann; Reick, Christian; Esch, Monika; Bittner, Matthias; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-LR model output prepared for CMIP6 ScenarioMIP*. Version 20190710. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.793\n\n\n* **NESM3**\n\n License description: [data_licenses/NESM3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NESM3.txt)\n\n CMIP Citation:\n\n > Cao, Jian; Wang, Bin **(2019)**. *NUIST NESMv3 model output prepared for CMIP6 CMIP*. Version 20190812. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2021\n\n ScenarioMIP Citation:\n\n > Cao, Jian **(2019)**. *NUIST NESMv3 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190806; SSP2-4.5 version 20190805; SSP5-8.5 version 20190811. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2027\n\n\n* **NorESM2-LM**\n\n License description: [data_licenses/NorESM2-LM.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NorESM2-LM.txt)\n\n CMIP Citation:\n\n > Seland, \u00d8yvind; Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-LM model output prepared for CMIP6 CMIP*. Version 20190815. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.502\n\n ScenarioMIP Citation:\n\n > Seland, \u00d8yvind; Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-LM model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.604\n\n\n* **NorESM2-MM**\n\n License description: [data_licenses/NorESM2-MM.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NorESM2-MM.txt)\n\n CMIP Citation:\n\n > Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Seland, \u00d8yvind; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-MM model output prepared for CMIP6 CMIP*. Version 20191108. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.506\n\n ScenarioMIP Citation:\n\n > Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Seland, \u00d8yvind; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-MM model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.608\n\n\n* **UKESM1-0-LL**\n\n License description: [data_licenses/UKESM1-0-LL.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/UKESM1-0-LL.txt)\n\n CMIP Citation:\n\n > Tang, Yongming; Rumbold, Steve; Ellis, Rich; Kelley, Douglas; Mulcahy, Jane; Sellar, Alistair; Walton, Jeremy; Jones, Colin **(2019)**. *MOHC UKESM1.0-LL model output prepared for CMIP6 CMIP*. Version 20190627. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1569\n\n ScenarioMIP Citation:\n\n > Good, Peter; Sellar, Alistair; Tang, Yongming; Rumbold, Steve; Ellis, Rich; Kelley, Douglas; Kuhlbrodt, Till; Walton, Jeremy **(2019)**. *MOHC UKESM1.0-LL model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190708; SSP2-4.5 version 20190715; SSP3-7.0 version 20190726; SSP5-8.5 version 20190726. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1567\n\n\n#### CC-BY-SA-4.0\n\nThe following bias corrected and downscaled model simulations are licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/). Note that this license requires citation of the source model output (included here) and requires that derived works be shared under the same license. Please see https://creativecommons.org/licenses/by-sa/4.0/ for more information. Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by-sa.\n\n* **CanESM5**\n\n License description: [data_licenses/CanESM5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CanESM5.txt)\n\n CMIP Citation:\n\n > Swart, Neil Cameron; Cole, Jason N.S.; Kharin, Viatcheslav V.; Lazare, Mike; Scinocca, John F.; Gillett, Nathan P.; Anstey, James; Arora, Vivek; Christian, James R.; Jiao, Yanjun; Lee, Warren G.; Majaess, Fouad; Saenko, Oleg A.; Seiler, Christian; Seinen, Clint; Shao, Andrew; Solheim, Larry; von Salzen, Knut; Yang, Duo; Winter, Barbara; Sigmond, Michael **(2019)**. *CCCma CanESM5 model output prepared for CMIP6 CMIP*. Version 20190429. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1303\n\n ScenarioMIP Citation:\n\n > Swart, Neil Cameron; Cole, Jason N.S.; Kharin, Viatcheslav V.; Lazare, Mike; Scinocca, John F.; Gillett, Nathan P.; Anstey, James; Arora, Vivek; Christian, James R.; Jiao, Yanjun; Lee, Warren G.; Majaess, Fouad; Saenko, Oleg A.; Seiler, Christian; Seinen, Clint; Shao, Andrew; Solheim, Larry; von Salzen, Knut; Yang, Duo; Winter, Barbara; Sigmond, Michael **(2019)**. *CCCma CanESM5 model output prepared for CMIP6 ScenarioMIP*. Version 20190429. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1317\n\n## Acknowledgements\n\nThis work is the result of many years' worth of work by members of the [Climate Impact Lab](https://impactlab.org), but would not have been possible without many contributions from across the wider scientific and computing communities.\n\nSpecifically, we would like to acknowledge the World Climate Research Programme's Working Group on Coupled Modeling, which is responsible for CMIP, and we would like to thank the climate modeling groups for producing and making their model output available. We would particularly like to thank the modeling institutions whose results are included as an input to this repository (listed above) for their contributions to the CMIP6 project and for responding to and granting our requests for license waivers.\n\nWe would also like to thank Lamont-Doherty Earth Observatory, the [Pangeo Consortium](https://github.com/pangeo-data) (and especially the [ESGF Cloud Data Working Group](https://pangeo-data.github.io/pangeo-cmip6-cloud/#)) and Google Cloud and the Google Public Datasets program for making the [CMIP6 Google Cloud collection](https://console.cloud.google.com/marketplace/details/noaa-public/cmip6) possible. In particular, we're extremely grateful to [Ryan Abernathey](https://github.com/rabernat), [Naomi Henderson](https://github.com/naomi-henderson), [Charles Blackmon-Luca](https://github.com/charlesbluca), [Aparna Radhakrishnan](https://github.com/aradhakrishnanGFDL), [Julius Busecke](https://github.com/jbusecke), and [Charles Stern](https://github.com/cisaacstern) for the huge amount of work they've done to translate the ESGF CMIP6 netCDF archives into consistently-formatted, analysis-ready zarr stores on Google Cloud.\n\nWe're also grateful to the [xclim developers](https://github.com/Ouranosinc/xclim/graphs/contributors) ([DOI: 10.5281/zenodo.2795043](https://doi.org/10.5281/zenodo.2795043)), in particular [Pascal Bourgault](https://github.com/aulemahal), [David Huard](https://github.com/huard), and [Travis Logan](https://github.com/tlogan2000), for implementing the QDM bias correction method in the xclim python package, supporting our QPLAD implementation in the package, and ongoing support in integrating dask into downscaling workflows. For method advice and useful conversations, we would like to thank Keith Dixon, Dennis Adams-Smith, and [Joe Hamman](https://github.com/jhamman).\n\n## Financial support\n\nThis research has been supported by The Rockefeller Foundation and the Microsoft AI for Earth Initiative.\n\n## Additional links:\n\n* CIL GDPCIR project homepage: [github.com/ClimateImpactLab/downscaleCMIP6](https://github.com/ClimateImpactLab/downscaleCMIP6)\n* Project listing on zenodo: https://doi.org/10.5281/zenodo.6403794\n* Climate Impact Lab homepage: [impactlab.org](https://impactlab.org)", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "cil-gdpcir-cc-by-sa,climate-impact-lab,cmip6,precipitation,rhodium-group,temperature", "license": "CC-BY-SA-4.0", "title": "CIL Global Downscaled Projections for Climate Impacts Research (CC-BY-SA-4.0)", "missionStartDate": "1950-01-01T00:00:00Z"}, "io-lulc-9-class": {"abstract": "Time series of annual global maps of land use and land cover (LULC), currently covering 2017-2022. The maps are derived from ESA Sentinel-2 imagery at 10m resolution. 
Each map is a composite of LULC predictions for 9 classes throughout the year in order to generate a representative snapshot of each year.\n\nThis dataset was generated by [Impact Observatory](http://impactobservatory.com/), who used billions of human-labeled pixels (curated by the National Geographic Society) to train a deep learning model for land classification. The global map was produced by applying this model to the Sentinel-2 annual scene collections on the Planetary Computer. Each of the maps has an assessed average accuracy of over 75%.\n\nThis map uses an updated model from the [10-class model](https://planetarycomputer.microsoft.com/dataset/io-lulc) and combines Grass (formerly class 3) and Scrub (formerly class 6) into a single Rangeland class (class 11). The original Esri 2020 Land Cover collection uses 10 classes (Grass and Scrub separate) and an older version of the underlying deep learning model. The Esri 2020 Land Cover map was also produced by Impact Observatory. The map remains available for use in existing applications. New applications should use the updated version of 2020 once it is available in this collection, especially when using data from multiple years of this time series, to ensure consistent classification.\n\nAll years are available under a Creative Commons BY-4.0 license.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "global,io-lulc-9-class,land-cover,land-use,sentinel", "license": "CC-BY-4.0", "title": "10m Annual Land Use Land Cover (9-class)", "missionStartDate": "2017-01-01T00:00:00Z"}, "io-biodiversity": {"abstract": "Generated by [Impact Observatory](https://www.impactobservatory.com/), in collaboration with [Vizzuality](https://www.vizzuality.com/), these datasets estimate terrestrial Biodiversity Intactness as 100-meter gridded maps for the years 2017-2020.\n\nMaps depicting the intactness of global biodiversity have become a critical tool for spatial planning and management, monitoring the extent of biodiversity across Earth, and identifying critical remaining intact habitat. Yet, these maps are often years out of date by the time they are available to scientists and policy-makers. The datasets in this STAC Collection build on past studies that map Biodiversity Intactness using the [PREDICTS database](https://onlinelibrary.wiley.com/doi/full/10.1002/ece3.2579) of spatially referenced observations of biodiversity across 32,000 sites from over 750 studies. The approach differs from previous work by modeling the relationship between observed biodiversity metrics and contemporary, global, geospatial layers of human pressures, with the intention of providing a high-resolution monitoring product into the future.\n\nBiodiversity intactness is estimated as a combination of two metrics: Abundance, the quantity of individuals, and Compositional Similarity, how similar the composition of species is to an intact baseline. Linear mixed effects models are fit to estimate the predictive capacity of spatial datasets of human pressures on each of these metrics and project results spatially across the globe. 
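A hypothetical sketch of a model of this kind (illustrative only; the column names stand in for the PREDICTS site observations and human-pressure covariates, not the project's actual schema):\n\n```python\nimport pandas as pd\nimport statsmodels.formula.api as smf\n\n# Hypothetical input table: per-site biodiversity observations joined with\n# gridded human-pressure covariates (all column names are made up here).\ndf = pd.read_csv(\"predicts_sites.csv\")\nmodel = smf.mixedlm(\n    \"abundance ~ human_density + land_use_intensity\",  # fixed effects: pressures\n    data=df,\n    groups=df[\"study_id\"],  # random intercept per source study\n)\nprint(model.fit().summary())\n```\n\n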
These methods, as well as comparisons to other leading datasets and guidance on interpreting results, are further explained in a methods [white paper](https://ai4edatasetspublicassets.blob.core.windows.net/assets/pdfs/io-biodiversity/Biodiversity_Intactness_whitepaper.pdf) entitled \u201cGlobal 100m Projections of Biodiversity Intactness for the years 2017-2020.\u201d\n\nAll years are available under a Creative Commons BY-4.0 license.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "biodiversity,global,io-biodiversity", "license": "CC-BY-4.0", "title": "Biodiversity Intactness", "missionStartDate": "2017-01-01T00:00:00Z"}, "naip": {"abstract": "The [National Agriculture Imagery Program](https://www.fsa.usda.gov/programs-and-services/aerial-photography/imagery-programs/naip-imagery/) (NAIP) provides U.S.-wide, high-resolution aerial imagery, with four spectral bands (R, G, B, IR). NAIP is administered by the [Aerial Field Photography Office](https://www.fsa.usda.gov/programs-and-services/aerial-photography/) (AFPO) within the [US Department of Agriculture](https://www.usda.gov/) (USDA). Data are captured at least once every three years for each state. This dataset represents NAIP data from 2010-present, in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "aerial,afpo,agriculture,imagery,naip,united-states,usda", "license": "proprietary", "title": "NAIP: National Agriculture Imagery Program", "missionStartDate": "2010-01-01T00:00:00Z"}, "noaa-cdr-sea-surface-temperature-whoi": {"abstract": "The Sea Surface Temperature-Woods Hole Oceanographic Institution (WHOI) Climate Data Record (CDR) is one of three CDRs which combine to form the NOAA Ocean Surface Bundle (OSB) CDR. The resultant sea surface temperature (SST) data are produced through modeling the diurnal variability in combination with AVHRR SST observations. The final record is output to a 3-hourly 0.25\u00b0 resolution grid over the global ice-free oceans from January 1988\u2014present.\n\nThese Cloud Optimized GeoTIFFs (COGs) were created from NetCDF files which are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\nFor the NetCDF files, see collection `noaa-cdr-sea-surface-temperature-whoi-netcdf`.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,global,noaa,noaa-cdr-sea-surface-temperature-whoi,ocean,temperature", "license": "proprietary", "title": "Sea Surface Temperature - WHOI CDR", "missionStartDate": "1988-01-01T00:00:00Z"}, "noaa-cdr-ocean-heat-content": {"abstract": "The Ocean Heat Content Climate Data Record (CDR) is a set of ocean heat content anomaly (OHCA) time-series for 1955-present on 3-monthly, yearly, and pentadal (five-yearly) scales. This CDR quantifies ocean heat content change over time, which is an essential metric for understanding climate change and the Earth's energy budget. 
It provides time-series for multiple depth ranges in the global ocean and each of the major basins (Atlantic, Pacific, and Indian) divided by hemisphere (Northern, Southern).\n\nThese Cloud Optimized GeoTIFFs (COGs) were created from NetCDF files which are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\nFor the NetCDF files, see collection `noaa-cdr-ocean-heat-content-netcdf`.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,global,noaa,noaa-cdr-ocean-heat-content,ocean,temperature", "license": "proprietary", "title": "Global Ocean Heat Content CDR", "missionStartDate": "1972-03-01T00:00:00Z"}, "cil-gdpcir-cc0": {"abstract": "The World Climate Research Programme's [6th Coupled Model Intercomparison Project (CMIP6)](https://www.wcrp-climate.org/wgcm-cmip/wgcm-cmip6) represents an enormous advance in the quality, detail, and scope of climate modeling.\n\nThe [Global Downscaled Projections for Climate Impacts Research](https://github.com/ClimateImpactLab/downscaleCMIP6) dataset makes this modeling more applicable to understanding the impacts of changes in the climate on humans and society with two key developments: trend-preserving bias correction and downscaling. In this dataset, the [Climate Impact Lab](https://impactlab.org) provides global, daily minimum and maximum air temperature at the surface (`tasmin` and `tasmax`) and daily cumulative surface precipitation (`pr`) corresponding to the CMIP6 historical, ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 scenarios for 25 global climate models on a 1/4-degree regular global grid.\n\n## Accessing the data\n\nGDPCIR data can be accessed on the Microsoft Planetary Computer. The dataset is made up of three collections, distinguished by data license:\n* [Public domain (CC0-1.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc0)\n* [Attribution (CC BY 4.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by)\n\nEach modeling center with bias corrected and downscaled data in this collection falls into one of these license categories - see the [table below](/dataset/cil-gdpcir-cc0#available-institutions-models-and-scenarios-by-license-collection) to see which model is in each collection, and see the section below on [Citing, Licensing, and using data produced by this project](/dataset/cil-gdpcir-cc0#citing-licensing-and-using-data-produced-by-this-project) for citations and additional information about each license.\n\n## Data format & contents\n\nThe data is stored as partitioned zarr stores (see [https://zarr.readthedocs.io](https://zarr.readthedocs.io)), each of which includes thousands of data and metadata files covering the full time span of the experiment. Historical zarr stores contain just over 50 GB, while SSP zarr stores contain nearly 70 GB. Values are stored as 32-bit floats, with dimensions time (daily datetime), lat (float latitude), and lon (float longitude). The data is chunked at each 365-day interval and each 90-degree interval of latitude and longitude. 
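For reference, on the 0.25-degree grid a 90-degree interval spans 360 cells, so a single chunk of four-byte values works out to\n\n$$365 \\times 360 \\times 360 \\times 4\\ \\text{bytes} = 189{,}216{,}000\\ \\text{bytes} \\approx 180\\ \\text{MiB}.$$\n\n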
Therefore, each chunk is `(365, 360, 360)`, with each chunk occupying approximately 180 MB in memory.\n\nHistorical data is daily, excluding leap days, from Jan 1, 1950 to Dec 31, 2014; SSP data is daily, excluding leap days, from Jan 1, 2015 to either Dec 31, 2099 or Dec 31, 2100, depending on data availability in the source GCM.\n\nThe spatial domain covers all 0.25-degree grid cells, indexed by the grid center, with grid edges on the quarter-degree, using a -180 to 180 longitude convention. Thus, the \u201clon\u201d coordinate extends from -179.875 to 179.875, and the \u201clat\u201d coordinate extends from -89.875 to 89.875, with intermediate values at each 0.25-degree increment between (e.g. -179.875, -179.625, -179.375, etc.).\n\n## Available institutions, models, and scenarios by license collection\n\n| Modeling institution | Source model | Available experiments | License collection |\n| -------------------- | ----------------- | ------------------------------------------ | ---------------------- |\n| CAS | FGOALS-g3 [^1] | SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| INM | INM-CM4-8 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| INM | INM-CM5-0 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| BCC | BCC-CSM2-MR | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| CMCC | CMCC-CM2-SR5 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0 |\n| CMCC | CMCC-ESM2 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0 |\n| CSIRO-ARCCSS | ACCESS-CM2 | SSP2-4.5 and SSP3-7.0 | CC-BY-4.0 |\n| CSIRO | ACCESS-ESM1-5 | SSP1-2.6, SSP2-4.5, and SSP3-7.0 | CC-BY-4.0 |\n| MIROC | MIROC-ES2L | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MIROC | MIROC6 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MOHC | HadGEM3-GC31-LL | SSP1-2.6, SSP2-4.5, and SSP5-8.5 | CC-BY-4.0 |\n| MOHC | UKESM1-0-LL | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MPI-M | MPI-ESM1-2-LR | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MPI-M/DKRZ [^2] | MPI-ESM1-2-HR | SSP1-2.6 and SSP5-8.5 | CC-BY-4.0 |\n| NCC | NorESM2-LM | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NCC | NorESM2-MM | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NOAA-GFDL | GFDL-CM4 | SSP2-4.5 and SSP5-8.5 | CC-BY-4.0 |\n| NOAA-GFDL | GFDL-ESM4 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NUIST | NESM3 | SSP1-2.6, SSP2-4.5, and SSP5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3 | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-AerChem | ssp370 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-CC | ssp245 and ssp585 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-Veg | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-Veg-LR | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| CCCma | CanESM5 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0[^3] |\n\n*Notes:*\n\n[^1]: At the time of running, no ssp1-2.6 precipitation data was available. Therefore, we provide `tasmin` and `tasmax` for this model and experiment, but not `pr`. All other model/experiment combinations in the above table include all three variables.\n\n[^2]: The institution which ran MPI-ESM1-2-HR\u2019s historical (CMIP) simulations is `MPI-M`, while the future (ScenarioMIP) simulations were run by `DKRZ`. 
## Project methods\n\nThis project makes use of statistical bias correction and downscaling algorithms, which are specifically designed to accurately represent changes in the extremes. For this reason, we selected Quantile Delta Mapping (QDM), following the method introduced by [Cannon et al. (2015)](https://doi.org/10.1175/JCLI-D-14-00754.1), which preserves quantile-specific trends from the GCM while fitting the full distribution for a given day-of-year to a reference dataset (ERA5).\n\nWe then introduce a similar method tailored to increase spatial resolution while preserving extreme behavior, Quantile-Preserving Localized-Analog Downscaling (QPLAD).\n\nTogether, these methods provide a robust means to handle both the central and tail behavior seen in climate model output, while aligning the full distribution to a state-of-the-art reanalysis dataset and providing the spatial granularity needed to study surface impacts.\n\nFor further documentation, see [Global downscaled projections for climate impacts research (GDPCIR): preserving extremes for modeling future climate impacts](https://egusphere.copernicus.org/preprints/2023/egusphere-2022-1513/) (EGUsphere, 2022 [preprint]).\n\n
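To make the QDM idea concrete, here is a deliberately simplified sketch on synthetic data. It ignores the day-of-year grouping and rolling windows the real pipeline uses, and it is not the project's xclim-based implementation; the variable names and toy distributions are assumptions for illustration only.

```python
import numpy as np

def qdm(obs, mod_hist, mod_fut, kind="+"):
    """Toy Quantile Delta Mapping (after Cannon et al., 2015)."""
    # Non-exceedance probability of each projected value under the
    # future model's empirical CDF (plotting-position ranks).
    tau = (np.argsort(np.argsort(mod_fut)) + 0.5) / mod_fut.size
    hist_q = np.quantile(mod_hist, tau)  # same quantiles, historical model
    obs_q = np.quantile(obs, tau)        # same quantiles, reference (ERA5)
    if kind == "*":                      # relative deltas, e.g. precipitation
        return obs_q * (mod_fut / hist_q)
    return obs_q + (mod_fut - hist_q)    # additive deltas, e.g. temperature

rng = np.random.default_rng(0)
obs = rng.normal(15.0, 3.0, 10_000)       # synthetic reference climate
mod_hist = rng.normal(17.0, 4.0, 10_000)  # biased historical GCM
mod_fut = rng.normal(20.0, 4.0, 10_000)   # same bias plus a +3 trend
corrected = qdm(obs, mod_hist, mod_fut)
# The +3 quantile trend survives even though the bias is removed:
print(f"{corrected.mean() - obs.mean():+.1f}")  # ~ +3.0
```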
## Citing, licensing, and using data produced by this project\n\nProjects making use of the data produced as part of the Climate Impact Lab Global Downscaled Projections for Climate Impacts Research (CIL GDPCIR) project are requested to cite both this project and the source datasets from which these results are derived. Additionally, the use of data derived from some GCMs *requires* citations, and some modeling centers impose licensing restrictions & requirements on derived works. See each GCM's license info in the links below for more information.\n\n### CIL GDPCIR\n\nUsers are requested to cite this project in derived works. Our method documentation paper may be cited using the following:\n\n> Gergel, D. R., Malevich, S. B., McCusker, K. E., Tenezakis, E., Delgado, M. T., Fish, M. A., and Kopp, R. E.: Global downscaled projections for climate impacts research (GDPCIR): preserving extremes for modeling future climate impacts, EGUsphere [preprint], https://doi.org/10.5194/egusphere-2022-1513, 2023.\n\nThe code repository may be cited using the following:\n\n> Diana Gergel, Kelly McCusker, Brewster Malevich, Emile Tenezakis, Meredith Fish, Michael Delgado (2022). ClimateImpactLab/downscaleCMIP6: (v1.0.0). Zenodo. https://doi.org/10.5281/zenodo.6403794\n\n### ERA5\n\nAdditionally, we request you cite the historical dataset used in bias correction and downscaling, ERA5. See the [ECMWF guide to citing a dataset on the Climate Data Store](https://confluence.ecmwf.int/display/CKB/How+to+acknowledge+and+cite+a+Climate+Data+Store+%28CDS%29+catalogue+entry+and+the+data+published+as+part+of+it):\n\n> Hersbach, H., et al. The ERA5 global reanalysis. Q J R Meteorol Soc. 2020; 146: 1999\u20132049. DOI: [10.1002/qj.3803](https://doi.org/10.1002/qj.3803)\n>\n> Mu\u00f1oz Sabater, J., (2019): ERA5-Land hourly data from 1981 to present. Copernicus Climate Change Service (C3S) Climate Data Store (CDS). (Accessed on June 4, 2021), DOI: [10.24381/cds.e2161bac](https://doi.org/10.24381/cds.e2161bac)\n>\n> Mu\u00f1oz Sabater, J., (2021): ERA5-Land hourly data from 1950 to 1980. Copernicus Climate Change Service (C3S) Climate Data Store (CDS). (Accessed on June 4, 2021), DOI: [10.24381/cds.e2161bac](https://doi.org/10.24381/cds.e2161bac)\n\n### GCM-specific citations & licenses\n\nThe CMIP6 simulation data made available through the Earth System Grid Federation (ESGF) are subject to Creative Commons [BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/) or [BY-NC-SA 4.0](https://creativecommons.org/licenses/by-nc-sa/4.0/) licenses. The Climate Impact Lab has reached out to each of the modeling institutions to request waivers from these terms so the outputs of this project may be used with fewer restrictions, and has been granted permission to release the data using the licenses listed here.\n\n#### Public Domain Datasets\n\nThe following bias corrected and downscaled model simulations are available in the public domain using a [CC0 1.0 Universal Public Domain Declaration](https://creativecommons.org/publicdomain/zero/1.0/). Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc0.\n\n* **FGOALS-g3**\n\n License description: [data_licenses/FGOALS-g3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/FGOALS-g3.txt)\n\n CMIP Citation:\n\n > Li, Lijuan **(2019)**. *CAS FGOALS-g3 model output prepared for CMIP6 CMIP*. Version 20190826. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1783\n\n ScenarioMIP Citation:\n\n > Li, Lijuan **(2019)**. *CAS FGOALS-g3 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190818; SSP2-4.5 version 20190818; SSP3-7.0 version 20190820; SSP5-8.5 tasmax version 20190819; SSP5-8.5 tasmin version 20190819; SSP5-8.5 pr version 20190818. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2056\n\n\n* **INM-CM4-8**\n\n License description: [data_licenses/INM-CM4-8.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/INM-CM4-8.txt)\n\n CMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM4-8 model output prepared for CMIP6 CMIP*. Version 20190530. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1422\n\n ScenarioMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM4-8 model output prepared for CMIP6 ScenarioMIP*. Version 20190603. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.12321\n\n\n* **INM-CM5-0**\n\n License description: [data_licenses/INM-CM5-0.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/INM-CM5-0.txt)\n\n CMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM5-0 model output prepared for CMIP6 CMIP*. Version 20190610. Earth System Grid Federation.
https://doi.org/10.22033/ESGF/CMIP6.1423\n\n ScenarioMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM5-0 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190619; SSP2-4.5 version 20190619; SSP3-7.0 version 20190618; SSP5-8.5 version 20190724. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.12322\n\n\n#### CC-BY-4.0\n\nThe following bias corrected and downscaled model simulations are licensed under a [Creative Commons Attribution 4.0 International License](https://creativecommons.org/licenses/by/4.0/). Note that this license requires citation of the source model output (included here). Please see https://creativecommons.org/licenses/by/4.0/ for more information. Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by.\n\n* **ACCESS-CM2**\n\n License description: [data_licenses/ACCESS-CM2.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/ACCESS-CM2.txt)\n\n CMIP Citation:\n\n > Dix, Martin; Bi, Doahua; Dobrohotoff, Peter; Fiedler, Russell; Harman, Ian; Law, Rachel; Mackallah, Chloe; Marsland, Simon; O'Farrell, Siobhan; Rashid, Harun; Srbinovsky, Jhan; Sullivan, Arnold; Trenham, Claire; Vohralik, Peter; Watterson, Ian; Williams, Gareth; Woodhouse, Matthew; Bodman, Roger; Dias, Fabio Boeira; Domingues, Catia; Hannah, Nicholas; Heerdegen, Aidan; Savita, Abhishek; Wales, Scott; Allen, Chris; Druken, Kelsey; Evans, Ben; Richards, Clare; Ridzwan, Syazwan Mohamed; Roberts, Dale; Smillie, Jon; Snow, Kate; Ward, Marshall; Yang, Rui **(2019)**. *CSIRO-ARCCSS ACCESS-CM2 model output prepared for CMIP6 CMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2281\n\n ScenarioMIP Citation:\n\n > Dix, Martin; Bi, Doahua; Dobrohotoff, Peter; Fiedler, Russell; Harman, Ian; Law, Rachel; Mackallah, Chloe; Marsland, Simon; O'Farrell, Siobhan; Rashid, Harun; Srbinovsky, Jhan; Sullivan, Arnold; Trenham, Claire; Vohralik, Peter; Watterson, Ian; Williams, Gareth; Woodhouse, Matthew; Bodman, Roger; Dias, Fabio Boeira; Domingues, Catia; Hannah, Nicholas; Heerdegen, Aidan; Savita, Abhishek; Wales, Scott; Allen, Chris; Druken, Kelsey; Evans, Ben; Richards, Clare; Ridzwan, Syazwan Mohamed; Roberts, Dale; Smillie, Jon; Snow, Kate; Ward, Marshall; Yang, Rui **(2019)**. *CSIRO-ARCCSS ACCESS-CM2 model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2285\n\n\n* **ACCESS-ESM1-5**\n\n License description: [data_licenses/ACCESS-ESM1-5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/ACCESS-ESM1-5.txt)\n\n CMIP Citation:\n\n > Ziehn, Tilo; Chamberlain, Matthew; Lenton, Andrew; Law, Rachel; Bodman, Roger; Dix, Martin; Wang, Yingping; Dobrohotoff, Peter; Srbinovsky, Jhan; Stevens, Lauren; Vohralik, Peter; Mackallah, Chloe; Sullivan, Arnold; O'Farrell, Siobhan; Druken, Kelsey **(2019)**. *CSIRO ACCESS-ESM1.5 model output prepared for CMIP6 CMIP*. Version 20191115. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.2288\n\n ScenarioMIP Citation:\n\n > Ziehn, Tilo; Chamberlain, Matthew; Lenton, Andrew; Law, Rachel; Bodman, Roger; Dix, Martin; Wang, Yingping; Dobrohotoff, Peter; Srbinovsky, Jhan; Stevens, Lauren; Vohralik, Peter; Mackallah, Chloe; Sullivan, Arnold; O'Farrell, Siobhan; Druken, Kelsey **(2019)**. *CSIRO ACCESS-ESM1.5 model output prepared for CMIP6 ScenarioMIP*. Version 20191115. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2291\n\n\n* **BCC-CSM2-MR**\n\n License description: [data_licenses/BCC-CSM2-MR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/BCC-CSM2-MR.txt)\n\n CMIP Citation:\n\n > Xin, Xiaoge; Zhang, Jie; Zhang, Fang; Wu, Tongwen; Shi, Xueli; Li, Jianglong; Chu, Min; Liu, Qianxia; Yan, Jinghui; Ma, Qiang; Wei, Min **(2018)**. *BCC BCC-CSM2MR model output prepared for CMIP6 CMIP*. Version 20181126. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1725\n\n ScenarioMIP Citation:\n\n > Xin, Xiaoge; Wu, Tongwen; Shi, Xueli; Zhang, Fang; Li, Jianglong; Chu, Min; Liu, Qianxia; Yan, Jinghui; Ma, Qiang; Wei, Min **(2019)**. *BCC BCC-CSM2MR model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190315; SSP2-4.5 version 20190318; SSP3-7.0 version 20190318; SSP5-8.5 version 20190318. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1732\n\n\n* **CMCC-CM2-SR5**\n\n License description: [data_licenses/CMCC-CM2-SR5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CMCC-CM2-SR5.txt)\n\n CMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele **(2020)**. *CMCC CMCC-CM2-SR5 model output prepared for CMIP6 CMIP*. Version 20200616. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1362\n\n ScenarioMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele **(2020)**. *CMCC CMCC-CM2-SR5 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20200717; SSP2-4.5 version 20200617; SSP3-7.0 version 20200622; SSP5-8.5 version 20200622. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1365\n\n\n* **CMCC-ESM2**\n\n License description: [data_licenses/CMCC-ESM2.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CMCC-ESM2.txt)\n\n CMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele; Butensch\u00f6n, Momme **(2021)**. *CMCC CMCC-ESM2 model output prepared for CMIP6 CMIP*. Version 20210114. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.13164\n\n ScenarioMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele; Butensch\u00f6n, Momme **(2021)**. *CMCC CMCC-ESM2 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20210126; SSP2-4.5 version 20210129; SSP3-7.0 version 20210202; SSP5-8.5 version 20210126. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.13168\n\n\n* **EC-Earth3-AerChem**\n\n License description: [data_licenses/EC-Earth3-AerChem.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-AerChem.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-AerChem model output prepared for CMIP6 CMIP*. Version 20200624. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.639\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-AerChem model output prepared for CMIP6 ScenarioMIP*. Version 20200827. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.724\n\n\n* **EC-Earth3-CC**\n\n License description: [data_licenses/EC-Earth3-CC.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-CC.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth-3-CC model output prepared for CMIP6 CMIP*. Version 20210113. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.640\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2021)**. *EC-Earth-Consortium EC-Earth3-CC model output prepared for CMIP6 ScenarioMIP*. Version 20210113. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.15327\n\n\n* **EC-Earth3-Veg-LR**\n\n License description: [data_licenses/EC-Earth3-Veg-LR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-Veg-LR.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-Veg-LR model output prepared for CMIP6 CMIP*. Version 20200217. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.643\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-Veg-LR model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20201201; SSP2-4.5 version 20201123; SSP3-7.0 version 20201123; SSP5-8.5 version 20201201. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.728\n\n\n* **EC-Earth3-Veg**\n\n License description: [data_licenses/EC-Earth3-Veg.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-Veg.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3-Veg model output prepared for CMIP6 CMIP*. Version 20200225. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.642\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3-Veg model output prepared for CMIP6 ScenarioMIP*. Version 20200225. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.727\n\n\n* **EC-Earth3**\n\n License description: [data_licenses/EC-Earth3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3 model output prepared for CMIP6 CMIP*. Version 20200310. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.181\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3 model output prepared for CMIP6 ScenarioMIP*. Version 20200310. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.251\n\n\n* **GFDL-CM4**\n\n License description: [data_licenses/GFDL-CM4.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/GFDL-CM4.txt)\n\n CMIP Citation:\n\n > Guo, Huan; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Bushuk, Mitchell; Dunne, Krista A.; Dussin, Raphael; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Reichl, Brandon G; Schwarzkopf, Daniel M; Seman, Charles J; Shao, Andrew; Silvers, Levi; Wyman, Bruce; Yan, Xiaoqin; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Held, Isaac M; Krasting, John P.; Horowitz, Larry W.; Milly, P.C.D; Shevliakova, Elena; Winton, Michael; Zhao, Ming; Zhang, Rong **(2018)**. *NOAA-GFDL GFDL-CM4 model output*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1402\n\n ScenarioMIP Citation:\n\n > Guo, Huan; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Dunne, Krista A.; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Schwarzkopf, Daniel M; Seman, Charles J; Shao, Andrew; Silvers, Levi; Wyman, Bruce; Yan, Xiaoqin; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Held, Isaac M; Krasting, John P.; Horowitz, Larry W.; Milly, Chris; Shevliakova, Elena; Winton, Michael; Zhao, Ming; Zhang, Rong **(2018)**. *NOAA-GFDL GFDL-CM4 model output prepared for CMIP6 ScenarioMIP*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.9242\n\n\n* **GFDL-ESM4**\n\n License description: [data_licenses/GFDL-ESM4.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/GFDL-ESM4.txt)\n\n CMIP Citation:\n\n > Krasting, John P.; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Dunne, Krista A.; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Reichl, Brandon G; Schwarzkopf, Daniel M; Seman, Charles J; Silvers, Levi; Wyman, Bruce; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Dussin, Raphael; Guo, Huan; He, Jian; Held, Isaac M; Horowitz, Larry W.; Lin, Pu; Milly, P.C.D; Shevliakova, Elena; Stock, Charles; Winton, Michael; Wittenberg, Andrew T.; Xie, Yuanyu; Zhao, Ming **(2018)**. *NOAA-GFDL GFDL-ESM4 model output prepared for CMIP6 CMIP*. Version 20190726. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1407\n\n ScenarioMIP Citation:\n\n > John, Jasmin G; Blanton, Chris; McHugh, Colleen; Radhakrishnan, Aparna; Rand, Kristopher; Vahlenkamp, Hans; Wilson, Chandin; Zadeh, Niki T.; Dunne, John P.; Dussin, Raphael; Horowitz, Larry W.; Krasting, John P.; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Ploshay, Jeffrey; Shevliakova, Elena; Silvers, Levi; Stock, Charles; Winton, Michael; Zeng, Yujin **(2018)**. *NOAA-GFDL GFDL-ESM4 model output prepared for CMIP6 ScenarioMIP*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1414\n\n\n* **HadGEM3-GC31-LL**\n\n License description: [data_licenses/HadGEM3-GC31-LL.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/HadGEM3-GC31-LL.txt)\n\n CMIP Citation:\n\n > Ridley, Jeff; Menary, Matthew; Kuhlbrodt, Till; Andrews, Martin; Andrews, Tim **(2018)**. *MOHC HadGEM3-GC31-LL model output prepared for CMIP6 CMIP*. Version 20190624. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.419\n\n ScenarioMIP Citation:\n\n > Good, Peter **(2019)**. *MOHC HadGEM3-GC31-LL model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20200114; SSP2-4.5 version 20190908; SSP5-8.5 version 20200114. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.10845\n\n\n* **MIROC-ES2L**\n\n License description: [data_licenses/MIROC-ES2L.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MIROC-ES2L.txt)\n\n CMIP Citation:\n\n > Hajima, Tomohiro; Abe, Manabu; Arakawa, Osamu; Suzuki, Tatsuo; Komuro, Yoshiki; Ogura, Tomoo; Ogochi, Koji; Watanabe, Michio; Yamamoto, Akitomo; Tatebe, Hiroaki; Noguchi, Maki A.; Ohgaito, Rumi; Ito, Akinori; Yamazaki, Dai; Ito, Akihiko; Takata, Kumiko; Watanabe, Shingo; Kawamiya, Michio; Tachiiri, Kaoru **(2019)**. *MIROC MIROC-ES2L model output prepared for CMIP6 CMIP*. Version 20191129. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.902\n\n ScenarioMIP Citation:\n\n > Tachiiri, Kaoru; Abe, Manabu; Hajima, Tomohiro; Arakawa, Osamu; Suzuki, Tatsuo; Komuro, Yoshiki; Ogochi, Koji; Watanabe, Michio; Yamamoto, Akitomo; Tatebe, Hiroaki; Noguchi, Maki A.; Ohgaito, Rumi; Ito, Akinori; Yamazaki, Dai; Ito, Akihiko; Takata, Kumiko; Watanabe, Shingo; Kawamiya, Michio **(2019)**. *MIROC MIROC-ES2L model output prepared for CMIP6 ScenarioMIP*. Version 20200318. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.936\n\n\n* **MIROC6**\n\n License description: [data_licenses/MIROC6.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MIROC6.txt)\n\n CMIP Citation:\n\n > Tatebe, Hiroaki; Watanabe, Masahiro **(2018)**. *MIROC MIROC6 model output prepared for CMIP6 CMIP*. Version 20191016. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.881\n\n ScenarioMIP Citation:\n\n > Shiogama, Hideo; Abe, Manabu; Tatebe, Hiroaki **(2019)**. *MIROC MIROC6 model output prepared for CMIP6 ScenarioMIP*. Version 20191016. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.898\n\n\n* **MPI-ESM1-2-HR**\n\n License description: [data_licenses/MPI-ESM1-2-HR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MPI-ESM1-2-HR.txt)\n\n CMIP Citation:\n\n > Jungclaus, Johann; Bittner, Matthias; Wieners, Karl-Hermann; Wachsmann, Fabian; Schupfner, Martin; Legutke, Stephanie; Giorgetta, Marco; Reick, Christian; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Esch, Monika; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-HR model output prepared for CMIP6 CMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.741\n\n ScenarioMIP Citation:\n\n > Schupfner, Martin; Wieners, Karl-Hermann; Wachsmann, Fabian; Steger, Christian; Bittner, Matthias; Jungclaus, Johann; Fr\u00fch, Barbara; Pankatz, Klaus; Giorgetta, Marco; Reick, Christian; Legutke, Stephanie; Esch, Monika; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *DKRZ MPI-ESM1.2-HR model output prepared for CMIP6 ScenarioMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2450\n\n\n* **MPI-ESM1-2-LR**\n\n License description: [data_licenses/MPI-ESM1-2-LR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MPI-ESM1-2-LR.txt)\n\n CMIP Citation:\n\n > Wieners, Karl-Hermann; Giorgetta, Marco; Jungclaus, Johann; Reick, Christian; Esch, Monika; Bittner, Matthias; Legutke, Stephanie; Schupfner, Martin; Wachsmann, Fabian; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-LR model output prepared for CMIP6 CMIP*. Version 20190710. 
Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.742\n\n ScenarioMIP Citation:\n\n > Wieners, Karl-Hermann; Giorgetta, Marco; Jungclaus, Johann; Reick, Christian; Esch, Monika; Bittner, Matthias; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-LR model output prepared for CMIP6 ScenarioMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.793\n\n\n* **NESM3**\n\n License description: [data_licenses/NESM3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NESM3.txt)\n\n CMIP Citation:\n\n > Cao, Jian; Wang, Bin **(2019)**. *NUIST NESMv3 model output prepared for CMIP6 CMIP*. Version 20190812. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2021\n\n ScenarioMIP Citation:\n\n > Cao, Jian **(2019)**. *NUIST NESMv3 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190806; SSP2-4.5 version 20190805; SSP5-8.5 version 20190811. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2027\n\n\n* **NorESM2-LM**\n\n License description: [data_licenses/NorESM2-LM.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NorESM2-LM.txt)\n\n CMIP Citation:\n\n > Seland, \u00d8yvind; Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-LM model output prepared for CMIP6 CMIP*. Version 20190815. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.502\n\n ScenarioMIP Citation:\n\n > Seland, \u00d8yvind; Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-LM model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.604\n\n\n* **NorESM2-MM**\n\n License description: [data_licenses/NorESM2-MM.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NorESM2-MM.txt)\n\n CMIP Citation:\n\n > Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Seland, \u00d8yvind; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-MM model output prepared for CMIP6 CMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.506\n\n ScenarioMIP Citation:\n\n > Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Seland, \u00d8yvind; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-MM model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.608\n\n\n* **UKESM1-0-LL**\n\n License description: [data_licenses/UKESM1-0-LL.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/UKESM1-0-LL.txt)\n\n CMIP Citation:\n\n > Tang, Yongming; Rumbold, Steve; Ellis, Rich; Kelley, Douglas; Mulcahy, Jane; Sellar, Alistair; Walton, Jeremy; Jones, Colin **(2019)**. *MOHC UKESM1.0-LL model output prepared for CMIP6 CMIP*. Version 20190627. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1569\n\n ScenarioMIP Citation:\n\n > Good, Peter; Sellar, Alistair; Tang, Yongming; Rumbold, Steve; Ellis, Rich; Kelley, Douglas; Kuhlbrodt, Till; Walton, Jeremy **(2019)**. *MOHC UKESM1.0-LL model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190708; SSP2-4.5 version 20190715; SSP3-7.0 version 20190726; SSP5-8.5 version 20190726. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1567\n\n\n* **CanESM5**\n\n License description: [data_licenses/CanESM5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CanESM5.txt). Note: this dataset was previously licensed\n under CC BY-SA 4.0, but was relicensed as CC BY 4.0 in March, 2023.\n\n CMIP Citation:\n\n > Swart, Neil Cameron; Cole, Jason N.S.; Kharin, Viatcheslav V.; Lazare, Mike; Scinocca, John F.; Gillett, Nathan P.; Anstey, James; Arora, Vivek; Christian, James R.; Jiao, Yanjun; Lee, Warren G.; Majaess, Fouad; Saenko, Oleg A.; Seiler, Christian; Seinen, Clint; Shao, Andrew; Solheim, Larry; von Salzen, Knut; Yang, Duo; Winter, Barbara; Sigmond, Michael **(2019)**. *CCCma CanESM5 model output prepared for CMIP6 CMIP*. Version 20190429. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1303\n\n ScenarioMIP Citation:\n\n > Swart, Neil Cameron; Cole, Jason N.S.; Kharin, Viatcheslav V.; Lazare, Mike; Scinocca, John F.; Gillett, Nathan P.; Anstey, James; Arora, Vivek; Christian, James R.; Jiao, Yanjun; Lee, Warren G.; Majaess, Fouad; Saenko, Oleg A.; Seiler, Christian; Seinen, Clint; Shao, Andrew; Solheim, Larry; von Salzen, Knut; Yang, Duo; Winter, Barbara; Sigmond, Michael **(2019)**. *CCCma CanESM5 model output prepared for CMIP6 ScenarioMIP*. Version 20190429. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1317\n\n## Acknowledgements\n\nThis work is the result of many years' worth of work by members of the [Climate Impact Lab](https://impactlab.org), but would not have been possible without many contributions from across the wider scientific and computing communities.\n\nSpecifically, we would like to acknowledge the World Climate Research Programme's Working Group on Coupled Modeling, which is responsible for CMIP, and we would like to thank the climate modeling groups for producing and making their model output available. We would particularly like to thank the modeling institutions whose results are included as an input to this repository (listed above) for their contributions to the CMIP6 project and for responding to and granting our requests for license waivers.\n\nWe would also like to thank Lamont-Doherty Earth Observatory, the [Pangeo Consortium](https://github.com/pangeo-data) (and especially the [ESGF Cloud Data Working Group](https://pangeo-data.github.io/pangeo-cmip6-cloud/#)), Google Cloud, and the Google Public Datasets program for making the [CMIP6 Google Cloud collection](https://console.cloud.google.com/marketplace/details/noaa-public/cmip6) possible. In particular, we're extremely grateful to [Ryan Abernathey](https://github.com/rabernat), [Naomi Henderson](https://github.com/naomi-henderson), [Charles Blackmon-Luca](https://github.com/charlesbluca), [Aparna Radhakrishnan](https://github.com/aradhakrishnanGFDL), [Julius Busecke](https://github.com/jbusecke), and [Charles Stern](https://github.com/cisaacstern) for the huge amount of work they've done to translate the ESGF CMIP6 netCDF archives into consistently formatted, analysis-ready zarr stores on Google Cloud.\n\nWe're also grateful to the [xclim developers](https://github.com/Ouranosinc/xclim/graphs/contributors) ([DOI: 10.5281/zenodo.2795043](https://doi.org/10.5281/zenodo.2795043)), in particular [Pascal Bourgault](https://github.com/aulemahal), [David Huard](https://github.com/huard), and [Travis Logan](https://github.com/tlogan2000), for implementing the QDM bias correction method in the xclim python package, supporting the integration of our QPLAD implementation into the package, and ongoing support in integrating dask into downscaling workflows.
For method advice and useful conversations, we would like to thank Keith Dixon, Dennis Adams-Smith, and [Joe Hamman](https://github.com/jhamman).\n\n## Financial support\n\nThis research has been supported by The Rockefeller Foundation and the Microsoft AI for Earth Initiative.\n\n## Additional links\n\n* CIL GDPCIR project homepage: [github.com/ClimateImpactLab/downscaleCMIP6](https://github.com/ClimateImpactLab/downscaleCMIP6)\n* Project listing on Zenodo: https://doi.org/10.5281/zenodo.6403794\n* Climate Impact Lab homepage: [impactlab.org](https://impactlab.org)", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "cil-gdpcir-cc0,climate-impact-lab,cmip6,precipitation,rhodium-group,temperature", "license": "CC0-1.0", "title": "CIL Global Downscaled Projections for Climate Impacts Research (CC0-1.0)", "missionStartDate": "1950-01-01T00:00:00Z"}, "cil-gdpcir-cc-by": {"abstract": "The World Climate Research Programme's [6th Coupled Model Intercomparison Project (CMIP6)](https://www.wcrp-climate.org/wgcm-cmip/wgcm-cmip6) represents an enormous advance in the quality, detail, and scope of climate modeling.\n\nThe [Global Downscaled Projections for Climate Impacts Research](https://github.com/ClimateImpactLab/downscaleCMIP6) dataset makes this modeling more applicable to understanding the impacts of climate change on humans and society with two key developments: trend-preserving bias correction and downscaling. In this dataset, the [Climate Impact Lab](https://impactlab.org) provides global, daily minimum and maximum air temperature at the surface (`tasmin` and `tasmax`) and daily cumulative surface precipitation (`pr`) corresponding to the CMIP6 historical, ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 scenarios for 25 global climate models on a 1/4-degree regular global grid.\n\n## Accessing the data\n\nGDPCIR data can be accessed on the Microsoft Planetary Computer. The dataset is made up of two collections, distinguished by data license:\n* [Public domain (CC0-1.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc0)\n* [Attribution (CC BY 4.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by)\n\nEach modeling center with bias corrected and downscaled data in this collection falls into one of these license categories - see the [table below](/dataset/cil-gdpcir-cc-by#available-institutions-models-and-scenarios-by-license-collection) to see which model is in each collection, and see the section below on [Citing, Licensing, and using data produced by this project](/dataset/cil-gdpcir-cc-by#citing-licensing-and-using-data-produced-by-this-project) for citations and additional information about each license.\n\n## Data format & contents\n\nThe data is stored as partitioned zarr stores (see [https://zarr.readthedocs.io](https://zarr.readthedocs.io)), each of which includes thousands of data and metadata files covering the full time span of the experiment. Historical zarr stores contain just over 50 GB, while SSP zarr stores contain nearly 70 GB. Each variable is stored as a 32-bit float, with dimensions time (daily datetime), lat (float latitude), and lon (float longitude). The data is chunked at each interval of 365 days and each 90-degree interval of latitude and longitude.
Therefore, each chunk is `(365, 360, 360)`, occupying approximately 180 MB in memory.\n\nHistorical data is daily, excluding leap days, from Jan 1, 1950 to Dec 31, 2014; SSP data is daily, excluding leap days, from Jan 1, 2015 to either Dec 31, 2099 or Dec 31, 2100, depending on data availability in the source GCM.\n\nThe spatial domain covers all 0.25-degree grid cells, indexed by the grid center, with grid edges on the quarter-degree, using a -180 to 180 longitude convention. Thus, the \u201clon\u201d coordinate extends from -179.875 to 179.875, and the \u201clat\u201d coordinate extends from -89.875 to 89.875, with intermediate values at each 0.25-degree increment between (e.g. -179.875, -179.625, -179.375, etc.).\n\n## Available institutions, models, and scenarios by license collection\n\n| Modeling institution | Source model      | Available experiments                      | License collection     |\n| -------------------- | ----------------- | ------------------------------------------ | ---------------------- |\n| CAS                  | FGOALS-g3 [^1]    | SSP2-4.5, SSP3-7.0, and SSP5-8.5           | Public domain datasets |\n| INM                  | INM-CM4-8         | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| INM                  | INM-CM5-0         | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| BCC                  | BCC-CSM2-MR       | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| CMCC                 | CMCC-CM2-SR5      | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| CMCC                 | CMCC-ESM2         | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| CSIRO-ARCCSS         | ACCESS-CM2        | SSP2-4.5 and SSP3-7.0                      | CC-BY-4.0              |\n| CSIRO                | ACCESS-ESM1-5     | SSP1-2.6, SSP2-4.5, and SSP3-7.0           | CC-BY-4.0              |\n| MIROC                | MIROC-ES2L        | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| MIROC                | MIROC6            | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| MOHC                 | HadGEM3-GC31-LL   | SSP1-2.6, SSP2-4.5, and SSP5-8.5           | CC-BY-4.0              |\n| MOHC                 | UKESM1-0-LL       | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| MPI-M                | MPI-ESM1-2-LR     | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| MPI-M/DKRZ [^2]      | MPI-ESM1-2-HR     | SSP1-2.6 and SSP5-8.5                      | CC-BY-4.0              |\n| NCC                  | NorESM2-LM        | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| NCC                  | NorESM2-MM        | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| NOAA-GFDL            | GFDL-CM4          | SSP2-4.5 and SSP5-8.5                      | CC-BY-4.0              |\n| NOAA-GFDL            | GFDL-ESM4         | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| NUIST                | NESM3             | SSP1-2.6, SSP2-4.5, and SSP5-8.5           | CC-BY-4.0              |\n| EC-Earth-Consortium  | EC-Earth3         | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| EC-Earth-Consortium  | EC-Earth3-AerChem | SSP3-7.0                                   | CC-BY-4.0              |\n| EC-Earth-Consortium  | EC-Earth3-CC      | SSP2-4.5 and SSP5-8.5                      | CC-BY-4.0              |\n| EC-Earth-Consortium  | EC-Earth3-Veg     | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| EC-Earth-Consortium  | EC-Earth3-Veg-LR  | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0              |\n| CCCma                | CanESM5           | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0[^3]          |\n\n*Notes:*\n\n[^1]: At the time of running, no ssp1-2.6 precipitation data was available. Therefore, we provide `tasmin` and `tasmax` for this model and experiment, but not `pr`. All other model/experiment combinations in the above table include all three variables.\n\n[^2]: The institution which ran MPI-ESM1-2-HR\u2019s historical (CMIP) simulations is `MPI-M`, while the future (ScenarioMIP) simulations were run by `DKRZ`.
Therefore, the institution component of `MPI-ESM1-2-HR` filepaths differs between `historical` and `SSP` scenarios.\n\n[^3]: This dataset was previously licensed as [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/), but was relicensed under [CC BY 4.0](https://creativecommons.org/licenses/by/4.0) in March, 2023.\n\n## Project methods\n\nThis project makes use of statistical bias correction and downscaling algorithms, which are specifically designed to accurately represent changes in the extremes. For this reason, we selected Quantile Delta Mapping (QDM), following the method introduced by [Cannon et al. (2015)](https://doi.org/10.1175/JCLI-D-14-00754.1), which preserves quantile-specific trends from the GCM while fitting the full distribution for a given day-of-year to a reference dataset (ERA5).\n\nWe then introduce a similar method tailored to increase spatial resolution while preserving extreme behavior, Quantile-Preserving Localized-Analog Downscaling (QPLAD).\n\nTogether, these methods provide a robust means to handle both the central and tail behavior seen in climate model output, while aligning the full distribution to a state-of-the-art reanalysis dataset and providing the spatial granularity needed to study surface impacts.\n\nFor further documentation, see [Global downscaled projections for climate impacts research (GDPCIR): preserving extremes for modeling future climate impacts](https://egusphere.copernicus.org/preprints/2023/egusphere-2022-1513/) (EGUsphere, 2022 [preprint]).\n\n## Citing, licensing, and using data produced by this project\n\nProjects making use of the data produced as part of the Climate Impact Lab Global Downscaled Projections for Climate Impacts Research (CIL GDPCIR) project are requested to cite both this project and the source datasets from which these results are derived. Additionally, the use of data derived from some GCMs *requires* citations, and some modeling centers impose licensing restrictions & requirements on derived works. See each GCM's license info in the links below for more information.\n\n### CIL GDPCIR\n\nUsers are requested to cite this project in derived works. Our method documentation paper may be cited using the following:\n\n> Gergel, D. R., Malevich, S. B., McCusker, K. E., Tenezakis, E., Delgado, M. T., Fish, M. A., and Kopp, R. E.: Global downscaled projections for climate impacts research (GDPCIR): preserving extremes for modeling future climate impacts, EGUsphere [preprint], https://doi.org/10.5194/egusphere-2022-1513, 2023.\n\nThe code repository may be cited using the following:\n\n> Diana Gergel, Kelly McCusker, Brewster Malevich, Emile Tenezakis, Meredith Fish, Michael Delgado (2022). ClimateImpactLab/downscaleCMIP6: (v1.0.0). Zenodo. https://doi.org/10.5281/zenodo.6403794\n\n### ERA5\n\nAdditionally, we request you cite the historical dataset used in bias correction and downscaling, ERA5. See the [ECMWF guide to citing a dataset on the Climate Data Store](https://confluence.ecmwf.int/display/CKB/How+to+acknowledge+and+cite+a+Climate+Data+Store+%28CDS%29+catalogue+entry+and+the+data+published+as+part+of+it):\n\n> Hersbach, H., et al. The ERA5 global reanalysis. Q J R Meteorol Soc. 2020; 146: 1999\u20132049. DOI: [10.1002/qj.3803](https://doi.org/10.1002/qj.3803)\n>\n> Mu\u00f1oz Sabater, J., (2019): ERA5-Land hourly data from 1981 to present. Copernicus Climate Change Service (C3S) Climate Data Store (CDS).
(Accessed on June 4, 2021), DOI: [10.24381/cds.e2161bac](https://doi.org/10.24381/cds.e2161bac)\n>\n> Mu\u00f1oz Sabater, J., (2021): ERA5-Land hourly data from 1950 to 1980. Copernicus Climate Change Service (C3S) Climate Data Store (CDS). (Accessed on June 4, 2021), DOI: [10.24381/cds.e2161bac](https://doi.org/10.24381/cds.e2161bac)\n\n### GCM-specific citations & licenses\n\nThe CMIP6 simulation data made available through the Earth System Grid Federation (ESGF) are subject to Creative Commons [BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/) or [BY-NC-SA 4.0](https://creativecommons.org/licenses/by-nc-sa/4.0/) licenses. The Climate Impact Lab has reached out to each of the modeling institutions to request waivers from these terms so the outputs of this project may be used with fewer restrictions, and has been granted permission to release the data using the licenses listed here.\n\n#### Public Domain Datasets\n\nThe following bias corrected and downscaled model simulations are available in the public domain using a [CC0 1.0 Universal Public Domain Declaration](https://creativecommons.org/publicdomain/zero/1.0/). Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc0.\n\n* **FGOALS-g3**\n\n License description: [data_licenses/FGOALS-g3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/FGOALS-g3.txt)\n\n CMIP Citation:\n\n > Li, Lijuan **(2019)**. *CAS FGOALS-g3 model output prepared for CMIP6 CMIP*. Version 20190826. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1783\n\n ScenarioMIP Citation:\n\n > Li, Lijuan **(2019)**. *CAS FGOALS-g3 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190818; SSP2-4.5 version 20190818; SSP3-7.0 version 20190820; SSP5-8.5 tasmax version 20190819; SSP5-8.5 tasmin version 20190819; SSP5-8.5 pr version 20190818. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2056\n\n\n* **INM-CM4-8**\n\n License description: [data_licenses/INM-CM4-8.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/INM-CM4-8.txt)\n\n CMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM4-8 model output prepared for CMIP6 CMIP*. Version 20190530. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1422\n\n ScenarioMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM4-8 model output prepared for CMIP6 ScenarioMIP*. Version 20190603. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.12321\n\n\n* **INM-CM5-0**\n\n License description: [data_licenses/INM-CM5-0.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/INM-CM5-0.txt)\n\n CMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM5-0 model output prepared for CMIP6 CMIP*. Version 20190610. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1423\n\n ScenarioMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM5-0 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190619; SSP2-4.5 version 20190619; SSP3-7.0 version 20190618; SSP5-8.5 version 20190724. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.12322\n\n\n#### CC-BY-4.0\n\nThe following bias corrected and downscaled model simulations are licensed under a [Creative Commons Attribution 4.0 International License](https://creativecommons.org/licenses/by/4.0/). Note that this license requires citation of the source model output (included here). Please see https://creativecommons.org/licenses/by/4.0/ for more information. Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by.\n\n* **ACCESS-CM2**\n\n License description: [data_licenses/ACCESS-CM2.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/ACCESS-CM2.txt)\n\n CMIP Citation:\n\n > Dix, Martin; Bi, Doahua; Dobrohotoff, Peter; Fiedler, Russell; Harman, Ian; Law, Rachel; Mackallah, Chloe; Marsland, Simon; O'Farrell, Siobhan; Rashid, Harun; Srbinovsky, Jhan; Sullivan, Arnold; Trenham, Claire; Vohralik, Peter; Watterson, Ian; Williams, Gareth; Woodhouse, Matthew; Bodman, Roger; Dias, Fabio Boeira; Domingues, Catia; Hannah, Nicholas; Heerdegen, Aidan; Savita, Abhishek; Wales, Scott; Allen, Chris; Druken, Kelsey; Evans, Ben; Richards, Clare; Ridzwan, Syazwan Mohamed; Roberts, Dale; Smillie, Jon; Snow, Kate; Ward, Marshall; Yang, Rui **(2019)**. *CSIRO-ARCCSS ACCESS-CM2 model output prepared for CMIP6 CMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2281\n\n ScenarioMIP Citation:\n\n > Dix, Martin; Bi, Doahua; Dobrohotoff, Peter; Fiedler, Russell; Harman, Ian; Law, Rachel; Mackallah, Chloe; Marsland, Simon; O'Farrell, Siobhan; Rashid, Harun; Srbinovsky, Jhan; Sullivan, Arnold; Trenham, Claire; Vohralik, Peter; Watterson, Ian; Williams, Gareth; Woodhouse, Matthew; Bodman, Roger; Dias, Fabio Boeira; Domingues, Catia; Hannah, Nicholas; Heerdegen, Aidan; Savita, Abhishek; Wales, Scott; Allen, Chris; Druken, Kelsey; Evans, Ben; Richards, Clare; Ridzwan, Syazwan Mohamed; Roberts, Dale; Smillie, Jon; Snow, Kate; Ward, Marshall; Yang, Rui **(2019)**. *CSIRO-ARCCSS ACCESS-CM2 model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2285\n\n\n* **ACCESS-ESM1-5**\n\n License description: [data_licenses/ACCESS-ESM1-5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/ACCESS-ESM1-5.txt)\n\n CMIP Citation:\n\n > Ziehn, Tilo; Chamberlain, Matthew; Lenton, Andrew; Law, Rachel; Bodman, Roger; Dix, Martin; Wang, Yingping; Dobrohotoff, Peter; Srbinovsky, Jhan; Stevens, Lauren; Vohralik, Peter; Mackallah, Chloe; Sullivan, Arnold; O'Farrell, Siobhan; Druken, Kelsey **(2019)**. *CSIRO ACCESS-ESM1.5 model output prepared for CMIP6 CMIP*. Version 20191115. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.2288\n\n ScenarioMIP Citation:\n\n > Ziehn, Tilo; Chamberlain, Matthew; Lenton, Andrew; Law, Rachel; Bodman, Roger; Dix, Martin; Wang, Yingping; Dobrohotoff, Peter; Srbinovsky, Jhan; Stevens, Lauren; Vohralik, Peter; Mackallah, Chloe; Sullivan, Arnold; O'Farrell, Siobhan; Druken, Kelsey **(2019)**. *CSIRO ACCESS-ESM1.5 model output prepared for CMIP6 ScenarioMIP*. Version 20191115. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2291\n\n\n* **BCC-CSM2-MR**\n\n License description: [data_licenses/BCC-CSM2-MR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/BCC-CSM2-MR.txt)\n\n CMIP Citation:\n\n > Xin, Xiaoge; Zhang, Jie; Zhang, Fang; Wu, Tongwen; Shi, Xueli; Li, Jianglong; Chu, Min; Liu, Qianxia; Yan, Jinghui; Ma, Qiang; Wei, Min **(2018)**. *BCC BCC-CSM2MR model output prepared for CMIP6 CMIP*. Version 20181126. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1725\n\n ScenarioMIP Citation:\n\n > Xin, Xiaoge; Wu, Tongwen; Shi, Xueli; Zhang, Fang; Li, Jianglong; Chu, Min; Liu, Qianxia; Yan, Jinghui; Ma, Qiang; Wei, Min **(2019)**. *BCC BCC-CSM2MR model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190315; SSP2-4.5 version 20190318; SSP3-7.0 version 20190318; SSP5-8.5 version 20190318. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1732\n\n\n* **CMCC-CM2-SR5**\n\n License description: [data_licenses/CMCC-CM2-SR5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CMCC-CM2-SR5.txt)\n\n CMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele **(2020)**. *CMCC CMCC-CM2-SR5 model output prepared for CMIP6 CMIP*. Version 20200616. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1362\n\n ScenarioMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele **(2020)**. *CMCC CMCC-CM2-SR5 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20200717; SSP2-4.5 version 20200617; SSP3-7.0 version 20200622; SSP5-8.5 version 20200622. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1365\n\n\n* **CMCC-ESM2**\n\n License description: [data_licenses/CMCC-ESM2.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CMCC-ESM2.txt)\n\n CMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele; Butensch\u00f6n, Momme **(2021)**. *CMCC CMCC-ESM2 model output prepared for CMIP6 CMIP*. Version 20210114. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.13164\n\n ScenarioMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele; Butensch\u00f6n, Momme **(2021)**. *CMCC CMCC-ESM2 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20210126; SSP2-4.5 version 20210129; SSP3-7.0 version 20210202; SSP5-8.5 version 20210126. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.13168\n\n\n* **EC-Earth3-AerChem**\n\n License description: [data_licenses/EC-Earth3-AerChem.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-AerChem.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-AerChem model output prepared for CMIP6 CMIP*. Version 20200624. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.639\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-AerChem model output prepared for CMIP6 ScenarioMIP*. Version 20200827. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.724\n\n\n* **EC-Earth3-CC**\n\n License description: [data_licenses/EC-Earth3-CC.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-CC.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth-3-CC model output prepared for CMIP6 CMIP*. Version 20210113. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.640\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2021)**. *EC-Earth-Consortium EC-Earth3-CC model output prepared for CMIP6 ScenarioMIP*. Version 20210113. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.15327\n\n\n* **EC-Earth3-Veg-LR**\n\n License description: [data_licenses/EC-Earth3-Veg-LR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-Veg-LR.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-Veg-LR model output prepared for CMIP6 CMIP*. Version 20200217. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.643\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-Veg-LR model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20201201; SSP2-4.5 version 20201123; SSP3-7.0 version 20201123; SSP5-8.5 version 20201201. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.728\n\n\n* **EC-Earth3-Veg**\n\n License description: [data_licenses/EC-Earth3-Veg.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-Veg.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3-Veg model output prepared for CMIP6 CMIP*. Version 20200225. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.642\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3-Veg model output prepared for CMIP6 ScenarioMIP*. Version 20200225. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.727\n\n\n* **EC-Earth3**\n\n License description: [data_licenses/EC-Earth3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3 model output prepared for CMIP6 CMIP*. Version 20200310. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.181\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3 model output prepared for CMIP6 ScenarioMIP*. Version 20200310. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.251\n\n\n* **GFDL-CM4**\n\n License description: [data_licenses/GFDL-CM4.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/GFDL-CM4.txt)\n\n CMIP Citation:\n\n > Guo, Huan; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Bushuk, Mitchell; Dunne, Krista A.; Dussin, Raphael; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Reichl, Brandon G; Schwarzkopf, Daniel M; Seman, Charles J; Shao, Andrew; Silvers, Levi; Wyman, Bruce; Yan, Xiaoqin; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Held, Isaac M; Krasting, John P.; Horowitz, Larry W.; Milly, P.C.D; Shevliakova, Elena; Winton, Michael; Zhao, Ming; Zhang, Rong **(2018)**. *NOAA-GFDL GFDL-CM4 model output*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1402\n\n ScenarioMIP Citation:\n\n > Guo, Huan; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Dunne, Krista A.; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Schwarzkopf, Daniel M; Seman, Charles J; Shao, Andrew; Silvers, Levi; Wyman, Bruce; Yan, Xiaoqin; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Held, Isaac M; Krasting, John P.; Horowitz, Larry W.; Milly, Chris; Shevliakova, Elena; Winton, Michael; Zhao, Ming; Zhang, Rong **(2018)**. *NOAA-GFDL GFDL-CM4 model output prepared for CMIP6 ScenarioMIP*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.9242\n\n\n* **GFDL-ESM4**\n\n License description: [data_licenses/GFDL-ESM4.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/GFDL-ESM4.txt)\n\n CMIP Citation:\n\n > Krasting, John P.; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Dunne, Krista A.; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Reichl, Brandon G; Schwarzkopf, Daniel M; Seman, Charles J; Silvers, Levi; Wyman, Bruce; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Dussin, Raphael; Guo, Huan; He, Jian; Held, Isaac M; Horowitz, Larry W.; Lin, Pu; Milly, P.C.D; Shevliakova, Elena; Stock, Charles; Winton, Michael; Wittenberg, Andrew T.; Xie, Yuanyu; Zhao, Ming **(2018)**. *NOAA-GFDL GFDL-ESM4 model output prepared for CMIP6 CMIP*. Version 20190726. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1407\n\n ScenarioMIP Citation:\n\n > John, Jasmin G; Blanton, Chris; McHugh, Colleen; Radhakrishnan, Aparna; Rand, Kristopher; Vahlenkamp, Hans; Wilson, Chandin; Zadeh, Niki T.; Dunne, John P.; Dussin, Raphael; Horowitz, Larry W.; Krasting, John P.; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Ploshay, Jeffrey; Shevliakova, Elena; Silvers, Levi; Stock, Charles; Winton, Michael; Zeng, Yujin **(2018)**. *NOAA-GFDL GFDL-ESM4 model output prepared for CMIP6 ScenarioMIP*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1414\n\n\n* **HadGEM3-GC31-LL**\n\n License description: [data_licenses/HadGEM3-GC31-LL.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/HadGEM3-GC31-LL.txt)\n\n CMIP Citation:\n\n > Ridley, Jeff; Menary, Matthew; Kuhlbrodt, Till; Andrews, Martin; Andrews, Tim **(2018)**. *MOHC HadGEM3-GC31-LL model output prepared for CMIP6 CMIP*. Version 20190624. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.419\n\n ScenarioMIP Citation:\n\n > Good, Peter **(2019)**. *MOHC HadGEM3-GC31-LL model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20200114; SSP2-4.5 version 20190908; SSP5-8.5 version 20200114. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.10845\n\n\n* **MIROC-ES2L**\n\n License description: [data_licenses/MIROC-ES2L.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MIROC-ES2L.txt)\n\n CMIP Citation:\n\n > Hajima, Tomohiro; Abe, Manabu; Arakawa, Osamu; Suzuki, Tatsuo; Komuro, Yoshiki; Ogura, Tomoo; Ogochi, Koji; Watanabe, Michio; Yamamoto, Akitomo; Tatebe, Hiroaki; Noguchi, Maki A.; Ohgaito, Rumi; Ito, Akinori; Yamazaki, Dai; Ito, Akihiko; Takata, Kumiko; Watanabe, Shingo; Kawamiya, Michio; Tachiiri, Kaoru **(2019)**. *MIROC MIROC-ES2L model output prepared for CMIP6 CMIP*. Version 20191129. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.902\n\n ScenarioMIP Citation:\n\n > Tachiiri, Kaoru; Abe, Manabu; Hajima, Tomohiro; Arakawa, Osamu; Suzuki, Tatsuo; Komuro, Yoshiki; Ogochi, Koji; Watanabe, Michio; Yamamoto, Akitomo; Tatebe, Hiroaki; Noguchi, Maki A.; Ohgaito, Rumi; Ito, Akinori; Yamazaki, Dai; Ito, Akihiko; Takata, Kumiko; Watanabe, Shingo; Kawamiya, Michio **(2019)**. *MIROC MIROC-ES2L model output prepared for CMIP6 ScenarioMIP*. Version 20200318. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.936\n\n\n* **MIROC6**\n\n License description: [data_licenses/MIROC6.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MIROC6.txt)\n\n CMIP Citation:\n\n > Tatebe, Hiroaki; Watanabe, Masahiro **(2018)**. *MIROC MIROC6 model output prepared for CMIP6 CMIP*. Version 20191016. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.881\n\n ScenarioMIP Citation:\n\n > Shiogama, Hideo; Abe, Manabu; Tatebe, Hiroaki **(2019)**. *MIROC MIROC6 model output prepared for CMIP6 ScenarioMIP*. Version 20191016. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.898\n\n\n* **MPI-ESM1-2-HR**\n\n License description: [data_licenses/MPI-ESM1-2-HR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MPI-ESM1-2-HR.txt)\n\n CMIP Citation:\n\n > Jungclaus, Johann; Bittner, Matthias; Wieners, Karl-Hermann; Wachsmann, Fabian; Schupfner, Martin; Legutke, Stephanie; Giorgetta, Marco; Reick, Christian; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Esch, Monika; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-HR model output prepared for CMIP6 CMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.741\n\n ScenarioMIP Citation:\n\n > Schupfner, Martin; Wieners, Karl-Hermann; Wachsmann, Fabian; Steger, Christian; Bittner, Matthias; Jungclaus, Johann; Fr\u00fch, Barbara; Pankatz, Klaus; Giorgetta, Marco; Reick, Christian; Legutke, Stephanie; Esch, Monika; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *DKRZ MPI-ESM1.2-HR model output prepared for CMIP6 ScenarioMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2450\n\n\n* **MPI-ESM1-2-LR**\n\n License description: [data_licenses/MPI-ESM1-2-LR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MPI-ESM1-2-LR.txt)\n\n CMIP Citation:\n\n > Wieners, Karl-Hermann; Giorgetta, Marco; Jungclaus, Johann; Reick, Christian; Esch, Monika; Bittner, Matthias; Legutke, Stephanie; Schupfner, Martin; Wachsmann, Fabian; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-LR model output prepared for CMIP6 CMIP*. Version 20190710. 
Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.742\n\n ScenarioMIP Citation:\n\n > Wieners, Karl-Hermann; Giorgetta, Marco; Jungclaus, Johann; Reick, Christian; Esch, Monika; Bittner, Matthias; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-LR model output prepared for CMIP6 ScenarioMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.793\n\n\n* **NESM3**\n\n License description: [data_licenses/NESM3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NESM3.txt)\n\n CMIP Citation:\n\n > Cao, Jian; Wang, Bin **(2019)**. *NUIST NESMv3 model output prepared for CMIP6 CMIP*. Version 20190812. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2021\n\n ScenarioMIP Citation:\n\n > Cao, Jian **(2019)**. *NUIST NESMv3 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190806; SSP2-4.5 version 20190805; SSP5-8.5 version 20190811. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2027\n\n\n* **NorESM2-LM**\n\n License description: [data_licenses/NorESM2-LM.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NorESM2-LM.txt)\n\n CMIP Citation:\n\n > Seland, \u00d8yvind; Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-LM model output prepared for CMIP6 CMIP*. Version 20190815. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.502\n\n ScenarioMIP Citation:\n\n > Seland, \u00d8yvind; Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-LM model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.604\n\n\n* **NorESM2-MM**\n\n License description: [data_licenses/NorESM2-MM.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NorESM2-MM.txt)\n\n CMIP Citation:\n\n > Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Seland, \u00d8yvind; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-MM model output prepared for CMIP6 CMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.506\n\n ScenarioMIP Citation:\n\n > Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Seland, \u00d8yvind; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-MM model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.608\n\n\n* **UKESM1-0-LL**\n\n License description: [data_licenses/UKESM1-0-LL.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/UKESM1-0-LL.txt)\n\n CMIP Citation:\n\n > Tang, Yongming; Rumbold, Steve; Ellis, Rich; Kelley, Douglas; Mulcahy, Jane; Sellar, Alistair; Walton, Jeremy; Jones, Colin **(2019)**. *MOHC UKESM1.0-LL model output prepared for CMIP6 CMIP*. Version 20190627. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1569\n\n ScenarioMIP Citation:\n\n > Good, Peter; Sellar, Alistair; Tang, Yongming; Rumbold, Steve; Ellis, Rich; Kelley, Douglas; Kuhlbrodt, Till; Walton, Jeremy **(2019)**. *MOHC UKESM1.0-LL model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190708; SSP2-4.5 version 20190715; SSP3-7.0 version 20190726; SSP5-8.5 version 20190726. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1567\n\n* **CanESM5**\n\n License description: [data_licenses/CanESM5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CanESM5.txt). Note: this dataset was previously licensed\n under CC BY-SA 4.0, but was relicensed as CC BY 4.0 in March, 2023.\n\n CMIP Citation:\n\n > Swart, Neil Cameron; Cole, Jason N.S.; Kharin, Viatcheslav V.; Lazare, Mike; Scinocca, John F.; Gillett, Nathan P.; Anstey, James; Arora, Vivek; Christian, James R.; Jiao, Yanjun; Lee, Warren G.; Majaess, Fouad; Saenko, Oleg A.; Seiler, Christian; Seinen, Clint; Shao, Andrew; Solheim, Larry; von Salzen, Knut; Yang, Duo; Winter, Barbara; Sigmond, Michael **(2019)**. *CCCma CanESM5 model output prepared for CMIP6 CMIP*. Version 20190429. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1303\n\n ScenarioMIP Citation:\n\n > Swart, Neil Cameron; Cole, Jason N.S.; Kharin, Viatcheslav V.; Lazare, Mike; Scinocca, John F.; Gillett, Nathan P.; Anstey, James; Arora, Vivek; Christian, James R.; Jiao, Yanjun; Lee, Warren G.; Majaess, Fouad; Saenko, Oleg A.; Seiler, Christian; Seinen, Clint; Shao, Andrew; Solheim, Larry; von Salzen, Knut; Yang, Duo; Winter, Barbara; Sigmond, Michael **(2019)**. *CCCma CanESM5 model output prepared for CMIP6 ScenarioMIP*. Version 20190429. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1317\n\n## Acknowledgements\n\nThis work is the result of many years' worth of work by members of the [Climate Impact Lab](https://impactlab.org), but would not have been possible without many contributions from across the wider scientific and computing communities.\n\nSpecifically, we would like to acknowledge the World Climate Research Programme's Working Group on Coupled Modeling, which is responsible for CMIP, and we would like to thank the climate modeling groups for producing and making their model output available. We would particularly like to thank the modeling institutions whose results are included as an input to this repository (listed above) for their contributions to the CMIP6 project and for responding to and granting our requests for license waivers.\n\nWe would also like to thank Lamont-Doherty Earth Observatory, the [Pangeo Consortium](https://github.com/pangeo-data) (and especially the [ESGF Cloud Data Working Group](https://pangeo-data.github.io/pangeo-cmip6-cloud/#)) and Google Cloud and the Google Public Datasets program for making the [CMIP6 Google Cloud collection](https://console.cloud.google.com/marketplace/details/noaa-public/cmip6) possible. In particular we're extremely grateful to [Ryan Abernathey](https://github.com/rabernat), [Naomi Henderson](https://github.com/naomi-henderson), [Charles Blackmon-Luca](https://github.com/charlesbluca), [Aparna Radhakrishnan](https://github.com/aradhakrishnanGFDL), [Julius Busecke](https://github.com/jbusecke), and [Charles Stern](https://github.com/cisaacstern) for the huge amount of work they've done to translate the ESGF CMIP6 netCDF archives into consistently-formatted, analysis-ready zarr stores on Google Cloud.\n\nWe're also grateful to the [xclim developers](https://github.com/Ouranosinc/xclim/graphs/contributors) ([DOI: 10.5281/zenodo.2795043](https://doi.org/10.5281/zenodo.2795043)), in particular [Pascal Bourgault](https://github.com/aulemahal), [David Huard](https://github.com/huard), and [Travis Logan](https://github.com/tlogan2000), for implementing the QDM bias correction method in the xclim python package, supporting our QPLAD implementation into the package, and ongoing support in integrating dask into downscaling workflows.
For method advice and useful conversations, we would like to thank Keith Dixon, Dennis Adams-Smith, and [Joe Hamman](https://github.com/jhamman).\n\n## Financial support\n\nThis research has been supported by The Rockefeller Foundation and the Microsoft AI for Earth Initiative.\n\n## Additional links:\n\n* CIL GDPCIR project homepage: [github.com/ClimateImpactLab/downscaleCMIP6](https://github.com/ClimateImpactLab/downscaleCMIP6)\n* Project listing on zenodo: https://doi.org/10.5281/zenodo.6403794\n* Climate Impact Lab homepage: [impactlab.org](https://impactlab.org)", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "cil-gdpcir-cc-by,climate-impact-lab,cmip6,precipitation,rhodium-group,temperature", "license": "CC-BY-4.0", "title": "CIL Global Downscaled Projections for Climate Impacts Research (CC-BY-4.0)", "missionStartDate": "1950-01-01T00:00:00Z"}, "noaa-cdr-sea-surface-temperature-whoi-netcdf": {"abstract": "The Sea Surface Temperature-Woods Hole Oceanographic Institution (WHOI) Climate Data Record (CDR) is one of three CDRs which combine to form the NOAA Ocean Surface Bundle (OSB) CDR. The resultant sea surface temperature (SST) data are produced through modeling the diurnal variability in combination with AVHRR SST observations. The final record is output to a 3-hourly 0.25\u00b0 resolution grid over the global ice-free oceans from January 1988\u2014present.\n\nThis is a NetCDF-only collection, for Cloud-Optimized GeoTIFFs use collection `noaa-cdr-sea-surface-temperature-whoi`.\nThe NetCDF files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,global,noaa,noaa-cdr-sea-surface-temperature-whoi-netcdf,ocean,temperature", "license": "proprietary", "title": "Sea Surface Temperature - WHOI CDR NetCDFs", "missionStartDate": "1988-01-01T00:00:00Z"}, "noaa-cdr-sea-surface-temperature-optimum-interpolation": {"abstract": "The NOAA 1/4\u00b0 daily Optimum Interpolation Sea Surface Temperature (or daily OISST) Climate Data Record (CDR) provides complete ocean temperature fields constructed by combining bias-adjusted observations from different platforms (satellites, ships, buoys) on a regular global grid, with gaps filled in by interpolation. The main input source is satellite data from the Advanced Very High Resolution Radiometer (AVHRR), which provides high temporal-spatial coverage from late 1981-present. This input must be adjusted to the buoys due to erroneous cold SST data following the Mt Pinatubo and El Chichon eruptions. 
Applications include climate modeling, resource management, ecological studies on annual to daily scales.\n\nThese Cloud Optimized GeoTIFFs (COGs) were created from NetCDF files which are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\nFor the NetCDF files, see collection `noaa-cdr-sea-surface-temperature-optimum-interpolation-netcdf`.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,global,noaa,noaa-cdr-sea-surface-temperature-optimum-interpolation,ocean,temperature", "license": "proprietary", "title": "Sea Surface Temperature - Optimum Interpolation CDR", "missionStartDate": "1981-09-01T00:00:00Z"}, "modis-10A1-061": {"abstract": "This global Level-3 (L3) data set provides a daily composite of snow cover and albedo derived from the 'MODIS Snow Cover 5-Min L2 Swath 500m' data set. Each data granule is a 10degx10deg tile projected to a 500 m sinusoidal grid.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod10a1,modis,modis-10a1-061,myd10a1,nasa,satellite,snow,terra", "license": "proprietary", "title": "MODIS Snow Cover Daily", "missionStartDate": "2000-02-24T00:00:00Z"}, "sentinel-5p-l2-netcdf": {"abstract": "The Copernicus [Sentinel-5 Precursor](https://sentinels.copernicus.eu/web/sentinel/missions/sentinel-5p) mission provides high spatio-temporal resolution measurements of the Earth's atmosphere. The mission consists of one satellite carrying the [TROPOspheric Monitoring Instrument](http://www.tropomi.eu/) (TROPOMI). The satellite flies in loose formation with NASA's [Suomi NPP](https://www.nasa.gov/mission_pages/NPP/main/index.html) spacecraft, allowing utilization of co-located cloud mask data provided by the [Visible Infrared Imaging Radiometer Suite](https://www.nesdis.noaa.gov/current-satellite-missions/currently-flying/joint-polar-satellite-system/visible-infrared-imaging) (VIIRS) instrument onboard Suomi NPP during processing of the TROPOMI methane product.\n\nThe Sentinel-5 Precursor mission aims to reduce the global atmospheric data gap between the retired [ENVISAT](https://earth.esa.int/eogateway/missions/envisat) and [AURA](https://www.nasa.gov/mission_pages/aura/main/index.html) missions and the future [Sentinel-5](https://sentinels.copernicus.eu/web/sentinel/missions/sentinel-5) mission. Sentinel-5 Precursor [Level 2 data](http://www.tropomi.eu/data-products/level-2-products) provide total columns of ozone, sulfur dioxide, nitrogen dioxide, carbon monoxide and formaldehyde, tropospheric columns of ozone, vertical profiles of ozone and cloud & aerosol information. 
These measurements are used for improving air quality forecasts and monitoring the concentrations of atmospheric constituents.\n\nThis STAC Collection provides Sentinel-5 Precursor Level 2 data, in NetCDF format, since April 2018 for the following products:\n\n* [`L2__AER_AI`](http://www.tropomi.eu/data-products/uv-aerosol-index): Ultraviolet aerosol index\n* [`L2__AER_LH`](http://www.tropomi.eu/data-products/aerosol-layer-height): Aerosol layer height\n* [`L2__CH4___`](http://www.tropomi.eu/data-products/methane): Methane (CH4) total column\n* [`L2__CLOUD_`](http://www.tropomi.eu/data-products/cloud): Cloud fraction, albedo, and top pressure\n* [`L2__CO____`](http://www.tropomi.eu/data-products/carbon-monoxide): Carbon monoxide (CO) total column\n* [`L2__HCHO__`](http://www.tropomi.eu/data-products/formaldehyde): Formaldehyde (HCHO) total column\n* [`L2__NO2___`](http://www.tropomi.eu/data-products/nitrogen-dioxide): Nitrogen dioxide (NO2) total column\n* [`L2__O3____`](http://www.tropomi.eu/data-products/total-ozone-column): Ozone (O3) total column\n* [`L2__O3_TCL`](http://www.tropomi.eu/data-products/tropospheric-ozone-column): Ozone (O3) tropospheric column\n* [`L2__SO2___`](http://www.tropomi.eu/data-products/sulphur-dioxide): Sulfur dioxide (SO2) total column\n* [`L2__NP_BD3`](http://www.tropomi.eu/data-products/auxiliary): Cloud from the Suomi NPP mission, band 3\n* [`L2__NP_BD6`](http://www.tropomi.eu/data-products/auxiliary): Cloud from the Suomi NPP mission, band 6\n* [`L2__NP_BD7`](http://www.tropomi.eu/data-products/auxiliary): Cloud from the Suomi NPP mission, band 7\n", "instrument": "TROPOMI", "platform": "Sentinel-5P", "platformSerialIdentifier": "Sentinel 5 Precursor", "processingLevel": null, "keywords": "air-quality,climate-change,copernicus,esa,forecasting,sentinel,sentinel-5-precursor,sentinel-5p,sentinel-5p-l2-netcdf,tropomi", "license": "proprietary", "title": "Sentinel-5P Level-2", "missionStartDate": "2018-04-30T00:18:50Z"}, "sentinel-3-olci-wfr-l2-netcdf": {"abstract": "This Collection provides Sentinel-3 Full Resolution [OLCI Level-2 Water][olci-l2] products containing data on water-leaving reflectance, ocean color, and more.\n\n## Data files\n\nThis dataset includes data on:\n\n- Surface directional reflectance\n- Chlorophyll-a concentration\n- Suspended matter concentration\n- Energy flux\n- Aerosol load\n- Integrated water vapor column\n\nEach variable is contained within NetCDF files. Error estimates are available for each product.\n\n## Processing overview\n\nThe values in the data files have been converted from Top of Atmosphere radiance to reflectance, and include various corrections for gaseous absorption and pixel classification. 
More information about the product and data processing can be found in the [User Guide](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-water) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-olci/level-2/processing).\n\nThis Collection contains Level-2 data in NetCDF files from November 2017 to present.\n\n[olci-l2]: https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-olci/level-2/ocean-products\n", "instrument": "OLCI", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,ocean,olci,sentinel,sentinel-3,sentinel-3-olci-wfr-l2-netcdf,sentinel-3a,sentinel-3b,water", "license": "proprietary", "title": "Sentinel-3 Water (Full Resolution)", "missionStartDate": "2017-11-01T00:07:01.738487Z"}, "noaa-cdr-ocean-heat-content-netcdf": {"abstract": "The Ocean Heat Content Climate Data Record (CDR) is a set of ocean heat content anomaly (OHCA) time-series for 1955-present on 3-monthly, yearly, and pentadal (five-yearly) scales. This CDR quantifies ocean heat content change over time, which is an essential metric for understanding climate change and the Earth's energy budget. It provides time-series for multiple depth ranges in the global ocean and each of the major basins (Atlantic, Pacific, and Indian) divided by hemisphere (Northern, Southern).\n\nThis is a NetCDF-only collection, for Cloud-Optimized GeoTIFFs use collection `noaa-cdr-ocean-heat-content`.\nThe NetCDF files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,global,noaa,noaa-cdr-ocean-heat-content-netcdf,ocean,temperature", "license": "proprietary", "title": "Global Ocean Heat Content CDR NetCDFs", "missionStartDate": "1972-03-01T00:00:00Z"}, "sentinel-3-synergy-aod-l2-netcdf": {"abstract": "This Collection provides the Sentinel-3 [Synergy Level-2 Aerosol Optical Depth](https://sentinels.copernicus.eu/web/sentinel/level-2-aod) product, which is a downstream development of the Sentinel-3 Level-1 [OLCI Full Resolution](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-olci/data-formats/level-1) and [SLSTR Radiances and Brightness Temperatures](https://sentinels.copernicus.eu/web/sentinel/user-guides/Sentinel-3-slstr/data-formats/level-1) products. The dataset provides both retrieved and diagnostic global aerosol parameters at super-pixel (4.5 km x 4.5 km) resolution in a single NetCDF file for all regions over land and ocean free of snow/ice cover, excluding high cloud fraction data. The retrieved and derived aerosol parameters are:\n\n- Aerosol Optical Depth (AOD) at 440, 550, 670, 985, 1600 and 2250 nm\n- Error estimates (i.e. standard deviation) in AOD at 440, 550, 670, 985, 1600 and 2250 nm\n- Single Scattering Albedo (SSA) at 440, 550, 670, 985, 1600 and 2250 nm\n- Fine-mode AOD at 550nm\n- Aerosol Angstrom parameter between 550 and 865nm\n- Dust AOD at 550nm\n- Aerosol absorption optical depth at 550nm\n\nAtmospherically corrected nadir surface directional reflectances at 440, 550, 670, 985, 1600 and 2250 nm at super-pixel (4.5 km x 4.5 km) resolution are also provided.
More information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/level-2-aod) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-synergy/products-algorithms/level-2-aod-algorithms-and-products).\n\nThis Collection contains Level-2 data in NetCDF files from April 2020 to present.\n", "instrument": "OLCI,SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "aerosol,copernicus,esa,global,olci,satellite,sentinel,sentinel-3,sentinel-3-synergy-aod-l2-netcdf,sentinel-3a,sentinel-3b,slstr", "license": "proprietary", "title": "Sentinel-3 Global Aerosol", "missionStartDate": "2020-04-16T19:36:28.012367Z"}, "sentinel-3-synergy-v10-l2-netcdf": {"abstract": "This Collection provides the Sentinel-3 [Synergy Level-2 10-Day Surface Reflectance and NDVI](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-vg1-v10) products, which are SPOT VEGETATION Continuity Products similar to those obtained from the [VEGETATION instrument](https://docs.terrascope.be/#/Satellites/SPOT-VGT/MissionInstruments) onboard the SPOT-4 and SPOT-5 satellites. The primary variables are a maximum Normalized Difference Vegetation Index (NDVI) composite, which is derived from ground reflectance during a 10-day window, and four surface reflectance bands:\n\n- B0 (Blue, 450nm)\n- B2 (Red, 645nm)\n- B3 (NIR, 835nm)\n- MIR (SWIR, 1665nm)\n\nThe four reflectance bands have center wavelengths matching those on the original SPOT VEGETATION instrument. The NDVI variable, which is an indicator of the amount of vegetation, is derived from the B3 and B2 bands.\n\n## Data files\n\nThe four reflectance bands and NDVI values are each contained in dedicated NetCDF files. 
Additional metadata are delivered in annotation NetCDF files, each containing a single variable, including the geometric viewing and illumination conditions, the total water vapour and ozone columns, and the aerosol optical depth.\n\nEach 10-day product is delivered as a set of 10 rectangular scenes:\n\n- AFRICA\n- NORTH_AMERICA\n- SOUTH_AMERICA\n- CENTRAL_AMERICA\n- NORTH_ASIA\n- WEST_ASIA\n- SOUTH_EAST_ASIA\n- ASIAN_ISLANDS\n- AUSTRALASIA\n- EUROPE\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-vg1-v10) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-synergy/vgt-s/v10-product).\n\nThis Collection contains Level-2 data in NetCDF files from September 2018 to present.\n", "instrument": "OLCI,SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,ndvi,olci,reflectance,satellite,sentinel,sentinel-3,sentinel-3-synergy-v10-l2-netcdf,sentinel-3a,sentinel-3b,slstr", "license": "proprietary", "title": "Sentinel-3 10-Day Surface Reflectance and NDVI (SPOT VEGETATION)", "missionStartDate": "2018-09-27T11:17:21Z"}, "sentinel-3-olci-lfr-l2-netcdf": {"abstract": "This collection provides Sentinel-3 Full Resolution [OLCI Level-2 Land][olci-l2] products containing data on global vegetation, chlorophyll, and water vapor.\n\n## Data files\n\nThis dataset includes data on three primary variables:\n\n* OLCI global vegetation index file\n* terrestrial Chlorophyll index file\n* integrated water vapor over water file.\n\nEach variable is contained within a separate NetCDF file, and is cataloged as an asset in each Item.\n\nSeveral associated variables are also provided in the annotations data files:\n\n* rectified reflectance for red and NIR channels (RC681 and RC865)\n* classification, quality and science flags (LQSF)\n* common data such as the ortho-geolocation of land pixels, solar and satellite angles, atmospheric and meteorological data, time stamp or instrument information. These variables are inherited from Level-1B products.\n\nThis full resolution product offers a spatial sampling of approximately 300 m.\n\n## Processing overview\n\nThe values in the data files have been converted from Top of Atmosphere radiance to reflectance, and include various corrections for gaseous absorption and pixel classification. 
More information about the product and data processing can be found in the [User Guide](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-land) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-olci/level-2/processing).\n\nThis Collection contains Level-2 data in NetCDF files from April 2016 to present.\n\n[olci-l2]: https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-olci/level-2/land-products\n", "instrument": "OLCI", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "biomass,copernicus,esa,land,olci,sentinel,sentinel-3,sentinel-3-olci-lfr-l2-netcdf,sentinel-3a,sentinel-3b", "license": "proprietary", "title": "Sentinel-3 Land (Full Resolution)", "missionStartDate": "2016-04-25T11:33:47.368562Z"}, "sentinel-3-sral-lan-l2-netcdf": {"abstract": "This Collection provides Sentinel-3 [SRAL Level-2 Land Altimetry](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-altimetry/level-2-algorithms-products) products, which contain data on land radar altimetry measurements. Each product contains three NetCDF files:\n\n- A reduced data file containing a subset of the 1 Hz Ku-band parameters.\n- A standard data file containing the standard 1 Hz and 20 Hz Ku- and C-band parameters.\n- An enhanced data file containing the standard 1 Hz and 20 Hz Ku- and C-band parameters along with the waveforms and parameters necessary to reprocess the data.\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-altimetry/overview) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-altimetry).\n\nThis Collection contains Level-2 data in NetCDF files from March 2016 to present.\n", "instrument": "SRAL", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "altimetry,copernicus,esa,radar,satellite,sentinel,sentinel-3,sentinel-3-sral-lan-l2-netcdf,sentinel-3a,sentinel-3b,sral", "license": "proprietary", "title": "Sentinel-3 Land Radar Altimetry", "missionStartDate": "2016-03-01T14:07:51.632846Z"}, "sentinel-3-slstr-lst-l2-netcdf": {"abstract": "This Collection provides Sentinel-3 [SLSTR Level-2 Land Surface Temperature](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-slstr/product-types/level-2-lst) products containing data on land surface temperature measurements on a 1km grid. Radiance is measured in two channels to determine the temperature of the Earth's surface skin in the instrument field of view, where the term \"skin\" refers to the top surface of bare soil or the effective emitting temperature of vegetation canopies as viewed from above.\n\n## Data files\n\nThe dataset includes data on the primary measurement variable, land surface temperature, in a single NetCDF file, `LST_in.nc`. A second file, `LST_ancillary.nc`, contains several ancillary variables:\n\n- Normalized Difference Vegetation Index\n- Surface biome classification\n- Fractional vegetation cover\n- Total water vapor column\n\nIn addition to the primary and ancillary data files, a standard set of annotation data files provide meteorological information, geolocation and time coordinates, geometry information, and quality flags. 
More information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-slstr/product-types/level-2-lst) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-slstr/level-2/lst-processing).\n\nThis Collection contains Level-2 data in NetCDF files from April 2016 to present.\n\n## STAC Item geometries\n\nThe Collection contains small \"chips\" and long \"stripes\" of data collected along the satellite direction of travel. Approximately five percent of the STAC Items describing long stripes of data contain geometries that encompass a larger area than an exact concave hull of the data extents. This may require additional filtering when searching the Collection for Items that spatially intersect an area of interest.\n", "instrument": "SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,land,satellite,sentinel,sentinel-3,sentinel-3-slstr-lst-l2-netcdf,sentinel-3a,sentinel-3b,slstr,temperature", "license": "proprietary", "title": "Sentinel-3 Land Surface Temperature", "missionStartDate": "2016-04-19T01:35:17.188500Z"}, "sentinel-3-slstr-wst-l2-netcdf": {"abstract": "This Collection provides Sentinel-3 [SLSTR Level-2 Water Surface Temperature](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-slstr/product-types/level-2-wst) products containing data on sea surface temperature measurements on a 1km grid. Each product consists of a single NetCDF file containing all data variables:\n\n- Sea Surface Temperature (SST) value\n- SST total uncertainty\n- Latitude and longitude coordinates\n- SST time deviation\n- Single Sensor Error Statistic (SSES) bias and standard deviation estimate\n- Contextual parameters such as wind speed at 10 m and fractional sea-ice contamination\n- Quality flag\n- Satellite zenith angle\n- Top Of Atmosphere (TOA) Brightness Temperature (BT)\n- TOA noise equivalent BT\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-slstr/product-types/level-2-wst) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-slstr/level-2/sst-processing).\n\nThis Collection contains Level-2 data in NetCDF files from October 2017 to present.\n", "instrument": "SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,ocean,satellite,sentinel,sentinel-3,sentinel-3-slstr-wst-l2-netcdf,sentinel-3a,sentinel-3b,slstr,temperature", "license": "proprietary", "title": "Sentinel-3 Sea Surface Temperature", "missionStartDate": "2017-10-31T23:59:57.451604Z"}, "sentinel-3-sral-wat-l2-netcdf": {"abstract": "This Collection provides Sentinel-3 [SRAL Level-2 Ocean Altimetry](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-altimetry/level-2-algorithms-products) products, which contain data on ocean radar altimetry measurements. 
Each product contains three NetCDF files:\n\n- A reduced data file containing a subset of the 1 Hz Ku-band parameters.\n- A standard data file containing the standard 1 Hz and 20 Hz Ku- and C-band parameters.\n- An enhanced data file containing the standard 1 Hz and 20 Hz Ku- and C-band parameters along with the waveforms and parameters necessary to reprocess the data.\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-altimetry/overview) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-altimetry).\n\nThis Collection contains Level-2 data in NetCDF files from January 2017 to present.\n", "instrument": "SRAL", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "altimetry,copernicus,esa,ocean,radar,satellite,sentinel,sentinel-3,sentinel-3-sral-wat-l2-netcdf,sentinel-3a,sentinel-3b,sral", "license": "proprietary", "title": "Sentinel-3 Ocean Radar Altimetry", "missionStartDate": "2017-01-28T00:59:14.149496Z"}, "ms-buildings": {"abstract": "Bing Maps is releasing open building footprints around the world. We have detected over 999 million buildings from Bing Maps imagery between 2014 and 2021 including Maxar and Airbus imagery. The data is freely available for download and use under ODbL. This dataset complements our other releases.\n\nFor more information, see the [GlobalMLBuildingFootprints](https://github.com/microsoft/GlobalMLBuildingFootprints/) repository on GitHub.\n\n## Building footprint creation\n\nThe building extraction is done in two stages:\n\n1. Semantic Segmentation \u2013 Recognizing building pixels on an aerial image using deep neural networks (DNNs)\n2. Polygonization \u2013 Converting building pixel detections into polygons\n\n**Stage 1: Semantic Segmentation**\n\n![Semantic segmentation](https://raw.githubusercontent.com/microsoft/GlobalMLBuildingFootprints/main/images/segmentation.jpg)\n\n**Stage 2: Polygonization**\n\n![Polygonization](https://github.com/microsoft/GlobalMLBuildingFootprints/raw/main/images/polygonization.jpg)\n\n## Data assets\n\nThe building footprints are provided as a set of [geoparquet](https://github.com/opengeospatial/geoparquet) datasets in [Delta][delta] table format.\nThe data are partitioned by\n\n1. Region\n2. quadkey at [Bing Map Tiles][tiles] level 9\n\nEach `(Region, quadkey)` pair will have one or more geoparquet files, depending on the density of the buildings in that area.\n\nNote that older items in this dataset are *not* spatially partitioned. We recommend using data with a processing date\nof 2023-04-25 or newer. This processing date is part of the URL for each parquet file and is captured in the STAC metadata\nfor each item (see below).\n\n## Delta Format\n\nThe collection-level asset under the `delta` key gives you the fsspec-style URL\nto the Delta table. This can be used to efficiently query for matching partitions\nby `Region` and `quadkey`. See the notebook for an example using Python.\n\n## STAC metadata\n\nThis STAC collection has one STAC item per region. The `msbuildings:region`\nproperty can be used to filter items to a specific region, and the `msbuildings:quadkey`\nproperty can be used to filter items to a specific quadkey (though you can also search\nby the `geometry`).\n\nNote that older STAC items are not spatially partitioned.
We recommend filtering on\nitems with an `msbuildings:processing-date` of `2023-04-25` or newer. See the collection\nsummary for `msbuildings:processing-date` for a list of valid values.\n\n[delta]: https://delta.io/\n[tiles]: https://learn.microsoft.com/en-us/bingmaps/articles/bing-maps-tile-system\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "bing-maps,buildings,delta,footprint,geoparquet,microsoft,ms-buildings", "license": "ODbL-1.0", "title": "Microsoft Building Footprints", "missionStartDate": "2014-01-01T00:00:00Z"}, "sentinel-3-slstr-frp-l2-netcdf": {"abstract": "This Collection provides Sentinel-3 [SLSTR Level-2 Fire Radiative Power](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-slstr/product-types/level-2-frp) (FRP) products containing data on fires detected over land and ocean.\n\n## Data files\n\nThe primary measurement data is contained in the `FRP_in.nc` file and provides FRP and uncertainties, projected onto a 1km grid, for fires detected in the thermal infrared (TIR) spectrum over land. Since February 2022, FRP and uncertainties are also provided for fires detected in the short wave infrared (SWIR) spectrum over both land and ocean, with the delivered data projected onto a 500m grid. The latter SWIR-detected fire data is only available for night-time measurements and is contained in the `FRP_an.nc` or `FRP_bn.nc` files.\n\nIn addition to the measurement data files, a standard set of annotation data files provide meteorological information, geolocation and time coordinates, geometry information, and quality flags.\n\n## Processing\n\nThe TIR fire detection is based on measurements from the S7 and F1 bands of the [SLSTR instrument](https://sentinels.copernicus.eu/web/sentinel/technical-guides/sentinel-3-slstr/instrument); SWIR fire detection is based on the S5 and S6 bands. 
More information about the product and data processing can be found in the [User Guide](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-slstr/product-types/level-2-frp) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-slstr/level-2/frp-processing).\n\nThis Collection contains Level-2 data in NetCDF files from August 2020 to present.\n", "instrument": "SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,fire,satellite,sentinel,sentinel-3,sentinel-3-slstr-frp-l2-netcdf,sentinel-3a,sentinel-3b,slstr,temperature", "license": "proprietary", "title": "Sentinel-3 Fire Radiative Power", "missionStartDate": "2020-08-08T23:11:15.617203Z"}, "sentinel-3-synergy-syn-l2-netcdf": {"abstract": "This Collection provides the Sentinel-3 [Synergy Level-2 Land Surface Reflectance and Aerosol](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-syn) product, which contains data on Surface Directional Reflectance, Aerosol Optical Thickness, and an Angstrom coefficient estimate over land.\n\n## Data Files\n\nIndividual NetCDF files for the following variables:\n\n- Surface Directional Reflectance (SDR) with their associated error estimates for the sun-reflective [SLSTR](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-slstr) channels (S1 to S6 for both nadir and oblique views, except S4) and for all [OLCI](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-olci) channels, except for the oxygen absorption bands Oa13, Oa14, Oa15, and the water vapor bands Oa19 and Oa20.\n- Aerosol optical thickness at 550nm with error estimates.\n- Angstrom coefficient at 550nm.\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-syn) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-synergy/level-2/syn-level-2-product).\n\nThis Collection contains Level-2 data in NetCDF files from September 2018 to present.\n", "instrument": "OLCI,SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "aerosol,copernicus,esa,land,olci,reflectance,satellite,sentinel,sentinel-3,sentinel-3-synergy-syn-l2-netcdf,sentinel-3a,sentinel-3b,slstr", "license": "proprietary", "title": "Sentinel-3 Land Surface Reflectance and Aerosol", "missionStartDate": "2018-09-22T16:51:00.001276Z"}, "sentinel-3-synergy-vgp-l2-netcdf": {"abstract": "This Collection provides the Sentinel-3 [Synergy Level-2 Top of Atmosphere Reflectance](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-vgp) product, which is a SPOT VEGETATION Continuity Product containing measurement data similar to that obtained by the [VEGETATION instrument](https://docs.terrascope.be/#/Satellites/SPOT-VGT/MissionInstruments) onboard the SPOT-4 and SPOT-5 satellites.
The primary variables are four top of atmosphere reflectance bands:\n\n- B0 (Blue, 450nm)\n- B2 (Red, 645nm)\n- B3 (NIR, 835nm)\n- MIR (SWIR, 1665nm)\n\nThe four reflectance bands have center wavelengths matching those on the original SPOT VEGETATION instrument and have been adapted for scientific applications requiring highly accurate physical measurements through correction for systematic errors and re-sampling to predefined geographic projections. The pixel brightness count is the ground area's apparent reflectance as seen at the top of atmosphere.\n\n## Data files\n\nNetCDF files are provided for the four reflectance bands. Additional metadata are delivered in annotation NetCDF files, each containing a single variable, including the geometric viewing and illumination conditions, the total water vapour and ozone columns, and the aerosol optical depth.\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-vgp) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-synergy/level-2/vgt-p-product).\n\nThis Collection contains Level-2 data in NetCDF files from October 2018 to present.\n", "instrument": "OLCI,SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,olci,reflectance,satellite,sentinel,sentinel-3,sentinel-3-synergy-vgp-l2-netcdf,sentinel-3a,sentinel-3b,slstr", "license": "proprietary", "title": "Sentinel-3 Top of Atmosphere Reflectance (SPOT VEGETATION)", "missionStartDate": "2018-10-08T08:09:40.491227Z"}, "sentinel-3-synergy-vg1-l2-netcdf": {"abstract": "This Collection provides the Sentinel-3 [Synergy Level-2 1-Day Surface Reflectance and NDVI](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-vg1-v10) products, which are SPOT VEGETATION Continuity Products similar to those obtained from the [VEGETATION instrument](https://docs.terrascope.be/#/Satellites/SPOT-VGT/MissionInstruments) onboard the SPOT-4 and SPOT-5 satellites. The primary variables are a maximum Normalized Difference Vegetation Index (NDVI) composite, which is derived from daily ground reflectance, and four surface reflectance bands:\n\n- B0 (Blue, 450nm)\n- B2 (Red, 645nm)\n- B3 (NIR, 835nm)\n- MIR (SWIR, 1665nm)\n\nThe four reflectance bands have center wavelengths matching those on the original SPOT VEGETATION instrument. The NDVI variable, which is an indicator of the amount of vegetation, is derived from the B3 and B2 bands.\n\n## Data files\n\nThe four reflectance bands and NDVI values are each contained in dedicated NetCDF files.
Additional metadata are delivered in annotation NetCDF files, each containing a single variable, including the geometric viewing and illumination conditions, the total water vapour and ozone columns, and the aerosol optical depth.\n\nEach 1-day product is delivered as a set of 10 rectangular scenes:\n\n- AFRICA\n- NORTH_AMERICA\n- SOUTH_AMERICA\n- CENTRAL_AMERICA\n- NORTH_ASIA\n- WEST_ASIA\n- SOUTH_EAST_ASIA\n- ASIAN_ISLANDS\n- AUSTRALASIA\n- EUROPE\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-vg1-v10) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-synergy/vgt-s/vg1-product-surface-reflectance).\n\nThis Collection contains Level-2 data in NetCDF files from October 2018 to present.\n", "instrument": "OLCI,SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,ndvi,olci,reflectance,satellite,sentinel,sentinel-3,sentinel-3-synergy-vg1-l2-netcdf,sentinel-3a,sentinel-3b,slstr", "license": "proprietary", "title": "Sentinel-3 1-Day Surface Reflectance and NDVI (SPOT VEGETATION)", "missionStartDate": "2018-10-04T23:17:21Z"}, "esa-worldcover": {"abstract": "The European Space Agency (ESA) [WorldCover](https://esa-worldcover.org/en) product provides global land cover maps for the years 2020 and 2021 at 10 meter resolution based on the combination of [Sentinel-1](https://sentinel.esa.int/web/sentinel/missions/sentinel-1) radar data and [Sentinel-2](https://sentinel.esa.int/web/sentinel/missions/sentinel-2) imagery. The discrete classification maps provide 11 classes defined using the Land Cover Classification System (LCCS) developed by the United Nations (UN) Food and Agriculture Organization (FAO). The map images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\n\nThe WorldCover product is developed by a consortium of European service providers and research organizations. 
[VITO](https://remotesensing.vito.be/) (Belgium) is the prime contractor of the WorldCover consortium together with [Brockmann Consult](https://www.brockmann-consult.de/) (Germany), [CS SI](https://www.c-s.fr/) (France), [Gamma Remote Sensing AG](https://www.gamma-rs.ch/) (Switzerland), [International Institute for Applied Systems Analysis](https://www.iiasa.ac.at/) (Austria), and [Wageningen University](https://www.wur.nl/nl/Wageningen-University.htm) (The Netherlands).\n\nTwo versions of the WorldCover product are available:\n\n- WorldCover 2020 produced using v100 of the algorithm\n - [WorldCover 2020 v100 User Manual](https://esa-worldcover.s3.eu-central-1.amazonaws.com/v100/2020/docs/WorldCover_PUM_V1.0.pdf)\n - [WorldCover 2020 v100 Validation Report]()\n\n- WorldCover 2021 produced using v200 of the algorithm\n - [WorldCover 2021 v200 User Manual]()\n - [WorldCover 2021 v200 Validation Report]()\n\nSince the WorldCover maps for 2020 and 2021 were generated with different algorithm versions (v100 and v200, respectively), changes between the maps include both changes in real land cover and changes due to the algorithms used.\n", "instrument": "c-sar,msi", "platform": null, "platformSerialIdentifier": "sentinel-1a,sentinel-1b,sentinel-2a,sentinel-2b", "processingLevel": null, "keywords": "c-sar,esa,esa-worldcover,global,land-cover,msi,sentinel,sentinel-1a,sentinel-1b,sentinel-2a,sentinel-2b", "license": "CC-BY-4.0", "title": "ESA WorldCover", "missionStartDate": "2020-01-01T00:00:00Z"}}}, "usgs_satapi_aws": {"providers_config": {"landsat-c2l2-sr": {"productType": "landsat-c2l2-sr"}, "landsat-c2l2-st": {"productType": "landsat-c2l2-st"}, "landsat-c2ard-st": {"productType": "landsat-c2ard-st"}, "landsat-c2l2alb-bt": {"productType": "landsat-c2l2alb-bt"}, "landsat-c2l3-fsca": {"productType": "landsat-c2l3-fsca"}, "landsat-c2ard-bt": {"productType": "landsat-c2ard-bt"}, "landsat-c2l1": {"productType": "landsat-c2l1"}, "landsat-c2l3-ba": {"productType": "landsat-c2l3-ba"}, "landsat-c2l2alb-st": {"productType": "landsat-c2l2alb-st"}, "landsat-c2ard-sr": {"productType": "landsat-c2ard-sr"}, "landsat-c2l2alb-sr": {"productType": "landsat-c2l2alb-sr"}, "landsat-c2l2alb-ta": {"productType": "landsat-c2l2alb-ta"}, "landsat-c2l3-dswe": {"productType": "landsat-c2l3-dswe"}, "landsat-c2ard-ta": {"productType": "landsat-c2ard-ta"}}, "product_types_config": {"landsat-c2l2-sr": {"abstract": "The Landsat Surface Reflectance (SR) product measures the fraction of incoming solar radiation that is reflected from Earth's surface to the Landsat sensor.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l2-sr,surface-reflectance", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-2 UTM Surface Reflectance (SR) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l2-st": {"abstract": "The Landsat Surface Temperature (ST) product represents the temperature of the Earth's surface in Kelvin (K).", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l2-st,surface-temperature", "license":
"https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-2 UTM Surface Temperature (ST) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2ard-st": {"abstract": "The Landsat Surface Temperature (ST) product represents the temperature of the Earth's surface in Kelvin (K).", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2ard-st,surface-temperature", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Analysis Ready Data (ARD) Level-2 UTM Surface Temperature (ST) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l2alb-bt": {"abstract": "The Landsat Top of Atmosphere Brightness Temperature (BT) product is a top of atmosphere product with radiance calculated 'at-sensor', not atmospherically corrected, and expressed in units of Kelvin.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l2alb-bt,top-of-atmosphere-brightness-temperature", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-2 Albers Top of Atmosphere Brightness Temperature (BT) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l3-fsca": {"abstract": "The Landsat Fractional Snow Covered Area (fSCA) product contains an acquisition-based per-pixel snow cover fraction, an acquisition-based revised cloud mask for quality assessment, and a product metadata file.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,fractional-snow-covered-area,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l3-fsca", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-3 Fractional Snow Covered Area (fSCA) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2ard-bt": {"abstract": "The Landsat Top of Atmosphere Brightness Temperature (BT) product is a top of atmosphere product with radiance calculated 'at-sensor', not atmospherically corrected, and expressed in units of Kelvin.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2ard-bt,top-of-atmosphere-brightness-temperature", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Analysis Ready Data (ARD) Level-2 UTM Top of Atmosphere Brightness Temperature (BT) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l1": {"abstract": "The Landsat Level-1 product is a top of atmosphere product distributed as scaled and calibrated digital numbers.", "instrument": null, 
"platform": null, "platformSerialIdentifier": "LANDSAT_1,LANDSAT_2,LANDSAT_3,LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-1,landsat-2,landsat-3,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l1", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-1 Product", "missionStartDate": "1972-07-25T00:00:00.000Z"}, "landsat-c2l3-ba": {"abstract": "The Landsat Burned Area (BA) contains two acquisition-based raster data products that represent burn classification and burn probability.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,burned-area,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l3-ba", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-3 Burned Area (BA) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l2alb-st": {"abstract": "The Landsat Surface Temperature (ST) product represents the temperature of the Earth's surface in Kelvin (K).", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l2alb-st,surface-temperature", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-2 Albers Surface Temperature (ST) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2ard-sr": {"abstract": "The Landsat Surface Reflectance (SR) product measures the fraction of incoming solar radiation that is reflected from Earth's surface to the Landsat sensor.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2ard-sr,surface-reflectance", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Analysis Ready Data (ARD) Level-2 UTM Surface Reflectance (SR) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l2alb-sr": {"abstract": "The Landsat Surface Reflectance (SR) product measures the fraction of incoming solar radiation that is reflected from Earth's surface to the Landsat sensor.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l2alb-sr,surface-reflectance", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-2 Albers Surface Reflectance (SR) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l2alb-ta": {"abstract": "The Landsat Top of Atmosphere (TA) Reflectance product applies per pixel angle band corrections to the Level-1 radiance product.", "instrument": null, "platform": null, "platformSerialIdentifier": 
"LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l2alb-ta,top-of-atmosphere-reflectance", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-2 Albers Top of Atmosphere (TA) Reflectance Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l3-dswe": {"abstract": "The Landsat Dynamic Surface Water Extent (DSWE) product contains six acquisition-based raster data products pertaining to the existence and condition of surface water.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,dynamic-surface-water-extent-,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l3-dswe", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-3 Dynamic Surface Water Extent (DSWE) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2ard-ta": {"abstract": "The Landsat Top of Atmosphere (TA) Reflectance product applies per pixel angle band corrections to the Level-1 radiance product.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2ard-ta,top-of-atmosphere-reflectance", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Analysis Ready Data (ARD) Level-2 UTM Top of Atmosphere (TA) Reflectance Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}}}} +{"astraea_eod": {"providers_config": {"landsat8_c2l1t1": {"productType": "landsat8_c2l1t1"}, "mcd43a4": {"productType": "mcd43a4"}, "mod11a1": {"productType": "mod11a1"}, "mod13a1": {"productType": "mod13a1"}, "myd11a1": {"productType": "myd11a1"}, "myd13a1": {"productType": "myd13a1"}, "maxar_open_data": {"productType": "maxar_open_data"}, "naip": {"productType": "naip"}, "sentinel1_l1c_grd": {"productType": "sentinel1_l1c_grd"}, "sentinel2_l1c": {"productType": "sentinel2_l1c"}, "sentinel2_l2a": {"productType": "sentinel2_l2a"}, "spacenet7": {"productType": "spacenet7"}, "umbra_open_data": {"productType": "umbra_open_data"}}, "product_types_config": {"landsat8_c2l1t1": {"abstract": "Landsat 8 Collection 2 Tier 1 Precision Terrain from Landsat 8 Operational Land Imager (OLI) and Thermal Infrared Sensor (TIRS) data", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "landsat8-c2l1t1", "license": "PDDL-1.0", "title": "Landsat 8 - Level 1", "missionStartDate": "2013-03-18T15:59:02.333Z"}, "mcd43a4": {"abstract": "MCD43A4: MODIS/Terra and Aqua Nadir BRDF-Adjusted Reflectance Daily L3 Global 500 m SIN Grid V006", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "mcd43a4", "license": "CC-PDDC", "title": "MCD43A4 NBAR", "missionStartDate": "2000-02-16T00:00:00.000Z"}, "mod11a1": {"abstract": "MOD11A1: MODIS/Terra Land Surface Temperature/Emissivity Daily L3 Global 1 km SIN Grid V006", "instrument": null, "platform": null, 
"platformSerialIdentifier": null, "processingLevel": null, "keywords": "mod11a1", "license": "CC-PDDC", "title": "MOD11A1 LST", "missionStartDate": "2000-02-24T00:00:00.000Z"}, "mod13a1": {"abstract": "MOD13A1: MODIS/Terra Vegetation Indices 16-Day L3 Global 500 m SIN Grid V006", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "mod13a1", "license": "CC-PDDC", "title": "MOD13A1 VI", "missionStartDate": "2000-02-18T00:00:00.000Z"}, "myd11a1": {"abstract": "MYD11A1: MODIS/Aqua Land Surface Temperature/Emissivity Daily L3 Global 1 km SIN Grid V006", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "myd11a1", "license": "CC-PDDC", "title": "MYD11A1 LST", "missionStartDate": "2002-07-04T00:00:00.000Z"}, "myd13a1": {"abstract": "MYD13A1: MODIS/Aqua Vegetation Indices 16-Day L3 Global 500 m SIN Grid V006", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "myd13a1", "license": "CC-PDDC", "title": "MYD13A1 VI", "missionStartDate": "2002-07-04T00:00:00.000Z"}, "maxar_open_data": {"abstract": "Maxar Open Data", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "maxar-open-data", "license": "CC-BY-NC-4.0", "title": "Maxar Open Data", "missionStartDate": "2008-01-15T00:00:00.000Z"}, "naip": {"abstract": "National Agriculture Imagery Program aerial imagery", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "naip", "license": "CC-PDDC", "title": "NAIP", "missionStartDate": "2012-04-23T12:00:00.000Z"}, "sentinel1_l1c_grd": {"abstract": "Sentinel-1 Level-1 Ground Range Detected data", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel1-l1c-grd", "license": "CC-BY-SA-3.0", "title": "Sentinel-1 L1C GRD", "missionStartDate": "2017-09-27T14:19:16.000"}, "sentinel2_l1c": {"abstract": "Sentinel-2 Level-1C top of atmosphere", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel2-l1c", "license": "CC-BY-SA-3.0", "title": "Sentinel-2 L1C", "missionStartDate": "2015-06-27T10:25:31.456Z"}, "sentinel2_l2a": {"abstract": "Sentinel-2 Level-2A atmospherically corrected data", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel2-l2a", "license": "CC-BY-SA-3.0", "title": "Sentinel-2 L2A", "missionStartDate": "2018-04-01T07:02:22.463Z"}, "spacenet7": {"abstract": "SpaceNet 7 Imagery and Labels", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "spacenet7", "license": "CC-BY-SA-4.0", "title": "SpaceNet 7", "missionStartDate": "2018-01-01T00:00:00.000Z"}, "umbra_open_data": {"abstract": "Umbra Open Data", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "umbra-open-data", "license": "proprietary", "title": "Umbra Open Data", "missionStartDate": null}}}, "creodias": {"providers_config": {"Sentinel1": {"collection": "Sentinel1"}, "Sentinel1RTC": {"collection": "Sentinel1RTC"}, "Sentinel2": {"collection": "Sentinel2"}, "Sentinel3": {"collection": "Sentinel3"}, "Sentinel5P": {"collection": "Sentinel5P"}, "Sentinel6": {"collection": "Sentinel6"}, "Landsat5": {"collection": "Landsat5"}, "Landsat7": {"collection": 
"Landsat7"}, "Landsat8": {"collection": "Landsat8"}, "Envisat": {"collection": "Envisat"}, "SMOS": {"collection": "SMOS"}, "S2GLC": {"collection": "S2GLC"}, "CopDem": {"collection": "CopDem"}}, "product_types_config": {"Sentinel1": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel1", "license": null, "title": null, "missionStartDate": null}, "Sentinel1RTC": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel1rtc", "license": null, "title": null, "missionStartDate": null}, "Sentinel2": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel2", "license": null, "title": null, "missionStartDate": null}, "Sentinel3": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel3", "license": null, "title": null, "missionStartDate": null}, "Sentinel5P": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel5p", "license": null, "title": null, "missionStartDate": null}, "Sentinel6": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "sentinel6", "license": null, "title": null, "missionStartDate": null}, "Landsat5": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "landsat5", "license": null, "title": null, "missionStartDate": null}, "Landsat7": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "landsat7", "license": null, "title": null, "missionStartDate": null}, "Landsat8": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "landsat8", "license": null, "title": null, "missionStartDate": null}, "Envisat": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "envisat", "license": null, "title": null, "missionStartDate": null}, "SMOS": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "smos", "license": null, "title": null, "missionStartDate": null}, "S2GLC": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "s2glc", "license": null, "title": null, "missionStartDate": null}, "CopDem": {"abstract": null, "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "copdem", "license": null, "title": null, "missionStartDate": null}}}, "earth_search": {"providers_config": {"cop-dem-glo-30": {"productType": "cop-dem-glo-30"}, "naip": {"productType": "naip"}, "sentinel-2-l2a": {"productType": "sentinel-2-l2a"}, "sentinel-2-l1c": {"productType": "sentinel-2-l1c"}, "cop-dem-glo-90": {"productType": "cop-dem-glo-90"}, "landsat-c2-l2": {"productType": "landsat-c2-l2"}, "sentinel-1-grd": {"productType": "sentinel-1-grd"}, "sentinel-2-c1-l2a": {"productType": "sentinel-2-c1-l2a"}}, "product_types_config": {"cop-dem-glo-30": {"abstract": "The Copernicus DEM is a Digital Surface Model (DSM) which represents the surface of 
the Earth including buildings, infrastructure and vegetation. GLO-30 Public provides limited worldwide coverage at 30 meters because a small subset of tiles covering specific countries are not yet released to the public by the Copernicus Programme.", "instrument": null, "platform": null, "platformSerialIdentifier": "tandem-x", "processingLevel": null, "keywords": "cop-dem-glo-30,copernicus,dem,dsm,elevation,tandem-x", "license": "proprietary", "title": "Copernicus DEM GLO-30", "missionStartDate": "2021-04-22T00:00:00Z"}, "naip": {"abstract": "The [National Agriculture Imagery Program](https://www.fsa.usda.gov/programs-and-services/aerial-photography/imagery-programs/naip-imagery/) (NAIP) provides U.S.-wide, high-resolution aerial imagery, with four spectral bands (R, G, B, IR). NAIP is administered by the [Aerial Field Photography Office](https://www.fsa.usda.gov/programs-and-services/aerial-photography/) (AFPO) within the [US Department of Agriculture](https://www.usda.gov/) (USDA). Data are captured at least once every three years for each state. This dataset represents NAIP data from 2010-present, in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "aerial,afpo,agriculture,imagery,naip,united-states,usda", "license": "proprietary", "title": "NAIP: National Agriculture Imagery Program", "missionStartDate": "2010-01-01T00:00:00Z"}, "sentinel-2-l2a": {"abstract": "Global Sentinel-2 data from the Multispectral Instrument (MSI) onboard Sentinel-2", "instrument": "msi", "platform": "sentinel-2", "platformSerialIdentifier": "sentinel-2a,sentinel-2b", "processingLevel": null, "keywords": "earth-observation,esa,msi,sentinel,sentinel-2,sentinel-2-l2a,sentinel-2a,sentinel-2b", "license": "proprietary", "title": "Sentinel-2 Level 2A", "missionStartDate": "2015-06-27T10:25:31.456000Z"}, "sentinel-2-l1c": {"abstract": "Global Sentinel-2 data from the Multispectral Instrument (MSI) onboard Sentinel-2", "instrument": "msi", "platform": "sentinel-2", "platformSerialIdentifier": "sentinel-2a,sentinel-2b", "processingLevel": null, "keywords": "earth-observation,esa,msi,sentinel,sentinel-2,sentinel-2-l1c,sentinel-2a,sentinel-2b", "license": "proprietary", "title": "Sentinel-2 Level 1C", "missionStartDate": "2015-06-27T10:25:31.456000Z"}, "cop-dem-glo-90": {"abstract": "The Copernicus DEM is a Digital Surface Model (DSM) which represents the surface of the Earth including buildings, infrastructure and vegetation. 
GLO-90 provides worldwide coverage at 90 meters.", "instrument": null, "platform": null, "platformSerialIdentifier": "tandem-x", "processingLevel": null, "keywords": "cop-dem-glo-90,copernicus,dem,elevation,tandem-x", "license": "proprietary", "title": "Copernicus DEM GLO-90", "missionStartDate": "2021-04-22T00:00:00Z"}, "landsat-c2-l2": {"abstract": "Atmospherically corrected global Landsat Collection 2 Level-2 data from the Thematic Mapper (TM) onboard Landsat 4 and 5, the Enhanced Thematic Mapper Plus (ETM+) onboard Landsat 7, and the Operational Land Imager (OLI) and Thermal Infrared Sensor (TIRS) onboard Landsat 8 and 9.", "instrument": "tm,etm+,oli,tirs", "platform": null, "platformSerialIdentifier": "landsat-4,landsat-5,landsat-7,landsat-8,landsat-9", "processingLevel": null, "keywords": "etm+,global,imagery,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2-l2,nasa,oli,reflectance,satellite,temperature,tirs,tm,usgs", "license": "proprietary", "title": "Landsat Collection 2 Level-2", "missionStartDate": "1982-08-22T00:00:00Z"}, "sentinel-1-grd": {"abstract": "Sentinel-1 is a pair of Synthetic Aperture Radar (SAR) imaging satellites launched in 2014 and 2016 by the European Space Agency (ESA). Their 6-day revisit cycle and ability to observe through clouds make this dataset perfect for sea and land monitoring, emergency response due to environmental disasters, and economic applications. This dataset represents the global Sentinel-1 GRD archive, from the beginning to the present, converted to cloud-optimized GeoTIFF format.", "instrument": null, "platform": "sentinel-1", "platformSerialIdentifier": "sentinel-1a,sentinel-1b", "processingLevel": null, "keywords": "c-band,copernicus,esa,grd,sar,sentinel,sentinel-1,sentinel-1-grd,sentinel-1a,sentinel-1b", "license": "proprietary", "title": "Sentinel-1 Level 1C Ground Range Detected (GRD)", "missionStartDate": "2014-10-10T00:28:21Z"}, "sentinel-2-c1-l2a": {"abstract": "Sentinel-2 Collection 1 L2A, data from the Multispectral Instrument (MSI) onboard Sentinel-2", "instrument": "msi", "platform": "sentinel-2", "platformSerialIdentifier": "sentinel-2a,sentinel-2b", "processingLevel": null, "keywords": "earth-observation,esa,msi,sentinel,sentinel-2,sentinel-2-c1-l2a,sentinel-2a,sentinel-2b", "license": "proprietary", "title": "Sentinel-2 Collection 1 Level-2A", "missionStartDate": "2015-06-27T10:25:31.456000Z"}}}, "earth_search_cog": null, "earth_search_gcs": null, "planetary_computer": {"providers_config": {"daymet-annual-pr": {"productType": "daymet-annual-pr"}, "daymet-daily-hi": {"productType": "daymet-daily-hi"}, "3dep-seamless": {"productType": "3dep-seamless"}, "3dep-lidar-dsm": {"productType": "3dep-lidar-dsm"}, "fia": {"productType": "fia"}, "sentinel-1-rtc": {"productType": "sentinel-1-rtc"}, "gridmet": {"productType": "gridmet"}, "daymet-annual-na": {"productType": "daymet-annual-na"}, "daymet-monthly-na": {"productType": "daymet-monthly-na"}, "daymet-annual-hi": {"productType": "daymet-annual-hi"}, "daymet-monthly-hi": {"productType": "daymet-monthly-hi"}, "daymet-monthly-pr": {"productType": "daymet-monthly-pr"}, "gnatsgo-tables": {"productType": "gnatsgo-tables"}, "hgb": {"productType": "hgb"}, "cop-dem-glo-30": {"productType": "cop-dem-glo-30"}, "cop-dem-glo-90": {"productType": "cop-dem-glo-90"}, "goes-cmi": {"productType": "goes-cmi"}, "terraclimate": {"productType": "terraclimate"}, "nasa-nex-gddp-cmip6": {"productType": "nasa-nex-gddp-cmip6"}, "gpm-imerg-hhr": {"productType": "gpm-imerg-hhr"},
"gnatsgo-rasters": {"productType": "gnatsgo-rasters"}, "3dep-lidar-hag": {"productType": "3dep-lidar-hag"}, "io-lulc-annual-v02": {"productType": "io-lulc-annual-v02"}, "3dep-lidar-intensity": {"productType": "3dep-lidar-intensity"}, "3dep-lidar-pointsourceid": {"productType": "3dep-lidar-pointsourceid"}, "mtbs": {"productType": "mtbs"}, "noaa-c-cap": {"productType": "noaa-c-cap"}, "3dep-lidar-copc": {"productType": "3dep-lidar-copc"}, "modis-64A1-061": {"productType": "modis-64A1-061"}, "alos-fnf-mosaic": {"productType": "alos-fnf-mosaic"}, "3dep-lidar-returns": {"productType": "3dep-lidar-returns"}, "mobi": {"productType": "mobi"}, "landsat-c2-l2": {"productType": "landsat-c2-l2"}, "era5-pds": {"productType": "era5-pds"}, "chloris-biomass": {"productType": "chloris-biomass"}, "kaza-hydroforecast": {"productType": "kaza-hydroforecast"}, "planet-nicfi-analytic": {"productType": "planet-nicfi-analytic"}, "modis-17A2H-061": {"productType": "modis-17A2H-061"}, "modis-11A2-061": {"productType": "modis-11A2-061"}, "daymet-daily-pr": {"productType": "daymet-daily-pr"}, "3dep-lidar-dtm-native": {"productType": "3dep-lidar-dtm-native"}, "3dep-lidar-classification": {"productType": "3dep-lidar-classification"}, "3dep-lidar-dtm": {"productType": "3dep-lidar-dtm"}, "gap": {"productType": "gap"}, "modis-17A2HGF-061": {"productType": "modis-17A2HGF-061"}, "planet-nicfi-visual": {"productType": "planet-nicfi-visual"}, "gbif": {"productType": "gbif"}, "modis-17A3HGF-061": {"productType": "modis-17A3HGF-061"}, "modis-09A1-061": {"productType": "modis-09A1-061"}, "alos-dem": {"productType": "alos-dem"}, "alos-palsar-mosaic": {"productType": "alos-palsar-mosaic"}, "deltares-water-availability": {"productType": "deltares-water-availability"}, "modis-16A3GF-061": {"productType": "modis-16A3GF-061"}, "modis-21A2-061": {"productType": "modis-21A2-061"}, "us-census": {"productType": "us-census"}, "jrc-gsw": {"productType": "jrc-gsw"}, "deltares-floods": {"productType": "deltares-floods"}, "modis-43A4-061": {"productType": "modis-43A4-061"}, "modis-09Q1-061": {"productType": "modis-09Q1-061"}, "modis-14A1-061": {"productType": "modis-14A1-061"}, "hrea": {"productType": "hrea"}, "modis-13Q1-061": {"productType": "modis-13Q1-061"}, "modis-14A2-061": {"productType": "modis-14A2-061"}, "sentinel-2-l2a": {"productType": "sentinel-2-l2a"}, "modis-15A2H-061": {"productType": "modis-15A2H-061"}, "modis-11A1-061": {"productType": "modis-11A1-061"}, "modis-15A3H-061": {"productType": "modis-15A3H-061"}, "modis-13A1-061": {"productType": "modis-13A1-061"}, "daymet-daily-na": {"productType": "daymet-daily-na"}, "nrcan-landcover": {"productType": "nrcan-landcover"}, "modis-10A2-061": {"productType": "modis-10A2-061"}, "ecmwf-forecast": {"productType": "ecmwf-forecast"}, "noaa-mrms-qpe-24h-pass2": {"productType": "noaa-mrms-qpe-24h-pass2"}, "sentinel-1-grd": {"productType": "sentinel-1-grd"}, "nasadem": {"productType": "nasadem"}, "io-lulc": {"productType": "io-lulc"}, "landsat-c2-l1": {"productType": "landsat-c2-l1"}, "drcog-lulc": {"productType": "drcog-lulc"}, "chesapeake-lc-7": {"productType": "chesapeake-lc-7"}, "chesapeake-lc-13": {"productType": "chesapeake-lc-13"}, "chesapeake-lu": {"productType": "chesapeake-lu"}, "noaa-mrms-qpe-1h-pass1": {"productType": "noaa-mrms-qpe-1h-pass1"}, "noaa-mrms-qpe-1h-pass2": {"productType": "noaa-mrms-qpe-1h-pass2"}, "noaa-nclimgrid-monthly": {"productType": "noaa-nclimgrid-monthly"}, "goes-glm": {"productType": "goes-glm"}, "usda-cdl": {"productType": "usda-cdl"}, "eclipse": 
{"productType": "eclipse"}, "esa-cci-lc": {"productType": "esa-cci-lc"}, "esa-cci-lc-netcdf": {"productType": "esa-cci-lc-netcdf"}, "fws-nwi": {"productType": "fws-nwi"}, "usgs-lcmap-conus-v13": {"productType": "usgs-lcmap-conus-v13"}, "usgs-lcmap-hawaii-v10": {"productType": "usgs-lcmap-hawaii-v10"}, "noaa-climate-normals-tabular": {"productType": "noaa-climate-normals-tabular"}, "noaa-climate-normals-netcdf": {"productType": "noaa-climate-normals-netcdf"}, "noaa-climate-normals-gridded": {"productType": "noaa-climate-normals-gridded"}, "aster-l1t": {"productType": "aster-l1t"}, "cil-gdpcir-cc-by-sa": {"productType": "cil-gdpcir-cc-by-sa"}, "naip": {"productType": "naip"}, "io-lulc-9-class": {"productType": "io-lulc-9-class"}, "io-biodiversity": {"productType": "io-biodiversity"}, "noaa-cdr-sea-surface-temperature-whoi": {"productType": "noaa-cdr-sea-surface-temperature-whoi"}, "noaa-cdr-ocean-heat-content": {"productType": "noaa-cdr-ocean-heat-content"}, "cil-gdpcir-cc0": {"productType": "cil-gdpcir-cc0"}, "cil-gdpcir-cc-by": {"productType": "cil-gdpcir-cc-by"}, "noaa-cdr-sea-surface-temperature-whoi-netcdf": {"productType": "noaa-cdr-sea-surface-temperature-whoi-netcdf"}, "noaa-cdr-sea-surface-temperature-optimum-interpolation": {"productType": "noaa-cdr-sea-surface-temperature-optimum-interpolation"}, "modis-10A1-061": {"productType": "modis-10A1-061"}, "sentinel-5p-l2-netcdf": {"productType": "sentinel-5p-l2-netcdf"}, "sentinel-3-olci-wfr-l2-netcdf": {"productType": "sentinel-3-olci-wfr-l2-netcdf"}, "noaa-cdr-ocean-heat-content-netcdf": {"productType": "noaa-cdr-ocean-heat-content-netcdf"}, "sentinel-3-synergy-aod-l2-netcdf": {"productType": "sentinel-3-synergy-aod-l2-netcdf"}, "sentinel-3-synergy-v10-l2-netcdf": {"productType": "sentinel-3-synergy-v10-l2-netcdf"}, "sentinel-3-olci-lfr-l2-netcdf": {"productType": "sentinel-3-olci-lfr-l2-netcdf"}, "sentinel-3-sral-lan-l2-netcdf": {"productType": "sentinel-3-sral-lan-l2-netcdf"}, "sentinel-3-slstr-lst-l2-netcdf": {"productType": "sentinel-3-slstr-lst-l2-netcdf"}, "sentinel-3-slstr-wst-l2-netcdf": {"productType": "sentinel-3-slstr-wst-l2-netcdf"}, "sentinel-3-sral-wat-l2-netcdf": {"productType": "sentinel-3-sral-wat-l2-netcdf"}, "ms-buildings": {"productType": "ms-buildings"}, "sentinel-3-slstr-frp-l2-netcdf": {"productType": "sentinel-3-slstr-frp-l2-netcdf"}, "sentinel-3-synergy-syn-l2-netcdf": {"productType": "sentinel-3-synergy-syn-l2-netcdf"}, "sentinel-3-synergy-vgp-l2-netcdf": {"productType": "sentinel-3-synergy-vgp-l2-netcdf"}, "sentinel-3-synergy-vg1-l2-netcdf": {"productType": "sentinel-3-synergy-vg1-l2-netcdf"}, "esa-worldcover": {"productType": "esa-worldcover"}}, "product_types_config": {"daymet-annual-pr": {"abstract": "Annual climate summaries derived from [Daymet](https://daymet.ornl.gov) Version 4 daily data at a 1 km x 1 km spatial resolution for five variables: minimum and maximum temperature, precipitation, vapor pressure, and snow water equivalent. Annual averages are provided for minimum and maximum temperature, vapor pressure, and snow water equivalent, and annual totals are provided for the precipitation variable.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. 
Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1852](https://doi.org/10.3334/ORNLDAAC/1852) to cite your usage of the data.\n\nThis dataset provides coverage for Puerto Rico; Hawaii and North America are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#annual). \n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,daymet,daymet-annual-pr,precipitation,puerto-rico,temperature,vapor-pressure", "license": "proprietary", "title": "Daymet Annual Puerto Rico", "missionStartDate": "1980-07-01T12:00:00Z"}, "daymet-daily-hi": {"abstract": "Gridded estimates of daily weather parameters. [Daymet](https://daymet.ornl.gov) Version 4 variables include the following parameters: minimum temperature, maximum temperature, precipitation, shortwave radiation, vapor pressure, snow water equivalent, and day length.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1840](https://doi.org/10.3334/ORNLDAAC/1840) to cite your usage of the data.\n\nThis dataset provides coverage for Hawaii; North America and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#daily).\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "daymet,daymet-daily-hi,hawaii,precipitation,temperature,vapor-pressure,weather", "license": "proprietary", "title": "Daymet Daily Hawaii", "missionStartDate": "1980-01-01T12:00:00Z"}, "3dep-seamless": {"abstract": "U.S.-wide digital elevation data at horizontal resolutions ranging from one to sixty meters.\n\nThe [USGS 3D Elevation Program (3DEP) Datasets](https://www.usgs.gov/core-science-systems/ngp/3dep) from the [National Map](https://www.usgs.gov/core-science-systems/national-geospatial-program/national-map) are the primary elevation data product produced and distributed by the USGS. The 3DEP program provides raster elevation data for the conterminous United States, Alaska, Hawaii, and the island territories, at a variety of spatial resolutions. The seamless DEM layers produced by the 3DEP program are updated frequently to integrate newly available, improved elevation source data. \n\nDEM layers are available nationally at grid spacings of 1 arc-second (approximately 30 meters) for the conterminous United States, and at approximately 1, 3, and 9 meters for parts of the United States.
Most seamless DEM data for Alaska is available at a resolution of approximately 60 meters, where only lower resolution source data exist.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-seamless,dem,elevation,ned,usgs", "license": "PDDL-1.0", "title": "USGS 3DEP Seamless DEMs", "missionStartDate": "1925-01-01T00:00:00Z"}, "3dep-lidar-dsm": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc). It creates a Digital Surface Model (DSM) using [`pdal.filters.range`](https://pdal.io/stages/filters.range.html#filters-range) to output a collection of Cloud Optimized GeoTIFFs, removing all points that have been classified as noise.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-dsm,cog,dsm,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Digital Surface Model", "missionStartDate": "2012-01-01T00:00:00Z"}, "fia": {"abstract": "Status and trends on U.S. forest location, health, growth, mortality, and production, from the U.S. Forest Service's [Forest Inventory and Analysis](https://www.fia.fs.fed.us/) (FIA) program.\n\nThe Forest Inventory and Analysis (FIA) dataset is a nationwide survey of the forest assets of the United States. The FIA research program has been in existence since 1928. FIA's primary objective is to determine the extent, condition, volume, growth, and use of trees on the nation's forest land.\n\nDomain: continental U.S., 1928-2018\n\nResolution: plot-level (irregular polygon)\n\nThis dataset was curated and brought to Azure by [CarbonPlan](https://carbonplan.org/).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "biomass,carbon,fia,forest,forest-service,species,usda", "license": "CC0-1.0", "title": "Forest Inventory and Analysis", "missionStartDate": "2020-06-01T00:00:00Z"}, "sentinel-1-rtc": {"abstract": "The [Sentinel-1](https://sentinel.esa.int/web/sentinel/missions/sentinel-1) mission is a constellation of two polar-orbiting satellites, operating day and night performing C-band synthetic aperture radar imaging. The Sentinel-1 Radiometrically Terrain Corrected (RTC) data in this collection is a radiometrically terrain corrected product derived from the [Ground Range Detected (GRD) Level-1](https://planetarycomputer.microsoft.com/dataset/sentinel-1-grd) products produced by the European Space Agency. The RTC processing is performed by [Catalyst](https://catalyst.earth/).\n\nRadiometric Terrain Correction accounts for terrain variations that affect both the position of a given point on the Earth's surface and the brightness of the radar return, as expressed in radar geometry. Without treatment, the hill-slope modulations of the radiometry threaten to overwhelm weaker thematic land cover-induced backscatter differences. Additionally, comparison of backscatter from multiple satellites, modes, or tracks loses meaning.\n\nA Planetary Computer account is required to retrieve SAS tokens to read the RTC data. 
See the [documentation](http://planetarycomputer.microsoft.com/docs/concepts/sas/#when-an-account-is-needed) for more information.\n\n### Methodology\n\nThe Sentinel-1 GRD product is converted to calibrated intensity using the conversion algorithm described in the ESA technical note ESA-EOPG-CSCOP-TN-0002, [Radiometric Calibration of S-1 Level-1 Products Generated by the S-1 IPF](https://ai4edatasetspublicassets.blob.core.windows.net/assets/pdfs/sentinel-1/S1-Radiometric-Calibration-V1.0.pdf). The flat earth calibration values for gamma correction (i.e. perpendicular to the radar line of sight) are extracted from the GRD metadata. The calibration coefficients are applied as a two-dimensional correction in range (by sample number) and azimuth (by time). All available polarizations are calibrated and written as separate layers of a single file. The calibrated SAR output is reprojected to nominal map orientation with north at the top and west to the left.\n\nThe data is then radiometrically terrain corrected using PlanetDEM as the elevation source. The correction algorithm is nominally based upon D. Small, [\u201cFlattening Gamma: Radiometric Terrain Correction for SAR Imagery\u201d](https://ai4edatasetspublicassets.blob.core.windows.net/assets/pdfs/sentinel-1/2011_Flattening_Gamma.pdf), IEEE Transactions on Geoscience and Remote Sensing, Vol 49, No 8., August 2011, pp 3081-3093. For each image scan line, the digital elevation model is interpolated to determine the elevation corresponding to the position associated with the known near slant range distance and arc length for each input pixel. The elevations at the four corners of each pixel are estimated using bilinear resampling. The four elevations are divided into two triangular facets and reprojected onto the plane perpendicular to the radar line of sight to provide an estimate of the area illuminated by the radar for each earth flattened pixel. The uncalibrated sum at each earth flattened pixel is normalized by dividing by the flat earth surface area. The adjustment for gamma intensity is given by dividing the normalized result by the cosine of the incident angle. Pixels which are not illuminated by the radar due to the viewing geometry are flagged as shadow.\n\nCalibrated data is then orthorectified to the appropriate UTM projection. The orthorectified output maintains the original sample sizes (in range and azimuth) and was not shifted to any specific grid.\n\nRTC data is processed only for the Interferometric Wide Swath (IW) mode, which is the main acquisition mode over land and satisfies the majority of service requirements.\n", "instrument": null, "platform": "Sentinel-1", "platformSerialIdentifier": "SENTINEL-1A,SENTINEL-1B", "processingLevel": null, "keywords": "c-band,copernicus,esa,rtc,sar,sentinel,sentinel-1,sentinel-1-rtc,sentinel-1a,sentinel-1b", "license": "CC-BY-4.0", "title": "Sentinel 1 Radiometrically Terrain Corrected (RTC)", "missionStartDate": "2014-10-10T00:28:21Z"}, "gridmet": {"abstract": "gridMET is a dataset of daily surface meteorological data at approximately four-kilometer resolution, covering the contiguous U.S. from 1979 to the present. 
These data can provide important inputs for ecological, agricultural, and hydrological models.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,gridmet,precipitation,temperature,vapor-pressure,water", "license": "CC0-1.0", "title": "gridMET", "missionStartDate": "1979-01-01T00:00:00Z"}, "daymet-annual-na": {"abstract": "Annual climate summaries derived from [Daymet](https://daymet.ornl.gov) Version 4 daily data at a 1 km x 1 km spatial resolution for five variables: minimum and maximum temperature, precipitation, vapor pressure, and snow water equivalent. Annual averages are provided for minimum and maximum temperature, vapor pressure, and snow water equivalent, and annual totals are provided for the precipitation variable.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1852](https://doi.org/10.3334/ORNLDAAC/1852) to cite your usage of the data.\n\nThis dataset provides coverage for North America; Hawaii and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#annual). \n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,daymet,daymet-annual-na,north-america,precipitation,temperature,vapor-pressure", "license": "proprietary", "title": "Daymet Annual North America", "missionStartDate": "1980-07-01T12:00:00Z"}, "daymet-monthly-na": {"abstract": "Monthly climate summaries derived from [Daymet](https://daymet.ornl.gov) Version 4 daily data at a 1 km x 1 km spatial resolution for five variables: minimum and maximum temperature, precipitation, vapor pressure, and snow water equivalent. Monthly averages are provided for minimum and maximum temperature, vapor pressure, and snow water equivalent, and monthly totals are provided for the precipitation variable.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year.
Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1855](https://doi.org/10.3334/ORNLDAAC/1855) to cite your usage of the data.\n\nThis dataset provides coverage for North America; Hawaii and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#monthly).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,daymet,daymet-monthly-na,north-america,precipitation,temperature,vapor-pressure", "license": "proprietary", "title": "Daymet Monthly North America", "missionStartDate": "1980-01-16T12:00:00Z"}, "daymet-annual-hi": {"abstract": "Annual climate summaries derived from [Daymet](https://daymet.ornl.gov) Version 4 daily data at a 1 km x 1 km spatial resolution for five variables: minimum and maximum temperature, precipitation, vapor pressure, and snow water equivalent. Annual averages are provided for minimum and maximum temperature, vapor pressure, and snow water equivalent, and annual totals are provided for the precipitation variable.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1852](https://doi.org/10.3334/ORNLDAAC/1852) to cite your usage of the data.\n\nThis dataset provides coverage for Hawaii; North America and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#annual). \n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,daymet,daymet-annual-hi,hawaii,precipitation,temperature,vapor-pressure", "license": "proprietary", "title": "Daymet Annual Hawaii", "missionStartDate": "1980-07-01T12:00:00Z"}, "daymet-monthly-hi": {"abstract": "Monthly climate summaries derived from [Daymet](https://daymet.ornl.gov) Version 4 daily data at a 1 km x 1 km spatial resolution for five variables: minimum and maximum temperature, precipitation, vapor pressure, and snow water equivalent. Monthly averages are provided for minimum and maximum temperature, vapor pressure, and snow water equivalent, and monthly totals are provided for the precipitation variable.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year.
Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1855](https://doi.org/10.3334/ORNLDAAC/1855) to cite your usage of the data.\n\nThis dataset provides coverage for Hawaii; North America and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#monthly).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,daymet,daymet-monthly-hi,hawaii,precipitation,temperature,vapor-pressure", "license": "proprietary", "title": "Daymet Monthly Hawaii", "missionStartDate": "1980-01-16T12:00:00Z"}, "daymet-monthly-pr": {"abstract": "Monthly climate summaries derived from [Daymet](https://daymet.ornl.gov) Version 4 daily data at a 1 km x 1 km spatial resolution for five variables: minimum and maximum temperature, precipitation, vapor pressure, and snow water equivalent. Monthly averages are provided for minimum and maximum temperature, vapor pressure, and snow water equivalent, and monthly totals are provided for the precipitation variable.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1855](https://doi.org/10.3334/ORNLDAAC/1855) to cite your usage of the data.\n\nThis dataset provides coverage for Puerto Rico; Hawaii and North America are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#monthly).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,daymet,daymet-monthly-pr,precipitation,puerto-rico,temperature,vapor-pressure", "license": "proprietary", "title": "Daymet Monthly Puerto Rico", "missionStartDate": "1980-01-16T12:00:00Z"}, "gnatsgo-tables": {"abstract": "This collection contains the table data for gNATSGO. This table data can be used to determine the values of raster data cells for Items in the [gNATSGO Rasters](https://planetarycomputer.microsoft.com/dataset/gnatsgo-rasters) Collection.\n\nThe gridded National Soil Survey Geographic Database (gNATSGO) is a USDA-NRCS Soil & Plant Science Division (SPSD) composite database that provides complete coverage of the best available soils information for all areas of the United States and Island Territories. It was created by combining data from the Soil Survey Geographic Database (SSURGO), State Soil Geographic Database (STATSGO2), and Raster Soil Survey Databases (RSS) into a single seamless ESRI file geodatabase.\n\nSSURGO is the SPSD flagship soils database that has over 100 years of field-validated detailed soil mapping data. SSURGO contains soils information for more than 90 percent of the United States and island territories, but unmapped land remains.
STATSGO2 is a general soil map that has soils data for all of the United States and island territories, but the data is not as detailed as the SSURGO data. The Raster Soil Surveys (RSSs) are the next generation soil survey databases developed using advanced digital soil mapping methods.\n\nThe gNATSGO database is composed primarily of SSURGO data, but STATSGO2 data was used to fill in the gaps. The RSSs are a newer product with relatively limited spatial extent. These RSSs were merged into the gNATSGO after combining the SSURGO and STATSGO2 data. The extent of RSS is expected to increase in the coming years.\n\nSee the [official documentation](https://www.nrcs.usda.gov/wps/portal/nrcs/detail/soils/survey/geo/?cid=nrcseprd1464625)", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "gnatsgo-tables,natsgo,rss,soils,ssurgo,statsgo2,united-states,usda", "license": "CC0-1.0", "title": "gNATSGO Soil Database - Tables", "missionStartDate": "2020-07-01T00:00:00Z"}, "hgb": {"abstract": "This dataset provides temporally consistent and harmonized global maps of aboveground and belowground biomass carbon density for the year 2010 at 300m resolution. The aboveground biomass map integrates land-cover-specific, remotely sensed maps of woody, grassland, cropland, and tundra biomass. Input maps were amassed from the published literature and, where necessary, updated to cover the focal extent or time period. The belowground biomass map similarly integrates matching maps derived from each aboveground biomass map and land-cover-specific empirical models. Aboveground and belowground maps were then integrated separately using ancillary maps of percent tree/land cover and a rule-based decision tree. Maps reporting the accumulated uncertainty of pixel-level estimates are also provided.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "biomass,carbon,hgb,ornl", "license": "proprietary", "title": "HGB: Harmonized Global Biomass for 2010", "missionStartDate": "2010-12-31T00:00:00Z"}, "cop-dem-glo-30": {"abstract": "The Copernicus DEM is a digital surface model (DSM), which represents the surface of the Earth including buildings, infrastructure, and vegetation. This DSM is based on radar satellite data acquired during the TanDEM-X Mission, which was funded by a public-private partnership between the German Aerospace Centre (DLR) and Airbus Defence and Space.\n\nCopernicus DEM is available at both 30-meter and 90-meter resolution; this dataset has a horizontal resolution of approximately 30 meters.\n\nSee the [Product Handbook](https://object.cloud.sdsc.edu/v1/AUTH_opentopography/www/metadata/Copernicus_metadata.pdf) for more information.\n\nSee the dataset page on OpenTopography: \n\n", "instrument": null, "platform": null, "platformSerialIdentifier": "tandem-x", "processingLevel": null, "keywords": "cop-dem-glo-30,copernicus,dem,dsm,elevation,tandem-x", "license": "proprietary", "title": "Copernicus DEM GLO-30", "missionStartDate": "2021-04-22T00:00:00Z"}, "cop-dem-glo-90": {"abstract": "The Copernicus DEM is a digital surface model (DSM), which represents the surface of the Earth including buildings, infrastructure, and vegetation.
This DSM is based on radar satellite data acquired during the TanDEM-X Mission, which was funded by a public-private partnership between the German Aerospace Centre (DLR) and Airbus Defence and Space.\n\nCopernicus DEM is available at both 30-meter and 90-meter resolution; this dataset has a horizontal resolution of approximately 90 meters.\n\nSee the [Product Handbook](https://object.cloud.sdsc.edu/v1/AUTH_opentopography/www/metadata/Copernicus_metadata.pdf) for more information.\n\nSee the dataset page on OpenTopography: \n\n", "instrument": null, "platform": null, "platformSerialIdentifier": "tandem-x", "processingLevel": null, "keywords": "cop-dem-glo-90,copernicus,dem,elevation,tandem-x", "license": "proprietary", "title": "Copernicus DEM GLO-90", "missionStartDate": "2021-04-22T00:00:00Z"}, "goes-cmi": {"abstract": "The GOES-R Advanced Baseline Imager (ABI) L2 Cloud and Moisture Imagery product provides 16 reflective and emissive bands at high temporal cadence over the Western Hemisphere.\n\nThe GOES-R series is the latest in the Geostationary Operational Environmental Satellites (GOES) program, which has been operated in a collaborative effort by NOAA and NASA since 1975. The operational GOES-R Satellites, GOES-16, GOES-17, and GOES-18, capture 16-band imagery from geostationary orbits over the Western Hemisphere via the Advanced Baseline Imager (ABI) radiometer. The ABI captures 2 visible, 4 near-infrared, and 10 infrared channels at resolutions between 0.5km and 2km.\n\n### Geographic coverage\n\nThe ABI captures three levels of coverage, each at a different temporal cadence depending on the modes described below. The geographic coverage for each image is described by the `goes:image-type` STAC Item property.\n\n- _FULL DISK_: a circular image depicting nearly full coverage of the Western Hemisphere.\n- _CONUS_: a 3,000 (lat) by 5,000 (lon) km rectangular image depicting the Continental U.S. (GOES-16) or the Pacific Ocean including Hawaii (GOES-17).\n- _MESOSCALE_: a 1,000 by 1,000 km rectangular image. GOES-16 and 17 both alternate between two different mesoscale geographic regions.\n\n### Modes\n\nThere are three standard scanning modes for the ABI instrument: Mode 3, Mode 4, and Mode 6.\n\n- Mode _3_ consists of one observation of the full disk scene of the Earth, three observations of the continental United States (CONUS), and thirty observations for each of two distinct mesoscale views every fifteen minutes.\n- Mode _4_ consists of the observation of the full disk scene every five minutes.\n- Mode _6_ consists of one observation of the full disk scene of the Earth, two observations of the continental United States (CONUS), and twenty observations for each of two distinct mesoscale views every ten minutes.\n\nThe mode that each image was captured with is described by the `goes:mode` STAC Item property.\n\nSee this [ABI Scan Mode Demonstration](https://youtu.be/_c5H6R-M0s8) video for an idea of how the ABI scans multiple geographic regions over time.\n\n### Cloud and Moisture Imagery\n\nThe Cloud and Moisture Imagery product contains one or more images with pixel values identifying \"brightness values\" that are scaled to support visual analysis. Cloud and Moisture Imagery product (CMIP) files are generated for each of the sixteen ABI reflective and emissive bands.
In addition, there is a multi-band product file that includes the imagery at all bands (MCMIP).\n\nThe Planetary Computer STAC Collection `goes-cmi` captures both the CMIP and MCMIP product files into individual STAC Items for each observation from a GOES-R satellite. It contains the original CMIP and MCMIP NetCDF files, as well as cloud-optimized GeoTIFF (COG) exports of the data from each MCMIP band (2km); the full-resolution CMIP band for bands 1, 2, 3, and 5; and a Web Mercator COG of bands 1, 2 and 3, which are useful for rendering.\n\nThis product is not in a standard coordinate reference system (CRS), which can cause issues with some tooling that does not handle non-standard large geographic regions.\n\n### For more information\n- [Beginner\u2019s Guide to GOES-R Series Data](https://www.goes-r.gov/downloads/resources/documents/Beginners_Guide_to_GOES-R_Series_Data.pdf)\n- [GOES-R Series Product Definition and Users\u2019 Guide: Volume 5 (Level 2A+ Products)](https://www.goes-r.gov/products/docs/PUG-L2+-vol5.pdf) ([Spanish version](https://github.com/NOAA-Big-Data-Program/bdp-data-docs/raw/main/GOES/QuickGuides/Spanish/Guia%20introductoria%20para%20datos%20de%20la%20serie%20GOES-R%20V1.1%20FINAL2%20-%20Copy.pdf))\n\n", "instrument": "ABI", "platform": null, "platformSerialIdentifier": "GOES-16,GOES-17,GOES-18", "processingLevel": null, "keywords": "abi,cloud,goes,goes-16,goes-17,goes-18,goes-cmi,moisture,nasa,noaa,satellite", "license": "proprietary", "title": "GOES-R Cloud & Moisture Imagery", "missionStartDate": "2017-02-28T00:16:52Z"}, "terraclimate": {"abstract": "[TerraClimate](http://www.climatologylab.org/terraclimate.html) is a dataset of monthly climate and climatic water balance for global terrestrial surfaces from 1958 to the present. These data provide important inputs for ecological and hydrological studies at global scales that require high spatial resolution and time-varying data. All data have monthly temporal resolution and a ~4-km (1/24th degree) spatial resolution. This dataset is provided in [Zarr](https://zarr.readthedocs.io/) format.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,precipitation,temperature,terraclimate,vapor-pressure,water", "license": "CC0-1.0", "title": "TerraClimate", "missionStartDate": "1958-01-01T00:00:00Z"}, "nasa-nex-gddp-cmip6": {"abstract": "The NEX-GDDP-CMIP6 dataset is comprised of global downscaled climate scenarios derived from the General Circulation Model (GCM) runs conducted under the Coupled Model Intercomparison Project Phase 6 (CMIP6) and across two of the four \u201cTier 1\u201d greenhouse gas emissions scenarios known as Shared Socioeconomic Pathways (SSPs). The CMIP6 GCM runs were developed in support of the Sixth Assessment Report of the Intergovernmental Panel on Climate Change (IPCC AR6). This dataset includes downscaled projections from ScenarioMIP model runs for which daily scenarios were produced and distributed through the Earth System Grid Federation.
The purpose of this dataset is to provide a set of global, high resolution, bias-corrected climate change projections that can be used to evaluate climate change impacts on processes that are sensitive to finer-scale climate gradients and the effects of local topography on climate conditions.\n\nThe [NASA Center for Climate Simulation](https://www.nccs.nasa.gov/) maintains the [nex-gddp-cmip6 product page](https://www.nccs.nasa.gov/services/data-collections/land-based-products/nex-gddp-cmip6) where you can find more information about these datasets. Users are encouraged to review the [technote](https://www.nccs.nasa.gov/sites/default/files/NEX-GDDP-CMIP6-Tech_Note.pdf), provided alongside the data set, where more detailed information, references and acknowledgements can be found.\n\nThis collection contains many NetCDF files. There is one NetCDF file per `(model, scenario, variable, year)` tuple.\n\n- **model** is the name of a modeling group (e.g. \"ACCESS-CM-2\"). See the `cmip6:model` summary in the STAC collection for a full list of models.\n- **scenario** is one of \"historical\", \"ssp245\" or \"ssp585\".\n- **variable** is one of \"hurs\", \"huss\", \"pr\", \"rlds\", \"rsds\", \"sfcWind\", \"tas\", \"tasmax\", \"tasmin\".\n- **year** depends on the value of *scenario*. For \"historical\", the values range from 1950 to 2014 (inclusive). For \"ssp245\" and \"ssp585\", the years range from 2015 to 2100 (inclusive).\n\nIn addition to the NetCDF files, we provide some *experimental* **reference files** as collection-level dataset assets. These are JSON files implementing the [references specification](https://fsspec.github.io/kerchunk/spec.html).\nThese files include the positions of data variables within the binary NetCDF files, which can speed up reading the metadata. See the example notebook for more.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,cmip6,humidity,nasa,nasa-nex-gddp-cmip6,precipitation,temperature", "license": "proprietary", "title": "Earth Exchange Global Daily Downscaled Projections (NEX-GDDP-CMIP6)", "missionStartDate": "1950-01-01T00:00:00Z"}, "gpm-imerg-hhr": {"abstract": "The Integrated Multi-satellitE Retrievals for GPM (IMERG) algorithm combines information from the [GPM satellite constellation](https://gpm.nasa.gov/missions/gpm/constellation) to estimate precipitation over the majority of the Earth's surface. This algorithm is particularly valuable over the majority of the Earth's surface that lacks precipitation-measuring instruments on the ground. Now, in the latest Version 06 release of IMERG, the algorithm fuses the early precipitation estimates collected during the operation of the TRMM satellite (2000 - 2015) with more recent precipitation estimates collected during operation of the GPM satellite (2014 - present). The longer the record, the more valuable it is, as researchers and application developers will attest.
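The *reference files* described above for `nasa-nex-gddp-cmip6` follow the kerchunk references specification, so a NetCDF can be opened lazily without first downloading it. A minimal sketch, assuming `fsspec`, `xarray`, and `zarr` are installed; the reference-file URL is a placeholder:

```python
# Minimal sketch: open one NetCDF through a kerchunk reference file, as
# described above. The URL below is a placeholder, not a real asset.
import fsspec
import xarray as xr

refs = "https://example.com/nex-gddp-cmip6-refs.json"  # placeholder URL
mapper = fsspec.get_mapper("reference://", fo=refs, remote_protocol="https")
ds = xr.open_dataset(mapper, engine="zarr", consolidated=False)
print(ds["tas"])  # "tas" is one of the documented variables
```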
By being able to compare and contrast past and present data, researchers are better informed to make climate and weather models more accurate, better understand normal and extreme rain and snowfall around the world, and strengthen applications for current and future disasters, disease, resource management, energy production and food security.\n\nFor more, see the [IMERG homepage](https://gpm.nasa.gov/data/imerg). The [IMERG Technical documentation](https://gpm.nasa.gov/sites/default/files/2020-10/IMERG_doc_201006.pdf) provides more information on the algorithm, input datasets, and output products.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "gpm,gpm-imerg-hhr,imerg,precipitation", "license": "proprietary", "title": "GPM IMERG", "missionStartDate": "2000-06-01T00:00:00Z"}, "gnatsgo-rasters": {"abstract": "This collection contains the raster data for gNATSGO. In order to use the map unit values contained in the `mukey` raster asset, you'll need to join to tables represented as Items in the [gNATSGO Tables](https://planetarycomputer.microsoft.com/dataset/gnatsgo-tables) Collection. Many items have commonly used values encoded in additional raster assets.\n\nThe gridded National Soil Survey Geographic Database (gNATSGO) is a USDA-NRCS Soil & Plant Science Division (SPSD) composite database that provides complete coverage of the best available soils information for all areas of the United States and Island Territories. It was created by combining data from the Soil Survey Geographic Database (SSURGO), State Soil Geographic Database (STATSGO2), and Raster Soil Survey Databases (RSS) into a single seamless ESRI file geodatabase.\n\nSSURGO is the SPSD flagship soils database that has over 100 years of field-validated detailed soil mapping data. SSURGO contains soils information for more than 90 percent of the United States and island territories, but unmapped land remains. STATSGO2 is a general soil map that has soils data for all of the United States and island territories, but the data is not as detailed as the SSURGO data. The Raster Soil Surveys (RSSs) are the next generation soil survey databases developed using advanced digital soil mapping methods.\n\nThe gNATSGO database is composed primarily of SSURGO data, but STATSGO2 data was used to fill in the gaps. The RSSs are a newer product with relatively limited spatial extent. These RSSs were merged into the gNATSGO after combining the SSURGO and STATSGO2 data. The extent of RSS is expected to increase in the coming years.\n\nSee the [official documentation](https://www.nrcs.usda.gov/wps/portal/nrcs/detail/soils/survey/geo/?cid=nrcseprd1464625).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "gnatsgo-rasters,natsgo,rss,soils,ssurgo,statsgo2,united-states,usda", "license": "CC0-1.0", "title": "gNATSGO Soil Database - Rasters", "missionStartDate": "2020-07-01T00:00:00Z"}, "3dep-lidar-hag": {"abstract": "This COG type is generated using the Z dimension of the [COPC data](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc), removing noise and water points, and using [`pdal.filters.smrf`](https://pdal.io/stages/filters.smrf.html#filters-smrf) followed by [`pdal.filters.hag_nn`](https://pdal.io/stages/filters.hag_nn.html#filters-hag-nn).\n\nThe Height Above Ground Nearest Neighbor filter takes as input a point cloud with Classification set to 2 for ground points.
It creates a new dimension, HeightAboveGround, that contains the normalized height values.\n\nGround points may be generated with [`pdal.filters.pmf`](https://pdal.io/stages/filters.pmf.html#filters-pmf) or [`pdal.filters.smrf`](https://pdal.io/stages/filters.smrf.html#filters-smrf), but you can use any method you choose, as long as the ground returns are marked.\n\nNormalized heights are a commonly used attribute of point cloud data. This can also be referred to as height above ground (HAG) or above ground level (AGL) heights. In the end, it is simply a measure of a point's relative height as opposed to its raw elevation value.\n\nThe filter finds the number of ground points nearest to the non-ground point under consideration. It calculates an average ground height weighted by the distance of each ground point from the non-ground point. The HeightAboveGround is the difference between the Z value of the non-ground point and the interpolated ground height.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-hag,cog,elevation,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Height above Ground", "missionStartDate": "2012-01-01T00:00:00Z"}, "io-lulc-annual-v02": {"abstract": "Time series of annual global maps of land use and land cover (LULC). It currently has data from 2017-2023. The maps are derived from ESA Sentinel-2 imagery at 10m resolution. Each map is a composite of LULC predictions for 9 classes throughout the year in order to generate a representative snapshot of each year.\n\nThis dataset, produced by [Impact Observatory](http://impactobservatory.com/), Microsoft, and Esri, displays a global map of land use and land cover (LULC) derived from ESA Sentinel-2 imagery at 10 meter resolution for the years 2017 - 2023. Each map is a composite of LULC predictions for 9 classes throughout the year in order to generate a representative snapshot of each year. This dataset was generated by Impact Observatory, which used billions of human-labeled pixels (curated by the National Geographic Society) to train a deep learning model for land classification. Each global map was produced by applying this model to the Sentinel-2 annual scene collections from the Microsoft Planetary Computer. Each of the maps has an assessed average accuracy of over 75%.\n\nThese maps have been improved from Impact Observatory\u2019s [previous release](https://planetarycomputer.microsoft.com/dataset/io-lulc-9-class) and provide a relative reduction in the amount of anomalous change between classes, particularly between \u201cBare\u201d and any of the vegetative classes \u201cTrees,\u201d \u201cCrops,\u201d \u201cFlooded Vegetation,\u201d and \u201cRangeland\u201d. This updated time series of annual global maps is also re-aligned to match the ESA UTM tiling grid for Sentinel-2 imagery.\n\nAll years are available under a Creative Commons BY-4.0 license.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "global,io-lulc-annual-v02,land-cover,land-use,sentinel", "license": "CC-BY-4.0", "title": "10m Annual Land Use Land Cover (9-class) V2", "missionStartDate": "2017-01-01T00:00:00Z"}, "3dep-lidar-intensity": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc).
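The Height Above Ground recipe described above (ground classification followed by `filters.hag_nn`) maps directly onto a PDAL pipeline. A minimal sketch, assuming the `pdal` Python bindings are installed; the input file name is a placeholder:

```python
# Minimal sketch of a HAG-style pipeline as described above: flag noise,
# classify ground with SMRF, derive HeightAboveGround, and rasterize it.
# "input.copc.laz" is a placeholder, not a real asset.
import json
import pdal

pipeline = pdal.Pipeline(json.dumps([
    "input.copc.laz",
    {"type": "filters.outlier"},   # mark noise points
    {"type": "filters.smrf"},      # classify ground (Classification=2)
    {"type": "filters.hag_nn"},    # add the HeightAboveGround dimension
    {
        "type": "writers.gdal",
        "dimension": "HeightAboveGround",
        "resolution": 1.0,
        "filename": "hag.tif",
    },
]))
pipeline.execute()
```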
It is a collection of Cloud Optimized GeoTIFFs representing the pulse return magnitude.\n\nThe values are based on the Intensity [PDAL dimension](https://pdal.io/dimensions.html) and use [`pdal.filters.outlier`](https://pdal.io/stages/filters.outlier.html#filters-outlier) and [`pdal.filters.range`](https://pdal.io/stages/filters.range.html#filters-range) to remove outliers and noise.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-intensity,cog,intensity,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Intensity", "missionStartDate": "2012-01-01T00:00:00Z"}, "3dep-lidar-pointsourceid": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc). It is a collection of Cloud Optimized GeoTIFFs representing the file source ID from which the point originated. Zero indicates that the point originated in the current file.\n\nThese values are based on the PointSourceId [PDAL dimension](https://pdal.io/dimensions.html) and use [`pdal.filters.outlier`](https://pdal.io/stages/filters.outlier.html#filters-outlier) and [`pdal.filters.range`](https://pdal.io/stages/filters.range.html#filters-range) to remove outliers and noise.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-pointsourceid,cog,pointsourceid,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Point Source", "missionStartDate": "2012-01-01T00:00:00Z"}, "mtbs": {"abstract": "[Monitoring Trends in Burn Severity](https://www.mtbs.gov/) (MTBS) is an inter-agency program whose goal is to consistently map the burn severity and extent of large fires across the United States from 1984 to the present. This includes all fires 1000 acres or greater in the Western United States and 500 acres or greater in the Eastern United States. The burn severity mosaics in this dataset consist of thematic raster images of MTBS burn severity classes for all currently completed MTBS fires for the continental United States and Alaska.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "fire,forest,mtbs,usda,usfs,usgs", "license": "proprietary", "title": "MTBS: Monitoring Trends in Burn Severity", "missionStartDate": "1984-12-31T00:00:00Z"}, "noaa-c-cap": {"abstract": "Nationally standardized, raster-based inventories of land cover for the coastal areas of the U.S. Data are derived, through the Coastal Change Analysis Program, from the analysis of multiple dates of remotely sensed imagery. Two file types are available: individual dates that supply a wall-to-wall map, and change files that compare one date to another. The use of standardized data and procedures assures consistency through time and across geographies. C-CAP data forms the coastal expression of the National Land Cover Database (NLCD) and the A-16 land cover theme of the National Spatial Data Infrastructure.
The data are updated every 5 years.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "coastal,land-cover,land-use,noaa,noaa-c-cap", "license": "proprietary", "title": "C-CAP Regional Land Cover and Change", "missionStartDate": "1975-01-01T00:00:00Z"}, "3dep-lidar-copc": {"abstract": "This collection contains source data from the [USGS 3DEP program](https://www.usgs.gov/3d-elevation-program) reformatted into the [COPC](https://copc.io) format. A COPC file is a LAZ 1.4 file that stores point data organized in a clustered octree. It contains a VLR that describes the octree organization of data that are stored in LAZ 1.4 chunks. The end product is a one-to-one mapping of LAZ to UTM-reprojected COPC files.\n\nLAZ data is geospatial [LiDAR point cloud](https://en.wikipedia.org/wiki/Point_cloud) (LPC) content stored in the compressed [LASzip](https://laszip.org?) format. Data were reorganized and stored in LAZ-compatible [COPC](https://copc.io) organization for use in Planetary Computer, which supports incremental spatial access and cloud streaming.\n\nLPC can be summarized for construction of digital terrain models (DTM), filtered for extraction of features like vegetation and buildings, and visualized to provide a point cloud map of the physical spaces the laser scanner interacted with. LPC content from 3DEP is used to compute and extract a variety of landscape characterization products, and some of them are provided by Planetary Computer, including Height Above Ground, Relative Intensity Image, and DTM and Digital Surface Models.\n\nThe LAZ tiles represent a one-to-one mapping of original tiled content as provided by the [USGS 3DEP program](https://www.usgs.gov/3d-elevation-program), with the exception that the data were reprojected and normalized into appropriate UTM zones for their location without adjustment to the vertical datum. In some cases, vertical datum description may not match actual data values, especially for pre-2010 USGS 3DEP point cloud data.\n\nIn addition to these COPC files, various higher-level derived products are available as Cloud Optimized GeoTIFFs in [other collections](https://planetarycomputer.microsoft.com/dataset/group/3dep-lidar).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-copc,cog,point-cloud,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Point Cloud", "missionStartDate": "2012-01-01T00:00:00Z"}, "modis-64A1-061": {"abstract": "The Terra and Aqua combined MCD64A1 Version 6.1 Burned Area data product is a monthly, global gridded 500 meter (m) product containing per-pixel burned-area and quality information. The MCD64A1 burned-area mapping approach employs 500 m Moderate Resolution Imaging Spectroradiometer (MODIS) Surface Reflectance imagery coupled with 1 kilometer (km) MODIS active fire observations. The algorithm uses a burn sensitive Vegetation Index (VI) to create dynamic thresholds that are applied to the composite data. The VI is derived from MODIS shortwave infrared atmospherically corrected surface reflectance bands 5 and 7 with a measure of temporal texture. The algorithm identifies the date of burn for the 500 m grid cells within each individual MODIS tile. The date is encoded in a single data layer as the ordinal day of the calendar year on which the burn occurred with values assigned to unburned land pixels and additional special values reserved for missing data and water grid cells. 
The data layers provided in the MCD64A1 product include Burn Date, Burn Date Uncertainty, and Quality Assurance, along with the First Day and Last Day of reliable change detection of the year.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,fire,global,imagery,mcd64a1,modis,modis-64a1-061,nasa,satellite,terra", "license": "proprietary", "title": "MODIS Burned Area Monthly", "missionStartDate": "2000-11-01T00:00:00Z"}, "alos-fnf-mosaic": {"abstract": "The global 25m resolution SAR mosaics and forest/non-forest maps are free and open annual datasets generated by [JAXA](https://www.eorc.jaxa.jp/ALOS/en/dataset/fnf_e.htm) using the L-band Synthetic Aperture Radar sensors on the Advanced Land Observing Satellite-2 (ALOS-2 PALSAR-2), the Advanced Land Observing Satellite (ALOS PALSAR) and the Japanese Earth Resources Satellite-1 (JERS-1 SAR).\n\nThe global forest/non-forest maps (FNF) were generated by a Random Forest machine learning-based classification method, with the re-processed global 25m resolution [PALSAR-2 mosaic dataset](https://planetarycomputer.microsoft.com/dataset/alos-palsar-mosaic) (Ver. 2.0.0) as input. Here, the \"forest\" is defined as tree-covered land with an area larger than 0.5 ha and a canopy cover of over 10 %, in accordance with the FAO definition of forest. The classification results are presented in four categories, with two categories of forest areas: forests with a canopy cover of 90 % or more and forests with a canopy cover of 10 % to 90 %, depending on the density of the forest area.\n\nSee the [Product Description](https://www.eorc.jaxa.jp/ALOS/en/dataset/pdf/DatasetDescription_PALSAR2_FNF_V200.pdf) for more details.\n", "instrument": "PALSAR,PALSAR-2", "platform": null, "platformSerialIdentifier": "ALOS,ALOS-2", "processingLevel": null, "keywords": "alos,alos-2,alos-fnf-mosaic,forest,global,jaxa,land-cover,palsar,palsar-2", "license": "proprietary", "title": "ALOS Forest/Non-Forest Annual Mosaic", "missionStartDate": "2015-01-01T00:00:00Z"}, "3dep-lidar-returns": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc).
It is a collection of Cloud Optimized GeoTIFFs representing the number of returns for a given pulse.\n\nThe values are based on the NumberOfReturns [PDAL dimension](https://pdal.io/dimensions.html) and use [`pdal.filters.outlier`](https://pdal.io/stages/filters.outlier.html#filters-outlier) and [`pdal.filters.range`](https://pdal.io/stages/filters.range.html#filters-range) to remove outliers and noise.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-returns,cog,numberofreturns,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Returns", "missionStartDate": "2012-01-01T00:00:00Z"}, "mobi": {"abstract": "The [Map of Biodiversity Importance](https://www.natureserve.org/conservation-tools/projects/map-biodiversity-importance) (MoBI) consists of raster maps that combine habitat information for 2,216 imperiled species occurring in the conterminous United States, using weightings based on range size and degree of protection to identify areas of high importance for biodiversity conservation. Species included in the project are those which, as of September 2018, had a global conservation status of G1 (critically imperiled) or G2 (imperiled) or which are listed as threatened or endangered at the full species level under the United States Endangered Species Act. Taxonomic groups included in the project are vertebrates (birds, mammals, amphibians, reptiles, turtles, crocodilians, and freshwater and anadromous fishes), vascular plants, selected aquatic invertebrates (freshwater mussels and crayfish) and selected pollinators (bumblebees, butterflies, and skippers).\n\nThere are three types of spatial data provided, described in more detail below: species richness, range-size rarity, and protection-weighted range-size rarity. For each type, this data set includes five different layers – one for all species combined, and four additional layers that break the data down by taxonomic group (vertebrates, plants, freshwater invertebrates, and pollinators) – for a total of fifteen layers.\n\nThese data layers are intended to identify areas of high potential value for on-the-ground biodiversity protection efforts. As a synthesis of predictive models, they cannot guarantee either the presence or absence of imperiled species at a given location.
For site-specific decision-making, these data should be used in conjunction with field surveys and/or documented occurrence data, such as is available from the [NatureServe Network](https://www.natureserve.org/natureserve-network).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "biodiversity,mobi,natureserve,united-states", "license": "proprietary", "title": "MoBI: Map of Biodiversity Importance", "missionStartDate": "2020-04-14T00:00:00Z"}, "landsat-c2-l2": {"abstract": "Landsat Collection 2 Level-2 [Science Products](https://www.usgs.gov/landsat-missions/landsat-collection-2-level-2-science-products), consisting of atmospherically corrected [surface reflectance](https://www.usgs.gov/landsat-missions/landsat-collection-2-surface-reflectance) and [surface temperature](https://www.usgs.gov/landsat-missions/landsat-collection-2-surface-temperature) image data. Collection 2 Level-2 Science Products are available from August 22, 1982 to present.\n\nThis dataset represents the global archive of Level-2 data from [Landsat Collection 2](https://www.usgs.gov/core-science-systems/nli/landsat/landsat-collection-2) acquired by the [Thematic Mapper](https://landsat.gsfc.nasa.gov/thematic-mapper/) onboard Landsat 4 and 5, the [Enhanced Thematic Mapper](https://landsat.gsfc.nasa.gov/the-enhanced-thematic-mapper-plus-etm/) onboard Landsat 7, and the [Operational Land Imager](https://landsat.gsfc.nasa.gov/satellites/landsat-8/spacecraft-instruments/operational-land-imager/) and [Thermal Infrared Sensor](https://landsat.gsfc.nasa.gov/satellites/landsat-8/spacecraft-instruments/thermal-infrared-sensor/) onboard Landsat 8 and 9. Images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\n", "instrument": "tm,etm+,oli,tirs", "platform": null, "platformSerialIdentifier": "landsat-4,landsat-5,landsat-7,landsat-8,landsat-9", "processingLevel": null, "keywords": "etm+,global,imagery,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2-l2,nasa,oli,reflectance,satellite,temperature,tirs,tm,usgs", "license": "proprietary", "title": "Landsat Collection 2 Level-2", "missionStartDate": "1982-08-22T00:00:00Z"}, "era5-pds": {"abstract": "ERA5 is the fifth generation ECMWF atmospheric reanalysis of the global climate\ncovering the period from January 1950 to present. ERA5 is produced by the\nCopernicus Climate Change Service (C3S) at ECMWF.\n\nReanalysis combines model data with observations from across the world into a\nglobally complete and consistent dataset using the laws of physics. This\nprinciple, called data assimilation, is based on the method used by numerical\nweather prediction centres, where every so many hours (12 hours at ECMWF) a\nprevious forecast is combined with newly available observations in an optimal\nway to produce a new best estimate of the state of the atmosphere, called\nanalysis, from which an updated, improved forecast is issued. Reanalysis works\nin the same way, but at reduced resolution to allow for the provision of a\ndataset spanning back several decades.
Reanalysis does not have the constraint\nof issuing timely forecasts, so there is more time to collect observations, and\nwhen going further back in time, to allow for the ingestion of improved versions\nof the original observations, which all benefit the quality of the reanalysis\nproduct.\n\nThis dataset was converted to Zarr by [Planet OS](https://planetos.com/).\nSee [their documentation](https://github.com/planet-os/notebooks/blob/master/aws/era5-pds.md)\nfor more.\n\n## STAC Metadata\n\nTwo types of data variables are provided: \"forecast\" (`fc`) and \"analysis\" (`an`).\n\n* An **analysis** of the atmospheric conditions is a blend of observations\n with a previous forecast. An analysis can only provide\n [instantaneous](https://confluence.ecmwf.int/display/CKB/Model+grid+box+and+time+step)\n parameters (parameters valid at a specific time, e.g. temperature at 12:00),\n but not accumulated parameters, mean rates or min/max parameters.\n* A **forecast** starts with an analysis at a specific time (the 'initialization\n time'), and a model computes the atmospheric conditions for a number of\n 'forecast steps', at increasing 'validity times', into the future. A forecast\n can provide\n [instantaneous](https://confluence.ecmwf.int/display/CKB/Model+grid+box+and+time+step)\n parameters, accumulated parameters, mean rates, and min/max parameters.\n\nEach [STAC](https://stacspec.org/) item in this collection covers a single month\nand the entire globe. There are two STAC items per month, one for each type of data\nvariable (`fc` and `an`). The STAC items include an `ecmwf:kind` property to\nindicate which kind of variables that STAC item catalogs.\n\n## How to acknowledge, cite and refer to ERA5\n\nAll users of data on the Climate Data Store (CDS) disks (using either the web interface or the CDS API) must provide clear and visible attribution to the Copernicus programme and are asked to cite and reference the dataset provider:\n\nAcknowledge according to the [licence to use Copernicus Products](https://cds.climate.copernicus.eu/api/v2/terms/static/licence-to-use-copernicus-products.pdf).\n\nCite each dataset used as indicated on the relevant CDS entries (see link to \"Citation\" under References on the Overview page of the dataset entry).\n\nThroughout the content of your publication, the dataset used is referred to as Author (YYYY).\n\nThe three-step procedure above is illustrated with this example: [Use Case 2: ERA5 hourly data on single levels from 1979 to present](https://confluence.ecmwf.int/display/CKB/Use+Case+2%3A+ERA5+hourly+data+on+single+levels+from+1979+to+present).\n\nFor complete details, please refer to [How to acknowledge and cite a Climate Data Store (CDS) catalogue entry and the data published as part of it](https://confluence.ecmwf.int/display/CKB/How+to+acknowledge+and+cite+a+Climate+Data+Store+%28CDS%29+catalogue+entry+and+the+data+published+as+part+of+it).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "ecmwf,era5,era5-pds,precipitation,reanalysis,temperature,weather", "license": "proprietary", "title": "ERA5 - PDS", "missionStartDate": "1979-01-01T00:00:00Z"}, "chloris-biomass": {"abstract": "The Chloris Global Biomass 2003 - 2019 dataset provides estimates of stock and change in aboveground biomass for Earth's terrestrial woody vegetation ecosystems. It covers the period 2003 - 2019, at annual time steps.
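Because each month of `era5-pds` yields two STAC items distinguished by the `ecmwf:kind` property described above, a search can pin one kind explicitly. A minimal sketch, assuming `pystac-client` is installed:

```python
# Minimal sketch: select the "analysis" ("an") item for one month using
# the `ecmwf:kind` property described above.
from pystac_client import Client

catalog = Client.open("https://planetarycomputer.microsoft.com/api/stac/v1")
items = catalog.search(
    collections=["era5-pds"],
    datetime="1980-01",
    query={"ecmwf:kind": {"eq": "an"}},
).item_collection()
print([item.id for item in items])
```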
The global dataset has a circa 4.6 km spatial resolution.\n\nThe maps and data sets were generated by combining multiple remote sensing measurements from spaceborne satellites, processed using state-of-the-art machine learning and statistical methods, and validated with field data from multiple countries. The dataset provides direct estimates of aboveground stock and change; these are not based on land use or land cover area change, and as such they include gains and losses of carbon stock in all types of woody vegetation - whether natural or plantations.\n\nAnnual stocks are expressed in units of tons of biomass. Annual changes in stocks are expressed in units of CO2 equivalent, i.e., the amount of CO2 released from or taken up by terrestrial ecosystems for that specific pixel.\n\nThe spatial data sets are available on [Microsoft\u2019s Planetary Computer](https://planetarycomputer.microsoft.com/dataset/chloris-biomass) under a Creative Commons license of the type Attribution-NonCommercial-ShareAlike [CC BY-NC-SA](https://spdx.org/licenses/CC-BY-NC-SA-4.0.html).\n\n[Chloris Geospatial](https://chloris.earth/) is a mission-driven technology company that develops software and data products on the state of natural capital for use by business, governments, and the social sector.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "biomass,carbon,chloris,chloris-biomass,modis", "license": "CC-BY-NC-SA-4.0", "title": "Chloris Biomass", "missionStartDate": "2003-07-31T00:00:00Z"}, "kaza-hydroforecast": {"abstract": "This dataset is a daily updated set of HydroForecast seasonal river flow forecasts at six locations in the Kwando and Upper Zambezi river basins. For more details about the locations and project context, and to interactively view current and previous forecasts, visit the [public website](https://dashboard.hydroforecast.com/public/wwf-kaza).\n\n## Flow forecast dataset and model description\n\n[HydroForecast](https://www.upstream.tech/hydroforecast) is a theory-guided machine learning hydrologic model that predicts streamflow in basins across the world. For the Kwando and Upper Zambezi, HydroForecast makes daily predictions of streamflow rates using a [seasonal analog approach](https://support.upstream.tech/article/125-seasonal-analog-model-a-technical-overview). The model's output is probabilistic and the mean, median and a range of quantiles are available at each forecast step.\n\nThe underlying model has the following attributes: \n\n* Timestep: 10 days\n* Horizon: 10 to 180 days \n* Update frequency: daily\n* Units: cubic meters per second (m\u00b3/s)\n \n## Site details\n\nThe model produces output for six locations in the Kwando and Upper Zambezi river basins.\n\n* Upper Zambezi sites\n * Zambezi at Chavuma\n * Luanginga at Kalabo\n* Kwando basin sites\n * Kwando at Kongola -- total basin flows\n * Kwando Sub-basin 1\n * Kwando Sub-basin 2 \n * Kwando Sub-basin 3\n * Kwando Sub-basin 4\n * Kwando Kongola Sub-basin\n\n## STAC metadata\n\nThere is one STAC item per location.
Each STAC item has a single asset linking to a Parquet file in Azure Blob Storage.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "hydroforecast,hydrology,kaza-hydroforecast,streamflow,upstream-tech,water", "license": "CDLA-Sharing-1.0", "title": "HydroForecast - Kwando & Upper Zambezi Rivers", "missionStartDate": "2022-01-01T00:00:00Z"}, "planet-nicfi-analytic": {"abstract": "*Note: Assets in this collection are only available to winners of the [GEO-Microsoft Planetary Computer RFP](https://www.earthobservations.org/geo_blog_obs.php?id=528). Others wishing to use the data can sign up and access it from Planet at [https://www.planet.com/nicfi/](https://www.planet.com/nicfi/) and email [planetarycomputer@microsoft.com](mailto:planetarycomputer@microsoft.com).*\n\nThrough Norway\u2019s International Climate & Forests Initiative (NICFI), users can access Planet\u2019s high-resolution, analysis-ready mosaics of the world\u2019s tropics in order to help reduce and reverse the loss of tropical forests, combat climate change, conserve biodiversity, and facilitate sustainable development.\n\nIn support of NICFI\u2019s mission, you can use this data for a number of projects including, but not limited to:\n\n* Advance scientific research about the world\u2019s tropical forests and the critical services they provide.\n* Implement and improve policies for sustainable forest management and land use in developing tropical forest countries and jurisdictions.\n* Increase transparency and accountability in the tropics.\n* Protect and improve the rights of indigenous peoples and local communities in tropical forest countries.\n* Innovate solutions towards reducing pressure on forests from global commodities and financial markets.\n* In short, the primary purpose of the NICFI Program is to support reducing and reversing the loss of tropical forests, contributing to combating climate change, conserving biodiversity, contributing to forest regrowth, restoration, and enhancement, and facilitating sustainable development, all of which must be Non-Commercial Use.\n\nTo learn more about the NICFI program, including streaming and downloading basemaps, please read the [NICFI Data Program User Guide](https://assets.planet.com/docs/NICFI_UserGuidesFAQ.pdf).\n\nThis collection contains both monthly and biannual mosaics. Biannual mosaics are available from December 2015 - August 2020. Monthly mosaics are available from September 2020. The STAC items include a `planet-nicfi:cadence` field indicating the type of mosaic.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "imagery,nicfi,planet,planet-nicfi-analytic,satellite,tropics", "license": "proprietary", "title": "Planet-NICFI Basemaps (Analytic)", "missionStartDate": "2015-12-01T00:00:00Z"}, "modis-17A2H-061": {"abstract": "The Version 6.1 Gross Primary Productivity (GPP) product is a cumulative 8-day composite of values with 500 meter (m) pixel size based on the radiation use efficiency concept that can be potentially used as inputs to data models to calculate terrestrial energy, carbon, water cycle processes, and biogeochemistry of vegetation. The Moderate Resolution Imaging Spectroradiometer (MODIS) data product includes information about GPP and Net Photosynthesis (PSN). The PSN band values are the GPP less the Maintenance Respiration (MR). The data product also contains a PSN Quality Control (QC) layer.
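For the `kaza-hydroforecast` collection described above (one STAC item per location, each with a single Parquet asset), the forecasts can be loaded straight into a dataframe. A minimal sketch, assuming `pystac-client`, `planetary-computer`, `pandas`, and `adlfs` are installed; the `table:storage_options` asset field is an assumption:

```python
# Minimal sketch: read the single Parquet asset of one HydroForecast item,
# as described above. The asset key and storage-options field are assumed.
import pandas as pd
import planetary_computer
from pystac_client import Client

catalog = Client.open(
    "https://planetarycomputer.microsoft.com/api/stac/v1",
    modifier=planetary_computer.sign_inplace,
)
item = next(catalog.search(collections=["kaza-hydroforecast"]).items())
asset = next(iter(item.assets.values()))  # the single Parquet asset
opts = asset.extra_fields.get("table:storage_options")  # assumed key
df = pd.read_parquet(asset.href, storage_options=opts)
print(df.head())
```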
The quality layer contains quality information for both the GPP and the PSN.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod17a2h,modis,modis-17a2h-061,myd17a2h,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Gross Primary Productivity 8-Day", "missionStartDate": "2000-02-18T00:00:00Z"}, "modis-11A2-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) Land Surface Temperature/Emissivity 8-Day Version 6.1 product provides an average 8-day per-pixel Land Surface Temperature and Emissivity (LST&E) with a 1 kilometer (km) spatial resolution in a 1,200 by 1,200 km grid. Each pixel value in the MOD11A2 is a simple average of all the corresponding MOD11A1 LST pixels collected within that 8-day period. The 8-day compositing period was chosen because twice that period is the exact ground track repeat period of the Terra and Aqua platforms. Provided along with the daytime and nighttime surface temperature bands are associated quality control assessments, observation times, view zenith angles, and clear-sky coverages along with bands 31 and 32 emissivities from land cover types.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod11a2,modis,modis-11a2-061,myd11a2,nasa,satellite,temperature,terra", "license": "proprietary", "title": "MODIS Land Surface Temperature/Emissivity 8-Day", "missionStartDate": "2000-02-18T00:00:00Z"}, "daymet-daily-pr": {"abstract": "Gridded estimates of daily weather parameters. [Daymet](https://daymet.ornl.gov) Version 4 variables include the following parameters: minimum temperature, maximum temperature, precipitation, shortwave radiation, vapor pressure, snow water equivalent, and day length.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1840](https://doi.org/10.3334/ORNLDAAC/1840) to cite your usage of the data.\n\nThis dataset provides coverage for Puerto Rico; Hawaii and North America are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#daily).\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "daymet,daymet-daily-pr,precipitation,puerto-rico,temperature,vapor-pressure,weather", "license": "proprietary", "title": "Daymet Daily Puerto Rico", "missionStartDate": "1980-01-01T12:00:00Z"}, "3dep-lidar-dtm-native": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc).
It creates a Digital Terrain Model (DTM) using the vendor-provided (native) ground classification and [`pdal.filters.range`](https://pdal.io/stages/filters.range.html#filters-range) to output a collection of Cloud Optimized GeoTIFFs, removing all points that have been classified as noise.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-dtm-native,cog,dtm,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Digital Terrain Model (Native)", "missionStartDate": "2012-01-01T00:00:00Z"}, "3dep-lidar-classification": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc). It uses the [ASPRS](https://www.asprs.org/) (American Society for Photogrammetry and Remote Sensing) [Lidar point classification](https://desktop.arcgis.com/en/arcmap/latest/manage-data/las-dataset/lidar-point-classification.htm). See [LAS specification](https://www.ogc.org/standards/LAS) for details.\n\nThis COG type is based on the Classification [PDAL dimension](https://pdal.io/dimensions.html) and uses [`pdal.filters.range`](https://pdal.io/stages/filters.range.html) to select a subset of interesting classifications. Do note that not all LiDAR collections contain a full complement of classification labels.\nTo remove outliers, the PDAL pipeline uses a noise filter and then outputs the Classification dimension.\n\nThe STAC collection implements the [`item_assets`](https://github.com/stac-extensions/item-assets) and [`classification`](https://github.com/stac-extensions/classification) extensions. These classes are displayed in the \"Item assets\" below. You can programmatically access the full list of class values and descriptions using the `classification:classes` field from the `data` asset on the STAC collection.\n\nClassification rasters were produced as a subset of LiDAR classification categories:\n\n```\n0, Never Classified\n1, Unclassified\n2, Ground\n3, Low Vegetation\n4, Medium Vegetation\n5, High Vegetation\n6, Building\n9, Water\n10, Rail\n11, Road\n17, Bridge Deck\n```\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-classification,classification,cog,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Classification", "missionStartDate": "2012-01-01T00:00:00Z"}, "3dep-lidar-dtm": {"abstract": "This collection is derived from the [USGS 3DEP COPC collection](https://planetarycomputer.microsoft.com/dataset/3dep-lidar-copc).
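As noted above, the class table for `3dep-lidar-classification` is exposed through the `classification:classes` field on the collection's `data` asset. A minimal sketch of reading it, assuming `pystac-client` is installed:

```python
# Minimal sketch: list class values and descriptions from the
# `classification:classes` field described above.
from pystac_client import Client

catalog = Client.open("https://planetarycomputer.microsoft.com/api/stac/v1")
collection = catalog.get_collection("3dep-lidar-classification")
classes = collection.assets["data"].extra_fields["classification:classes"]
for cls in classes:
    print(cls["value"], cls["description"])
```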
It creates a Digital Terrain Model (DTM) using [`pdal.filters.smrf`](https://pdal.io/stages/filters.smrf.html#filters-smrf) to output a collection of Cloud Optimized GeoTIFFs.\n\nThe Simple Morphological Filter (SMRF) classifies ground points based on the approach outlined in [Pingel2013](https://pdal.io/references.html#pingel2013).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "3dep,3dep-lidar-dtm,cog,dtm,usgs", "license": "proprietary", "title": "USGS 3DEP Lidar Digital Terrain Model", "missionStartDate": "2012-01-01T00:00:00Z"}, "gap": {"abstract": "The [USGS GAP/LANDFIRE National Terrestrial Ecosystems data](https://www.sciencebase.gov/catalog/item/573cc51be4b0dae0d5e4b0c5), based on the [NatureServe Terrestrial Ecological Systems](https://www.natureserve.org/products/terrestrial-ecological-systems-united-states), are the foundation of the most detailed, consistent map of vegetation available for the United States. These data facilitate planning and management for biological diversity on a regional and national scale.\n\nThis dataset includes the [land cover](https://www.usgs.gov/core-science-systems/science-analytics-and-synthesis/gap/science/land-cover) component of the GAP/LANDFIRE project.\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "gap,land-cover,landfire,united-states,usgs", "license": "proprietary", "title": "USGS Gap Land Cover", "missionStartDate": "1999-01-01T00:00:00Z"}, "modis-17A2HGF-061": {"abstract": "The Version 6.1 Gross Primary Productivity (GPP) product is a cumulative 8-day composite of values with 500 meter (m) pixel size based on the radiation use efficiency concept that can be potentially used as inputs to data models to calculate terrestrial energy, carbon, water cycle processes, and biogeochemistry of vegetation. The Moderate Resolution Imaging Spectroradiometer (MODIS) data product includes information about GPP and Net Photosynthesis (PSN). The PSN band values are the GPP less the Maintenance Respiration (MR). The data product also contains a PSN Quality Control (QC) layer. The quality layer contains quality information for both the GPP and the PSN. This product will be generated at the end of each year when the entire yearly 8-day 15A2H is available. Hence, the gap-filled A2HGF is the improved 17, which has cleaned the poor-quality inputs from 8-day Leaf Area Index and Fraction of Photosynthetically Active Radiation (FPAR/LAI) based on the Quality Control (QC) label for every pixel. If any LAI/FPAR pixel did not meet the quality screening criteria, its value is determined through linear interpolation. However, users cannot get this product in near-real time because it will be generated only at the end of a given year.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod17a2hgf,modis,modis-17a2hgf-061,myd17a2hgf,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Gross Primary Productivity 8-Day Gap-Filled", "missionStartDate": "2000-02-18T00:00:00Z"}, "planet-nicfi-visual": {"abstract": "*Note: Assets in this collection are only available to winners of the [GEO-Microsoft Planetary Computer RFP](https://www.earthobservations.org/geo_blog_obs.php?id=528). 
Others wishing to use the data can sign up and access it from Planet at [https://www.planet.com/nicfi/](https://www.planet.com/nicfi/) and email [planetarycomputer@microsoft.com](mailto:planetarycomputer@microsoft.com).*\n\nThrough Norway\u2019s International Climate & Forests Initiative (NICFI), users can access Planet\u2019s high-resolution, analysis-ready mosaics of the world\u2019s tropics in order to help reduce and reverse the loss of tropical forests, combat climate change, conserve biodiversity, and facilitate sustainable development.\n\nIn support of NICFI\u2019s mission, you can use this data for a number of projects including, but not limited to:\n\n* Advance scientific research about the world\u2019s tropical forests and the critical services they provide.\n* Implement and improve policies for sustainable forest management and land use in developing tropical forest countries and jurisdictions.\n* Increase transparency and accountability in the tropics.\n* Protect and improve the rights of indigenous peoples and local communities in tropical forest countries.\n* Innovate solutions towards reducing pressure on forests from global commodities and financial markets.\n* In short, the primary purpose of the NICFI Program is to support reducing and reversing the loss of tropical forests, contributing to combating climate change, conserving biodiversity, contributing to forest regrowth, restoration, and enhancement, and facilitating sustainable development, all of which must be Non-Commercial Use.\n\nTo learn more about the NICFI program, including streaming and downloading basemaps, please read the [NICFI Data Program User Guide](https://assets.planet.com/docs/NICFI_UserGuidesFAQ.pdf).\n\nThis collection contains both monthly and biannual mosaics. Biannual mosaics are available from December 2015 - August 2020. Monthly mosaics are available from September 2020. The STAC items include a `planet-nicfi:cadence` field indicating the type of mosaic.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "imagery,nicfi,planet,planet-nicfi-visual,satellite,tropics", "license": "proprietary", "title": "Planet-NICFI Basemaps (Visual)", "missionStartDate": "2015-12-01T00:00:00Z"}, "gbif": {"abstract": "The [Global Biodiversity Information Facility](https://www.gbif.org) (GBIF) is an international network and data infrastructure funded by the world's governments, providing global data that document the occurrence of species. GBIF currently integrates datasets documenting over 1.6 billion species occurrences.\n\nThe GBIF occurrence dataset combines data from a wide array of sources, including specimen-related data from natural history museums, observations from citizen science networks, and automated environmental surveys. While these data are constantly changing at [GBIF.org](https://www.gbif.org), periodic snapshots are taken and made available here. \n\nData are stored in [Parquet](https://parquet.apache.org/) format; the Parquet file schema is described below. Most field names correspond to [terms from the Darwin Core standard](https://dwc.tdwg.org/terms/), and have been interpreted by GBIF's systems to align taxonomy, location, dates, etc. Additional information may be retrieved using the [GBIF API](https://www.gbif.org/developer/summary).\n\nPlease refer to the GBIF [citation guidelines](https://www.gbif.org/citation-guidelines) for information about how to cite GBIF data in publications.
For analyses using the whole dataset, please use the following citation:\n\n> GBIF.org ([Date]) GBIF Occurrence Data [DOI of dataset]\n\nFor analyses where data are significantly filtered, please track the datasetKeys used and use a \"[derived dataset](https://www.gbif.org/citation-guidelines#derivedDatasets)\" record for citing the data.\n\nThe [GBIF data blog](https://data-blog.gbif.org/categories/gbif/) contains a number of articles that can help you analyze GBIF data.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "biodiversity,gbif,species", "license": "proprietary", "title": "Global Biodiversity Information Facility (GBIF)", "missionStartDate": "2021-04-13T00:00:00Z"}, "modis-17A3HGF-061": {"abstract": "The Version 6.1 product provides information about annual Net Primary Production (NPP) at 500 meter (m) pixel resolution. Annual Moderate Resolution Imaging Spectroradiometer (MODIS) NPP is derived from the sum of all 8-day Net Photosynthesis (PSN) products (MOD17A2H) from the given year. The PSN value is the difference of the Gross Primary Productivity (GPP) and the Maintenance Respiration (MR). The product will be generated at the end of each year when the entire yearly 8-day 15A2H is available. Hence, the gap-filled product is the improved 17, which has cleaned the poor-quality inputs from 8-day Leaf Area Index and Fraction of Photosynthetically Active Radiation (LAI/FPAR) based on the Quality Control (QC) label for every pixel. If any LAI/FPAR pixel did not meet the quality screening criteria, its value is determined through linear interpolation. However, users cannot get this product in near-real time because it will be generated only at the end of a given year.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod17a3hgf,modis,modis-17a3hgf-061,myd17a3hgf,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Net Primary Production Yearly Gap-Filled", "missionStartDate": "2000-02-18T00:00:00Z"}, "modis-09A1-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) 09A1 Version 6.1 product provides an estimate of the surface spectral reflectance of MODIS Bands 1 through 7 corrected for atmospheric conditions such as gases, aerosols, and Rayleigh scattering. Along with the seven 500 meter (m) reflectance bands are two quality layers and four observation bands. For each pixel, a value is selected from all the acquisitions within the 8-day composite period. The criteria for the pixel choice include cloud and solar zenith. When several acquisitions meet the criteria, the pixel with the minimum channel 3 (blue) value is used.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,imagery,mod09a1,modis,modis-09a1-061,myd09a1,nasa,reflectance,satellite,terra", "license": "proprietary", "title": "MODIS Surface Reflectance 8-Day (500m)", "missionStartDate": "2000-02-18T00:00:00Z"}, "alos-dem": {"abstract": "The \"ALOS World 3D-30m\" (AW3D30) dataset is a 30 meter resolution global digital surface model (DSM), developed by the Japan Aerospace Exploration Agency (JAXA).
AW3D30 was constructed from the Panchromatic Remote-sensing Instrument for Stereo Mapping (PRISM) on board the Advanced Land Observing Satellite (ALOS), which operated from 2006 to 2011.\n\nSee the [Product Description](https://www.eorc.jaxa.jp/ALOS/en/aw3d30/aw3d30v3.2_product_e_e1.2.pdf) for more details.\n", "instrument": "prism", "platform": null, "platformSerialIdentifier": "alos", "processingLevel": null, "keywords": "alos,alos-dem,dem,dsm,elevation,jaxa,prism", "license": "proprietary", "title": "ALOS World 3D-30m", "missionStartDate": "2016-12-07T00:00:00Z"}, "alos-palsar-mosaic": {"abstract": "Global 25 m Resolution PALSAR-2/PALSAR Mosaic (MOS)", "instrument": "PALSAR,PALSAR-2", "platform": null, "platformSerialIdentifier": "ALOS,ALOS-2", "processingLevel": null, "keywords": "alos,alos-2,alos-palsar-mosaic,global,jaxa,palsar,palsar-2,remote-sensing", "license": "proprietary", "title": "ALOS PALSAR Annual Mosaic", "missionStartDate": "2015-01-01T00:00:00Z"}, "deltares-water-availability": {"abstract": "[Deltares](https://www.deltares.nl/en/) has produced a hydrological model approach to simulate historical daily reservoir variations for 3,236 locations across the globe for the period 1970-2020 using the distributed [wflow_sbm](https://deltares.github.io/Wflow.jl/stable/model_docs/model_configurations/) model. The model outputs long-term daily information on reservoir volume, inflow and outflow dynamics, as well as information on upstream hydrological forcing.\n\nThe hydrological model was forced with 5 different precipitation products. Two products (ERA5 and CHIRPS) are available at the global scale, while for Europe, USA and Australia a regional product was used (i.e. EOBS, NLDAS and BOM, respectively). Using these different precipitation products, it becomes possible to assess the impact of uncertainty in the model forcing. A different number of basins upstream of reservoirs are simulated, given the spatial coverage of each precipitation product.\n\nSee the complete [methodology documentation](https://ai4edatasetspublicassets.blob.core.windows.net/assets/aod_docs/pc-deltares-water-availability-documentation.pdf) for more information.\n\n## Dataset coverages\n\n| Name | Scale | Period | Number of basins |\n|--------|--------------------------|-----------|------------------|\n| ERA5 | Global | 1967-2020 | 3236 |\n| CHIRPS | Global (+/- 50 latitude) | 1981-2020 | 2951 |\n| EOBS | Europe/North Africa | 1979-2020 | 682 |\n| NLDAS | USA | 1979-2020 | 1090 |\n| BOM | Australia | 1979-2020 | 116 |\n\n## STAC Metadata\n\nThis STAC collection includes one STAC item per dataset. The item includes a `deltares:reservoir` property that can be used to query for the URL of a specific dataset.\n\n## Contact\n\nFor questions about this dataset, contact [`aiforearthdatasets@microsoft.com`](mailto:aiforearthdatasets@microsoft.com?subject=deltares-floods%20question).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "deltares,deltares-water-availability,precipitation,reservoir,water,water-availability", "license": "CDLA-Permissive-1.0", "title": "Deltares Global Water Availability", "missionStartDate": "1970-01-01T00:00:00Z"}, "modis-16A3GF-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) MOD16A3GF Version 6.1 Evapotranspiration/Latent Heat Flux (ET/LE) product is a year-end gap-filled yearly composite dataset produced at 500 meter (m) pixel resolution.
The algorithm used for the MOD16 data product collection is based on the logic of the Penman-Monteith equation, which includes inputs of daily meteorological reanalysis data along with MODIS remotely sensed data products such as vegetation property dynamics, albedo, and land cover. The product will be generated at the end of each year when the entire yearly 8-day MOD15A2H/MYD15A2H is available. Hence, the gap-filled product is the improved 16, which has cleaned the poor-quality inputs from yearly Leaf Area Index and Fraction of Photosynthetically Active Radiation (LAI/FPAR) based on the Quality Control (QC) label for every pixel. If any LAI/FPAR pixel did not meet the quality screening criteria, its value is determined through linear interpolation. However, users cannot get this product in near-real time because it will be generated only at the end of a given year. Provided in the product are layers for composited ET, LE, Potential ET (PET), and Potential LE (PLE) along with a quality control layer. Two low resolution browse images, ET and LE, are also available for each granule. The pixel values for the two Evapotranspiration layers (ET and PET) are the sum for all days within the defined year, and the pixel values for the two Latent Heat layers (LE and PLE) are the average of all days within the defined year.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod16a3gf,modis,modis-16a3gf-061,myd16a3gf,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Net Evapotranspiration Yearly Gap-Filled", "missionStartDate": "2001-01-01T00:00:00Z"}, "modis-21A2-061": {"abstract": "A suite of Moderate Resolution Imaging Spectroradiometer (MODIS) Land Surface Temperature and Emissivity (LST&E) products are available in Collection 6.1. The MOD21 Land Surface Temperature (LST) algorithm differs from the algorithm of the MOD11 LST products, in that the MOD21 algorithm is based on the ASTER Temperature/Emissivity Separation (TES) technique, whereas the MOD11 uses the split-window technique. The MOD21 TES algorithm uses a physics-based algorithm to dynamically retrieve both the LST and spectral emissivity simultaneously from the MODIS thermal infrared bands 29, 31, and 32. The TES algorithm is combined with an improved Water Vapor Scaling (WVS) atmospheric correction scheme to stabilize the retrieval during very warm and humid conditions. This dataset is an 8-day composite LST product at 1,000 meter spatial resolution that uses an algorithm based on a simple averaging method. The algorithm calculates the average from all the cloud-free 21A1D and 21A1N daily acquisitions from the 8-day period. Unlike the 21A1 data sets where the daytime and nighttime acquisitions are separate products, the 21A2 contains both daytime and nighttime acquisitions as separate Science Dataset (SDS) layers within a single Hierarchical Data Format (HDF) file. The LST, Quality Control (QC), view zenith angle, and viewing time have separate day and night SDS layers, while the values for the MODIS emissivity bands 29, 31, and 32 are the average of both the nighttime and daytime acquisitions.
Additional details regarding the method used to create this Level 3 (L3) product are available in the Algorithm Theoretical Basis Document (ATBD).", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod21a2,modis,modis-21a2-061,myd21a2,nasa,satellite,temperature,terra", "license": "proprietary", "title": "MODIS Land Surface Temperature/3-Band Emissivity 8-Day", "missionStartDate": "2000-02-16T00:00:00Z"}, "us-census": {"abstract": "The [2020 Census](https://www.census.gov/programs-surveys/decennial-census/decade/2020/2020-census-main.html) counted every person living in the United States and the five U.S. territories. It marked the 24th census in U.S. history and the first time that households were invited to respond to the census online.\n\nThe tables included on the Planetary Computer provide information on population and geographic boundaries at various levels of cartographic aggregation.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "administrative-boundaries,demographics,population,us-census,us-census-bureau", "license": "proprietary", "title": "US Census", "missionStartDate": "2021-08-01T00:00:00Z"}, "jrc-gsw": {"abstract": "Global surface water products from the European Commission Joint Research Centre, based on Landsat 5, 7, and 8 imagery. Layers in this collection describe the occurrence, change, and seasonality of surface water from 1984-2020. Complete documentation for each layer is available in the [Data Users Guide](https://storage.cloud.google.com/global-surface-water/downloads_ancillary/DataUsersGuidev2020.pdf).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "global,jrc-gsw,landsat,water", "license": "proprietary", "title": "JRC Global Surface Water", "missionStartDate": "1984-03-01T00:00:00Z"}, "deltares-floods": {"abstract": "[Deltares](https://www.deltares.nl/en/) has produced inundation maps of flood depth using a model that takes into account water level attenuation and is forced by sea level. At the coastline, the model is forced by extreme water levels containing surge and tide from GTSMip6. The water level at the coastline is extended landwards to all areas that are hydrodynamically connected to the coast following a \u2018bathtub\u2019-like approach, and the flood depth is calculated as the difference between the water level and the topography. Unlike a simple 'bathtub' model, this model attenuates the water level over land with a maximum attenuation factor of 0.5\u2009m\u2009km-1. The attenuation factor simulates the dampening of the flood levels due to the roughness over land.\n\nIn its current version, the model does not account for varying roughness over land and permanent water bodies such as rivers and lakes, and it does not account for the compound effects of waves, rainfall, and river discharge on coastal flooding. It also does not include the mitigating effect of coastal flood protection.
Flood extents must thus be interpreted as the area that is potentially exposed to flooding without coastal protection.\n\nSee the complete [methodology documentation](https://ai4edatasetspublicassets.blob.core.windows.net/assets/aod_docs/11206409-003-ZWS-0003_v0.1-Planetary-Computer-Deltares-global-flood-docs.pdf) for more information.\n\n## Digital elevation models (DEMs)\n\nThis documentation will refer to three DEMs:\n\n* `NASADEM` is the SRTM-derived [NASADEM](https://planetarycomputer.microsoft.com/dataset/nasadem) product.\n* `MERITDEM` is the [Multi-Error-Removed Improved Terrain DEM](http://hydro.iis.u-tokyo.ac.jp/~yamadai/MERIT_DEM/), derived from SRTM and AW3D.\n* `LIDAR` is the [Global LiDAR Lowland DTM (GLL_DTM_v1)](https://data.mendeley.com/datasets/v5x4vpnzds/1).\n\n## Global datasets\n\nThis collection includes multiple global flood datasets derived from three different DEMs (`NASADEM`, `MERITDEM`, and `LIDAR`) at different resolutions. Not all DEMs have all resolutions:\n\n* `NASADEM` and `MERITDEM` are available at `90m` and `1km` resolutions\n* `LIDAR` is available at `5km` resolution\n\n## Historic event datasets\n\nThis collection also includes historical storm event data files that follow similar DEM and resolution conventions. Not all storm events are available for each DEM and resolution combination, but the files generally follow the format:\n\n`events/[DEM]_[resolution]-wm_final/[storm_name]_[event_year]_masked.nc`\n\nFor example, a flood map for the MERITDEM-derived 90m flood data for the \"Omar\" storm in 2008 is available at:\n\n\n\n## Contact\n\nFor questions about this dataset, contact [`aiforearthdatasets@microsoft.com`](mailto:aiforearthdatasets@microsoft.com?subject=deltares-floods%20question).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "deltares,deltares-floods,flood,global,sea-level-rise,water", "license": "CDLA-Permissive-1.0", "title": "Deltares Global Flood Maps", "missionStartDate": "2018-01-01T00:00:00Z"}, "modis-43A4-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) MCD43A4 Version 6.1 Nadir Bidirectional Reflectance Distribution Function (BRDF)-Adjusted Reflectance (NBAR) dataset is produced daily using 16 days of Terra and Aqua MODIS data at 500 meter (m) resolution. The view angle effects are removed from the directional reflectances, resulting in a stable and consistent NBAR product. Data are temporally weighted to the ninth day, which is reflected in the Julian date in the file name. Users are urged to use the band specific quality flags to isolate the highest quality full inversion results for their own science applications as described in the User Guide. The MCD43A4 provides NBAR and simplified mandatory quality layers for MODIS bands 1 through 7.
Essential quality information provided in the corresponding MCD43A2 data file should be consulted when using this product.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,imagery,mcd43a4,modis,modis-43a4-061,nasa,reflectance,satellite,terra", "license": "proprietary", "title": "MODIS Nadir BRDF-Adjusted Reflectance (NBAR) Daily", "missionStartDate": "2000-02-16T00:00:00Z"}, "modis-09Q1-061": {"abstract": "The 09Q1 Version 6.1 product provides an estimate of the surface spectral reflectance of Moderate Resolution Imaging Spectroradiometer (MODIS) Bands 1 and 2, corrected for atmospheric conditions such as gases, aerosols, and Rayleigh scattering. Provided along with the 250 meter (m) surface reflectance bands are two quality layers. For each pixel, a value is selected from all the acquisitions within the 8-day composite period. The criteria for the pixel choice include cloud cover and solar zenith angle. When several acquisitions meet the criteria, the pixel with the minimum channel 3 (blue) value is used.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,imagery,mod09q1,modis,modis-09q1-061,myd09q1,nasa,reflectance,satellite,terra", "license": "proprietary", "title": "MODIS Surface Reflectance 8-Day (250m)", "missionStartDate": "2000-02-18T00:00:00Z"}, "modis-14A1-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) Thermal Anomalies and Fire Daily Version 6.1 data are generated every eight days at 1 kilometer (km) spatial resolution as a Level 3 product. MOD14A1 contains eight consecutive days of fire data conveniently packaged into a single file. The Science Dataset (SDS) layers include the fire mask, pixel quality indicators, maximum fire radiative power (MaxFRP), and the position of the fire pixel within the scan. Each layer consists of daily per pixel information for each of the eight days of data acquisition.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,fire,global,mod14a1,modis,modis-14a1-061,myd14a1,nasa,satellite,terra", "license": "proprietary", "title": "MODIS Thermal Anomalies/Fire Daily", "missionStartDate": "2000-02-18T00:00:00Z"}, "hrea": {"abstract": "The [HREA](http://www-personal.umich.edu/~brianmin/HREA/index.html) project aims to provide open access to new indicators of electricity access and reliability across the world. Leveraging satellite imagery with computational methods, these high-resolution data provide new tools to track progress toward reliable and sustainable energy access across the world.\n\nThis dataset includes settlement-level measures of electricity access, reliability, and usage for 89 nations, derived from nightly VIIRS satellite imagery. Specifically, this dataset provides the following annual values at country-level granularity:\n\n1. **Access**: Predicted likelihood that a settlement is electrified, based on night-by-night comparisons of each settlement against matched uninhabited areas over a calendar year.\n\n2. **Reliability**: Proportion of nights a settlement is statistically brighter than matched uninhabited areas. Areas with more frequent power outages or service interruptions have lower rates.\n\n3. **Usage**: Higher levels of brightness indicate more robust usage of outdoor lighting, which is highly correlated with overall energy consumption.\n\n4.
**Nighttime Lights**: Annual composites of VIIRS nighttime light output.\n\nFor more information and methodology, please visit the [HREA website](http://www-personal.umich.edu/~brianmin/HREA/index.html).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "electricity,hrea,viirs", "license": "CC-BY-4.0", "title": "HREA: High Resolution Electricity Access", "missionStartDate": "2012-12-31T00:00:00Z"}, "modis-13Q1-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) Vegetation Indices Version 6.1 data are generated every 16 days at 250 meter (m) spatial resolution as a Level 3 product. The MOD13Q1 product provides two primary vegetation layers. The first is the Normalized Difference Vegetation Index (NDVI), which is referred to as the continuity index to the existing National Oceanic and Atmospheric Administration-Advanced Very High Resolution Radiometer (NOAA-AVHRR) derived NDVI. The second vegetation layer is the Enhanced Vegetation Index (EVI), which has improved sensitivity over high biomass regions. The algorithm chooses the best available pixel value from all the acquisitions from the 16-day period. The criteria used are low clouds, low view angle, and the highest NDVI/EVI value. Along with the vegetation layers and the two quality layers, the HDF file will have MODIS reflectance bands 1 (red), 2 (near-infrared), 3 (blue), and 7 (mid-infrared), as well as four observation layers.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod13q1,modis,modis-13q1-061,myd13q1,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Vegetation Indices 16-Day (250m)", "missionStartDate": "2000-02-18T00:00:00Z"}, "modis-14A2-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) Thermal Anomalies and Fire 8-Day Version 6.1 data are generated at 1 kilometer (km) spatial resolution as a Level 3 product. The MOD14A2 gridded composite contains the maximum value of the individual fire pixel classes detected during the eight days of acquisition. The Science Dataset (SDS) layers include the fire mask and pixel quality indicators.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,fire,global,mod14a2,modis,modis-14a2-061,myd14a2,nasa,satellite,terra", "license": "proprietary", "title": "MODIS Thermal Anomalies/Fire 8-Day", "missionStartDate": "2000-02-18T00:00:00Z"}, "sentinel-2-l2a": {"abstract": "The [Sentinel-2](https://sentinel.esa.int/web/sentinel/missions/sentinel-2) program provides global imagery in thirteen spectral bands at 10m-60m resolution and a revisit time of approximately five days.
This dataset represents the global Sentinel-2 archive, from 2016 to the present, processed to L2A (bottom-of-atmosphere) using [Sen2Cor](https://step.esa.int/main/snap-supported-plugins/sen2cor/) and converted to [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.", "instrument": "msi", "platform": "sentinel-2", "platformSerialIdentifier": "Sentinel-2A,Sentinel-2B", "processingLevel": null, "keywords": "copernicus,esa,global,imagery,msi,reflectance,satellite,sentinel,sentinel-2,sentinel-2-l2a,sentinel-2a,sentinel-2b", "license": "proprietary", "title": "Sentinel-2 Level-2A", "missionStartDate": "2015-06-27T10:25:31Z"}, "modis-15A2H-061": {"abstract": "The Version 6.1 Moderate Resolution Imaging Spectroradiometer (MODIS) Level 4, Combined Fraction of Photosynthetically Active Radiation (FPAR), and Leaf Area Index (LAI) product is an 8-day composite dataset with 500 meter pixel size. The algorithm chooses the best pixel available from within the 8-day period. LAI is defined as the one-sided green leaf area per unit ground area in broadleaf canopies and as one-half the total needle surface area per unit ground area in coniferous canopies. FPAR is defined as the fraction of incident photosynthetically active radiation (400-700 nm) absorbed by the green elements of a vegetation canopy.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mcd15a2h,mod15a2h,modis,modis-15a2h-061,myd15a2h,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Leaf Area Index/FPAR 8-Day", "missionStartDate": "2002-07-04T00:00:00Z"}, "modis-11A1-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) Land Surface Temperature/Emissivity Daily Version 6.1 product provides daily per-pixel Land Surface Temperature and Emissivity (LST&E) with 1 kilometer (km) spatial resolution in a 1,200 by 1,200 km grid. The pixel temperature value is derived from the MOD11_L2 swath product. Above 30 degrees latitude, some pixels may have multiple observations where the criteria for clear-sky are met. When this occurs, the pixel value is a result of the average of all qualifying observations. Provided along with the daytime and nighttime surface temperature bands are associated quality control assessments, observation times, view zenith angles, and clear-sky coverages along with bands 31 and 32 emissivities from land cover types.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod11a1,modis,modis-11a1-061,myd11a1,nasa,satellite,temperature,terra", "license": "proprietary", "title": "MODIS Land Surface Temperature/Emissivity Daily", "missionStartDate": "2000-02-24T00:00:00Z"}, "modis-15A3H-061": {"abstract": "The MCD15A3H Version 6.1 Moderate Resolution Imaging Spectroradiometer (MODIS) Level 4, Combined Fraction of Photosynthetically Active Radiation (FPAR), and Leaf Area Index (LAI) product is a 4-day composite data set with 500 meter pixel size. The algorithm chooses the best pixel available from all the acquisitions of both MODIS sensors located on NASA's Terra and Aqua satellites from within the 4-day period. LAI is defined as the one-sided green leaf area per unit ground area in broadleaf canopies and as one-half the total needle surface area per unit ground area in coniferous canopies.
FPAR is defined as the fraction of incident photosynthetically active radiation (400-700 nm) absorbed by the green elements of a vegetation canopy.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mcd15a3h,modis,modis-15a3h-061,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Leaf Area Index/FPAR 4-Day", "missionStartDate": "2002-07-04T00:00:00Z"}, "modis-13A1-061": {"abstract": "The Moderate Resolution Imaging Spectroradiometer (MODIS) Vegetation Indices 16-Day Version 6.1 product provides Vegetation Index (VI) values on a per-pixel basis at 500 meter (m) spatial resolution. There are two primary vegetation layers. The first is the Normalized Difference Vegetation Index (NDVI), which is referred to as the continuity index to the existing National Oceanic and Atmospheric Administration-Advanced Very High Resolution Radiometer (NOAA-AVHRR) derived NDVI. The second vegetation layer is the Enhanced Vegetation Index (EVI), which has improved sensitivity over high biomass regions. The algorithm for this product chooses the best available pixel value from all the acquisitions from the 16-day period. The criteria used are low clouds, low view angle, and the highest NDVI/EVI value. Provided along with the vegetation layers and two quality assurance (QA) layers are reflectance bands 1 (red), 2 (near-infrared), 3 (blue), and 7 (mid-infrared), as well as four observation layers.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod13a1,modis,modis-13a1-061,myd13a1,nasa,satellite,terra,vegetation", "license": "proprietary", "title": "MODIS Vegetation Indices 16-Day (500m)", "missionStartDate": "2000-02-18T00:00:00Z"}, "daymet-daily-na": {"abstract": "Gridded estimates of daily weather parameters. [Daymet](https://daymet.ornl.gov) Version 4 variables include the following parameters: minimum temperature, maximum temperature, precipitation, shortwave radiation, vapor pressure, snow water equivalent, and day length.\n\n[Daymet](https://daymet.ornl.gov/) provides measurements of near-surface meteorological conditions; the main purpose is to provide data estimates where no instrumentation exists. The dataset covers the period from January 1, 1980 to the present. Each year is processed individually at the close of a calendar year. Data are in a Lambert conformal conic projection for North America and are distributed in Zarr and NetCDF formats, compliant with the [Climate and Forecast (CF) metadata conventions (version 1.6)](http://cfconventions.org/).\n\nUse the DOI at [https://doi.org/10.3334/ORNLDAAC/1840](https://doi.org/10.3334/ORNLDAAC/1840) to cite your usage of the data.\n\nThis dataset provides coverage for North America; Hawaii and Puerto Rico are provided in [separate datasets](https://planetarycomputer.microsoft.com/dataset/group/daymet#daily).\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "daymet,daymet-daily-na,north-america,precipitation,temperature,vapor-pressure,weather", "license": "proprietary", "title": "Daymet Daily North America", "missionStartDate": "1980-01-01T12:00:00Z"}, "nrcan-landcover": {"abstract": "Collection of Land Cover products for Canada as produced by Natural Resources Canada using Landsat satellite imagery.
This collection of cartographic products offers classified Land Cover of Canada at a 30 metre scale, updated on a 5-year basis.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "canada,land-cover,landsat,north-america,nrcan-landcover,remote-sensing", "license": "OGL-Canada-2.0", "title": "Land Cover of Canada", "missionStartDate": "2015-01-01T00:00:00Z"}, "modis-10A2-061": {"abstract": "This global Level-3 (L3) data set provides the maximum snow cover extent observed over an eight-day period within 10degx10deg MODIS sinusoidal grid tiles. Tiles are generated by compositing 500 m observations from the 'MODIS Snow Cover Daily L3 Global 500m Grid' data set. A bit flag index is used to track the eight-day snow/no-snow chronology for each 500 m cell.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod10a2,modis,modis-10a2-061,myd10a2,nasa,satellite,snow,terra", "license": "proprietary", "title": "MODIS Snow Cover 8-day", "missionStartDate": "2000-02-18T00:00:00Z"}, "ecmwf-forecast": {"abstract": "The [ECMWF catalog of real-time products](https://www.ecmwf.int/en/forecasts/datasets/catalogue-ecmwf-real-time-products) offers real-time meteorological and oceanographic products from the ECMWF forecast system. Users should consult the [ECMWF Forecast User Guide](https://confluence.ecmwf.int/display/FUG/1+Introduction) for detailed information on each of the products.\n\n## Overview of products\n\nThe following diagram shows the publishing schedule of the various products.\n\n\n\nThe vertical axis shows the various products, defined below, which are grouped by combinations of `stream`, `forecast type`, and `reference time`. The horizontal axis shows *forecast times* in 3-hour intervals out from the reference time. A black square over a particular forecast time, or step, indicates that a forecast is made for that forecast time, for that particular `stream`, `forecast type`, `reference time` combination.\n\n* **stream** is the forecasting system that produced the data. The values are available in the `ecmwf:stream` summary of the STAC collection. They are:\n * `enfo`: [ensemble forecast](https://confluence.ecmwf.int/display/FUG/ENS+-+Ensemble+Forecasts), atmospheric fields\n * `mmsf`: [multi-model seasonal forecasts](https://confluence.ecmwf.int/display/FUG/Long-Range+%28Seasonal%29+Forecast) fields from the ECMWF model only.\n * `oper`: [high-resolution forecast](https://confluence.ecmwf.int/display/FUG/HRES+-+High-Resolution+Forecast), atmospheric fields\n * `scda`: short cut-off high-resolution forecast, atmospheric fields (also known as \"high-frequency products\")\n * `scwv`: short cut-off high-resolution forecast, ocean wave fields (also known as \"high-frequency products\"),\n * `waef`: [ensemble forecast](https://confluence.ecmwf.int/display/FUG/ENS+-+Ensemble+Forecasts), ocean wave fields, and\n * `wave`: wave model\n* **type** is the forecast type. The values are available in the `ecmwf:type` summary of the STAC collection. They are:\n * `fc`: forecast\n * `ef`: ensemble forecast\n * `pf`: ensemble probabilities\n * `tf`: trajectory forecast for tropical cyclone tracks\n* **reference time** is the hours after midnight when the model was run.
Each stream / type will produce assets for different forecast times (steps from the reference datetime) depending on the reference time.\n\nVisit the [ECMWF's User Guide](https://confluence.ecmwf.int/display/UDOC/ECMWF+Open+Data+-+Real+Time) for more details on each of the various products.\n\nAssets are available for the previous 30 days.\n\n## Asset overview\n\nThe data are provided as [GRIB2 files](https://confluence.ecmwf.int/display/CKB/What+are+GRIB+files+and+how+can+I+read+them).\nAdditionally, [index files](https://confluence.ecmwf.int/display/UDOC/ECMWF+Open+Data+-+Real+Time#ECMWFOpenDataRealTime-IndexFilesIndexfiles) are provided, which can be used to read subsets of the data from Azure Blob Storage.\n\nWithin each `stream`, `forecast type`, `reference time`, the structure of the data is mostly consistent. Each GRIB2 file will have the\nsame data variables and coordinates (aside from `time`, which changes with the *reference time*, and `step`, which changes with the *forecast time*). The exception\nis the `enfo-ep` and `waef-ep` products, which have more `step`s in the 240-hour forecast than in the 360-hour forecast. \n\nSee the example notebook for more on how to access the data.\n\n## STAC metadata\n\nThe Planetary Computer provides a single STAC item per GRIB2 file. Each GRIB2 file is global in extent, so every item has the same\n`bbox` and `geometry`.\n\nA few custom properties are available on each STAC item, which can be used in searches to narrow down the data to items of interest:\n\n* `ecmwf:stream`: The forecasting system (see above for definitions). The full set of values is available in the Collection's summaries.\n* `ecmwf:type`: The forecast type (see above for definitions). The full set of values is available in the Collection's summaries.\n* `ecmwf:step`: The offset from the reference datetime, expressed as ``, for example `\"3h\"` means \"3 hours from the reference datetime\". \n* `ecmwf:reference_datetime`: The datetime when the model was run. This indicates when the forecast *was made*, rather than the time for which it is valid.\n* `ecmwf:forecast_datetime`: The datetime for which the forecast is valid. This is also set as the item's `datetime`.\n\nSee the example notebook for more on how to use the STAC metadata to query for particular data.\n\n## Attribution\n\nThe products listed and described on this page are available to the public and their use is governed by the [Creative Commons CC-4.0-BY license and the ECMWF Terms of Use](https://apps.ecmwf.int/datasets/licences/general/). This means that the data may be redistributed and used commercially, subject to appropriate attribution.\n\nThe following wording should be attached to the use of this ECMWF dataset: \n\n1. Copyright statement: Copyright \"\u00a9 [year] European Centre for Medium-Range Weather Forecasts (ECMWF)\".\n2. Source [www.ecmwf.int](http://www.ecmwf.int/)\n3. License Statement: This data is published under a Creative Commons Attribution 4.0 International (CC BY 4.0). [https://creativecommons.org/licenses/by/4.0/](https://creativecommons.org/licenses/by/4.0/)\n4. Disclaimer: ECMWF does not accept any liability whatsoever for any error or omission in the data, their availability, or for any loss or damage arising from their use.\n5. Where applicable, an indication if the material has been modified and an indication of previous modifications.\n\nThe following wording shall be attached to services created with this ECMWF dataset:\n\n1.
Copyright statement: Copyright \"This service is based on data and products of the European Centre for Medium-Range Weather Forecasts (ECMWF)\".\n2. Source www.ecmwf.int\n3. License Statement: This ECMWF data is published under a Creative Commons Attribution 4.0 International (CC BY 4.0). [https://creativecommons.org/licenses/by/4.0/](https://creativecommons.org/licenses/by/4.0/)\n4. Disclaimer: ECMWF does not accept any liability whatsoever for any error or omission in the data, their availability, or for any loss or damage arising from their use.\n5. Where applicable, an indication if the material has been modified and an indication of previous modifications\n\n## More information\n\nFor more, see the [ECMWF's User Guide](https://confluence.ecmwf.int/display/UDOC/ECMWF+Open+Data+-+Real+Time) and [example notebooks](https://github.com/ecmwf/notebook-examples/tree/master/opencharts).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "ecmwf,ecmwf-forecast,forecast,weather", "license": "CC-BY-4.0", "title": "ECMWF Open Data (real-time)", "missionStartDate": null}, "noaa-mrms-qpe-24h-pass2": {"abstract": "The [Multi-Radar Multi-Sensor (MRMS) Quantitative Precipitation Estimation (QPE)](https://www.nssl.noaa.gov/projects/mrms/) products are seamless 1-km mosaics of precipitation accumulation covering the continental United States, Alaska, Hawaii, the Caribbean, and Guam. The products are automatically generated through integration of data from multiple radars and radar networks, surface and satellite observations, numerical weather prediction (NWP) models, and climatology. The products are updated hourly at the top of the hour.\n\nMRMS QPE is available as a \"Pass 1\" or \"Pass 2\" product. The Pass 1 product is available with a 60-minute latency and includes 60-65% of gauges. The Pass 2 product has a higher latency of 120 minutes, but includes 99% of gauges. The Pass 1 and Pass 2 products are broken into 1-, 3-, 6-, 12-, 24-, 48-, and 72-hour accumulation sub-products.\n\nThis Collection contains the **24-Hour Pass 2** sub-product, i.e., 24-hour cumulative precipitation accumulation with a 2-hour latency. The data are available in [Cloud Optimized GeoTIFF](https://www.cogeo.org/) format as well as the original source GRIB2 format files. The GRIB2 files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "caribbean,guam,mrms,noaa,noaa-mrms-qpe-24h-pass2,precipitation,qpe,united-states,weather", "license": "proprietary", "title": "NOAA MRMS QPE 24-Hour Pass 2", "missionStartDate": "2022-07-21T20:00:00Z"}, "sentinel-1-grd": {"abstract": "The [Sentinel-1](https://sentinel.esa.int/web/sentinel/missions/sentinel-1) mission is a constellation of two polar-orbiting satellites, operating day and night performing C-band synthetic aperture radar imaging. The Level-1 Ground Range Detected (GRD) products in this Collection consist of focused SAR data that has been detected, multi-looked and projected to ground range using the Earth ellipsoid model WGS84. The ellipsoid projection of the GRD products is corrected using the terrain height specified in the product general annotation. 
The terrain height used varies in azimuth but is constant in range (though it can differ for each IW/EW sub-swath).\n\nGround range coordinates are the slant range coordinates projected onto the ellipsoid of the Earth. Pixel values represent detected amplitude. Phase information is lost. The resulting product has approximately square resolution pixels and square pixel spacing with reduced speckle at a cost of reduced spatial resolution.\n\nFor the IW and EW GRD products, multi-looking is performed on each burst individually. All bursts in all sub-swaths are then seamlessly merged to form a single, contiguous, ground range, detected image per polarization.\n\nFor more information see the [ESA documentation](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/product-types-processing-levels/level-1).\n\n### Terrain Correction\n\nUsers might want to geometrically or radiometrically terrain-correct the Sentinel-1 GRD data from this collection. The [Sentinel-1-RTC Collection](https://planetarycomputer.microsoft.com/dataset/sentinel-1-rtc) is a global radiometrically terrain-corrected dataset derived from Sentinel-1 GRD. Additionally, users can terrain-correct on the fly using [any DEM available on the Planetary Computer](https://planetarycomputer.microsoft.com/catalog?tags=DEM). See [Customizable radiometric terrain correction](https://planetarycomputer.microsoft.com/docs/tutorials/customizable-rtc-sentinel1/) for more.", "instrument": null, "platform": "Sentinel-1", "platformSerialIdentifier": "SENTINEL-1A,SENTINEL-1B", "processingLevel": null, "keywords": "c-band,copernicus,esa,grd,sar,sentinel,sentinel-1,sentinel-1-grd,sentinel-1a,sentinel-1b", "license": "proprietary", "title": "Sentinel 1 Level-1 Ground Range Detected (GRD)", "missionStartDate": "2014-10-10T00:28:21Z"}, "nasadem": {"abstract": "[NASADEM](https://earthdata.nasa.gov/esds/competitive-programs/measures/nasadem) provides global topographic data at 1 arc-second (~30m) horizontal resolution, derived primarily from data captured via the [Shuttle Radar Topography Mission](https://www2.jpl.nasa.gov/srtm/) (SRTM).\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "dem,elevation,jpl,nasa,nasadem,nga,srtm,usgs", "license": "proprietary", "title": "NASADEM HGT v001", "missionStartDate": "2000-02-20T00:00:00Z"}, "io-lulc": {"abstract": "__Note__: _A new version of this item is available for your use. This mature version of the map remains available for use in existing applications. This item will be retired in December 2024. There is 2020 data available in the newer [9-class dataset](https://planetarycomputer.microsoft.com/dataset/io-lulc-9-class)._\n\nGlobal estimates of 10-class land use/land cover (LULC) for 2020, derived from ESA Sentinel-2 imagery at 10m resolution. This dataset was generated by [Impact Observatory](http://impactobservatory.com/), who used billions of human-labeled pixels (curated by the National Geographic Society) to train a deep learning model for land classification.
The global map was produced by applying this model to the relevant yearly Sentinel-2 scenes on the Planetary Computer.\n\nThis dataset is also available on the [ArcGIS Living Atlas of the World](https://livingatlas.arcgis.com/landcover/).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "global,io-lulc,land-cover,land-use,sentinel", "license": "CC-BY-4.0", "title": "Esri 10-Meter Land Cover (10-class)", "missionStartDate": "2017-01-01T00:00:00Z"}, "landsat-c2-l1": {"abstract": "Landsat Collection 2 Level-1 data, consisting of quantized and calibrated scaled Digital Numbers (DN) representing the multispectral image data. These [Level-1](https://www.usgs.gov/landsat-missions/landsat-collection-2-level-1-data) data can be [rescaled](https://www.usgs.gov/landsat-missions/using-usgs-landsat-level-1-data-product) to top of atmosphere (TOA) reflectance and/or radiance. Thermal band data can be rescaled to TOA brightness temperature.\n\nThis dataset represents the global archive of Level-1 data from [Landsat Collection 2](https://www.usgs.gov/core-science-systems/nli/landsat/landsat-collection-2) acquired by the [Multispectral Scanner System](https://landsat.gsfc.nasa.gov/multispectral-scanner-system/) onboard Landsat 1 through Landsat 5 from July 7, 1972 to January 7, 2013. Images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\n", "instrument": "mss", "platform": null, "platformSerialIdentifier": "landsat-1,landsat-2,landsat-3,landsat-4,landsat-5", "processingLevel": null, "keywords": "global,imagery,landsat,landsat-1,landsat-2,landsat-3,landsat-4,landsat-5,landsat-c2-l1,mss,nasa,satellite,usgs", "license": "proprietary", "title": "Landsat Collection 2 Level-1", "missionStartDate": "1972-07-25T00:00:00Z"}, "drcog-lulc": {"abstract": "The [Denver Regional Council of Governments (DRCOG) Land Use/Land Cover (LULC)](https://drcog.org/services-and-resources/data-maps-and-modeling/regional-land-use-land-cover-project) datasets are developed in partnership with the [Babbitt Center for Land and Water Policy](https://www.lincolninst.edu/our-work/babbitt-center-land-water-policy) and the [Chesapeake Conservancy](https://www.chesapeakeconservancy.org/)'s Conservation Innovation Center (CIC). DRCOG LULC includes 2018 data at 3.28ft (1m) resolution covering 1,000 square miles and 2020 data at 1ft resolution covering 6,000 square miles of the Denver, Colorado region. The classification data is derived from the USDA's 1m National Agriculture Imagery Program (NAIP) aerial imagery and leaf-off aerial ortho-imagery captured as part of the [Denver Regional Aerial Photography Project](https://drcog.org/services-and-resources/data-maps-and-modeling/denver-regional-aerial-photography-project) (6in resolution everywhere except the mountainous regions to the west, which are 1ft resolution).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "drcog-lulc,land-cover,land-use,naip,usda", "license": "proprietary", "title": "Denver Regional Council of Governments Land Use Land Cover", "missionStartDate": "2018-01-01T00:00:00Z"}, "chesapeake-lc-7": {"abstract": "A high-resolution 1-meter [land cover data product](https://www.chesapeakeconservancy.org/conservation-innovation-center/high-resolution-data/land-cover-data-project/) in raster format for the entire Chesapeake Bay watershed based on 2013-2014 imagery from the National Agriculture Imagery Program (NAIP).
The product area encompasses over 250,000 square kilometers in New York, Pennsylvania, Maryland, Delaware, West Virginia, Virginia, and the District of Columbia. The dataset was created by the [Chesapeake Conservancy](https://www.chesapeakeconservancy.org/) [Conservation Innovation Center](https://www.chesapeakeconservancy.org/conservation-innovation-center/) for the [Chesapeake Bay Program](https://www.chesapeakebay.net/), which is a regional partnership of EPA, other federal, state, and local agencies and governments, nonprofits, and academic institutions that leads and directs Chesapeake Bay restoration efforts. \n\nThe dataset is composed of a uniform set of 7 land cover classes. Additional information is available in a [User Guide](https://www.chesapeakeconservancy.org/wp-content/uploads/2020/06/Chesapeake_Conservancy_LandCover101Guide_June2020.pdf). Images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "chesapeake-bay-watershed,chesapeake-conservancy,chesapeake-lc-7,land-cover", "license": "proprietary", "title": "Chesapeake Land Cover (7-class)", "missionStartDate": "2013-01-01T00:00:00Z"}, "chesapeake-lc-13": {"abstract": "A high-resolution 1-meter [land cover data product](https://www.chesapeakeconservancy.org/conservation-innovation-center/high-resolution-data/land-cover-data-project/) in raster format for the entire Chesapeake Bay watershed based on 2013-2014 imagery from the National Agriculture Imagery Program (NAIP). The product area encompasses over 250,000 square kilometers in New York, Pennsylvania, Maryland, Delaware, West Virginia, Virginia, and the District of Columbia. The dataset was created by the [Chesapeake Conservancy](https://www.chesapeakeconservancy.org/) [Conservation Innovation Center](https://www.chesapeakeconservancy.org/conservation-innovation-center/) for the [Chesapeake Bay Program](https://www.chesapeakebay.net/), which is a regional partnership of EPA, other federal, state, and local agencies and governments, nonprofits, and academic institutions that leads and directs Chesapeake Bay restoration efforts. \n\nThe dataset is composed of 13 land cover classes, although not all classes are used in all areas. Additional information is available in a [User Guide](https://www.chesapeakeconservancy.org/wp-content/uploads/2020/06/Chesapeake_Conservancy_LandCover101Guide_June2020.pdf) and [Class Description](https://www.chesapeakeconservancy.org/wp-content/uploads/2020/03/LC_Class_Descriptions.pdf) document. Images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "chesapeake-bay-watershed,chesapeake-conservancy,chesapeake-lc-13,land-cover", "license": "proprietary", "title": "Chesapeake Land Cover (13-class)", "missionStartDate": "2013-01-01T00:00:00Z"}, "chesapeake-lu": {"abstract": "A high-resolution 1-meter [land use data product](https://www.chesapeakeconservancy.org/conservation-innovation-center/high-resolution-data/land-use-data-project/) in raster format for the entire Chesapeake Bay watershed.
The dataset was created by modifying the 2013-2014 high-resolution [land cover dataset](https://www.chesapeakeconservancy.org/conservation-innovation-center/high-resolution-data/land-cover-data-project/) using 13 ancillary datasets including data on zoning, land use, parcel boundaries, landfills, floodplains, and wetlands. The product area encompasses over 250,000 square kilometers in New York, Pennsylvania, Maryland, Delaware, West Virginia, Virginia, and the District of Columbia. The dataset was created by the [Chesapeake Conservancy](https://www.chesapeakeconservancy.org/) [Conservation Innovation Center](https://www.chesapeakeconservancy.org/conservation-innovation-center/) for the [Chesapeake Bay Program](https://www.chesapeakebay.net/), which is a regional partnership of EPA, other federal, state, and local agencies and governments, nonprofits, and academic institutions that leads and directs Chesapeake Bay restoration efforts.\n\nThe dataset is composed of 17 land use classes in Virginia and 16 classes in all other jurisdictions. Additional information is available in a land use [Class Description](https://www.chesapeakeconservancy.org/wp-content/uploads/2018/11/2013-Phase-6-Mapped-Land-Use-Definitions-Updated-PC-11302018.pdf) document. Images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "chesapeake-bay-watershed,chesapeake-conservancy,chesapeake-lu,land-use", "license": "proprietary", "title": "Chesapeake Land Use", "missionStartDate": "2013-01-01T00:00:00Z"}, "noaa-mrms-qpe-1h-pass1": {"abstract": "The [Multi-Radar Multi-Sensor (MRMS) Quantitative Precipitation Estimation (QPE)](https://www.nssl.noaa.gov/projects/mrms/) products are seamless 1-km mosaics of precipitation accumulation covering the continental United States, Alaska, Hawaii, the Caribbean, and Guam. The products are automatically generated through integration of data from multiple radars and radar networks, surface and satellite observations, numerical weather prediction (NWP) models, and climatology. The products are updated hourly at the top of the hour.\n\nMRMS QPE is available as a \"Pass 1\" or \"Pass 2\" product. The Pass 1 product is available with a 60-minute latency and includes 60-65% of gauges. The Pass 2 product has a higher latency of 120 minutes, but includes 99% of gauges. The Pass 1 and Pass 2 products are broken into 1-, 3-, 6-, 12-, 24-, 48-, and 72-hour accumulation sub-products.\n\nThis Collection contains the **1-Hour Pass 1** sub-product, i.e., 1-hour cumulative precipitation accumulation with a 1-hour latency. The data are available in [Cloud Optimized GeoTIFF](https://www.cogeo.org/) format as well as the original source GRIB2 format files. 
The GRIB2 files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "caribbean,guam,mrms,noaa,noaa-mrms-qpe-1h-pass1,precipitation,qpe,united-states,weather", "license": "proprietary", "title": "NOAA MRMS QPE 1-Hour Pass 1", "missionStartDate": "2022-07-21T20:00:00Z"}, "noaa-mrms-qpe-1h-pass2": {"abstract": "The [Multi-Radar Multi-Sensor (MRMS) Quantitative Precipitation Estimation (QPE)](https://www.nssl.noaa.gov/projects/mrms/) products are seamless 1-km mosaics of precipitation accumulation covering the continental United States, Alaska, Hawaii, the Caribbean, and Guam. The products are automatically generated through integration of data from multiple radars and radar networks, surface and satellite observations, numerical weather prediction (NWP) models, and climatology. The products are updated hourly at the top of the hour.\n\nMRMS QPE is available as a \"Pass 1\" or \"Pass 2\" product. The Pass 1 product is available with a 60-minute latency and includes 60-65% of gauges. The Pass 2 product has a higher latency of 120 minutes, but includes 99% of gauges. The Pass 1 and Pass 2 products are broken into 1-, 3-, 6-, 12-, 24-, 48-, and 72-hour accumulation sub-products.\n\nThis Collection contains the **1-Hour Pass 2** sub-product, i.e., 1-hour cumulative precipitation accumulation with a 2-hour latency. The data are available in [Cloud Optimized GeoTIFF](https://www.cogeo.org/) format as well as the original source GRIB2 format files. The GRIB2 files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "caribbean,guam,mrms,noaa,noaa-mrms-qpe-1h-pass2,precipitation,qpe,united-states,weather", "license": "proprietary", "title": "NOAA MRMS QPE 1-Hour Pass 2", "missionStartDate": "2022-07-21T20:00:00Z"}, "noaa-nclimgrid-monthly": {"abstract": "The [NOAA U.S. Climate Gridded Dataset (NClimGrid)](https://www.ncei.noaa.gov/access/metadata/landing-page/bin/iso?id=gov.noaa.ncdc:C00332) consists of four climate variables derived from the [Global Historical Climatology Network daily (GHCNd)](https://www.ncei.noaa.gov/products/land-based-station/global-historical-climatology-network-daily) dataset: maximum temperature, minimum temperature, average temperature, and precipitation. The data is provided in 1/24 degree lat/lon (nominal 5x5 kilometer) grids for the Continental United States (CONUS). \n\nNClimGrid data is available in monthly and daily temporal intervals, with the daily data further differentiated as \"prelim\" (preliminary) or \"scaled\". Preliminary daily data is available within approximately three days of collection. Once a calendar month of preliminary daily data has been collected, it is scaled to match the corresponding monthly value. Monthly data is available from 1895 to the present. Daily preliminary and daily scaled data is available from 1951 to the present. \n\nThis Collection contains **Monthly** data. See the journal publication [\"Improved Historical Temperature and Precipitation Time Series for U.S. 
Climate Divisions\"](https://journals.ametsoc.org/view/journals/apme/53/5/jamc-d-13-0248.1.xml) for more information about monthly gridded data.\n\nUsers of all NClimGrid data product should be aware that [NOAA advertises](https://www.ncei.noaa.gov/access/metadata/landing-page/bin/iso?id=gov.noaa.ncdc:C00332) that:\n>\"On an annual basis, approximately one year of 'final' NClimGrid data is submitted to replace the initially supplied 'preliminary' data for the same time period. Users should be sure to ascertain which level of data is required for their research.\"\n\nThe source NetCDF files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n\n*Note*: The Planetary Computer currently has STAC metadata for just the monthly collection. We'll have STAC metadata for daily data in our next release. In the meantime, you can access the daily NetCDF data directly from Blob Storage using the storage container at `https://nclimgridwesteurope.blob.core.windows.net/nclimgrid`. See https://planetarycomputer.microsoft.com/docs/concepts/data-catalog/#access-patterns for more.*\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,nclimgrid,noaa,noaa-nclimgrid-monthly,precipitation,temperature,united-states", "license": "proprietary", "title": "Monthly NOAA U.S. Climate Gridded Dataset (NClimGrid)", "missionStartDate": "1895-01-01T00:00:00Z"}, "goes-glm": {"abstract": "The [Geostationary Lightning Mapper (GLM)](https://www.goes-r.gov/spacesegment/glm.html) is a single-channel, near-infrared optical transient detector that can detect the momentary changes in an optical scene, indicating the presence of lightning. GLM measures total lightning (in-cloud, cloud-to-cloud and cloud-to-ground) activity continuously over the Americas and adjacent ocean regions with near-uniform spatial resolution of approximately 10 km. GLM collects information such as the frequency, location and extent of lightning discharges to identify intensifying thunderstorms and tropical cyclones. Trends in total lightning available from the GLM provide critical information to forecasters, allowing them to focus on developing severe storms much earlier and before these storms produce damaging winds, hail or even tornadoes.\n\nThe GLM data product consists of a hierarchy of earth-located lightning radiant energy measures including events, groups, and flashes:\n\n- Lightning events are detected by the instrument.\n- Lightning groups are a collection of one or more lightning events that satisfy temporal and spatial coincidence thresholds.\n- Similarly, lightning flashes are a collection of one or more lightning groups that satisfy temporal and spatial coincidence thresholds.\n\nThe product includes the relationship among lightning events, groups, and flashes, and the area coverage of lightning groups and flashes. The product also includes processing and data quality metadata, and satellite state and location information. \n\nThis Collection contains GLM L2 data in tabular ([GeoParquet](https://github.com/opengeospatial/geoparquet)) format and the original source NetCDF format. 
The NetCDF files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).", "instrument": "FM1,FM2", "platform": "GOES", "platformSerialIdentifier": "GOES-16,GOES-17", "processingLevel": ["L2"], "keywords": "fm1,fm2,goes,goes-16,goes-17,goes-glm,l2,lightning,nasa,noaa,satellite,weather", "license": "proprietary", "title": "GOES-R Lightning Detection", "missionStartDate": "2018-02-13T16:10:00Z"}, "usda-cdl": {"abstract": "The Cropland Data Layer (CDL) is a product of the USDA National Agricultural Statistics Service (NASS) with the mission \"to provide timely, accurate and useful statistics in service to U.S. agriculture\" (Johnson and Mueller, 2010, p. 1204). The CDL is a crop-specific land cover classification product of more than 100 crop categories grown in the United States. CDLs are derived using a supervised land cover classification of satellite imagery. The supervised classification relies on first manually identifying pixels within certain images, often called training sites, which represent the same crop or land cover type. Using these training sites, a spectral signature is developed for each crop type that is then used by the analysis software to identify all other pixels in the satellite image representing the same crop. Using this method, a new CDL is compiled annually and released to the public a few months after the end of the growing season.\n\nThis collection includes Cropland, Confidence, Cultivated, and Frequency products.\n\n- Cropland: Crop-specific land cover data created annually.\n- Confidence: The predicted confidence associated with an output pixel. A value of zero indicates low confidence, while a value of 100 indicates high confidence.\n- Cultivated: cultivated and non-cultivated land cover for CONUS based on land cover information derived from the 2017 through 2021 Cropland products.\n- Frequency: crop-specific planting frequency based on land cover information derived from the 2008 through 2021 Cropland products. There are currently four individual crop frequency data layers that represent four major crops: corn, cotton, soybeans, and wheat.\n\nFor more, visit the [Cropland Data Layer homepage](https://www.nass.usda.gov/Research_and_Science/Cropland/SARS1a.php).", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "agriculture,land-cover,land-use,united-states,usda,usda-cdl", "license": "proprietary", "title": "USDA Cropland Data Layers (CDLs)", "missionStartDate": "2008-01-01T00:00:00Z"}, "eclipse": {"abstract": "The [Project Eclipse](https://www.microsoft.com/en-us/research/project/project-eclipse/) Network is a low-cost air quality sensing network for cities and a research project led by the [Urban Innovation Group](https://www.microsoft.com/en-us/research/urban-innovation-research/) at Microsoft Research.\n\nProject Eclipse currently includes over 100 locations in Chicago, Illinois, USA.\n\nThis network was deployed starting in July 2021 through a collaboration with the City of Chicago, the Array of Things Project, JCDecaux Chicago, and the Environmental Law and Policy Center as well as local environmental justice organizations in the city.
[This talk](https://www.microsoft.com/en-us/research/video/technology-demo-project-eclipse-hyperlocal-air-quality-monitoring-for-cities/) documents the network design and data calibration strategy.\n\n## Storage resources\n\nData are stored in [Parquet](https://parquet.apache.org/) files in Azure Blob Storage in the West Europe Azure region, in the following blob container:\n\n`https://ai4edataeuwest.blob.core.windows.net/eclipse`\n\nWithin that container, the periodic occurrence snapshots are stored in `Chicago/YYYY-MM-DD`, where `YYYY-MM-DD` corresponds to the date of the snapshot.\nEach snapshot contains sensor readings for the next 7 days in Parquet format, starting with the date in the folder name YYYY-MM-DD.\nTherefore, the data files for the first snapshot are at\n\n`https://ai4edataeuwest.blob.core.windows.net/eclipse/chicago/2022-01-01/data_*.parquet`\n\nThe Parquet file schema is as described below. \n\n## Additional Documentation\n\nFor details on the calibration of PM2.5, O3 and NO2, please see [this PDF](https://ai4edatasetspublicassets.blob.core.windows.net/assets/aod_docs/Calibration_Doc_v1.1.pdf).\n\n## License and attribution\nPlease cite: Daepp, Cabral, Ranganathan et al. (2022) [Eclipse: An End-to-End Platform for Low-Cost, Hyperlocal Environmental Sensing in Cities. ACM/IEEE Information Processing in Sensor Networks. Milan, Italy.](https://www.microsoft.com/en-us/research/uploads/prod/2022/05/ACM_2022-IPSN_FINAL_Eclipse.pdf)\n\n## Contact\n\nFor questions about this dataset, contact [`msrurbanops@microsoft.com`](mailto:msrurbanops@microsoft.com?subject=eclipse%20question)\n\n\n## Learn more\n\nThe [Eclipse Project](https://www.microsoft.com/en-us/research/urban-innovation-research/) page provides an overview of Project Eclipse at Microsoft Research.\n\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "air-pollution,eclipse,pm25", "license": "proprietary", "title": "Urban Innovation Eclipse Sensor Data", "missionStartDate": "2021-01-01T00:00:00Z"}, "esa-cci-lc": {"abstract": "The ESA Climate Change Initiative (CCI) [Land Cover dataset](https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-land-cover?tab=overview) provides consistent global annual land cover maps at 300m spatial resolution from 1992 to 2020. The land cover classes are defined using the United Nations Food and Agriculture Organization's (UN FAO) [Land Cover Classification System](https://www.fao.org/land-water/land/land-governance/land-resources-planning-toolbox/category/details/en/c/1036361/) (LCCS). In addition to the land cover maps, four quality flags are produced to document the reliability of the classification and change detection. \n\nThe data in this Collection have been converted from the [original NetCDF data](https://planetarycomputer.microsoft.com/dataset/esa-cci-lc-netcdf) to a set of tiled [Cloud Optimized GeoTIFFs](https://www.cogeo.org/) (COGs).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "cci,esa,esa-cci-lc,global,land-cover", "license": "proprietary", "title": "ESA Climate Change Initiative Land Cover Maps (Cloud Optimized GeoTIFF)", "missionStartDate": "1992-01-01T00:00:00Z"}, "esa-cci-lc-netcdf": {"abstract": "The ESA Climate Change Initiative (CCI) [Land Cover dataset](https://cds.climate.copernicus.eu/cdsapp#!/dataset/satellite-land-cover?tab=overview) provides consistent global annual land cover maps at 300m spatial resolution from 1992 to 2020.
The land cover classes are defined using the United Nations Food and Agriculture Organization's (UN FAO) [Land Cover Classification System](https://www.fao.org/land-water/land/land-governance/land-resources-planning-toolbox/category/details/en/c/1036361/) (LCCS). In addition to the land cover maps, four quality flags are produced to document the reliability of the classification and change detection. \n\nThe data in this Collection are the original NetCDF files accessed from the [Copernicus Climate Data Store](https://cds.climate.copernicus.eu/#!/home). We recommend using the [`esa-cci-lc` Collection](https://planetarycomputer.microsoft.com/dataset/esa-cci-lc), which provides the data as Cloud Optimized GeoTIFFs.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "cci,esa,esa-cci-lc-netcdf,global,land-cover", "license": "proprietary", "title": "ESA Climate Change Initiative Land Cover Maps (NetCDF)", "missionStartDate": "1992-01-01T00:00:00Z"}, "fws-nwi": {"abstract": "The Wetlands Data Layer is the product of over 45 years of work by the National Wetlands Inventory (NWI) and its collaborators and currently contains more than 35 million wetland and deepwater features. This dataset, covering the conterminous United States, Hawaii, Puerto Rico, the Virgin Islands, Guam, the major Northern Mariana Islands and Alaska, continues to grow at a rate of 50 to 100 million acres annually as data are updated.\n\n**NOTE:** Due to the variation in use and analysis of this data by the end user, each state's wetlands data extends beyond the state boundary. Each state includes wetlands data that intersect the 1:24,000 quadrangles that contain part of that state (1:2,000,000 source data). This allows the user to clip the data to their specific analysis datasets. Beware that two adjacent states will contain some of the same data along their borders.\n\nFor more information, visit the National Wetlands Inventory [homepage](https://www.fws.gov/program/national-wetlands-inventory).\n\n## STAC Metadata\n\nIn addition to the `zip` asset in every STAC item, each item has its own assets unique to its wetlands. In general, each item will have several assets, each linking to a [geoparquet](https://github.com/opengeospatial/geoparquet) asset with data for the entire region or a sub-region within that state. Use the `cloud-optimized` [role](https://github.com/radiantearth/stac-spec/blob/master/item-spec/item-spec.md#asset-roles) to select just the geoparquet assets. See the Example Notebook for more.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "fws-nwi,united-states,usfws,wetlands", "license": "proprietary", "title": "FWS National Wetlands Inventory", "missionStartDate": "2022-10-01T00:00:00Z"}, "usgs-lcmap-conus-v13": {"abstract": "The [Land Change Monitoring, Assessment, and Projection](https://www.usgs.gov/special-topics/lcmap) (LCMAP) product provides land cover mapping and change monitoring from the U.S. Geological Survey's [Earth Resources Observation and Science](https://www.usgs.gov/centers/eros) (EROS) Center. LCMAP's Science Products are developed by applying time-series modeling on a per-pixel basis to [Landsat Analysis Ready Data](https://www.usgs.gov/landsat-missions/landsat-us-analysis-ready-data) (ARD) using an implementation of the [Continuous Change Detection and Classification](https://doi.org/10.1016/j.rse.2014.01.011) (CCDC) algorithm. All available clear (non-cloudy) U.S.
Landsat ARD observations are fit to a harmonic model to predict future Landsat-like surface reflectance. Where Landsat surface reflectance observations differ significantly from those predictions, a change is identified. Attributes of the resulting model sequences (e.g., start/end dates, residuals, model coefficients) are then used to produce a set of land surface change products and as inputs to the subsequent classification to thematic land cover. \n\nThis [STAC](https://stacspec.org/en) Collection contains [LCMAP CONUS Collection 1.3](https://www.usgs.gov/special-topics/lcmap/collection-13-conus-science-products), which was released in August 2022 for years 1985-2021. The data are tiled according to the Landsat ARD tile grid and consist of [Cloud Optimized GeoTIFFs](https://www.cogeo.org/) (COGs) and corresponding metadata files. Note that the provided COGs differ slightly from those in the USGS source data. They have been reprocessed to add overviews, \"nodata\" values where appropriate, and an updated projection definition.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "conus,land-cover,land-cover-change,lcmap,usgs,usgs-lcmap-conus-v13", "license": "proprietary", "title": "USGS LCMAP CONUS Collection 1.3", "missionStartDate": "1985-01-01T00:00:00Z"}, "usgs-lcmap-hawaii-v10": {"abstract": "The [Land Change Monitoring, Assessment, and Projection](https://www.usgs.gov/special-topics/lcmap) (LCMAP) product provides land cover mapping and change monitoring from the U.S. Geological Survey's [Earth Resources Observation and Science](https://www.usgs.gov/centers/eros) (EROS) Center. LCMAP's Science Products are developed by applying time-series modeling on a per-pixel basis to [Landsat Analysis Ready Data](https://www.usgs.gov/landsat-missions/landsat-us-analysis-ready-data) (ARD) using an implementation of the [Continuous Change Detection and Classification](https://doi.org/10.1016/j.rse.2014.01.011) (CCDC) algorithm. All available clear (non-cloudy) U.S. Landsat ARD observations are fit to a harmonic model to predict future Landsat-like surface reflectance. Where Landsat surface reflectance observations differ significantly from those predictions, a change is identified. Attributes of the resulting model sequences (e.g., start/end dates, residuals, model coefficients) are then used to produce a set of land surface change products and as inputs to the subsequent classification to thematic land cover. \n\nThis [STAC](https://stacspec.org/en) Collection contains [LCMAP Hawaii Collection 1.0](https://www.usgs.gov/special-topics/lcmap/collection-1-hawaii-science-products), which was released in January 2022 for years 2000-2020. The data are tiled according to the Landsat ARD tile grid and consist of [Cloud Optimized GeoTIFFs](https://www.cogeo.org/) (COGs) and corresponding metadata files. Note that the provided COGs differ slightly from those in the USGS source data. 
They have been reprocessed to add overviews, \"nodata\" values where appropriate, and an updated projection definition.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "hawaii,land-cover,land-cover-change,lcmap,usgs,usgs-lcmap-hawaii-v10", "license": "proprietary", "title": "USGS LCMAP Hawaii Collection 1.0", "missionStartDate": "2000-01-01T00:00:00Z"}, "noaa-climate-normals-tabular": {"abstract": "The [NOAA United States Climate Normals](https://www.ncei.noaa.gov/products/land-based-station/us-climate-normals) provide information about typical climate conditions for thousands of weather station locations across the United States. Normals act both as a ruler to compare current weather and as a predictor of conditions in the near future. The official normals are calculated for a uniform 30-year period, and consist of annual/seasonal, monthly, daily, and hourly averages and statistics of temperature, precipitation, and other climatological variables for each weather station. \n\nNOAA produces Climate Normals in accordance with the [World Meteorological Organization](https://public.wmo.int/en) (WMO), of which the United States is a member. The WMO requires each member nation to compute 30-year meteorological quantity averages at least every 30 years, and recommends an update each decade, in part to incorporate newer weather stations. The 1991\u20132020 U.S. Climate Normals are the latest in a series of decadal normals first produced in the 1950s. \n\nThis Collection contains tabular weather variable data at weather station locations in GeoParquet format, converted from the source CSV files. The source CSV files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n\nData are provided for annual/seasonal, monthly, daily, and hourly frequencies for the following time periods:\n\n- Legacy 30-year normals (1981\u20132010)\n- Supplemental 15-year normals (2006\u20132020)\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate-normals,climatology,conus,noaa,noaa-climate-normals-tabular,surface-observations,weather", "license": "proprietary", "title": "NOAA US Tabular Climate Normals", "missionStartDate": "1981-01-01T00:00:00Z"}, "noaa-climate-normals-netcdf": {"abstract": "The [NOAA Gridded United States Climate Normals](https://www.ncei.noaa.gov/products/land-based-station/us-climate-normals#tab-1027) provide a continuous grid of temperature and precipitation data across the contiguous United States (CONUS). The grids are derived from NOAA's [NClimGrid dataset](https://planetarycomputer.microsoft.com/dataset/group/noaa-nclimgrid), and resolutions (nominal 5x5 kilometer) and spatial extents (CONUS) therefore match that of NClimGrid. Monthly, seasonal, and annual gridded normals are computed from simple averages of the NClimGrid data and are provided for three time-periods: 1901\u20132000, 1991\u20132020, and 2006\u20132020. Daily gridded normals are smoothed to provide a continuous transition from one day to the next and are provided for two time-periods: 1991\u20132020, and 2006\u20132020.\n\nNOAA produces Climate Normals in accordance with the [World Meteorological Organization](https://public.wmo.int/en) (WMO), of which the United States is a member. 
The WMO requires each member nation to compute 30-year meteorological quantity averages at least every 30 years, and recommends an update each decade, in part to incorporate newer weather stations. The 1991\u20132020 U.S. Climate Normals are the latest in a series of decadal normals first produced in the 1950s. \n\nThe data in this Collection are the original NetCDF files provided by NOAA's National Centers for Environmental Information. This Collection contains gridded data for the following frequencies and time periods:\n\n- Annual, seasonal, and monthly normals\n - 100-year (1901\u20132000)\n - 30-year (1991\u20132020)\n - 15-year (2006\u20132020)\n- Daily normals\n - 30-year (1991\u20132020)\n - 15-year (2006\u20132020)\n\nFor most use cases, we recommend using the [`noaa-climate-normals-gridded`](https://planetarycomputer.microsoft.com/dataset/noaa-climate-normals-gridded) collection, which contains the same data in Cloud Optimized GeoTIFF format. The NetCDF files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate-normals,climatology,conus,noaa,noaa-climate-normals-netcdf,surface-observations,weather", "license": "proprietary", "title": "NOAA US Gridded Climate Normals (NetCDF)", "missionStartDate": "1901-01-01T00:00:00Z"}, "noaa-climate-normals-gridded": {"abstract": "The [NOAA Gridded United States Climate Normals](https://www.ncei.noaa.gov/products/land-based-station/us-climate-normals#tab-1027) provide a continuous grid of temperature and precipitation data across the contiguous United States (CONUS). The grids are derived from NOAA's [NClimGrid dataset](https://planetarycomputer.microsoft.com/dataset/group/noaa-nclimgrid), and resolutions (nominal 5x5 kilometer) and spatial extents (CONUS) therefore match that of NClimGrid. Monthly, seasonal, and annual gridded normals are computed from simple averages of the NClimGrid data and are provided for three time-periods: 1901\u20132000, 1991\u20132020, and 2006\u20132020. Daily gridded normals are smoothed to provide a continuous transition from one day to the next and are provided for two time-periods: 1991\u20132020, and 2006\u20132020.\n\nNOAA produces Climate Normals in accordance with the [World Meteorological Organization](https://public.wmo.int/en) (WMO), of which the United States is a member. The WMO requires each member nation to compute 30-year meteorological quantity averages at least every 30 years, and recommends an update each decade, in part to incorporate newer weather stations. The 1991\u20132020 U.S. Climate Normals are the latest in a series of decadal normals first produced in the 1950s. \n\nThis Collection contains gridded data for the following frequencies and time periods:\n\n- Annual, seasonal, and monthly normals\n - 100-year (1901\u20132000)\n - 30-year (1991\u20132020)\n - 15-year (2006\u20132020)\n- Daily normals\n - 30-year (1991\u20132020)\n - 15-year (2006\u20132020)\n\nThe data in this Collection have been converted from the original NetCDF format to Cloud Optimized GeoTIFFs (COGs). 
The source NetCDF files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n\n## STAC Metadata\n\nThe STAC items in this collection contain several custom fields that can be used to further filter the data.\n\n* `noaa_climate_normals:period`: Climate normal time period. This can be \"1901-2000\", \"1991-2020\", or \"2006-2020\".\n* `noaa_climate_normals:frequency`: Climate normal temporal interval (frequency). This can be \"daily\", \"monthly\", \"seasonal\", or \"annual\".\n* `noaa_climate_normals:time_index`: Time step index, e.g., month of year (1-12).\n\nThe `description` field of the assets varies by frequency. Using `prcp_norm` as an example, the descriptions are:\n\n* annual: \"Annual precipitation normals from monthly precipitation normal values\"\n* seasonal: \"Seasonal precipitation normals (WSSF) from monthly normals\"\n* monthly: \"Monthly precipitation normals from monthly precipitation values\"\n* daily: \"Precipitation normals from daily averages\"\n\nCheck the assets on individual items for the appropriate description.\n\nThe STAC keys for most assets consist of two abbreviations. A \"variable\":\n\n| Abbreviation | Description |\n| ------------ | ---------------------------------------- |\n| prcp | Precipitation over the time period |\n| tavg | Mean temperature over the time period |\n| tmax | Maximum temperature over the time period |\n| tmin | Minimum temperature over the time period |\n\nAnd an \"aggregation\":\n\n| Abbreviation | Description |\n| ------------ | ------------------------------------------------------------------------------ |\n| max | Maximum of the variable over the time period |\n| min | Minimum of the variable over the time period |\n| std | Standard deviation of the value over the time period |\n| flag | A count of the number of inputs (months, years, etc.) to calculate the normal |\n| norm | The normal for the variable over the time period |\n\nSo, for example, `prcp_max` for monthly data is the \"Maximum values of all input monthly precipitation normal values\".\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate-normals,climatology,conus,noaa,noaa-climate-normals-gridded,surface-observations,weather", "license": "proprietary", "title": "NOAA US Gridded Climate Normals (Cloud-Optimized GeoTIFF)", "missionStartDate": "1901-01-01T00:00:00Z"}, "aster-l1t": {"abstract": "The [ASTER](https://terra.nasa.gov/about/terra-instruments/aster) instrument, launched on-board NASA's [Terra](https://terra.nasa.gov/) satellite in 1999, provides multispectral images of the Earth at 15m-90m resolution. ASTER images provide information about land surface temperature, color, elevation, and mineral composition.\n\nThis dataset represents ASTER [L1T](https://lpdaac.usgs.gov/products/ast_l1tv003/) data from 2000-2006. L1T images have been terrain-corrected and rotated to a north-up UTM projection. 
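The custom STAC fields listed above for the gridded climate normals can be used directly as search filters. The following is a minimal sketch; it assumes the `pystac-client` package and the Planetary Computer STAC API endpoint, neither of which is named in the collection description itself.

```python
from pystac_client import Client

# Assumed STAC API endpoint hosting the collection.
catalog = Client.open("https://planetarycomputer.microsoft.com/api/stac/v1")

# Filter on the custom fields documented above.
search = catalog.search(
    collections=["noaa-climate-normals-gridded"],
    query={
        "noaa_climate_normals:period": {"eq": "1991-2020"},
        "noaa_climate_normals:frequency": {"eq": "monthly"},
    },
)

# Show the first matching item and its asset keys (e.g. prcp_norm, tmax_max).
for item in search.items():
    print(item.id, list(item.assets))
    break
```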
Images are in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\n", "instrument": "aster", "platform": null, "platformSerialIdentifier": "terra", "processingLevel": null, "keywords": "aster,aster-l1t,global,nasa,satellite,terra,usgs", "license": "proprietary", "title": "ASTER L1T", "missionStartDate": "2000-03-04T12:00:00Z"}, "cil-gdpcir-cc-by-sa": {"abstract": "The World Climate Research Programme's [6th Coupled Model Intercomparison Project (CMIP6)](https://www.wcrp-climate.org/wgcm-cmip/wgcm-cmip6) represents an enormous advance in the quality, detail, and scope of climate modeling.\n\nThe [Global Downscaled Projections for Climate Impacts Research](https://github.com/ClimateImpactLab/downscaleCMIP6) dataset makes this modeling more applicable to understanding the impacts of changes in the climate on humans and society with two key developments: trend-preserving bias correction and downscaling. In this dataset, the [Climate Impact Lab](https://impactlab.org) provides global, daily minimum and maximum air temperature at the surface (`tasmin` and `tasmax`) and daily cumulative surface precipitation (`pr`) corresponding to the CMIP6 historical, ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 scenarios for 25 global climate models on a 1/4-degree regular global grid.\n\n## Accessing the data\n\nGDPCIR data can be accessed on the Microsoft Planetary Computer. The dataset is made up of three collections, distinguished by data license:\n* [Public domain (CC0-1.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc0)\n* [Attribution (CC BY 4.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by)\n* [Attribution-ShareAlike (CC BY SA 4.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by-sa)\n\nEach modeling center with bias corrected and downscaled data in this collection falls into one of these license categories - see the [table below](/dataset/cil-gdpcir-cc-by-sa#available-institutions-models-and-scenarios-by-license-collection) to see which model is in each collection, and see the section below on [Citing, Licensing, and using data produced by this project](/dataset/cil-gdpcir-cc-by-sa#citing-licensing-and-using-data-produced-by-this-project) for citations and additional information about each license.\n\n## Data format & contents\n\nThe data is stored as partitioned zarr stores (see [https://zarr.readthedocs.io](https://zarr.readthedocs.io)), each of which includes thousands of data and metadata files covering the full time span of the experiment. Historical zarr stores contain just over 50 GB, while SSP zarr stores contain nearly 70 GB. Each store holds 32-bit floats, with dimensions time (daily datetime), lat (float latitude), and lon (float longitude). The data is chunked at 365-day intervals in time and 90-degree intervals in latitude and longitude. Therefore, each chunk is `(365, 360, 360)`, with each chunk occupying approximately 179 MB in memory.\n\nHistorical data is daily, excluding leap days, from Jan 1, 1950 to Dec 31, 2014; SSP data is daily, excluding leap days, from Jan 1, 2015 to either Dec 31, 2099 or Dec 31, 2100, depending on data availability in the source GCM.\n\nThe spatial domain covers all 0.25-degree grid cells, indexed by the grid center, with grid edges on the quarter-degree, using a -180 to 180 longitude convention. 
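Given that layout, a GDPCIR zarr store can be opened lazily with `xarray`. This is a minimal sketch under stated assumptions: `xarray` with the `zarr` and `fsspec` packages installed, and a placeholder store URL, since real store locations come from each collection's STAC assets rather than from this description.

```python
import xarray as xr

# Placeholder store URL: real store locations come from the
# collection's STAC item assets, not from this description.
STORE_URL = "https://example.blob.core.windows.net/gdpcir/model/ssp245/tasmax.zarr"

# Open lazily; only the chunks that are actually accessed are downloaded.
ds = xr.open_zarr(STORE_URL, consolidated=True)
print(ds["tasmax"].dims)  # expected: ('time', 'lat', 'lon')
print(ds.chunks)          # expected chunking near (365, 360, 360)
```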
Thus, the \u201clon\u201d coordinate extends from -179.875 to 179.875, and the \u201clat\u201d coordinate extends from -89.875 to 89.875, with intermediate values at each 0.25-degree increment between (e.g. -179.875, -179.625, -179.375, etc.).\n\n## Available institutions, models, and scenarios by license collection\n\n| Modeling institution | Source model | Available experiments | License collection |\n| -------------------- | ----------------- | ------------------------------------------ | ---------------------- |\n| CAS | FGOALS-g3 [^1] | SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| INM | INM-CM4-8 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| INM | INM-CM5-0 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| BCC | BCC-CSM2-MR | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| CMCC | CMCC-CM2-SR5 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0 |\n| CMCC | CMCC-ESM2 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0 |\n| CSIRO-ARCCSS | ACCESS-CM2 | SSP2-4.5 and SSP3-7.0 | CC-BY-4.0 |\n| CSIRO | ACCESS-ESM1-5 | SSP1-2.6, SSP2-4.5, and SSP3-7.0 | CC-BY-4.0 |\n| MIROC | MIROC-ES2L | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MIROC | MIROC6 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MOHC | HadGEM3-GC31-LL | SSP1-2.6, SSP2-4.5, and SSP5-8.5 | CC-BY-4.0 |\n| MOHC | UKESM1-0-LL | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MPI-M | MPI-ESM1-2-LR | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MPI-M/DKRZ [^2] | MPI-ESM1-2-HR | SSP1-2.6 and SSP5-8.5 | CC-BY-4.0 |\n| NCC | NorESM2-LM | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NCC | NorESM2-MM | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NOAA-GFDL | GFDL-CM4 | SSP2-4.5 and SSP5-8.5 | CC-BY-4.0 |\n| NOAA-GFDL | GFDL-ESM4 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NUIST | NESM3 | SSP1-2.6, SSP2-4.5, and SSP5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3 | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-AerChem | ssp370 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-CC | ssp245 and ssp585 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-Veg | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-Veg-LR | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| CCCma | CanESM5 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-SA-4.0 |\n\n*Notes:*\n\n[^1]: At the time of running, no ssp1-2.6 precipitation data was available. Therefore, we provide `tasmin` and `tasmax` for this model and experiment, but not `pr`. All other model/experiment combinations in the above table include all three variables.\n\n[^2]: The institution which ran MPI-ESM1-2-HR\u2019s historical (CMIP) simulations is `MPI-M`, while the future (ScenarioMIP) simulations were run by `DKRZ`. Therefore, the institution component of `MPI-ESM1-2-HR` filepaths differ between `historical` and `SSP` scenarios.\n\n## Project methods\n\nThis project makes use of statistical bias correction and downscaling algorithms, which are specifically designed to accurately represent changes in the extremes. For this reason, we selected Quantile Delta Mapping (QDM), following the method introduced by [Cannon et al. 
(2015)](https://doi.org/10.1175/JCLI-D-14-00754.1), which preserves quantile-specific trends from the GCM while fitting the full distribution for a given day-of-year to a reference dataset (ERA5).\n\nWe then introduce a similar method tailored to increase spatial resolution while preserving extreme behavior, Quantile-Preserving Localized-Analog Downscaling (QPLAD).\n\nTogether, these methods provide a robust means to handle both the central and tail behavior seen in climate model output, while aligning the full distribution to a state-of-the-art reanalysis dataset and providing the spatial granularity needed to study surface impacts.\n\nFor further documentation, see [Global downscaled projections for climate impacts research (GDPCIR): preserving extremes for modeling future climate impacts](https://egusphere.copernicus.org/preprints/2023/egusphere-2022-1513/) (EGUsphere, 2022 [preprint]).\n\n## Citing, licensing, and using data produced by this project\n\nProjects making use of the data produced as part of the Climate Impact Lab Global Downscaled Projections for Climate Impacts Research (CIL GDPCIR) project are requested to cite both this project and the source datasets from which these results are derived. Additionally, the use of data derived from some GCMs *requires* citations, and some modeling centers impose licensing restrictions & requirements on derived works. See each GCM's license info in the links below for more information.\n\n### CIL GDPCIR\n\nUsers are requested to cite this project in derived works. Our method documentation paper may be cited using the following:\n\n> Gergel, D. R., Malevich, S. B., McCusker, K. E., Tenezakis, E., Delgado, M. T., Fish, M. A., and Kopp, R. E.: Global downscaled projections for climate impacts research (GDPCIR): preserving extremes for modeling future climate impacts, EGUsphere [preprint], https://doi.org/10.5194/egusphere-2022-1513, 2023. \n\nThe code repository may be cited using the following:\n\n> Diana Gergel, Kelly McCusker, Brewster Malevich, Emile Tenezakis, Meredith Fish, Michael Delgado (2022). ClimateImpactLab/downscaleCMIP6: (v1.0.0). Zenodo. https://doi.org/10.5281/zenodo.6403794\n\n### ERA5\n\nAdditionally, we request you cite the historical dataset used in bias correction and downscaling, ERA5. See the [ECMWF guide to citing a dataset on the Climate Data Store](https://confluence.ecmwf.int/display/CKB/How+to+acknowledge+and+cite+a+Climate+Data+Store+%28CDS%29+catalogue+entry+and+the+data+published+as+part+of+it):\n\n> Hersbach, H, et al. The ERA5 global reanalysis. Q J R Meteorol Soc.2020; 146: 1999\u20132049. DOI: [10.1002/qj.3803](https://doi.org/10.1002/qj.3803)\n>\n> Mu\u00f1oz Sabater, J., (2019): ERA5-Land hourly data from 1981 to present. Copernicus Climate Change Service (C3S) Climate Data Store (CDS). (Accessed on June 4, 2021), DOI: [10.24381/cds.e2161bac](https://doi.org/10.24381/cds.e2161bac)\n>\n> Mu\u00f1oz Sabater, J., (2021): ERA5-Land hourly data from 1950 to 1980. Copernicus Climate Change Service (C3S) Climate Data Store (CDS). (Accessed on June 4, 2021), DOI: [10.24381/cds.e2161bac](https://doi.org/10.24381/cds.e2161bac)\n\n### GCM-specific citations & licenses\n\nThe CMIP6 simulation data made available through the Earth System Grid Federation (ESGF) are subject to Creative Commons [BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/) or [BY-NC-SA 4.0](https://creativecommons.org/licenses/by-nc-sa/4.0/) licenses. 
The Climate Impact Lab has reached out to each of the modeling institutions to request waivers from these terms so the outputs of this project may be used with fewer restrictions, and has been granted permission to release the data using the licenses listed here.\n\n#### Public Domain Datasets\n\nThe following bias corrected and downscaled model simulations are available in the public domain using a [CC0 1.0 Universal Public Domain Declaration](https://creativecommons.org/publicdomain/zero/1.0/). Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc0.\n\n* **FGOALS-g3**\n\n License description: [data_licenses/FGOALS-g3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/FGOALS-g3.txt)\n\n CMIP Citation:\n\n > Li, Lijuan **(2019)**. *CAS FGOALS-g3 model output prepared for CMIP6 CMIP*. Version 20190826. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1783\n\n ScenarioMIP Citation:\n\n > Li, Lijuan **(2019)**. *CAS FGOALS-g3 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190818; SSP2-4.5 version 20190818; SSP3-7.0 version 20190820; SSP5-8.5 tasmax version 20190819; SSP5-8.5 tasmin version 20190819; SSP5-8.5 pr version 20190818. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2056\n\n\n* **INM-CM4-8**\n\n License description: [data_licenses/INM-CM4-8.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/INM-CM4-8.txt)\n\n CMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM4-8 model output prepared for CMIP6 CMIP*. Version 20190530. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1422\n\n ScenarioMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM4-8 model output prepared for CMIP6 ScenarioMIP*. Version 20190603. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.12321\n\n\n* **INM-CM5-0**\n\n License description: [data_licenses/INM-CM5-0.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/INM-CM5-0.txt)\n\n CMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM5-0 model output prepared for CMIP6 CMIP*. Version 20190610. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1423\n\n ScenarioMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM5-0 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190619; SSP2-4.5 version 20190619; SSP3-7.0 version 20190618; SSP5-8.5 version 20190724. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.12322\n\n\n#### CC-BY-4.0\n\nThe following bias corrected and downscaled model simulations are licensed under a [Creative Commons Attribution 4.0 International License](https://creativecommons.org/licenses/by/4.0/). 
Note that this license requires citation of the source model output (included here). Please see https://creativecommons.org/licenses/by/4.0/ for more information. Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by.\n\n* **ACCESS-CM2**\n\n License description: [data_licenses/ACCESS-CM2.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/ACCESS-CM2.txt)\n\n CMIP Citation:\n\n > Dix, Martin; Bi, Doahua; Dobrohotoff, Peter; Fiedler, Russell; Harman, Ian; Law, Rachel; Mackallah, Chloe; Marsland, Simon; O'Farrell, Siobhan; Rashid, Harun; Srbinovsky, Jhan; Sullivan, Arnold; Trenham, Claire; Vohralik, Peter; Watterson, Ian; Williams, Gareth; Woodhouse, Matthew; Bodman, Roger; Dias, Fabio Boeira; Domingues, Catia; Hannah, Nicholas; Heerdegen, Aidan; Savita, Abhishek; Wales, Scott; Allen, Chris; Druken, Kelsey; Evans, Ben; Richards, Clare; Ridzwan, Syazwan Mohamed; Roberts, Dale; Smillie, Jon; Snow, Kate; Ward, Marshall; Yang, Rui **(2019)**. *CSIRO-ARCCSS ACCESS-CM2 model output prepared for CMIP6 CMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2281\n\n ScenarioMIP Citation:\n\n > Dix, Martin; Bi, Doahua; Dobrohotoff, Peter; Fiedler, Russell; Harman, Ian; Law, Rachel; Mackallah, Chloe; Marsland, Simon; O'Farrell, Siobhan; Rashid, Harun; Srbinovsky, Jhan; Sullivan, Arnold; Trenham, Claire; Vohralik, Peter; Watterson, Ian; Williams, Gareth; Woodhouse, Matthew; Bodman, Roger; Dias, Fabio Boeira; Domingues, Catia; Hannah, Nicholas; Heerdegen, Aidan; Savita, Abhishek; Wales, Scott; Allen, Chris; Druken, Kelsey; Evans, Ben; Richards, Clare; Ridzwan, Syazwan Mohamed; Roberts, Dale; Smillie, Jon; Snow, Kate; Ward, Marshall; Yang, Rui **(2019)**. *CSIRO-ARCCSS ACCESS-CM2 model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2285\n\n\n* **ACCESS-ESM1-5**\n\n License description: [data_licenses/ACCESS-ESM1-5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/ACCESS-ESM1-5.txt)\n\n CMIP Citation:\n\n > Ziehn, Tilo; Chamberlain, Matthew; Lenton, Andrew; Law, Rachel; Bodman, Roger; Dix, Martin; Wang, Yingping; Dobrohotoff, Peter; Srbinovsky, Jhan; Stevens, Lauren; Vohralik, Peter; Mackallah, Chloe; Sullivan, Arnold; O'Farrell, Siobhan; Druken, Kelsey **(2019)**. *CSIRO ACCESS-ESM1.5 model output prepared for CMIP6 CMIP*. Version 20191115. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2288\n\n ScenarioMIP Citation:\n\n > Ziehn, Tilo; Chamberlain, Matthew; Lenton, Andrew; Law, Rachel; Bodman, Roger; Dix, Martin; Wang, Yingping; Dobrohotoff, Peter; Srbinovsky, Jhan; Stevens, Lauren; Vohralik, Peter; Mackallah, Chloe; Sullivan, Arnold; O'Farrell, Siobhan; Druken, Kelsey **(2019)**. *CSIRO ACCESS-ESM1.5 model output prepared for CMIP6 ScenarioMIP*. Version 20191115. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2291\n\n\n* **BCC-CSM2-MR**\n\n License description: [data_licenses/BCC-CSM2-MR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/BCC-CSM2-MR.txt)\n\n CMIP Citation:\n\n > Xin, Xiaoge; Zhang, Jie; Zhang, Fang; Wu, Tongwen; Shi, Xueli; Li, Jianglong; Chu, Min; Liu, Qianxia; Yan, Jinghui; Ma, Qiang; Wei, Min **(2018)**. *BCC BCC-CSM2MR model output prepared for CMIP6 CMIP*. Version 20181126. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1725\n\n ScenarioMIP Citation:\n\n > Xin, Xiaoge; Wu, Tongwen; Shi, Xueli; Zhang, Fang; Li, Jianglong; Chu, Min; Liu, Qianxia; Yan, Jinghui; Ma, Qiang; Wei, Min **(2019)**. *BCC BCC-CSM2MR model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190315; SSP2-4.5 version 20190318; SSP3-7.0 version 20190318; SSP5-8.5 version 20190318. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1732\n\n\n* **CMCC-CM2-SR5**\n\n License description: [data_licenses/CMCC-CM2-SR5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CMCC-CM2-SR5.txt)\n\n CMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele **(2020)**. *CMCC CMCC-CM2-SR5 model output prepared for CMIP6 CMIP*. Version 20200616. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1362\n\n ScenarioMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele **(2020)**. *CMCC CMCC-CM2-SR5 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20200717; SSP2-4.5 version 20200617; SSP3-7.0 version 20200622; SSP5-8.5 version 20200622. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1365\n\n\n* **CMCC-ESM2**\n\n License description: [data_licenses/CMCC-ESM2.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CMCC-ESM2.txt)\n\n CMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele; Butensch\u00f6n, Momme **(2021)**. *CMCC CMCC-ESM2 model output prepared for CMIP6 CMIP*. Version 20210114. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.13164\n\n ScenarioMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele; Butensch\u00f6n, Momme **(2021)**. *CMCC CMCC-ESM2 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20210126; SSP2-4.5 version 20210129; SSP3-7.0 version 20210202; SSP5-8.5 version 20210126. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.13168\n\n\n* **EC-Earth3-AerChem**\n\n License description: [data_licenses/EC-Earth3-AerChem.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-AerChem.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-AerChem model output prepared for CMIP6 CMIP*. Version 20200624. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.639\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-AerChem model output prepared for CMIP6 ScenarioMIP*. Version 20200827. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.724\n\n\n* **EC-Earth3-CC**\n\n License description: [data_licenses/EC-Earth3-CC.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-CC.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth-3-CC model output prepared for CMIP6 CMIP*. Version 20210113. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.640\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2021)**. *EC-Earth-Consortium EC-Earth3-CC model output prepared for CMIP6 ScenarioMIP*. Version 20210113. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.15327\n\n\n* **EC-Earth3-Veg-LR**\n\n License description: [data_licenses/EC-Earth3-Veg-LR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-Veg-LR.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. 
*EC-Earth-Consortium EC-Earth3-Veg-LR model output prepared for CMIP6 CMIP*. Version 20200217. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.643\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-Veg-LR model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20201201; SSP2-4.5 version 20201123; SSP3-7.0 version 20201123; SSP5-8.5 version 20201201. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.728\n\n\n* **EC-Earth3-Veg**\n\n License description: [data_licenses/EC-Earth3-Veg.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-Veg.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3-Veg model output prepared for CMIP6 CMIP*. Version 20200225. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.642\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3-Veg model output prepared for CMIP6 ScenarioMIP*. Version 20200225. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.727\n\n\n* **EC-Earth3**\n\n License description: [data_licenses/EC-Earth3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3 model output prepared for CMIP6 CMIP*. Version 20200310. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.181\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3 model output prepared for CMIP6 ScenarioMIP*. Version 20200310. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.251\n\n\n* **GFDL-CM4**\n\n License description: [data_licenses/GFDL-CM4.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/GFDL-CM4.txt)\n\n CMIP Citation:\n\n > Guo, Huan; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Bushuk, Mitchell; Dunne, Krista A.; Dussin, Raphael; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Reichl, Brandon G; Schwarzkopf, Daniel M; Seman, Charles J; Shao, Andrew; Silvers, Levi; Wyman, Bruce; Yan, Xiaoqin; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Held, Isaac M; Krasting, John P.; Horowitz, Larry W.; Milly, P.C.D; Shevliakova, Elena; Winton, Michael; Zhao, Ming; Zhang, Rong **(2018)**. *NOAA-GFDL GFDL-CM4 model output*. Version 20180701. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1402\n\n ScenarioMIP Citation:\n\n > Guo, Huan; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Dunne, Krista A.; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Schwarzkopf, Daniel M; Seman, Charles J; Shao, Andrew; Silvers, Levi; Wyman, Bruce; Yan, Xiaoqin; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Held, Isaac M; Krasting, John P.; Horowitz, Larry W.; Milly, Chris; Shevliakova, Elena; Winton, Michael; Zhao, Ming; Zhang, Rong **(2018)**. *NOAA-GFDL GFDL-CM4 model output prepared for CMIP6 ScenarioMIP*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.9242\n\n\n* **GFDL-ESM4**\n\n License description: [data_licenses/GFDL-ESM4.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/GFDL-ESM4.txt)\n\n CMIP Citation:\n\n > Krasting, John P.; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Dunne, Krista A.; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Reichl, Brandon G; Schwarzkopf, Daniel M; Seman, Charles J; Silvers, Levi; Wyman, Bruce; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Dussin, Raphael; Guo, Huan; He, Jian; Held, Isaac M; Horowitz, Larry W.; Lin, Pu; Milly, P.C.D; Shevliakova, Elena; Stock, Charles; Winton, Michael; Wittenberg, Andrew T.; Xie, Yuanyu; Zhao, Ming **(2018)**. *NOAA-GFDL GFDL-ESM4 model output prepared for CMIP6 CMIP*. Version 20190726. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1407\n\n ScenarioMIP Citation:\n\n > John, Jasmin G; Blanton, Chris; McHugh, Colleen; Radhakrishnan, Aparna; Rand, Kristopher; Vahlenkamp, Hans; Wilson, Chandin; Zadeh, Niki T.; Dunne, John P.; Dussin, Raphael; Horowitz, Larry W.; Krasting, John P.; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Ploshay, Jeffrey; Shevliakova, Elena; Silvers, Levi; Stock, Charles; Winton, Michael; Zeng, Yujin **(2018)**. *NOAA-GFDL GFDL-ESM4 model output prepared for CMIP6 ScenarioMIP*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1414\n\n\n* **HadGEM3-GC31-LL**\n\n License description: [data_licenses/HadGEM3-GC31-LL.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/HadGEM3-GC31-LL.txt)\n\n CMIP Citation:\n\n > Ridley, Jeff; Menary, Matthew; Kuhlbrodt, Till; Andrews, Martin; Andrews, Tim **(2018)**. *MOHC HadGEM3-GC31-LL model output prepared for CMIP6 CMIP*. Version 20190624. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.419\n\n ScenarioMIP Citation:\n\n > Good, Peter **(2019)**. *MOHC HadGEM3-GC31-LL model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20200114; SSP2-4.5 version 20190908; SSP5-8.5 version 20200114. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.10845\n\n\n* **MIROC-ES2L**\n\n License description: [data_licenses/MIROC-ES2L.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MIROC-ES2L.txt)\n\n CMIP Citation:\n\n > Hajima, Tomohiro; Abe, Manabu; Arakawa, Osamu; Suzuki, Tatsuo; Komuro, Yoshiki; Ogura, Tomoo; Ogochi, Koji; Watanabe, Michio; Yamamoto, Akitomo; Tatebe, Hiroaki; Noguchi, Maki A.; Ohgaito, Rumi; Ito, Akinori; Yamazaki, Dai; Ito, Akihiko; Takata, Kumiko; Watanabe, Shingo; Kawamiya, Michio; Tachiiri, Kaoru **(2019)**. *MIROC MIROC-ES2L model output prepared for CMIP6 CMIP*. Version 20191129. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.902\n\n ScenarioMIP Citation:\n\n > Tachiiri, Kaoru; Abe, Manabu; Hajima, Tomohiro; Arakawa, Osamu; Suzuki, Tatsuo; Komuro, Yoshiki; Ogochi, Koji; Watanabe, Michio; Yamamoto, Akitomo; Tatebe, Hiroaki; Noguchi, Maki A.; Ohgaito, Rumi; Ito, Akinori; Yamazaki, Dai; Ito, Akihiko; Takata, Kumiko; Watanabe, Shingo; Kawamiya, Michio **(2019)**. *MIROC MIROC-ES2L model output prepared for CMIP6 ScenarioMIP*. Version 20200318. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.936\n\n\n* **MIROC6**\n\n License description: [data_licenses/MIROC6.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MIROC6.txt)\n\n CMIP Citation:\n\n > Tatebe, Hiroaki; Watanabe, Masahiro **(2018)**. *MIROC MIROC6 model output prepared for CMIP6 CMIP*. Version 20191016. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.881\n\n ScenarioMIP Citation:\n\n > Shiogama, Hideo; Abe, Manabu; Tatebe, Hiroaki **(2019)**. *MIROC MIROC6 model output prepared for CMIP6 ScenarioMIP*. Version 20191016. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.898\n\n\n* **MPI-ESM1-2-HR**\n\n License description: [data_licenses/MPI-ESM1-2-HR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MPI-ESM1-2-HR.txt)\n\n CMIP Citation:\n\n > Jungclaus, Johann; Bittner, Matthias; Wieners, Karl-Hermann; Wachsmann, Fabian; Schupfner, Martin; Legutke, Stephanie; Giorgetta, Marco; Reick, Christian; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Esch, Monika; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-HR model output prepared for CMIP6 CMIP*. Version 20190710. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.741\n\n ScenarioMIP Citation:\n\n > Schupfner, Martin; Wieners, Karl-Hermann; Wachsmann, Fabian; Steger, Christian; Bittner, Matthias; Jungclaus, Johann; Fr\u00fch, Barbara; Pankatz, Klaus; Giorgetta, Marco; Reick, Christian; Legutke, Stephanie; Esch, Monika; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *DKRZ MPI-ESM1.2-HR model output prepared for CMIP6 ScenarioMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2450\n\n\n* **MPI-ESM1-2-LR**\n\n License description: [data_licenses/MPI-ESM1-2-LR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MPI-ESM1-2-LR.txt)\n\n CMIP Citation:\n\n > Wieners, Karl-Hermann; Giorgetta, Marco; Jungclaus, Johann; Reick, Christian; Esch, Monika; Bittner, Matthias; Legutke, Stephanie; Schupfner, Martin; Wachsmann, Fabian; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-LR model output prepared for CMIP6 CMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.742\n\n ScenarioMIP Citation:\n\n > Wieners, Karl-Hermann; Giorgetta, Marco; Jungclaus, Johann; Reick, Christian; Esch, Monika; Bittner, Matthias; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-LR model output prepared for CMIP6 ScenarioMIP*. Version 20190710. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.793\n\n\n* **NESM3**\n\n License description: [data_licenses/NESM3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NESM3.txt)\n\n CMIP Citation:\n\n > Cao, Jian; Wang, Bin **(2019)**. *NUIST NESMv3 model output prepared for CMIP6 CMIP*. Version 20190812. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2021\n\n ScenarioMIP Citation:\n\n > Cao, Jian **(2019)**. *NUIST NESMv3 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190806; SSP2-4.5 version 20190805; SSP5-8.5 version 20190811. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2027\n\n\n* **NorESM2-LM**\n\n License description: [data_licenses/NorESM2-LM.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NorESM2-LM.txt)\n\n CMIP Citation:\n\n > Seland, \u00d8yvind; Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-LM model output prepared for CMIP6 CMIP*. Version 20190815. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.502\n\n ScenarioMIP Citation:\n\n > Seland, \u00d8yvind; Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-LM model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.604\n\n\n* **NorESM2-MM**\n\n License description: [data_licenses/NorESM2-MM.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NorESM2-MM.txt)\n\n CMIP Citation:\n\n > Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Seland, \u00d8yvind; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-MM model output prepared for CMIP6 CMIP*. Version 20191108. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.506\n\n ScenarioMIP Citation:\n\n > Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Seland, \u00d8yvind; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-MM model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.608\n\n\n* **UKESM1-0-LL**\n\n License description: [data_licenses/UKESM1-0-LL.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/UKESM1-0-LL.txt)\n\n CMIP Citation:\n\n > Tang, Yongming; Rumbold, Steve; Ellis, Rich; Kelley, Douglas; Mulcahy, Jane; Sellar, Alistair; Walton, Jeremy; Jones, Colin **(2019)**. *MOHC UKESM1.0-LL model output prepared for CMIP6 CMIP*. Version 20190627. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1569\n\n ScenarioMIP Citation:\n\n > Good, Peter; Sellar, Alistair; Tang, Yongming; Rumbold, Steve; Ellis, Rich; Kelley, Douglas; Kuhlbrodt, Till; Walton, Jeremy **(2019)**. *MOHC UKESM1.0-LL model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190708; SSP2-4.5 version 20190715; SSP3-7.0 version 20190726; SSP5-8.5 version 20190726. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1567\n\n\n#### CC-BY-SA-4.0\n\nThe following bias corrected and downscaled model simulations are licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/). Note that this license requires citation of the source model output (included here) and requires that derived works be shared under the same license. Please see https://creativecommons.org/licenses/by-sa/4.0/ for more information. Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by-sa.\n\n* **CanESM5**\n\n License description: [data_licenses/CanESM5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CanESM5.txt)\n\n CMIP Citation:\n\n > Swart, Neil Cameron; Cole, Jason N.S.; Kharin, Viatcheslav V.; Lazare, Mike; Scinocca, John F.; Gillett, Nathan P.; Anstey, James; Arora, Vivek; Christian, James R.; Jiao, Yanjun; Lee, Warren G.; Majaess, Fouad; Saenko, Oleg A.; Seiler, Christian; Seinen, Clint; Shao, Andrew; Solheim, Larry; von Salzen, Knut; Yang, Duo; Winter, Barbara; Sigmond, Michael **(2019)**. *CCCma CanESM5 model output prepared for CMIP6 CMIP*. Version 20190429. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1303\n\n ScenarioMIP Citation:\n\n > Swart, Neil Cameron; Cole, Jason N.S.; Kharin, Viatcheslav V.; Lazare, Mike; Scinocca, John F.; Gillett, Nathan P.; Anstey, James; Arora, Vivek; Christian, James R.; Jiao, Yanjun; Lee, Warren G.; Majaess, Fouad; Saenko, Oleg A.; Seiler, Christian; Seinen, Clint; Shao, Andrew; Solheim, Larry; von Salzen, Knut; Yang, Duo; Winter, Barbara; Sigmond, Michael **(2019)**. *CCCma CanESM5 model output prepared for CMIP6 ScenarioMIP*. Version 20190429. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1317\n\n## Acknowledgements\n\nThis work is the result of many years' worth of work by members of the [Climate Impact Lab](https://impactlab.org), but would not have been possible without many contributions from across the wider scientific and computing communities.\n\nSpecifically, we would like to acknowledge the World Climate Research Programme's Working Group on Coupled Modeling, which is responsible for CMIP, and we would like to thank the climate modeling groups for producing and making their model output available. We would particularly like to thank the modeling institutions whose results are included as an input to this repository (listed above) for their contributions to the CMIP6 project and for responding to and granting our requests for license waivers.\n\nWe would also like to thank Lamont-Doherty Earth Observatory, the [Pangeo Consortium](https://github.com/pangeo-data) (and especially the [ESGF Cloud Data Working Group](https://pangeo-data.github.io/pangeo-cmip6-cloud/#)) and Google Cloud and the Google Public Datasets program for making the [CMIP6 Google Cloud collection](https://console.cloud.google.com/marketplace/details/noaa-public/cmip6) possible. In particular, we're extremely grateful to [Ryan Abernathey](https://github.com/rabernat), [Naomi Henderson](https://github.com/naomi-henderson), [Charles Blackmon-Luca](https://github.com/charlesbluca), [Aparna Radhakrishnan](https://github.com/aradhakrishnanGFDL), [Julius Busecke](https://github.com/jbusecke), and [Charles Stern](https://github.com/cisaacstern) for the huge amount of work they've done to translate the ESGF CMIP6 netCDF archives into consistently formatted, analysis-ready zarr stores on Google Cloud.\n\nWe're also grateful to the [xclim developers](https://github.com/Ouranosinc/xclim/graphs/contributors) ([DOI: 10.5281/zenodo.2795043](https://doi.org/10.5281/zenodo.2795043)), in particular [Pascal Bourgault](https://github.com/aulemahal), [David Huard](https://github.com/huard), and [Travis Logan](https://github.com/tlogan2000), for implementing the QDM bias correction method in the xclim python package, supporting our QPLAD implementation in the package, and ongoing support in integrating dask into downscaling workflows. For method advice and useful conversations, we would like to thank Keith Dixon, Dennis Adams-Smith, and [Joe Hamman](https://github.com/jhamman).\n\n## Financial support\n\nThis research has been supported by The Rockefeller Foundation and the Microsoft AI for Earth Initiative.\n\n## Additional links:\n\n* CIL GDPCIR project homepage: [github.com/ClimateImpactLab/downscaleCMIP6](https://github.com/ClimateImpactLab/downscaleCMIP6)\n* Project listing on zenodo: https://doi.org/10.5281/zenodo.6403794\n* Climate Impact Lab homepage: [impactlab.org](https://impactlab.org)", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "cil-gdpcir-cc-by-sa,climate-impact-lab,cmip6,precipitation,rhodium-group,temperature", "license": "CC-BY-SA-4.0", "title": "CIL Global Downscaled Projections for Climate Impacts Research (CC-BY-SA-4.0)", "missionStartDate": "1950-01-01T00:00:00Z"}, "naip": {"abstract": "The [National Agriculture Imagery Program](https://www.fsa.usda.gov/programs-and-services/aerial-photography/imagery-programs/naip-imagery/) (NAIP) \nprovides U.S.-wide, high-resolution aerial imagery, with four spectral bands (R, G, B, IR). 
\nNAIP is administered by the [Aerial Field Photography Office](https://www.fsa.usda.gov/programs-and-services/aerial-photography/) (AFPO) \nwithin the [US Department of Agriculture](https://www.usda.gov/) (USDA). \nData are captured at least once every three years for each state. \nThis dataset represents NAIP data from 2010-present, in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\nYou can visualize the coverage of current and past collections [here](https://naip-usdaonline.hub.arcgis.com/). \n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "aerial,afpo,agriculture,imagery,naip,united-states,usda", "license": "proprietary", "title": "NAIP: National Agriculture Imagery Program", "missionStartDate": "2010-01-01T00:00:00Z"}, "io-lulc-9-class": {"abstract": "__Note__: _A new version of this item is available for your use. This mature version of the map remains available for use in existing applications. This item will be retired in December 2024. There is 2023 data available in the newer [9-class v2 dataset](https://planetarycomputer.microsoft.com/dataset/io-lulc-annual-v02)._\n\nTime series of annual global maps of land use and land cover (LULC). It currently has data from 2017-2022. The maps are derived from ESA Sentinel-2 imagery at 10m resolution. Each map is a composite of LULC predictions for 9 classes throughout the year in order to generate a representative snapshot of each year.\n\nThis dataset was generated by [Impact Observatory](http://impactobservatory.com/), who used billions of human-labeled pixels (curated by the National Geographic Society) to train a deep learning model for land classification. The global map was produced by applying this model to the Sentinel-2 annual scene collections on the Planetary Computer. Each of the maps has an assessed average accuracy of over 75%.\n\nThis map uses an updated model from the [10-class model](https://planetarycomputer.microsoft.com/dataset/io-lulc) and combines Grass (formerly class 3) and Scrub (formerly class 6) into a single Rangeland class (class 11). The original Esri 2020 Land Cover collection uses 10 classes (Grass and Scrub separate) and an older version of the underlying deep learning model. The Esri 2020 Land Cover map was also produced by Impact Observatory. The map remains available for use in existing applications. New applications should use the updated version of 2020 once it is available in this collection, especially when using data from multiple years of this time series, to ensure consistent classification.\n\nAll years are available under a Creative Commons BY-4.0 license.", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "global,io-lulc-9-class,land-cover,land-use,sentinel", "license": "CC-BY-4.0", "title": "10m Annual Land Use Land Cover (9-class) V1", "missionStartDate": "2017-01-01T00:00:00Z"}, "io-biodiversity": {"abstract": "Generated by [Impact Observatory](https://www.impactobservatory.com/), in collaboration with [Vizzuality](https://www.vizzuality.com/), these datasets estimate terrestrial Biodiversity Intactness as 100-meter gridded maps for the years 2017-2020.\n\nMaps depicting the intactness of global biodiversity have become a critical tool for spatial planning and management, monitoring the extent of biodiversity across Earth, and identifying critical remaining intact habitat. 
Yet, these maps are often years out of date by the time they are available to scientists and policy-makers. The datasets in this STAC Collection build on past studies that map Biodiversity Intactness using the [PREDICTS database](https://onlinelibrary.wiley.com/doi/full/10.1002/ece3.2579) of spatially referenced observations of biodiversity across 32,000 sites from over 750 studies. The approach differs from previous work by modeling the relationship between observed biodiversity metrics and contemporary, global, geospatial layers of human pressures, with the intention of providing a high resolution monitoring product into the future.\n\nBiodiversity intactness is estimated as a combination of two metrics: Abundance, the quantity of individuals, and Compositional Similarity, how similar the composition of species is to an intact baseline. Linear mixed effects models are fit to estimate the predictive capacity of spatial datasets of human pressures on each of these metrics and project results spatially across the globe. These methods, as well as comparisons to other leading datasets and guidance on interpreting results, are further explained in a methods [white paper](https://ai4edatasetspublicassets.blob.core.windows.net/assets/pdfs/io-biodiversity/Biodiversity_Intactness_whitepaper.pdf) entitled \u201cGlobal 100m Projections of Biodiversity Intactness for the years 2017-2020.\u201d\n\nAll years are available under a Creative Commons BY-4.0 license.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "biodiversity,global,io-biodiversity", "license": "CC-BY-4.0", "title": "Biodiversity Intactness", "missionStartDate": "2017-01-01T00:00:00Z"}, "noaa-cdr-sea-surface-temperature-whoi": {"abstract": "The Sea Surface Temperature-Woods Hole Oceanographic Institution (WHOI) Climate Data Record (CDR) is one of three CDRs which combine to form the NOAA Ocean Surface Bundle (OSB) CDR. The resultant sea surface temperature (SST) data are produced through modeling the diurnal variability in combination with AVHRR SST observations. The final record is output to a 3-hourly 0.25\u00b0 resolution grid over the global ice-free oceans from January 1988\u2014present.\n\nThese Cloud Optimized GeoTIFFs (COGs) were created from NetCDF files which are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\nFor the NetCDF files, see collection `noaa-cdr-sea-surface-temperature-whoi-netcdf`.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,global,noaa,noaa-cdr-sea-surface-temperature-whoi,ocean,temperature", "license": "proprietary", "title": "Sea Surface Temperature - WHOI CDR", "missionStartDate": "1988-01-01T00:00:00Z"}, "noaa-cdr-ocean-heat-content": {"abstract": "The Ocean Heat Content Climate Data Record (CDR) is a set of ocean heat content anomaly (OHCA) time-series for 1955-present on 3-monthly, yearly, and pentadal (five-yearly) scales. This CDR quantifies ocean heat content change over time, which is an essential metric for understanding climate change and the Earth's energy budget. 
It provides time-series for multiple depth ranges in the global ocean and each of the major basins (Atlantic, Pacific, and Indian) divided by hemisphere (Northern, Southern).\n\nThese Cloud Optimized GeoTIFFs (COGs) were created from NetCDF files which are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\nFor the NetCDF files, see collection `noaa-cdr-ocean-heat-content-netcdf`.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,global,noaa,noaa-cdr-ocean-heat-content,ocean,temperature", "license": "proprietary", "title": "Global Ocean Heat Content CDR", "missionStartDate": "1972-03-01T00:00:00Z"}, "cil-gdpcir-cc0": {"abstract": "The World Climate Research Programme's [6th Coupled Model Intercomparison Project (CMIP6)](https://www.wcrp-climate.org/wgcm-cmip/wgcm-cmip6) represents an enormous advance in the quality, detail, and scope of climate modeling.\n\nThe [Global Downscaled Projections for Climate Impacts Research](https://github.com/ClimateImpactLab/downscaleCMIP6) dataset makes this modeling more applicable to understanding the impacts of changes in the climate on humans and society with two key developments: trend-preserving bias correction and downscaling. In this dataset, the [Climate Impact Lab](https://impactlab.org) provides global, daily minimum and maximum air temperature at the surface (`tasmin` and `tasmax`) and daily cumulative surface precipitation (`pr`) corresponding to the CMIP6 historical, ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 scenarios for 25 global climate models on a 1/4-degree regular global grid.\n\n## Accessing the data\n\nGDPCIR data can be accessed on the Microsoft Planetary Computer. The dataset is made up of three collections, distinguished by data license:\n* [Public domain (CC0-1.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc0)\n* [Attribution (CC BY 4.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by)\n\nEach modeling center with bias corrected and downscaled data in this collection falls into one of these license categories - see the [table below](/dataset/cil-gdpcir-cc0#available-institutions-models-and-scenarios-by-license-collection) to see which model is in each collection, and see the section below on [Citing, Licensing, and using data produced by this project](/dataset/cil-gdpcir-cc0#citing-licensing-and-using-data-produced-by-this-project) for citations and additional information about each license.\n\n## Data format & contents\n\nThe data is stored as partitioned zarr stores (see [https://zarr.readthedocs.io](https://zarr.readthedocs.io)), each of which includes thousands of data and metadata files covering the full time span of the experiment. Historical zarr stores contain just over 50 GB, while SSP zarr stores contain nearly 70 GB. Each variable is stored as a 32-bit float, with dimensions time (daily datetime), lat (float latitude), and lon (float longitude). The data is chunked in intervals of 365 days and 90 degrees of latitude and longitude. 
Therefore, each chunk is `(365, 360, 360)`, occupying approximately 180 MB in memory.\n\nHistorical data is daily, excluding leap days, from Jan 1, 1950 to Dec 31, 2014; SSP data is daily, excluding leap days, from Jan 1, 2015 to either Dec 31, 2099 or Dec 31, 2100, depending on data availability in the source GCM.\n\nThe spatial domain covers all 0.25-degree grid cells, indexed by the grid center, with grid edges on the quarter-degree, using a -180 to 180 longitude convention. Thus, the \u201clon\u201d coordinate extends from -179.875 to 179.875, and the \u201clat\u201d coordinate extends from -89.875 to 89.875, with intermediate values at each 0.25-degree increment between (e.g. -179.875, -179.625, -179.375, etc.).\n\n## Available institutions, models, and scenarios by license collection\n\n| Modeling institution | Source model | Available experiments | License collection |\n| -------------------- | ----------------- | ------------------------------------------ | ---------------------- |\n| CAS | FGOALS-g3 [^1] | SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| INM | INM-CM4-8 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| INM | INM-CM5-0 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| BCC | BCC-CSM2-MR | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| CMCC | CMCC-CM2-SR5 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0 |\n| CMCC | CMCC-ESM2 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0 |\n| CSIRO-ARCCSS | ACCESS-CM2 | SSP2-4.5 and SSP3-7.0 | CC-BY-4.0 |\n| CSIRO | ACCESS-ESM1-5 | SSP1-2.6, SSP2-4.5, and SSP3-7.0 | CC-BY-4.0 |\n| MIROC | MIROC-ES2L | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MIROC | MIROC6 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MOHC | HadGEM3-GC31-LL | SSP1-2.6, SSP2-4.5, and SSP5-8.5 | CC-BY-4.0 |\n| MOHC | UKESM1-0-LL | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MPI-M | MPI-ESM1-2-LR | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MPI-M/DKRZ [^2] | MPI-ESM1-2-HR | SSP1-2.6 and SSP5-8.5 | CC-BY-4.0 |\n| NCC | NorESM2-LM | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NCC | NorESM2-MM | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NOAA-GFDL | GFDL-CM4 | SSP2-4.5 and SSP5-8.5 | CC-BY-4.0 |\n| NOAA-GFDL | GFDL-ESM4 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NUIST | NESM3 | SSP1-2.6, SSP2-4.5, and SSP5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3 | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-AerChem | ssp370 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-CC | ssp245 and ssp585 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-Veg | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-Veg-LR | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| CCCma | CanESM5 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0[^3] |\n\n*Notes:*\n\n[^1]: At the time of running, no ssp1-2.6 precipitation data was available. Therefore, we provide `tasmin` and `tasmax` for this model and experiment, but not `pr`. All other model/experiment combinations in the above table include all three variables.\n\n[^2]: The institution which ran MPI-ESM1-2-HR\u2019s historical (CMIP) simulations is `MPI-M`, while the future (ScenarioMIP) simulations were run by `DKRZ`. 
Therefore, the institution component of `MPI-ESM1-2-HR` filepaths differs between `historical` and `SSP` scenarios.\n\n[^3]: This dataset was previously licensed as [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/), but was relicensed under [CC BY 4.0](https://creativecommons.org/licenses/by/4.0) in March 2023. \n\n## Project methods\n\nThis project makes use of statistical bias correction and downscaling algorithms, which are specifically designed to accurately represent changes in the extremes. For this reason, we selected Quantile Delta Mapping (QDM), following the method introduced by [Cannon et al. (2015)](https://doi.org/10.1175/JCLI-D-14-00754.1), which preserves quantile-specific trends from the GCM while fitting the full distribution for a given day-of-year to a reference dataset (ERA5).\n\nWe then introduce a similar method tailored to increase spatial resolution while preserving extreme behavior, Quantile-Preserving Localized-Analog Downscaling (QPLAD).\n\nTogether, these methods provide a robust means to handle both the central and tail behavior seen in climate model output, while aligning the full distribution to a state-of-the-art reanalysis dataset and providing the spatial granularity needed to study surface impacts.\n\nFor further documentation, see [Global downscaled projections for climate impacts research (GDPCIR): preserving extremes for modeling future climate impacts](https://egusphere.copernicus.org/preprints/2023/egusphere-2022-1513/) (EGUsphere, 2022 [preprint]).\n\n\n## Citing, licensing, and using data produced by this project\n\nProjects making use of the data produced as part of the Climate Impact Lab Global Downscaled Projections for Climate Impacts Research (CIL GDPCIR) project are requested to cite both this project and the source datasets from which these results are derived. Additionally, the use of data derived from some GCMs *requires* citations, and some modeling centers impose licensing restrictions & requirements on derived works. See each GCM's license info in the links below for more information.\n\n### CIL GDPCIR\n\nUsers are requested to cite this project in derived works. Our method documentation paper may be cited using the following:\n\n> Gergel, D. R., Malevich, S. B., McCusker, K. E., Tenezakis, E., Delgado, M. T., Fish, M. A., and Kopp, R. E.: Global downscaled projections for climate impacts research (GDPCIR): preserving extremes for modeling future climate impacts, EGUsphere [preprint], https://doi.org/10.5194/egusphere-2022-1513, 2023. \n\nThe code repository may be cited using the following:\n\n> Diana Gergel, Kelly McCusker, Brewster Malevich, Emile Tenezakis, Meredith Fish, Michael Delgado (2022). ClimateImpactLab/downscaleCMIP6: (v1.0.0). Zenodo. https://doi.org/10.5281/zenodo.6403794\n\n### ERA5\n\nAdditionally, we request you cite the historical dataset used in bias correction and downscaling, ERA5. See the [ECMWF guide to citing a dataset on the Climate Data Store](https://confluence.ecmwf.int/display/CKB/How+to+acknowledge+and+cite+a+Climate+Data+Store+%28CDS%29+catalogue+entry+and+the+data+published+as+part+of+it):\n\n> Hersbach, H., et al. The ERA5 global reanalysis. Q J R Meteorol Soc. 2020; 146: 1999\u20132049. DOI: [10.1002/qj.3803](https://doi.org/10.1002/qj.3803)\n>\n> Mu\u00f1oz Sabater, J., (2019): ERA5-Land hourly data from 1981 to present. Copernicus Climate Change Service (C3S) Climate Data Store (CDS). 
(Accessed on June 4, 2021), DOI: [10.24381/cds.e2161bac](https://doi.org/10.24381/cds.e2161bac)\n>\n> Mu\u00f1oz Sabater, J., (2021): ERA5-Land hourly data from 1950 to 1980. Copernicus Climate Change Service (C3S) Climate Data Store (CDS). (Accessed on June 4, 2021), DOI: [10.24381/cds.e2161bac](https://doi.org/10.24381/cds.e2161bac)\n\n### GCM-specific citations & licenses\n\nThe CMIP6 simulation data made available through the Earth System Grid Federation (ESGF) are subject to Creative Commons [BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/) or [BY-NC-SA 4.0](https://creativecommons.org/licenses/by-nc-sa/4.0/) licenses. The Climate Impact Lab has reached out to each of the modeling institutions to request waivers from these terms so the outputs of this project may be used with fewer restrictions, and has been granted permission to release the data using the licenses listed here.\n\n#### Public Domain Datasets\n\nThe following bias corrected and downscaled model simulations are available in the public domain using a [CC0 1.0 Universal Public Domain Declaration](https://creativecommons.org/publicdomain/zero/1.0/). Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc0.\n\n* **FGOALS-g3**\n\n License description: [data_licenses/FGOALS-g3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/FGOALS-g3.txt)\n\n CMIP Citation:\n\n > Li, Lijuan **(2019)**. *CAS FGOALS-g3 model output prepared for CMIP6 CMIP*. Version 20190826. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1783\n\n ScenarioMIP Citation:\n\n > Li, Lijuan **(2019)**. *CAS FGOALS-g3 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190818; SSP2-4.5 version 20190818; SSP3-7.0 version 20190820; SSP5-8.5 tasmax version 20190819; SSP5-8.5 tasmin version 20190819; SSP5-8.5 pr version 20190818. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2056\n\n\n* **INM-CM4-8**\n\n License description: [data_licenses/INM-CM4-8.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/INM-CM4-8.txt)\n\n CMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM4-8 model output prepared for CMIP6 CMIP*. Version 20190530. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1422\n\n ScenarioMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM4-8 model output prepared for CMIP6 ScenarioMIP*. Version 20190603. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.12321\n\n\n* **INM-CM5-0**\n\n License description: [data_licenses/INM-CM5-0.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/INM-CM5-0.txt)\n\n CMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM5-0 model output prepared for CMIP6 CMIP*. Version 20190610. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1423\n\n ScenarioMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM5-0 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190619; SSP2-4.5 version 20190619; SSP3-7.0 version 20190618; SSP5-8.5 version 20190724. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.12322\n\n\n#### CC-BY-4.0\n\nThe following bias corrected and downscaled model simulations are licensed under a [Creative Commons Attribution 4.0 International License](https://creativecommons.org/licenses/by/4.0/). Note that this license requires citation of the source model output (included here). Please see https://creativecommons.org/licenses/by/4.0/ for more information. Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by.\n\n* **ACCESS-CM2**\n\n License description: [data_licenses/ACCESS-CM2.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/ACCESS-CM2.txt)\n\n CMIP Citation:\n\n > Dix, Martin; Bi, Daohua; Dobrohotoff, Peter; Fiedler, Russell; Harman, Ian; Law, Rachel; Mackallah, Chloe; Marsland, Simon; O'Farrell, Siobhan; Rashid, Harun; Srbinovsky, Jhan; Sullivan, Arnold; Trenham, Claire; Vohralik, Peter; Watterson, Ian; Williams, Gareth; Woodhouse, Matthew; Bodman, Roger; Dias, Fabio Boeira; Domingues, Catia; Hannah, Nicholas; Heerdegen, Aidan; Savita, Abhishek; Wales, Scott; Allen, Chris; Druken, Kelsey; Evans, Ben; Richards, Clare; Ridzwan, Syazwan Mohamed; Roberts, Dale; Smillie, Jon; Snow, Kate; Ward, Marshall; Yang, Rui **(2019)**. *CSIRO-ARCCSS ACCESS-CM2 model output prepared for CMIP6 CMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2281\n\n ScenarioMIP Citation:\n\n > Dix, Martin; Bi, Daohua; Dobrohotoff, Peter; Fiedler, Russell; Harman, Ian; Law, Rachel; Mackallah, Chloe; Marsland, Simon; O'Farrell, Siobhan; Rashid, Harun; Srbinovsky, Jhan; Sullivan, Arnold; Trenham, Claire; Vohralik, Peter; Watterson, Ian; Williams, Gareth; Woodhouse, Matthew; Bodman, Roger; Dias, Fabio Boeira; Domingues, Catia; Hannah, Nicholas; Heerdegen, Aidan; Savita, Abhishek; Wales, Scott; Allen, Chris; Druken, Kelsey; Evans, Ben; Richards, Clare; Ridzwan, Syazwan Mohamed; Roberts, Dale; Smillie, Jon; Snow, Kate; Ward, Marshall; Yang, Rui **(2019)**. *CSIRO-ARCCSS ACCESS-CM2 model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2285\n\n\n* **ACCESS-ESM1-5**\n\n License description: [data_licenses/ACCESS-ESM1-5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/ACCESS-ESM1-5.txt)\n\n CMIP Citation:\n\n > Ziehn, Tilo; Chamberlain, Matthew; Lenton, Andrew; Law, Rachel; Bodman, Roger; Dix, Martin; Wang, Yingping; Dobrohotoff, Peter; Srbinovsky, Jhan; Stevens, Lauren; Vohralik, Peter; Mackallah, Chloe; Sullivan, Arnold; O'Farrell, Siobhan; Druken, Kelsey **(2019)**. *CSIRO ACCESS-ESM1.5 model output prepared for CMIP6 CMIP*. Version 20191115. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.2288\n\n ScenarioMIP Citation:\n\n > Ziehn, Tilo; Chamberlain, Matthew; Lenton, Andrew; Law, Rachel; Bodman, Roger; Dix, Martin; Wang, Yingping; Dobrohotoff, Peter; Srbinovsky, Jhan; Stevens, Lauren; Vohralik, Peter; Mackallah, Chloe; Sullivan, Arnold; O'Farrell, Siobhan; Druken, Kelsey **(2019)**. *CSIRO ACCESS-ESM1.5 model output prepared for CMIP6 ScenarioMIP*. Version 20191115. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2291\n\n\n* **BCC-CSM2-MR**\n\n License description: [data_licenses/BCC-CSM2-MR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/BCC-CSM2-MR.txt)\n\n CMIP Citation:\n\n > Xin, Xiaoge; Zhang, Jie; Zhang, Fang; Wu, Tongwen; Shi, Xueli; Li, Jianglong; Chu, Min; Liu, Qianxia; Yan, Jinghui; Ma, Qiang; Wei, Min **(2018)**. *BCC BCC-CSM2MR model output prepared for CMIP6 CMIP*. Version 20181126. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1725\n\n ScenarioMIP Citation:\n\n > Xin, Xiaoge; Wu, Tongwen; Shi, Xueli; Zhang, Fang; Li, Jianglong; Chu, Min; Liu, Qianxia; Yan, Jinghui; Ma, Qiang; Wei, Min **(2019)**. *BCC BCC-CSM2MR model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190315; SSP2-4.5 version 20190318; SSP3-7.0 version 20190318; SSP5-8.5 version 20190318. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1732\n\n\n* **CMCC-CM2-SR5**\n\n License description: [data_licenses/CMCC-CM2-SR5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CMCC-CM2-SR5.txt)\n\n CMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele **(2020)**. *CMCC CMCC-CM2-SR5 model output prepared for CMIP6 CMIP*. Version 20200616. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1362\n\n ScenarioMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele **(2020)**. *CMCC CMCC-CM2-SR5 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20200717; SSP2-4.5 version 20200617; SSP3-7.0 version 20200622; SSP5-8.5 version 20200622. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1365\n\n\n* **CMCC-ESM2**\n\n License description: [data_licenses/CMCC-ESM2.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CMCC-ESM2.txt)\n\n CMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele; Butensch\u00f6n, Momme **(2021)**. *CMCC CMCC-ESM2 model output prepared for CMIP6 CMIP*. Version 20210114. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.13164\n\n ScenarioMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele; Butensch\u00f6n, Momme **(2021)**. *CMCC CMCC-ESM2 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20210126; SSP2-4.5 version 20210129; SSP3-7.0 version 20210202; SSP5-8.5 version 20210126. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.13168\n\n\n* **EC-Earth3-AerChem**\n\n License description: [data_licenses/EC-Earth3-AerChem.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-AerChem.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-AerChem model output prepared for CMIP6 CMIP*. Version 20200624. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.639\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-AerChem model output prepared for CMIP6 ScenarioMIP*. Version 20200827. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.724\n\n\n* **EC-Earth3-CC**\n\n License description: [data_licenses/EC-Earth3-CC.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-CC.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth-3-CC model output prepared for CMIP6 CMIP*. Version 20210113. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.640\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2021)**. *EC-Earth-Consortium EC-Earth3-CC model output prepared for CMIP6 ScenarioMIP*. Version 20210113. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.15327\n\n\n* **EC-Earth3-Veg-LR**\n\n License description: [data_licenses/EC-Earth3-Veg-LR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-Veg-LR.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-Veg-LR model output prepared for CMIP6 CMIP*. Version 20200217. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.643\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-Veg-LR model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20201201; SSP2-4.5 version 20201123; SSP3-7.0 version 20201123; SSP5-8.5 version 20201201. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.728\n\n\n* **EC-Earth3-Veg**\n\n License description: [data_licenses/EC-Earth3-Veg.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-Veg.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3-Veg model output prepared for CMIP6 CMIP*. Version 20200225. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.642\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3-Veg model output prepared for CMIP6 ScenarioMIP*. Version 20200225. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.727\n\n\n* **EC-Earth3**\n\n License description: [data_licenses/EC-Earth3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3 model output prepared for CMIP6 CMIP*. Version 20200310. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.181\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3 model output prepared for CMIP6 ScenarioMIP*. Version 20200310. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.251\n\n\n* **GFDL-CM4**\n\n License description: [data_licenses/GFDL-CM4.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/GFDL-CM4.txt)\n\n CMIP Citation:\n\n > Guo, Huan; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Bushuk, Mitchell; Dunne, Krista A.; Dussin, Raphael; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Reichl, Brandon G; Schwarzkopf, Daniel M; Seman, Charles J; Shao, Andrew; Silvers, Levi; Wyman, Bruce; Yan, Xiaoqin; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Held, Isaac M; Krasting, John P.; Horowitz, Larry W.; Milly, P.C.D; Shevliakova, Elena; Winton, Michael; Zhao, Ming; Zhang, Rong **(2018)**. *NOAA-GFDL GFDL-CM4 model output*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1402\n\n ScenarioMIP Citation:\n\n > Guo, Huan; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Dunne, Krista A.; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Schwarzkopf, Daniel M; Seman, Charles J; Shao, Andrew; Silvers, Levi; Wyman, Bruce; Yan, Xiaoqin; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Held, Isaac M; Krasting, John P.; Horowitz, Larry W.; Milly, Chris; Shevliakova, Elena; Winton, Michael; Zhao, Ming; Zhang, Rong **(2018)**. *NOAA-GFDL GFDL-CM4 model output prepared for CMIP6 ScenarioMIP*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.9242\n\n\n* **GFDL-ESM4**\n\n License description: [data_licenses/GFDL-ESM4.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/GFDL-ESM4.txt)\n\n CMIP Citation:\n\n > Krasting, John P.; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Dunne, Krista A.; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Reichl, Brandon G; Schwarzkopf, Daniel M; Seman, Charles J; Silvers, Levi; Wyman, Bruce; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Dussin, Raphael; Guo, Huan; He, Jian; Held, Isaac M; Horowitz, Larry W.; Lin, Pu; Milly, P.C.D; Shevliakova, Elena; Stock, Charles; Winton, Michael; Wittenberg, Andrew T.; Xie, Yuanyu; Zhao, Ming **(2018)**. *NOAA-GFDL GFDL-ESM4 model output prepared for CMIP6 CMIP*. Version 20190726. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1407\n\n ScenarioMIP Citation:\n\n > John, Jasmin G; Blanton, Chris; McHugh, Colleen; Radhakrishnan, Aparna; Rand, Kristopher; Vahlenkamp, Hans; Wilson, Chandin; Zadeh, Niki T.; Dunne, John P.; Dussin, Raphael; Horowitz, Larry W.; Krasting, John P.; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Ploshay, Jeffrey; Shevliakova, Elena; Silvers, Levi; Stock, Charles; Winton, Michael; Zeng, Yujin **(2018)**. *NOAA-GFDL GFDL-ESM4 model output prepared for CMIP6 ScenarioMIP*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1414\n\n\n* **HadGEM3-GC31-LL**\n\n License description: [data_licenses/HadGEM3-GC31-LL.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/HadGEM3-GC31-LL.txt)\n\n CMIP Citation:\n\n > Ridley, Jeff; Menary, Matthew; Kuhlbrodt, Till; Andrews, Martin; Andrews, Tim **(2018)**. *MOHC HadGEM3-GC31-LL model output prepared for CMIP6 CMIP*. Version 20190624. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.419\n\n ScenarioMIP Citation:\n\n > Good, Peter **(2019)**. *MOHC HadGEM3-GC31-LL model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20200114; SSP2-4.5 version 20190908; SSP5-8.5 version 20200114. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.10845\n\n\n* **MIROC-ES2L**\n\n License description: [data_licenses/MIROC-ES2L.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MIROC-ES2L.txt)\n\n CMIP Citation:\n\n > Hajima, Tomohiro; Abe, Manabu; Arakawa, Osamu; Suzuki, Tatsuo; Komuro, Yoshiki; Ogura, Tomoo; Ogochi, Koji; Watanabe, Michio; Yamamoto, Akitomo; Tatebe, Hiroaki; Noguchi, Maki A.; Ohgaito, Rumi; Ito, Akinori; Yamazaki, Dai; Ito, Akihiko; Takata, Kumiko; Watanabe, Shingo; Kawamiya, Michio; Tachiiri, Kaoru **(2019)**. *MIROC MIROC-ES2L model output prepared for CMIP6 CMIP*. Version 20191129. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.902\n\n ScenarioMIP Citation:\n\n > Tachiiri, Kaoru; Abe, Manabu; Hajima, Tomohiro; Arakawa, Osamu; Suzuki, Tatsuo; Komuro, Yoshiki; Ogochi, Koji; Watanabe, Michio; Yamamoto, Akitomo; Tatebe, Hiroaki; Noguchi, Maki A.; Ohgaito, Rumi; Ito, Akinori; Yamazaki, Dai; Ito, Akihiko; Takata, Kumiko; Watanabe, Shingo; Kawamiya, Michio **(2019)**. *MIROC MIROC-ES2L model output prepared for CMIP6 ScenarioMIP*. Version 20200318. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.936\n\n\n* **MIROC6**\n\n License description: [data_licenses/MIROC6.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MIROC6.txt)\n\n CMIP Citation:\n\n > Tatebe, Hiroaki; Watanabe, Masahiro **(2018)**. *MIROC MIROC6 model output prepared for CMIP6 CMIP*. Version 20191016. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.881\n\n ScenarioMIP Citation:\n\n > Shiogama, Hideo; Abe, Manabu; Tatebe, Hiroaki **(2019)**. *MIROC MIROC6 model output prepared for CMIP6 ScenarioMIP*. Version 20191016. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.898\n\n\n* **MPI-ESM1-2-HR**\n\n License description: [data_licenses/MPI-ESM1-2-HR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MPI-ESM1-2-HR.txt)\n\n CMIP Citation:\n\n > Jungclaus, Johann; Bittner, Matthias; Wieners, Karl-Hermann; Wachsmann, Fabian; Schupfner, Martin; Legutke, Stephanie; Giorgetta, Marco; Reick, Christian; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Esch, Monika; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-HR model output prepared for CMIP6 CMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.741\n\n ScenarioMIP Citation:\n\n > Schupfner, Martin; Wieners, Karl-Hermann; Wachsmann, Fabian; Steger, Christian; Bittner, Matthias; Jungclaus, Johann; Fr\u00fch, Barbara; Pankatz, Klaus; Giorgetta, Marco; Reick, Christian; Legutke, Stephanie; Esch, Monika; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *DKRZ MPI-ESM1.2-HR model output prepared for CMIP6 ScenarioMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2450\n\n\n* **MPI-ESM1-2-LR**\n\n License description: [data_licenses/MPI-ESM1-2-LR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MPI-ESM1-2-LR.txt)\n\n CMIP Citation:\n\n > Wieners, Karl-Hermann; Giorgetta, Marco; Jungclaus, Johann; Reick, Christian; Esch, Monika; Bittner, Matthias; Legutke, Stephanie; Schupfner, Martin; Wachsmann, Fabian; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-LR model output prepared for CMIP6 CMIP*. Version 20190710. 
Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.742\n\n ScenarioMIP Citation:\n\n > Wieners, Karl-Hermann; Giorgetta, Marco; Jungclaus, Johann; Reick, Christian; Esch, Monika; Bittner, Matthias; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-LR model output prepared for CMIP6 ScenarioMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.793\n\n\n* **NESM3**\n\n License description: [data_licenses/NESM3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NESM3.txt)\n\n CMIP Citation:\n\n > Cao, Jian; Wang, Bin **(2019)**. *NUIST NESMv3 model output prepared for CMIP6 CMIP*. Version 20190812. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2021\n\n ScenarioMIP Citation:\n\n > Cao, Jian **(2019)**. *NUIST NESMv3 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190806; SSP2-4.5 version 20190805; SSP5-8.5 version 20190811. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2027\n\n\n* **NorESM2-LM**\n\n License description: [data_licenses/NorESM2-LM.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NorESM2-LM.txt)\n\n CMIP Citation:\n\n > Seland, \u00d8yvind; Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-LM model output prepared for CMIP6 CMIP*. Version 20190815. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.502\n\n ScenarioMIP Citation:\n\n > Seland, \u00d8yvind; Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-LM model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.604\n\n\n* **NorESM2-MM**\n\n License description: [data_licenses/NorESM2-MM.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NorESM2-MM.txt)\n\n CMIP Citation:\n\n > Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Seland, \u00d8yvind; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-MM model output prepared for CMIP6 CMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.506\n\n ScenarioMIP Citation:\n\n > Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Seland, \u00d8yvind; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-MM model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.608\n\n\n* **UKESM1-0-LL**\n\n License description: [data_licenses/UKESM1-0-LL.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/UKESM1-0-LL.txt)\n\n CMIP Citation:\n\n > Tang, Yongming; Rumbold, Steve; Ellis, Rich; Kelley, Douglas; Mulcahy, Jane; Sellar, Alistair; Walton, Jeremy; Jones, Colin **(2019)**. *MOHC UKESM1.0-LL model output prepared for CMIP6 CMIP*. Version 20190627. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1569\n\n ScenarioMIP Citation:\n\n > Good, Peter; Sellar, Alistair; Tang, Yongming; Rumbold, Steve; Ellis, Rich; Kelley, Douglas; Kuhlbrodt, Till; Walton, Jeremy **(2019)**. *MOHC UKESM1.0-LL model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190708; SSP2-4.5 version 20190715; SSP3-7.0 version 20190726; SSP5-8.5 version 20190726. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1567\n\n\n* **CanESM5**\n\n License description: [data_licenses/CanESM5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CanESM5.txt). Note: this dataset was previously licensed\n under CC BY-SA 4.0, but was relicensed as CC BY 4.0 in March, 2023.\n\n CMIP Citation:\n\n > Swart, Neil Cameron; Cole, Jason N.S.; Kharin, Viatcheslav V.; Lazare, Mike; Scinocca, John F.; Gillett, Nathan P.; Anstey, James; Arora, Vivek; Christian, James R.; Jiao, Yanjun; Lee, Warren G.; Majaess, Fouad; Saenko, Oleg A.; Seiler, Christian; Seinen, Clint; Shao, Andrew; Solheim, Larry; von Salzen, Knut; Yang, Duo; Winter, Barbara; Sigmond, Michael **(2019)**. *CCCma CanESM5 model output prepared for CMIP6 CMIP*. Version 20190429. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1303\n\n ScenarioMIP Citation:\n\n > Swart, Neil Cameron; Cole, Jason N.S.; Kharin, Viatcheslav V.; Lazare, Mike; Scinocca, John F.; Gillett, Nathan P.; Anstey, James; Arora, Vivek; Christian, James R.; Jiao, Yanjun; Lee, Warren G.; Majaess, Fouad; Saenko, Oleg A.; Seiler, Christian; Seinen, Clint; Shao, Andrew; Solheim, Larry; von Salzen, Knut; Yang, Duo; Winter, Barbara; Sigmond, Michael **(2019)**. *CCCma CanESM5 model output prepared for CMIP6 ScenarioMIP*. Version 20190429. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1317\n\n## Acknowledgements\n\nThis work is the result of many years' worth of work by members of the [Climate Impact Lab](https://impactlab.org), but would not have been possible without many contributions from across the wider scientific and computing communities.\n\nSpecifically, we would like to acknowledge the World Climate Research Programme's Working Group on Coupled Modeling, which is responsible for CMIP, and we would like to thank the climate modeling groups for producing and making their model output available. We would particularly like to thank the modeling institutions whose results are included as an input to this repository (listed above) for their contributions to the CMIP6 project and for responding to and granting our requests for license waivers.\n\nWe would also like to thank Lamont-Doherty Earth Observatory, the [Pangeo Consortium](https://github.com/pangeo-data) (and especially the [ESGF Cloud Data Working Group](https://pangeo-data.github.io/pangeo-cmip6-cloud/#)) and Google Cloud and the Google Public Datasets program for making the [CMIP6 Google Cloud collection](https://console.cloud.google.com/marketplace/details/noaa-public/cmip6) possible. In particular, we're extremely grateful to [Ryan Abernathey](https://github.com/rabernat), [Naomi Henderson](https://github.com/naomi-henderson), [Charles Blackmon-Luca](https://github.com/charlesbluca), [Aparna Radhakrishnan](https://github.com/aradhakrishnanGFDL), [Julius Busecke](https://github.com/jbusecke), and [Charles Stern](https://github.com/cisaacstern) for the huge amount of work they've done to translate the ESGF CMIP6 netCDF archives into consistently-formatted, analysis-ready zarr stores on Google Cloud.\n\nWe're also grateful to the [xclim developers](https://github.com/Ouranosinc/xclim/graphs/contributors) ([DOI: 10.5281/zenodo.2795043](https://doi.org/10.5281/zenodo.2795043)), in particular [Pascal Bourgault](https://github.com/aulemahal), [David Huard](https://github.com/huard), and [Travis Logan](https://github.com/tlogan2000), for implementing the QDM bias correction method in the xclim python package, supporting our QPLAD implementation in the package, and ongoing support in integrating dask into downscaling workflows. 
For method advice and useful conversations, we would like to thank Keith Dixon, Dennis Adams-Smith, and [Joe Hamman](https://github.com/jhamman).\n\n## Financial support\n\nThis research has been supported by The Rockefeller Foundation and the Microsoft AI for Earth Initiative.\n\n## Additional links\n\n* CIL GDPCIR project homepage: [github.com/ClimateImpactLab/downscaleCMIP6](https://github.com/ClimateImpactLab/downscaleCMIP6)\n* Project listing on Zenodo: https://doi.org/10.5281/zenodo.6403794\n* Climate Impact Lab homepage: [impactlab.org](https://impactlab.org)", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "cil-gdpcir-cc0,climate-impact-lab,cmip6,precipitation,rhodium-group,temperature", "license": "CC0-1.0", "title": "CIL Global Downscaled Projections for Climate Impacts Research (CC0-1.0)", "missionStartDate": "1950-01-01T00:00:00Z"}, "cil-gdpcir-cc-by": {"abstract": "The World Climate Research Programme's [6th Coupled Model Intercomparison Project (CMIP6)](https://www.wcrp-climate.org/wgcm-cmip/wgcm-cmip6) represents an enormous advance in the quality, detail, and scope of climate modeling.\n\nThe [Global Downscaled Projections for Climate Impacts Research](https://github.com/ClimateImpactLab/downscaleCMIP6) dataset makes this modeling more applicable to understanding the impacts of changes in the climate on humans and society with two key developments: trend-preserving bias correction and downscaling. In this dataset, the [Climate Impact Lab](https://impactlab.org) provides global, daily minimum and maximum air temperature at the surface (`tasmin` and `tasmax`) and daily cumulative surface precipitation (`pr`) corresponding to the CMIP6 historical, ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 scenarios for 25 global climate models on a 1/4-degree regular global grid.\n\n## Accessing the data\n\nGDPCIR data can be accessed on the Microsoft Planetary Computer. The dataset is made up of three collections, distinguished by data license:\n* [Public domain (CC0-1.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc0)\n* [Attribution (CC BY 4.0) collection](https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by)\n\nEach modeling center with bias corrected and downscaled data in this collection falls into one of these license categories - see the [table below](/dataset/cil-gdpcir-cc-by#available-institutions-models-and-scenarios-by-license-collection) to see which model is in each collection, and see the section below on [Citing, Licensing, and using data produced by this project](/dataset/cil-gdpcir-cc-by#citing-licensing-and-using-data-produced-by-this-project) for citations and additional information about each license.\n\n## Data format & contents\n\nThe data is stored as partitioned zarr stores (see [https://zarr.readthedocs.io](https://zarr.readthedocs.io)), each of which includes thousands of data and metadata files covering the full time span of the experiment. Historical zarr stores contain just over 50 GB, while SSP zarr stores contain nearly 70 GB. Each variable is stored as a 32-bit float, with dimensions time (daily datetime), lat (float latitude), and lon (float longitude). The data is chunked in intervals of 365 days and 90 degrees of latitude and longitude. 
Therefore, each chunk is `(365, 360, 360)`, occupying approximately 180 MB in memory.\n\nHistorical data is daily, excluding leap days, from Jan 1, 1950 to Dec 31, 2014; SSP data is daily, excluding leap days, from Jan 1, 2015 to either Dec 31, 2099 or Dec 31, 2100, depending on data availability in the source GCM.\n\nThe spatial domain covers all 0.25-degree grid cells, indexed by the grid center, with grid edges on the quarter-degree, using a -180 to 180 longitude convention. Thus, the \u201clon\u201d coordinate extends from -179.875 to 179.875, and the \u201clat\u201d coordinate extends from -89.875 to 89.875, with intermediate values at each 0.25-degree increment between (e.g. -179.875, -179.625, -179.375, etc.).\n\n## Available institutions, models, and scenarios by license collection\n\n| Modeling institution | Source model | Available experiments | License collection |\n| -------------------- | ----------------- | ------------------------------------------ | ---------------------- |\n| CAS | FGOALS-g3 [^1] | SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| INM | INM-CM4-8 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| INM | INM-CM5-0 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | Public domain datasets |\n| BCC | BCC-CSM2-MR | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| CMCC | CMCC-CM2-SR5 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0 |\n| CMCC | CMCC-ESM2 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0 |\n| CSIRO-ARCCSS | ACCESS-CM2 | SSP2-4.5 and SSP3-7.0 | CC-BY-4.0 |\n| CSIRO | ACCESS-ESM1-5 | SSP1-2.6, SSP2-4.5, and SSP3-7.0 | CC-BY-4.0 |\n| MIROC | MIROC-ES2L | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MIROC | MIROC6 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MOHC | HadGEM3-GC31-LL | SSP1-2.6, SSP2-4.5, and SSP5-8.5 | CC-BY-4.0 |\n| MOHC | UKESM1-0-LL | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MPI-M | MPI-ESM1-2-LR | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| MPI-M/DKRZ [^2] | MPI-ESM1-2-HR | SSP1-2.6 and SSP5-8.5 | CC-BY-4.0 |\n| NCC | NorESM2-LM | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NCC | NorESM2-MM | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NOAA-GFDL | GFDL-CM4 | SSP2-4.5 and SSP5-8.5 | CC-BY-4.0 |\n| NOAA-GFDL | GFDL-ESM4 | SSP1-2.6, SSP2-4.5, SSP3-7.0, and SSP5-8.5 | CC-BY-4.0 |\n| NUIST | NESM3 | SSP1-2.6, SSP2-4.5, and SSP5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3 | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-AerChem | ssp370 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-CC | ssp245 and ssp585 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-Veg | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| EC-Earth-Consortium | EC-Earth3-Veg-LR | ssp1-2.6, ssp2-4.5, ssp3-7.0, and ssp5-8.5 | CC-BY-4.0 |\n| CCCma | CanESM5 | ssp1-2.6, ssp2-4.5, ssp3-7.0, ssp5-8.5 | CC-BY-4.0[^3] |\n\n*Notes:*\n\n[^1]: At the time of running, no ssp1-2.6 precipitation data was available. Therefore, we provide `tasmin` and `tasmax` for this model and experiment, but not `pr`. All other model/experiment combinations in the above table include all three variables.\n\n[^2]: The institution which ran MPI-ESM1-2-HR\u2019s historical (CMIP) simulations is `MPI-M`, while the future (ScenarioMIP) simulations were run by `DKRZ`. 
Therefore, the institution component of `MPI-ESM1-2-HR` filepaths differs between `historical` and `SSP` scenarios.\n\n[^3]: This dataset was previously licensed as [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/), but was relicensed under [CC BY 4.0](https://creativecommons.org/licenses/by/4.0) in March 2023. \n\n## Project methods\n\nThis project makes use of statistical bias correction and downscaling algorithms, which are specifically designed to accurately represent changes in the extremes. For this reason, we selected Quantile Delta Mapping (QDM), following the method introduced by [Cannon et al. (2015)](https://doi.org/10.1175/JCLI-D-14-00754.1), which preserves quantile-specific trends from the GCM while fitting the full distribution for a given day-of-year to a reference dataset (ERA5).\n\nWe then introduce a similar method tailored to increase spatial resolution while preserving extreme behavior, Quantile-Preserving Localized-Analog Downscaling (QPLAD).\n\nTogether, these methods provide a robust means to handle both the central and tail behavior seen in climate model output, while aligning the full distribution to a state-of-the-art reanalysis dataset and providing the spatial granularity needed to study surface impacts.\n\nFor further documentation, see [Global downscaled projections for climate impacts research (GDPCIR): preserving extremes for modeling future climate impacts](https://egusphere.copernicus.org/preprints/2023/egusphere-2022-1513/) (EGUsphere, 2022 [preprint]).\n\n## Citing, licensing, and using data produced by this project\n\nProjects making use of the data produced as part of the Climate Impact Lab Global Downscaled Projections for Climate Impacts Research (CIL GDPCIR) project are requested to cite both this project and the source datasets from which these results are derived. Additionally, the use of data derived from some GCMs *requires* citations, and some modeling centers impose licensing restrictions & requirements on derived works. See each GCM's license info in the links below for more information.\n\n### CIL GDPCIR\n\nUsers are requested to cite this project in derived works. Our method documentation paper may be cited using the following:\n\n> Gergel, D. R., Malevich, S. B., McCusker, K. E., Tenezakis, E., Delgado, M. T., Fish, M. A., and Kopp, R. E.: Global downscaled projections for climate impacts research (GDPCIR): preserving extremes for modeling future climate impacts, EGUsphere [preprint], https://doi.org/10.5194/egusphere-2022-1513, 2023. \n\nThe code repository may be cited using the following:\n\n> Diana Gergel, Kelly McCusker, Brewster Malevich, Emile Tenezakis, Meredith Fish, Michael Delgado (2022). ClimateImpactLab/downscaleCMIP6: (v1.0.0). Zenodo. https://doi.org/10.5281/zenodo.6403794\n\n### ERA5\n\nAdditionally, we request you cite the historical dataset used in bias correction and downscaling, ERA5. See the [ECMWF guide to citing a dataset on the Climate Data Store](https://confluence.ecmwf.int/display/CKB/How+to+acknowledge+and+cite+a+Climate+Data+Store+%28CDS%29+catalogue+entry+and+the+data+published+as+part+of+it):\n\n> Hersbach, H., et al. The ERA5 global reanalysis. Q J R Meteorol Soc. 2020; 146: 1999\u20132049. DOI: [10.1002/qj.3803](https://doi.org/10.1002/qj.3803)\n>\n> Mu\u00f1oz Sabater, J., (2019): ERA5-Land hourly data from 1981 to present. Copernicus Climate Change Service (C3S) Climate Data Store (CDS). 
(Accessed on June 4, 2021), DOI: [10.24381/cds.e2161bac](https://doi.org/10.24381/cds.e2161bac)\n>\n> Mu\u00f1oz Sabater, J., (2021): ERA5-Land hourly data from 1950 to 1980. Copernicus Climate Change Service (C3S) Climate Data Store (CDS). (Accessed on June 4, 2021), DOI: [10.24381/cds.e2161bac](https://doi.org/10.24381/cds.e2161bac)\n\n### GCM-specific citations & licenses\n\nThe CMIP6 simulation data made available through the Earth System Grid Federation (ESGF) are subject to Creative Commons [BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/) or [BY-NC-SA 4.0](https://creativecommons.org/licenses/by-nc-sa/4.0/) licenses. The Climate Impact Lab has reached out to each of the modeling institutions to request waivers from these terms so the outputs of this project may be used with fewer restrictions, and has been granted permission to release the data using the licenses listed here.\n\n#### Public Domain Datasets\n\nThe following bias corrected and downscaled model simulations are available in the public domain using a [CC0 1.0 Universal Public Domain Declaration](https://creativecommons.org/publicdomain/zero/1.0/). Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc0.\n\n* **FGOALS-g3**\n\n License description: [data_licenses/FGOALS-g3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/FGOALS-g3.txt)\n\n CMIP Citation:\n\n > Li, Lijuan **(2019)**. *CAS FGOALS-g3 model output prepared for CMIP6 CMIP*. Version 20190826. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1783\n\n ScenarioMIP Citation:\n\n > Li, Lijuan **(2019)**. *CAS FGOALS-g3 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190818; SSP2-4.5 version 20190818; SSP3-7.0 version 20190820; SSP5-8.5 tasmax version 20190819; SSP5-8.5 tasmin version 20190819; SSP5-8.5 pr version 20190818. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2056\n\n\n* **INM-CM4-8**\n\n License description: [data_licenses/INM-CM4-8.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/INM-CM4-8.txt)\n\n CMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM4-8 model output prepared for CMIP6 CMIP*. Version 20190530. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1422\n\n ScenarioMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM4-8 model output prepared for CMIP6 ScenarioMIP*. Version 20190603. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.12321\n\n\n* **INM-CM5-0**\n\n License description: [data_licenses/INM-CM5-0.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/INM-CM5-0.txt)\n\n CMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM5-0 model output prepared for CMIP6 CMIP*. Version 20190610. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1423\n\n ScenarioMIP Citation:\n\n > Volodin, Evgeny; Mortikov, Evgeny; Gritsun, Andrey; Lykossov, Vasily; Galin, Vener; Diansky, Nikolay; Gusev, Anatoly; Kostrykin, Sergey; Iakovlev, Nikolay; Shestakova, Anna; Emelina, Svetlana **(2019)**. *INM INM-CM5-0 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190619; SSP2-4.5 version 20190619; SSP3-7.0 version 20190618; SSP5-8.5 version 20190724. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.12322\n\n\n#### CC-BY-4.0\n\nThe following bias corrected and downscaled model simulations are licensed under a [Creative Commons Attribution 4.0 International License](https://creativecommons.org/licenses/by/4.0/). Note that this license requires citation of the source model output (included here). Please see https://creativecommons.org/licenses/by/4.0/ for more information. Access the collection on Planetary Computer at https://planetarycomputer.microsoft.com/dataset/cil-gdpcir-cc-by.\n\n* **ACCESS-CM2**\n\n License description: [data_licenses/ACCESS-CM2.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/ACCESS-CM2.txt)\n\n CMIP Citation:\n\n > Dix, Martin; Bi, Doahua; Dobrohotoff, Peter; Fiedler, Russell; Harman, Ian; Law, Rachel; Mackallah, Chloe; Marsland, Simon; O'Farrell, Siobhan; Rashid, Harun; Srbinovsky, Jhan; Sullivan, Arnold; Trenham, Claire; Vohralik, Peter; Watterson, Ian; Williams, Gareth; Woodhouse, Matthew; Bodman, Roger; Dias, Fabio Boeira; Domingues, Catia; Hannah, Nicholas; Heerdegen, Aidan; Savita, Abhishek; Wales, Scott; Allen, Chris; Druken, Kelsey; Evans, Ben; Richards, Clare; Ridzwan, Syazwan Mohamed; Roberts, Dale; Smillie, Jon; Snow, Kate; Ward, Marshall; Yang, Rui **(2019)**. *CSIRO-ARCCSS ACCESS-CM2 model output prepared for CMIP6 CMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2281\n\n ScenarioMIP Citation:\n\n > Dix, Martin; Bi, Doahua; Dobrohotoff, Peter; Fiedler, Russell; Harman, Ian; Law, Rachel; Mackallah, Chloe; Marsland, Simon; O'Farrell, Siobhan; Rashid, Harun; Srbinovsky, Jhan; Sullivan, Arnold; Trenham, Claire; Vohralik, Peter; Watterson, Ian; Williams, Gareth; Woodhouse, Matthew; Bodman, Roger; Dias, Fabio Boeira; Domingues, Catia; Hannah, Nicholas; Heerdegen, Aidan; Savita, Abhishek; Wales, Scott; Allen, Chris; Druken, Kelsey; Evans, Ben; Richards, Clare; Ridzwan, Syazwan Mohamed; Roberts, Dale; Smillie, Jon; Snow, Kate; Ward, Marshall; Yang, Rui **(2019)**. *CSIRO-ARCCSS ACCESS-CM2 model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2285\n\n\n* **ACCESS-ESM1-5**\n\n License description: [data_licenses/ACCESS-ESM1-5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/ACCESS-ESM1-5.txt)\n\n CMIP Citation:\n\n > Ziehn, Tilo; Chamberlain, Matthew; Lenton, Andrew; Law, Rachel; Bodman, Roger; Dix, Martin; Wang, Yingping; Dobrohotoff, Peter; Srbinovsky, Jhan; Stevens, Lauren; Vohralik, Peter; Mackallah, Chloe; Sullivan, Arnold; O'Farrell, Siobhan; Druken, Kelsey **(2019)**. *CSIRO ACCESS-ESM1.5 model output prepared for CMIP6 CMIP*. Version 20191115. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.2288\n\n ScenarioMIP Citation:\n\n > Ziehn, Tilo; Chamberlain, Matthew; Lenton, Andrew; Law, Rachel; Bodman, Roger; Dix, Martin; Wang, Yingping; Dobrohotoff, Peter; Srbinovsky, Jhan; Stevens, Lauren; Vohralik, Peter; Mackallah, Chloe; Sullivan, Arnold; O'Farrell, Siobhan; Druken, Kelsey **(2019)**. *CSIRO ACCESS-ESM1.5 model output prepared for CMIP6 ScenarioMIP*. Version 20191115. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2291\n\n\n* **BCC-CSM2-MR**\n\n License description: [data_licenses/BCC-CSM2-MR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/BCC-CSM2-MR.txt)\n\n CMIP Citation:\n\n > Xin, Xiaoge; Zhang, Jie; Zhang, Fang; Wu, Tongwen; Shi, Xueli; Li, Jianglong; Chu, Min; Liu, Qianxia; Yan, Jinghui; Ma, Qiang; Wei, Min **(2018)**. *BCC BCC-CSM2MR model output prepared for CMIP6 CMIP*. Version 20181126. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1725\n\n ScenarioMIP Citation:\n\n > Xin, Xiaoge; Wu, Tongwen; Shi, Xueli; Zhang, Fang; Li, Jianglong; Chu, Min; Liu, Qianxia; Yan, Jinghui; Ma, Qiang; Wei, Min **(2019)**. *BCC BCC-CSM2MR model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190315; SSP2-4.5 version 20190318; SSP3-7.0 version 20190318; SSP5-8.5 version 20190318. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1732\n\n\n* **CMCC-CM2-SR5**\n\n License description: [data_licenses/CMCC-CM2-SR5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CMCC-CM2-SR5.txt)\n\n CMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele **(2020)**. *CMCC CMCC-CM2-SR5 model output prepared for CMIP6 CMIP*. Version 20200616. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1362\n\n ScenarioMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele **(2020)**. *CMCC CMCC-CM2-SR5 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20200717; SSP2-4.5 version 20200617; SSP3-7.0 version 20200622; SSP5-8.5 version 20200622. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1365\n\n\n* **CMCC-ESM2**\n\n License description: [data_licenses/CMCC-ESM2.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CMCC-ESM2.txt)\n\n CMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele; Butensch\u00f6n, Momme **(2021)**. *CMCC CMCC-ESM2 model output prepared for CMIP6 CMIP*. Version 20210114. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.13164\n\n ScenarioMIP Citation:\n\n > Lovato, Tomas; Peano, Daniele; Butensch\u00f6n, Momme **(2021)**. *CMCC CMCC-ESM2 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20210126; SSP2-4.5 version 20210129; SSP3-7.0 version 20210202; SSP5-8.5 version 20210126. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.13168\n\n\n* **EC-Earth3-AerChem**\n\n License description: [data_licenses/EC-Earth3-AerChem.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-AerChem.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-AerChem model output prepared for CMIP6 CMIP*. Version 20200624. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.639\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-AerChem model output prepared for CMIP6 ScenarioMIP*. Version 20200827. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.724\n\n\n* **EC-Earth3-CC**\n\n License description: [data_licenses/EC-Earth3-CC.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-CC.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth-3-CC model output prepared for CMIP6 CMIP*. Version 20210113. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.640\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2021)**. *EC-Earth-Consortium EC-Earth3-CC model output prepared for CMIP6 ScenarioMIP*. Version 20210113. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.15327\n\n\n* **EC-Earth3-Veg-LR**\n\n License description: [data_licenses/EC-Earth3-Veg-LR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-Veg-LR.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-Veg-LR model output prepared for CMIP6 CMIP*. Version 20200217. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.643\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2020)**. *EC-Earth-Consortium EC-Earth3-Veg-LR model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20201201; SSP2-4.5 version 20201123; SSP3-7.0 version 20201123; SSP5-8.5 version 20201201. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.728\n\n\n* **EC-Earth3-Veg**\n\n License description: [data_licenses/EC-Earth3-Veg.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3-Veg.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3-Veg model output prepared for CMIP6 CMIP*. Version 20200225. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.642\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3-Veg model output prepared for CMIP6 ScenarioMIP*. Version 20200225. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.727\n\n\n* **EC-Earth3**\n\n License description: [data_licenses/EC-Earth3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/EC-Earth3.txt)\n\n CMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3 model output prepared for CMIP6 CMIP*. Version 20200310. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.181\n\n ScenarioMIP Citation:\n\n > EC-Earth Consortium (EC-Earth) **(2019)**. *EC-Earth-Consortium EC-Earth3 model output prepared for CMIP6 ScenarioMIP*. Version 20200310. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.251\n\n\n* **GFDL-CM4**\n\n License description: [data_licenses/GFDL-CM4.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/GFDL-CM4.txt)\n\n CMIP Citation:\n\n > Guo, Huan; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Bushuk, Mitchell; Dunne, Krista A.; Dussin, Raphael; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Reichl, Brandon G; Schwarzkopf, Daniel M; Seman, Charles J; Shao, Andrew; Silvers, Levi; Wyman, Bruce; Yan, Xiaoqin; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Held, Isaac M; Krasting, John P.; Horowitz, Larry W.; Milly, P.C.D; Shevliakova, Elena; Winton, Michael; Zhao, Ming; Zhang, Rong **(2018)**. *NOAA-GFDL GFDL-CM4 model output*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1402\n\n ScenarioMIP Citation:\n\n > Guo, Huan; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Dunne, Krista A.; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Schwarzkopf, Daniel M; Seman, Charles J; Shao, Andrew; Silvers, Levi; Wyman, Bruce; Yan, Xiaoqin; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Held, Isaac M; Krasting, John P.; Horowitz, Larry W.; Milly, Chris; Shevliakova, Elena; Winton, Michael; Zhao, Ming; Zhang, Rong **(2018)**. *NOAA-GFDL GFDL-CM4 model output prepared for CMIP6 ScenarioMIP*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.9242\n\n\n* **GFDL-ESM4**\n\n License description: [data_licenses/GFDL-ESM4.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/GFDL-ESM4.txt)\n\n CMIP Citation:\n\n > Krasting, John P.; John, Jasmin G; Blanton, Chris; McHugh, Colleen; Nikonov, Serguei; Radhakrishnan, Aparna; Rand, Kristopher; Zadeh, Niki T.; Balaji, V; Durachta, Jeff; Dupuis, Christopher; Menzel, Raymond; Robinson, Thomas; Underwood, Seth; Vahlenkamp, Hans; Dunne, Krista A.; Gauthier, Paul PG; Ginoux, Paul; Griffies, Stephen M.; Hallberg, Robert; Harrison, Matthew; Hurlin, William; Malyshev, Sergey; Naik, Vaishali; Paulot, Fabien; Paynter, David J; Ploshay, Jeffrey; Reichl, Brandon G; Schwarzkopf, Daniel M; Seman, Charles J; Silvers, Levi; Wyman, Bruce; Zeng, Yujin; Adcroft, Alistair; Dunne, John P.; Dussin, Raphael; Guo, Huan; He, Jian; Held, Isaac M; Horowitz, Larry W.; Lin, Pu; Milly, P.C.D; Shevliakova, Elena; Stock, Charles; Winton, Michael; Wittenberg, Andrew T.; Xie, Yuanyu; Zhao, Ming **(2018)**. *NOAA-GFDL GFDL-ESM4 model output prepared for CMIP6 CMIP*. Version 20190726. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1407\n\n ScenarioMIP Citation:\n\n > John, Jasmin G; Blanton, Chris; McHugh, Colleen; Radhakrishnan, Aparna; Rand, Kristopher; Vahlenkamp, Hans; Wilson, Chandin; Zadeh, Niki T.; Dunne, John P.; Dussin, Raphael; Horowitz, Larry W.; Krasting, John P.; Lin, Pu; Malyshev, Sergey; Naik, Vaishali; Ploshay, Jeffrey; Shevliakova, Elena; Silvers, Levi; Stock, Charles; Winton, Michael; Zeng, Yujin **(2018)**. *NOAA-GFDL GFDL-ESM4 model output prepared for CMIP6 ScenarioMIP*. Version 20180701. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1414\n\n\n* **HadGEM3-GC31-LL**\n\n License description: [data_licenses/HadGEM3-GC31-LL.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/HadGEM3-GC31-LL.txt)\n\n CMIP Citation:\n\n > Ridley, Jeff; Menary, Matthew; Kuhlbrodt, Till; Andrews, Martin; Andrews, Tim **(2018)**. *MOHC HadGEM3-GC31-LL model output prepared for CMIP6 CMIP*. Version 20190624. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.419\n\n ScenarioMIP Citation:\n\n > Good, Peter **(2019)**. *MOHC HadGEM3-GC31-LL model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20200114; SSP2-4.5 version 20190908; SSP5-8.5 version 20200114. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.10845\n\n\n* **MIROC-ES2L**\n\n License description: [data_licenses/MIROC-ES2L.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MIROC-ES2L.txt)\n\n CMIP Citation:\n\n > Hajima, Tomohiro; Abe, Manabu; Arakawa, Osamu; Suzuki, Tatsuo; Komuro, Yoshiki; Ogura, Tomoo; Ogochi, Koji; Watanabe, Michio; Yamamoto, Akitomo; Tatebe, Hiroaki; Noguchi, Maki A.; Ohgaito, Rumi; Ito, Akinori; Yamazaki, Dai; Ito, Akihiko; Takata, Kumiko; Watanabe, Shingo; Kawamiya, Michio; Tachiiri, Kaoru **(2019)**. *MIROC MIROC-ES2L model output prepared for CMIP6 CMIP*. Version 20191129. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.902\n\n ScenarioMIP Citation:\n\n > Tachiiri, Kaoru; Abe, Manabu; Hajima, Tomohiro; Arakawa, Osamu; Suzuki, Tatsuo; Komuro, Yoshiki; Ogochi, Koji; Watanabe, Michio; Yamamoto, Akitomo; Tatebe, Hiroaki; Noguchi, Maki A.; Ohgaito, Rumi; Ito, Akinori; Yamazaki, Dai; Ito, Akihiko; Takata, Kumiko; Watanabe, Shingo; Kawamiya, Michio **(2019)**. *MIROC MIROC-ES2L model output prepared for CMIP6 ScenarioMIP*. Version 20200318. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.936\n\n\n* **MIROC6**\n\n License description: [data_licenses/MIROC6.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MIROC6.txt)\n\n CMIP Citation:\n\n > Tatebe, Hiroaki; Watanabe, Masahiro **(2018)**. *MIROC MIROC6 model output prepared for CMIP6 CMIP*. Version 20191016. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.881\n\n ScenarioMIP Citation:\n\n > Shiogama, Hideo; Abe, Manabu; Tatebe, Hiroaki **(2019)**. *MIROC MIROC6 model output prepared for CMIP6 ScenarioMIP*. Version 20191016. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.898\n\n\n* **MPI-ESM1-2-HR**\n\n License description: [data_licenses/MPI-ESM1-2-HR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MPI-ESM1-2-HR.txt)\n\n CMIP Citation:\n\n > Jungclaus, Johann; Bittner, Matthias; Wieners, Karl-Hermann; Wachsmann, Fabian; Schupfner, Martin; Legutke, Stephanie; Giorgetta, Marco; Reick, Christian; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Esch, Monika; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-HR model output prepared for CMIP6 CMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.741\n\n ScenarioMIP Citation:\n\n > Schupfner, Martin; Wieners, Karl-Hermann; Wachsmann, Fabian; Steger, Christian; Bittner, Matthias; Jungclaus, Johann; Fr\u00fch, Barbara; Pankatz, Klaus; Giorgetta, Marco; Reick, Christian; Legutke, Stephanie; Esch, Monika; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *DKRZ MPI-ESM1.2-HR model output prepared for CMIP6 ScenarioMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2450\n\n\n* **MPI-ESM1-2-LR**\n\n License description: [data_licenses/MPI-ESM1-2-LR.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/MPI-ESM1-2-LR.txt)\n\n CMIP Citation:\n\n > Wieners, Karl-Hermann; Giorgetta, Marco; Jungclaus, Johann; Reick, Christian; Esch, Monika; Bittner, Matthias; Legutke, Stephanie; Schupfner, Martin; Wachsmann, Fabian; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-LR model output prepared for CMIP6 CMIP*. Version 20190710. 
Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.742\n\n ScenarioMIP Citation:\n\n > Wieners, Karl-Hermann; Giorgetta, Marco; Jungclaus, Johann; Reick, Christian; Esch, Monika; Bittner, Matthias; Gayler, Veronika; Haak, Helmuth; de Vrese, Philipp; Raddatz, Thomas; Mauritsen, Thorsten; von Storch, Jin-Song; Behrens, J\u00f6rg; Brovkin, Victor; Claussen, Martin; Crueger, Traute; Fast, Irina; Fiedler, Stephanie; Hagemann, Stefan; Hohenegger, Cathy; Jahns, Thomas; Kloster, Silvia; Kinne, Stefan; Lasslop, Gitta; Kornblueh, Luis; Marotzke, Jochem; Matei, Daniela; Meraner, Katharina; Mikolajewicz, Uwe; Modali, Kameswarrao; M\u00fcller, Wolfgang; Nabel, Julia; Notz, Dirk; Peters-von Gehlen, Karsten; Pincus, Robert; Pohlmann, Holger; Pongratz, Julia; Rast, Sebastian; Schmidt, Hauke; Schnur, Reiner; Schulzweida, Uwe; Six, Katharina; Stevens, Bjorn; Voigt, Aiko; Roeckner, Erich **(2019)**. *MPI-M MPIESM1.2-LR model output prepared for CMIP6 ScenarioMIP*. Version 20190710. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.793\n\n\n* **NESM3**\n\n License description: [data_licenses/NESM3.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NESM3.txt)\n\n CMIP Citation:\n\n > Cao, Jian; Wang, Bin **(2019)**. *NUIST NESMv3 model output prepared for CMIP6 CMIP*. Version 20190812. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2021\n\n ScenarioMIP Citation:\n\n > Cao, Jian **(2019)**. *NUIST NESMv3 model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190806; SSP2-4.5 version 20190805; SSP5-8.5 version 20190811. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.2027\n\n\n* **NorESM2-LM**\n\n License description: [data_licenses/NorESM2-LM.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NorESM2-LM.txt)\n\n CMIP Citation:\n\n > Seland, \u00d8yvind; Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-LM model output prepared for CMIP6 CMIP*. Version 20190815. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.502\n\n ScenarioMIP Citation:\n\n > Seland, \u00d8yvind; Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-LM model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.604\n\n\n* **NorESM2-MM**\n\n License description: [data_licenses/NorESM2-MM.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/NorESM2-MM.txt)\n\n CMIP Citation:\n\n > Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Seland, \u00d8yvind; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-MM model output prepared for CMIP6 CMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.506\n\n ScenarioMIP Citation:\n\n > Bentsen, Mats; Olivi\u00e8, Dirk Jan Leo; Seland, \u00d8yvind; Toniazzo, Thomas; Gjermundsen, Ada; Graff, Lise Seland; Debernard, Jens Boldingh; Gupta, Alok Kumar; He, Yanchun; Kirkev\u00e5g, Alf; Schwinger, J\u00f6rg; Tjiputra, Jerry; Aas, Kjetil Schanke; Bethke, Ingo; Fan, Yuanchao; Griesfeller, Jan; Grini, Alf; Guo, Chuncheng; Ilicak, Mehmet; Karset, Inger Helene Hafsahl; Landgren, Oskar Andreas; Liakka, Johan; Moseid, Kine Onsum; Nummelin, Aleksi; Spensberger, Clemens; Tang, Hui; Zhang, Zhongshi; Heinze, Christoph; Iversen, Trond; Schulz, Michael **(2019)**. *NCC NorESM2-MM model output prepared for CMIP6 ScenarioMIP*. Version 20191108. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.608\n\n\n* **UKESM1-0-LL**\n\n License description: [data_licenses/UKESM1-0-LL.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/UKESM1-0-LL.txt)\n\n CMIP Citation:\n\n > Tang, Yongming; Rumbold, Steve; Ellis, Rich; Kelley, Douglas; Mulcahy, Jane; Sellar, Alistair; Walton, Jeremy; Jones, Colin **(2019)**. *MOHC UKESM1.0-LL model output prepared for CMIP6 CMIP*. Version 20190627. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1569\n\n ScenarioMIP Citation:\n\n > Good, Peter; Sellar, Alistair; Tang, Yongming; Rumbold, Steve; Ellis, Rich; Kelley, Douglas; Kuhlbrodt, Till; Walton, Jeremy **(2019)**. *MOHC UKESM1.0-LL model output prepared for CMIP6 ScenarioMIP*. SSP1-2.6 version 20190708; SSP2-4.5 version 20190715; SSP3-7.0 version 20190726; SSP5-8.5 version 20190726. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1567\n\n* **CanESM5**\n\n License description: [data_licenses/CanESM5.txt](https://raw.githubusercontent.com/ClimateImpactLab/downscaleCMIP6/master/data_licenses/CanESM5.txt). Note: this dataset was previously licensed\n under CC BY-SA 4.0, but was relicensed as CC BY 4.0 in March, 2023.\n\n CMIP Citation:\n\n > Swart, Neil Cameron; Cole, Jason N.S.; Kharin, Viatcheslav V.; Lazare, Mike; Scinocca, John F.; Gillett, Nathan P.; Anstey, James; Arora, Vivek; Christian, James R.; Jiao, Yanjun; Lee, Warren G.; Majaess, Fouad; Saenko, Oleg A.; Seiler, Christian; Seinen, Clint; Shao, Andrew; Solheim, Larry; von Salzen, Knut; Yang, Duo; Winter, Barbara; Sigmond, Michael **(2019)**. *CCCma CanESM5 model output prepared for CMIP6 CMIP*. Version 20190429. Earth System Grid Federation. 
https://doi.org/10.22033/ESGF/CMIP6.1303\n\n ScenarioMIP Citation:\n\n > Swart, Neil Cameron; Cole, Jason N.S.; Kharin, Viatcheslav V.; Lazare, Mike; Scinocca, John F.; Gillett, Nathan P.; Anstey, James; Arora, Vivek; Christian, James R.; Jiao, Yanjun; Lee, Warren G.; Majaess, Fouad; Saenko, Oleg A.; Seiler, Christian; Seinen, Clint; Shao, Andrew; Solheim, Larry; von Salzen, Knut; Yang, Duo; Winter, Barbara; Sigmond, Michael **(2019)**. *CCCma CanESM5 model output prepared for CMIP6 ScenarioMIP*. Version 20190429. Earth System Grid Federation. https://doi.org/10.22033/ESGF/CMIP6.1317\n\n## Acknowledgements\n\nThis work is the result of many years' worth of work by members of the [Climate Impact Lab](https://impactlab.org), but would not have been possible without many contributions from across the wider scientific and computing communities.\n\nSpecifically, we would like to acknowledge the World Climate Research Programme's Working Group on Coupled Modeling, which is responsible for CMIP, and we would like to thank the climate modeling groups for producing and making their model output available. We would particularly like to thank the modeling institutions whose results are included as an input to this repository (listed above) for their contributions to the CMIP6 project and for responding to and granting our requests for license waivers.\n\nWe would also like to thank Lamont-Doherty Earth Observatory, the [Pangeo Consortium](https://github.com/pangeo-data) (and especially the [ESGF Cloud Data Working Group](https://pangeo-data.github.io/pangeo-cmip6-cloud/#)) and Google Cloud and the Google Public Datasets program for making the [CMIP6 Google Cloud collection](https://console.cloud.google.com/marketplace/details/noaa-public/cmip6) possible. In particular, we're extremely grateful to [Ryan Abernathey](https://github.com/rabernat), [Naomi Henderson](https://github.com/naomi-henderson), [Charles Blackmon-Luca](https://github.com/charlesbluca), [Aparna Radhakrishnan](https://github.com/aradhakrishnanGFDL), [Julius Busecke](https://github.com/jbusecke), and [Charles Stern](https://github.com/cisaacstern) for the huge amount of work they've done to translate the ESGF CMIP6 netCDF archives into consistently formatted, analysis-ready zarr stores on Google Cloud.\n\nWe're also grateful to the [xclim developers](https://github.com/Ouranosinc/xclim/graphs/contributors) ([DOI: 10.5281/zenodo.2795043](https://doi.org/10.5281/zenodo.2795043)), in particular [Pascal Bourgault](https://github.com/aulemahal), [David Huard](https://github.com/huard), and [Travis Logan](https://github.com/tlogan2000), for implementing the QDM bias correction method in the xclim python package, supporting the integration of our QPLAD implementation into the package, and ongoing support in integrating dask into downscaling workflows. 
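For readers curious what that QDM usage pattern looks like in xclim, here is a minimal, illustrative sketch with synthetic single-point data and simplified `group="time"` training; the actual pipeline trains per day-of-year against ERA5, so treat every value and argument below as a placeholder:

```python
# Minimal QDM sketch via xclim's sdba module (synthetic data only).
import numpy as np
import pandas as pd
import xarray as xr
from xclim import sdba

time = pd.date_range("1995-01-01", periods=3652, freq="D")
ref = xr.DataArray(15 + 8 * np.random.randn(time.size), dims="time",
                   coords={"time": time}, attrs={"units": "degC"})   # "observations"
hist = xr.DataArray(13 + 9 * np.random.randn(time.size), dims="time",
                    coords={"time": time}, attrs={"units": "degC"})  # biased model
sim = hist + 2.0                      # pretend future run with a +2 degC trend
sim.attrs["units"] = "degC"

qdm = sdba.QuantileDeltaMapping.train(ref, hist, nquantiles=50,
                                      group="time", kind="+")
scen = qdm.adjust(sim)  # bias-adjusted series that preserves the simulated change
```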
For method advice and useful conversations, we would like to thank Keith Dixon, Dennis Adams-Smith, and [Joe Hamman](https://github.com/jhamman).\n\n## Financial support\n\nThis research has been supported by The Rockefeller Foundation and the Microsoft AI for Earth Initiative.\n\n## Additional links\n\n* CIL GDPCIR project homepage: [github.com/ClimateImpactLab/downscaleCMIP6](https://github.com/ClimateImpactLab/downscaleCMIP6)\n* Project listing on zenodo: https://doi.org/10.5281/zenodo.6403794\n* Climate Impact Lab homepage: [impactlab.org](https://impactlab.org)", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "cil-gdpcir-cc-by,climate-impact-lab,cmip6,precipitation,rhodium-group,temperature", "license": "CC-BY-4.0", "title": "CIL Global Downscaled Projections for Climate Impacts Research (CC-BY-4.0)", "missionStartDate": "1950-01-01T00:00:00Z"}, "noaa-cdr-sea-surface-temperature-whoi-netcdf": {"abstract": "The Sea Surface Temperature-Woods Hole Oceanographic Institution (WHOI) Climate Data Record (CDR) is one of three CDRs which combine to form the NOAA Ocean Surface Bundle (OSB) CDR. The resultant sea surface temperature (SST) data are produced through modeling the diurnal variability in combination with AVHRR SST observations. The final record is output to a 3-hourly 0.25\u00b0 resolution grid over the global ice-free oceans from January 1988 to present.\n\nThis is a NetCDF-only collection; for Cloud-Optimized GeoTIFFs, use collection `noaa-cdr-sea-surface-temperature-whoi`.\nThe NetCDF files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,global,noaa,noaa-cdr-sea-surface-temperature-whoi-netcdf,ocean,temperature", "license": "proprietary", "title": "Sea Surface Temperature - WHOI CDR NetCDFs", "missionStartDate": "1988-01-01T00:00:00Z"}, "noaa-cdr-sea-surface-temperature-optimum-interpolation": {"abstract": "The NOAA 1/4\u00b0 daily Optimum Interpolation Sea Surface Temperature (or daily OISST) Climate Data Record (CDR) provides complete ocean temperature fields constructed by combining bias-adjusted observations from different platforms (satellites, ships, buoys) on a regular global grid, with gaps filled in by interpolation. The main input source is satellite data from the Advanced Very High Resolution Radiometer (AVHRR), which provides high temporal-spatial coverage from late 1981 to present. This input must be adjusted to the buoys due to erroneous cold SST data following the Mt Pinatubo and El Chichon eruptions. 
Applications include climate modeling, resource management, and ecological studies on annual to daily scales.\n\nThese Cloud Optimized GeoTIFFs (COGs) were created from NetCDF files which are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\nFor the NetCDF files, see collection `noaa-cdr-sea-surface-temperature-optimum-interpolation-netcdf`.\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,global,noaa,noaa-cdr-sea-surface-temperature-optimum-interpolation,ocean,temperature", "license": "proprietary", "title": "Sea Surface Temperature - Optimum Interpolation CDR", "missionStartDate": "1981-09-01T00:00:00Z"}, "modis-10A1-061": {"abstract": "This global Level-3 (L3) data set provides a daily composite of snow cover and albedo derived from the 'MODIS Snow Cover 5-Min L2 Swath 500m' data set. Each data granule is a 10\u00b0 x 10\u00b0 tile projected to a 500 m sinusoidal grid.", "instrument": "modis", "platform": null, "platformSerialIdentifier": "aqua,terra", "processingLevel": null, "keywords": "aqua,global,mod10a1,modis,modis-10a1-061,myd10a1,nasa,satellite,snow,terra", "license": "proprietary", "title": "MODIS Snow Cover Daily", "missionStartDate": "2000-02-24T00:00:00Z"}, "sentinel-5p-l2-netcdf": {"abstract": "The Copernicus [Sentinel-5 Precursor](https://sentinels.copernicus.eu/web/sentinel/missions/sentinel-5p) mission provides high spatio-temporal resolution measurements of the Earth's atmosphere. The mission consists of one satellite carrying the [TROPOspheric Monitoring Instrument](http://www.tropomi.eu/) (TROPOMI). The satellite flies in loose formation with NASA's [Suomi NPP](https://www.nasa.gov/mission_pages/NPP/main/index.html) spacecraft, allowing utilization of co-located cloud mask data provided by the [Visible Infrared Imaging Radiometer Suite](https://www.nesdis.noaa.gov/current-satellite-missions/currently-flying/joint-polar-satellite-system/visible-infrared-imaging) (VIIRS) instrument onboard Suomi NPP during processing of the TROPOMI methane product.\n\nThe Sentinel-5 Precursor mission aims to reduce the global atmospheric data gap between the retired [ENVISAT](https://earth.esa.int/eogateway/missions/envisat) and [AURA](https://www.nasa.gov/mission_pages/aura/main/index.html) missions and the future [Sentinel-5](https://sentinels.copernicus.eu/web/sentinel/missions/sentinel-5) mission. Sentinel-5 Precursor [Level 2 data](http://www.tropomi.eu/data-products/level-2-products) provide total columns of ozone, sulfur dioxide, nitrogen dioxide, carbon monoxide and formaldehyde, tropospheric columns of ozone, vertical profiles of ozone and cloud & aerosol information. 
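As an illustrative aside on programmatic access, searching a collection like this through a STAC API might look like the following sketch; the endpoint URL, bounding box, and date range are assumed placeholder values, not part of the mission description:

```python
# Sketch: search this collection through a STAC API with pystac-client.
import pystac_client

catalog = pystac_client.Client.open(
    "https://planetarycomputer.microsoft.com/api/stac/v1"  # assumed endpoint
)
search = catalog.search(
    collections=["sentinel-5p-l2-netcdf"],
    bbox=[5.0, 50.0, 10.0, 55.0],        # placeholder area of interest
    datetime="2023-01-01/2023-01-31",    # placeholder date range
    max_items=5,
)
for item in search.items():
    print(item.id, list(item.assets))    # one asset per L2 product file
```

Note that asset URLs from such endpoints typically require signing or authentication before download, even though searching does not.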
These measurements are used for improving air quality forecasts and monitoring the concentrations of atmospheric constituents.\n\nThis STAC Collection provides Sentinel-5 Precursor Level 2 data, in NetCDF format, since April 2018 for the following products:\n\n* [`L2__AER_AI`](http://www.tropomi.eu/data-products/uv-aerosol-index): Ultraviolet aerosol index\n* [`L2__AER_LH`](http://www.tropomi.eu/data-products/aerosol-layer-height): Aerosol layer height\n* [`L2__CH4___`](http://www.tropomi.eu/data-products/methane): Methane (CH4) total column\n* [`L2__CLOUD_`](http://www.tropomi.eu/data-products/cloud): Cloud fraction, albedo, and top pressure\n* [`L2__CO____`](http://www.tropomi.eu/data-products/carbon-monoxide): Carbon monoxide (CO) total column\n* [`L2__HCHO__`](http://www.tropomi.eu/data-products/formaldehyde): Formaldehyde (HCHO) total column\n* [`L2__NO2___`](http://www.tropomi.eu/data-products/nitrogen-dioxide): Nitrogen dioxide (NO2) total column\n* [`L2__O3____`](http://www.tropomi.eu/data-products/total-ozone-column): Ozone (O3) total column\n* [`L2__O3_TCL`](http://www.tropomi.eu/data-products/tropospheric-ozone-column): Ozone (O3) tropospheric column\n* [`L2__SO2___`](http://www.tropomi.eu/data-products/sulphur-dioxide): Sulfur dioxide (SO2) total column\n* [`L2__NP_BD3`](http://www.tropomi.eu/data-products/auxiliary): Cloud from the Suomi NPP mission, band 3\n* [`L2__NP_BD6`](http://www.tropomi.eu/data-products/auxiliary): Cloud from the Suomi NPP mission, band 6\n* [`L2__NP_BD7`](http://www.tropomi.eu/data-products/auxiliary): Cloud from the Suomi NPP mission, band 7\n", "instrument": "TROPOMI", "platform": "Sentinel-5P", "platformSerialIdentifier": "Sentinel 5 Precursor", "processingLevel": null, "keywords": "air-quality,climate-change,copernicus,esa,forecasting,sentinel,sentinel-5-precursor,sentinel-5p,sentinel-5p-l2-netcdf,tropomi", "license": "proprietary", "title": "Sentinel-5P Level-2", "missionStartDate": "2018-04-30T00:18:50Z"}, "sentinel-3-olci-wfr-l2-netcdf": {"abstract": "This Collection provides Sentinel-3 Full Resolution [OLCI Level-2 Water][olci-l2] products containing data on water-leaving reflectance, ocean color, and more.\n\n## Data files\n\nThis dataset includes data on:\n\n- Surface directional reflectance\n- Chlorophyll-a concentration\n- Suspended matter concentration\n- Energy flux\n- Aerosol load\n- Integrated water vapor column\n\nEach variable is contained within NetCDF files. Error estimates are available for each product.\n\n## Processing overview\n\nThe values in the data files have been converted from Top of Atmosphere radiance to reflectance, and include various corrections for gaseous absorption and pixel classification. 
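To make the per-variable file layout concrete, a minimal sketch of inspecting one downloaded NetCDF file follows; the file name `chl_nn.nc` and the local path are assumptions, and any of the product's NetCDF files can be opened the same way:

```python
# Sketch: inspect one per-variable NetCDF file from a downloaded product.
import xarray as xr

ds = xr.open_dataset("chl_nn.nc")  # assumed example file name
print(ds.data_vars)                # variables carried by this file
print({name: var.attrs.get("units") for name, var in ds.data_vars.items()})
```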
More information about the product and data processing can be found in the [User Guide](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-water) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-olci/level-2/processing).\n\nThis Collection contains Level-2 data in NetCDF files from November 2017 to present.\n\n[olci-l2]: https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-olci/level-2/ocean-products\n", "instrument": "OLCI", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,ocean,olci,sentinel,sentinel-3,sentinel-3-olci-wfr-l2-netcdf,sentinel-3a,sentinel-3b,water", "license": "proprietary", "title": "Sentinel-3 Water (Full Resolution)", "missionStartDate": "2017-11-01T00:07:01.738487Z"}, "noaa-cdr-ocean-heat-content-netcdf": {"abstract": "The Ocean Heat Content Climate Data Record (CDR) is a set of ocean heat content anomaly (OHCA) time-series for 1955-present on 3-monthly, yearly, and pentadal (five-yearly) scales. This CDR quantifies ocean heat content change over time, which is an essential metric for understanding climate change and the Earth's energy budget. It provides time-series for multiple depth ranges in the global ocean and each of the major basins (Atlantic, Pacific, and Indian) divided by hemisphere (Northern, Southern).\n\nThis is a NetCDF-only collection; for Cloud-Optimized GeoTIFFs, use collection `noaa-cdr-ocean-heat-content`.\nThe NetCDF files are delivered to Azure as part of the [NOAA Open Data Dissemination (NODD) Program](https://www.noaa.gov/information-technology/open-data-dissemination).\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "climate,global,noaa,noaa-cdr-ocean-heat-content-netcdf,ocean,temperature", "license": "proprietary", "title": "Global Ocean Heat Content CDR NetCDFs", "missionStartDate": "1972-03-01T00:00:00Z"}, "sentinel-3-synergy-aod-l2-netcdf": {"abstract": "This Collection provides the Sentinel-3 [Synergy Level-2 Aerosol Optical Depth](https://sentinels.copernicus.eu/web/sentinel/level-2-aod) product, which is a downstream development of the Sentinel-3 Level-1 [OLCI Full Resolution](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-olci/data-formats/level-1) and [SLSTR Radiances and Brightness Temperatures](https://sentinels.copernicus.eu/web/sentinel/user-guides/Sentinel-3-slstr/data-formats/level-1) products. The dataset provides both retrieved and diagnostic global aerosol parameters at super-pixel (4.5 km x 4.5 km) resolution in a single NetCDF file for all regions over land and ocean free of snow/ice cover, excluding high cloud fraction data. The retrieved and derived aerosol parameters are:\n\n- Aerosol Optical Depth (AOD) at 440, 550, 670, 985, 1600 and 2250 nm\n- Error estimates (i.e. standard deviation) in AOD at 440, 550, 670, 985, 1600 and 2250 nm\n- Single Scattering Albedo (SSA) at 440, 550, 670, 985, 1600 and 2250 nm\n- Fine-mode AOD at 550nm\n- Aerosol Angstrom parameter between 550 and 865nm\n- Dust AOD at 550nm\n- Aerosol absorption optical depth at 550nm\n\nAtmospherically corrected nadir surface directional reflectances at 440, 550, 670, 985, 1600 and 2250 nm at super-pixel (4.5 km x 4.5 km) resolution are also provided. 
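For orientation, the Angstrom parameter listed above summarizes how AOD varies with wavelength via the standard relation AOD(l1)/AOD(l2) = (l1/l2)^(-alpha); a worked sketch with made-up AOD values:

```python
# Sketch: Angstrom exponent from AOD at two wavelengths.
# The AOD values below are placeholders, not product data.
import math

aod_550, aod_865 = 0.20, 0.12
alpha = -math.log(aod_550 / aod_865) / math.log(550.0 / 865.0)
print(f"Angstrom exponent (550-865 nm): {alpha:.2f}")  # ~1.13 here
```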
More information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/level-2-aod) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-synergy/products-algorithms/level-2-aod-algorithms-and-products).\n\nThis Collection contains Level-2 data in NetCDF files from April 2020 to present.\n", "instrument": "OLCI,SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "aerosol,copernicus,esa,global,olci,satellite,sentinel,sentinel-3,sentinel-3-synergy-aod-l2-netcdf,sentinel-3a,sentinel-3b,slstr", "license": "proprietary", "title": "Sentinel-3 Global Aerosol", "missionStartDate": "2020-04-16T19:36:28.012367Z"}, "sentinel-3-synergy-v10-l2-netcdf": {"abstract": "This Collection provides the Sentinel-3 [Synergy Level-2 10-Day Surface Reflectance and NDVI](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-vg1-v10) products, which are SPOT VEGETATION Continuity Products similar to those obtained from the [VEGETATION instrument](https://docs.terrascope.be/#/Satellites/SPOT-VGT/MissionInstruments) onboard the SPOT-4 and SPOT-5 satellites. The primary variables are a maximum Normalized Difference Vegetation Index (NDVI) composite, which is derived from ground reflectance during a 10-day window, and four surface reflectance bands:\n\n- B0 (Blue, 450nm)\n- B2 (Red, 645nm)\n- B3 (NIR, 835nm)\n- MIR (SWIR, 1665nm)\n\nThe four reflectance bands have center wavelengths matching those on the original SPOT VEGETATION instrument. The NDVI variable, which is an indicator of the amount of vegetation, is derived from the B3 and B2 bands.\n\n## Data files\n\nThe four reflectance bands and NDVI values are each contained in dedicated NetCDF files. 
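Since the NDVI composite is derived from the B3 and B2 bands, the underlying relation is worth spelling out; a sketch using the standard NDVI definition, with placeholder reflectance values:

```python
# Sketch: NDVI from the B3 (NIR) and B2 (Red) reflectance bands,
# NDVI = (NIR - Red) / (NIR + Red). Values below are placeholders.
import numpy as np

b3 = np.array([0.45, 0.40, 0.15])  # NIR reflectance
b2 = np.array([0.08, 0.10, 0.12])  # red reflectance
ndvi = (b3 - b2) / (b3 + b2)
print(ndvi)  # values near +1 indicate dense green vegetation
```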
Additional metadata are delivered in annotation NetCDF files, each containing a single variable, including the geometric viewing and illumination conditions, the total water vapour and ozone columns, and the aerosol optical depth.\n\nEach 10-day product is delivered as a set of 10 rectangular scenes:\n\n- AFRICA\n- NORTH_AMERICA\n- SOUTH_AMERICA\n- CENTRAL_AMERICA\n- NORTH_ASIA\n- WEST_ASIA\n- SOUTH_EAST_ASIA\n- ASIAN_ISLANDS\n- AUSTRALASIA\n- EUROPE\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-vg1-v10) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-synergy/vgt-s/v10-product).\n\nThis Collection contains Level-2 data in NetCDF files from September 2018 to present.\n", "instrument": "OLCI,SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,ndvi,olci,reflectance,satellite,sentinel,sentinel-3,sentinel-3-synergy-v10-l2-netcdf,sentinel-3a,sentinel-3b,slstr", "license": "proprietary", "title": "Sentinel-3 10-Day Surface Reflectance and NDVI (SPOT VEGETATION)", "missionStartDate": "2018-09-27T11:17:21Z"}, "sentinel-3-olci-lfr-l2-netcdf": {"abstract": "This collection provides Sentinel-3 Full Resolution [OLCI Level-2 Land][olci-l2] products containing data on global vegetation, chlorophyll, and water vapor.\n\n## Data files\n\nThis dataset includes data on three primary variables:\n\n* OLCI global vegetation index file\n* terrestrial Chlorophyll index file\n* integrated water vapor over water file.\n\nEach variable is contained within a separate NetCDF file, and is cataloged as an asset in each Item.\n\nSeveral associated variables are also provided in the annotations data files:\n\n* rectified reflectance for red and NIR channels (RC681 and RC865)\n* classification, quality and science flags (LQSF)\n* common data such as the ortho-geolocation of land pixels, solar and satellite angles, atmospheric and meteorological data, time stamp or instrument information. These variables are inherited from Level-1B products.\n\nThis full resolution product offers a spatial sampling of approximately 300 m.\n\n## Processing overview\n\nThe values in the data files have been converted from Top of Atmosphere radiance to reflectance, and include various corrections for gaseous absorption and pixel classification. 
More information about the product and data processing can be found in the [User Guide](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-olci/product-types/level-2-land) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-olci/level-2/processing).\n\nThis Collection contains Level-2 data in NetCDF files from April 2016 to present.\n\n[olci-l2]: https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-olci/level-2/land-products\n", "instrument": "OLCI", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "biomass,copernicus,esa,land,olci,sentinel,sentinel-3,sentinel-3-olci-lfr-l2-netcdf,sentinel-3a,sentinel-3b", "license": "proprietary", "title": "Sentinel-3 Land (Full Resolution)", "missionStartDate": "2016-04-25T11:33:47.368562Z"}, "sentinel-3-sral-lan-l2-netcdf": {"abstract": "This Collection provides Sentinel-3 [SRAL Level-2 Land Altimetry](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-altimetry/level-2-algorithms-products) products, which contain data on land radar altimetry measurements. Each product contains three NetCDF files:\n\n- A reduced data file containing a subset of the 1 Hz Ku-band parameters.\n- A standard data file containing the standard 1 Hz and 20 Hz Ku- and C-band parameters.\n- An enhanced data file containing the standard 1 Hz and 20 Hz Ku- and C-band parameters along with the waveforms and parameters necessary to reprocess the data.\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-altimetry/overview) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-altimetry).\n\nThis Collection contains Level-2 data in NetCDF files from March 2016 to present.\n", "instrument": "SRAL", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "altimetry,copernicus,esa,radar,satellite,sentinel,sentinel-3,sentinel-3-sral-lan-l2-netcdf,sentinel-3a,sentinel-3b,sral", "license": "proprietary", "title": "Sentinel-3 Land Radar Altimetry", "missionStartDate": "2016-03-01T14:07:51.632846Z"}, "sentinel-3-slstr-lst-l2-netcdf": {"abstract": "This Collection provides Sentinel-3 [SLSTR Level-2 Land Surface Temperature](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-slstr/product-types/level-2-lst) products containing data on land surface temperature measurements on a 1km grid. Radiance is measured in two channels to determine the temperature of the Earth's surface skin in the instrument field of view, where the term \"skin\" refers to the top surface of bare soil or the effective emitting temperature of vegetation canopies as viewed from above.\n\n## Data files\n\nThe dataset includes data on the primary measurement variable, land surface temperature, in a single NetCDF file, `LST_in.nc`. A second file, `LST_ancillary.nc`, contains several ancillary variables:\n\n- Normalized Difference Vegetation Index\n- Surface biome classification\n- Fractional vegetation cover\n- Total water vapor column\n\nIn addition to the primary and ancillary data files, a standard set of annotation data files provide meteorological information, geolocation and time coordinates, geometry information, and quality flags. 
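A minimal sketch of reading the two data files named above after download follows; the local paths are assumptions, and only the file names come from the product description:

```python
# Sketch: open the primary and ancillary files from a downloaded product.
import xarray as xr

lst = xr.open_dataset("LST_in.nc")         # land surface temperature
anc = xr.open_dataset("LST_ancillary.nc")  # NDVI, biome, vegetation cover, water vapor
print(list(lst.data_vars), list(anc.data_vars))
```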
More information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-slstr/product-types/level-2-lst) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-slstr/level-2/lst-processing).\n\nThis Collection contains Level-2 data in NetCDF files from April 2016 to present.\n\n## STAC Item geometries\n\nThe Collection contains small \"chips\" and long \"stripes\" of data collected along the satellite direction of travel. Approximately five percent of the STAC Items describing long stripes of data contain geometries that encompass a larger area than an exact concave hull of the data extents. This may require additional filtering when searching the Collection for Items that spatially intersect an area of interest.\n", "instrument": "SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,land,satellite,sentinel,sentinel-3,sentinel-3-slstr-lst-l2-netcdf,sentinel-3a,sentinel-3b,slstr,temperature", "license": "proprietary", "title": "Sentinel-3 Land Surface Temperature", "missionStartDate": "2016-04-19T01:35:17.188500Z"}, "sentinel-3-slstr-wst-l2-netcdf": {"abstract": "This Collection provides Sentinel-3 [SLSTR Level-2 Water Surface Temperature](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-slstr/product-types/level-2-wst) products containing data on sea surface temperature measurements on a 1km grid. Each product consists of a single NetCDF file containing all data variables:\n\n- Sea Surface Temperature (SST) value\n- SST total uncertainty\n- Latitude and longitude coordinates\n- SST time deviation\n- Single Sensor Error Statistic (SSES) bias and standard deviation estimate\n- Contextual parameters such as wind speed at 10 m and fractional sea-ice contamination\n- Quality flag\n- Satellite zenith angle\n- Top Of Atmosphere (TOA) Brightness Temperature (BT)\n- TOA noise equivalent BT\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-slstr/product-types/level-2-wst) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-slstr/level-2/sst-processing).\n\nThis Collection contains Level-2 data in NetCDF files from October 2017 to present.\n", "instrument": "SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,ocean,satellite,sentinel,sentinel-3,sentinel-3-slstr-wst-l2-netcdf,sentinel-3a,sentinel-3b,slstr,temperature", "license": "proprietary", "title": "Sentinel-3 Sea Surface Temperature", "missionStartDate": "2017-10-31T23:59:57.451604Z"}, "sentinel-3-sral-wat-l2-netcdf": {"abstract": "This Collection provides Sentinel-3 [SRAL Level-2 Ocean Altimetry](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-altimetry/level-2-algorithms-products) products, which contain data on ocean radar altimetry measurements. 
Each product contains three NetCDF files:\n\n- A reduced data file containing a subset of the 1 Hz Ku-band parameters.\n- A standard data file containing the standard 1 Hz and 20 Hz Ku- and C-band parameters.\n- An enhanced data file containing the standard 1 Hz and 20 Hz Ku- and C-band parameters along with the waveforms and parameters necessary to reprocess the data.\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-altimetry/overview) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-altimetry).\n\nThis Collection contains Level-2 data in NetCDF files from January 2017 to present.\n", "instrument": "SRAL", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "altimetry,copernicus,esa,ocean,radar,satellite,sentinel,sentinel-3,sentinel-3-sral-wat-l2-netcdf,sentinel-3a,sentinel-3b,sral", "license": "proprietary", "title": "Sentinel-3 Ocean Radar Altimetry", "missionStartDate": "2017-01-28T00:59:14.149496Z"}, "ms-buildings": {"abstract": "Bing Maps is releasing open building footprints around the world. We have detected over 999 million buildings from Bing Maps imagery between 2014 and 2021, including Maxar and Airbus imagery. The data is freely available for download and use under ODbL. This dataset complements our other releases.\n\nFor more information, see the [GlobalMLBuildingFootprints](https://github.com/microsoft/GlobalMLBuildingFootprints/) repository on GitHub.\n\n## Building footprint creation\n\nThe building extraction is done in two stages:\n\n1. Semantic Segmentation \u2013 Recognizing building pixels on an aerial image using deep neural networks (DNNs)\n2. Polygonization \u2013 Converting building pixel detections into polygons\n\n**Stage 1: Semantic Segmentation**\n\n![Semantic segmentation](https://raw.githubusercontent.com/microsoft/GlobalMLBuildingFootprints/main/images/segmentation.jpg)\n\n**Stage 2: Polygonization**\n\n![Polygonization](https://github.com/microsoft/GlobalMLBuildingFootprints/raw/main/images/polygonization.jpg)\n\n## Data assets\n\nThe building footprints are provided as a set of [geoparquet](https://github.com/opengeospatial/geoparquet) datasets in [Delta][delta] table format.\nThe data are partitioned by\n\n1. Region\n2. quadkey at [Bing Map Tiles][tiles] level 9\n\nEach `(Region, quadkey)` pair will have one or more geoparquet files, depending on the density of the buildings in that area.\n\nNote that older items in this dataset are *not* spatially partitioned. We recommend using data with a processing date\nof 2023-04-25 or newer. This processing date is part of the URL for each parquet file and is captured in the STAC metadata\nfor each item (see below).\n\n## Delta Format\n\nThe collection-level asset under the `delta` key gives you the fsspec-style URL\nto the Delta table. This can be used to efficiently query for matching partitions\nby `Region` and `quadkey`. See the notebook for an example using Python.\n\n## STAC metadata\n\nThis STAC collection has one STAC item per region. The `msbuildings:region`\nproperty can be used to filter items to a specific region, and the `msbuildings:quadkey`\nproperty can be used to filter items to a specific quadkey (though you can also search\nby the `geometry`).\n\nNote that older STAC items are not spatially partitioned. 
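For example, items can be narrowed to a single region with a property filter; a sketch assuming a STAC API endpoint that implements the query extension (the endpoint URL and the region value are assumptions):

```python
# Sketch: filter STAC items by the msbuildings:region property.
import pystac_client

catalog = pystac_client.Client.open(
    "https://planetarycomputer.microsoft.com/api/stac/v1"  # assumed endpoint
)
search = catalog.search(
    collections=["ms-buildings"],
    query={"msbuildings:region": {"eq": "Abyei"}},  # assumed region value
    max_items=5,
)
for item in search.items():
    print(item.id, item.properties.get("msbuildings:processing-date"))
```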
We recommend filtering on\nitems with an `msbuildings:processing-date` of `2023-04-25` or newer. See the collection\nsummary for `msbuildings:processing-date` for a list of valid values.\n\n[delta]: https://delta.io/\n[tiles]: https://learn.microsoft.com/en-us/bingmaps/articles/bing-maps-tile-system\n", "instrument": null, "platform": null, "platformSerialIdentifier": null, "processingLevel": null, "keywords": "bing-maps,buildings,delta,footprint,geoparquet,microsoft,ms-buildings", "license": "ODbL-1.0", "title": "Microsoft Building Footprints", "missionStartDate": "2014-01-01T00:00:00Z"}, "sentinel-3-slstr-frp-l2-netcdf": {"abstract": "This Collection provides Sentinel-3 [SLSTR Level-2 Fire Radiative Power](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-slstr/product-types/level-2-frp) (FRP) products containing data on fires detected over land and ocean.\n\n## Data files\n\nThe primary measurement data is contained in the `FRP_in.nc` file and provides FRP and uncertainties, projected onto a 1km grid, for fires detected in the thermal infrared (TIR) spectrum over land. Since February 2022, FRP and uncertainties are also provided for fires detected in the short wave infrared (SWIR) spectrum over both land and ocean, with the delivered data projected onto a 500m grid. The latter SWIR-detected fire data is only available for night-time measurements and is contained in the `FRP_an.nc` or `FRP_bn.nc` files.\n\nIn addition to the measurement data files, a standard set of annotation data files provide meteorological information, geolocation and time coordinates, geometry information, and quality flags.\n\n## Processing\n\nThe TIR fire detection is based on measurements from the S7 and F1 bands of the [SLSTR instrument](https://sentinels.copernicus.eu/web/sentinel/technical-guides/sentinel-3-slstr/instrument); SWIR fire detection is based on the S5 and S6 bands. 
More information about the product and data processing can be found in the [User Guide](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-3-slstr/product-types/level-2-frp) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-slstr/level-2/frp-processing).\n\nThis Collection contains Level-2 data in NetCDF files from August 2020 to present.\n", "instrument": "SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,fire,satellite,sentinel,sentinel-3,sentinel-3-slstr-frp-l2-netcdf,sentinel-3a,sentinel-3b,slstr,temperature", "license": "proprietary", "title": "Sentinel-3 Fire Radiative Power", "missionStartDate": "2020-08-08T23:11:15.617203Z"}, "sentinel-3-synergy-syn-l2-netcdf": {"abstract": "This Collection provides the Sentinel-3 [Synergy Level-2 Land Surface Reflectance and Aerosol](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-syn) product, which contains data on Surface Directional Reflectance, Aerosol Optical Thickness, and an Angstrom coefficient estimate over land.\n\n## Data Files\n\nIndividual NetCDF files for the following variables:\n\n- Surface Directional Reflectance (SDR) with their associated error estimates for the sun-reflective [SLSTR](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-slstr) channels (S1 to S6 for both nadir and oblique views, except S4) and for all [OLCI](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-olci) channels, except for the oxygen absorption bands Oa13, Oa14, Oa15, and the water vapor bands Oa19 and Oa20.\n- Aerosol optical thickness at 550nm with error estimates.\n- Angstrom coefficient at 550nm.\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-syn) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-synergy/level-2/syn-level-2-product).\n\nThis Collection contains Level-2 data in NetCDF files from September 2018 to present.\n", "instrument": "OLCI,SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "aerosol,copernicus,esa,land,olci,reflectance,satellite,sentinel,sentinel-3,sentinel-3-synergy-syn-l2-netcdf,sentinel-3a,sentinel-3b,slstr", "license": "proprietary", "title": "Sentinel-3 Land Surface Reflectance and Aerosol", "missionStartDate": "2018-09-22T16:51:00.001276Z"}, "sentinel-3-synergy-vgp-l2-netcdf": {"abstract": "This Collection provides the Sentinel-3 [Synergy Level-2 Top of Atmosphere Reflectance](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-vgp) product, which is a SPOT VEGETATION Continuity Product containing measurement data similar to that obtained by the [VEGETATION instrument](https://docs.terrascope.be/#/Satellites/SPOT-VGT/MissionInstruments) onboard the SPOT-4 and SPOT-5 satellites. 
The primary variables are four top of atmosphere reflectance bands:\n\n- B0 (Blue, 450nm)\n- B2 (Red, 645nm)\n- B3 (NIR, 835nm)\n- MIR (SWIR, 1665nm)\n\nThe four reflectance bands have center wavelengths matching those on the original SPOT VEGETATION instrument and have been adapted for scientific applications requiring highly accurate physical measurements through correction for systematic errors and re-sampling to predefined geographic projections. The pixel brightness count is the ground area's apparent reflectance as seen at the top of atmosphere.\n\n## Data files\n\nNetCDF files are provided for the four reflectance bands. Additional metadata are delivered in annotation NetCDF files, each containing a single variable, including the geometric viewing and illumination conditions, the total water vapour and ozone columns, and the aerosol optical depth.\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-vgp) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-synergy/level-2/vgt-p-product).\n\nThis Collection contains Level-2 data in NetCDF files from October 2018 to present.\n", "instrument": "OLCI,SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,olci,reflectance,satellite,sentinel,sentinel-3,sentinel-3-synergy-vgp-l2-netcdf,sentinel-3a,sentinel-3b,slstr", "license": "proprietary", "title": "Sentinel-3 Top of Atmosphere Reflectance (SPOT VEGETATION)", "missionStartDate": "2018-10-08T08:09:40.491227Z"}, "sentinel-3-synergy-vg1-l2-netcdf": {"abstract": "This Collection provides the Sentinel-3 [Synergy Level-2 1-Day Surface Reflectance and NDVI](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-vg1-v10) products, which are SPOT VEGETATION Continuity Products similar to those obtained from the [VEGETATION instrument](https://docs.terrascope.be/#/Satellites/SPOT-VGT/MissionInstruments) onboard the SPOT-4 and SPOT-5 satellites. The primary variables are a maximum Normalized Difference Vegetation Index (NDVI) composite, which is derived from daily ground reflectance, and four surface reflectance bands:\n\n- B0 (Blue, 450nm)\n- B2 (Red, 645nm)\n- B3 (NIR, 835nm)\n- MIR (SWIR, 1665nm)\n\nThe four reflectance bands have center wavelengths matching those on the original SPOT VEGETATION instrument. The NDVI variable, which is an indicator of the amount of vegetation, is derived from the B3 and B2 bands.\n\n## Data files\n\nThe four reflectance bands and NDVI values are each contained in dedicated NetCDF files.
Additional metadata are delivered in annotation NetCDF files, each containing a single variable, including the geometric viewing and illumination conditions, the total water vapour and ozone columns, and the aerosol optical depth.\n\nEach 1-day product is delivered as a set of 10 rectangular scenes:\n\n- AFRICA\n- NORTH_AMERICA\n- SOUTH_AMERICA\n- CENTRAL_AMERICA\n- NORTH_ASIA\n- WEST_ASIA\n- SOUTH_EAST_ASIA\n- ASIAN_ISLANDS\n- AUSTRALASIA\n- EUROPE\n\nMore information about the product and data processing can be found in the [User Guide](https://sentinels.copernicus.eu/web/sentinel/user-guides/sentinel-3-synergy/product-types/level-2-vg1-v10) and [Technical Guide](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-synergy/vgt-s/vg1-product-surface-reflectance).\n\nThis Collection contains Level-2 data in NetCDF files from October 2018 to present.\n", "instrument": "OLCI,SLSTR", "platform": "Sentinel-3", "platformSerialIdentifier": "Sentinel-3A,Sentinel-3B", "processingLevel": null, "keywords": "copernicus,esa,ndvi,olci,reflectance,satellite,sentinel,sentinel-3,sentinel-3-synergy-vg1-l2-netcdf,sentinel-3a,sentinel-3b,slstr", "license": "proprietary", "title": "Sentinel-3 1-Day Surface Reflectance and NDVI (SPOT VEGETATION)", "missionStartDate": "2018-10-04T23:17:21Z"}, "esa-worldcover": {"abstract": "The European Space Agency (ESA) [WorldCover](https://esa-worldcover.org/en) product provides global land cover maps for the years 2020 and 2021 at 10 meter resolution based on the combination of [Sentinel-1](https://sentinel.esa.int/web/sentinel/missions/sentinel-1) radar data and [Sentinel-2](https://sentinel.esa.int/web/sentinel/missions/sentinel-2) imagery. The discrete classification maps provide 11 classes defined using the Land Cover Classification System (LCCS) developed by the United Nations (UN) Food and Agriculture Organization (FAO). The map images are stored in [cloud-optimized GeoTIFF](https://www.cogeo.org/) format.\n\nThe WorldCover product is developed by a consortium of European service providers and research organizations. 
[VITO](https://remotesensing.vito.be/) (Belgium) is the prime contractor of the WorldCover consortium together with [Brockmann Consult](https://www.brockmann-consult.de/) (Germany), [CS SI](https://www.c-s.fr/) (France), [Gamma Remote Sensing AG](https://www.gamma-rs.ch/) (Switzerland), [International Institute for Applied Systems Analysis](https://www.iiasa.ac.at/) (Austria), and [Wageningen University](https://www.wur.nl/nl/Wageningen-University.htm) (The Netherlands).\n\nTwo versions of the WorldCover product are available:\n\n- WorldCover 2020 produced using v100 of the algorithm\n - [WorldCover 2020 v100 User Manual](https://esa-worldcover.s3.eu-central-1.amazonaws.com/v100/2020/docs/WorldCover_PUM_V1.0.pdf)\n - [WorldCover 2020 v100 Validation Report]()\n\n- WorldCover 2021 produced using v200 of the algorithm\n - [WorldCover 2021 v200 User Manual]()\n - [WorldCover 2021 v200 Validation Report]()\n\nSince the WorldCover maps for 2020 and 2021 were generated with different algorithm versions (v100 and v200, respectively), changes between the maps include both changes in real land cover and changes due to the used algorithms.\n", "instrument": "c-sar,msi", "platform": null, "platformSerialIdentifier": "sentinel-1a,sentinel-1b,sentinel-2a,sentinel-2b", "processingLevel": null, "keywords": "c-sar,esa,esa-worldcover,global,land-cover,msi,sentinel,sentinel-1a,sentinel-1b,sentinel-2a,sentinel-2b", "license": "CC-BY-4.0", "title": "ESA WorldCover", "missionStartDate": "2020-01-01T00:00:00Z"}}}, "usgs_satapi_aws": {"providers_config": {"landsat-c2l2-sr": {"productType": "landsat-c2l2-sr"}, "landsat-c2l2-st": {"productType": "landsat-c2l2-st"}, "landsat-c2ard-st": {"productType": "landsat-c2ard-st"}, "landsat-c2l2alb-bt": {"productType": "landsat-c2l2alb-bt"}, "landsat-c2l3-fsca": {"productType": "landsat-c2l3-fsca"}, "landsat-c2ard-bt": {"productType": "landsat-c2ard-bt"}, "landsat-c2l1": {"productType": "landsat-c2l1"}, "landsat-c2l3-ba": {"productType": "landsat-c2l3-ba"}, "landsat-c2l2alb-st": {"productType": "landsat-c2l2alb-st"}, "landsat-c2ard-sr": {"productType": "landsat-c2ard-sr"}, "landsat-c2l2alb-sr": {"productType": "landsat-c2l2alb-sr"}, "landsat-c2l2alb-ta": {"productType": "landsat-c2l2alb-ta"}, "landsat-c2l3-dswe": {"productType": "landsat-c2l3-dswe"}, "landsat-c2ard-ta": {"productType": "landsat-c2ard-ta"}}, "product_types_config": {"landsat-c2l2-sr": {"abstract": "The Landsat Surface Reflectance (SR) product measures the fraction of incoming solar radiation that is reflected from Earth's surface to the Landsat sensor.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l2-sr,surface-reflectance", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-2 UTM Surface Reflectance (SR) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l2-st": {"abstract": "The Landsat Surface Temperature (ST) product represents the temperature of the Earth's surface in Kelvin (K).", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l2-st,surface-temperature", "license":
"https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-2 UTM Surface Temperature (ST) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2ard-st": {"abstract": "The Landsat Surface Temperature (ST) product represents the temperature of the Earth's surface in Kelvin (K).", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2ard-st,surface-temperature", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Analysis Ready Data (ARD) Level-2 UTM Surface Temperature (ST) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l2alb-bt": {"abstract": "The Landsat Top of Atmosphere Brightness Temperature (BT) product is a top of atmosphere product with radiance calculated 'at-sensor', not atmospherically corrected, and expressed in units of Kelvin.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l2alb-bt,top-of-atmosphere-brightness-temperature", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-2 Albers Top of Atmosphere Brightness Temperature (BT) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l3-fsca": {"abstract": "The Landsat Fractional Snow Covered Area (fSCA) product contains an acquisition-based per-pixel snow cover fraction, an acquisition-based revised cloud mask for quality assessment, and a product metadata file.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,fractional-snow-covered-area,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l3-fsca", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-3 Fractional Snow Covered Area (fSCA) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2ard-bt": {"abstract": "The Landsat Top of Atmosphere Brightness Temperature (BT) product is a top of atmosphere product with radiance calculated 'at-sensor', not atmospherically corrected, and expressed in units of Kelvin.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2ard-bt,top-of-atmosphere-brightness-temperature", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Analysis Ready Data (ARD) Level-2 UTM Top of Atmosphere Brightness Temperature (BT) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l1": {"abstract": "The Landsat Level-1 product is a top of atmosphere product distributed as scaled and calibrated digital numbers.", "instrument": null, 
"platform": null, "platformSerialIdentifier": "LANDSAT_1,LANDSAT_2,LANDSAT_3,LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-1,landsat-2,landsat-3,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l1", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-1 Product", "missionStartDate": "1972-07-25T00:00:00.000Z"}, "landsat-c2l3-ba": {"abstract": "The Landsat Burned Area (BA) contains two acquisition-based raster data products that represent burn classification and burn probability.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,burned-area,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l3-ba", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-3 Burned Area (BA) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l2alb-st": {"abstract": "The Landsat Surface Temperature (ST) product represents the temperature of the Earth's surface in Kelvin (K).", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l2alb-st,surface-temperature", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-2 Albers Surface Temperature (ST) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2ard-sr": {"abstract": "The Landsat Surface Reflectance (SR) product measures the fraction of incoming solar radiation that is reflected from Earth's surface to the Landsat sensor.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2ard-sr,surface-reflectance", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Analysis Ready Data (ARD) Level-2 UTM Surface Reflectance (SR) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l2alb-sr": {"abstract": "The Landsat Surface Reflectance (SR) product measures the fraction of incoming solar radiation that is reflected from Earth's surface to the Landsat sensor.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l2alb-sr,surface-reflectance", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-2 Albers Surface Reflectance (SR) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l2alb-ta": {"abstract": "The Landsat Top of Atmosphere (TA) Reflectance product applies per pixel angle band corrections to the Level-1 radiance product.", "instrument": null, "platform": null, "platformSerialIdentifier": 
"LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l2alb-ta,top-of-atmosphere-reflectance", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-2 Albers Top of Atmosphere (TA) Reflectance Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2l3-dswe": {"abstract": "The Landsat Dynamic Surface Water Extent (DSWE) product contains six acquisition-based raster data products pertaining to the existence and condition of surface water.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,dynamic-surface-water-extent-,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2l3-dswe", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Level-3 Dynamic Surface Water Extent (DSWE) Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}, "landsat-c2ard-ta": {"abstract": "The Landsat Top of Atmosphere (TA) Reflectance product applies per pixel angle band corrections to the Level-1 radiance product.", "instrument": null, "platform": null, "platformSerialIdentifier": "LANDSAT_4,LANDSAT_5,LANDSAT_7,LANDSAT_8,LANDSAT_9", "processingLevel": null, "keywords": "analysis-ready-data,landsat,landsat-4,landsat-5,landsat-7,landsat-8,landsat-9,landsat-c2ard-ta,top-of-atmosphere-reflectance", "license": "https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/Landsat_Data_Policy.pdf", "title": "Landsat Collection 2 Analysis Ready Data (ARD) Level-2 UTM Top of Atmosphere (TA) Reflectance Product", "missionStartDate": "1982-08-22T00:00:00.000Z"}}}} diff --git a/eodag/resources/product_types.yml b/eodag/resources/product_types.yml index 07f5c9d01..b3df177e7 100644 --- a/eodag/resources/product_types.yml +++ b/eodag/resources/product_types.yml @@ -2068,62 +2068,291 @@ TIGGE_CF_SFC: missionStartDate: "2003-01-01T00:00:00Z" # COPERNICUS ADS ---------------------------------------------------------------------- -CAMS_GACF_AOT: +CAMS_GAC_FORECAST: + abstract: | + CAMS produces global forecasts for atmospheric composition twice a day. + The forecasts consist of more than 50 chemical species (e.g. ozone, nitrogen dioxide, carbon monoxide) and seven different types of aerosol (desert dust, sea salt, organic matter, black carbon, sulphate, nitrate and ammonium aerosol). + In addition, several meteorological variables are available as well. + The initial conditions of each forecast are obtained by combining a previous forecast with current satellite observations through a process called data assimilation. + This best estimate of the state of the atmosphere at the initial forecast time step, called the analysis, provides a globally complete and consistent dataset allowing for estimates at locations where observation data coverage is low or for atmospheric pollutants for which no direct observations are available. + The forecast itself uses a model of the atmosphere based on the laws of physics and chemistry to determine the evolution of the concentrations of all species over time for the next five days. 
+ Apart from the required initial state, it also uses inventory-based or observation-based emission estimates as a boundary condition at the surface. + The CAMS global forecasting system is upgraded about once a year resulting in technical and scientific changes. + The horizontal or vertical resolution can change, new species can be added, and more generally the accuracy of the forecasts can be improved. + Details of these system changes can be found in the documentation. + Users looking for a more consistent long-term data set should consider using the CAMS Global Reanalysis instead, which is available through the ADS and spans the period from 2003 onwards. + Finally, because some meteorological fields in the forecast do not fall within the general CAMS data licence, they are only available with a delay of 5 days. + instrument: + platform: CAMS + platformSerialIdentifier: CAMS + processingLevel: + keywords: Copernicus,ADS,CAMS,Atmosphere,Atmospheric,Forecast,GAC + sensorType: ATMOSPHERIC + license: proprietary + title: CAMS global atmospheric composition forecasts + missionStartDate: "2015-01-02T00:00:00Z" + +CAMS_EU_AIR_QUALITY_FORECAST: abstract: | - CAMS (Copernicus Atmosphere Monitoring Service) Global Atmospheric Composition Forecast - of Aerosol Optical Thickness from Copernicus ADS + This dataset provides daily air quality analyses and forecasts for Europe. + CAMS produces specific daily air quality analyses and forecasts for the European domain at significantly higher spatial resolution (0.1 degrees, approx. 10km) than is available from the global analyses and forecasts. + The production is based on an ensemble of eleven air quality forecasting systems across Europe. + A median ensemble is calculated from individual outputs, since ensemble products yield on average better performance than the individual model products. + The spread between the eleven models is used to provide an estimate of the forecast uncertainty. + The analysis combines model data with observations provided by the European Environment Agency (EEA) into a complete and consistent dataset using various data assimilation techniques depending upon the air-quality forecasting system used. + In parallel, air quality forecasts are produced once a day for the next four days. + Both the analysis and the forecast are available at hourly time steps at seven height levels. + Note that only nitrogen monoxide, nitrogen dioxide, sulphur dioxide, ozone, PM2.5, PM10 and dust are regularly validated against in situ observations, and therefore forecasts of all other variables are unvalidated and should be considered experimental. instrument: platform: CAMS platformSerialIdentifier: CAMS processingLevel: - keywords: Copernicus,Atmosphere,Atmospheric,Forecast,CAMS,GACF,AOT,ADS + keywords: Copernicus,ADS,CAMS,Atmosphere,Atmospheric,Air,Forecast,EEA sensorType: ATMOSPHERIC license: proprietary - title: CAMS GACF Aerosol Optical Thickness - missionStartDate: "2003-01-01T00:00:00Z" + title: CAMS European air quality forecasts + missionStartDate: "2021-01-01T00:00:00Z" -CAMS_GACF_RH: +CAMS_GFE_GFAS: abstract: | - CAMS (Copernicus Atmosphere Monitoring Service) Global Atmospheric Composition Forecast - of Relative Humidity from Copernicus ADS + Emissions of atmospheric pollutants from biomass burning and vegetation fires are key drivers of the evolution of atmospheric composition, with a high degree of spatial and temporal variability, and an accurate representation of them in models is essential.
+ The CAMS Global Fire Assimilation System (GFAS) utilises satellite observations of fire radiative power (FRP) to provide near-real-time information on the location, relative intensity and estimated emissions from biomass burning and vegetation fires. + Emissions are estimated by (i) conversion of FRP observations to the dry matter (DM) consumed by the fire, and (ii) application of emission factors to DM for different biomes, based on field and laboratory studies in the scientific literature, to estimate the emissions. + Emissions estimates for 40 pyrogenic species are available from GFAS, including aerosols, reactive gases and greenhouse gases, on a regular grid with a spatial resolution of 0.1 degrees longitude by 0.1 degrees latitude. + This version of GFAS (v1.2) provides daily averaged data based on a combination of FRP observations from two Moderate Resolution Imaging Spectroradiometer (MODIS) instruments, one on the NASA EOS-Terra satellite and the other on the NASA EOS-Aqua satellite from 1 January 2003 to present. GFAS also provides daily estimates of smoke plume injection heights derived from FRP observations and meteorological information from the operational weather forecasts from ECMWF. + GFAS data have been used to provide surface boundary conditions for the CAMS global atmospheric composition and European regional air quality forecasts, and the wider atmospheric chemistry modelling community. instrument: platform: CAMS platformSerialIdentifier: CAMS processingLevel: - keywords: Copernicus,Atmosphere,Atmospheric,Forecast,CAMS,GACF,RH,ADS + keywords: Copernicus,ADS,CAMS,Atmosphere,Atmospheric,Fire,FRP,DM,MODIS,NASA,EOS,ECMWF,GFAS sensorType: ATMOSPHERIC license: proprietary - title: CAMS GACF Relative Humidity + title: CAMS global biomass burning emissions based on fire radiative power (GFAS) missionStartDate: "2003-01-01T00:00:00Z" -CAMS_GACF_MR: +CAMS_SOLAR_RADIATION: abstract: | - CAMS (Copernicus Atmosphere Monitoring Service) Global Atmospheric Composition Forecast - of Mixing Ratios from Copernicus ADS + The CAMS solar radiation services provide historical values (2004 to present) of global (GHI), direct (BHI) and diffuse (DHI) solar irradiation, as well as direct normal irradiation (BNI). + The aim is to fulfil the needs of European and national policy development and the requirements of both commercial and public downstream services, e.g. for planning, monitoring, efficiency improvements and the integration of solar energy systems into energy supply grids. + For clear-sky conditions, an irradiation time series is provided for any location in the world using information on aerosol, ozone and water vapour from the CAMS global forecasting system. + Other properties, such as ground albedo and ground elevation, are also taken into account. + Similar time series are available for cloudy (or "all sky") conditions but, since the high-resolution cloud information is directly inferred from satellite observations, these are currently only available inside the field-of-view of the Meteosat Second Generation (MSG) satellite, which is roughly Europe, Africa, the Atlantic Ocean and the Middle East. + Data is offered in both ASCII and netCDF format. + Additionally, an ASCII "expert mode" format can be selected which contains in addition to the irradiation, all the input data used in their calculation (aerosol optical properties, water vapour concentration, etc). 
+ This additional information is only meaningful in the time frame at which the calculation is performed and so is only available at 1-minute time steps in universal time (UT). instrument: platform: CAMS platformSerialIdentifier: CAMS processingLevel: - keywords: Copernicus,Atmosphere,Atmospheric,Forecast,CAMS,GACF,MR,ADS + keywords: Copernicus,ADS,CAMS,Solar,Radiation sensorType: ATMOSPHERIC license: proprietary - title: CAMS GACF Mixing Ratios + title: CAMS solar radiation time-series + missionStartDate: "2004-01-02T00:00:00Z" + +CAMS_GREENHOUSE_INVERSION: + abstract: | + This data set contains net fluxes at the surface, atmospheric mixing ratios at model levels, and column-mean atmospheric mixing ratios for carbon dioxide (CO2), methane (CH4) and nitrous oxide (N2O). + Natural and anthropogenic surface fluxes of greenhouse gases are key drivers of the evolution of Earth’s climate, so their monitoring is essential. + Such information has been used in particular as part of the Assessment Reports of the Intergovernmental Panel on Climate Change (IPCC). + Ground-based and satellite remote-sensing observations provide a means of quantifying the net fluxes between the land and ocean on the one hand and the atmosphere on the other hand. + This is done through a process called atmospheric inversion, which uses transport models of the atmosphere to link the observed concentrations of CO2, CH4 and N2O to the net fluxes at the Earth's surface. + By correctly modelling the winds, vertical diffusion, and convection in the global atmosphere, the observed concentrations of the greenhouse gases are used to infer the surface fluxes for the last few decades. + For CH4 and N2O, the flux inversions also account for the chemical loss of these greenhouse gases. The net fluxes include contributions from the natural biosphere (e.g., vegetation, wetlands) as well as anthropogenic contributions (e.g., fossil fuel emissions, rice fields). + The data sets for the three species are updated once or twice per year adding the most recent year to the data record, while re-processing the original data record for consistency. + This is reflected by the different version numbers. In addition, fluxes for methane are available based on surface air samples only or based on a combination of surface air samples and satellite observations (reflected by an 's' in the version number). + instrument: + platform: CAMS + platformSerialIdentifier: CAMS + processingLevel: + keywords: Copernicus,ADS,CAMS,Atmosphere,Atmospheric,IPCC,CO2,CH4,N2O + sensorType: ATMOSPHERIC + license: proprietary + title: CAMS global inversion-optimised greenhouse gas fluxes and concentrations + missionStartDate: "1979-01-01T00:00:00Z" + +CAMS_EAC4_MONTHLY: + abstract: | + EAC4 (ECMWF Atmospheric Composition Reanalysis 4) is the fourth generation ECMWF global reanalysis of atmospheric composition. + Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using a model of the atmosphere based on the laws of physics and chemistry. + This principle, called data assimilation, is based on the method used by numerical weather prediction centres and air quality forecasting centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued.
+ Reanalysis works in the same way to allow for the provision of a dataset spanning back more than a decade. + Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. + The assimilation system is able to estimate biases between observations and to sift good-quality data from poor data. + The atmosphere model allows for estimates at locations where data coverage is low or for atmospheric pollutants for which no direct observations are available. + The provision of estimates at each grid point around the globe for each regular output time, over a long period, always using the same format, makes reanalysis a very convenient and popular dataset to work with. + The observing system has changed drastically over time, and although the assimilation system can resolve data holes, the initially much sparser networks will lead to less accurate estimates. + For this reason, EAC4 is only available from 2003 onwards. + Although the analysis procedure considers chunks of data in a window of 12 hours in one go, EAC4 provides estimates every 3 hours, worldwide. + This is made possible by the 4D-Var assimilation method, which takes account of the exact timing of the observations and model evolution within the assimilation window. + instrument: + platform: CAMS + platformSerialIdentifier: CAMS + processingLevel: + keywords: Copernicus,ADS,CAMS,Atmosphere,Atmospheric,ECMWF,EAC4 + sensorType: ATMOSPHERIC + license: proprietary + title: CAMS global reanalysis (EAC4) monthly averaged fields missionStartDate: "2003-01-01T00:00:00Z" +CAMS_EU_AIR_QUALITY_RE: + abstract: | + This dataset provides annual air quality reanalyses for Europe based on both unvalidated (interim) and validated observations. + CAMS produces annual air quality (interim) reanalyses for the European domain at significantly higher spatial resolution (0.1 degrees, approx. 10km) than is available from the global reanalyses. + The production is currently based on an ensemble of nine air quality data assimilation systems across Europe. A median ensemble is calculated from individual outputs, since ensemble products yield on average better performance than the individual model products. + The spread between the nine models can be used to provide an estimate of the analysis uncertainty. + The reanalysis combines model data with observations provided by the European Environment Agency (EEA) into a complete and consistent dataset using various data assimilation techniques depending upon the air-quality forecasting system used. + Additional sources of observations can complement the in-situ data assimilation, like satellite data. + An interim reanalysis is provided each year for the year before based on the unvalidated near-real-time observation data stream that has not undergone full quality control by the data providers yet. + Once the fully quality-controlled observations are available from the data provider, typically with an additional delay of about 1 year, a final validated annual reanalysis is provided. + Both reanalyses are available at hourly time steps at height levels.
+ instrument: + platform: CAMS + platformSerialIdentifier: CAMS + processingLevel: + keywords: Copernicus,ADS,CAMS,Atmosphere,Atmospheric,Air,EEA + sensorType: ATMOSPHERIC + license: proprietary + title: CAMS European air quality reanalyses + missionStartDate: "2013-01-01T00:00:00Z" + CAMS_EAC4: abstract: | - CAMS (Copernicus Atmosphere Monitoring Service) ECMWF Atmospheric Composition Reanalysis 4 - from Copernicus ADS + EAC4 (ECMWF Atmospheric Composition Reanalysis 4) is the fourth generation ECMWF global reanalysis of atmospheric composition. Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using a model of the atmosphere based on the laws of physics and chemistry. + This principle, called data assimilation, is based on the method used by numerical weather prediction centres and air quality forecasting centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. + Reanalysis works in the same way to allow for the provision of a dataset spanning back more than a decade. + Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. + The assimilation system is able to estimate biases between observations and to sift good-quality data from poor data. + The atmosphere model allows for estimates at locations where data coverage is low or for atmospheric pollutants for which no direct observations are available. + The provision of estimates at each grid point around the globe for each regular output time, over a long period, always using the same format, makes reanalysis a very convenient and popular dataset to work with. + The observing system has changed drastically over time, and although the assimilation system can resolve data holes, the initially much sparser networks will lead to less accurate estimates. + For this reason, EAC4 is only available from 2003 onwards. + Although the analysis procedure considers chunks of data in a window of 12 hours in one go, EAC4 provides estimates every 3 hours, worldwide. This is made possible by the 4D-Var assimilation method, which takes account of the exact timing of the observations and model evolution within the assimilation window. instrument: platform: CAMS platformSerialIdentifier: CAMS processingLevel: - keywords: Copernicus,Atmosphere,Atmospheric,Reanalysis,CAMS,EAC4,ADS,ECMWF + keywords: Copernicus,ADS,CAMS,Atmosphere,Atmospheric,ECMWF,EAC4 sensorType: ATMOSPHERIC license: proprietary - title: CAMS ECMWF Atmospheric Composition Reanalysis 4 + title: CAMS global reanalysis (EAC4) missionStartDate: "2003-01-01T00:00:00Z" +CAMS_GRF_AUX: + abstract: | + This dataset provides aerosol optical depths and aerosol-radiation radiative effects for four different aerosol origins: anthropogenic, mineral dust, marine, and land-based fine-mode natural aerosol. + The latter mostly consists of biogenic aerosols. + The data are a necessary complement to the "CAMS global radiative forcings" dataset (see "Related Data"). + The calculation of aerosol radiative forcing requires a discrimination between aerosol of anthropogenic and natural origin.
+ However, the CAMS reanalysis, which is used to provide the aerosol concentrations, does not make this distinction. + The anthropogenic fraction was therefore derived by a method which uses aerosol size as a proxy for aerosol origin. + instrument: + platform: CAMS + platformSerialIdentifier: CAMS + processingLevel: + keywords: Copernicus,ADS,CAMS,Atmospheric,Atmosphere,RF,CO2,CH4,O3,Aerosol + sensorType: ATMOSPHERIC + license: proprietary + title: CAMS global radiative forcing - auxiliary variables + missionStartDate: "2003-01-01T00:00:00Z" + +CAMS_GRF: + abstract: | + This dataset provides geographical distributions of the radiative forcing (RF) by key atmospheric constituents. + The radiative forcing estimates are based on the CAMS reanalysis and additional model simulations and are provided separately for CO2, CH4, O3 (tropospheric and stratospheric), interactions between anthropogenic aerosols and radiation and interactions between anthropogenic aerosols and clouds. + Radiative forcing measures the imbalance in the Earth's energy budget caused by a perturbation of the climate system, such as changes in atmospheric composition caused by human activities. + RF is a useful predictor of globally-averaged temperature change, especially when rapid adjustments of atmospheric temperature and moisture profiles are taken into account. + RF has therefore become a quantitative metric to compare the potential climate response to different perturbations. + Increases in greenhouse gas concentrations over the industrial era exerted a positive RF, causing a gain of energy in the climate system. + In contrast, concurrent changes in atmospheric aerosol concentrations are thought to exert a negative RF leading to a loss of energy. + Products are quantified both in "all-sky" conditions, meaning that the radiative effects of clouds are included in the radiative transfer calculations, and in "clear-sky" conditions, which are computed by excluding clouds in the radiative transfer calculations. + The upgrade from version 1.5 to 2 consists of an extension of the period by 2017-2018, the addition of an "effective radiative forcing" product and new ways to calculate the pre-industrial reference state for aerosols and cloud condensation nuclei. + More details are given in the documentation section. + New versions may be released in future as scientific methods develop, and existing versions may be extended with later years if data for the period is available from the CAMS reanalysis. + Newer versions supersede old versions, so it is always recommended to use the latest one. + CAMS also produces distributions of aerosol optical depths, distinguishing natural from anthropogenic aerosols, which are a separate dataset. See "Related Data". + + instrument: + platform: CAMS + platformSerialIdentifier: CAMS + processingLevel: + keywords: Copernicus,ADS,CAMS,Atmospheric,Atmosphere,RF,CO2,CH4,O3,Aerosol + sensorType: ATMOSPHERIC + license: proprietary + title: CAMS global radiative forcings + missionStartDate: "2003-01-01T00:00:00Z" + +CAMS_GREENHOUSE_EGG4_MONTHLY: + abstract: | + This dataset is part of the ECMWF Atmospheric Composition Reanalysis focusing on long-lived greenhouse gases: carbon dioxide (CO2) and methane (CH4). + The emissions and natural fluxes at the surface are crucial for the evolution of the long-lived greenhouse gases in the atmosphere.
+ In this dataset the CO2 fluxes from terrestrial vegetation are modelled in order to simulate the variability across a wide range of scales from diurnal to inter-annual. + The CH4 chemical loss is represented by a climatological loss rate and the emissions at the surface are taken from a range of datasets. + Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using a model of the atmosphere based on the laws of physics and chemistry. + This principle, called data assimilation, is based on the method used by numerical weather prediction centres and air quality forecasting centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. + Reanalysis works in the same way to allow for the provision of a dataset spanning back more than a decade. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. + The assimilation system is able to estimate biases between observations and to sift good-quality data from poor data. + The atmosphere model allows for estimates at locations where data coverage is low or for atmospheric pollutants for which no direct observations are available. + The provision of estimates at each grid point around the globe for each regular output time, over a long period, always using the same format, makes reanalysis a very convenient and popular dataset to work with. + The observing system has changed drastically over time, and although the assimilation system can resolve data holes, the initially much sparser networks will lead to less accurate estimates. + For this reason, EAC4 is only available from 2003 onwards. + The analysis procedure assimilates data in a window of 12 hours using the 4D-Var assimilation method, which takes account of the exact timing of the observations and model evolution within the assimilation window. + instrument: + platform: CAMS + platformSerialIdentifier: CAMS + processingLevel: + keywords: Copernicus,ADS,CAMS,Atmospheric,Atmosphere,CO2,CH4,Greenhouse,ECMWF,EGG4 + sensorType: ATMOSPHERIC + license: proprietary + title: CAMS global greenhouse gas reanalysis (EGG4) monthly averaged fields + missionStartDate: "2003-01-01T00:00:00Z" + +CAMS_GREENHOUSE_EGG4: + abstract: | + This dataset is part of the ECMWF Atmospheric Composition Reanalysis focusing on long-lived greenhouse gases: carbon dioxide (CO2) and methane (CH4). + The emissions and natural fluxes at the surface are crucial for the evolution of the long-lived greenhouse gases in the atmosphere. + In this dataset the CO2 fluxes from terrestrial vegetation are modelled in order to simulate the variability across a wide range of scales from diurnal to inter-annual. + The CH4 chemical loss is represented by a climatological loss rate and the emissions at the surface are taken from a range of datasets. + Reanalysis combines model data with observations from across the world into a globally complete and consistent dataset using a model of the atmosphere based on the laws of physics and chemistry. 
+ This principle, called data assimilation, is based on the method used by numerical weather prediction centres and air quality forecasting centres, where every so many hours (12 hours at ECMWF) a previous forecast is combined with newly available observations in an optimal way to produce a new best estimate of the state of the atmosphere, called analysis, from which an updated, improved forecast is issued. + Reanalysis works in the same way to allow for the provision of a dataset spanning back more than a decade. Reanalysis does not have the constraint of issuing timely forecasts, so there is more time to collect observations, and when going further back in time, to allow for the ingestion of improved versions of the original observations, which all benefit the quality of the reanalysis product. + The assimilation system is able to estimate biases between observations and to sift good-quality data from poor data. + The atmosphere model allows for estimates at locations where data coverage is low or for atmospheric pollutants for which no direct observations are available. + The provision of estimates at each grid point around the globe for each regular output time, over a long period, always using the same format, makes reanalysis a very convenient and popular dataset to work with. + The observing system has changed drastically over time, and although the assimilation system can resolve data holes, the initially much sparser networks will lead to less accurate estimates. + For this reason, EAC4 is only available from 2003 onwards. + The analysis procedure assimilates data in a window of 12 hours using the 4D-Var assimilation method, which takes account of the exact timing of the observations and model evolution within the assimilation window. + instrument: + platform: CAMS + platformSerialIdentifier: CAMS + processingLevel: + keywords: Copernicus,ADS,CAMS,Atmospheric,Atmosphere,CO2,CH4,GHG,ECMWF,EGG4 + sensorType: ATMOSPHERIC + license: proprietary + title: CAMS global greenhouse gas reanalysis (EGG4) + missionStartDate: "2003-01-01T00:00:00Z" + +CAMS_GLOBAL_EMISSIONS: + abstract: | + This data set contains gridded distributions of global anthropogenic and natural emissions. + Natural and anthropogenic emissions of atmospheric pollutants and greenhouse gases are key drivers of the evolution of the composition of the atmosphere, so an accurate representation of them in forecast models of atmospheric composition is essential. + CAMS compiles inventories of emission data that serve as input to its own forecast models, but which can also be used by other atmospheric chemical transport models. + These inventories are based on a combination of existing data sets and new information, describing anthropogenic emissions from fossil fuel use on land, shipping, and aviation, and natural emissions from vegetation, soil, the ocean and termites. + The anthropogenic emissions on land are further separated into specific activity sectors (e.g., power generation, road traffic, industry). + The CAMS emission data sets provide good consistency between the emissions of greenhouse gases, reactive gases, and aerosol particles and their precursors. + Because most inventory-based data sets are only available with a delay of several years, the CAMS emission inventories also extend these existing data sets forward in time by using the trends from the most recent available years, producing timely input data for real-time forecast models.
+ Most of the data sets are updated once or twice per year adding the most recent year to the data record, while re-processing the original data record for consistency, when needed. This is reflected by the different version numbers. + instrument: + platform: CAMS + platformSerialIdentifier: CAMS + processingLevel: + keywords: Copernicus,ADS,CAMS,Atmosphere,Atmospheric,Emissions,Pollutants,GHG + sensorType: ATMOSPHERIC + license: proprietary + title: CAMS global emission inventories + missionStartDate: "2000-01-01T00:00:00Z" + # COPERNICUS CDS ---------------------------------------------------------------------- ERA5_SL: abstract: | @@ -2397,7 +2626,7 @@ UERRA_EUROPE_SL: sensorType: ATMOSPHERIC license: proprietary title: UERRA regional reanalysis for Europe on single levels from 1961 to 2019 - missionStartDate: "1918-10-18T00:00:00Z" + missionStartDate: "1961-01-01T00:00:00Z" GLACIERS_ELEVATION_AND_MASS_CHANGE: abstract: | @@ -2555,7 +2784,7 @@ SEASONAL_POSTPROCESSED_PL: sensorType: ATMOSPHERIC license: proprietary title: Seasonal forecast anomalies on pressure levels - missionStartDate: "2017-01-01T00:00:00Z" + missionStartDate: "2017-09-01T00:00:00Z" SATELLITE_SEA_LEVEL_BLACK_SEA: abstract: | @@ -2635,7 +2864,7 @@ SEASONAL_POSTPROCESSED_SL: sensorType: ATMOSPHERIC license: proprietary title: Seasonal forecast anomalies on single levels - missionStartDate: "2017-01-01T00:00:00Z" + missionStartDate: "2017-09-01T00:00:00Z" SEASONAL_ORIGINAL_SL: abstract: | @@ -2685,7 +2914,7 @@ SEASONAL_ORIGINAL_SL: sensorType: ATMOSPHERIC license: proprietary title: Seasonal forecast daily and subdaily data on single levels - missionStartDate: "2017-01-01T00:00:00Z" + missionStartDate: "1981-01-01T00:00:00Z" SEASONAL_ORIGINAL_PL: abstract: | @@ -2729,7 +2958,7 @@ SEASONAL_ORIGINAL_PL: sensorType: ATMOSPHERIC license: proprietary title: Seasonal forecast subdaily data on pressure levels - missionStartDate: "1993-01-01T00:00:00Z" + missionStartDate: "1981-01-01T00:00:00Z" SEASONAL_MONTHLY_PL: abstract: | @@ -2772,7 +3001,7 @@ SEASONAL_MONTHLY_PL: sensorType: ATMOSPHERIC license: proprietary title: Seasonal forecast monthly statistics on pressure levels - missionStartDate: "1993-01-01T00:00:00Z" + missionStartDate: "1981-01-01T00:00:00Z" SEASONAL_MONTHLY_SL: abstract: | @@ -2823,7 +3052,7 @@ SEASONAL_MONTHLY_SL: sensorType: ATMOSPHERIC license: proprietary title: Seasonal forecast monthly statistics on single levels - missionStartDate: "1993-01-01T00:00:00Z" + missionStartDate: "1981-01-01T00:00:00Z" missionEndDate: "2023-05-01T00:00:00Z" SIS_HYDRO_MET_PROJ: @@ -2888,7 +3117,7 @@ FIRE_HISTORICAL: sensorType: ATMOSPHERIC license: proprietary title: Fire danger indices historical data from the Copernicus Emergency Management Service - missionStartDate: "1979-01-01T00:00:00Z" + missionStartDate: "1940-01-03T00:00:00Z" GLOFAS_FORECAST: abstract: | @@ -2908,7 +3137,7 @@ GLOFAS_FORECAST: sensorType: ATMOSPHERIC license: proprietary title: River discharge and related forecasted data by the Global Flood Awareness System - missionStartDate: "2019-11-05T00:00:00Z" + missionStartDate: "2021-05-26T00:00:00Z" GLOFAS_HISTORICAL: abstract: | @@ -2926,7 +3155,7 @@ GLOFAS_HISTORICAL: sensorType: ATMOSPHERIC license: proprietary title: River discharge and related historical data from the Global Flood Awareness System - missionStartDate: "1991-01-01T00:00:00Z" + missionStartDate: "1979-01-01T00:00:00Z" GLOFAS_REFORECAST: abstract: | @@ -2957,7 +3186,7 @@ GLOFAS_REFORECAST: sensorType: ATMOSPHERIC license: proprietary title: 
Reforecasts of river discharge and related data by the Global Flood Awareness System - missionStartDate: "1999-01-03T00:00:00Z" + missionStartDate: "2003-03-27T00:00:00Z" missionEndDate: "2018-12-30T23:59:00Z" GLOFAS_SEASONAL: @@ -2988,7 +3217,7 @@ GLOFAS_SEASONAL: sensorType: ATMOSPHERIC license: proprietary title: Seasonal forecasts of river discharge and related data by the Global Flood Awareness System - missionStartDate: "2020-01-12T00:00:00Z" + missionStartDate: "2021-06-01T00:00:00Z" GLOFAS_SEASONAL_REFORECAST: abstract: | @@ -3022,7 +3251,7 @@ GLOFAS_SEASONAL_REFORECAST: sensorType: ATMOSPHERIC license: proprietary title: Seasonal reforecasts of river discharge and related data from the Global Flood Awareness System - missionStartDate: "2020-01-12T00:00:00Z" + missionStartDate: "1981-01-01T00:00:00Z" EFAS_FORECAST: abstract: | @@ -3060,7 +3289,7 @@ EFAS_FORECAST: sensorType: ATMOSPHERIC license: proprietary title: River discharge and related forecasted data by the European Flood Awareness System - missionStartDate: "2018-10-10T00:00:00Z" + missionStartDate: "2018-10-11T00:00:00Z" EFAS_HISTORICAL: abstract: | @@ -3093,7 +3322,7 @@ EFAS_HISTORICAL: sensorType: ATMOSPHERIC license: proprietary title: River discharge and related historical data from the European Flood Awareness System - missionStartDate: "1991-01-01T00:00:00Z" + missionStartDate: "1992-01-02T00:00:00Z" EFAS_REFORECAST: abstract: | @@ -3129,7 +3358,7 @@ EFAS_REFORECAST: sensorType: ATMOSPHERIC license: proprietary title: Reforecasts of river discharge and related data by the European Flood Awareness System - missionStartDate: "1999-01-03T00:00:00Z" + missionStartDate: "2003-03-27T00:00:00Z" missionEndDate: "2018-12-30T00:00:00Z" EFAS_SEASONAL: @@ -3205,7 +3434,7 @@ EFAS_SEASONAL_REFORECAST: # COPERNICUS Digital Elevation Model COP_DEM_GLO30_DGED: abstract: | - Defence Gridded Elevation Data (DGED) formatted Copernicus DEM GLO-30 data. + Defence Gridded Elevation Data (DGED, 32 Bit floating point) formatted Copernicus DEM GLO-30 data. The Copernicus Digital Elevation Model is a Digital Surface Model (DSM) that represents the surface of the Earth including buildings, infrastructure and vegetation. The Copernicus DEM is provided in 3 different instances: EEA-10, GLO-30 and GLO-90. GLO-30 provides worldwide coverage at 30 meters.Data were acquired through the TanDEM-X mission @@ -3221,7 +3450,7 @@ COP_DEM_GLO30_DGED: missionStartDate: "2010-06-21T00:00:00Z" COP_DEM_GLO30_DTED: abstract: | - Digital Terrain Elevation Data (DTED) formatted Copernicus DEM GLO-30 data. + Digital Terrain Elevation Data (DTED, 16 Bit signed integer) formatted Copernicus DEM GLO-30 data. The Copernicus Digital Elevation Model is a Digital Surface Model (DSM) that represents the surface of the Earth including buildings, infrastructure and vegetation. The Copernicus DEM is provided in 3 different instances: EEA-10, GLO-30 and GLO-90. GLO-30 provides worldwide coverage at 30 meters.Data were acquired through the TanDEM-X mission @@ -3237,7 +3466,7 @@ COP_DEM_GLO30_DTED: missionStartDate: "2010-06-21T00:00:00Z" COP_DEM_GLO90_DGED: abstract: | - Defence Gridded Elevation Data (DGED) formatted Copernicus DEM GLO-90 data. + Defence Gridded Elevation Data (DGED, 32 Bit floating point) formatted Copernicus DEM GLO-90 data. The Copernicus Digital Elevation Model is a Digital Surface Model (DSM) that represents the surface of the Earth including buildings, infrastructure and vegetation. 
The Copernicus DEM is provided in 3 different instances: EEA-10, GLO-30 and GLO-90. GLO-90 provides worldwide coverage at 90 meters.Data were acquired through the TanDEM-X mission @@ -3253,7 +3482,7 @@ COP_DEM_GLO90_DGED: missionStartDate: "2010-06-21T00:00:00Z" COP_DEM_GLO90_DTED: abstract: | - Digital Terrain Elevation Data (DTED) formatted Copernicus DEM GLO-90 data. + Digital Terrain Elevation Data (DTED, 16 Bit signed integer) formatted Copernicus DEM GLO-90 data. The Copernicus Digital Elevation Model is a Digital Surface Model (DSM) that represents the surface of the Earth including buildings, infrastructure and vegetation. The Copernicus DEM is provided in 3 different instances: EEA-10, GLO-30 and GLO-90. GLO-90 provides worldwide coverage at 90 meters.Data were acquired through the TanDEM-X mission diff --git a/eodag/resources/providers.yml b/eodag/resources/providers.yml index dc4d34e82..09acbeb3b 100644 --- a/eodag/resources/providers.yml +++ b/eodag/resources/providers.yml @@ -850,9 +850,6 @@ S2_MSI_L1C: collection: S2ST productType: S2MSI1C - S2_MSI_L2A: - collection: S2ST - productType: S2MSI2A GENERIC_PRODUCT_TYPE: productType: '{productType}' collection: '{collection}' @@ -980,14 +977,16 @@ id: - 'productIdentifier=%{id#remove_extension}%' - '{$.properties.title#remove_extension}' - tileIdentifier: '$.null' + tileIdentifier: + - tileId + - '$.null' # The geographic extent of the product geometry: - 'geometry={geometry#to_rounded_wkt}' # - '$.geometry' - '($.geometry.`str()`.`sub(/^\\[\\]$/, POLYGON((180 -90, 180 90, -180 90, -180 -90, 180 -90)))`)|($.geometry[*])' # The url of the quicklook - quicklook: '$.properties.quicklook' + quicklook: '$.properties.thumbnail' # The url to download the product "as is" (literal or as a template to be completed either after the search result # is obtained from the provider or during the eodag download phase) downloadLink: 'https://zipper.creodias.eu/download/{uid}' @@ -1520,6 +1519,9 @@ type: StacSearch api_endpoint: https://eod-catalog-svc-prod.astraea.earth/search need_auth: false + discover_queryables: + fetch_url: null + product_type_fetch_url: null pagination: # 2021/03/19: The docs (https://eod-catalog-svc-prod.astraea.earth/api.html#operation/getSearchSTAC) # say the max is 10_000. In practice 1_000 products are returned if more are asked (even greater @@ -1637,6 +1639,9 @@ type: StacSearch api_endpoint: https://landsatlook.usgs.gov/stac-server/search need_auth: false + discover_queryables: + fetch_url: null + product_type_fetch_url: null pagination: # 2021/03/19: no more than 10_000 (if greater, returns a 500 error code) # but in practive if an Internal Server Error is returned for more than @@ -1679,10 +1684,13 @@ url: https://www.element84.com/earth-search/ search: !plugin type: StacSearch - api_endpoint: https://earth-search.aws.element84.com/v0/search + api_endpoint: https://earth-search.aws.element84.com/v1/search need_auth: false discover_product_types: results_entry: '$.collections[?id!="sentinel-s2-l2a-cogs"]' + discover_queryables: + fetch_url: null + product_type_fetch_url: null pagination: # Override the default next page url key path of StacSearch because the next link returned # by Earth Search is invalid (as of 2021/04/29). 
Entry set to null (None) to avoid using the @@ -1695,28 +1703,30 @@ max_items_per_page: 500 metadata_mapping: utmZone: - - '{{"query":{{"sentinel:utm_zone":{{"eq":"{utmZone}"}}}}}}' - - '$.properties."sentinel:utm_zone"' + - '{{"query":{{"mgrs:utm_zone":{{"eq":"{utmZone}"}}}}}}' + - '$.properties."mgrs:utm_zone"' latitudeBand: - - '{{"query":{{"sentinel:latitude_band":{{"eq":"{latitudeBand}"}}}}}}' - - '$.properties."sentinel:latitude_band"' + - '{{"query":{{"mgrs:latitude_band":{{"eq":"{latitudeBand}"}}}}}}' + - '$.properties."mgrs:latitude_band"' gridSquare: - - '{{"query":{{"sentinel:grid_square":{{"eq":"{gridSquare}"}}}}}}' - - '$.properties."sentinel:grid_square"' + - '{{"query":{{"mgrs:grid_square":{{"eq":"{gridSquare}"}}}}}}' + - '$.properties."mgrs:grid_square"' tileIdentifier: - - '{{"query":{{"sentinel:utm_zone":{{"eq":"{tileIdentifier#slice_str(0,2,1)}"}},"sentinel:latitude_band":{{"eq":"{tileIdentifier#slice_str(2,3,1)}"}},"sentinel:grid_square":{{"eq":"{tileIdentifier#slice_str(3,5,1)}"}}}}}}' + - '{{"query":{{"mgrs:utm_zone":{{"eq":"{tileIdentifier#slice_str(0,2,1)}"}},"mgrs:latitude_band":{{"eq":"{tileIdentifier#slice_str(2,3,1)}"}},"mgrs:grid_square":{{"eq":"{tileIdentifier#slice_str(3,5,1)}"}}}}}}' - '{utmZone}{latitudeBand}{gridSquare}' products: + S1_SAR_GRD: + productType: sentinel-1-grd S2_MSI_L1C: - productType: sentinel-s2-l1c + productType: sentinel-2-l1c metadata_mapping: - title: '$.properties."sentinel:product_id"' + title: '{$.properties."s2:product_uri"#remove_extension}' platformSerialIdentifier: '$.id.`split(_, 0, -1)`' polarizationMode: '$.id.`sub(/.{14}([A-Z]{2}).*/, \\1)`' productPath: | - $.properties."sentinel:product_id".`sub(/([S2AB]{3})_MSIL1C_([0-9]{4})([0-9]{2})([0-9]{2})(T.*)/, products!\\2!\\3!\\4!\\1_MSIL1C_\\2\\3\\4\\5)`.`sub(/!0*/, /)` + $.properties."s2:product_uri".`sub(/([S2AB]{3})_MSIL1C_([0-9]{4})([0-9]{2})([0-9]{2})(T.*).SAFE/, products!\\2!\\3!\\4!\\1_MSIL1C_\\2\\3\\4\\5)`.`sub(/!0*/, /)` tilePath: | - $.assets.info.href.`sub(/.*/sentinel-s2-l1c\/(tiles\/.*)\/tileInfo\.json/, \\1)` + $.assets.tileinfo_metadata.href.`sub(/.*/sentinel-s2-l1c\/(tiles\/.*)\/tileInfo\.json/, \\1)` utmZone: - '{{"query":{{"mgrs:utm_zone":{{"eq":"{utmZone}"}}}}}}' - '$.properties."mgrs:utm_zone"' @@ -1729,18 +1739,14 @@ tileIdentifier: - '{{"query":{{"mgrs:utm_zone":{{"eq":"{tileIdentifier#slice_str(0,2,1)}"}},"mgrs:latitude_band":{{"eq":"{tileIdentifier#slice_str(2,3,1)}"}},"mgrs:grid_square":{{"eq":"{tileIdentifier#slice_str(3,5,1)}"}}}}}}' - '{utmZone}{latitudeBand}{gridSquare}' - S2_MSI_L2A: - productType: sentinel-s2-l2a - metadata_mapping: - title: '$.properties."sentinel:product_id"' - platformSerialIdentifier: '$.id.`split(_, 0, -1)`' - polarizationMode: '$.id.`sub(/.{14}([A-Z]{2}).*/, \\1)`' - productPath: | - $.properties."sentinel:product_id".`sub(/([S2AB]{3})_MSIL2A_([0-9]{4})([0-9]{2})([0-9]{2})(T.*)/, products!\\2!\\3!\\4!\\1_MSIL2A_\\2\\3\\4\\5)`.`sub(/!0*/, /)` - tilePath: | - $.assets.info.href.`sub(/.*/sentinel-s2-l2a\/(tiles\/.*)\/tileInfo\.json/, \\1)` - L8_OLI_TIRS_C1L1: - productType: landsat-8-l1-c1 + LANDSAT_C2L2: + productType: landsat-c2-l2 + NAIP: + productType: naip + COP_DEM_GLO30_DGED: + productType: cop-dem-glo-30 + COP_DEM_GLO90_DGED: + productType: cop-dem-glo-90 GENERIC_PRODUCT_TYPE: productType: '{productType}' download: !plugin @@ -1754,12 +1760,6 @@ complementary_url_key: - productPath - tilePath - S2_MSI_L2A: - default_bucket: 'sentinel-s2-l2a' - build_safe: true - complementary_url_key: - - productPath - - tilePath auth: 
!plugin type: AwsAuth @@ -1773,10 +1773,13 @@ url: https://www.element84.com/earth-search/ search: !plugin type: StacSearch - api_endpoint: https://earth-search.aws.element84.com/v0/search + api_endpoint: https://earth-search.aws.element84.com/v1/search need_auth: false discover_product_types: fetch_url: null + discover_queryables: + fetch_url: null + product_type_fetch_url: null pagination: # Override the default next page url key path of StacSearch because the next link returned # by Earth Search is invalid (as of 2021/04/29). Entry set to null (None) to avoid using the @@ -1790,9 +1793,10 @@ metadata_mapping: platformSerialIdentifier: '$.id.`split(_, 0, -1)`' polarizationMode: '$.id.`sub(/.{14}([A-Z]{2}).*/, \\1)`' + title: '{$.properties."s2:product_uri"#remove_extension}' products: S2_MSI_L2A_COG: - productType: sentinel-s2-l2a-cogs + productType: sentinel-2-l2a GENERIC_PRODUCT_TYPE: productType: '{productType}' download: !plugin @@ -1812,6 +1816,9 @@ need_auth: false discover_product_types: fetch_url: null + discover_queryables: + fetch_url: null + product_type_fetch_url: null pagination: # Override the default next page url key path of StacSearch because the next link returned # by Earth Search is invalid (as of 2021/04/29). Entry set to null (None) to avoid using the @@ -1874,6 +1881,7 @@ geometry: - 'area={geometry#to_nwse_bounds_str(/)}' - '$.geometry' + defaultGeometry: 'POLYGON((180 -90, 180 90, -180 90, -180 -90, 180 -90))' # storageStatus set to ONLINE for consistency between providers storageStatus: '{$.null#replace_str("Not Available","ONLINE")}' downloadLink: 'https://apps.ecmwf.int/datasets/data/{dataset}' @@ -2084,6 +2092,8 @@ type: CdsApi api_endpoint: https://ads.atmosphere.copernicus.eu/api/v2 extract: false + flatten_top_dirs: True + constraints_file_url: "https://datastore.copernicus-climate.eu/cams/published-forms/camsprod/{dataset}/constraints.json" metadata_mapping: productType: '$.productType' title: '$.id' @@ -2096,6 +2106,7 @@ geometry: - 'area={geometry#to_nwse_bounds_str(/)}' - '$.geometry' + defaultGeometry: 'POLYGON((180 -90, 180 90, -180 90, -180 -90, 180 -90))' # storageStatus set to ONLINE for consistency between providers storageStatus: '{$.null#replace_str("Not Available","ONLINE")}' downloadLink: 'https://ads.atmosphere.copernicus.eu/cdsapp#!/dataset/{dataset}' @@ -2103,12 +2114,21 @@ accuracy: - accuracy - '$.accuracy' + aerosol_type: + - aerosol_type + - '$.aerosol_type' + altitude: + - altitude + - '$.altitude' anoffset: - anoffset - '$.anoffset' api_product_type: - product_type - '$.api_product_type' + band: + - band + - '$.band' bitmap: - bitmap - '$.bitmap' @@ -2151,9 +2171,6 @@ expver: - expver - '$.expver' - fcmonth: - - fcmonth - - '$.fcmonth' fcperiod: - fcperiod - '$.fcperiod' @@ -2163,12 +2180,12 @@ filter: - filter - '$.filter' + forcing_type: + - forcing_type + - '$.forcing_type' format: - format - '$.format' - frame: - - frame - - '$.frame' frequency: - frequency - '$.frequency' @@ -2181,6 +2198,9 @@ ident: - ident - '$.ident' + input_observations: + - input_observations + - '$.input_observations' interpolation: - interpolation - '$.interpolation' @@ -2193,6 +2213,9 @@ latitude: - latitude - '$.latitude' + level: + - level + - '$.level' levelist: - levelist - '$.levelist' @@ -2202,6 +2225,9 @@ levtype: - levtype - '$.levtype' + location: + - location + - '$.location' longitude: - longitude - '$.longitude' @@ -2211,6 +2237,9 @@ method: - method - '$.method' + model: + - model + - '$.model' model_level: - model_level - '$.model_level' @@ 
-2271,6 +2300,9 @@ section: - section - '$.section' + sky_type: + - sky_type + - '$.sky_type' source: - source - '$.source' @@ -2289,6 +2321,15 @@ time: - time - '$.time' + time_aggregation: + - time_aggregation + - '$.time_aggregation' + time_reference: + - time_reference + - '$.time_reference' + time_step: + - time_step + - '$.time_step' truncation: - truncation - '$.truncation' @@ -2301,93 +2342,150 @@ variable: - variable - '$.variable' + version: + - version + - '$.version' + quantity: + - quantity + - '$.quantity' year: - year - '$.year' products: # See available Public Datasets in https://ads.atmosphere.copernicus.eu/cdsapp#!/search?type=dataset - CAMS_GACF_AOT: + CAMS_GAC_FORECAST: dataset: cams-global-atmospheric-composition-forecasts - stream: oper - class: mc - expver: '0001' - step: 0 - levtype: SFC - variable: - - sea_salt_aerosol_optical_depth_550nm - - dust_aerosol_optical_depth_550nm - - organic_matter_aerosol_optical_depth_550nm - - black_carbon_aerosol_optical_depth_550nm - - sulphate_aerosol_optical_depth_550nm - time: '00:00' - leadtime_hour: 0 type: forecast - format: netcdf - CAMS_GACF_RH: - dataset: cams-global-atmospheric-composition-forecasts - stream: oper - class: mc - expver: '0001' - step: 0 - variable: 'relative_humidity' - pressure_level: ['1', '2', '3', '5', '7', '10', '20', '30', '50', '70', '100', '150', '200', '250', '300', '400', '500', '600', '700', '850', '925', '1000'] + format: grib + variable: ammonium_aerosol_optical_depth_550nm time: '00:00' - leadtime_hour: 0 + leadtime_hour: '0' + _default_end_date: '2019-07-12T00:00Z' + CAMS_GFE_GFAS: + dataset: cams-global-fire-emissions-gfas + format: grib + variable: altitude_of_plume_bottom + _default_end_date: '2018-07-03T00:00Z' + CAMS_EU_AIR_QUALITY_FORECAST: + dataset: cams-europe-air-quality-forecasts + model: ensemble + format: grib + variable: alder_pollen type: forecast - format: netcdf - CAMS_GACF_MR: - dataset: cams-global-atmospheric-composition-forecasts - stream: oper - class: mc - expver: '0001' - step: 0 - variable: - - sea_salt_aerosol_0.03-0.5um_mixing_ratio - - sea_salt_aerosol_0.5-5um_mixing_ratio - - sea_salt_aerosol_5-20um_mixing_ratio - - dust_aerosol_0.03-0.55um_mixing_ratio - - dust_aerosol_0.55-0.9um_mixing_ratio - - dust_aerosol_0.9-20um_mixing_ratio - - hydrophilic_organic_matter_aerosol_mixing_ratio - - hydrophobic_organic_matter_aerosol_mixing_ratio - - hydrophilic_black_carbon_aerosol_mixing_ratio - - hydrophobic_black_carbon_aerosol_mixing_ratio - - sulphate_aerosol_mixing_ratio - model_level: ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60', '61'] time: '00:00' - leadtime_hour: 0 - type: forecast - format: netcdf + level: '0' + leadtime_hour: '0' + metadata_mapping: + geometry: + - 'area={geometry#to_nwse_bounds}' + - '$.geometry' + CAMS_EU_AIR_QUALITY_RE: + dataset: cams-europe-air-quality-reanalyses + type: validated_reanalysis + format: zip + variable: nitrogen_dioxide + model: ensemble + level: '0' + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": 
{_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + CAMS_GRF: + dataset: cams-global-radiative-forcings + format: zip + variable: radiative_forcing_of_carbon_dioxide + forcing_type: instantaneous + band: long_wave + sky_type: all_sky + level: surface + version: '2' + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + CAMS_GRF_AUX: + dataset: cams-global-radiative-forcing-auxilliary-variables + band: short_wave + sky_type: clear_sky + version: '1.5' + format: zip + variable: aerosol_radiation_effect + aerosol_type: marine + level: surface + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + CAMS_SOLAR_RADIATION: + dataset: cams-solar-radiation-timeseries + sky_type: clear + time_step: 1minute + time_reference: true_solar_time + location: + latitude: 0 + longitude: 0 + altitude: -999 + format: csv + CAMS_GREENHOUSE_EGG4_MONTHLY: + dataset: cams-global-ghg-reanalysis-egg4-monthly + format: grib + variable: snow_albedo + api_product_type: monthly_mean + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + CAMS_GREENHOUSE_EGG4: + dataset: cams-global-ghg-reanalysis-egg4 + format: grib + variable: snow_albedo + step: '0' + CAMS_GREENHOUSE_INVERSION: + dataset: cams-global-greenhouse-gas-inversion + version: latest + variable: carbon_dioxide + quantity: mean_column + input_observations: surface + time_aggregation: instantaneous + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + CAMS_GLOBAL_EMISSIONS: + dataset: cams-global-emission-inventories + version: latest + format: zip + variable: acids + source: anthropogenic + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' CAMS_EAC4: dataset: cams-global-reanalysis-eac4 - stream: oper - class: mc - expver: '0001' - variable: - - dust_aerosol_0.03-0.55um_mixing_ratio - - dust_aerosol_0.55-0.9um_mixing_ratio - - dust_aerosol_0.9-20um_mixing_ratio - - dust_aerosol_optical_depth_550nm - - hydrophilic_black_carbon_aerosol_mixing_ratio - - hydrophilic_organic_matter_aerosol_mixing_ratio - - hydrophobic_black_carbon_aerosol_mixing_ratio - - hydrophobic_organic_matter_aerosol_mixing_ratio - - sea_salt_aerosol_0.03-0.5um_mixing_ratio - - sea_salt_aerosol_0.5-5um_mixing_ratio - - sea_salt_aerosol_5-20um_mixing_ratio - - sea_salt_aerosol_optical_depth_550nm - - sulphate_aerosol_optical_depth_550nm - 
model_level: ['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '60'] + format: grib + variable: '2m_dewpoint_temperature' time: '00:00' - format: netcdf + CAMS_EAC4_MONTHLY: + dataset: cams-global-reanalysis-eac4-monthly + format: grib + variable: 2m_dewpoint_temperature + api_product_type: monthly_mean + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' GENERIC_PRODUCT_TYPE: dataset: '{productType}' - stream: oper - class: mc - expver: '0001' - step: 0 + format: grib time: '00:00' - format: netcdf --- !provider name: cop_cds @@ -2400,6 +2498,11 @@ type: CdsApi api_endpoint: https://cds.climate.copernicus.eu/api/v2 extract: false + discover_queryables: + fetch_url: null + product_type_fetch_url: null + flatten_top_dirs: True + constraints_file_url: "http://datastore.copernicus-climate.eu/c3s/published-forms/c3sprod/{dataset}/constraints.json" metadata_mapping: productType: '$.productType' title: '$.id' @@ -2412,6 +2515,7 @@ geometry: - 'area={geometry#to_nwse_bounds_str(/)}' - '$.geometry' + defaultGeometry: 'POLYGON((180 -90, 180 90, -180 90, -180 -90, 180 -90))' # storageStatus set to ONLINE for consistency between providers storageStatus: '{$.null#replace_str("Not Available","ONLINE")}' downloadLink: 'https://cds.climate.copernicus.eu/cdsapp#!/dataset/{dataset}' @@ -2443,6 +2547,9 @@ dataset: - dataset - '$.dataset' + dataset_type: + - dataset_type + - '$.dataset_type' date_range: - date_range - '$.date_range' @@ -2461,15 +2568,18 @@ duplicates: - duplicates - '$.duplicates' + ensemble_member: + - ensemble_member + - '$.ensemble_member' expect: - expect - '$.expect' + experiment: + - experiment + - '$.experiment' expver: - expver - '$.expver' - fcmonth: - - fcmonth - - '$.fcmonth' fcperiod: - fcperiod - '$.fcperiod' @@ -2482,18 +2592,24 @@ format: - format - '$.format' - frame: - - frame - - '$.frame' frequency: - frequency - '$.frequency' + gcm: + - gcm + - '$.gcm' grid: - grid - '$.grid' hdate: - hdate - '$.hdate' + horizontal_resolution: + - horizontal_resolution + - '$.horizontal_resolution' + hydrological_model: + - hydrological_model + - '$.hydrological_model' ident: - ident - '$.ident' @@ -2515,6 +2631,9 @@ leadtime_hour: - leadtime_hour - '$.leadtime_hour' + leadtime_month: + - leadtime_month + - '$.leadtime_month' levtype: - levtype - '$.levtype' @@ -2527,9 +2646,9 @@ method: - method - '$.method' - model_level: - - model_level - - '$.model_level' + model_levels: + - model_levels + - '$.model_levels' month: - month - '$.month' @@ -2545,6 +2664,9 @@ origin: - origin - '$.origin' + originating_centre: + - originating_centre + - '$.originating_centre' packing: - packing - '$.packing' @@ -2554,18 +2676,30 @@ param: - param - '$.param' + period: + - period + - '$.period' pressure_level: - pressure_level - '$.pressure_level' priority: - priority - '$.priority' + processing_level: + - processing_level + - '$.processing_level' + processing_type: + - processing_type + - '$.processing_type' product: - product - '$.product' range: - range - 
'$.range' + rcm: + - rcm + - '$.rcm' refdate: - refdate - '$.refdate' @@ -2587,6 +2721,9 @@ section: - section - '$.section' + sensor_and_algorithm: + - sensor_and_algorithm + - '$.sensor_and_algorithm' source: - source - '$.source' @@ -2605,6 +2742,9 @@ time: - time - '$.time' + time_aggregation: + - time_aggregation + - '$.time_aggregation' truncation: - truncation - '$.truncation' @@ -2617,6 +2757,15 @@ variable: - variable - '$.variable' + variable_type: + - variable_type + - '$.variable_type' + version: + - version + - '$.version' + system_version: + - system_version + - '$.system_version' year: - year - '$.year' @@ -2626,7 +2775,293 @@ dataset: reanalysis-era5-single-levels api_product_type: reanalysis time: '00:00' - format: netcdf + format: grib + ERA5_PL: + dataset: reanalysis-era5-pressure-levels + api_product_type: reanalysis + time: '00:00' + format: grib + ERA5_PL_MONTHLY: + dataset: reanalysis-era5-pressure-levels-monthly-means + api_product_type: monthly_averaged_reanalysis + time: '00:00' + format: grib + variable: divergence + pressure_level: '1' + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + ERA5_LAND: + dataset: reanalysis-era5-land + variable: 2m_dewpoint_temperature + time: '01:00' + format: grib + ERA5_LAND_MONTHLY: + dataset: reanalysis-era5-land-monthly-means + api_product_type: monthly_averaged_reanalysis + variable: 2m_dewpoint_temperature + time: '00:00' + format: grib + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + ERA5_SL_MONTHLY: + dataset: reanalysis-era5-single-levels-monthly-means + api_product_type: monthly_averaged_reanalysis + time: '00:00' + format: grib + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + UERRA_EUROPE_SL: + dataset: reanalysis-uerra-europe-single-levels + origin: mescan_surfex + variable: 10m_wind_direction + time: '00:00' + format: grib + SATELLITE_SEA_LEVEL_BLACK_SEA: + dataset: satellite-sea-level-black-sea + format: zip + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + GLACIERS_DIST_RANDOLPH: + dataset: insitu-glaciers-extent + variable: glacier_area + api_product_type: gridded + version: '6_0' + format: zip + FIRE_HISTORICAL: + dataset: cems-fire-historical-v1 + grid: original_grid + dataset_type: consolidated_dataset + api_product_type: reanalysis + variable: build_up_index + system_version: '4_1' + format: grib + GLOFAS_FORECAST: + dataset: cems-glofas-forecast + system_version: operational + variable: river_discharge_in_the_last_24_hours + format: grib + hydrological_model: lisflood + api_product_type: 
control_forecast + leadtime_hour: '24' + GLOFAS_REFORECAST: + dataset: cems-glofas-reforecast + variable: river_discharge_in_the_last_24_hours + format: grib + system_version: version_4_0 + hydrological_model: lisflood + api_product_type: control_reforecast + leadtime_hour: '24' + GLOFAS_HISTORICAL: + dataset: cems-glofas-historical + system_version: version_4_0 + variable: river_discharge_in_the_last_24_hours + format: grib + hydrological_model: lisflood + api_product_type: consolidated + metadata_mapping: + completionTimeFromAscendingNode: + - 'hdate={startTimeFromAscendingNode#to_iso_date}/{completionTimeFromAscendingNode#to_iso_date(-1,)}' + - '{$.completionTimeFromAscendingNode#to_iso_date}' + GLOFAS_SEASONAL: + dataset: cems-glofas-seasonal + variable: river_discharge_in_the_last_24_hours + format: grib + system_version: operational + hydrological_model: lisflood + leadtime_hour: '24' + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + GLOFAS_SEASONAL_REFORECAST: + dataset: cems-glofas-seasonal-reforecast + format: grib + variable: river_discharge_in_the_last_24_hours + system_version: version_4_0 + hydrological_model: lisflood + leadtime_hour: '24' + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"hyear": {_date#interval_to_datetime_dict}["year"], "hmonth": {_date#interval_to_datetime_dict}["month"], "hday": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + EFAS_FORECAST: + dataset: efas-forecast + format: grib.zip + system_version: operational + originating_centre: ecmwf + api_product_type: control_forecast + variable: river_discharge_in_the_last_24_hours + model_levels: surface_level + time: '00:00' + leadtime_hour: '24' + EFAS_HISTORICAL: + dataset: efas-historical + format: grib.zip + model_levels: surface_level + variable: snow_depth_water_equivalent + system_version: version_5_0 + time: '00:00' + metadata_mapping: + completionTimeFromAscendingNode: + - 'hdate={startTimeFromAscendingNode#to_iso_date}/{completionTimeFromAscendingNode#to_iso_date(-1,)}' + - '{$.completionTimeFromAscendingNode#to_iso_date}' + EFAS_REFORECAST: + dataset: efas-reforecast + system_version: version_5_0 + format: grib.zip + api_product_type: control_forecast + variable: river_discharge_in_the_last_6_hours + model_levels: surface_level + leadtime_hour: '6' + metadata_mapping: + completionTimeFromAscendingNode: + - 'hdate={startTimeFromAscendingNode#to_iso_date}/{completionTimeFromAscendingNode#to_iso_date(-1,)}' + - '{$.completionTimeFromAscendingNode#to_iso_date}' + EFAS_SEASONAL: + dataset: efas-seasonal + system_version: operational + format: grib.zip + variable: river_discharge_in_the_last_24_hours + model_levels: surface_level + leadtime_hour: '24' + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + EFAS_SEASONAL_REFORECAST: + dataset: efas-seasonal-reforecast + system_version: version_5_0 + format: grib.zip + variable: river_discharge_in_the_last_24_hours + model_levels: surface_level + 
leadtime_hour: '24' + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"hyear": {_date#interval_to_datetime_dict}["year"], "hmonth": {_date#interval_to_datetime_dict}["month"], "hday": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + SATELLITE_CARBON_DIOXIDE: + dataset: satellite-carbon-dioxide + format: zip + processing_level: level_2 + variable: xco2 + sensor_and_algorithm: sciamachy_wfmd + version: '4.0' + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + SATELLITE_METHANE: + dataset: satellite-methane + format: zip + processing_level: level_2 + variable: xch4 + sensor_and_algorithm: sciamachy_wfmd + version: '4.0' + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + SEASONAL_POSTPROCESSED_PL: + dataset: seasonal-postprocessed-pressure-levels + format: grib + originating_centre: ecmwf + system: '4' + variable: geopotential_anomaly + pressure_level: '10' + api_product_type: 'ensemble_mean' + leadtime_month: '1' + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + SEASONAL_POSTPROCESSED_SL: + dataset: seasonal-postprocessed-single-levels + format: grib + originating_centre: ecmwf + system: '4' + variable: 2m_dewpoint_temperature_anomaly + api_product_type: 'ensemble_mean' + leadtime_month: '1' + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + SEASONAL_ORIGINAL_SL: + dataset: seasonal-original-single-levels + format: grib + originating_centre: ecmwf + system: '5' + variable: 10m_u_component_of_wind + leadtime_hour: '6' + SEASONAL_ORIGINAL_PL: + dataset: seasonal-original-pressure-levels + format: grib + originating_centre: ecmwf + system: '5' + variable: geopotential + pressure_level: '10' + leadtime_hour: '12' + SEASONAL_MONTHLY_PL: + dataset: seasonal-monthly-pressure-levels + format: grib + originating_centre: ecmwf + system: '5' + variable: geopotential + pressure_level: '10' + api_product_type: monthly_mean + leadtime_month: '1' + metadata_mapping: + completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + SEASONAL_MONTHLY_SL: + dataset: seasonal-monthly-single-levels + format: grib + originating_centre: ecmwf + system: '5' + variable: 10m_u_component_of_wind + api_product_type: monthly_mean + leadtime_month: '1' + metadata_mapping: + 
completionTimeFromAscendingNode: '{$.completionTimeFromAscendingNode#to_iso_date}' + _date: + - '{{"_date": {{"year": {_date#interval_to_datetime_dict}["year"], "month": {_date#interval_to_datetime_dict}["month"], "day": {_date#interval_to_datetime_dict}["day"]}} }}' + - '{$._date}' + SIS_HYDRO_MET_PROJ: + dataset: sis-hydrology-meteorology-derived-projections + format: zip + api_product_type: climate_impact_indicators + variable: 2m_air_temperature + variable_type: absolute_change_from_reference_period + processing_type: original + time_aggregation: monthly_mean + horizontal_resolution: 5_km + experiment: degree_scenario + rcm: cclm4_8_17 + gcm: ec_earth + ensemble_member: r12i1p1 + period: 1_5_c GENERIC_PRODUCT_TYPE: dataset: '{productType}' time: '00:00' @@ -2925,6 +3360,7 @@ geometry: - '{{"geometry": {geometry#to_geojson} }}' - '$.geometry' + defaultGeometry: 'POLYGON((180 -90, 180 90, -180 90, -180 -90, 180 -90))' productType: '$.queries[0].domain' storageStatus: '{$.requiresJobQueue#get_group_name((?PFalse)|(?PTrue))}' downloadLink: 'https://my.meteoblue.com/dataset/query' @@ -3059,16 +3495,22 @@ id: - 'productIdentifier={id#remove_extension}' - '{$.properties.title#remove_extension}' - tileIdentifier: '$.null' + tileIdentifier: + - tileId + - '$.null' # The geographic extent of the product geometry: - 'geometry={geometry#to_rounded_wkt}' - '($.geometry.`str()`.`sub(/^\\[\\]$/, POLYGON((180 -90, 180 90, -180 90, -180 -90, 180 -90)))`)|($.geometry[*])' + # The url of the quicklook + quicklook: '$.properties.thumbnail' # The url to download the product "as is" (literal or as a template to be completed either after the search result # is obtained from the provider or during the eodag download phase) downloadLink: 'https://catalogue.dataspace.copernicus.eu/odata/v1/Products({uid})/$value' # storageStatus: must be one of ONLINE, STAGING, OFFLINE storageStatus: '$.properties.status' + # Additional metadata provided by the providers but that don't appear in the reference spec + thumbnail: '$.properties.thumbnail' download: !plugin type: HTTPDownload base_uri: 'https://catalogue.dataspace.copernicus.eu/odata/v1/Products' @@ -3288,6 +3730,10 @@ products: S1_SAR_GRD: productType: sentinel-1-grd + metadata_mapping: + processingLevel: + - '{{"query":{{"s1:processing_level":{{"eq":"{processingLevel}"}}}}}}' + - '$.properties."s1:processing_level"' S2_MSI_L2A: productType: sentinel-2-l2a LANDSAT_C2L1: @@ -3324,6 +3770,9 @@ api_endpoint: https://hydroweb.next.theia-land.fr/api/v1/rs-catalog/stac/search need_auth: true auth_error_code: 401 + discover_queryables: + fetch_url: null + product_type_fetch_url: null pagination: max_items_per_page: 10_000 metadata_mapping: @@ -3355,11 +3804,11 @@ url: https://www.wekeo.eu/ search: !plugin type: DataRequestSearch - api_endpoint: "https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/" - data_request_url: "https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/datarequest" - metadata_url: "https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/querymetadata/" - status_url: "https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/datarequest/status/" - result_url: "https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/datarequest/jobs/{jobId}/result?size={items_per_page}&page={page}" + api_endpoint: "https://wekeo-broker.prod.wekeo2.eu/databroker/" + data_request_url: "https://wekeo-broker.prod.wekeo2.eu/databroker/datarequest" + metadata_url: "https://wekeo-broker.prod.wekeo2.eu/databroker/querymetadata/" + status_url: 
"https://wekeo-broker.prod.wekeo2.eu/databroker/datarequest/status/" + result_url: "https://wekeo-broker.prod.wekeo2.eu/databroker/datarequest/jobs/{jobId}/result?size={items_per_page}&page={page}" timeout: 60 need_auth: true auth_error_code: 401 @@ -3372,6 +3821,8 @@ total_items_nb_key_path: '$.totItems' discover_product_types: fetch_url: null + constraints_file_url: "https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/querymetadata/{dataset}" + constraints_entry: constraints metadata_mapping: productType: - '{{"datasetId": "{productType}"}}' @@ -3388,11 +3839,11 @@ cloudCover: '$.extraInformation.cloudCover' downloadLink: '$.url' title: '$.productInfo.product' - orderLink: 'https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/dataorder?{{"uri": "{downloadLink}","jobId":"requestJobId"}}' + orderLink: 'https://wekeo-broker.prod.wekeo2.eu/databroker/dataorder?{{"uri": "{downloadLink}","jobId":"requestJobId"}}' storageStatus: 'OFFLINE' auth: !plugin type: TokenAuth - auth_uri: 'https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/gettoken' + auth_uri: 'https://wekeo-broker.prod.wekeo2.eu/databroker/gettoken' token_type: json token_key: access_token request_method: GET @@ -6261,7 +6712,7 @@ download: !plugin type: HTTPDownload - base_uri: https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker + base_uri: https://wekeo-broker.prod.wekeo2.eu/databroker flatten_top_dirs: true auth_error_code: 401 order_enabled: true @@ -6269,9 +6720,354 @@ order_on_response: metadata_mapping: order_id: '$.orderId' - orderStatusLink: 'https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/dataorder/status/{order_id}' - downloadLink: 'https://wekeo-broker.apps.mercator.dpi.wekeo.eu/databroker/dataorder/download/{order_id}' + orderStatusLink: 'https://wekeo-broker.prod.wekeo2.eu/databroker/dataorder/status/{order_id}' + downloadLink: 'https://wekeo-broker.prod.wekeo2.eu/databroker/dataorder/download/{order_id}' order_status_method: 'GET' order_status_success: status: completed message: "Done!" 
+ +--- +!provider + name: creodias_s3 + priority: 0 + description: CloudFerro DIAS data through S3 protocol + roles: + - host + url: https://creodias.eu/ + search: !plugin + type: CreodiasS3Search + api_endpoint: 'http://datahub.creodias.eu/resto/api/collections/{collection}/search.json' + s3_endpoint: 'https://eodata.cloudferro.com' + need_auth: true + timeout: 60 + pagination: + next_page_url_tpl: '{url}?{search}&maxRecords={items_per_page}&page={page}&exactCount=1' + total_items_nb_key_path: '$.properties.totalResults' + max_items_per_page: 1_000 + discover_metadata: + auto_discovery: true + metadata_pattern: '^(?!collection)[a-zA-Z0-9]+$' + search_param: '{metadata}={{{metadata}}}' + metadata_path: '$.properties.*' + discover_product_types: + fetch_url: https://finder.creodias.eu/attributes.json + result_type: json + results_entry: 'collections[?billing=="free"]' + generic_product_type_id: '$.id' + generic_product_type_parsable_properties: + collection: '$.id' + generic_product_type_parsable_metadata: + abstract: '$.description' + instrument: '{$.summaries.instruments#csv_list}' + platform: '{$.summaries.constellation#csv_list}' + platformSerialIdentifier: '{$.summaries.platform#csv_list}' + processingLevel: '$.summaries."processing:level"' + keywords: '{$.keywords#csv_list}' + license: '$.license' + title: '$.title' + missionStartDate: '$.extent.temporal.interval[0][0]' + metadata_mapping: + uid: '$.id' + productType: + - productType + - '$.properties.productType' + platform: '$.properties.collection' + platformSerialIdentifier: + - platform + - '$.properties.platform' + instrument: + - instrument + - '$.properties.instrument' + processingLevel: + - processingLevel + - '$.properties.processingLevel' + # INSPIRE obligated OpenSearch Parameters for Collection Search (Table 4) + title: '{$.properties.title#remove_extension}' + topicCategory: '$.properties.topicCategory' + keyword: '$.properties.keywords' + abstract: '$.properties.description' + resolution: + - 'resolution' + - '$.properties.resolution' + organisationName: + - 'organisationName' + - '$.properties.organisationName' + publicationDate: '$.properties.published' + accessConstraint: '$.properties.license' + + # OpenSearch Parameters for Product Search (Table 5) + parentIdentifier: + - 'parentIdentifier' + - '$.properties.parentIdentifier' + acquisitionType: '$.properties.acquisitionType' + orbitNumber: + - 'orbitNumber' + - '$.properties.orbitNumber' + orbitDirection: + - 'orbitDirection' + - '$.properties.orbitDirection' + swathIdentifier: + - 'swath' + - '$.properties.swath' + cloudCover: + - 'cloudCover=[0,{cloudCover}]' + - '$.properties.cloudCover' + snowCover: + - 'snowCover=[0,{snowCover}]' + - '$.properties.snowCover' + productVersion: '$.properties.version' + productQualityStatus: '$.properties.onlineQualityCheck' + processorName: '$.properties.processingName' + processingCenter: '$.properties.processingCenter' + creationDate: '$.properties.dhusIngestDate' + modificationDate: '$.properties.updated' + sensorMode: + - 'sensorMode' + - '$.properties.sensorMode' + # OpenSearch Parameters for Acquisition Parameters Search (Table 6) + startTimeFromAscendingNode: + - startDate + - '$.properties.startDate' + completionTimeFromAscendingNode: + - completionDate + - '$.properties.completionDate' + polarizationMode: + - 'polarisation' + - '$.properties.polarisation' + + # Custom parameters (not defined in the base document referenced above) + id: + - 'productIdentifier=%{id#remove_extension}%' + -
'{$.properties.title#remove_extension}' + tileIdentifier: + - tileId + - '$.null' + # The geographic extent of the product + geometry: + - 'geometry={geometry#to_rounded_wkt}' + # - '$.geometry' + - '($.geometry.`str()`.`sub(/^\\[\\]$/, POLYGON((180 -90, 180 90, -180 90, -180 -90, 180 -90)))`)|($.geometry[*])' + # The url of the quicklook + quicklook: '$.properties.thumbnail' + # The url to download the product "as is" (literal or as a template to be completed either after the search result + # is obtained from the provider or during the eodag download phase) + downloadLink: 'https://zipper.creodias.eu/download/{uid}' + # storageStatus must be one of ONLINE, STAGING, OFFLINE + storageStatus: '$.properties.status' + + # Additional metadata provided by the providers but that don't appear in the reference spec + thumbnail: '$.properties.thumbnail' + download: !plugin + type: CreodiasS3Download + flatten_top_dirs: True + base_uri: 'https://eodata.cloudferro.com' + s3_bucket: 'eodata' + auth: !plugin + type: AwsAuth + auth_error_code: 403 + products: + # S1 + S1_SAR_RAW: + productType: RAW + collection: Sentinel1 + metadata_mapping: + cloudCover: '$.null' + S1_SAR_GRD: + productType: GRD + collection: Sentinel1 + metadata_mapping: + cloudCover: '$.null' + S1_SAR_SLC: + productType: SLC + collection: Sentinel1 + metadata_mapping: + cloudCover: '$.null' + S1_SAR_OCN: + productType: OCN + collection: Sentinel1 + metadata_mapping: + cloudCover: '$.null' + # S2 + S2_MSI_L1C: + collection: SENTINEL-2 + productType: S2MSI1C + prefixes_delimiter: ".jp2" + S2_MSI_L2A: + collection: SENTINEL-2 + productType: S2MSI2A + # S3 SRAL + S3_SRA: + productType: SRA + collection: Sentinel3 + metadata_mapping: + cloudCover: '$.null' + S3_SRA_A: + productType: SRA_A + collection: Sentinel3 + metadata_mapping: + cloudCover: '$.null' + S3_SRA_BS: + productType: SRA_BS + collection: Sentinel3 + metadata_mapping: + cloudCover: '$.null' + S3_LAN: + productType: LAN + collection: Sentinel3 + metadata_mapping: + cloudCover: '$.null' + S3_WAT: + productType: WAT + collection: Sentinel3 + metadata_mapping: + cloudCover: '$.null' + # S3 OLCI + S3_EFR: + productType: EFR + collection: Sentinel3 + S3_ERR: + productType: ERR + collection: Sentinel3 + S3_OLCI_L2LRR: + productType: LRR + collection: Sentinel3 + S3_OLCI_L2LFR: + productType: LFR + collection: Sentinel3 + S3_OLCI_L2WRR: + productType: WRR + collection: Sentinel3 + S3_OLCI_L2WFR: + productType: WFR + collection: Sentinel3 + # S3 SLSTR + S3_SLSTR_L1RBT: + productType: RBT + collection: Sentinel3 + S3_SLSTR_L2LST: + productType: LST + collection: Sentinel3 + S3_SLSTR_L2WST: + productType: WST + collection: Sentinel3 + S3_SLSTR_L2AOD: + productType: AOD___ + collection: Sentinel3 + S3_SLSTR_L2FRP: + productType: FRP___ + collection: Sentinel3 + # S3 SY + S3_SY_AOD: + productType: SY_2_AOD___ + collection: Sentinel3 + S3_SY_SYN: + productType: SY_2_SYN___ + collection: Sentinel3 + S3_SY_V10: + productType: SY_2_V10___ + collection: Sentinel3 + S3_SY_VG1: + productType: SY_2_VG1___ + collection: Sentinel3 + S3_SY_VGP: + productType: SY_2_VGP___ + collection: Sentinel3 + # S5P L1 + S5P_L1B_IR_SIR: + productType: L1B_IR_SIR + collection: Sentinel5P + S5P_L1B_IR_UVN: + productType: L1B_IR_UVN + collection: Sentinel5P + S5P_L1B_RA_BD1: + productType: L1B_RA_BD1 + collection: Sentinel5P + S5P_L1B_RA_BD2: + productType: L1B_RA_BD2 + collection: Sentinel5P + S5P_L1B_RA_BD3: + productType: L1B_RA_BD3 + collection: Sentinel5P + S5P_L1B_RA_BD4: + productType: L1B_RA_BD4 + 
collection: Sentinel5P + S5P_L1B_RA_BD5: + productType: L1B_RA_BD5 + collection: Sentinel5P + S5P_L1B_RA_BD6: + productType: L1B_RA_BD6 + collection: Sentinel5P + S5P_L1B_RA_BD7: + productType: L1B_RA_BD7 + collection: Sentinel5P + S5P_L1B_RA_BD8: + productType: L1B_RA_BD8 + collection: Sentinel5P + # S5P L2 + S5P_L2_NO2: + productType: L2__NO2___ + collection: Sentinel5P + S5P_L2_CLOUD: + productType: L2__CLOUD_ + collection: Sentinel5P + S5P_L2_O3: + productType: L2__O3____ + collection: Sentinel5P + S5P_L2_CO: + productType: L2__CO____ + collection: Sentinel5P + S5P_L2_AER_AI: + productType: L2__AER_AI + collection: Sentinel5P + S5P_L2_O3_PR: + productType: L2__O3__PR + collection: Sentinel5P + S5P_L2_O3_TCL: + productType: L2__O3_TCL + collection: Sentinel5P + S5P_L2_AER_LH: + productType: L2__AER_LH + collection: Sentinel5P + S5P_L2_HCHO: + productType: L2__HCHO__ + collection: Sentinel5P + S5P_L2_CH4: + productType: L2__CH4___ + collection: Sentinel5P + S5P_L2_NP_BD3: + productType: L2__NP_BD3 + collection: Sentinel5P + S5P_L2_NP_BD6: + productType: L2__NP_BD6 + collection: Sentinel5P + S5P_L2_NP_BD7: + productType: L2__NP_BD7 + collection: Sentinel5P + S5P_L2_SO2: + productType: L2__SO2___ + collection: Sentinel5P + # COP DEM + COP_DEM_GLO30_DGED: + productType: DGE_30 + collection: CopDem + metadata_mapping: + cloudCover: '$.null' + COP_DEM_GLO30_DTED: + productType: DTE_30 + collection: CopDem + metadata_mapping: + cloudCover: '$.null' + COP_DEM_GLO90_DGED: + productType: DGE_90 + collection: CopDem + metadata_mapping: + cloudCover: '$.null' + COP_DEM_GLO90_DTED: + productType: DTE_90 + collection: CopDem + metadata_mapping: + cloudCover: '$.null' + GENERIC_PRODUCT_TYPE: + productType: '{productType}' + collection: '{collection}' diff --git a/eodag/resources/stac.yml b/eodag/resources/stac.yml index 67a6215cd..2de3ea7fa 100644 --- a/eodag/resources/stac.yml +++ b/eodag/resources/stac.yml @@ -153,19 +153,14 @@ items: href: '$.search_results.next' title: Next page type: application/geo+json - method: GET + method: '$.search_results.method' + body: '$.search_results.body' # time and date when the response was generated timeStamp: '$.search_results.timeStamp' # count request result numberMatched: '$.search_results.numberMatched' # len(features) numberReturned: '$.search_results.numberReturned' - # 0.7.0 stac-browser compatibility - context: - page: '$.search_results.properties.page' - limit: '$.search_results.properties.itemsPerPage' - matched: '$.search_results.properties.totalResults' - returned: '$.search_results.numberReturned' # https://stacspec.org/STAC-api.html#operation/getFeature item: @@ -191,14 +186,16 @@ item: license: '{catalog[license]}' constellation: '$.product.properties.platform' platform: '$.product.properties.platformSerialIdentifier' - intruments: + instruments: - '$.product.properties.instrument' gsd: '$.product.properties.resolution' published: '$.product.properties.publicationDate' eo:cloud_cover: '$.product.properties.cloudCover' + eo:snow_cover: '$.product.properties.snowCover' processing:level: '$.product.properties.processingLevel' sat:orbit_state: '$.product.properties.orbitDirection' sat:relative_orbit: '$.product.properties.relativeOrbitNumber' + sat:absolute_orbit : '$.product.properties.orbitNumber' sar:product_type: '$.product.properties.productType' sar:instrument_mode: '$.product.properties.sensorMode' sar:polarizations: '$.product.properties.polarizationChannels' @@ -225,6 +222,7 @@ item: href: "{catalog[url]}/items/{item[id]}/download" # 
https://github.com/radiantearth/stac-spec/blob/master/item-spec/item-spec.md#media-types type: 'application/zip' + _dc_qs: '$.product.properties._dc_qs' thumbnail: title: 'Thumbnail' href: '$.product.properties.quicklook' diff --git a/eodag/resources/stac_api.yml b/eodag/resources/stac_api.yml index 8ae1a9b8b..5415be3d8 100644 --- a/eodag/resources/stac_api.yml +++ b/eodag/resources/stac_api.yml @@ -196,6 +196,8 @@ paths: - Capabilities parameters: - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/provider' + - $ref: '#/components/parameters/queryablesAdditionalParameters' summary: Get the JSON Schema queryables that apply to a given collection. operationId: getQueryablesForCollection description: |- @@ -204,6 +206,13 @@ paths: When used as a subresource of the collection resource, it returns queryables pertaining only to that single collection. + + If no provider is given, the intersection of the queryables of all providers available for this collection will + be returned. + + If the selected provider (or an available provider if no provider is selected) provides constraints which define + which values are possible for which parameters, these constraints will be used to derive queryables. + responses: '200': $ref: '#/components/responses/Queryables' @@ -478,6 +487,22 @@ components: required: true schema: type: string + queryablesAdditionalParameters: + name: additional parameters + in: query + description: >- + Search parameters that will be used to filter the available values of the remaining queryable parameters, if constraints + are set for the given collection. + Empty values can be associated with parameters in order to unset existing default values. + + Examples: + + * remove queryable defaults to view all available values: `{"system_version": null, "model_levels": null}` + + * add a search parameter to check the associated available queryable values: `{"variable": "elevation"}` + + schema: + type: object catalogPath: name: catalogPath in: path @@ -587,6 +612,15 @@ components: schema: $ref: '#/components/schemas/collectionsArray' explode: false + provider: + name: provider + in: query + description: | + Provider from which the requested data shall be fetched; + If the requested data is available from this provider, it will be used instead of the provider with the highest configured priority.
+ required: false + schema: + type: string schemas: queryProp: description: Apply query operations to a specific property diff --git a/eodag/resources/stac_provider.yml b/eodag/resources/stac_provider.yml index 897e7e1a9..a6acbeff0 100644 --- a/eodag/resources/stac_provider.yml +++ b/eodag/resources/stac_provider.yml @@ -20,7 +20,7 @@ search: results_entry: features pagination: next_page_query_obj: '{{"limit":{items_per_page},"page":{page}}}' - total_items_nb_key_path: '$.context.matched' + total_items_nb_key_path: '$.numberMatched' next_page_url_key_path: '$.links[?(@.rel="next")].href' next_page_query_obj_key_path: '$.links[?(@.rel="next")].body' next_page_merge_key_path: '$.links[?(@.rel="next")].merge' @@ -30,22 +30,33 @@ search: search_param: '{{{{"query":{{{{"{metadata}":{{{{"eq":"{{{metadata}}}" }}}} }}}} }}}}' metadata_path: '$.properties.*' discover_product_types: - fetch_url: '{api_endpoint}/../collections' - result_type: json - results_entry: '$.collections[*]' - generic_product_type_id: '$.id' - generic_product_type_parsable_properties: - productType: '$.id' - generic_product_type_parsable_metadata: - abstract: '$.description' - instrument: '{$.summaries.instruments#csv_list}' - platform: '{$.summaries.constellation#csv_list}' - platformSerialIdentifier: '{$.summaries.platform#csv_list}' - processingLevel: '$.summaries."processing:level"' - keywords: '{$.keywords#csv_list}' - license: '$.license' - title: '$.title' - missionStartDate: '$.extent.temporal.interval[0][0]' + fetch_url: '{api_endpoint}/../collections' + result_type: json + results_entry: '$.collections[*]' + generic_product_type_id: '$.id' + generic_product_type_parsable_properties: + productType: '$.id' + generic_product_type_parsable_metadata: + abstract: '$.description' + instrument: '{$.summaries.instruments#csv_list}' + platform: '{$.summaries.constellation#csv_list}' + platformSerialIdentifier: '{$.summaries.platform#csv_list}' + processingLevel: '$.summaries."processing:level"' + keywords: '{$.keywords#csv_list}' + license: '$.license' + title: '$.title' + missionStartDate: '$.extent.temporal.interval[0][0]' + metadata_path: '$.properties.*' + discover_queryables: + fetch_url: '{api_endpoint}/../queryables' + product_type_fetch_url: '{api_endpoint}/../collections/{provider_product_type}/queryables' + result_type: json + results_entry: '$.properties[*]' + queryable_parsable_metadata: + description: '$.description' + title: '$.title' + type: '$.type' + pattern: '$.pattern' common_metadata_mapping_path: '$' metadata_mapping: # OpenSearch Parameters for Collection Search (Table 3) diff --git a/eodag/resources/user_conf_template.yml b/eodag/resources/user_conf_template.yml index 7651e1a9c..60b9abc47 100644 --- a/eodag/resources/user_conf_template.yml +++ b/eodag/resources/user_conf_template.yml @@ -199,3 +199,13 @@ wekeo: credentials: username: password: + +creodias_s3: + priority: # Lower value means lower priority (Default: 0) + search: # Search parameters configuration + download: + outputs_prefix: + auth: + credentials: + aws_access_key_id: + aws_secret_access_key: diff --git a/eodag/rest/server.py b/eodag/rest/server.py index 8fdcac835..b2b9b48bf 100755 --- a/eodag/rest/server.py +++ b/eodag/rest/server.py @@ -15,31 +15,37 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
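The creodias_s3 block just added to user_conf_template.yml takes an S3 key pair instead of a username/password. A minimal sketch of how those credentials could be supplied and used, assuming keys generated from one's CREODIAS account (all values below are placeholders):

import os
from eodag import EODataAccessGateway

# Equivalent to filling auth.credentials in the creodias_s3 section of the
# user configuration file (placeholder values)
os.environ["EODAG__CREODIAS_S3__AUTH__CREDENTIALS__AWS_ACCESS_KEY_ID"] = "access-key"
os.environ["EODAG__CREODIAS_S3__AUTH__CREDENTIALS__AWS_SECRET_ACCESS_KEY"] = "secret-key"

dag = EODataAccessGateway()
dag.set_preferred_provider("creodias_s3")

# CreodiasS3Search queries the resto catalogue; CreodiasS3Download then reads
# the product directly from the eodata bucket on https://eodata.cloudferro.com
products, total = dag.search(
    productType="S2_MSI_L1C",
    start="2023-06-01",
    end="2023-06-10",
    geom={"lonmin": 1, "latmin": 43, "lonmax": 2, "latmax": 44},
)
if products:
    print(dag.download(products[0]))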
-import io +from __future__ import annotations + import logging import os -import re import traceback from contextlib import asynccontextmanager -from distutils import dist -from typing import List, Optional, Union +from importlib.metadata import version +from typing import ( + TYPE_CHECKING, + Any, + AsyncGenerator, + Awaitable, + Callable, + Dict, + List, + Optional, + Union, +) -import pkg_resources from fastapi import APIRouter as FastAPIRouter from fastapi import FastAPI, HTTPException, Request from fastapi.encoders import jsonable_encoder from fastapi.middleware.cors import CORSMiddleware from fastapi.openapi.utils import get_openapi -from fastapi.responses import ORJSONResponse -from fastapi.types import Any, Callable, DecoratedCallable +from fastapi.responses import ORJSONResponse, StreamingResponse from pydantic import BaseModel from starlette.exceptions import HTTPException as StarletteHTTPException -from eodag.api.core import DEFAULT_ITEMS_PER_PAGE from eodag.config import load_stac_api_config +from eodag.rest.types.stac_queryables import StacQueryables from eodag.rest.utils import ( - QueryableProperty, - Queryables, download_stac_item_by_id_stream, eodag_api_init, fetch_collection_queryable_properties, @@ -53,7 +59,7 @@ get_stac_item_by_id, search_stac_items, ) -from eodag.utils import parse_header, update_nested_dict +from eodag.utils import DEFAULT_ITEMS_PER_PAGE, parse_header, update_nested_dict from eodag.utils.exceptions import ( AuthenticationError, DownloadError, @@ -61,13 +67,18 @@ NoMatchingProductType, NotAvailableError, RequestError, + TimeOutError, UnsupportedProductType, UnsupportedProvider, ValidationError, ) +if TYPE_CHECKING: + from fastapi.types import DecoratedCallable + from requests import Response + + logger = logging.getLogger("eodag.rest.server") -CAMEL_TO_SPACE_TITLED = re.compile(r"[:_-]|(?<=[a-z])(?=[A-Z])") class APIRouter(FastAPIRouter): @@ -104,7 +115,7 @@ def decorator(func: DecoratedCallable) -> DecoratedCallable: @asynccontextmanager -async def lifespan(app: FastAPI): +async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]: """API init and tear-down""" eodag_api_init() yield @@ -117,24 +128,18 @@ async def lifespan(app: FastAPI): @router.get("/api", tags=["Capabilities"], include_in_schema=False) -def eodag_openapi(): +def eodag_openapi() -> Dict[str, Any]: """Customized openapi""" logger.debug("URL: /api") if app.openapi_schema: return app.openapi_schema - # eodag metadata - distribution = pkg_resources.get_distribution("eodag") - metadata_str = distribution.get_metadata(distribution.PKG_INFO) - metadata_obj = dist.DistributionMetadata() - metadata_obj.read_pkg_file(io.StringIO(metadata_str)) - root_catalog = get_stac_catalogs(url="", fetch_providers=False) stac_api_version = get_stac_api_version() openapi_schema = get_openapi( title=f"{root_catalog['title']} / eodag", - version=getattr(metadata_obj, "version", None), + version=version("eodag"), routes=app.routes, ) @@ -180,7 +185,9 @@ def eodag_openapi(): @app.middleware("http") -async def forward_middleware(request: Request, call_next): +async def forward_middleware( + request: Request, call_next: Callable[[Request], Awaitable[Response]] +) -> Response: """Middleware that handles forward headers and sets request.state.url*""" forwarded_host = request.headers.get("x-forwarded-host", None) @@ -199,7 +206,9 @@ async def forward_middleware(request: Request, call_next): @app.exception_handler(StarletteHTTPException) -async def default_exception_handler(request: Request, error): +async def 
default_exception_handler( + request: Request, error: Exception +) -> ORJSONResponse: """Default errors handle""" description = ( getattr(error, "description", None) @@ -216,7 +225,7 @@ async def default_exception_handler(request: Request, error): @app.exception_handler(UnsupportedProductType) @app.exception_handler(UnsupportedProvider) @app.exception_handler(ValidationError) -async def handle_invalid_usage(request: Request, error): +async def handle_invalid_usage(request: Request, error: Exception) -> ORJSONResponse: """Invalid usage [400] errors handle""" logger.warning(traceback.format_exc()) return await default_exception_handler( @@ -229,7 +238,9 @@ async def handle_invalid_usage(request: Request, error): @app.exception_handler(NotAvailableError) -async def handle_resource_not_found(request: Request, error): +async def handle_resource_not_found( + request: Request, error: Exception +) -> ORJSONResponse: """Not found [404] errors handle""" return await default_exception_handler( request, @@ -242,7 +253,7 @@ async def handle_resource_not_found(request: Request, error): @app.exception_handler(MisconfiguredError) @app.exception_handler(AuthenticationError) -async def handle_auth_error(request: Request, error): +async def handle_auth_error(request: Request, error: Exception) -> ORJSONResponse: """AuthenticationError should be sent as internal server error to the client""" logger.error(f"{type(error).__name__}: {str(error)}") return await default_exception_handler( @@ -256,7 +267,7 @@ async def handle_auth_error(request: Request, error): @app.exception_handler(DownloadError) @app.exception_handler(RequestError) -async def handle_server_error(request: Request, error): +async def handle_server_error(request: Request, error: Exception) -> ORJSONResponse: """These errors should be sent as internal server error with details to the client""" logger.error(f"{type(error).__name__}: {str(error)}") return await default_exception_handler( @@ -268,8 +279,21 @@ async def handle_server_error(request: Request, error): ) +@app.exception_handler(TimeOutError) +async def handle_timeout(request: Request, error: Exception) -> ORJSONResponse: + """Timeout [504] errors handle""" + logger.error(f"{type(error).__name__}: {str(error)}") + return await default_exception_handler( + request, + HTTPException( + status_code=504, + detail=f"{type(error).__name__}: {str(error)}", + ), + ) + + @router.get("/", tags=["Capabilities"]) -def catalogs_root(request: Request): +def catalogs_root(request: Request) -> Any: """STAC catalogs root""" logger.debug(f"URL: {request.url}") @@ -284,7 +308,7 @@ def catalogs_root(request: Request): @router.get("/conformance", tags=["Capabilities"]) -def conformance(): +def conformance() -> Any: """STAC conformance""" logger.debug("URL: /conformance") response = get_stac_conformance() @@ -293,7 +317,7 @@ def conformance(): @router.get("/extensions/oseo/json-schema/schema.json", include_in_schema=False) -def stac_extension_oseo(request: Request): +def stac_extension_oseo(request: Request) -> Any: """STAC OGC / OpenSearch extension for EO""" logger.debug(f"URL: {request.url}") response = get_stac_extension_oseo(url=request.state.url) @@ -306,15 +330,15 @@ class SearchBody(BaseModel): class which describes the body of a search request """ - provider: Union[str, None] = None + provider: Optional[str] = None collections: Union[List[str], str] - datetime: Union[str, None] = None - bbox: Union[list, str, None] = None - intersects: Union[dict, None] = None - limit: Union[int, None] = 
DEFAULT_ITEMS_PER_PAGE - page: Union[int, None] = 1 - query: Union[dict, None] = None - ids: Union[List[str], None] = None + datetime: Optional[str] = None + bbox: Optional[List[Union[int, float]]] = None + intersects: Optional[Dict[str, Any]] = None + limit: Optional[int] = DEFAULT_ITEMS_PER_PAGE + page: Optional[int] = 1 + query: Optional[Dict[str, Any]] = None + ids: Optional[List[str]] = None @router.get( @@ -322,15 +346,40 @@ class which describes the body of a search request tags=["Data"], include_in_schema=False, ) -def stac_collections_item_download(collection_id, item_id, request: Request): - """STAC collection item local download""" +def stac_collections_item_download( + collection_id: str, item_id: str, request: Request +) -> StreamingResponse: + """STAC collection item download""" + logger.debug(f"URL: {request.url}") + + arguments = dict(request.query_params) + provider = arguments.pop("provider", None) + + return download_stac_item_by_id_stream( + catalogs=[collection_id], item_id=item_id, provider=provider, **arguments + ) + + +@router.get( + "/collections/{collection_id}/items/{item_id}/download/{asset_filter}", + tags=["Data"], + include_in_schema=False, +) +def stac_collections_item_download_asset( + collection_id, item_id, asset_filter, request: Request +): + """STAC collection item asset download""" logger.debug(f"URL: {request.url}") arguments = dict(request.query_params) provider = arguments.pop("provider", None) return download_stac_item_by_id_stream( - catalogs=[collection_id], item_id=item_id, provider=provider + catalogs=[collection_id], + item_id=item_id, + provider=provider, + asset=asset_filter, + **arguments, ) @@ -339,7 +388,7 @@ def stac_collections_item_download(collection_id, item_id, request: Request): tags=["Data"], include_in_schema=False, ) -def stac_collections_item(collection_id, item_id, request: Request): +def stac_collections_item(collection_id: str, item_id: str, request: Request) -> Any: """STAC collection item by id""" logger.debug(f"URL: {request.url}") url = request.state.url @@ -354,6 +403,7 @@ def stac_collections_item(collection_id, item_id, request: Request): root=url_root, catalogs=[collection_id], provider=provider, + **arguments, ) if response: @@ -372,7 +422,7 @@ def stac_collections_item(collection_id, item_id, request: Request): tags=["Data"], include_in_schema=False, ) -def stac_collections_items(collection_id, request: Request): +def stac_collections_items(collection_id: str, request: Request) -> Any: """STAC collections items""" logger.debug(f"URL: {request.url}") url = request.state.url @@ -399,7 +449,7 @@ def stac_collections_items(collection_id, request: Request): ) def list_collection_queryables( request: Request, collection_id: str, provider: Optional[str] = None -) -> Queryables: +) -> Any: """Returns the list of queryable properties for a specific collection. This endpoint provides a list of properties that can be used as filters when querying @@ -412,21 +462,24 @@ def list_collection_queryables( :type collection_id: str :param provider: (optional) The provider for which to retrieve additional properties. :type provider: str - :returns: An object containing the list of available queryable properties for the specified collection. - :rtype: eodag.rest.utils.Queryables + :returns: A json object containing the list of available queryable properties for the specified collection. 
+ :rtype: Any """ logger.debug(f"URL: {request.url}") + query_params = request.query_params.items() + additional_params = dict(query_params) + additional_params.pop("provider", None) - queryables = Queryables(q_id=request.state.url, additional_properties=False) - conf_args = [collection_id, provider] if provider else [collection_id] + queryables = StacQueryables(q_id=request.state.url, additional_properties=False) - provider_properties = set(fetch_collection_queryable_properties(*conf_args)) - - for prop in provider_properties: - titled_name = re.sub(CAMEL_TO_SPACE_TITLED, " ", prop).title() - queryables[prop] = QueryableProperty(description=titled_name) + collection_queryables = fetch_collection_queryable_properties( + collection_id, provider, **additional_params + ) + for key, collection_queryable in collection_queryables.items(): + queryables[key] = collection_queryable + queryables.properties.pop("collections") - return queryables + return jsonable_encoder(queryables) @router.get( @@ -434,7 +487,7 @@ def list_collection_queryables( tags=["Capabilities"], include_in_schema=False, ) -def collection_by_id(collection_id, request: Request): +def collection_by_id(collection_id: str, request: Request) -> Any: """STAC collection by id""" logger.debug(f"URL: {request.url}") url = request.state.url_root + "/collections" @@ -458,7 +511,7 @@ def collection_by_id(collection_id, request: Request): tags=["Capabilities"], include_in_schema=False, ) -def collections(request: Request): +def collections(request: Request) -> Any: """STAC collections Can be filtered using parameters: instrument, platform, platformSerialIdentifier, sensorType, processingLevel @@ -476,6 +529,7 @@ def collections(request: Request): arguments=arguments, provider=provider, ) + return jsonable_encoder(response) @@ -484,8 +538,31 @@ def collections(request: Request): tags=["Data"], include_in_schema=False, ) -def stac_catalogs_item_download(catalogs, item_id, request: Request): - """STAC item local download""" +def stac_catalogs_item_download( + catalogs: str, item_id: str, request: Request +) -> StreamingResponse: + """STAC Catalog item download""" + logger.debug(f"URL: {request.url}") + + arguments = dict(request.query_params) + provider = arguments.pop("provider", None) + + list_catalog = catalogs.strip("/").split("/") + + return download_stac_item_by_id_stream( + catalogs=list_catalog, item_id=item_id, provider=provider, **arguments + ) + + +@router.get( + "/catalogs/{catalogs:path}/items/{item_id}/download/{asset_filter}", + tags=["Data"], + include_in_schema=False, +) +def stac_catalogs_item_download_asset( + catalogs, item_id, asset_filter, request: Request +): + """STAC Catalog item asset download""" logger.debug(f"URL: {request.url}") arguments = dict(request.query_params) @@ -494,7 +571,11 @@ def stac_catalogs_item_download(catalogs, item_id, request: Request): catalogs = catalogs.strip("/").split("/") return download_stac_item_by_id_stream( - catalogs=catalogs, item_id=item_id, provider=provider + catalogs=catalogs, + item_id=item_id, + provider=provider, + asset=asset_filter, + **arguments, ) @@ -503,7 +584,7 @@ def stac_catalogs_item_download(catalogs, item_id, request: Request): tags=["Data"], include_in_schema=False, ) -def stac_catalogs_item(catalogs, item_id, request: Request): +def stac_catalogs_item(catalogs: str, item_id: str, request: Request): """Fetch catalog's single features.""" logger.debug(f"URL: {request.url}") url = request.state.url @@ -512,13 +593,14 @@ def stac_catalogs_item(catalogs, item_id, 
request: Request): arguments = dict(request.query_params) provider = arguments.pop("provider", None) - catalogs = catalogs.strip("/").split("/") + list_catalog = catalogs.strip("/").split("/") response = get_stac_item_by_id( url=url, item_id=item_id, root=url_root, - catalogs=catalogs, + catalogs=list_catalog, provider=provider, + **arguments, ) if response: @@ -537,7 +619,7 @@ def stac_catalogs_item(catalogs, item_id, request: Request): tags=["Data"], include_in_schema=False, ) -def stac_catalogs_items(catalogs, request: Request): +def stac_catalogs_items(catalogs: str, request: Request) -> Any: """Fetch catalog's features '""" logger.debug(f"URL: {request.url}") @@ -547,13 +629,13 @@ def stac_catalogs_items(catalogs, request: Request): arguments = dict(request.query_params) provider = arguments.pop("provider", None) - catalogs = catalogs.strip("/").split("/") + list_catalog = catalogs.strip("/").split("/") response = search_stac_items( url=url, arguments=arguments, root=url_root, - catalogs=catalogs, + catalogs=list_catalog, provider=provider, ) return jsonable_encoder(response) @@ -564,7 +646,7 @@ def stac_catalogs_items(catalogs, request: Request): tags=["Capabilities"], include_in_schema=False, ) -def stac_catalogs(catalogs, request: Request): +def stac_catalogs(catalogs: str, request: Request) -> Any: """Describe the given catalog and list available sub-catalogs""" logger.debug(f"URL: {request.url}") url = request.state.url @@ -573,11 +655,11 @@ def stac_catalogs(catalogs, request: Request): arguments = dict(request.query_params) provider = arguments.pop("provider", None) - catalogs = catalogs.strip("/").split("/") + list_catalog = catalogs.strip("/").split("/") response = get_stac_catalogs( url=url, root=url_root, - catalogs=catalogs, + catalogs=list_catalog, provider=provider, ) return jsonable_encoder(response) @@ -586,10 +668,10 @@ def stac_catalogs(catalogs, request: Request): @router.get( "/queryables", tags=["Capabilities"], - include_in_schema=False, response_model_exclude_none=True, + include_in_schema=False, ) -def list_queryables(request: Request) -> Queryables: +def list_queryables(request: Request, provider: Optional[str] = None) -> Any: """Returns the list of terms available for use when writing filter expressions. This endpoint provides a list of terms that can be used as filters when querying @@ -598,12 +680,20 @@ def list_queryables(request: Request) -> Queryables: :param request: The incoming request object. :type request: fastapi.Request - :returns: An object containing the list of available queryable terms. - :rtype: eodag.rest.utils.Queryables + :returns: A json object containing the list of available queryable terms. 
+ :rtype: Any """ logger.debug(f"URL: {request.url}") + query_params = request.query_params.items() + additional_params = dict(query_params) + additional_params.pop("provider", None) + queryables = StacQueryables(q_id=request.state.url) + if provider: + queryables.properties.update( + fetch_collection_queryable_properties(None, provider, **additional_params) + ) - return Queryables(q_id=request.state.url) + return jsonable_encoder(queryables) @router.get( @@ -616,7 +706,9 @@ def list_queryables(request: Request) -> Queryables: tags=["STAC"], include_in_schema=False, ) -def stac_search(request: Request, search_body: Optional[SearchBody] = None): +def stac_search( + request: Request, search_body: Optional[SearchBody] = None +) -> ORJSONResponse: """STAC collections items""" logger.debug(f"URL: {request.url}") logger.debug(f"Body: {search_body}") @@ -633,7 +725,11 @@ def stac_search(request: Request, search_body: Optional[SearchBody] = None): provider = arguments.pop("provider", None) response = search_stac_items( - url=url, arguments=arguments, root=url_root, provider=provider + url=url, + arguments=arguments, + root=url_root, + provider=provider, + method=request.method, ) resp = ORJSONResponse( content=response, status_code=200, media_type="application/json" diff --git a/eodag/rest/stac.py b/eodag/rest/stac.py index 46b74b7b2..4c6275928 100644 --- a/eodag/rest/stac.py +++ b/eodag/rest/stac.py @@ -15,11 +15,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import datetime +from __future__ import annotations + import logging import os -import re from collections import defaultdict +from datetime import datetime, timezone +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, cast from urllib.parse import parse_qs, urlencode, urlparse import dateutil.parser @@ -28,6 +30,7 @@ from dateutil import tz from dateutil.relativedelta import relativedelta from shapely.geometry import shape +from shapely.geometry.base import BaseGeometry from shapely.ops import unary_union from eodag.api.product.metadata_mapping import ( @@ -50,6 +53,12 @@ ValidationError, ) +if TYPE_CHECKING: + from eodag.api.core import EODataAccessGateway + from eodag.api.product import EOProduct + from eodag.api.search_result import SearchResult + + logger = logging.getLogger("eodag.rest.stac") DEFAULT_MISSION_START_DATE = "2015-01-01T00:00:00Z" @@ -63,7 +72,7 @@ class StacCommon: :type url: str :param stac_config: STAC configuration from stac.yml conf file :type stac_config: dict - :param provider: Chosen provider + :param provider: (optional) Chosen provider :type provider: str :param eodag_api: EODAG python API instance :type eodag_api: :class:`eodag.api.core.EODataAccessGateway` @@ -72,17 +81,22 @@ class StacCommon: """ def __init__( - self, url, stac_config, provider, eodag_api, root="/", *args, **kwargs - ): + self, + url: str, + stac_config: Dict[str, Any], + provider: Optional[str], + eodag_api: EODataAccessGateway, + root: str = "/", + ) -> None: self.url = url.rstrip("/") if len(url) > 1 else url self.stac_config = stac_config self.provider = provider self.eodag_api = eodag_api self.root = root.rstrip("/") if len(root) > 1 else root - self.data = {} + self.data: Dict[str, Any] = {} - def update_data(self, data): + def update_data(self, data: Dict[str, Any]) -> None: """Updates data using given input STAC dict data :param data: Catalog data (parsed STAC dict) @@ -113,7 +127,9 
@@ def update_data(self, data): self.data["stac_extensions"] = [] @staticmethod - def get_stac_extension(url, stac_config, extension, **kwargs): + def get_stac_extension( + url: str, stac_config: Dict[str, Any], extension: str, **kwargs: Any + ) -> Dict[str, str]: """Parse STAC extension from config and return as dict :param url: Requested URL @@ -135,11 +151,9 @@ def get_stac_extension(url, stac_config, extension, **kwargs): "url": url, "properties": kwargs.get("properties", {}), } - extension = format_dict_items(extension_model, **format_args) + return format_dict_items(extension_model, **format_args) - return extension - - def as_dict(self): + def as_dict(self) -> Dict[str, Any]: """Returns object data as dictionnary :returns: STAC data dictionnary @@ -157,7 +171,7 @@ class StacItem(StacCommon): :type url: str :param stac_config: STAC configuration from stac.yml conf file :type stac_config: dict - :param provider: Chosen provider + :param provider: (optional) Chosen provider :type provider: str :param eodag_api: EODAG python API instance :type eodag_api: :class:`eodag.api.core.EODataAccessGateway` @@ -166,19 +180,24 @@ class StacItem(StacCommon): """ def __init__( - self, url, stac_config, provider, eodag_api, root="/", *args, **kwargs - ): + self, + url: str, + stac_config: Dict[str, Any], + provider: Optional[str], + eodag_api: EODataAccessGateway, + root: str = "/", + ) -> None: super(StacItem, self).__init__( url=url, stac_config=stac_config, provider=provider, eodag_api=eodag_api, root=root, - *args, - **kwargs, ) - def __get_item_list(self, search_results, catalog): + def __get_item_list( + self, search_results: SearchResult, catalog: Dict[str, Any] + ) -> List[Dict[str, Any]]: """Build STAC items list from EODAG search results :param search_results: EODAG search results @@ -192,12 +211,12 @@ def __get_item_list(self, search_results, catalog): return [] item_model = self.__filter_item_model_properties( - self.stac_config["item"], search_results[0].product_type + self.stac_config["item"], str(search_results[0].product_type) ) provider_model = deepcopy(self.stac_config["provider"]) # check if some items need to be converted - need_conversion = {} + need_conversion: Dict[str, Any] = {} for k, v in item_model["properties"].items(): if isinstance(v, str): conversion, item_model["properties"][k] = get_metadata_path( @@ -210,7 +229,7 @@ def __get_item_list(self, search_results, catalog): k, item_model["properties"][k] ) - item_list = [] + item_list: List[Dict[str, Any]] = [] for product in search_results: # parse jsonpath provider_dict = jsonpath_parse_dict_items( @@ -222,31 +241,66 @@ def __get_item_list(self, search_results, catalog): }, ) + product_dict = deepcopy(product.__dict__) + if isinstance(product.assets, dict): + product_dict["assets"] = product.assets + else: + product_dict["assets"] = product.assets.as_dict() + product_item = jsonpath_parse_dict_items( item_model, { - "product": product.__dict__, + "product": product_dict, "providers": [provider_dict], }, ) - # add origin assets to product assets - origin_assets = product_item["assets"].pop("origin_assets") - if getattr(product, "assets", False): - product_item["assets"] = dict(product_item["assets"], **origin_assets) - # append provider query-arg to download link if specified + + # parse download link + url_parts = urlparse(str(product_item["assets"]["downloadLink"]["href"])) + query_dict = parse_qs(url_parts.query) + without_arg_url = ( + f"{url_parts.scheme}://{url_parts.netloc}{url_parts.path}" + if url_parts.scheme + 
else f"{url_parts.netloc}{url_parts.path}" + ) + + # add provider to query-args if self.provider: - parts = urlparse(product_item["assets"]["downloadLink"]["href"]) - query_dict = parse_qs(parts.query) - query_dict.update(provider=self.provider) - without_arg_url = ( - f"{parts.scheme}://{parts.netloc}{parts.path}" - if parts.scheme - else f"{parts.netloc}{parts.path}" - ) + query_dict.update(provider=[self.provider]) + # add datacube query-string to query-args + _dc_qs = product_item["assets"]["downloadLink"].pop("_dc_qs", None) + if _dc_qs: + query_dict.update(_dc_qs=_dc_qs) + + # update download link with up-to-date query-args + if query_dict: product_item["assets"]["downloadLink"][ "href" ] = f"{without_arg_url}?{urlencode(query_dict, doseq=True)}" + # move origin asset urls to alternate links and replace with eodag-server ones + origin_assets = product_item["assets"].pop("origin_assets", {}) + if getattr(product, "assets", False): + # replace origin asset urls with eodag-server ones + for asset_key, asset_value in origin_assets.items(): + # use origin asset as default + product_item["assets"][asset_key] = asset_value + # origin assets as alternate link + product_item["assets"][asset_key]["alternate"] = { + "origin": { + "title": "Origin asset link", + "href": asset_value["href"], + } + } + # use server-mode assets download links + asset_value["href"] = without_arg_url + if query_dict: + product_item["assets"][asset_key][ + "href" + ] += f"/{asset_key}?{urlencode(query_dict, doseq=True)}" + else: + product_item["assets"][asset_key]["href"] += f"/{asset_key}" + # apply conversion if needed for prop_key, prop_val in need_conversion.items(): conv_func, conv_args = prop_val @@ -267,17 +321,33 @@ def __get_item_list(self, search_results, catalog): format_args = deepcopy(self.stac_config) format_args["catalog"] = catalog format_args["item"] = product_item - product_item = format_dict_items(product_item, **format_args) + product_item: Dict[str, Any] = format_dict_items( + product_item, **format_args + ) product_item["bbox"] = [float(i) for i in product_item["bbox"]] # remove empty properties product_item = self.__filter_item_properties_values(product_item) + # update item link with datacube query-string + if _dc_qs: + url_parts = urlparse(str(product_item["links"][0]["href"])) + without_arg_url = ( + f"{url_parts.scheme}://{url_parts.netloc}{url_parts.path}" + if url_parts.scheme + else f"{url_parts.netloc}{url_parts.path}" + ) + product_item["links"][0][ + "href" + ] = f"{without_arg_url}?{urlencode(query_dict, doseq=True)}" + item_list.append(product_item) return item_list - def get_stac_items(self, search_results, catalog): + def get_stac_items( + self, search_results: SearchResult, catalog: Dict[str, Any] + ) -> Dict[str, Any]: """Build STAC items from EODAG search results :param search_results: EODAG search results @@ -298,28 +368,9 @@ def get_stac_items(self, search_results, catalog): for i, _ in enumerate(items_model["links"]): if items_model["links"][i]["rel"] == "self": items_model["links"][i]["href"] = catalog["url"] - if "page=" not in self.url: - search_results.next = "%s&page=%s" % ( - self.url, - search_results.properties["page"] + 1, - ) - else: - search_results.next = re.sub( - r"^(.*)(page=[0-9]+)(.*)$", - r"\1page=%s\3" % (search_results.properties["page"] + 1), - self.url, - ) - else: - search_results.next = "%s?page=%s" % ( - self.url, - search_results.properties["page"] + 1, - ) search_results.timeStamp = ( - datetime.datetime.now(datetime.timezone.utc) - .isoformat() - 
.replace("+00:00", "") - + "Z" + datetime.now(timezone.utc).isoformat().replace("+00:00", "") + "Z" ) # parse jsonpath @@ -353,7 +404,9 @@ def get_stac_items(self, search_results, catalog): self.update_data(items) return geojson.loads(geojson.dumps(self.data)) - def __filter_item_model_properties(self, item_model, product_type): + def __filter_item_model_properties( + self, item_model: Dict[str, Any], product_type: str + ) -> Dict[str, Any]: """Filter item model depending on product type metadata and its extensions. Removes not needed parameters, and adds supplementary ones as part of oseo extension. @@ -372,6 +425,7 @@ def __filter_item_model_properties(self, item_model, product_type): provider=self.provider, fetch_providers=False ) if pt["ID"] == product_type + or ("alias" in pt and pt["alias"] == product_type) ][0] except IndexError: raise NoMatchingProductType( @@ -396,6 +450,9 @@ def __filter_item_model_properties(self, item_model, product_type): if ( v not in result_item_model["properties"].values() and k not in self.stac_config["metadata_ignore"] + and not any( + k in str(prop) for prop in result_item_model["properties"].values() + ) ): result_item_model["properties"]["oseo:" + k] = string_to_jsonpath(k, v) @@ -418,7 +475,7 @@ def __filter_item_model_properties(self, item_model, product_type): return result_item_model - def __filter_item_properties_values(self, item): + def __filter_item_properties_values(self, item: Dict[str, Any]) -> Dict[str, Any]: """Removes empty properties, unused extensions, and add missing extensions :param item: STAC item data @@ -426,7 +483,9 @@ def __filter_item_properties_values(self, item): :returns: Filtered item model :rtype: dict """ - all_extensions_dict = deepcopy(self.stac_config["stac_extensions"]) + all_extensions_dict: Dict[str, str] = deepcopy( + self.stac_config["stac_extensions"] + ) # parse f-strings with root all_extensions_dict = format_dict_items( all_extensions_dict, **{"catalog": {"root": self.root}} @@ -435,7 +494,7 @@ def __filter_item_properties_values(self, item): item["stac_extensions"] = [] # dict to list of keys to permit pop() while iterating for k in list(item["properties"]): - extension_prefix = k.split(":")[0] + extension_prefix: str = k.split(":")[0] if item["properties"][k] is None: item["properties"].pop(k, None) @@ -449,7 +508,7 @@ def __filter_item_properties_values(self, item): return item - def get_stac_item_from_product(self, product): + def get_stac_item_from_product(self, product: EOProduct) -> Dict[str, Any]: """Build STAC item from EODAG product :param product: EODAG product @@ -457,7 +516,7 @@ def get_stac_item_from_product(self, product): :returns: STAC item :rtype: list """ - product_type = product.product_type + product_type = str(product.product_type) item_model = self.__filter_item_model_properties( self.stac_config["item"], product_type @@ -478,11 +537,14 @@ def get_stac_item_from_product(self, product): catalogs=[product_type], ) + product_dict = deepcopy(product.__dict__) + product_dict["assets"] = product.assets.as_dict() + # parse jsonpath product_item = jsonpath_parse_dict_items( item_model, { - "product": product.__dict__, + "product": product_dict, "providers": provider_dict, }, ) @@ -509,7 +571,7 @@ class StacCollection(StacCommon): :type url: str :param stac_config: STAC configuration from stac.yml conf file :type stac_config: dict - :param provider: Chosen provider + :param provider: (optional) Chosen provider :type provider: str :param eodag_api: EODAG python API instance :type eodag_api: 
:class:`eodag.api.core.EODataAccessGateway` @@ -518,19 +580,24 @@ class StacCollection(StacCommon): """ def __init__( - self, url, stac_config, provider, eodag_api, root="/", *args, **kwargs - ): + self, + url: str, + stac_config: Dict[str, Any], + provider: Optional[str], + eodag_api: EODataAccessGateway, + root: str = "/", + ) -> None: super(StacCollection, self).__init__( url=url, stac_config=stac_config, provider=provider, eodag_api=eodag_api, root=root, - *args, - **kwargs, ) - def __get_product_types(self, filters=None): + def __get_product_types( + self, filters: Optional[Dict[str, Any]] = None + ) -> List[Dict[str, Any]]: """Returns a list of supported product types :param filters: (optional) Additional filters for product types search @@ -554,7 +621,9 @@ def __get_product_types(self, filters=None): product_types = self.eodag_api.list_product_types(provider=self.provider) return product_types - def __get_collection_list(self, filters=None): + def __get_collection_list( + self, filters: Optional[Dict[str, Any]] = None + ) -> List[Dict[str, Any]]: """Build STAC collections list :param filters: (optional) Additional filters for collections search @@ -567,7 +636,7 @@ def __get_collection_list(self, filters=None): product_types = self.__get_product_types(filters) - collection_list = [] + collection_list: List[Dict[str, Any]] = [] for product_type in product_types: if self.provider: providers = [self.provider] @@ -576,10 +645,12 @@ def __get_collection_list(self, filters=None): providers = [ plugin.provider for plugin in self.eodag_api._plugins_manager.get_search_plugins( - product_type=product_type["ID"] + product_type=( + product_type.get("_id", None) or product_type["ID"] + ) ) ] - providers_models = [] + providers_models: List[Dict[str, Any]] = [] for provider in providers: provider_m = jsonpath_parse_dict_items( provider_model, @@ -609,7 +680,9 @@ def __get_collection_list(self, filters=None): return collection_list - def get_collections(self, filters=None): + def get_collections( + self, filters: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: """Build STAC collections :param filters: (optional) Additional filters for collections search @@ -642,7 +715,7 @@ def get_collections(self, filters=None): self.update_data(collections) return self.as_dict() - def get_collection_by_id(self, collection_id): + def get_collection_by_id(self, collection_id: str) -> Dict[str, Any]: """Build STAC collection by its id :param collection_id: Product type as collection ID @@ -671,7 +744,7 @@ class StacCatalog(StacCommon): :param stac_config: STAC configuration from stac.yml conf file :type stac_config: dict :param provider: Chosen provider - :type provider: str + :type provider: (optional) str :param eodag_api: EODAG python API instance :type eodag_api: :class:`eodag.api.core.EODataAccessGateway` :param root: (optional) API root @@ -685,30 +758,26 @@ class StacCatalog(StacCommon): def __init__( self, - url, - stac_config, - provider, - eodag_api, - root="/", - catalogs=[], - fetch_providers=True, - *args, - **kwargs, - ): + url: str, + stac_config: Dict[str, Any], + provider: Optional[str], + eodag_api: EODataAccessGateway, + root: str = "/", + catalogs: List[str] = [], + fetch_providers: bool = True, + ) -> None: super(StacCatalog, self).__init__( url=url, stac_config=stac_config, provider=provider, eodag_api=eodag_api, root=root, - *args, - **kwargs, ) - self.shp_location_config = eodag_api.locations_config - self.search_args = {} - self.data = {} - self.children = [] + + 
self.shp_location_config = eodag_api.locations_config + self.search_args: Dict[str, Any] = {} + self.children: List[Dict[str, Any]] = [] self.catalog_config = deepcopy(stac_config["catalog"]) @@ -724,7 +793,7 @@ def __init__( # build catalog self.__build_stac_catalog(catalogs, fetch_providers=fetch_providers) - def __update_data_from_catalog_config(self, catalog_config): + def __update_data_from_catalog_config(self, catalog_config: Dict[str, Any]) -> bool: """Updates configuration and data using given input catalog config :param catalog_config: Catalog config, from yml stac_config[catalogs] @@ -748,7 +817,7 @@ def __update_data_from_catalog_config(self, catalog_config): return True - def set_children(self, children=[]): + def set_children(self, children: List[Dict[str, Any]] = []) -> bool: """Set catalog children / links :param children: (optional) Children list @@ -761,7 +830,9 @@ def set_children(self, children=[]): self.data["links"] += children return True - def set_stac_product_type_by_id(self, product_type, **kwargs): + def set_stac_product_type_by_id( + self, product_type: str, **kwargs: Any + ) -> Dict[str, Any]: """Updates catalog with given product_type :param product_type: Product type @@ -781,7 +852,7 @@ def set_stac_product_type_by_id(self, product_type, **kwargs): format_args["catalog"] = defaultdict(str, **self.data) format_args["collection"] = collection try: - parsed_dict = format_dict_items(cat_model, **format_args) + parsed_dict: Dict[str, Any] = format_dict_items(cat_model, **format_args) except Exception: logger.error("Could not format product_type catalog") raise @@ -795,7 +866,7 @@ def set_stac_product_type_by_id(self, product_type, **kwargs): # get / set dates filters ------------------------------------------------- - def get_stac_years_list(self, **kwargs): + def get_stac_years_list(self, **kwargs: Any) -> List[int]: """Get catalog available years list :returns: Years list @@ -805,7 +876,7 @@ def get_stac_years_list(self, **kwargs): return list(range(extent_date_min.year, extent_date_max.year + 1)) - def get_stac_months_list(self, **kwargs): + def get_stac_months_list(self, **kwargs: Any) -> List[int]: """Get catalog available months list :returns: Months list @@ -820,7 +891,7 @@ def get_stac_months_list(self, **kwargs): ) ) - def get_stac_days_list(self, **kwargs): + def get_stac_days_list(self, **kwargs: Any) -> List[int]: """Get catalog available days list :returns: Days list @@ -834,7 +905,7 @@ def get_stac_days_list(self, **kwargs): ) ) - def set_stac_year_by_id(self, year, **kwargs): + def set_stac_year_by_id(self, year: str, **kwargs: Any) -> Dict[str, Any]: """Updates and returns catalog with given year :param year: Year number @@ -861,7 +932,7 @@ def set_stac_year_by_id(self, year, **kwargs): return parsed_dict - def set_stac_month_by_id(self, month, **kwargs): + def set_stac_month_by_id(self, month: str, **kwargs: Any) -> Dict[str, Any]: """Updates and returns catalog with given month :param month: Month number @@ -892,7 +963,7 @@ def set_stac_month_by_id(self, month, **kwargs): return parsed_dict - def set_stac_day_by_id(self, day, **kwargs): + def set_stac_day_by_id(self, day: str, **kwargs: Any) -> Dict[str, Any]: """Updates and returns catalog with given day :param day: Day number @@ -924,7 +995,7 @@ def set_stac_day_by_id(self, day, **kwargs): return parsed_dict - def get_datetime_extent(self): + def get_datetime_extent(self) -> Tuple[datetime, datetime]: """Returns catalog temporal extent as datetime objs :returns: Start & stop dates @@ -933,9 
+1004,7 @@ def get_datetime_extent(self): extent_date_min = dateutil.parser.parse(DEFAULT_MISSION_START_DATE).replace( tzinfo=tz.UTC ) - extent_date_max = datetime.datetime.now(datetime.timezone.utc).replace( - tzinfo=tz.UTC - ) + extent_date_max = datetime.now(timezone.utc).replace(tzinfo=tz.UTC) for interval in self.data["extent"]["temporal"]["interval"]: extent_date_min_str, extent_date_max_str = interval # date min @@ -954,13 +1023,18 @@ def get_datetime_extent(self): extent_date_max.replace(tzinfo=tz.UTC), ) - def set_stac_date(self, datetime_min, datetime_max, catalog_model): + def set_stac_date( + self, + datetime_min: datetime, + datetime_max: datetime, + catalog_model: Dict[str, Any], + ): """Updates catalog data using given dates :param datetime_min: Date min of interval - :type datetime_min: :class:`datetime.datetime` + :type datetime_min: :class:`datetime` :param datetime_max: Date max of interval - :type datetime_max: :class:`datetime.datetime` + :type datetime_max: :class:`datetime` :param catalog_model: Catalog model to use, from yml stac_config[catalogs] :type catalog_model: dict :returns: Updated catalog @@ -979,7 +1053,7 @@ def set_stac_date(self, datetime_min, datetime_max, catalog_model): "max": datetime_max.isoformat().replace("+00:00", "") + "Z", }, ) - parsed_dict = format_dict_items(catalog_model, **format_args) + parsed_dict: Dict[str, Any] = format_dict_items(catalog_model, **format_args) self.update_data(parsed_dict) @@ -994,7 +1068,7 @@ def set_stac_date(self, datetime_min, datetime_max, catalog_model): # get / set cloud_cover filter -------------------------------------------- - def get_stac_cloud_covers_list(self, **kwargs): + def get_stac_cloud_covers_list(self, **kwargs: Any) -> List[int]: """Get cloud_cover list :returns: cloud_cover list @@ -1002,7 +1076,9 @@ def get_stac_cloud_covers_list(self, **kwargs): """ return list(range(0, 101, 10)) - def set_stac_cloud_cover_by_id(self, cloud_cover, **kwargs): + def set_stac_cloud_cover_by_id( + self, cloud_cover: str, **kwargs: Any + ) -> Dict[str, Any]: """Updates and returns catalog with given max cloud_cover :param cloud_cover: Cloud_cover number @@ -1015,7 +1091,7 @@ def set_stac_cloud_cover_by_id(self, cloud_cover, **kwargs): format_args = deepcopy(self.stac_config) format_args["catalog"] = defaultdict(str, **self.data) format_args["cloud_cover"] = cloud_cover - parsed_dict = format_dict_items(cat_model, **format_args) + parsed_dict: Dict[str, Any] = format_dict_items(cat_model, **format_args) self.update_data(parsed_dict) @@ -1026,7 +1102,7 @@ def set_stac_cloud_cover_by_id(self, cloud_cover, **kwargs): # get / set locations filter ---------------------------------------------- - def get_stac_location_list(self, catalog_name): + def get_stac_location_list(self, catalog_name: str) -> List[str]: """Get locations list using stac_conf & locations_config :param catalog_name: Catalog/location name @@ -1052,7 +1128,7 @@ def get_stac_location_list(self, catalog_name): attr = location_config["attr"] with shapefile.Reader(path) as shp: - countries_list = [rec[attr] for rec in shp.records()] + countries_list: List[str] = [rec[attr] for rec in shp.records()] # remove duplicates countries_list = list(set(countries_list)) @@ -1061,7 +1137,9 @@ def get_stac_location_list(self, catalog_name): return countries_list - def set_stac_location_by_id(self, location, catalog_name): + def set_stac_location_by_id( + self, location: str, catalog_name: str + ) -> Dict[str, Any]: """Updates and returns catalog with given location 
 :param location: Feature attribute value for shp filtering
@@ -1104,14 +1182,14 @@ def set_stac_location_by_id(self, location, catalog_name):
             )
             return {}
 
-        geom = unary_union(geom_hits)
+        geom = cast(BaseGeometry, unary_union(geom_hits))
 
         cat_model = deepcopy(self.stac_config["catalogs"]["country"]["model"])
         # parse f-strings
         format_args = deepcopy(self.stac_config)
         format_args["catalog"] = defaultdict(str, **self.data)
         format_args["feature"] = defaultdict(str, {"geometry": geom, "id": location})
-        parsed_dict = format_dict_items(cat_model, **format_args)
+        parsed_dict: Dict[str, Any] = format_dict_items(cat_model, **format_args)
 
         self.update_data(parsed_dict)
 
@@ -1120,7 +1198,7 @@ def set_stac_location_by_id(self, location, catalog_name):
 
         return parsed_dict
 
-    def build_locations_config(self):
+    def build_locations_config(self) -> Dict[str, str]:
         """Build locations config from stac_conf[locations_catalogs] & eodag_api.locations_config
 
         :returns: Locations configuration dict
@@ -1130,7 +1208,7 @@
 
         locations_config_model = deepcopy(self.stac_config["locations_catalogs"])
 
-        locations_config = {}
+        locations_config: Dict[str, str] = {}
         for loc in user_config_locations_list:
             # parse jsonpath
             parsed = jsonpath_parse_dict_items(
@@ -1145,7 +1223,9 @@
 
         return locations_config
 
-    def __build_stac_catalog(self, catalogs=[], fetch_providers=True):
+    def __build_stac_catalog(
+        self, catalogs: List[str] = [], fetch_providers: bool = True
+    ) -> StacCatalog:
         """Build nested catalog from catalag list
 
         :param catalogs: (optional) Catalogs list
@@ -1309,7 +1389,7 @@ def __build_stac_catalog(self, catalogs=[], fetch_providers=True):
 
         return self
 
-    def get_stac_catalog(self):
+    def get_stac_catalog(self) -> Dict[str, Any]:
         """Get nested STAC catalog as data dict
 
         :returns: Catalog dictionnary
diff --git a/eodag/rest/types/__init__.py b/eodag/rest/types/__init__.py
new file mode 100644
index 000000000..f0402eb89
--- /dev/null
+++ b/eodag/rest/types/__init__.py
@@ -0,0 +1,18 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023, CS GROUP - France, https://www.csgroup.eu/
+#
+# This file is part of EODAG project
+# https://www.github.com/CS-SI/EODAG
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""EODAG rest.types package"""
diff --git a/eodag/rest/types/eodag_search.py b/eodag/rest/types/eodag_search.py
new file mode 100644
index 000000000..937a64f31
--- /dev/null
+++ b/eodag/rest/types/eodag_search.py
@@ -0,0 +1,189 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023, CS GROUP - France, https://www.csgroup.eu/
+#
+# This file is part of EODAG project
+# https://www.github.com/CS-SI/EODAG
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from typing import Any, Dict, List, Optional, Tuple, Union
+
+from pydantic import (
+    BaseModel,
+    ConfigDict,
+    Field,
+    ValidationInfo,
+    field_validator,
+    model_validator,
+)
+from shapely.geometry import (
+    GeometryCollection,
+    LinearRing,
+    LineString,
+    MultiLineString,
+    MultiPoint,
+    MultiPolygon,
+    Point,
+    Polygon,
+)
+
+from eodag.utils import DEFAULT_ITEMS_PER_PAGE
+
+Geometry = Union[
+    Dict[str, Any],
+    Point,
+    MultiPoint,
+    LineString,
+    MultiLineString,
+    Polygon,
+    MultiPolygon,
+    LinearRing,
+    GeometryCollection,
+]
+
+
+class EODAGSearch(BaseModel):
+    """Model used to convert a STAC formatted request to an EODAG formatted one"""
+
+    model_config = ConfigDict(
+        extra="allow", populate_by_name=True, arbitrary_types_allowed=True
+    )
+
+    productType: Optional[str] = Field(None, alias="collections", validate_default=True)
+    provider: Optional[str] = Field(None)
+    ids: Optional[List[str]] = Field(None)
+    id: Optional[List[str]] = Field(None, alias="ids")
+    geom: Optional[Geometry] = Field(None, alias="geometry")
+    start: Optional[str] = Field(None, alias="start_datetime")
+    end: Optional[str] = Field(None, alias="end_datetime")
+    publicationDate: Optional[str] = Field(None, alias="published")
+    creationDate: Optional[str] = Field(None, alias="created")
+    modificationDate: Optional[str] = Field(None, alias="updated")
+    platformSerialIdentifier: Optional[str] = Field(None, alias="platform")
+    instrument: Optional[str] = Field(None, alias="instruments")
+    platform: Optional[str] = Field(None, alias="constellation")
+    resolution: Optional[int] = Field(None, alias="gsd")
+    cloudCover: Optional[int] = Field(None, alias="eo:cloud_cover")
+    snowCover: Optional[int] = Field(None, alias="eo:snow_cover")
+    processingLevel: Optional[str] = Field(None, alias="processing:level")
+    orbitDirection: Optional[str] = Field(None, alias="sat:orbit_state")
+    relativeOrbitNumber: Optional[int] = Field(None, alias="sat:relative_orbit")
+    orbitNumber: Optional[int] = Field(None, alias="sat:absolute_orbit")
+    # TODO: collision in property name. Need to handle "sar:product_type"
+    sensorMode: Optional[str] = Field(None, alias="sar:instrument_mode")
+    polarizationChannels: Optional[List[str]] = Field(None, alias="sar:polarizations")
+    dopplerFrequency: Optional[str] = Field(None, alias="sar:frequency_band")
+    doi: Optional[str] = Field(None, alias="sci:doi")
+    productVersion: Optional[str] = Field(None, alias="version")
+    illuminationElevationAngle: Optional[float] = Field(
+        None, alias="view:sun_elevation"
+    )
+    illuminationAzimuthAngle: Optional[float] = Field(None, alias="view:sun_azimuth")
+    page: Optional[int] = Field(1)
+    items_per_page: int = Field(DEFAULT_ITEMS_PER_PAGE, alias="limit")
+    sortBy: Optional[List[Tuple[str, str]]] = Field(None, alias="sortby")
+
+    @model_validator(mode="before")
+    @classmethod
+    def remove_custom_extensions(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+        """Process unknown and OSEO EODAG custom extension fields"""
+        # Transform EODAG custom extensions OSEO and UNK.
+        keys_to_update: Dict[str, str] = {}
+        for key in values.keys():
+            if key.startswith("unk:"):
+                keys_to_update[key] = key[len("unk:") :]
+            elif key.startswith("oseo:"):
+                keys_to_update[key] = key[len("oseo:") :]
+
+        for old_key, new_key in keys_to_update.items():
+            values[cls.snake_to_camel(new_key)] = values.pop(old_key)
+
+        return values
+
+    @model_validator(mode="before")
+    @classmethod
+    def remove_keys(cls, values: Dict[str, Any]) -> Dict[str, Any]:
+        """Remove 'datetime', 'crunch', 'intersects', and 'bbox' keys"""
+        for key in ["datetime", "crunch", "intersects", "bbox"]:
+            values.pop(key, None)
+        return values
+
+    @field_validator("instrument", mode="before")
+    @classmethod
+    def join_instruments(cls, v: Union[str, List[str]]) -> str:
+        """Join a list of instruments into a single instrument string"""
+        if isinstance(v, list):
+            return ",".join(v)
+        return v
+
+    @field_validator("sortBy", mode="before")
+    @classmethod
+    def convert_stac_to_eodag_sortby(
+        cls,
+        sortby_post_params: List[Dict[str, str]],
+    ) -> List[Tuple[str, str]]:
+        """
+        Convert STAC POST sortby to EODAG sortby
+        """
+        eodag_sortby: List[Tuple[str, str]] = []
+        for sortby_post_param in sortby_post_params:
+            field = cls.snake_to_camel(cls.to_eodag(sortby_post_param["field"]))
+            eodag_sortby.append((field, sortby_post_param["direction"]))
+        return eodag_sortby
+
+    @field_validator("productType")
+    @classmethod
+    def verify_producttype_is_present(
+        cls, v: Optional[str], info: ValidationInfo
+    ) -> Optional[str]:
+        """Verify productType is present when required"""
+        if not v and (
+            not info
+            or not getattr(info, "context", None)
+            or not info.context.get("isCatalog")  # type: ignore
+        ):
+            raise ValueError("A collection is required")
+
+        return v
+
+    @field_validator("start", "end")
+    @classmethod
+    def cleanup_dates(cls, v: str) -> str:
+        """Properly format dates"""
+        if v.endswith("+00:00"):
+            return v.replace("+00:00", "") + "Z"
+        return v
+
+    @classmethod
+    def snake_to_camel(cls, snake_str: str) -> str:
+        """Convert snake_case to camelCase"""
+        # Split the string by underscore and capitalize each component except the first one
+        components = snake_str.split("_")
+        return components[0] + "".join(x.title() for x in components[1:])
+
+    @classmethod
+    def to_eodag(cls, value: str) -> str:
+        """Convert a STAC parameter to its matching EODAG name"""
+        alias_map = {
+            field_info.alias: name
+            for name, field_info in cls.model_fields.items()
+            if field_info.alias
+        }
+        return alias_map.get(value, value)
+
+    @classmethod
+    def to_stac(cls, field_name: str) -> str:
+        """Get the alias of a field in a Pydantic model"""
+        field = cls.model_fields.get(field_name)
+        if field is not None and field.alias is not None:
+            return field.alias
+        return field_name
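Note on the `EODAGSearch` model added above: it does the STAC-to-EODAG translation declaratively. Populating the model applies the `alias` mappings and the validators, while `to_eodag()` / `to_stac()` translate individual parameter names through the same alias table. A minimal sketch of the intended usage, under the assumption of pydantic v2 semantics as configured in the file; the input values are made up for illustration:

    from eodag.rest.types.eodag_search import EODAGSearch

    stac_request = {
        "collections": "S2_MSI_L1C",          # mapped to productType
        "eo:cloud_cover": 20,                 # mapped to cloudCover
        "instruments": ["msi", "olci"],       # joined into "msi,olci"
        "oseo:orbit_direction": "ascending",  # custom prefix stripped, then camelCased
    }
    search = EODAGSearch.model_validate(stac_request)
    print(search.productType, search.cloudCover, search.instrument)
    # -> S2_MSI_L1C 20 msi,olci
    print(EODAGSearch.to_eodag("eo:cloud_cover"), EODAGSearch.to_stac("cloudCover"))
    # -> cloudCover eo:cloud_cover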
diff --git a/eodag/rest/types/stac_queryables.py b/eodag/rest/types/stac_queryables.py
new file mode 100644
index 000000000..36cc9a324
--- /dev/null
+++ b/eodag/rest/types/stac_queryables.py
@@ -0,0 +1,134 @@
+# -*- coding: utf-8 -*-
+# Copyright 2023, CS GROUP - France, https://www.csgroup.eu/
+#
+# This file is part of EODAG project
+# https://www.github.com/CS-SI/EODAG
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+from eodag.types import python_field_definition_to_json
+from eodag.utils import Annotated
+
+if TYPE_CHECKING:
+    from pydantic.fields import FieldInfo
+
+
+class StacQueryableProperty(BaseModel):
+    """A class representing a queryable property.
+
+    :param description: The description of the queryables property
+    :type description: str
+    :param ref: (optional) A reference link to the schema of the property.
+    :type ref: str
+    :param type: (optional) possible types of the property
+    :type type: list[str]
+    """
+
+    description: str
+    ref: Optional[str] = Field(default=None, serialization_alias="$ref")
+    type: Optional[Union[str, List[str]]] = None
+    enum: Optional[List[Any]] = None
+    value: Optional[Any] = None
+
+    @classmethod
+    def from_python_field_definition(
+        cls, id: str, python_field_definition: Annotated[Any, FieldInfo]
+    ) -> StacQueryableProperty:
+        """Build Model from python_field_definition"""
+        def_dict = python_field_definition_to_json(python_field_definition)
+
+        if not def_dict.get("description", None):
+            def_dict["description"] = def_dict.get("title", None) or id
+
+        return cls(**def_dict)
+
+
+class StacQueryables(BaseModel):
+    """A class representing queryable properties for the STAC API.
+
+    :param json_schema: The URL of the JSON schema.
+    :type json_schema: str
+    :param q_id: (optional) The identifier of the queryables.
+    :type q_id: str
+    :param q_type: The type of the object.
+    :type q_type: str
+    :param title: The title of the queryables.
+    :type title: str
+    :param description: The description of the queryables
+    :type description: str
+    :param properties: A dictionary of queryable properties.
+    :type properties: dict
+    :param additional_properties: Whether additional properties are allowed.
+    :type additional_properties: bool
+    """
+
+    json_schema: str = Field(
+        default="https://json-schema.org/draft/2019-09/schema",
+        serialization_alias="$schema",
+    )
+    q_id: Optional[str] = Field(default=None, serialization_alias="$id")
+    q_type: str = Field(default="object", serialization_alias="type")
+    title: str = Field(default="Queryables for EODAG STAC API")
+    description: str = Field(
+        default="Queryable names for the EODAG STAC API Item Search filter."
+    )
+    properties: Dict[str, StacQueryableProperty] = Field(
+        default={
+            "ids": StacQueryableProperty(
+                description="ID",
+                ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/id",
+            ),
+            "collections": StacQueryableProperty(
+                description="Collection",
+                ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/collection",
+            ),
+            "geometry": StacQueryableProperty(
+                description="Geometry",
+                ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/geometry",
+            ),
+            "datetime": StacQueryableProperty(
+                description="Datetime - use parameters year, month, day, time instead if available",
+                ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/datetime.json#/properties/datetime",
+            ),
+        }
+    )
+    additional_properties: bool = Field(
+        default=True, serialization_alias="additionalProperties"
+    )
+
+    def get_properties(self) -> Dict[str, StacQueryableProperty]:
+        """Get the queryable properties.
+
+        :returns: A dictionary containing queryable properties.
+        :rtype: typing.Dict[str, StacQueryableProperty]
+        """
+        properties = {}
+        for key, property in self.properties.items():
+            property = StacQueryableProperty(
+                description=property.description, type=property.type
+            )
+            properties[key] = property
+        return properties
+
+    def __contains__(self, name: str) -> bool:
+        return name in self.properties
+
+    def __setitem__(self, name: str, qprop: StacQueryableProperty) -> None:
+        # only keep "datetime" queryable for dates
+        if name not in ("start_datetime", "end_datetime"):
+            self.properties[name] = qprop
diff --git a/eodag/rest/utils.py b/eodag/rest/utils.py
index a377fd229..524a52edd 100644
--- a/eodag/rest/utils.py
+++ b/eodag/rest/utils.py
@@ -1,26 +1,51 @@
 # -*- coding: utf-8 -*-
-# Copyright 2017-2018 CS GROUP - France (CS SI)
-# All rights reserved
+# Copyright 2018, CS Systemes d'Information, https://www.csgroup.eu/
+#
+# This file is part of EODAG project
+# https://www.github.com/CS-SI/EODAG
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
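Note on the `StacQueryables` model defined in `stac_queryables.py` above: it relies on pydantic v2 serialization aliases, so the JSON-Schema keywords (`$schema`, `$id`, `type`, `additionalProperties`) only materialize when the model is dumped with aliases enabled, which is what FastAPI's `jsonable_encoder` does by default in the `/queryables` endpoints. A minimal usage sketch; the `$id` URL and the extra property are made up for illustration:

    from eodag.rest.types.stac_queryables import (
        StacQueryableProperty,
        StacQueryables,
    )

    queryables = StacQueryables(q_id="https://example.com/queryables")
    # Provider-specific queryables are added dict-style; __setitem__ silently
    # drops start_datetime/end_datetime so that only "datetime" is exposed.
    queryables["eo:cloud_cover"] = StacQueryableProperty(
        description="Cloud Cover", type="integer"
    )
    queryables["start_datetime"] = StacQueryableProperty(description="Dropped")

    assert "eo:cloud_cover" in queryables
    assert "start_datetime" not in queryables
    print(queryables.model_dump(by_alias=True, exclude_none=True))
    # -> {'$schema': 'https://json-schema.org/draft/2019-09/schema',
    #     '$id': 'https://example.com/queryables', 'type': 'object', ...}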
+from __future__ import annotations import ast import datetime +import glob import json import logging import os import re -from collections import namedtuple from shutil import make_archive, rmtree -from typing import Dict, Optional +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterator, + List, + NamedTuple, + Optional, + Tuple, + Union, +) +from urllib.parse import urlencode import dateutil.parser from dateutil import tz from fastapi.responses import StreamingResponse -from pydantic import BaseModel, Field from shapely.geometry import Polygon, shape import eodag from eodag import EOProduct -from eodag.api.core import DEFAULT_ITEMS_PER_PAGE, DEFAULT_PAGE from eodag.api.product.metadata_mapping import OSEO_METADATA_MAPPING from eodag.api.search_result import SearchResult from eodag.config import load_stac_config, load_stac_provider_config @@ -28,7 +53,11 @@ from eodag.plugins.crunch.filter_latest_tpl_name import FilterLatestByName from eodag.plugins.crunch.filter_overlap import FilterOverlap from eodag.rest.stac import StacCatalog, StacCollection, StacCommon, StacItem +from eodag.rest.types.eodag_search import EODAGSearch +from eodag.rest.types.stac_queryables import StacQueryableProperty from eodag.utils import ( + DEFAULT_ITEMS_PER_PAGE, + DEFAULT_PAGE, GENERIC_PRODUCT_TYPE, _deprecated, dict_items_recursive_apply, @@ -38,14 +67,29 @@ MisconfiguredError, NoMatchingProductType, NotAvailableError, + RequestError, UnsupportedProductType, ValidationError, ) +if TYPE_CHECKING: + from io import BufferedReader + + from shapely.geometry.base import BaseGeometry + + logger = logging.getLogger("eodag.rest.utils") eodag_api = eodag.EODataAccessGateway() -Cruncher = namedtuple("Cruncher", ["clazz", "config_params"]) + + +class Cruncher(NamedTuple): + """Type hinted Cruncher namedTuple""" + + clazz: Callable[..., Any] + config_params: List[str] + + crunchers = { "latestIntersect": Cruncher(FilterLatestIntersect, []), "latestByName": Cruncher(FilterLatestByName, ["name_pattern"]), @@ -61,19 +105,21 @@ reason="Function internally used by get_home_page_content, also deprecated", version="2.6.1", ) -def format_product_types(product_types): +def format_product_types(product_types: List[Dict[str, Any]]) -> str: """Format product_types :param product_types: A list of EODAG product types as returned by the core api :type product_types: list """ - result = [] + result: List[str] = [] for pt in product_types: result.append("* *__{ID}__*: {abstract}".format(**pt)) return "\n".join(sorted(result)) -def get_detailled_collections_list(provider=None, fetch_providers=True): +def get_detailled_collections_list( + provider: Optional[str] = None, fetch_providers: bool = True +) -> List[Dict[str, Any]]: """Returns detailled collections / product_types list for a given provider as a list of config dicts :param provider: (optional) Chosen provider @@ -90,7 +136,7 @@ def get_detailled_collections_list(provider=None, fetch_providers=True): @_deprecated(reason="No more needed with STAC API + Swagger", version="2.6.1") -def get_home_page_content(base_url, ipp=None): +def get_home_page_content(base_url: str, ipp: Optional[int] = None) -> str: """Compute eodag service home page content :param base_url: The service root URL @@ -110,12 +156,14 @@ def get_home_page_content(base_url, ipp=None): reason="Used to format output from deprecated function get_home_page_content", version="2.6.1", ) -def get_templates_path(): +def get_templates_path() -> str: """Returns Jinja templates path""" return 
os.path.join(os.path.dirname(__file__), "templates") -def get_product_types(provider=None, filters=None): +def get_product_types( + provider: Optional[str] = None, filters: Optional[Dict[str, Any]] = None +) -> List[Dict[str, Any]]: """Returns a list of supported product types :param provider: (optional) Provider name @@ -148,54 +196,60 @@ def get_product_types(provider=None, filters=None): return product_types -def search_bbox(request_bbox): +def search_bbox(request_bbox: str) -> Optional[Dict[str, float]]: """Transform request bounding box as a bbox suitable for eodag search""" eodag_bbox = None search_bbox_keys = ["lonmin", "latmin", "lonmax", "latmax"] - if request_bbox: - try: - request_bbox_list = [float(coord) for coord in request_bbox.split(",")] - except ValueError as e: - raise ValidationError("invalid box coordinate type: %s" % e) + if not request_bbox: + return None + + try: + request_bbox_list = [float(coord) for coord in request_bbox.split(",")] + except ValueError as e: + raise ValidationError("invalid box coordinate type: %s" % e) - eodag_bbox = dict(zip(search_bbox_keys, request_bbox_list)) - if len(eodag_bbox) != 4: - raise ValidationError("input box is invalid: %s" % request_bbox) + eodag_bbox = dict(zip(search_bbox_keys, request_bbox_list)) + if len(eodag_bbox) != 4: + raise ValidationError("input box is invalid: %s" % request_bbox) return eodag_bbox -def get_date(date): +def get_date(date: Optional[str]) -> Optional[str]: """Check if the input date can be parsed as a date""" - if date: - try: - date = ( - dateutil.parser.parse(date) - .replace(tzinfo=tz.UTC) - .isoformat() - .replace("+00:00", "") - ) - except ValueError as e: - exc = ValidationError("invalid input date: %s" % e) - raise exc - return date + if not date: + return None + try: + return ( + dateutil.parser.parse(date) + .replace(tzinfo=tz.UTC) + .isoformat() + .replace("+00:00", "") + ) + except ValueError as e: + exc = ValidationError("invalid input date: %s" % e) + raise exc -def get_int(val): +def get_int(input: Optional[Any]) -> Optional[int]: """Check if the input can be parsed as an integer""" - if val: - try: - val = int(val) - except ValueError as e: - raise ValidationError("invalid input integer value: %s" % e) + if input is None: + return None + + try: + val = int(input) + except ValueError as e: + raise ValidationError("invalid input integer value: %s" % e) return val -def filter_products(products, arguments, **kwargs): +def filter_products( + products: SearchResult, arguments: Dict[str, Any], **kwargs: Any +) -> SearchResult: """Apply an eodag cruncher to filter products""" filter_name = arguments.get("filter") if filter_name: @@ -203,7 +257,7 @@ def filter_products(products, arguments, **kwargs): if not cruncher: raise ValidationError("unknown filter name") - cruncher_config = dict() + cruncher_config: Dict[str, Any] = dict() for config_param in cruncher.config_params: config_param_value = arguments.get(config_param) if not config_param_value: @@ -216,12 +270,14 @@ def filter_products(products, arguments, **kwargs): try: products = products.crunch(cruncher.clazz(cruncher_config), **kwargs) except MisconfiguredError as e: - raise ValidationError(e) + raise ValidationError(str(e)) return products -def get_pagination_info(arguments): +def get_pagination_info( + arguments: Dict[str, Any] +) -> Tuple[Optional[int], Optional[int]]: """Get pagination arguments""" page = get_int(arguments.pop("page", DEFAULT_PAGE)) # items_per_page can be specified using limit or itemsPerPage @@ -237,7 +293,7 @@ def 
get_pagination_info(arguments): return page, items_per_page -def get_geometry(arguments: dict): +def get_geometry(arguments: Dict[str, Any]) -> Optional[BaseGeometry]: """Get geometry from arguments""" if arguments.get("intersects") and arguments.get("bbox"): raise ValidationError("Only one of bbox and intersects can be used at a time.") @@ -290,7 +346,7 @@ def get_geometry(arguments: dict): return geom -def get_datetime(arguments): +def get_datetime(arguments: Dict[str, Any]) -> Tuple[Optional[str], Optional[str]]: """Get the datetime criterias from the search arguments :param arguments: Request args @@ -320,7 +376,7 @@ def get_datetime(arguments): return get_date(dtstart), get_date(dtend) -def get_metadata_query_paths(metadata_mapping): +def get_metadata_query_paths(metadata_mapping: Dict[str, Any]) -> Dict[str, Any]: """Get dict of query paths and their names from metadata_mapping :param metadata_mapping: STAC metadata mapping (see 'resources/stac_provider.yml') @@ -328,7 +384,7 @@ def get_metadata_query_paths(metadata_mapping): :returns: Mapping of query paths with their corresponding names :rtype: dict """ - metadata_query_paths = {} + metadata_query_paths: Dict[str, Any] = {} for metadata_name, metadata_spec in metadata_mapping.items(): # When metadata_spec have a length of 1 the query path is not specified if len(metadata_spec) == 2: @@ -353,7 +409,7 @@ def get_metadata_query_paths(metadata_mapping): return metadata_query_paths -def get_arguments_query_paths(arguments): +def get_arguments_query_paths(arguments: Dict[str, Any]) -> Dict[str, Any]: """Get dict of query paths and their values from arguments Build a mapping of the query paths present in the arguments @@ -371,7 +427,9 @@ def get_arguments_query_paths(arguments): ) -def get_criterias_from_metadata_mapping(metadata_mapping, arguments): +def get_criterias_from_metadata_mapping( + metadata_mapping: Dict[str, Any], arguments: Dict[str, Any] +) -> Dict[str, Any]: """Get criterias from the search arguments with the metadata mapping config :param metadata_mapping: STAC metadata mapping (see 'resources/stac_provider.yml') @@ -381,7 +439,7 @@ def get_criterias_from_metadata_mapping(metadata_mapping, arguments): :returns: Mapping of criterias with their corresponding values :rtype: dict """ - criterias = {} + criterias: Dict[str, Any] = {} metadata_query_paths = get_metadata_query_paths(metadata_mapping) arguments_query_paths = get_arguments_query_paths(arguments) for query_path in arguments_query_paths: @@ -396,7 +454,9 @@ def get_criterias_from_metadata_mapping(metadata_mapping, arguments): return criterias -def search_products(product_type, arguments, stac_formatted=True): +def search_products( + product_type: str, arguments: Dict[str, Any], stac_formatted: bool = True +) -> Union[Dict[str, Any], SearchResult]: """Returns product search results :param product_type: The product type criteria @@ -439,15 +499,27 @@ def search_products(product_type, arguments, stac_formatted=True): else: criterias.update(arguments) + if provider: + criterias["raise_errors"] = True + # We remove potential None values to use the default values of the search method criterias = dict((k, v) for k, v in criterias.items() if v is not None) products, total = eodag_api.search(**criterias) + if not products and eodag_api.search_errors: + search_error = RequestError( + "No result could be obtained from any available provider and following " + "error(s) appeared while searching:" + ) + search_error.history = eodag_api.search_errors + raise search_error + 
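With the block above, provider-scoped searches no longer fail silently: `raise_errors=True` makes the underlying `eodag_api.search()` raise immediately for the chosen provider, and an otherwise empty multi-provider result is turned into a single `RequestError` whose `history` carries the per-provider failures (surfaced as a 500 by `handle_server_error` in server.py). A hedged sketch of the behaviour a caller now sees; the provider and product type names are illustrative only:

    from eodag.rest.utils import search_products
    from eodag.utils.exceptions import RequestError

    try:
        search_products("S2_MSI_L1C", {"provider": "peps"})
    except RequestError as err:
        print(err)          # aggregated "No result could be obtained..." message
        print(err.history)  # per-provider errors collected during the search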
products = filter_products(products, arguments, **criterias) + response: Union[Dict[str, Any], SearchResult] if not unserialized: - response = SearchResult(products).as_geojson_object() + response = products.as_geojson_object() response.update( { "properties": { @@ -458,7 +530,7 @@ def search_products(product_type, arguments, stac_formatted=True): } ) else: - response = SearchResult(products) + response = products response.properties = { "page": page, "itemsPerPage": items_per_page, @@ -475,7 +547,12 @@ def search_products(product_type, arguments, stac_formatted=True): return response -def search_product_by_id(uid, product_type=None, provider=None): +def search_product_by_id( + uid: str, + product_type: Optional[str] = None, + provider: Optional[str] = None, + **kwargs: Any, +) -> SearchResult: """Search a product by its id :param uid: The uid of the EO product @@ -484,14 +561,18 @@ def search_product_by_id(uid, product_type=None, provider=None): :type product_type: str :param provider: (optional) The provider to be used :type provider: str + :param kwargs: additional search parameters + :type kwargs: Any :returns: A search result :rtype: :class:`~eodag.api.search_result.SearchResult` :raises: :class:`~eodag.utils.exceptions.ValidationError` :raises: RuntimeError """ + if provider: + kwargs["raise_errors"] = True try: - products, total = eodag_api.search( - id=uid, productType=product_type, provider=provider + products, _ = eodag_api.search( + id=uid, productType=product_type, provider=provider, **kwargs ) return products except ValidationError: @@ -503,7 +584,7 @@ def search_product_by_id(uid, product_type=None, provider=None): # STAC ------------------------------------------------------------------------ -def get_stac_conformance(): +def get_stac_conformance() -> Dict[str, str]: """Build STAC conformance :returns: conformance dictionnary @@ -512,7 +593,7 @@ def get_stac_conformance(): return stac_config["conformance"] -def get_stac_api_version(): +def get_stac_api_version() -> str: """Get STAC API version :returns: STAC API version @@ -521,7 +602,9 @@ def get_stac_api_version(): return stac_config["stac_api_version"] -def get_stac_collections(url, root, arguments, provider=None): +def get_stac_collections( + url: str, root: str, arguments: Dict[str, Any], provider: Optional[str] = None +) -> Dict[str, Any]: """Build STAC collections :param url: Requested URL @@ -544,7 +627,9 @@ def get_stac_collections(url, root, arguments, provider=None): ).get_collections(arguments) -def get_stac_collection_by_id(url, root, collection_id, provider=None): +def get_stac_collection_by_id( + url: str, root: str, collection_id: str, provider: Optional[str] = None +) -> Dict[str, Any]: """Build STAC collection by id :param url: Requested URL @@ -567,7 +652,14 @@ def get_stac_collection_by_id(url, root, collection_id, provider=None): ).get_collection_by_id(collection_id) -def get_stac_item_by_id(url, item_id, catalogs, root="/", provider=None): +def get_stac_item_by_id( + url: str, + item_id: str, + catalogs: List[str], + root: str = "/", + provider: Optional[str] = None, + **kwargs: Any, +) -> Dict[str, Any]: """Build STAC item by id :param url: Requested URL @@ -580,12 +672,22 @@ def get_stac_item_by_id(url, item_id, catalogs, root="/", provider=None): :type root: str :param provider: (optional) Chosen provider :type provider: str + :param kwargs: additional search parameters + :type kwargs: Any :returns: Collection dictionary :rtype: dict """ product_type = catalogs[0] - found_products = 
search_product_by_id(item_id, product_type=product_type) + _dc_qs = kwargs.get("_dc_qs", None) + + found_products = search_product_by_id( + item_id, product_type=product_type, provider=provider, _dc_qs=_dc_qs + ) + if len(found_products) > 0: + found_products[0].product_type = eodag_api.get_alias_from_product_type( + found_products[0].product_type + ) return StacItem( url=url, stac_config=stac_config, @@ -597,7 +699,13 @@ def get_stac_item_by_id(url, item_id, catalogs, root="/", provider=None): return None -def download_stac_item_by_id_stream(catalogs, item_id, provider=None): +def download_stac_item_by_id_stream( + catalogs: List[str], + item_id: str, + provider: Optional[str] = None, + asset: Optional[str] = None, + **kwargs: Any, +) -> StreamingResponse: """Download item :param catalogs: Catalogs list (only first is used as product_type) @@ -606,12 +714,14 @@ def download_stac_item_by_id_stream(catalogs, item_id, provider=None): :type item_id: str :param provider: (optional) Chosen provider :type provider: str - :param zip: if the downloaded filed should be zipped - :type zip: str - :returns: a stream of the downloaded data (either as a zip or the individual assets) + :param kwargs: additional download parameters + :type kwargs: Any + :returns: a stream of the downloaded data (zip file) :rtype: StreamingResponse """ product_type = catalogs[0] + _dc_qs = kwargs.get("_dc_qs", None) + search_plugin = next( eodag_api._plugins_manager.get_search_plugins(product_type, provider) ) @@ -634,8 +744,9 @@ def download_stac_item_by_id_stream(catalogs, item_id, provider=None): } product = EOProduct(provider or product_data["provider"], properties) else: + search_results = search_product_by_id( - item_id, product_type=product_type, provider=provider + item_id, product_type=product_type, provider=provider, _dc_qs=_dc_qs ) if len(search_results) > 0: product = search_results[0] @@ -658,16 +769,22 @@ def download_stac_item_by_id_stream(catalogs, item_id, provider=None): ) try: download_stream_dict = product.downloader._stream_download_dict( - product, auth=auth + product, auth=auth, asset=asset ) except NotImplementedError: logger.warning( f"Download streaming not supported for {product.downloader}: downloading locally then delete" ) - product_path = eodag_api.download(product, extract=False) + product_path = eodag_api.download(product, extract=False, asset=asset) if os.path.isdir(product_path): # do not zip if dir contains only one file - all_filenames = next(os.walk(product_path), (None, None, []))[2] + all_filenames = [ + f + for f in glob.glob( + os.path.join(product_path, "**", "*"), recursive=True + ) + if os.path.isfile(f) + ] if len(all_filenames) == 1: filepath_to_stream = all_filenames[0] else: @@ -690,7 +807,9 @@ def download_stac_item_by_id_stream(catalogs, item_id, provider=None): return StreamingResponse(**download_stream_dict) -def read_file_chunks_and_delete(opened_file, chunk_size=64 * 1024): +def read_file_chunks_and_delete( + opened_file: BufferedReader, chunk_size: int = 64 * 1024 +) -> Iterator[bytes]: """Yield file chunks and delete file when finished.""" while True: data = opened_file.read(chunk_size) @@ -703,7 +822,13 @@ def read_file_chunks_and_delete(opened_file, chunk_size=64 * 1024): yield data -def get_stac_catalogs(url, root="/", catalogs=[], provider=None, fetch_providers=True): +def get_stac_catalogs( + url: str, + root: str = "/", + catalogs: List[str] = [], + provider: Optional[str] = None, + fetch_providers: bool = True, +) -> Dict[str, Any]: """Build STAC catalog 
:param url: Requested URL @@ -731,7 +856,14 @@ def get_stac_catalogs(url, root="/", catalogs=[], provider=None, fetch_providers ).get_stac_catalog() -def search_stac_items(url, arguments, root="/", catalogs=[], provider=None): +def search_stac_items( + url: str, + arguments: Dict[str, Any], + root: str = "/", + catalogs: List[str] = [], + provider: Optional[str] = None, + method: Optional[str] = "GET", +) -> Dict[str, Any]: """Get items collection dict for given catalogs list :param url: Requested URL @@ -744,6 +876,8 @@ def search_stac_items(url, arguments, root="/", catalogs=[], provider=None): :type catalogs: list :param provider: (optional) Chosen provider :type provider: str + :param method: (optional) search request HTTP method ('GET' or 'POST') + :type method: str :returns: Catalog dictionnary :rtype: dict """ @@ -751,6 +885,14 @@ def search_stac_items(url, arguments, root="/", catalogs=[], provider=None): catalog_url = url.replace("/items", "") + next_page_kwargs = { + key: value for key, value in arguments.copy().items() if value is not None + } + next_page_id = ( + int(next_page_kwargs["page"]) + 1 if "page" in next_page_kwargs else 2 + ) + next_page_kwargs["page"] = next_page_id + # use catalogs from path or if it is empty, collections from args if catalogs: result_catalog = StacCatalog( @@ -773,7 +915,8 @@ def search_stac_items(url, arguments, root="/", catalogs=[], provider=None): root=root, provider=provider, eodag_api=eodag_api, - # handle only one collection per request (STAC allows multiple) + # handle only one collection + # per request (STAC allows multiple) catalogs=collections[0:1], url=catalog_url.replace("/search", f"/collections/{collections[0]}"), ) @@ -809,6 +952,7 @@ def search_stac_items(url, arguments, root="/", catalogs=[], provider=None): ) # check if time filtering appears both in search arguments and catalog + # (for catalogs built by date: i.e. `year/2020/month/05`) if set(["dtstart", "dtend"]) <= set(arguments.keys()) and set( ["dtstart", "dtend"] ) <= set(result_catalog.search_args.keys()): @@ -869,12 +1013,25 @@ def search_stac_items(url, arguments, root="/", catalogs=[], provider=None): **{"url": result_catalog.url, "root": result_catalog.root}, ), ) + search_results = search_products( product_type=result_catalog.search_args["product_type"], arguments=search_products_arguments, ) - return StacItem( + for record in search_results: + record.product_type = eodag_api.get_alias_from_product_type(record.product_type) + + search_results.method = method + if method == "POST": + search_results.next = f"{url}" + search_results.body = next_page_kwargs + + elif method == "GET": + next_query_string = urlencode(next_page_kwargs) + search_results.next = f"{url}?{next_query_string}" + + items = StacItem( url=url, stac_config=stac_config, provider=provider, @@ -888,8 +1045,10 @@ def search_stac_items(url, arguments, root="/", catalogs=[], provider=None): ), ) + return items + -def get_stac_extension_oseo(url): +def get_stac_extension_oseo(url: str) -> Dict[str, str]: """Build STAC OGC / OpenSearch Extension for EO :param url: Requested URL @@ -917,136 +1076,55 @@ def get_stac_extension_oseo(url): ) -class QueryableProperty(BaseModel): - """A class representing a queryable property. - - :param description: The description of the queryables property - :type description: str - :param ref: (optional) A reference link to the schema of the property. 
- :type ref: str - """ - - description: str - ref: Optional[str] = Field(default=None, serialization_alias="$ref") - - -class Queryables(BaseModel): - """A class representing queryable properties for the STAC API. - - :param json_schema: The URL of the JSON schema. - :type json_schema: str - :param q_id: (optional) The identifier of the queryables. - :type q_id: str - :param q_type: The type of the object. - :type q_type: str - :param title: The title of the queryables. - :type title: str - :param description: The description of the queryables - :type description: str - :param properties: A dictionary of queryable properties. - :type properties: dict - :param additional_properties: Whether additional properties are allowed. - :type additional_properties: bool - """ - - json_schema: str = Field( - default="https://json-schema.org/draft/2019-09/schema", - serialization_alias="$schema", - ) - q_id: Optional[str] = Field(default=None, serialization_alias="$id") - q_type: str = Field(default="object", serialization_alias="type") - title: str = Field(default="Queryables for EODAG STAC API") - description: str = Field( - default="Queryable names for the EODAG STAC API Item Search filter." - ) - properties: Dict[str, QueryableProperty] = Field( - default={ - "id": QueryableProperty( - description="ID", - ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/id", - ), - "collection": QueryableProperty( - description="Collection", - ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/collection", - ), - "geometry": QueryableProperty( - description="Geometry", - ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/geometry", - ), - "bbox": QueryableProperty( - description="Bbox", - ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/bbox", - ), - "datetime": QueryableProperty( - description="Datetime", - ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/datetime.json#/properties/datetime", - ), - "ids": QueryableProperty(description="IDs"), - } - ) - additional_properties: bool = Field( - default=True, serialization_alias="additionalProperties" - ) - - def get_properties(self) -> Dict[str, QueryableProperty]: - """Get the queryable properties. - - :returns: A dictionary containing queryable properties. - :rtype: typing.Dict[str, QueryableProperty] - """ - return self.properties - - def __contains__(self, name: str): - return name in self.properties - - def __setitem__(self, name: str, qprop: QueryableProperty): - self.properties[name] = qprop - - -def rename_to_stac_standard(key: str) -> str: - """Fetch the queryable properties for a collection. - - :param key: The camelCase key name obtained from a collection's metadata mapping. - :type key: str - :returns: The STAC-standardized property name if it exists, else the default camelCase queryable name - :rtype: str - """ - # Load the stac config properties for renaming the properties - # to their STAC standard - stac_config_properties = stac_config["item"]["properties"] - - for stac_property, value in stac_config_properties.items(): - if str(value).endswith(key): - return stac_property - return key - - def fetch_collection_queryable_properties( - collection_id: str, provider: Optional[str] = None -) -> set: + collection_id: Optional[str] = None, provider: Optional[str] = None, **kwargs: Any +) -> Dict[str, StacQueryableProperty]: """Fetch the queryable properties for a collection. :param collection_id: The ID of the collection. 
:type collection_id: str :param provider: (optional) The provider. :type provider: str - :returns queryable_properties: A set containing the STAC standardized queryable properties for a collection. - :rtype queryable_properties: set + :param kwargs: additional filters for queryables (`productType` or other search + arguments) + :type kwargs: Any + :returns: A dictionary containing the STAC standardized queryable properties for a collection. + :rtype: Dict[str, StacQueryableProperty] """ - # Fetch the metadata mapping for collection-specific queryables - args = [collection_id, provider] if provider else [collection_id] - search_plugin = next(eodag_api._plugins_manager.get_search_plugins(*args)) - mapping = dict(search_plugin.config.metadata_mapping) + if not collection_id and "collections" in kwargs: + collection_ids = kwargs.pop("collections").split(",") + collection_id = collection_ids[0] + + if collection_id and "productType" in kwargs: + kwargs.pop("productType") + elif "productType" in kwargs: + collection_id = kwargs.pop("productType") + + if "ids" in kwargs: + kwargs["id"] = kwargs.pop("ids") + + if "datetime" in kwargs: + dates = get_datetime(kwargs) + kwargs["start"] = dates[0] + kwargs["end"] = dates[1] + + python_queryables = eodag_api.list_queryables( + provider=provider, productType=collection_id, **kwargs + ) + python_queryables.pop("start") + python_queryables.pop("end") + + stac_queryables = dict() + for param, queryable in python_queryables.items(): + stac_param = EODAGSearch.to_stac(param) + stac_queryables[ + stac_param + ] = StacQueryableProperty.from_python_field_definition(stac_param, queryable) - # list of all the STAC standardized collection-specific queryables - queryable_properties = set() - for key, value in mapping.items(): - if isinstance(value, list) and "TimeFromAscendingNode" not in key: - queryable_properties.add(rename_to_stac_standard(key)) - return queryable_properties + return stac_queryables -def eodag_api_init(): +def eodag_api_init() -> None: """Init EODataAccessGateway server instance, pre-running all time-consuming tasks""" eodag_api.fetch_product_types_list() diff --git a/eodag/types/__init__.py b/eodag/types/__init__.py new file mode 100644 index 000000000..7d950bfc0 --- /dev/null +++ b/eodag/types/__init__.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# Copyright 2023, CS GROUP - France, https://www.csgroup.eu/ +# +# This file is part of EODAG project +# https://www.github.com/CS-SI/EODAG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
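# --- Illustrative aside (not part of the patch) -----------------------------
# A minimal usage sketch of the reworked `fetch_collection_queryable_properties`
# above, which now delegates to `eodag_api.list_queryables` and converts each
# field name to its STAC form. The import path, collection id and provider
# name below are assumptions for illustration only.
from eodag.rest.utils import fetch_collection_queryable_properties

stac_queryables = fetch_collection_queryable_properties(
    collection_id="S2_MSI_L1C", provider="peps"
)
for stac_name, prop in stac_queryables.items():
    print(stac_name, prop)  # e.g. 'platform' and its StacQueryableProperty
# -----------------------------------------------------------------------------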
+"""EODAG types""" +from __future__ import annotations + +from typing import Any, Dict, List, Literal, Optional, Union + +from pydantic import Field +from pydantic.fields import FieldInfo + +from eodag.utils import Annotated, copy_deepcopy, get_args, get_origin +from eodag.utils.exceptions import ValidationError + +# Types mapping from JSON Schema and OpenAPI 3.1.0 specifications to Python +# See https://spec.openapis.org/oas/v3.1.0#data-types +JSON_TYPES_MAPPING: Dict[str, type] = { + "boolean": bool, + "integer": int, + "number": float, + "string": str, + "array": list, + "object": dict, + "null": type(None), +} + + +def json_type_to_python(json_type: Union[str, List[str]]) -> type: + """Get python type from json type https://spec.openapis.org/oas/v3.1.0#data-types + + >>> json_type_to_python("number") + + + :param json_type: the json type + :returns: the python type + """ + if isinstance(json_type, list) and len(json_type) > 1: + return Union[tuple(JSON_TYPES_MAPPING.get(jt, type(None)) for jt in json_type)] # type: ignore + elif isinstance(json_type, str): + return JSON_TYPES_MAPPING.get(json_type, type(None)) + else: + return type(None) + + +def python_type_to_json(python_type: type) -> Optional[Union[str, List[str]]]: + """Get json type from python https://spec.openapis.org/oas/v3.1.0#data-types + + >>> python_type_to_json(int) + 'integer' + >>> python_type_to_json(Union[float, str]) + ['number', 'string'] + + :param python_type: the python type + :returns: the json type + """ + if get_origin(python_type) is Union: + json_type = list() + for single_python_type in get_args(python_type): + if single_python_type in JSON_TYPES_MAPPING.values(): + # JSON_TYPES_MAPPING key from given value + single_json_type = list(JSON_TYPES_MAPPING.keys())[ + list(JSON_TYPES_MAPPING.values()).index(single_python_type) + ] + json_type.append(single_json_type) + return json_type + elif python_type in JSON_TYPES_MAPPING.values(): + # JSON_TYPES_MAPPING key from given value + return list(JSON_TYPES_MAPPING.keys())[ + list(JSON_TYPES_MAPPING.values()).index(python_type) + ] + else: + return None + + +def json_field_definition_to_python( + json_field_definition: Dict[str, Any], + default_value: Optional[Any] = None, + required: Optional[bool] = False, +) -> Annotated[Any, FieldInfo]: + """Get python field definition from json object + + >>> result = json_field_definition_to_python( + ... { + ... 'type': 'boolean', + ... 'title': 'Foo parameter' + ... } + ... 
) + >>> str(result).replace('_extensions', '') # python3.8 compatibility + "typing.Annotated[bool, FieldInfo(annotation=NoneType, required=False, title='Foo parameter')]" + + :param json_field_definition: the json field definition + :param default_value: default value of the field + :param required: if the field is required + :returns: the python field definition + """ + python_type = json_type_to_python(json_field_definition.get("type", None)) + + field_type_kwargs = dict( + title=json_field_definition.get("title", None), + description=json_field_definition.get("description", None), + pattern=json_field_definition.get("pattern", None), + ) + + if "enum" in json_field_definition and ( + isinstance(json_field_definition["enum"], (list, set)) + ): + python_type = Literal[tuple(sorted(json_field_definition["enum"]))] # type: ignore + + if "$ref" in json_field_definition: + field_type_kwargs["json_schema_extra"] = {"$ref": json_field_definition["$ref"]} + + if not required or default_value: + return Annotated[python_type, Field(default=default_value, **field_type_kwargs)] + else: + return Annotated[python_type, Field(..., **field_type_kwargs)] + + +def python_field_definition_to_json( + python_field_definition: Annotated[Any, FieldInfo] +) -> Dict[str, Any]: + """Get json field definition from python `typing.Annotated` + + >>> from pydantic import Field + >>> from eodag.utils import Annotated + >>> python_field_definition_to_json( + ... Annotated[ + ... Optional[str], + ... Field(None, description='Foo bar', json_schema_extra={'$ref': '/path/to/schema'}) + ... ] + ... ) + {'type': ['string', 'null'], 'description': 'Foo bar', '$ref': '/path/to/schema'} + + :param python_field_definition: the python field annotated type + :returns: the json field definition + """ + if get_origin(python_field_definition) is not Annotated: + raise ValidationError( + "%s must be an instance of Annotated" % python_field_definition + ) + + json_field_definition = dict() + + python_field_args = get_args(python_field_definition) + + # enum & type + if get_origin(python_field_args[0]) is Literal: + enum_args = get_args(python_field_args[0]) + json_field_definition["type"] = python_type_to_json(type(enum_args[0])) + json_field_definition["enum"] = list(enum_args) + # type + else: + field_type = python_type_to_json(python_field_args[0]) + if field_type is not None: + json_field_definition["type"] = python_type_to_json(python_field_args[0]) + + if len(python_field_args) < 2: + return json_field_definition + + # other definition args + title = getattr(python_field_args[1], "title", None) + if title is not None: + json_field_definition["title"] = title + + description = getattr(python_field_args[1], "description", None) + if description is not None: + json_field_definition["description"] = description + + pattern = getattr(python_field_args[1], "pattern", None) + if pattern is not None: + json_field_definition["pattern"] = pattern + + if ( + python_field_args[1].json_schema_extra is not None + and "$ref" in python_field_args[1].json_schema_extra + ): + json_field_definition["$ref"] = python_field_args[1].json_schema_extra["$ref"] + + default = python_field_args[1].get_default() + if default: + json_field_definition["value"] = default + + return json_field_definition + + +def model_fields_to_annotated( + model_fields: Dict[str, FieldInfo] +) -> Dict[str, Annotated[Any, FieldInfo]]: + """Convert BaseModel.model_fields from FieldInfo to Annotated + + >>> from pydantic import create_model + >>> some_model = 
create_model("some_model", foo=(str, None)) + >>> fields_definitions = model_fields_to_annotated(some_model.model_fields) + >>> str(fields_definitions).replace('_extensions', '') # python3.8 compatibility + "{'foo': typing.Annotated[str, FieldInfo(annotation=NoneType, required=False)]}" + + :param model_fields: BaseModel.model_fields to convert + :returns: Annotated tuple usable as create_model argument + """ + annotated_model_fields = dict() + for param, field_info in model_fields.items(): + field_type = field_info.annotation or type(None) + new_field_info = copy_deepcopy(field_info) + new_field_info.annotation = None + annotated_model_fields[param] = Annotated[field_type, new_field_info] + return annotated_model_fields diff --git a/eodag/types/bbox.py b/eodag/types/bbox.py new file mode 100644 index 000000000..612f29d64 --- /dev/null +++ b/eodag/types/bbox.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +# Copyright 2018, CS GROUP - France, https://www.csgroup.eu/ +# +# This file is part of EODAG project +# https://www.github.com/CS-SI/EODAG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from typing import Any, Dict, List, Tuple, Union + +from pydantic import BaseModel, validator +from shapely.geometry.polygon import Polygon + +NumType = Union[float, int] +BBoxArgs = Union[ + List[NumType], Tuple[NumType, NumType, NumType, NumType], Dict[str, NumType] +] + + +class BBox(BaseModel): + """ + A class used to represent a Bounding Box. + """ + + lonmin: NumType + latmin: NumType + lonmax: NumType + latmax: NumType + + def __init__(__pydantic_self__, bboxArgs: BBoxArgs) -> None: # type: ignore + """ + Constructs all the necessary attributes for the BBox object. + + :param bboxArgs: Four float values representing lonmin, latmin, lonmax, latmax respectively + or a dictionary containing key-value pairs of attribute names and values. + """ + if isinstance(bboxArgs, (list, tuple)) and len(bboxArgs) == 4: + values = { + "lonmin": bboxArgs[0], + "latmin": bboxArgs[1], + "lonmax": bboxArgs[2], + "latmax": bboxArgs[3], + } + + elif isinstance(bboxArgs, dict) and len(bboxArgs) == 4: + values = bboxArgs + else: + raise ValueError( + "Expected a dictionary, list or tuple with 4 values for lonmin, latmin, lonmax, latmax" + ) + super().__init__(**values) + + @validator("lonmin", "lonmax") + @classmethod + def validate_longitude(cls, v: NumType) -> NumType: + """ + Validates the longitude values. + + :param v: The longitude value to be validated. + :return: The validated longitude value. + """ + if not -180 <= v <= 180: + raise ValueError("Longitude values must be between -180 and 180") + return v + + @validator("latmin", "latmax") + @classmethod + def validate_latitude(cls, v: NumType) -> NumType: + """ + Validates the latitude values. + + :param v: The latitude value to be validated. + :return: The validated latitude value. 
+ """ + if not -90 <= v <= 90: + raise ValueError("Latitude values must be between -90 and 90") + return v + + @validator("lonmax") + @classmethod + def validate_lonmax(cls, v: NumType, values: Dict[str, Any]) -> NumType: + """ + Validates that lonmax is greater than lonmin. + + :param v: The lonmax value to be validated. + :param values: A dictionary containing the current attribute values. + :return: The validated lonmax value. + """ + if "lonmin" in values and v < values["lonmin"]: + raise ValueError("lonmax must be greater than lonmin") + return v + + @validator("latmax") + @classmethod + def validate_latmax(cls, v: NumType, values: Dict[str, Any]) -> NumType: + """ + Validates that latmax is greater than latmin. + + :param v: The latmax value to be validated. + :param values: A dictionary containing the current attribute values. + :return: The validated latmax value. + """ + if "latmin" in values and v < values["latmin"]: + raise ValueError("latmax must be greater than latmin") + return v + + def to_polygon(self) -> Polygon: + """ + Converts the bounding box to a polygon. + + :return: The Polygon object representing the bounding box. + """ + return Polygon( + ( + (self.lonmin, self.latmin), + (self.lonmin, self.latmax), + (self.lonmax, self.latmax), + (self.lonmax, self.latmin), + ) + ) diff --git a/eodag/types/queryables.py b/eodag/types/queryables.py new file mode 100644 index 000000000..36025d492 --- /dev/null +++ b/eodag/types/queryables.py @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- +# Copyright 2024, CS GROUP - France, https://www.csgroup.eu/ +# +# This file is part of EODAG project +# https://www.github.com/CS-SI/EODAG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from typing import Optional + +from annotated_types import Lt +from pydantic import BaseModel, Field +from pydantic.types import PositiveInt +from typing_extensions import Annotated + +Percentage = Annotated[PositiveInt, Lt(100)] + + +class CommonQueryables(BaseModel): + """A class representing search common queryable properties.""" + + productType: Annotated[str, Field()] + id: Annotated[Optional[str], Field(None)] + start: Annotated[Optional[str], Field(None, alias="startTimeFromAscendingNode")] + end: Annotated[Optional[str], Field(None, alias="completionTimeFromAscendingNode")] + geom: Annotated[Optional[str], Field(None, alias="geometry")] + + @classmethod + def get_queryable_from_alias(cls, value: str) -> str: + """Get queryable parameter from alias + + >>> CommonQueryables.get_queryable_from_alias('startTimeFromAscendingNode') + 'start' + >>> CommonQueryables.get_queryable_from_alias('productType') + 'productType' + """ + alias_map = { + field_info.alias: name + for name, field_info in cls.model_fields.items() + if field_info.alias + } + return alias_map.get(value, value) + + +class Queryables(CommonQueryables): + """A class representing all search queryable properties.""" + + uid: Annotated[Optional[str], Field(None)] + # OpenSearch Parameters for Collection Search (Table 3) + doi: Annotated[Optional[str], Field(None)] + platform: Annotated[Optional[str], Field(None)] + platformSerialIdentifier: Annotated[Optional[str], Field(None)] + instrument: Annotated[Optional[str], Field(None)] + sensorType: Annotated[Optional[str], Field(None)] + compositeType: Annotated[Optional[str], Field(None)] + processingLevel: Annotated[Optional[str], Field(None)] + orbitType: Annotated[Optional[str], Field(None)] + spectralRange: Annotated[Optional[str], Field(None)] + wavelengths: Annotated[Optional[str], Field(None)] + hasSecurityConstraints: Annotated[Optional[str], Field(None)] + dissemination: Annotated[Optional[str], Field(None)] + # INSPIRE obligated OpenSearch Parameters for Collection Search (Table 4) + title: Annotated[Optional[str], Field(None)] + topicCategory: Annotated[Optional[str], Field(None)] + keyword: Annotated[Optional[str], Field(None)] + abstract: Annotated[Optional[str], Field(None)] + resolution: Annotated[Optional[int], Field(None)] + organisationName: Annotated[Optional[str], Field(None)] + organisationRole: Annotated[Optional[str], Field(None)] + publicationDate: Annotated[Optional[str], Field(None)] + lineage: Annotated[Optional[str], Field(None)] + useLimitation: Annotated[Optional[str], Field(None)] + accessConstraint: Annotated[Optional[str], Field(None)] + otherConstraint: Annotated[Optional[str], Field(None)] + classification: Annotated[Optional[str], Field(None)] + language: Annotated[Optional[str], Field(None)] + specification: Annotated[Optional[str], Field(None)] + # OpenSearch Parameters for Product Search (Table 5) + parentIdentifier: Annotated[Optional[str], Field(None)] + productionStatus: Annotated[Optional[str], Field(None)] + acquisitionType: Annotated[Optional[str], Field(None)] + orbitNumber: Annotated[Optional[int], Field(None)] + orbitDirection: Annotated[Optional[str], Field(None)] + track: Annotated[Optional[str], Field(None)] + frame: Annotated[Optional[str], Field(None)] + swathIdentifier: Annotated[Optional[str], Field(None)] + cloudCover: Annotated[Optional[Percentage], Field(None)] + snowCover: Annotated[Optional[Percentage], Field(None)] + lowestLocation: Annotated[Optional[str], Field(None)] + highestLocation: Annotated[Optional[str], 
Field(None)] + productVersion: Annotated[Optional[str], Field(None)] + productQualityStatus: Annotated[Optional[str], Field(None)] + productQualityDegradationTag: Annotated[Optional[str], Field(None)] + processorName: Annotated[Optional[str], Field(None)] + processingCenter: Annotated[Optional[str], Field(None)] + creationDate: Annotated[Optional[str], Field(None)] + modificationDate: Annotated[Optional[str], Field(None)] + processingDate: Annotated[Optional[str], Field(None)] + sensorMode: Annotated[Optional[str], Field(None)] + archivingCenter: Annotated[Optional[str], Field(None)] + processingMode: Annotated[Optional[str], Field(None)] + # OpenSearch Parameters for Acquisition Parameters Search (Table 6) + availabilityTime: Annotated[Optional[str], Field(None)] + acquisitionStation: Annotated[Optional[str], Field(None)] + acquisitionSubType: Annotated[Optional[str], Field(None)] + illuminationAzimuthAngle: Annotated[Optional[str], Field(None)] + illuminationZenithAngle: Annotated[Optional[str], Field(None)] + illuminationElevationAngle: Annotated[Optional[str], Field(None)] + polarizationMode: Annotated[Optional[str], Field(None)] + polarizationChannels: Annotated[Optional[str], Field(None)] + antennaLookDirection: Annotated[Optional[str], Field(None)] + minimumIncidenceAngle: Annotated[Optional[float], Field(None)] + maximumIncidenceAngle: Annotated[Optional[float], Field(None)] + dopplerFrequency: Annotated[Optional[float], Field(None)] + incidenceAngleVariation: Annotated[Optional[float], Field(None)] diff --git a/eodag/types/search_args.py b/eodag/types/search_args.py new file mode 100644 index 000000000..ea2ca24d5 --- /dev/null +++ b/eodag/types/search_args.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# Copyright 2018, CS GROUP - France, https://www.csgroup.eu/ +# +# This file is part of EODAG project +# https://www.github.com/CS-SI/EODAG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
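# --- Illustrative aside (not part of the patch) -----------------------------
# Sketch of the queryables models above: field aliases resolve back to EODAG
# parameter names, and `cloudCover` is validated as a percentage. The product
# type and cloud cover values are arbitrary examples.
from eodag.types.queryables import CommonQueryables, Queryables

print(CommonQueryables.get_queryable_from_alias("geometry"))  # 'geom'
q = Queryables(productType="S2_MSI_L1C", cloudCover=42)
print(q.cloudCover)  # 42; a value of 150 would fail validation
# -----------------------------------------------------------------------------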
+from datetime import datetime +from typing import Dict, List, Optional, Tuple, Union, cast + +from pydantic import BaseModel, ConfigDict, Field, conint, field_validator +from shapely import wkt +from shapely.errors import GEOSException +from shapely.geometry import Polygon, shape +from shapely.geometry.base import GEOMETRY_TYPES, BaseGeometry + +from eodag.types.bbox import BBox +from eodag.utils import DEFAULT_ITEMS_PER_PAGE, DEFAULT_PAGE + +NumType = Union[float, int] +GeomArgs = Union[ + List[NumType], Tuple[NumType, NumType, NumType, NumType], Dict[str, NumType], str, BaseGeometry +] + +PositiveInt = conint(gt=0) + + +class SearchArgs(BaseModel): + """Represents an EODAG search""" + + model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True) + + provider: Optional[str] = Field(None) + productType: str = Field() + id: Optional[str] = Field(None) + start: Optional[str] = Field(None) + end: Optional[str] = Field(None) + geom: Optional[BaseGeometry] = Field(None) + locations: Optional[Dict[str, str]] = Field(None) + page: Optional[int] = Field(DEFAULT_PAGE, gt=0) # type: ignore + items_per_page: Optional[PositiveInt] = Field(DEFAULT_ITEMS_PER_PAGE) # type: ignore + + @field_validator("start", "end", mode="before") + @classmethod + def check_date_format(cls, v: str) -> str: + """Validate dates""" + try: + datetime.fromisoformat(v) + except ValueError as e: + raise ValueError("start and end dates must follow ISO8601 format") from e + return v + + @field_validator("geom", mode="before") + @classmethod + def check_geom(cls, v: GeomArgs) -> BaseGeometry: + """Validate geom""" + # GeoJSON geometry + if isinstance(v, dict) and v.get("type") in GEOMETRY_TYPES: + return cast(BaseGeometry, shape(v)) + + # Bounding Box + if isinstance(v, (list, tuple, dict)): + return BBox(v).to_polygon() + + if isinstance(v, str): + # WKT geometry + try: + return cast(Polygon, wkt.loads(v)) + except GEOSException as e: + raise ValueError(f"Invalid geometry WKT string: {v}") from e + + if isinstance(v, BaseGeometry): + return v + + raise TypeError(f"Invalid geometry type: {type(v)}") diff --git a/eodag/utils/__init__.py b/eodag/utils/__init__.py index 368f9452c..76469f8a7 100644 --- a/eodag/utils/__init__.py +++ b/eodag/utils/__init__.py @@ -20,6 +20,8 @@ Everything that does not fit into one of the specialised categories of utilities in this package should go here """ +from __future__ import annotations + import ast import datetime import errno @@ -36,11 +38,25 @@ import warnings from collections import defaultdict from copy import deepcopy as copy_deepcopy +from datetime import datetime as dt from email.message import Message from glob import glob +from importlib.metadata import metadata from itertools import repeat, starmap from pathlib import Path from tempfile import mkdtemp +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterator, + List, + Optional, + Tuple, + Type, + Union, +) # All modules using these should import them from utils package from urllib.parse import ( # noqa; noqa @@ -52,10 +68,15 @@ urljoin, urlparse, urlsplit, - urlunparse, ) from urllib.request import url2pathname +try: + from typing import Annotated, get_args, get_origin # noqa +except ImportError: + # for python < 3.9 + from typing_extensions import Annotated, get_args, get_origin # type: ignore # noqa + import click import orjson import shapefile @@ -66,7 +87,7 @@ from jsonpath_ng import jsonpath from jsonpath_ng.ext import parse from jsonpath_ng.jsonpath import Child, Fields, Index, Root, Slice -from requests.auth import AuthBase +from 
requests import HTTPError from shapely.geometry import Polygon, shape from shapely.geometry.base import BaseGeometry from tqdm.auto import tqdm @@ -74,11 +95,11 @@ from eodag.utils import logging as eodag_logging from eodag.utils.exceptions import MisconfiguredError -try: - from importlib.metadata import metadata # type: ignore -except ImportError: # pragma: no cover - # for python < 3.8 - from importlib_metadata import metadata # type: ignore +if TYPE_CHECKING: + from jsonpath_ng import JSONPath + + from eodag.api.product import EOProduct + logger = py_logging.getLogger("eodag.utils") @@ -92,12 +113,23 @@ HTTP_REQ_TIMEOUT = 5 # in seconds DEFAULT_STREAM_REQUESTS_TIMEOUT = 60 # in seconds +# default wait times in minutes +DEFAULT_DOWNLOAD_WAIT = 2 # in minutes +DEFAULT_DOWNLOAD_TIMEOUT = 20 # in minutes + JSONPATH_MATCH = re.compile(r"^[\{\(]*\$(\..*)*$") WORKABLE_JSONPATH_MATCH = re.compile(r"^\$(\.[a-zA-Z0-9-_:\.\[\]\"\(\)=\?\*]+)*$") ARRAY_FIELD_MATCH = re.compile(r"^[a-zA-Z0-9-_:]+(\[[0-9\*]+\])+$") +# pagination defaults +DEFAULT_PAGE = 1 +DEFAULT_ITEMS_PER_PAGE = 20 +# Default maximum number of items per page requested by search_all. 50 instead of 20 +# (DEFAULT_ITEMS_PER_PAGE) to increase it to the known and current minimum value (mundi) +DEFAULT_MAX_ITEMS_PER_PAGE = 50 -def _deprecated(reason="", version=None): + +def _deprecated(reason: str = "", version: Optional[str] = None) -> Callable[..., Any]: """Simple decorator to mark functions/methods/classes as deprecated. Warning: Does not work with staticmethods! @@ -109,8 +141,7 @@ def foo(): DeprecationWarning: Call to deprecated function/method foo (why) -- Deprecated since v1.2 """ - def decorator(callable): - + def decorator(callable: Callable[..., Any]) -> Any: if inspect.isclass(callable): ctype = "class" else: @@ -120,7 +151,7 @@ def decorator(callable): version_ = f" -- Deprecated since v{version}" if version else "" @functools.wraps(callable) - def wrapper(*args, **kwargs): + def wrapper(*args: Any, **kwargs: Any) -> Any: with warnings.catch_warnings(): warnings.simplefilter("always", DeprecationWarning) warnings.warn( @@ -135,39 +166,6 @@ def wrapper(*args, **kwargs): return decorator -class RequestsTokenAuth(AuthBase): - """A custom authentication class to be used with requests module""" - - def __init__(self, token, where, qs_key=None, headers=None): - self.token = token - self.where = where - self.qs_key = qs_key - self.headers = headers - - def __call__(self, request): - """Perform the actual authentication""" - if self.headers and isinstance(self.headers, dict): - for k, v in self.headers.items(): - request.headers[k] = v - if self.where == "qs": - parts = urlparse(request.url) - qs = parse_qs(parts.query) - qs[self.qs_key] = self.token - request.url = urlunparse( - ( - parts.scheme, - parts.netloc, - parts.path, - parts.params, - urlencode(qs), - parts.fragment, - ) - ) - elif self.where == "header": - request.headers["Authorization"] = "Bearer {}".format(self.token) - return request - - class FloatRange(click.types.FloatParamType): """A parameter that works similar to :data:`click.FLOAT` but restricts the value to fit into a range. Fails if the value doesn't fit into the range. 
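# --- Illustrative aside (not part of the patch) -----------------------------
# The pagination defaults introduced above now live in eodag.utils; a sketch
# of how calling code can rely on them instead of hard-coding values.
from eodag.utils import DEFAULT_ITEMS_PER_PAGE, DEFAULT_PAGE

criteria = {"page": DEFAULT_PAGE, "items_per_page": DEFAULT_ITEMS_PER_PAGE}
print(criteria)  # {'page': 1, 'items_per_page': 20}
# -----------------------------------------------------------------------------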
@@ -175,11 +173,18 @@ class FloatRange(click.types.FloatParamType): name = "percentage" - def __init__(self, min=None, max=None): + def __init__( + self, min: Optional[float] = None, max: Optional[float] = None + ) -> None: self.min = min self.max = max - def convert(self, value, param, ctx): + def convert( + self, + value: Any, + param: Optional[click.core.Parameter], + ctx: Optional[click.core.Context], + ) -> Any: """Convert value""" rv = click.types.FloatParamType.convert(self, value, param, ctx) if ( @@ -210,11 +215,11 @@ def convert(self, value, param, ctx): ) return rv - def __repr__(self): + def __repr__(self) -> str: return "FloatRange(%r, %r)" % (self.min, self.max) -def slugify(value, allow_unicode=False): +def slugify(value: Any, allow_unicode: bool = False) -> str: """Copied from Django Source code, only modifying last line (no need for safe strings). source: https://github.com/django/django/blob/master/django/utils/text.py @@ -236,7 +241,7 @@ def slugify(value, allow_unicode=False): return re.sub(r"[-\s]+", "-", value) -def sanitize(value): +def sanitize(value: str) -> str: """Sanitize string to be used as a name of a directory. >>> sanitize('productName') @@ -260,7 +265,7 @@ def sanitize(value): return str(rv) -def strip_accents(s): +def strip_accents(s: str) -> str: """Strip accents of a string. >>> strip_accents('productName') @@ -270,13 +275,13 @@ def strip_accents(s): >>> strip_accents('preserve-punct-special-chars:;,?!§%$£œ') 'preserve-punct-special-chars:;,?!§%$£œ' """ - # Mn stands for a nonspacing combining mark (e.g. '́') + # Mn stands for a nonspacing combining mark return "".join( c for c in unicodedata.normalize("NFD", s) if unicodedata.category(c) != "Mn" ) -def uri_to_path(uri): +def uri_to_path(uri: str) -> str: """ Convert a file URI (e.g. 'file:///tmp') to a local path (e.g. '/tmp') """ @@ -289,12 +294,12 @@ def uri_to_path(uri): return path -def path_to_uri(path): +def path_to_uri(path: str) -> str: """Convert a local absolute path to a file URI""" return Path(path).as_uri() -def mutate_dict_in_place(func, mapping): +def mutate_dict_in_place(func: Callable[[Any], Any], mapping: Dict[Any, Any]) -> None: """Apply func to values of mapping. The mapping object's values are modified in-place. The function is recursive, @@ -314,7 +319,7 @@ def mutate_dict_in_place(func, mapping): mapping[key] = func(value) -def merge_mappings(mapping1, mapping2): +def merge_mappings(mapping1: Dict[Any, Any], mapping2: Dict[Any, Any]) -> None: """Merge two mappings with string keys, values from `mapping2` overriding values from `mapping1`. @@ -368,29 +373,9 @@ def merge_mappings(mapping1, mapping2): and current_value_type == list ): mapping1[m1_keys_lowercase.get(key, key)] = value - elif isinstance(value, str): - # Bool is a type with special meaning in Python, thus the special - # case - if current_value_type is bool: - if value.capitalize() not in ("True", "False"): - raise ValueError( - "Only true or false strings (case insensitive) are " - "allowed for booleans" - ) - # Get the real Python value of the boolean. e.g: value='tRuE' - # => eval(value.capitalize())=True. 
- # str.capitalize() transforms the first character of the string - # to a capital letter - mapping1[m1_keys_lowercase.get(key, key)] = eval( - value.capitalize() - ) - else: - mapping1[ - m1_keys_lowercase.get(key, key) - ] = current_value_type(value) else: - mapping1[m1_keys_lowercase.get(key, key)] = current_value_type( - value + mapping1[m1_keys_lowercase.get(key, key)] = cast_scalar_value( + value, current_value_type ) except (TypeError, ValueError): # Ignore any override value that does not have the same type @@ -404,7 +389,7 @@ def merge_mappings(mapping1, mapping2): mapping1[key] = value -def maybe_generator(obj): +def maybe_generator(obj: Any) -> Iterator[Any]: """Generator function that get an arbitrary object and generate values from it if the object is a generator.""" if isinstance(obj, types.GeneratorType): @@ -414,7 +399,7 @@ def maybe_generator(obj): yield obj -def get_timestamp(date_time): +def get_timestamp(date_time: str) -> float: """Return the Unix timestamp of an ISO8601 date/datetime in seconds. If the datetime has no offset, it is assumed to be an UTC datetime. @@ -430,7 +415,7 @@ def get_timestamp(date_time): return dt.timestamp() -def datetime_range(start, end): +def datetime_range(start: dt, end: dt) -> Iterator[dt]: """Generator function for all dates in-between start and end date.""" delta = end - start for nday in range(delta.days + 1): @@ -440,7 +425,7 @@ def datetime_range(start, end): class DownloadedCallback: """Example class for callback after each download in :meth:`~eodag.api.core.EODataAccessGateway.download_all`""" - def __call__(self, product): + def __call__(self, product: EOProduct) -> None: """Callback :param product: The downloaded EO product @@ -463,7 +448,7 @@ class ProgressCallback(tqdm): individually disabled using `disable=True`. """ - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: self.kwargs = kwargs.copy() if "unit" not in kwargs: kwargs["unit"] = "B" @@ -480,7 +465,7 @@ def __init__(self, *args, **kwargs): super(ProgressCallback, self).__init__(*args, **kwargs) - def __call__(self, increment, total=None): + def __call__(self, increment: int, total: Optional[int] = None) -> None: """Update the progress bar. :param increment: Amount of data already processed @@ -493,7 +478,7 @@ def __call__(self, increment, total=None): self.update(increment) - def copy(self, *args, **kwargs): + def copy(self, *args: Any, **kwargs: Any) -> ProgressCallback: """Returns another progress callback using the same initial keyword-arguments. 
@@ -513,18 +498,18 @@ class NotebookProgressCallback(tqdm): @_deprecated(reason="Use ProgressCallback class instead", version="2.2.1") -def get_progress_callback(): +def get_progress_callback() -> tqdm: """Get progress_callback""" return tqdm() -def repeatfunc(func, n, *args): +def repeatfunc(func: Callable[..., Any], n: int, *args: Any) -> starmap: """Call `func` `n` times with `args`""" return starmap(func, repeat(args, n)) -def makedirs(dirpath): +def makedirs(dirpath: str) -> None: """Create a directory in filesystem with parents if necessary""" try: os.makedirs(dirpath) @@ -534,7 +519,7 @@ def makedirs(dirpath): raise -def rename_subfolder(dirpath, name): +def rename_subfolder(dirpath: str, name: str) -> None: """Rename first subfolder found in dirpath with given name, raise RuntimeError if no subfolder can be found @@ -578,7 +563,9 @@ def rename_subfolder(dirpath, name): ) -def format_dict_items(config_dict, **format_variables): +def format_dict_items( + config_dict: Dict[str, Any], **format_variables: Any +) -> Dict[Any, Any]: r"""Recursive apply string.format(\**format_variables) to dict elements >>> format_dict_items( @@ -597,7 +584,9 @@ def format_dict_items(config_dict, **format_variables): return dict_items_recursive_apply(config_dict, format_string, **format_variables) -def jsonpath_parse_dict_items(jsonpath_dict, values_dict): +def jsonpath_parse_dict_items( + jsonpath_dict: Dict[str, Any], values_dict: Dict[str, Any] +) -> Dict[Any, Any]: """Recursive parse jsonpath elements in dict >>> import jsonpath_ng.ext as jsonpath @@ -618,12 +607,12 @@ def jsonpath_parse_dict_items(jsonpath_dict, values_dict): def update_nested_dict( - old_dict, - new_dict, - extend_list_values=False, - allow_empty_values=False, - allow_extend_duplicates=True, -): + old_dict: Dict[Any, Any], + new_dict: Dict[Any, Any], + extend_list_values: bool = False, + allow_empty_values: bool = False, + allow_extend_duplicates: bool = True, +) -> Dict[Any, Any]: """Update recursively old_dict items with new_dict ones >>> update_nested_dict( @@ -707,7 +696,11 @@ def update_nested_dict( return old_dict -def items_recursive_apply(input_obj, apply_method, **apply_method_parameters): +def items_recursive_apply( + input_obj: Union[Dict[Any, Any], List[Any]], + apply_method: Callable[..., Any], + **apply_method_parameters: Any, +) -> Union[Dict[Any, Any], List[Any]]: """Recursive apply method to items contained in input object (dict or list) >>> items_recursive_apply( @@ -748,7 +741,11 @@ def items_recursive_apply(input_obj, apply_method, **apply_method_parameters): return input_obj -def dict_items_recursive_apply(config_dict, apply_method, **apply_method_parameters): +def dict_items_recursive_apply( + config_dict: Dict[Any, Any], + apply_method: Callable[..., Any], + **apply_method_parameters: Any, +) -> Dict[Any, Any]: """Recursive apply method to dict elements >>> dict_items_recursive_apply( @@ -766,7 +763,7 @@ def dict_items_recursive_apply(config_dict, apply_method, **apply_method_paramet :returns: Updated dict :rtype: dict """ - result_dict = deepcopy(config_dict) + result_dict: Dict[Any, Any] = deepcopy(config_dict) for dict_k, dict_v in result_dict.items(): if isinstance(dict_v, dict): result_dict[dict_k] = dict_items_recursive_apply( @@ -784,7 +781,11 @@ def dict_items_recursive_apply(config_dict, apply_method, **apply_method_paramet return result_dict -def list_items_recursive_apply(config_list, apply_method, **apply_method_parameters): +def list_items_recursive_apply( + config_list: List[Any], + apply_method: 
Callable[..., Any], + **apply_method_parameters: Any, +) -> List[Any]: """Recursive apply method to list elements >>> list_items_recursive_apply( @@ -820,7 +821,9 @@ def list_items_recursive_apply(config_list, apply_method, **apply_method_paramet return result_list -def items_recursive_sort(input_obj): +def items_recursive_sort( + input_obj: Union[List[Any], Dict[Any, Any]] +) -> Union[List[Any], Dict[Any, Any]]: """Recursive sort dict items contained in input object (dict or list) >>> items_recursive_sort( @@ -846,7 +849,7 @@ def items_recursive_sort(input_obj): return input_obj -def dict_items_recursive_sort(config_dict): +def dict_items_recursive_sort(config_dict: Dict[Any, Any]) -> Dict[Any, Any]: """Recursive sort dict elements >>> dict_items_recursive_sort( @@ -859,7 +862,7 @@ def dict_items_recursive_sort(config_dict): :returns: Updated dict :rtype: dict """ - result_dict = deepcopy(config_dict) + result_dict: Dict[Any, Any] = deepcopy(config_dict) for dict_k, dict_v in result_dict.items(): if isinstance(dict_v, dict): result_dict[dict_k] = dict_items_recursive_sort(dict_v) @@ -871,7 +874,7 @@ def dict_items_recursive_sort(config_dict): return dict(sorted(result_dict.items())) -def list_items_recursive_sort(config_list): +def list_items_recursive_sort(config_list: List[Any]) -> List[Any]: """Recursive sort dicts in list elements >>> list_items_recursive_sort(["b", {2: 0, 0: 1, 1: 2}]) @@ -882,7 +885,7 @@ def list_items_recursive_sort(config_list): :returns: Updated list :rtype: list """ - result_list = deepcopy(config_list) + result_list: List[Any] = deepcopy(config_list) for list_idx, list_v in enumerate(result_list): if isinstance(list_v, dict): result_list[list_idx] = dict_items_recursive_sort(list_v) @@ -894,7 +897,7 @@ def list_items_recursive_sort(config_list): return result_list -def string_to_jsonpath(*args, force=False): +def string_to_jsonpath(*args: Any, force: bool = False) -> Union[str, JSONPath]: """Get jsonpath for "$.foo.bar" like string >>> string_to_jsonpath(None, "$.foo.bar") @@ -913,9 +916,9 @@ def string_to_jsonpath(*args, force=False): :param force: force conversion even if input string is not detected as a jsonpath :type force: bool :returns: Parsed value - :rtype: str + :rtype: str or Child or Root """ - path_str = args[-1] + path_str: str = args[-1] if JSONPATH_MATCH.match(str(path_str)) or force: try: common_jsonpath = "$" @@ -934,7 +937,7 @@ def string_to_jsonpath(*args, force=False): indexed_path = indexed_path_and_indexes[0] parsed_path = Child(parsed_path, Fields(indexed_path)) for idx in range(len(indexed_path_and_indexes) - 1): - index = ( + index: Union[int, str] = ( indexed_path_and_indexes[idx + 1][:-1] if idx < len(indexed_path_and_indexes) - 2 else indexed_path_and_indexes[idx + 1] @@ -974,7 +977,7 @@ def string_to_jsonpath(*args, force=False): return path_str -def format_string(key, str_to_format, **format_variables): +def format_string(key: str, str_to_format: Any, **format_variables: Any) -> Any: """Format "{foo}" like string >>> format_string(None, "foo {bar}, {baz} ?", **{"bar": "qux", "baz": "quux"}) @@ -987,33 +990,35 @@ def format_string(key, str_to_format, **format_variables): :returns: Parsed value :rtype: str """ - if isinstance(str_to_format, str): - # eodag mappings function usage, e.g. 
'{foo#to_bar}' - COMPLEX_QS_REGEX = re.compile(r"^(.+=)?([^=]*)({.+})+([^=&]*)$") - if COMPLEX_QS_REGEX.match(str_to_format) and "#" in str_to_format: - from eodag.api.product.metadata_mapping import format_metadata + if not isinstance(str_to_format, str): + return str_to_format - result = format_metadata(str_to_format, **format_variables) + # eodag mappings function usage, e.g. '{foo#to_bar}' + COMPLEX_QS_REGEX = re.compile(r"^(.+=)?([^=]*)({.+})+([^=&]*)$") + if COMPLEX_QS_REGEX.match(str_to_format) and "#" in str_to_format: + from eodag.api.product.metadata_mapping import format_metadata - else: - # defaultdict usage will return "" for missing keys in format_args - try: - result = str_to_format.format_map(defaultdict(str, **format_variables)) - except TypeError as e: - raise MisconfiguredError( - f"Unable to format str={str_to_format} using {str(format_variables)}: {str(e)}" - ) + result = format_metadata(str_to_format, **format_variables) - # try to convert string to python object - try: - return ast.literal_eval(result) - except (SyntaxError, ValueError): - return result else: - return str_to_format + # defaultdict usage will return "" for missing keys in format_args + try: + result = str_to_format.format_map(defaultdict(str, **format_variables)) + except TypeError as e: + raise MisconfiguredError( + f"Unable to format str={str_to_format} using {str(format_variables)}: {str(e)}" + ) + # try to convert string to python object + try: + return ast.literal_eval(result) + except (SyntaxError, ValueError): + return result -def parse_jsonpath(key, jsonpath_obj, **values_dict): + +def parse_jsonpath( + key: str, jsonpath_obj: Union[str, jsonpath.Child], **values_dict: Dict[str, Any] +) -> Optional[str]: """Parse jsonpah in jsonpath_obj using values_dict >>> import jsonpath_ng.ext as jsonpath @@ -1023,7 +1028,7 @@ def parse_jsonpath(key, jsonpath_obj, **values_dict): :param key: Input item key :type key: str :param jsonpath_obj: Input item value, to be parsed - :type jsonpath_obj: str + :type jsonpath_obj: str or jsonpath.Child :param values_dict: Values used as args for parsing :type values_dict: dict :returns: Parsed value @@ -1036,16 +1041,16 @@ def parse_jsonpath(key, jsonpath_obj, **values_dict): return jsonpath_obj -def nested_pairs2dict(pairs): +def nested_pairs2dict(pairs: Union[List[Any], Any]) -> Union[Any, Dict[Any, Any]]: """Create a dict using nested pairs >>> nested_pairs2dict([["foo", [["bar", "baz"]]]]) {'foo': {'bar': 'baz'}} :param pairs: Pairs of key / value - :type pairs: list + :type pairs: list or Any :returns: Created dict - :rtype: dict + :rtype: dict or Any """ d = {} try: @@ -1059,7 +1064,9 @@ def nested_pairs2dict(pairs): return d -def get_geometry_from_various(locations_config=[], **query_args): +def get_geometry_from_various( + locations_config: List[Dict[str, Any]] = [], **query_args: Any +) -> BaseGeometry: """Creates a shapely geometry using given query kwargs arguments :param locations_config: (optional) EODAG locations configuration @@ -1147,17 +1154,22 @@ def get_geometry_from_various(locations_config=[], **query_args): class MockResponse: """Fake requests response""" - def __init__(self, json_data, status_code): + def __init__(self, json_data: Any, status_code: int) -> None: self.json_data = json_data self.status_code = status_code self.content = json_data - def json(self): + def json(self) -> Any: """Return json data""" return self.json_data + def raise_for_status(self) -> None: + """raises an exception when the status is not ok""" + if self.status_code != 
200: + raise HTTPError() -def md5sum(file_path): + +def md5sum(file_path: str) -> str: """Get file MD5 checksum >>> import os @@ -1176,7 +1188,7 @@ def md5sum(file_path): return hash_md5.hexdigest() -def obj_md5sum(data): +def obj_md5sum(data: Any) -> str: """Get MD5 checksum from JSON serializable object >>> obj_md5sum(None) @@ -1191,7 +1203,7 @@ def obj_md5sum(data): @functools.lru_cache() -def cached_parse(str_to_parse): +def cached_parse(str_to_parse: str) -> JSONPath: """Cached jsonpath_ng.ext.parse >>> cached_parse.cache_clear() @@ -1217,12 +1229,12 @@ def cached_parse(str_to_parse): @functools.lru_cache() -def _mutable_cached_yaml_load(config_path): +def _mutable_cached_yaml_load(config_path: str) -> Any: with open(os.path.abspath(os.path.realpath(config_path)), "r") as fh: return yaml.load(fh, Loader=yaml.SafeLoader) -def cached_yaml_load(config_path): +def cached_yaml_load(config_path: str) -> Dict[str, Any]: """Cached yaml.load :param config_path: path to the yaml configuration file @@ -1234,12 +1246,12 @@ def cached_yaml_load(config_path): @functools.lru_cache() -def _mutable_cached_yaml_load_all(config_path): +def _mutable_cached_yaml_load_all(config_path: str) -> List[Any]: with open(config_path, "r") as fh: return list(yaml.load_all(fh, Loader=yaml.Loader)) -def cached_yaml_load_all(config_path): +def cached_yaml_load_all(config_path: str) -> List[Any]: """Cached yaml.load_all Load all configurations stored in the configuration file as separated yaml documents @@ -1252,7 +1264,9 @@ def cached_yaml_load_all(config_path): return copy_deepcopy(_mutable_cached_yaml_load_all(config_path)) -def get_bucket_name_and_prefix(url=None, bucket_path_level=None): +def get_bucket_name_and_prefix( + url: str, bucket_path_level: Optional[int] = None +) -> Tuple[Optional[str], Optional[str]]: """Extract bucket name and prefix from URL :param url: (optional) URL to use as product.location @@ -1264,7 +1278,7 @@ def get_bucket_name_and_prefix(url=None, bucket_path_level=None): """ bucket, prefix = None, None - scheme, netloc, path, params, query, fragment = urlparse(url) + scheme, netloc, path, _, _, _ = urlparse(url) subdomain = netloc.split(".")[0] path = path.strip("/") @@ -1282,7 +1296,9 @@ def get_bucket_name_and_prefix(url=None, bucket_path_level=None): return bucket, prefix -def flatten_top_directories(nested_dir_root, common_subdirs_path=None): +def flatten_top_directories( + nested_dir_root: str, common_subdirs_path: Optional[str] = None +) -> None: """Flatten directory structure, removing common empty sub-directories :param nested_dir_root: Absolute path of the directory structure to flatten @@ -1305,7 +1321,7 @@ def flatten_top_directories(nested_dir_root, common_subdirs_path=None): shutil.move(tmp_path, nested_dir_root) -def deepcopy(sth): +def deepcopy(sth: Any) -> Any: """Customized and faster deepcopy inspired by https://stackoverflow.com/a/45858907 `_copy_list` and `_copy_dict` available for the moment @@ -1314,9 +1330,11 @@ def deepcopy(sth): :returns: Copied object :rtype: Any """ - _dispatcher = {} + _dispatcher: Dict[Type[Any], Callable[..., Any]] = {} - def _copy_list(input_list, dispatch): + def _copy_list( + input_list: List[Any], dispatch: Dict[Type[Any], Callable[..., Any]] + ): ret = input_list.copy() for idx, item in enumerate(ret): cp = dispatch.get(type(item)) @@ -1324,7 +1342,9 @@ def _copy_list(input_list, dispatch): ret[idx] = cp(item, dispatch) return ret - def _copy_dict(input_dict, dispatch): + def _copy_dict( + input_dict: Dict[Any, Any], dispatch: 
Dict[Type[Any], Callable[..., Any]] + ): ret = input_dict.copy() for key, value in ret.items(): cp = dispatch.get(type(value)) @@ -1342,7 +1362,7 @@ def _copy_dict(input_dict, dispatch): return cp(sth, _dispatcher) -def parse_header(header): +def parse_header(header: str) -> Message: """Parse HTTP header >>> parse_header( @@ -1358,3 +1378,34 @@ def parse_header(header): m = Message() m["content-type"] = header return m + + +def cast_scalar_value(value: Any, new_type: Any) -> Any: + """Convert a scalar (not nested) value type to the given one + + >>> cast_scalar_value('1', int) + 1 + >>> cast_scalar_value(1, str) + '1' + >>> cast_scalar_value('false', bool) + False + + :param value: the scalar value to convert + :param new_type: the wanted type + :returns: scalar value converted to new_type + """ + if isinstance(value, str) and new_type is bool: + # Bool is a type with special meaning in Python, thus the special + # case + if value.capitalize() not in ("True", "False"): + raise ValueError( + "Only true or false strings (case insensitive) are " + "allowed for booleans" + ) + # Get the real Python value of the boolean. e.g: value='tRuE' + # => eval(value.capitalize())=True. + # str.capitalize() transforms the first character of the string + # to a capital letter + return eval(value.capitalize()) + + return new_type(value) diff --git a/eodag/utils/constraints.py b/eodag/utils/constraints.py new file mode 100644 index 000000000..e59d4f7bf --- /dev/null +++ b/eodag/utils/constraints.py @@ -0,0 +1,252 @@ +# -*- coding: utf-8 -*- +# Copyright 2024, CS Systemes d'Information, https://www.csgroup.eu/ +# +# This file is part of EODAG project +# https://www.github.com/CS-SI/EODAG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
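[Editor's note: a quick sketch of how the ``cast_scalar_value`` helper added above behaves, based only on the hunk shown here; importing it from ``eodag.utils`` is an assumption inferred from this utils diff.]

from eodag.utils import cast_scalar_value

assert cast_scalar_value("1", int) == 1          # str -> int
assert cast_scalar_value(1, str) == "1"          # int -> str
assert cast_scalar_value("tRuE", bool) is True   # booleans are case-insensitive
try:
    cast_scalar_value("yes", bool)               # only "true"/"false" are accepted
except ValueError as err:
    print(err)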
+import copy +import logging +from typing import Any, Dict, List, Set, Union + +import requests + +from eodag.api.product.metadata_mapping import get_provider_queryable_key +from eodag.plugins.apis.base import Api +from eodag.plugins.search.base import Search +from eodag.utils import HTTP_REQ_TIMEOUT, USER_AGENT, deepcopy +from eodag.utils.exceptions import TimeOutError, ValidationError + +logger = logging.getLogger("eodag.constraints") + + +def get_constraint_queryables_with_additional_params( + constraints: List[Any], + input_params: Dict[str, Any], + plugin: Union[Search, Api], + product_type: str, +) -> Dict[str, Dict[str, Set[Any]]]: + """ + Gets the queryables from the constraints using the given parameters. + For all queryables, only values matching the given parameters based on the constraints will be returned. + :param constraints: list of constraints fetched from the provider + :type constraints: List[Any] + :param input_params: conditions the constraints should fulfil + :type input_params: dict + :param plugin: search or api plugin that is used + :type plugin: Union[Search, Api] + :param product_type: product type for which the data should be fetched + :type product_type: str + :returns: dict containing queryable data + :rtype: Dict[str, Dict[str, Set[Any]]] + """ + defaults = copy.deepcopy(input_params) + constraint_matches = {} + params = {k: v for k, v in defaults.items() if v} + for p in params.keys(): + defaults.pop(p, None) + params_available = {k: False for k in params.keys()} + # check which constraints match the given parameters + eodag_provider_key_mapping = {} + values_available: Dict[str, Set[Any]] = {k: set() for k in params.keys()} + metadata_mapping = plugin.config.products.get(product_type, {}).get( + "metadata_mapping", {} + ) + if not metadata_mapping: + metadata_mapping = plugin.config.metadata_mapping + for i, constraint in enumerate(constraints): + params_matched = {k: False for k in params.keys()} + for param, value in params.items(): + provider_key = get_provider_queryable_key( + param, constraint, metadata_mapping + ) + if provider_key and provider_key in constraint: + eodag_provider_key_mapping[provider_key] = param + params_available[param] = True + if value in constraint[provider_key]: + params_matched[param] = True + values_available[param].update(constraint[provider_key]) + # match with default values of params + for default_param, default_value in defaults.items(): + provider_key = get_provider_queryable_key( + default_param, + constraint, + metadata_mapping, + ) + if provider_key and provider_key in constraint: + eodag_provider_key_mapping[provider_key] = default_param + params_matched[default_param] = False + if default_value in constraint[provider_key]: + params_matched[default_param] = True + constraint_matches[i] = params_matched + + # check if all parameters are available in the constraints + not_available_params = set() + for param, available in params_available.items(): + if not available: + not_available_params.add(param) + if not_available_params: + return {"not_available": {"enum": not_available_params}} + + # clear constraint_matches if no combination matches + matching_combinations = [ + False not in v.values() for v in constraint_matches.values() + ] + if not any(matching_combinations): + constraint_matches = {} + + # add values of constraints matching params + queryables: Dict[str, Dict[str, Set[Any]]] = {} + for num, matches in constraint_matches.items(): + for key in constraints[num]: + other_keys_matching = [v for k, v in
matches.items() if k != key] + key_matches_a_constraint = any( + v.get(key, False) for v in constraint_matches.values() + ) + if False in other_keys_matching or ( + not key_matches_a_constraint and key in matches + ): + continue + if key in queryables: + queryables[key]["enum"].update(constraints[num][key]) + else: + queryables[key] = {} + queryables[key]["enum"] = set(constraints[num][key]) + + other_values = _get_other_possible_values_for_values_with_defaults( + defaults, params, constraints, metadata_mapping + ) + for key in queryables: + if key in other_values: + queryables[key]["enum"].update(other_values[key]) + + # check if constraints matching params have been found + if len(queryables) == 0: + if len(params) > 1: + raise ValidationError( + f"combination of values {str(params)} is not possible" + ) + elif len(params) == 1 and len(defaults) > 0: + raise ValidationError( + f"value {list(params.values())[0]} not available for param {list(params.keys())[0]} " + f"with default values {str(defaults)}" + ) + + elif len(params) == 1: + raise ValidationError( + f"value {list(params.values())[0]} not available for param {list(params.keys())[0]}, " + f"possible values: {str(sorted(values_available[list(params.keys())[0]]))}" + ) + else: + raise ValidationError( + f"no constraints matching default params {str(defaults)} found" + ) + + return queryables + + +def fetch_constraints( + constraints_url: str, plugin: Union[Search, Api] +) -> List[Dict[Any, Any]]: + """ + fetches the constraints from a provider + :param constraints_url: url from which the constraints can be fetched + :type constraints_url: str + :param plugin: api or search plugin of the provider + :type plugin: Union[Search, Api] + :returns: list of constraints fetched from the provider + :rtype: List[Dict[Any, Any]] + """ + try: + headers = USER_AGENT + logger.debug("fetching constraints from %s", constraints_url) + if hasattr(plugin, "auth"): + res = requests.get( + constraints_url, + headers=headers, + auth=plugin.auth, + timeout=HTTP_REQ_TIMEOUT, + ) + else: + res = requests.get( + constraints_url, headers=headers, timeout=HTTP_REQ_TIMEOUT + ) + res.raise_for_status() + except requests.exceptions.Timeout as exc: + raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc + except requests.exceptions.HTTPError as err: + logger.error( + "constraints could not be fetched from %s, error: %s", + constraints_url, + str(err), + ) + return [] + else: + constraints_data = res.json() + config = plugin.config.__dict__ + if ( + "constraints_entry" in config + and config["constraints_entry"] + and config["constraints_entry"] in constraints_data + ): + constraints = constraints_data[config["constraints_entry"]] + else: + constraints = constraints_data + return constraints + + +def _get_other_possible_values_for_values_with_defaults( + defaults: Dict[str, Any], + params: Dict[str, Any], + constraints: List[Dict[Any, Any]], + metadata_mapping: Dict[str, Union[str, list]], +) -> Dict[str, Set[Any]]: + possible_values = {} + for param, default_value in defaults.items(): + fixed_params = deepcopy(params) + param_values = set() + for p in defaults: + if p != param: + fixed_params[p] = defaults[p] + for constraint in constraints: + provider_key = get_provider_queryable_key( + param, constraint, metadata_mapping + ) + if not provider_key: + provider_key = param + if ( + _matches_constraint(constraint, fixed_params, metadata_mapping) + and provider_key in constraint + ): + param_values.update(constraint[provider_key]) + possible_values[provider_key] 
= param_values + return possible_values + + +def _matches_constraint( + constraint: Dict[Any, Any], + params: Dict[str, Any], + metadata_mapping: Dict[str, Union[str, list]], +) -> bool: + for p in params: + provider_key = get_provider_queryable_key(p, constraint, metadata_mapping) + if provider_key not in constraint: + continue + if isinstance(params[p], list): + for value in params[p]: + if value not in constraint[provider_key]: + return False + else: + if params[p] not in constraint[provider_key]: + return False + return True diff --git a/eodag/utils/exceptions.py b/eodag/utils/exceptions.py index c9ba9f19c..d12f18a9a 100644 --- a/eodag/utils/exceptions.py +++ b/eodag/utils/exceptions.py @@ -15,12 +15,18 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from typing import Optional, Set, Tuple class ValidationError(Exception): """Error validating data""" - def __init__(self, message): + def __init__(self, message: str) -> None: self.message = message @@ -47,7 +53,7 @@ class UnsupportedProvider(Exception): class UnsupportedProductType(Exception): """An error indicating that eodag does not support a product type""" - def __init__(self, product_type): + def __init__(self, product_type: str) -> None: self.product_type = product_type @@ -70,9 +76,17 @@ class NotAvailableError(Exception): class RequestError(Exception): - """An error indicating that a HTTP request has failed. Usually eodag functions + """An error indicating that a request has failed. Usually eodag functions and methods should catch and skip this""" + history: Set[Tuple[Exception, str]] = set() + + def __str__(self): + repr = super().__str__() + for err_tuple in self.history: + repr += f"\n- {str(err_tuple)}" + return repr + class NoMatchingProductType(Exception): """An error indicating that eodag was unable to derive a product type from a set @@ -81,3 +95,17 @@ class NoMatchingProductType(Exception): class STACOpenerError(Exception): """An error indicating that a STAC file could not be opened""" + + +class TimeOutError(RequestError): + """An error indicating that a timeout has occurred""" + + def __init__( + self, exception: Optional[Exception] = None, timeout: Optional[float] = None + ) -> None: + url = getattr(getattr(exception, "request", None), "url", None) + timeout_msg = f"({timeout}s)" if timeout else "" + message = ( + f"Request timeout {timeout_msg} for URL {url}" if url else str(exception) + ) + super().__init__(message) diff --git a/eodag/utils/import_system.py b/eodag/utils/import_system.py index 596672705..76fa438a5 100644 --- a/eodag/utils/import_system.py +++ b/eodag/utils/import_system.py @@ -15,14 +15,21 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations import importlib import pkgutil from contextlib import contextmanager from functools import partial +from typing import TYPE_CHECKING, Any, Generator, Tuple +if TYPE_CHECKING: + from types import ModuleType -def import_all_modules(base_package, depth=1, exclude=()): + +def import_all_modules( + base_package: ModuleType, depth: int = 1, exclude: Tuple[str, ...] 
= () +) -> None: """Import all modules in base_package, including modules in the sub-packages up to `depth` and excluding modules in `exclude`. @@ -75,7 +82,7 @@ def import_all_modules(base_package, depth=1, exclude=()): @contextmanager -def patch_owslib_requests(verify=True): +def patch_owslib_requests(verify: bool = True) -> Generator[None, Any, None]: """Overrides call to the :func:`requests.request` and :func:`requests.post` functions by :func:`owslib.util.openURL` and :func:`owslib.util.http_post` functions, providing some control over how to use these functions in `owslib `_. diff --git a/eodag/utils/logging.py b/eodag/utils/logging.py index 52449341b..56c444545 100644 --- a/eodag/utils/logging.py +++ b/eodag/utils/logging.py @@ -15,13 +15,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations import logging.config +from typing import Optional disable_tqdm = False -def setup_logging(verbose, no_progress_bar=False): +def setup_logging(verbose: int, no_progress_bar: bool = False) -> None: """Define logging level :param verbose: Accepted values: @@ -121,7 +123,7 @@ def setup_logging(verbose, no_progress_bar=False): raise ValueError("'verbose' must be one of: 0, 1, 2, 3") -def get_logging_verbose(): +def get_logging_verbose() -> Optional[int]: """Get logging verbose level >>> from eodag import setup_logging diff --git a/eodag/utils/notebook.py b/eodag/utils/notebook.py index e7679dd51..8adb6cdcc 100644 --- a/eodag/utils/notebook.py +++ b/eodag/utils/notebook.py @@ -15,12 +15,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
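[Editor's note: a usage sketch for the newly annotated logging helpers patched above in eodag/utils/logging.py, relying only on the signatures and the documented verbose levels 0-3 shown in this hunk.]

from eodag.utils.logging import get_logging_verbose, setup_logging

setup_logging(verbose=2)                        # INFO-level eodag logging
print(get_logging_verbose())                    # -> 2
setup_logging(verbose=1, no_progress_bar=True)  # quiet, tqdm progress bars disabled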
+from __future__ import annotations +from typing import Any, Optional -from typing import Any - -def check_ipython(): +def check_ipython() -> bool: """Check if called from ipython""" try: __IPYTHON__ @@ -29,7 +29,7 @@ def check_ipython(): return False -def check_notebook(): +def check_notebook() -> bool: """Check if called from a notebook""" try: shell = get_ipython().__class__.__name__ @@ -46,12 +46,12 @@ def check_notebook(): class NotebookWidgets: """Display / handle ipython widgets""" - is_notebook = False - html_box: Any = None - html_box_shown = False + is_notebook: bool = False + html_box: Optional[Any] = None + html_box_shown: bool = False display: Any = None - def __init__(self): + def __init__(self) -> None: self.is_notebook = check_notebook() if self.is_notebook: @@ -63,23 +63,25 @@ def __init__(self): else: pass - def display_html(self, html_value): + def display_html(self, html_value: str) -> None: """Display HTML message""" - if self.is_notebook: - self.html_box.data = html_value + if not self.is_notebook: + return None - if not self.html_box_shown: - self._html_handle = self.display(self.html_box, display_id=True) - self.html_box_shown = True - else: - self._update_display( - self.html_box, display_id=self._html_handle.display_id - ) + self.html_box.data = html_value + + if not self.html_box_shown: + self._html_handle = self.display(self.html_box, display_id=True) + self.html_box_shown = True + else: + self._update_display(self.html_box, display_id=self._html_handle.display_id) - def clear_html(self): + def clear_html(self) -> None: """Clear HTML message""" - if self.is_notebook: - self.html_box.data = "" - self._update_display(self.html_box, display_id=self._html_handle.display_id) + if not self.is_notebook: + return None + + self.html_box.data = "" + self._update_display(self.html_box, display_id=self._html_handle.display_id) diff --git a/eodag/utils/stac_reader.py b/eodag/utils/stac_reader.py index 694f5ea7a..ac2772af3 100644 --- a/eodag/utils/stac_reader.py +++ b/eodag/utils/stac_reader.py @@ -15,10 +15,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
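[Editor's note: the ``display_html``/``clear_html`` rewrite above replaces nested ``if self.is_notebook:`` blocks with early returns without changing behaviour. A minimal sketch of the call pattern; the HTML payload is made up.]

from eodag.utils.notebook import NotebookWidgets

widgets = NotebookWidgets()
widgets.display_html("<b>Downloading...</b>")  # silently returns outside Jupyter
widgets.clear_html()                           # likewise a no-op in a plain shell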
+from __future__ import annotations + import logging import re import socket -from urllib.error import HTTPError, URLError +from typing import Any, Callable, Dict, List, Optional, Union +from urllib.error import URLError from urllib.request import urlopen import concurrent.futures @@ -35,13 +38,13 @@ class _TextOpener: """Exhaust read methods for pystac.StacIO in the order defined in the openers list""" - def __init__(self, timeout): + def __init__(self, timeout: int) -> None: self.openers = [self.read_local_json, self.read_http_remote_json] # Only used by read_http_remote_json self.timeout = timeout @staticmethod - def read_local_json(url, as_json): + def read_local_json(url: str, as_json: bool = False) -> Any: """Read JSON local file""" try: if as_json: @@ -54,7 +57,7 @@ def read_local_json(url, as_json): logger.debug("read_local_json is not the right STAC opener") raise STACOpenerError - def read_http_remote_json(self, url, as_json): + def read_http_remote_json(self, url: str, as_json: bool = False) -> Any: """Read JSON remote HTTP file""" try: res = urlopen(url, timeout=self.timeout) @@ -78,11 +81,8 @@ def read_http_remote_json(self, url, as_json): else: logger.debug("read_http_remote_json is not the right STAC opener") raise STACOpenerError - except HTTPError: - logger.debug("read_http_remote_json is not the right STAC opener") - raise STACOpenerError - def __call__(self, url, as_json=False): + def __call__(self, url: str, as_json: bool = False) -> Any: res = None while self.openers: try: @@ -98,8 +98,11 @@ def __call__(self, url, as_json=False): def fetch_stac_items( - stac_path, recursive=False, max_connections=100, timeout=HTTP_REQ_TIMEOUT -): + stac_path: str, + recursive: bool = False, + max_connections: int = 100, + timeout: int = HTTP_REQ_TIMEOUT, +) -> List[Dict[str, Any]]: """Fetch STAC item from a single item file or items from a catalog. :param stac_path: A STAC object filepath @@ -132,11 +135,16 @@ def fetch_stac_items( raise STACOpenerError(f"{stac_path} must be a STAC catalog or a STAC item") -def _fetch_stac_items_from_catalog(cat, recursive, max_connections, _text_opener): +def _fetch_stac_items_from_catalog( + cat: pystac.Catalog, + recursive: bool, + max_connections: int, + _text_opener: Callable[[str, bool], Any], +) -> List[Any]: """Fetch items from a STAC catalog""" # pystac cannot yet return links from a single file catalog, see: # https://github.com/stac-utils/pystac/issues/256 - extensions = getattr(cat, "stac_extensions", None) + extensions: Optional[Union[List[str], str]] = getattr(cat, "stac_extensions", None) if extensions: extensions = extensions if isinstance(extensions, list) else [extensions] if "single-file-stac" in extensions: @@ -146,7 +154,9 @@ def _fetch_stac_items_from_catalog(cat, recursive, max_connections, _text_opener # Making the links absolutes allow for both relative and absolute links # to be handled. 
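# [Editor's note: a sketch of how the re-typed fetch_stac_items() above is
# called; the catalog path is hypothetical, recursive=True walks sub-catalogs.]
from eodag.utils.stac_reader import fetch_stac_items

items = fetch_stac_items("/data/stac/catalog.json", recursive=True, timeout=5)
print(f"fetched {len(items)} STAC items")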
if not recursive: - hrefs = [link.get_absolute_href() for link in cat.get_item_links()] + hrefs: List[Optional[str]] = [ + link.get_absolute_href() for link in cat.get_item_links() + ] else: hrefs = [] for parent_catalog, _, _ in cat.walk(): @@ -154,14 +164,14 @@ def _fetch_stac_items_from_catalog(cat, recursive, max_connections, _text_opener link.get_absolute_href() for link in parent_catalog.get_item_links() ] - items = [] + items: List[Dict[Any, Any]] = [] if hrefs: logger.debug("Fetching %s items", len(hrefs)) with concurrent.futures.ThreadPoolExecutor( max_workers=max_connections ) as executor: future_to_href = ( - executor.submit(_text_opener, href, True) for href in hrefs + executor.submit(_text_opener, str(href), True) for href in hrefs ) for future in concurrent.futures.as_completed(future_to_href): item = future.result() diff --git a/pyproject.toml b/pyproject.toml index 86233d364..53e0795aa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] -fallback_version = "2.11.1.dev0" +fallback_version = "2.12.1.dev0" [[tool.mypy.overrides]] module = [ diff --git a/pytest.ini b/pytest.ini index e1f2e4104..56283ece4 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,5 +1,6 @@ [pytest] testpaths = + eodag/types eodag/utils tests addopts = --doctest-modules --disable-socket --allow-unix-socket diff --git a/setup.cfg b/setup.cfg index 6438485cc..a346cc3d0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -7,7 +7,7 @@ author = CS GROUP - France (CSSI) author_email = eodag@csgroup.space url = https://github.com/CS-SI/eodag license = Apache 2.0 -license_file = LICENSE +license_files = LICENSE project_urls = Bug Tracker = https://github.com/CS-SI/eodag/issues/ Documentation = https://eodag.readthedocs.io @@ -39,28 +39,34 @@ python_requires = >=3.6 install_requires = click requests + urllib3 python-dateutil PyYAML tqdm shapely pyshp - owslib < 0.26;python_version>='3.10' - owslib;python_version<'3.10' + OWSLib >=0.27.1 orjson geojson pyproj >= 2.1.0 usgs >= 0.3.1 boto3 + botocore fastapi >= 0.93.0 + starlette uvicorn jsonpath-ng < 1.6.0 lxml - whoosh + Whoosh pystac >= 1.0.0b1 ecmwf-api-client cdsapi stream-zip requests-ftp + pydantic >= 2.1.0 + typing_extensions + annotated-types + setuptools [options.extras_require] dev = @@ -82,6 +88,8 @@ dev = pre-commit responses < 0.24.0 fastapi[all] + stdlib-list + notebook = tqdm[notebook] tutorials = eodag-cube >= 0.2.0 @@ -97,10 +105,13 @@ docs = sphinx sphinx-book-theme < 1.0.0 sphinx-copybutton + sphinxcontrib-applehelp < 1.0.8 + sphinxcontrib-devhelp < 1.0.6 + sphinxcontrib-htmlhelp < 2.0.5 + sphinxcontrib-qthelp < 1.0.7 + sphinxcontrib-serializinghtml < 1.1.10 sphinxcontrib-jquery nbsphinx - ipython!=8.7.0 - # pined ipython until https://github.com/spatialaudio/nbsphinx/issues/687 is fixed [options.packages.find] exclude = @@ -142,6 +153,7 @@ eodag.plugins.download = AwsDownload = eodag.plugins.download.aws:AwsDownload HTTPDownload = eodag.plugins.download.http:HTTPDownload S3RestDownload = eodag.plugins.download.s3rest:S3RestDownload + CreodiasS3Download = eodag.plugins.download.creodias_s3:CreodiasS3Download eodag.plugins.search = CSWSearch = eodag.plugins.search.csw:CSWSearch QueryStringSearch = eodag.plugins.search.qssearch:QueryStringSearch @@ -152,6 +164,7 @@ eodag.plugins.search = StaticStacSearch = eodag.plugins.search.static_stac_search:StaticStacSearch BuildPostSearchResult = 
eodag.plugins.search.build_search_result:BuildPostSearchResult DataRequestSearch = eodag.plugins.search.data_request_search:DataRequestSearch + CreodiasS3Search = eodag.plugins.search.creodias_s3:CreodiasS3Search [flake8] ignore = E203, W503 diff --git a/tests/context.py b/tests/context.py index 7f30d883d..72b07403c 100644 --- a/tests/context.py +++ b/tests/context.py @@ -65,6 +65,8 @@ from eodag.plugins.manager import PluginManager from eodag.plugins.search.base import Search from eodag.rest.stac import DEFAULT_MISSION_START_DATE +from eodag.types import model_fields_to_annotated +from eodag.types.queryables import CommonQueryables, Queryables from eodag.utils import ( DEFAULT_STREAM_REQUESTS_TIMEOUT, HTTP_REQ_TIMEOUT, @@ -76,8 +78,8 @@ merge_mappings, path_to_uri, ProgressCallback, - uri_to_path, DownloadedCallback, + uri_to_path, parse_qsl, urlsplit, GENERIC_PRODUCT_TYPE, @@ -97,6 +99,7 @@ PluginImplementationError, RequestError, UnsupportedDatasetAddressScheme, + UnsupportedProductType, UnsupportedProvider, ValidationError, STACOpenerError, diff --git a/tests/integration/test_core_config.py b/tests/integration/test_core_config.py index 525a5391d..ce09a7956 100644 --- a/tests/integration/test_core_config.py +++ b/tests/integration/test_core_config.py @@ -57,9 +57,6 @@ def test_core_providers_config_update( mock__request.return_value = mock.Mock() mock__request_side_effect = [ { - "context": { - "matched": 1, - }, "features": [ { "id": "foo", diff --git a/tests/resources/NOTICE b/tests/resources/NOTICE index 5e2bb1a11..fa9ade2ee 100644 --- a/tests/resources/NOTICE +++ b/tests/resources/NOTICE @@ -1,14 +1,16 @@ eodag_search_result.geojson is derived from results from https://peps.cnes.fr/ catalog. -eodag_search_result_awseos.geojson is derived from AWS EOS catalog. +awseos_search.json, s2l2a_productInfo.json and s2l2a_tileInfo.json are results obtained from AWS EOS catalog and eodag_search_result_awseos.geojson is derived from this catalog. -eodag_search_result_creodias.geojson is derived from results from https://creodias.eu/ catalog. +creodias_s3_objects.json are results obtained from https://creodias.eu/ catalog through S3 protocol and eodag_search_result_creodias.geojson is derived from results from this catalog. -eodag_search_result_mundi.geojson is derived from results from https://mundiwebservices.com/ catalog. +earth_search_search.json are results obtained from https://www.element84.com/earth-search/ catalog. -eodag_search_result_onda.geojson is derived from results from https://www.onda-dias.eu/cms/ catalog. +mundi_search.xml are results obtained from https://mundiwebservices.com/ catalog. -eodag_search_result_peps.geojson is derived from results from https://peps.cnes.fr/ catalog. +onda_count.json and onda_search.json are results obtained from https://www.onda-dias.eu/cms/ catalog and eodag_search_result_onda.geojson is derived from results from this catalog. + +peps_search.json are results obtained from https://peps.cnes.fr/ catalog and eodag_search_result_peps.geojson is derived from results from this catalog. eodag_search_result_theia.geojson is derived from results from https://theia.cnes.fr/ catalog. 
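[Editor's note: the tests/resources/constraints.json fixture added just below encodes provider constraints as a list of dicts mapping each parameter to its allowed values; a parameter combination is valid only if at least one entry allows every requested value. A self-contained sketch of that matching rule, mirroring ``_matches_constraint`` from eodag/utils/constraints.py earlier in this diff, minus the metadata-mapping indirection.]

import json

with open("tests/resources/constraints.json") as fh:
    constraints = json.load(fh)

def matches(constraint, params):
    # keys absent from an entry are unconstrained; present keys must allow the value
    return all(
        value in constraint[key] for key, value in params.items() if key in constraint
    )

params = {"year": "2000", "month": "02", "variable": "a"}
print(sum(matches(c, params) for c in constraints))  # -> 2 with this fixture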
diff --git a/tests/resources/constraints.json b/tests/resources/constraints.json new file mode 100644 index 000000000..d8a0b7f0d --- /dev/null +++ b/tests/resources/constraints.json @@ -0,0 +1,91 @@ +[ + { + "year": [ + "2000", "2001", "2002", "2003", "2004", "2005" + ], + "month": [ + "01", "02", "03" + ], + "day": [ + "01", "10", "20" + ], + "time": [ + "01:00", "12:00", "18:00" + ], + "variable": [ + "a", "b" + ], + "leadtime_hour": [ + "24", "48", "72" + ], + "type": [ + "A", "B" + ], + "product_type": [ + "ensemble_mean", "reanalysis" + ] + }, + { + "year": [ + "2000", "2001" + ], + "month": [ + "01", "02", "03", "04", "05" + ], + "day": [ + "01", "10", "20", "25" + ], + "time": [ + "01:00", "12:00", "18:00", "22:00" + ], + "variable": [ + "a", "b", "c" + ], + "type": [ + "C", "B" + ], + "product_type": [ + "ensemble_mean", "reanalysis" + ] + }, + { + "year": [ + "2002", "2003", "2004", "2005" + ], + "month": [ + "12" + ], + "day": [ + "01", "10", "20", "25" + ], + "time": [ + "00:00","01:00", "12:00", "18:00", "22:00" + ], + "variable": [ + "b", "c" + ], + "product_type": [ + "ensemble_mean", "reanalysis" + ] + }, + { + "year": [ + "2000", "2001" + ], + "month": [ + "06", "07" + ], + "day": [ + "03", "05" + ], + "time": [ + "01:00", "12:00", "18:00", "22:00" + ], + "variable": [ + "e", "f" + ], + "product_type": [ + "ensemble_mean", "reanalysis" + ] + } +] diff --git a/tests/resources/eodag_search_result_mundi.geojson b/tests/resources/eodag_search_result_mundi.geojson deleted file mode 100644 index f4930d30d..000000000 --- a/tests/resources/eodag_search_result_mundi.geojson +++ /dev/null @@ -1 +0,0 @@ -{"type": "FeatureCollection", "features": [{"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[[1.8886830142, 43.2530814432], [1.8886830142, 44.247830684], [1.4849230599, 44.247830684], [1.4849230599, 43.2530814432], [1.8886830142, 43.2530814432]]]}, "id": "S2A_MSIL1C_20191109T104251_N0208_R008_T31TCJ_20191109T111651", "properties": {"eodag_product_type": "S2_MSI_L1C", "eodag_provider": "mundi", "eodag_search_intersection": {"type": "Polygon", "coordinates": [[[1.8886830142, 43.907759172380565], [1.8886830142, 43.2530814432], [1.4849230599, 43.2530814432], [1.4849230599, 43.907759172380565], [1.8886830142, 43.907759172380565]]]}, "abstract": "name: S2A_MSIL1C_20191109T104251_N0208_R008_T31TCJ_20191109T111651", "instrument": "MSI", "platform": "Sentinel2", "platformSerialIdentifier": "S2A,S2B", "processingLevel": "L1C", "sensorType": "OPTIC", "license": "proprietary", "missionStartDate": "2015-06-23T00:00:00Z", "title": "S2A_MSIL1C_20191109T104251_N0208_R008_T31TCJ_20191109T111651", "productType": "IMAGE", "uid": "S2A_MSIL1C_20191109T104251_N0208_R008_T31TCJ_20191109T111651", "resolution": "10.0", "organisationName": "dhus", "publicationDate": "2019-11-09T14:19:04Z", "otherConstraint": "Copernicus Sentinel data", "classification": "NOT_PROTECTED", "parentIdentifier": "S2A_MSIL1C_20191109T104251_N0208_R008_T31TCJ_20191109T111651", "acquisitionType": "STRIP", "orbitNumber": "8", "orbitDirection": "DESCENDING", "cloudCover": "1.3154", "productVersion": "PROVIDER:02.08", "productQualityStatus": "NOTVERIFIED", "processingCenter": "ESAS2", "creationDate": "2019-11-09T14:26:21Z", "modificationDate": "2020-03-13T06:11:58Z", "processingDate": "2019-11-09T14:17:03Z", "sensorMode": "EO", "archivingCenter": "DIAS_", "availabilityTime": "2019-11-09T14:26:21Z", "acquisitionStation": "ESAS2", "startTimeFromAscendingNode": "2019-11-09T10:42:51Z", "completionTimeFromAscendingNode": 
"2019-11-09T10:42:51Z", "quicklook": "https://obs.eu-de.otc.t-systems.com/metadata/quicklooks/Sentinel2/S2A_MSIL1C_20191109T104251_N0208_R008_T31TCJ_20191109T111651-ql.jpg", "downloadLink": "https://obs.eu-de.otc.t-systems.com/s2-l1c-2019-q4/31/T/CJ/2019/11/09/S2A_MSIL1C_20191109T104251_N0208_R008_T31TCJ_20191109T111651", "storageStatus": "ONLINE", "thumbnail": "https://obs.eu-de.otc.t-systems.com/metadata/thumbnails/Sentinel2/S2A_MSIL1C_20191109T104251_N0208_R008_T31TCJ_20191109T111651-th.jpg", "category": null, "metadataFormat": "NATIVE", "imageFileFormat": "JPEG2000", "orderID": "UNKNOWN", "sceneIndex": "1", "link": null, "group": null, "date": "2019-11-09T10:42:51Z", "polygon": "43.2530814432 1.4849230599 43.2593919105 1.8886830142 44.247830684 1.8702372868 44.2469085875 1.8122836361 44.1446267581 1.7779727805 43.9977953345 1.7291152962 43.8509904138 1.680120001 43.7040904605 1.6316333272 43.5572238607 1.5833161177 43.4103400319 1.535547923 43.2634396658 1.4882524401 43.2530814432 1.4849230599", "generalQuality": "PASSED", "sensorQuality": "PASSED", "radiometricQuality": "PASSED", "geometricQuality": "PASSED", "formatCorrectness": "PASSED", "equivalentNumberofScene": "1", "audience": "All", "archiveInstanceReference": "ACDC_", "productDatapackSize": "1.69156288E8", "format": "SAFE", "startOrbitNumber": "22885", "datatakeId": "GS2A_20191109T104251_022885_N02.08", "tileIdentifier": "31TCJ", "manualQualityCheckStatus": "NOT_CHECKED", "manualQualityCheckStatusComment": "NOT_CHECKED", "referenceProductOnly": "true", "watcherRecordValidity": "true", "ingestionDate": "2019-11-09T14:25:11Z", "productDeliveryFormat": "folder", "checksum": "398F4778DA46E3FFD94E038EB6A3EC1E"}}, {"type": "Feature", "geometry": {"type": "Polygon", "coordinates": [[[3.1222564909, 43.2581500108], [3.1222564909, 44.253352161], [1.750816704, 44.253352161], [1.750816704, 43.2581500108], [3.1222564909, 43.2581500108]]]}, "id": "S2A_MSIL1C_20191109T104251_N0208_R008_T31TDJ_20191109T111651", "properties": {"eodag_product_type": "S2_MSI_L1C", "eodag_provider": "mundi", "eodag_search_intersection": {"type": "Polygon", "coordinates": [[[2.379835675499976, 43.2581500108], [1.750816704, 43.2581500108], [1.750816704, 43.907759172380565], [2.379835675499976, 43.907759172380565], [2.379835675499976, 43.2581500108]]]}, "abstract": "name: S2A_MSIL1C_20191109T104251_N0208_R008_T31TDJ_20191109T111651", "instrument": "MSI", "platform": "Sentinel2", "platformSerialIdentifier": "S2A,S2B", "processingLevel": "L1C", "sensorType": "OPTIC", "license": "proprietary", "missionStartDate": "2015-06-23T00:00:00Z", "title": "S2A_MSIL1C_20191109T104251_N0208_R008_T31TDJ_20191109T111651", "productType": "IMAGE", "uid": "S2A_MSIL1C_20191109T104251_N0208_R008_T31TDJ_20191109T111651", "resolution": "10.0", "organisationName": "dhus", "publicationDate": "2019-11-09T14:21:50Z", "otherConstraint": "Copernicus Sentinel data", "classification": "NOT_PROTECTED", "parentIdentifier": "S2A_MSIL1C_20191109T104251_N0208_R008_T31TDJ_20191109T111651", "acquisitionType": "STRIP", "orbitNumber": "8", "orbitDirection": "DESCENDING", "cloudCover": "39.4642", "productVersion": "PROVIDER:02.08", "productQualityStatus": "NOTVERIFIED", "processingCenter": "ESAS2", "creationDate": "2019-11-09T14:29:40Z", "modificationDate": "2020-03-13T06:11:58Z", "processingDate": "2019-11-09T14:20:05Z", "sensorMode": "EO", "archivingCenter": "DIAS_", "availabilityTime": "2019-11-09T14:29:40Z", "acquisitionStation": "ESAS2", "startTimeFromAscendingNode": "2019-11-09T10:42:51Z", 
"completionTimeFromAscendingNode": "2019-11-09T10:42:51Z", "quicklook": "https://obs.eu-de.otc.t-systems.com/metadata/quicklooks/Sentinel2/S2A_MSIL1C_20191109T104251_N0208_R008_T31TDJ_20191109T111651-ql.jpg", "downloadLink": "https://obs.eu-de.otc.t-systems.com/s2-l1c-2019-q4/31/T/DJ/2019/11/09/S2A_MSIL1C_20191109T104251_N0208_R008_T31TDJ_20191109T111651", "storageStatus": "ONLINE", "thumbnail": "https://obs.eu-de.otc.t-systems.com/metadata/thumbnails/Sentinel2/S2A_MSIL1C_20191109T104251_N0208_R008_T31TDJ_20191109T111651-th.jpg", "category": null, "metadataFormat": "NATIVE", "imageFileFormat": "JPEG2000", "orderID": "UNKNOWN", "sceneIndex": "1", "link": null, "group": null, "date": "2019-11-09T10:42:51Z", "polygon": "43.2581500108 1.7674745987 43.2647271639 3.1202599003 44.253352161 3.1222564909 44.2468684047 1.8122701566 44.1446267581 1.7779727805 44.0630145871 1.750816704 43.2581500108 1.7674745987", "generalQuality": "PASSED", "sensorQuality": "PASSED", "radiometricQuality": "PASSED", "geometricQuality": "PASSED", "formatCorrectness": "PASSED", "equivalentNumberofScene": "1", "audience": "All", "archiveInstanceReference": "ACDC_", "productDatapackSize": "8.52324544E8", "format": "SAFE", "startOrbitNumber": "22885", "datatakeId": "GS2A_20191109T104251_022885_N02.08", "tileIdentifier": "31TDJ", "manualQualityCheckStatus": "NOT_CHECKED", "manualQualityCheckStatusComment": "NOT_CHECKED", "referenceProductOnly": "true", "watcherRecordValidity": "true", "ingestionDate": "2019-11-09T14:28:36Z", "productDeliveryFormat": "folder", "checksum": "36E48A2D7BB95A597DEC0A1773FB8E7B"}}]} diff --git a/tests/resources/provider_responses/creodias_s3_objects.json b/tests/resources/provider_responses/creodias_s3_objects.json new file mode 100644 index 000000000..7c4344751 --- /dev/null +++ b/tests/resources/provider_responses/creodias_s3_objects.json @@ -0,0 +1,37 @@ +{ + "Contents": [ + { + "Key": "Sentinel-1/SAR/GRD/2014/10/10/S1A_EW_GRDH_1SDH_20141010T120113_20141010T120222_002767_0031C8_D5D7.SAFE/support/s1-map-overlay.xsd", + "LastModified": "2018-06-01T15:22:13Z", + "ETag": "504eb851210859c67acb42ea4fed5fc5", + "Size": 450, + "StorageClass": "STANDARD", + "Owner": { + "DisplayName": "user1", + "ID": "user1" + } + }, + { + "Key": "Sentinel-1/SAR/GRD/2014/10/10/S1A_EW_GRDH_1SDH_20141010T120113_20141010T120222_002767_0031C8_D5D7.SAFE/support/s1-object-types.xsd", + "LastModified": "2018-06-01T15:22:13Z", + "ETag": "8aabb6347fb0a5f555aeb3e9aa3b55bb", + "Size": 62654, + "StorageClass": "STANDARD", + "Owner": { + "DisplayName": "user1", + "ID": "user1" + } + }, + { + "Key": "Sentinel-1/SAR/GRD/2014/10/10/S1A_EW_GRDH_1SDH_20141010T120113_20141010T120222_002767_0031C8_D5D7.SAFE/support/s1-product-preview.xsd", + "LastModified": "2018-06-01T15:22:13Z", + "ETag": "351b284603c155dcb83cdb5daedaf098", + "Size": 440, + "StorageClass": "STANDARD", + "Owner": { + "DisplayName": "user1", + "ID": "user1" + } + } + ] +} diff --git a/tests/resources/provider_responses/earth_search_search.json b/tests/resources/provider_responses/earth_search_search.json index 9d8ee4a33..eb931e8e2 100644 --- a/tests/resources/provider_responses/earth_search_search.json +++ b/tests/resources/provider_responses/earth_search_search.json @@ -1,486 +1,751 @@ { "type": "FeatureCollection", - "stac_version": "1.0.0-beta.2", + "stac_version": "1.0.0", "stac_extensions": [], "context": { - "page": 1, "limit": 1, - "matched": 21830128, + "matched": 25898765, "returned": 1 }, - "numberMatched": 21830128, + "numberMatched": 25898765, 
"numberReturned": 1, "features": [ { "type": "Feature", - "stac_version": "1.0.0-beta.2", - "stac_extensions": [ - "eo", - "view", - "proj" - ], - "id": "S2B_42XVR_20230417_0_L2A", - "bbox": [ - 68.75104745483206, - 81.62978584338511, - 69.62472365760772, - 81.95683550604923 - ], + "stac_version": "1.0.0", + "id": "S2B_27VWK_20240206_0_L1C", + "properties": { + "created": "2024-02-06T15:02:12.605Z", + "platform": "sentinel-2b", + "constellation": "sentinel-2", + "instruments": [ + "msi" + ], + "eo:cloud_cover": 9.93604802923519, + "proj:epsg": 32627, + "mgrs:utm_zone": 27, + "mgrs:latitude_band": "V", + "mgrs:grid_square": "WK", + "grid:code": "MGRS-27VWK", + "view:sun_azimuth": 175.230754104899, + "view:sun_elevation": 11.604167707198599, + "s2:degraded_msi_data_percentage": 0, + "s2:product_type": "S2MSI1C", + "s2:processing_baseline": "05.10", + "s2:product_uri": "S2B_MSIL1C_20240206T131259_N0510_R081_T27VWK_20240206T133903.SAFE", + "s2:generation_time": "2024-02-06T13:39:03.000000Z", + "s2:datatake_id": "GS2B_20240206T131259_036143_N05.10", + "s2:datatake_type": "INS-NOBS", + "s2:datastrip_id": "S2B_OPER_MSI_L1C_DS_2BPS_20240206T133903_S20240206T131359_N05.10", + "s2:granule_id": "S2B_OPER_MSI_L1C_TL_2BPS_20240206T133903_A036143_T27VWK_N05.10", + "s2:reflectance_conversion_factor": 1.02982763156509, + "datetime": "2024-02-06T13:14:11.918000Z", + "s2:sequence": "0", + "earthsearch:s3_path": "s3://earthsearch-data/sentinel-2-l1c/27/V/WK/2024/2/S2B_27VWK_20240206_0_L1C", + "earthsearch:payload_id": "roda-sentinel2/workflow-sentinel2-to-stac/3a593207fb50a145677c300bf92b974f", + "processing:software": { + "sentinel2-to-stac": "0.1.1" + }, + "updated": "2024-02-06T15:02:12.605Z" + }, "geometry": { "type": "Polygon", "coordinates": [ [ [ - 69.60050945760646, - 81.62978584338511 + -21.00037677459615, + 63.12951023858156 ], [ - 68.75104745483206, - 81.95683550604923 + -21.00037271572523, + 62.81247761195228 ], [ - 69.62472365760772, - 81.95643851325639 + -20.69680827030645, + 63.12918636378938 ], [ - 69.60050945760646, - 81.62978584338511 + -21.00037677459615, + 63.12951023858156 ] ] ] }, - "properties": { - "datetime": "2023-04-17T10:36:53Z", - "platform": "sentinel-2b", - "constellation": "sentinel-2", - "instruments": [ - "msi" - ], - "gsd": 10, - "view:off_nadir": 0, - "proj:epsg": 32642, - "sentinel:utm_zone": 42, - "sentinel:latitude_band": "X", - "sentinel:grid_square": "VR", - "sentinel:sequence": "0", - "sentinel:product_id": "S2B_MSIL2A_20230417T103629_N0509_R008_T42XVR_20230417T110713", - "sentinel:data_coverage": 1.95, - "eo:cloud_cover": 100, - "sentinel:valid_cloud_cover": true, - "sentinel:processing_baseline": "05.09", - "created": "2023-04-17T14:49:46.064Z", - "updated": "2023-04-17T14:49:46.064Z" - }, - "collection": "sentinel-s2-l2a", - "assets": { - "thumbnail": { - "title": "Thumbnail", - "type": "image/png", - "roles": [ - "thumbnail" - ], - "href": "https://roda.sentinel-hub.com/sentinel-s2-l1c/tiles/42/X/VR/2023/4/17/0/preview.jpg" + "links": [ + { + "rel": "self", + "type": "application/geo+json", + "href": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-l1c/items/S2B_27VWK_20240206_0_L1C" }, - "overview": { - "title": "True color image", + { + "rel": "canonical", + "href": "s3://earthsearch-data/sentinel-2-l1c/27/V/WK/2024/2/S2B_27VWK_20240206_0_L1C/S2B_27VWK_20240206_0_L1C.json", + "type": "application/json" + }, + { + "rel": "license", + "href": "https://sentinel.esa.int/documents/247904/690755/Sentinel_Data_Legal_Notice" + }, + { + "rel": "parent", 
+ "type": "application/json", + "href": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-l1c" + }, + { + "rel": "collection", + "type": "application/json", + "href": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-l1c" + }, + { + "rel": "root", + "type": "application/json", + "href": "https://earth-search.aws.element84.com/v1" + }, + { + "rel": "thumbnail", + "href": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-l1c/items/S2B_27VWK_20240206_0_L1C/thumbnail" + } + ], + "assets": { + "blue": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B02.jp2", "type": "image/jp2", - "roles": [ - "overview" - ], - "gsd": 10, + "title": "Blue (band 2) - 10m", "eo:bands": [ { - "name": "B04", - "common_name": "red", - "center_wavelength": 0.6645, - "full_width_half_max": 0.038 - }, - { - "name": "B03", - "common_name": "green", - "center_wavelength": 0.56, - "full_width_half_max": 0.045 - }, - { - "name": "B02", + "name": "blue", "common_name": "blue", - "center_wavelength": 0.4966, + "description": "Blue (band 2)", + "center_wavelength": 0.49, "full_width_half_max": 0.098 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/qi/L2A_PVI.jp2" - }, - "info": { - "title": "Original JSON metadata", - "type": "application/json", - "roles": [ - "metadata" + "gsd": 10, + "proj:shape": [ + 10980, + 10980 + ], + "proj:transform": [ + 10, + 0, + 499980, + 0, + -10, + 7000020 + ], + "raster:bands": [ + { + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": -0.1 + } ], - "href": "https://roda.sentinel-hub.com/sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/tileInfo.json" - }, - "metadata": { - "title": "Original XML metadata", - "type": "application/xml", "roles": [ - "metadata" - ], - "href": "https://roda.sentinel-hub.com/sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/metadata.xml" + "data", + "reflectance" + ] }, - "visual": { - "title": "True color image", + "cirrus": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B10.jp2", "type": "image/jp2", - "roles": [ - "overview" - ], - "gsd": 10, + "title": "Cirrus (band 10) - 60m", "eo:bands": [ { - "name": "B04", - "common_name": "red", - "center_wavelength": 0.6645, - "full_width_half_max": 0.038 - }, - { - "name": "B03", - "common_name": "green", - "center_wavelength": 0.56, - "full_width_half_max": 0.045 - }, + "name": "cirrus", + "common_name": "cirrus", + "description": "Cirrus (band 10)", + "center_wavelength": 1.3735, + "full_width_half_max": 0.075 + } + ], + "gsd": 60, + "proj:shape": [ + 1830, + 1830 + ], + "proj:transform": [ + 60, + 0, + 499980, + 0, + -60, + 7000020 + ], + "raster:bands": [ { - "name": "B02", - "common_name": "blue", - "center_wavelength": 0.4966, - "full_width_half_max": 0.098 + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 60, + "scale": 0.0001, + "offset": -0.1 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R10m/TCI.jp2" + "roles": [ + "data", + "reflectance" + ] }, - "visual_20m": { - "title": "True color image", + "coastal": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B01.jp2", "type": "image/jp2", - "roles": [ - "overview" - ], - "gsd": 20, + "title": "Coastal aerosol (band 1) - 60m", "eo:bands": [ { - "name": "B04", - "common_name": "red", - "center_wavelength": 0.6645, - "full_width_half_max": 0.038 - }, - { - "name": "B03", - "common_name": "green", - "center_wavelength": 0.56, - "full_width_half_max": 0.045 - }, + 
"name": "coastal", + "common_name": "coastal", + "description": "Coastal aerosol (band 1)", + "center_wavelength": 0.443, + "full_width_half_max": 0.027 + } + ], + "gsd": 60, + "proj:shape": [ + 1830, + 1830 + ], + "proj:transform": [ + 60, + 0, + 499980, + 0, + -60, + 7000020 + ], + "raster:bands": [ { - "name": "B02", - "common_name": "blue", - "center_wavelength": 0.4966, - "full_width_half_max": 0.098 + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 60, + "scale": 0.0001, + "offset": -0.1 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R20m/TCI.jp2" + "roles": [ + "data", + "reflectance" + ] }, - "visual_60m": { - "title": "True color image", - "type": "image/jp2", + "granule_metadata": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/metadata.xml", + "type": "application/xml", "roles": [ - "overview" - ], - "gsd": 60, + "metadata" + ] + }, + "green": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B03.jp2", + "type": "image/jp2", + "title": "Green (band 3) - 10m", "eo:bands": [ { - "name": "B04", - "common_name": "red", - "center_wavelength": 0.6645, - "full_width_half_max": 0.038 - }, - { - "name": "B03", + "name": "green", "common_name": "green", + "description": "Green (band 3)", "center_wavelength": 0.56, "full_width_half_max": 0.045 - }, + } + ], + "gsd": 10, + "proj:shape": [ + 10980, + 10980 + ], + "proj:transform": [ + 10, + 0, + 499980, + 0, + -10, + 7000020 + ], + "raster:bands": [ { - "name": "B02", - "common_name": "blue", - "center_wavelength": 0.4966, - "full_width_half_max": 0.098 + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": -0.1 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R60m/TCI.jp2" + "roles": [ + "data", + "reflectance" + ] }, - "B01": { - "title": "Band 1 (coastal)", + "nir": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B08.jp2", "type": "image/jp2", - "roles": [ - "data" - ], - "gsd": 60, + "title": "NIR 1 (band 8) - 10m", "eo:bands": [ { - "name": "B01", - "common_name": "coastal", - "center_wavelength": 0.4439, - "full_width_half_max": 0.027 + "name": "nir", + "common_name": "nir", + "description": "NIR 1 (band 8)", + "center_wavelength": 0.842, + "full_width_half_max": 0.145 + } + ], + "gsd": 10, + "proj:shape": [ + 10980, + 10980 + ], + "proj:transform": [ + 10, + 0, + 499980, + 0, + -10, + 7000020 + ], + "raster:bands": [ + { + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": -0.1 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R60m/B01.jp2" + "roles": [ + "data", + "reflectance" + ] }, - "B02": { - "title": "Band 2 (blue)", + "nir08": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B8A.jp2", "type": "image/jp2", - "roles": [ - "data" - ], - "gsd": 10, + "title": "NIR 2 (band 8A) - 20m", "eo:bands": [ { - "name": "B02", - "common_name": "blue", - "center_wavelength": 0.4966, - "full_width_half_max": 0.098 + "name": "nir08", + "common_name": "nir08", + "description": "NIR 2 (band 8A)", + "center_wavelength": 0.865, + "full_width_half_max": 0.033 + } + ], + "gsd": 20, + "proj:shape": [ + 5490, + 5490 + ], + "proj:transform": [ + 20, + 0, + 499980, + 0, + -20, + 7000020 + ], + "raster:bands": [ + { + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": -0.1 } ], - "href": 
"s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R10m/B02.jp2" + "roles": [ + "data", + "reflectance" + ] }, - "B03": { - "title": "Band 3 (green)", + "nir09": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B09.jp2", "type": "image/jp2", - "roles": [ - "data" - ], - "gsd": 10, + "title": "NIR 3 (band 9) - 60m", "eo:bands": [ { - "name": "B03", - "common_name": "green", - "center_wavelength": 0.56, - "full_width_half_max": 0.045 + "name": "nir09", + "common_name": "nir09", + "description": "NIR 3 (band 9)", + "center_wavelength": 0.945, + "full_width_half_max": 0.026 + } + ], + "gsd": 60, + "proj:shape": [ + 1830, + 1830 + ], + "proj:transform": [ + 60, + 0, + 499980, + 0, + -60, + 7000020 + ], + "raster:bands": [ + { + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 60, + "scale": 0.0001, + "offset": -0.1 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R10m/B03.jp2" + "roles": [ + "data", + "reflectance" + ] }, - "B04": { - "title": "Band 4 (red)", + "red": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B04.jp2", "type": "image/jp2", - "roles": [ - "data" - ], - "gsd": 10, + "title": "Red (band 4) - 10m", "eo:bands": [ { - "name": "B04", + "name": "red", "common_name": "red", - "center_wavelength": 0.6645, + "description": "Red (band 4)", + "center_wavelength": 0.665, "full_width_half_max": 0.038 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R10m/B04.jp2" + "gsd": 10, + "proj:shape": [ + 10980, + 10980 + ], + "proj:transform": [ + 10, + 0, + 499980, + 0, + -10, + 7000020 + ], + "raster:bands": [ + { + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 10, + "scale": 0.0001, + "offset": -0.1 + } + ], + "roles": [ + "data", + "reflectance" + ] }, - "B05": { - "title": "Band 5", + "rededge1": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B05.jp2", "type": "image/jp2", - "roles": [ - "data" - ], - "gsd": 20, + "title": "Red edge 1 (band 5) - 20m", "eo:bands": [ { - "name": "B05", - "center_wavelength": 0.7039, + "name": "rededge1", + "common_name": "rededge", + "description": "Red edge 1 (band 5)", + "center_wavelength": 0.704, "full_width_half_max": 0.019 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R20m/B05.jp2" - }, - "B06": { - "title": "Band 6", - "type": "image/jp2", - "roles": [ - "data" - ], "gsd": 20, - "eo:bands": [ + "proj:shape": [ + 5490, + 5490 + ], + "proj:transform": [ + 20, + 0, + 499980, + 0, + -20, + 7000020 + ], + "raster:bands": [ { - "name": "B06", - "center_wavelength": 0.7402, - "full_width_half_max": 0.018 + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": -0.1 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R20m/B06.jp2" + "roles": [ + "data", + "reflectance" + ] }, - "B07": { - "title": "Band 7", + "rededge2": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B06.jp2", "type": "image/jp2", - "roles": [ - "data" + "title": "Red edge 2 (band 6) - 20m", + "eo:bands": [ + { + "name": "rededge2", + "common_name": "rededge", + "description": "Red edge 2 (band 6)", + "center_wavelength": 0.74, + "full_width_half_max": 0.018 + } ], "gsd": 20, - "eo:bands": [ + "proj:shape": [ + 5490, + 5490 + ], + "proj:transform": [ + 20, + 0, + 499980, + 0, + -20, + 7000020 + ], + "raster:bands": [ { - "name": "B07", - "center_wavelength": 0.7825, - "full_width_half_max": 0.028 + "nodata": 0, + "data_type": "uint16", + 
"bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": -0.1 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R20m/B07.jp2" + "roles": [ + "data", + "reflectance" + ] }, - "B08": { - "title": "Band 8 (nir)", + "rededge3": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B07.jp2", "type": "image/jp2", - "roles": [ - "data" - ], - "gsd": 10, + "title": "Red edge 3 (band 7) - 20m", "eo:bands": [ { - "name": "B08", - "common_name": "nir", - "center_wavelength": 0.8351, - "full_width_half_max": 0.145 + "name": "rededge3", + "common_name": "rededge", + "description": "Red edge 3 (band 7)", + "center_wavelength": 0.783, + "full_width_half_max": 0.028 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R10m/B08.jp2" - }, - "B8A": { - "title": "Band 8A", - "type": "image/jp2", - "roles": [ - "data" - ], "gsd": 20, - "eo:bands": [ + "proj:shape": [ + 5490, + 5490 + ], + "proj:transform": [ + 20, + 0, + 499980, + 0, + -20, + 7000020 + ], + "raster:bands": [ { - "name": "B8A", - "center_wavelength": 0.8648, - "full_width_half_max": 0.033 + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": -0.1 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R20m/B8A.jp2" + "roles": [ + "data", + "reflectance" + ] }, - "B09": { - "title": "Band 9", + "swir16": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B11.jp2", "type": "image/jp2", - "roles": [ - "data" - ], - "gsd": 60, + "title": "SWIR 1 (band 11) - 20m", "eo:bands": [ { - "name": "B09", - "center_wavelength": 0.945, - "full_width_half_max": 0.026 + "name": "swir16", + "common_name": "swir16", + "description": "SWIR 1 (band 11)", + "center_wavelength": 1.61, + "full_width_half_max": 0.143 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R60m/B09.jp2" - }, - "B11": { - "title": "Band 11 (swir16)", - "type": "image/jp2", - "roles": [ - "data" - ], "gsd": 20, - "eo:bands": [ + "proj:shape": [ + 5490, + 5490 + ], + "proj:transform": [ + 20, + 0, + 499980, + 0, + -20, + 7000020 + ], + "raster:bands": [ { - "name": "B11", - "common_name": "swir16", - "center_wavelength": 1.6137, - "full_width_half_max": 0.143 + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": -0.1 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R20m/B11.jp2" + "roles": [ + "data", + "reflectance" + ] }, - "B12": { - "title": "Band 12 (swir22)", + "swir22": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/B12.jp2", "type": "image/jp2", - "roles": [ - "data" - ], - "gsd": 20, + "title": "SWIR 2 (band 12) - 20m", "eo:bands": [ { - "name": "B12", + "name": "swir22", "common_name": "swir22", - "center_wavelength": 2.22024, + "description": "SWIR 2 (band 12)", + "center_wavelength": 2.19, "full_width_half_max": 0.242 } ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R20m/B12.jp2" + "gsd": 20, + "proj:shape": [ + 5490, + 5490 + ], + "proj:transform": [ + 20, + 0, + 499980, + 0, + -20, + 7000020 + ], + "raster:bands": [ + { + "nodata": 0, + "data_type": "uint16", + "bits_per_sample": 15, + "spatial_resolution": 20, + "scale": 0.0001, + "offset": -0.1 + } + ], + "roles": [ + "data", + "reflectance" + ] }, - "AOT": { - "title": "Aerosol Optical Thickness (AOT)", - "type": "image/jp2", + "thumbnail": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/preview.jpg", + "type": "image/jpeg", + "title": "Thumbnail image", 
"roles": [ - "data" - ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R60m/AOT.jp2" + "thumbnail" + ] }, - "WVP": { - "title": "Water Vapour (WVP)", - "type": "image/jp2", + "tileinfo_metadata": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/tileInfo.json", + "type": "application/json", "roles": [ - "data" - ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R10m/WVP.jp2" + "metadata" + ] }, - "SCL": { - "title": "Scene Classification Map (SCL)", + "visual": { + "href": "s3://sentinel-s2-l1c/tiles/27/V/WK/2024/2/6/0/TCI.jp2", "type": "image/jp2", - "roles": [ - "data" + "title": "True color image", + "eo:bands": [ + { + "name": "red", + "common_name": "red", + "description": "Red (band 4)", + "center_wavelength": 0.665, + "full_width_half_max": 0.038 + }, + { + "name": "green", + "common_name": "green", + "description": "Green (band 3)", + "center_wavelength": 0.56, + "full_width_half_max": 0.045 + }, + { + "name": "blue", + "common_name": "blue", + "description": "Blue (band 2)", + "center_wavelength": 0.49, + "full_width_half_max": 0.098 + } + ], + "proj:shape": [ + 10980, + 10980 + ], + "proj:transform": [ + 10, + 0, + 499980, + 0, + -10, + 7000020 ], - "href": "s3://sentinel-s2-l2a/tiles/42/X/VR/2023/4/17/0/R20m/SCL.jp2" + "roles": [ + "visual" + ] } }, - "links": [ - { - "rel": "self", - "href": "https://earth-search.aws.element84.com/v0/collections/sentinel-s2-l2a/items/S2B_42XVR_20230417_0_L2A" - }, - { - "rel": "canonical", - "href": "https://cirrus-v0-data-1qm7gekzjucbq.s3.us-west-2.amazonaws.com/sentinel-s2-l2a/42/X/VR/2023/4/S2B_42XVR_20230417_0_L2A/S2B_42XVR_20230417_0_L2A.json", - "type": "application/json" - }, - { - "title": "sentinel-s2-l2a-aws/workflow-publish-sentinel/tiles-42-X-VR-2023-4-17-0", - "rel": "via-cirrus", - "href": "https://cirrus-earth-search.aws.element84.com/v0/catid/sentinel-s2-l2a-aws/workflow-publish-sentinel/tiles-42-X-VR-2023-4-17-0" - }, - { - "rel": "parent", - "href": "https://earth-search.aws.element84.com/v0/collections/sentinel-s2-l2a" - }, - { - "rel": "collection", - "href": "https://earth-search.aws.element84.com/v0/collections/sentinel-s2-l2a" - }, - { - "rel": "root", - "href": "https://earth-search.aws.element84.com/v0/" - } - ] + "bbox": [ + -21.00037677459615, + 62.81247761195228, + -20.69680827030645, + 63.12951023858156 + ], + "stac_extensions": [ + "https://stac-extensions.github.io/view/v1.0.0/schema.json", + "https://stac-extensions.github.io/grid/v1.0.0/schema.json", + "https://stac-extensions.github.io/eo/v1.1.0/schema.json", + "https://stac-extensions.github.io/processing/v1.1.0/schema.json", + "https://stac-extensions.github.io/projection/v1.1.0/schema.json", + "https://stac-extensions.github.io/mgrs/v1.0.0/schema.json", + "https://stac-extensions.github.io/raster/v1.1.0/schema.json" + ], + "collection": "sentinel-2-l1c" } ], "links": [ { "rel": "next", - "title": "Next page of results", - "method": "POST", - "href": "https://earth-search.aws.element84.com/v0/search", - "merge": false, - "body": { - "collections": [ - "sentinel-s2-l2a" - ], - "page": 2, - "limit": 1 - } + "title": "Next page of Items", + "method": "GET", + "type": "application/geo+json", + "href": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-l1c/items?collections=sentinel-2-l1c&limit=1&next=2024-02-06T13%3A14%3A11.918000Z%2CS2B_27VWK_20240206_0_L1C%2Csentinel-2-l1c" + }, + { + "rel": "root", + "type": "application/json", + "href": "https://earth-search.aws.element84.com/v1" + }, + { + "rel": "self", + 
"type": "application/geo+json", + "href": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-l1c/items" + }, + { + "rel": "collection", + "type": "application/json", + "href": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-l1c" } ] } diff --git a/tests/resources/stac/queryables.json b/tests/resources/stac/queryables.json new file mode 100644 index 000000000..31ce3032e --- /dev/null +++ b/tests/resources/stac/queryables.json @@ -0,0 +1,36 @@ +{ + "$id": "https://planetarycomputer.microsoft.com/api/stac/v1/collections/sentinel-1-grd/queryables", + "type": "object", + "title": "STAC Queryables.", + "$schema": "http://json-schema.org/draft-07/schema#", + "properties": { + "id": { + "$ref": "https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/definitions/core/allOf/2/properties/id", + "title": "Item ID", + "description": "Item identifier" + }, + "datetime": { + "type": "string", + "title": "Acquired", + "format": "date-time", + "pattern": "(\\+00:00|Z)$", + "description": "Datetime" + }, + "geometry": { + "$ref": "https://geojson.org/schema/Feature.json", + "title": "Item Geometry", + "description": "Item Geometry" + }, + "platform": { + "enum": [ + "SENTINEL-1A", + "SENTINEL-1B" + ], + "type": "string", + "title": "Platform" + }, + "s1:processing_level":{ + "type":"string" + } + } +} diff --git a/tests/test_cli.py b/tests/test_cli.py index dbe5bbf07..f54e24bba 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -16,8 +16,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging import os -import random import re import unittest from contextlib import contextmanager @@ -40,7 +40,6 @@ download, eodag, search_crunch, - setup_logging, ) from tests.units import test_core from tests.utils import mock, no_blanks, write_eodag_conf_with_fake_credentials @@ -80,11 +79,13 @@ def setUp(self): def tearDown(self): super(TestEodagCli, self).tearDown() - # Default logging: no logging but still displays progress bars - setup_logging(1) # stop Mock and remove tmp config dir self.expanduser_mock.stop() self.tmp_home_dir.cleanup() + # reset logging + logger = logging.getLogger("eodag") + logger.handlers = [] + logger.level = 0 def test_eodag_without_args(self): """Calling eodag without arguments should print help message""" @@ -591,17 +592,21 @@ def test_eodag_list_product_type_ok(self): def test_eodag_list_product_type_with_provider_ok(self): """Calling eodag list with provider should return all supported product types of specified provider""" # noqa - provider = random.choice(test_core.TestCore.SUPPORTED_PROVIDERS) - provider_supported_product_types = [ - pt - for pt, provs in test_core.TestCore.SUPPORTED_PRODUCT_TYPES.items() - if provider in provs - if pt != GENERIC_PRODUCT_TYPE - ] - result = self.runner.invoke(eodag, ["list", "-p", provider, "--no-fetch"]) - self.assertEqual(result.exit_code, 0) - for pt in provider_supported_product_types: - self.assertIn(pt, result.output) + for provider in test_core.TestCore.SUPPORTED_PROVIDERS: + provider_supported_product_types = [ + pt + for pt, provs in test_core.TestCore.SUPPORTED_PRODUCT_TYPES.items() + if provider in provs + if pt != GENERIC_PRODUCT_TYPE + ] + result = self.runner.invoke(eodag, ["list", "-p", provider, "--no-fetch"]) + self.assertEqual(result.exit_code, 0) + for pt in provider_supported_product_types: + self.assertIn( + pt, + result.output, + f"{pt} was not found in {provider} supported product types", + ) def 
test_eodag_list_product_type_with_provider_ko(self): """Calling eodag list with unsupported provider should fail and print a list of available providers""" # noqa diff --git a/tests/test_config.py b/tests/test_config.py index 65b302d5b..e819986ec 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -437,6 +437,11 @@ def test_override_config_from_env(self): "EODAG__AWS_EOS__AUTH__CREDENTIALS__AWS_SECRET_ACCESS_KEY" ] = "secret-access-key" os.environ["EODAG__PEPS__DOWNLOAD__OUTPUTS_PREFIX"] = "/data" + # check a parameter that has not been set yet + self.assertFalse(hasattr(default_config["peps"].search, "timeout")) + self.assertNotIn("start_page", default_config["peps"].search.pagination) + os.environ["EODAG__PEPS__SEARCH__TIMEOUT"] = "3.1" + os.environ["EODAG__PEPS__SEARCH__PAGINATION__START_PAGE"] = "2" config.override_config_from_env(default_config) usgs_conf = default_config["usgs"] @@ -457,6 +462,8 @@ def test_override_config_from_env(self): peps_conf = default_config["peps"] self.assertEqual(peps_conf.download.outputs_prefix, "/data") + self.assertEqual(peps_conf.search.timeout, 3.1) + self.assertEqual(peps_conf.search.pagination["start_page"], 2) @mock.patch("requests.get", autospec=True) def test_get_ext_product_types_conf(self, mock_get): diff --git a/tests/test_end_to_end.py b/tests/test_end_to_end.py index 6aa7974b0..66e84e4c4 100644 --- a/tests/test_end_to_end.py +++ b/tests/test_end_to_end.py @@ -489,10 +489,6 @@ def test_end_to_end_search_download_usgs_satapi_aws(self): expected_filename = "{}".format(product.properties["title"]) self.execute_download(product, expected_filename, wait_sec=15) - @unittest.skip( - "The public datasets service will not be available during the DHS Move, " - + "see https://confluence.ecmwf.int/x/jSKADQ" - ) def test_end_to_end_search_download_ecmwf(self): product = self.execute_search( *ECMWF_SEARCH_ARGS, search_kwargs_dict=ECMWF_SEARCH_KWARGS diff --git a/tests/test_requirements.py b/tests/test_requirements.py new file mode 100644 index 000000000..86e155153 --- /dev/null +++ b/tests/test_requirements.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# Copyright 2024, CS GROUP - France, https://www.csgroup.eu/ +# +# This file is part of EODAG project +# https://www.github.com/CS-SI/EODAG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
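+
+# Principle of the check below: walk every .py file under ./eodag and collect
+# the top-level module of each absolute import (via `ast`), map modules to
+# distribution names with importlib_metadata.packages_distributions(), then
+# assert that any module that is neither in the standard library (stdlib_list)
+# nor whitelisted in allowed_missing_imports is declared in the
+# `install_requires` section of setup.cfg.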
+
+import ast
+import configparser
+import os
+import unittest
+
+import importlib_metadata
+from packaging.requirements import Requirement
+from stdlib_list import stdlib_list
+
+from tests.context import MisconfiguredError
+
+project_path = "./eodag"
+setup_cfg_path = "./setup.cfg"
+allowed_missing_imports = ["eodag"]
+
+
+def get_imports(filepath):
+    """Get python imports from the given file path"""
+    with open(filepath, "r") as file:
+        try:
+            root = ast.parse(file.read())
+        except UnicodeDecodeError as e:
+            raise MisconfiguredError(
+                f"UnicodeDecodeError in {filepath}: {e.object[max(e.start - 50, 0):min(e.end + 50, len(e.object))]}"
+            ) from e
+
+    for node in ast.iter_child_nodes(root):
+        if isinstance(node, ast.Import):
+            for alias in node.names:
+                yield alias.name.split(".")[0]
+        elif isinstance(node, ast.ImportFrom) and node.level == 0:
+            yield node.module.split(".")[0]
+
+
+def get_project_imports(project_path):
+    """Get python imports from the project path"""
+    imports = set()
+    for dirpath, dirs, files in os.walk(project_path):
+        for filename in files:
+            if filename.endswith(".py"):
+                filepath = os.path.join(dirpath, filename)
+                imports.update(get_imports(filepath))
+    return imports
+
+
+def get_setup_requires(setup_cfg_path):
+    """Get requirements from the given setup.cfg file path"""
+    config = configparser.ConfigParser()
+    config.read(setup_cfg_path)
+    return set(
+        [
+            Requirement(r).name
+            for r in config["options"]["install_requires"].split("\n")
+            if r
+        ]
+    )
+
+
+class TestRequirements(unittest.TestCase):
+    def test_requirements(self):
+        """Needed libraries must be in project requirements"""
+
+        project_imports = get_project_imports(project_path)
+        setup_requires = get_setup_requires(setup_cfg_path)
+        import_required_dict = importlib_metadata.packages_distributions()
+        default_libs = stdlib_list()
+
+        missing_imports = []
+        for project_import in project_imports:
+            required = import_required_dict.get(project_import, [project_import])
+            if (
+                not set(required).intersection(setup_requires)
+                and project_import not in default_libs + allowed_missing_imports
+            ):
+                missing_imports.append(project_import)
+
+        self.assertEqual(
+            len(missing_imports),
+            0,
+            f"The following libraries were not found in project requirements: {missing_imports}",
+        )
diff --git a/tests/units/test_apis_plugins.py b/tests/units/test_apis_plugins.py
index 496db0ce2..654ee6a65 100644
--- a/tests/units/test_apis_plugins.py
+++ b/tests/units/test_apis_plugins.py
@@ -16,6 +16,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
import ast +import io +import json import logging import os import unittest @@ -28,6 +30,8 @@ from ecmwfapi.api import ANONYMOUS_APIKEY_VALUES from shapely.geometry import shape +from eodag.utils import MockResponse +from tests import TEST_RESOURCES_PATH from tests.context import ( DEFAULT_DOWNLOAD_WAIT, DEFAULT_MISSION_START_DATE, @@ -114,7 +118,7 @@ def setUp(self): "grid": "2/2", "param": "228164", # total cloud cover parameter "time": "00", - "type": "pf", + "type": "cf", "class": "ti", } @@ -637,27 +641,9 @@ def setUp(self): self.product_dataset = "cams-global-reanalysis-eac4" self.product_type_params = { "dataset": self.product_dataset, - "stream": "oper", - "class": "mc", - "expver": "0001", - "variable": [ - "dust_aerosol_0.03-0.55um_mixing_ratio", - "dust_aerosol_0.55-0.9um_mixing_ratio", - "dust_aerosol_0.9-20um_mixing_ratio", - "dust_aerosol_optical_depth_550nm", - "hydrophilic_black_carbon_aerosol_mixing_ratio", - "hydrophilic_organic_matter_aerosol_mixing_ratio", - "hydrophobic_black_carbon_aerosol_mixing_ratio", - "hydrophobic_organic_matter_aerosol_mixing_ratio", - "sea_salt_aerosol_0.03-0.5um_mixing_ratio", - "sea_salt_aerosol_0.5-5um_mixing_ratio", - "sea_salt_aerosol_5-20um_mixing_ratio", - "sea_salt_aerosol_optical_depth_550nm", - "sulphate_aerosol_optical_depth_550nm", - ], - "model_level": [str(i) for i in range(1, 61)], + "format": "grib", + "variable": "2m_dewpoint_temperature", "time": "00:00", - "format": "netcdf", } self.custom_query_params = { "dataset": "cams-global-ghg-reanalysis-egg4", @@ -692,6 +678,10 @@ def test_plugins_apis_cds_logging(self): client = self.api_plugin._get_cds_client(**auth_dict) self.assertEqual(client.logger.level, logging.DEBUG) + logger = logging.getLogger("eodag") + logger.handlers = [] + logger.level = 0 + def test_plugins_apis_cds_query_dates_missing(self): """CdsApi.query must use default dates if missing""" # given start & stop @@ -719,7 +709,7 @@ def test_plugins_apis_cds_query_dates_missing(self): ) self.assertIn( eoproduct.properties["completionTimeFromAscendingNode"], - datetime.utcnow().isoformat(), + "2015-01-02T00:00:00Z", ) # missing start & stop and plugin.product_type_config set (set in core._prepare_search) @@ -736,7 +726,7 @@ def test_plugins_apis_cds_query_dates_missing(self): eoproduct.properties["startTimeFromAscendingNode"], "1985-10-26" ) self.assertEqual( - eoproduct.properties["completionTimeFromAscendingNode"], "2015-10-21" + eoproduct.properties["completionTimeFromAscendingNode"], "1985-10-27" ) def test_plugins_apis_cds_query_without_producttype(self): @@ -821,19 +811,35 @@ def test_plugins_apis_cds_authenticate(self, mock_client_status): assert auth_dict["url"] == self.api_plugin.config.api_endpoint del self.api_plugin.config.credentials + @mock.patch("eodag.plugins.download.http.requests.head", autospec=True) + @mock.patch("eodag.plugins.download.http.requests.get", autospec=True) @mock.patch( "eodag.api.core.EODataAccessGateway.fetch_product_types_list", autospec=True ) @mock.patch("eodag.plugins.apis.cds.CdsApi.authenticate", autospec=True) - @mock.patch("cdsapi.api.Client.retrieve", autospec=True) + @mock.patch("cdsapi.api.Client._api", autospec=True) def test_plugins_apis_cds_download( - self, mock_client_retrieve, mock_cds_authenticate, mock_fetch_product_types_list + self, + mock_client_api, + mock_cds_authenticate, + mock_fetch_product_types_list, + mock_get, + mock_head, ): """CdsApi.download must call the authenticate function and cdsapi Client retrieve""" mock_cds_authenticate.return_value = { 
"key": "foo:bar", "url": "http://foo.bar.baz", } + mock_client_api.return_value.location = "http://somewhere/something" + + mock_get.return_value.__enter__.return_value.iter_content.return_value = ( + io.BytesIO(b"some content") + ) + mock_get.return_value.__enter__.return_value.headers = { + "content-disposition": "" + } + mock_head.return_value.headers = {"content-disposition": ""} dag = EODataAccessGateway() dag.set_preferred_provider("cop_ads") @@ -849,16 +855,15 @@ def test_plugins_apis_cds_download( query_str = "".join(urlsplit(eoproduct.location).fragment.split("?", 1)[1:]) expected_download_request = geojson.loads(query_str) expected_dataset_name = expected_download_request.pop("dataset") - expected_path = os.path.join( - output_data_path, "%s.grib" % eoproduct.properties["title"] - ) + expected_url = f"{mock_cds_authenticate.return_value['url']}/resources/{expected_dataset_name}" + expected_path = os.path.join(output_data_path, eoproduct.properties["title"]) path = eoproduct.download(outputs_prefix=output_data_path) - mock_client_retrieve.assert_called_once_with( + mock_client_api.assert_called_once_with( mock.ANY, # instance - name=expected_dataset_name, - request=expected_download_request, - target=expected_path, + expected_url, + expected_download_request, + "POST", ) assert path == expected_path @@ -908,3 +913,27 @@ def test_plugins_apis_cds_download_all( ) assert mock_cds_download.call_count == len(eoproducts) assert len(paths) == len(eoproducts) + + @mock.patch("eodag.utils.constraints.requests.get", autospec=True) + def test_plugins_apis_cds_discover_queryables(self, mock_requests_constraints): + constraints_path = os.path.join(TEST_RESOURCES_PATH, "constraints.json") + with open(constraints_path) as f: + constraints = json.load(f) + mock_requests_constraints.return_value = MockResponse( + constraints, status_code=200 + ) + queryables = self.api_plugin.discover_queryables( + productType="CAMS_EU_AIR_QUALITY_RE" + ) + self.assertEqual(12, len(queryables)) + self.assertIn("variable", queryables) + # with additional param + queryables = self.api_plugin.discover_queryables( + productType="CAMS_EU_AIR_QUALITY_RE", + variable="a", + ) + self.assertEqual(12, len(queryables)) + queryable = queryables.get("variable") + self.assertEqual("a", queryable.__metadata__[0].get_default()) + queryable = queryables.get("month") + self.assertTrue(queryable.__metadata__[0].is_required()) diff --git a/tests/units/test_auth_plugins.py b/tests/units/test_auth_plugins.py index 47dea7f1a..048525c62 100644 --- a/tests/units/test_auth_plugins.py +++ b/tests/units/test_auth_plugins.py @@ -59,21 +59,21 @@ def setUpClass(cls): cls.providers_config, { "provider_text_token_simple_url": { - "products": {}, + "products": {"foo_product": {}}, "auth": { "type": "TokenAuth", "auth_uri": "http://foo.bar", }, }, "provider_text_token_format_url": { - "products": {}, + "products": {"foo_product": {}}, "auth": { "type": "TokenAuth", "auth_uri": "http://foo.bar?username={username}", }, }, "provider_text_token_header": { - "products": {}, + "products": {"foo_product": {}}, "auth": { "type": "TokenAuth", "auth_uri": "http://foo.bar", @@ -85,7 +85,7 @@ def setUpClass(cls): }, }, "provider_json_token_simple_url": { - "products": {}, + "products": {"foo_product": {}}, "auth": { "type": "TokenAuth", "auth_uri": "http://foo.bar", @@ -209,7 +209,7 @@ def setUpClass(cls): cls.providers_config, { "foo_provider": { - "products": {}, + "products": {"foo_product": {}}, "auth": { "type": "HttpQueryStringAuth", "auth_uri": 
"http://foo.bar", @@ -289,7 +289,7 @@ def setUpClass(cls): cls.providers_config, { "foo_provider": { - "products": {}, + "products": {"foo_product": {}}, "auth": { "type": "SASAuth", "auth_uri": "http://foo.bar?href={url}", @@ -408,7 +408,7 @@ def setUpClass(cls): cls.providers_config, { "foo_provider": { - "products": {}, + "products": {"foo_product": {}}, "auth": { "type": "KeycloakOIDCPasswordAuth", "auth_base_uri": "http://foo.bar", diff --git a/tests/units/test_constraints.py b/tests/units/test_constraints.py new file mode 100644 index 000000000..998299490 --- /dev/null +++ b/tests/units/test_constraints.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# Copyright 2024, CS GROUP - France, https://www.csgroup.eu/ +# +# This file is part of EODAG project +# https://www.github.com/CS-SI/EODAG +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import json +import os +import unittest + +from eodag.config import load_default_config +from eodag.plugins.manager import PluginManager +from eodag.utils.constraints import get_constraint_queryables_with_additional_params +from eodag.utils.exceptions import ValidationError +from tests import TEST_RESOURCES_PATH + + +class TestConstraints(unittest.TestCase): + def setUp(self) -> None: + super(TestConstraints, self).setUp() + providers_config = load_default_config() + self.plugins_manager = PluginManager(providers_config) + + def test_get_constraint_queryables_with_additional_params(self): + constraints_path = os.path.join(TEST_RESOURCES_PATH, "constraints.json") + with open(constraints_path) as f: + constraints = json.load(f) + plugins = self.plugins_manager.get_search_plugins("ERA5_SL", "cop_cds") + plugin = next(plugins) + + # filter on one parameter + queryables = get_constraint_queryables_with_additional_params( + constraints, {"variable": "f"}, plugin, "ERA5_SL" + ) + self.assertEqual(6, len(queryables)) + self.assertIn("year", queryables) + queryable = queryables.get("year") + self.assertSetEqual({"2000", "2001"}, queryable["enum"]) + self.assertIn("variable", queryables) + + # not existing parameter + queryables = get_constraint_queryables_with_additional_params( + constraints, {"param": "f"}, plugin, "ERA5_SL" + ) + self.assertIn("not_available", queryables) + self.assertEqual("param", queryables["not_available"]["enum"].pop()) + + # not existing value of parameter + with self.assertRaises(ValidationError): + get_constraint_queryables_with_additional_params( + constraints, {"variable": "g"}, plugin, "ERA5_SL" + ) + + # with params/defaults + queryables = get_constraint_queryables_with_additional_params( + constraints, + {"variable": "c", "type": "B", "year": "2000"}, + plugin, + "ERA5_SL", + ) + self.assertEqual(7, len(queryables)) + self.assertIn("year", queryables) + self.assertIn("variable", queryables) + self.assertIn("month", queryables) + self.assertIn("day", queryables) + self.assertIn("time", queryables) + self.assertIn("type", queryables) + queryable = queryables.get("time") + self.assertSetEqual({"01:00", "12:00", "18:00", "22:00"}, 
queryable["enum"]) + queryable = queryables.get("type") + self.assertSetEqual({"C", "B"}, queryable["enum"]) + queryable = queryables.get("year") + self.assertSetEqual({"2000", "2001"}, queryable["enum"]) diff --git a/tests/units/test_core.py b/tests/units/test_core.py index 1fa2ac359..e083a27bc 100644 --- a/tests/units/test_core.py +++ b/tests/units/test_core.py @@ -19,8 +19,10 @@ import copy import glob import json +import logging import os import shutil +import tempfile import unittest import uuid from pathlib import Path @@ -35,17 +37,21 @@ from tests import TEST_RESOURCES_PATH from tests.context import ( DEFAULT_MAX_ITEMS_PER_PAGE, + CommonQueryables, EODataAccessGateway, EOProduct, NoMatchingProductType, PluginImplementationError, ProviderConfig, + Queryables, RequestError, SearchResult, + UnsupportedProductType, UnsupportedProvider, get_geometry_from_various, load_default_config, makedirs, + model_fields_to_annotated, ) from tests.utils import mock, write_eodag_conf_with_fake_credentials @@ -76,14 +82,27 @@ def tearDownClass(cls): # stop Mock and remove tmp config dir cls.expanduser_mock.stop() cls.tmp_home_dir.cleanup() + # reset logging + logger = logging.getLogger("eodag") + logger.handlers = [] + logger.level = 0 class TestCore(TestCoreBase): SUPPORTED_PRODUCT_TYPES = { + "CAMS_GAC_FORECAST": ["cop_ads"], + "CAMS_EU_AIR_QUALITY_FORECAST": ["cop_ads"], + "CAMS_GFE_GFAS": ["cop_ads"], + "CAMS_GRF": ["cop_ads"], + "CAMS_GRF_AUX": ["cop_ads"], + "CAMS_SOLAR_RADIATION": ["cop_ads"], + "CAMS_GREENHOUSE_EGG4_MONTHLY": ["cop_ads"], + "CAMS_GREENHOUSE_EGG4": ["cop_ads"], + "CAMS_GREENHOUSE_INVERSION": ["cop_ads"], + "CAMS_GLOBAL_EMISSIONS": ["cop_ads"], "CAMS_EAC4": ["cop_ads"], - "CAMS_GACF_AOT": ["cop_ads"], - "CAMS_GACF_MR": ["cop_ads"], - "CAMS_GACF_RH": ["cop_ads"], + "CAMS_EAC4_MONTHLY": ["cop_ads"], + "CAMS_EU_AIR_QUALITY_RE": ["cop_ads"], "CBERS4_AWFI_L2": ["aws_eos"], "CBERS4_AWFI_L4": ["aws_eos"], "CBERS4_MUX_L2": ["aws_eos"], @@ -100,34 +119,34 @@ class TestCore(TestCoreBase): "CLMS_GLO_LAI_333M": ["wekeo"], "CLMS_GLO_NDVI_1KM_LTS": ["wekeo"], "CLMS_GLO_NDVI_333M": ["wekeo"], - "COP_DEM_GLO30_DGED": ["creodias", "wekeo"], - "COP_DEM_GLO30_DTED": ["creodias"], - "COP_DEM_GLO90_DGED": ["creodias", "wekeo"], - "COP_DEM_GLO90_DTED": ["creodias"], + "COP_DEM_GLO30_DGED": ["creodias", "creodias_s3", "earth_search", "wekeo"], + "COP_DEM_GLO30_DTED": ["creodias", "creodias_s3"], + "COP_DEM_GLO90_DGED": ["creodias", "creodias_s3", "earth_search", "wekeo"], + "COP_DEM_GLO90_DTED": ["creodias", "creodias_s3"], "EEA_DAILY_SSM_1KM": ["wekeo"], "EEA_DAILY_SWI_1KM": ["wekeo"], "EEA_DAILY_VI": ["wekeo"], - "EFAS_FORECAST": ["wekeo"], - "EFAS_HISTORICAL": ["wekeo"], - "EFAS_REFORECAST": ["wekeo"], - "EFAS_SEASONAL": ["wekeo"], - "EFAS_SEASONAL_REFORECAST": ["wekeo"], - "ERA5_LAND": ["wekeo"], - "ERA5_LAND_MONTHLY": ["wekeo"], - "ERA5_PL": ["wekeo"], - "ERA5_PL_MONTHLY": ["wekeo"], + "EFAS_FORECAST": ["cop_cds", "wekeo"], + "EFAS_HISTORICAL": ["cop_cds", "wekeo"], + "EFAS_REFORECAST": ["cop_cds", "wekeo"], + "EFAS_SEASONAL": ["cop_cds", "wekeo"], + "EFAS_SEASONAL_REFORECAST": ["cop_cds", "wekeo"], + "ERA5_LAND": ["cop_cds", "wekeo"], + "ERA5_LAND_MONTHLY": ["cop_cds", "wekeo"], + "ERA5_PL": ["cop_cds", "wekeo"], + "ERA5_PL_MONTHLY": ["cop_cds", "wekeo"], "ERA5_SL": ["cop_cds", "wekeo"], - "ERA5_SL_MONTHLY": ["wekeo"], - "FIRE_HISTORICAL": ["wekeo"], - "GLACIERS_DIST_RANDOLPH": ["wekeo"], + "ERA5_SL_MONTHLY": ["cop_cds", "wekeo"], + "FIRE_HISTORICAL": ["cop_cds", "wekeo"], + 
"GLACIERS_DIST_RANDOLPH": ["cop_cds", "wekeo"], "GLACIERS_ELEVATION_AND_MASS_CHANGE": ["wekeo"], - "GLOFAS_FORECAST": ["wekeo"], - "GLOFAS_HISTORICAL": ["wekeo"], - "GLOFAS_REFORECAST": ["wekeo"], - "GLOFAS_SEASONAL": ["wekeo"], - "GLOFAS_SEASONAL_REFORECAST": ["wekeo"], + "GLOFAS_FORECAST": ["cop_cds", "wekeo"], + "GLOFAS_HISTORICAL": ["cop_cds", "wekeo"], + "GLOFAS_REFORECAST": ["cop_cds", "wekeo"], + "GLOFAS_SEASONAL": ["cop_cds", "wekeo"], + "GLOFAS_SEASONAL_REFORECAST": ["cop_cds", "wekeo"], "L57_REFLECTANCE": ["theia"], - "L8_OLI_TIRS_C1L1": ["aws_eos", "earth_search", "earth_search_gcs", "onda"], + "L8_OLI_TIRS_C1L1": ["aws_eos", "earth_search_gcs", "onda"], "L8_REFLECTANCE": ["theia"], "LANDSAT_C2L1": [ "astraea_eod", @@ -135,7 +154,7 @@ class TestCore(TestCoreBase): "usgs", "usgs_satapi_aws", ], - "LANDSAT_C2L2": ["planetary_computer", "usgs"], + "LANDSAT_C2L2": ["usgs", "planetary_computer", "earth_search"], "LANDSAT_C2L2ALB_BT": ["usgs_satapi_aws"], "LANDSAT_C2L2ALB_SR": ["usgs_satapi_aws"], "LANDSAT_C2L2ALB_ST": ["usgs_satapi_aws"], @@ -149,7 +168,7 @@ class TestCore(TestCoreBase): "LANDSAT_TM_C2L1": ["usgs"], "LANDSAT_TM_C2L2": ["usgs"], "MODIS_MCD43A4": ["astraea_eod", "aws_eos", "planetary_computer"], - "NAIP": ["astraea_eod", "aws_eos", "planetary_computer"], + "NAIP": ["astraea_eod", "aws_eos", "planetary_computer", "earth_search"], "NEMSAUTO_TCDC": ["meteoblue"], "NEMSGLOBAL_TCDC": ["meteoblue"], "OSO": ["theia"], @@ -162,17 +181,28 @@ class TestCore(TestCoreBase): "aws_eos", "cop_dataspace", "creodias", + "creodias_s3", + "earth_search", "onda", "peps", "planetary_computer", "sara", "wekeo", ], - "S1_SAR_OCN": ["cop_dataspace", "creodias", "onda", "peps", "sara", "wekeo"], - "S1_SAR_RAW": ["cop_dataspace", "creodias", "onda", "wekeo"], + "S1_SAR_OCN": [ + "cop_dataspace", + "creodias", + "creodias_s3", + "onda", + "peps", + "sara", + "wekeo", + ], + "S1_SAR_RAW": ["cop_dataspace", "creodias", "creodias_s3", "onda", "wekeo"], "S1_SAR_SLC": [ "cop_dataspace", "creodias", + "creodias_s3", "onda", "peps", "sara", @@ -183,6 +213,7 @@ class TestCore(TestCoreBase): "aws_eos", "cop_dataspace", "creodias", + "creodias_s3", "earth_search", "earth_search_gcs", "onda", @@ -196,9 +227,8 @@ class TestCore(TestCoreBase): "aws_eos", "cop_dataspace", "creodias", - "earth_search", + "creodias_s3", "onda", - "peps", "planetary_computer", "sara", "wekeo", @@ -209,12 +239,13 @@ class TestCore(TestCoreBase): "S2_MSI_L2B_MAJA_SNOW": ["theia"], "S2_MSI_L2B_MAJA_WATER": ["theia"], "S2_MSI_L3A_WASP": ["theia"], - "S3_EFR": ["cop_dataspace", "creodias", "onda", "sara", "wekeo"], - "S3_ERR": ["cop_dataspace", "creodias", "onda", "sara", "wekeo"], - "S3_LAN": ["cop_dataspace", "creodias", "onda", "sara", "wekeo"], + "S3_EFR": ["cop_dataspace", "creodias", "creodias_s3", "onda", "sara", "wekeo"], + "S3_ERR": ["cop_dataspace", "creodias", "creodias_s3", "onda", "sara", "wekeo"], + "S3_LAN": ["cop_dataspace", "creodias", "creodias_s3", "onda", "sara", "wekeo"], "S3_OLCI_L2LFR": [ "cop_dataspace", "creodias", + "creodias_s3", "onda", "sara", "wekeo", @@ -222,40 +253,85 @@ class TestCore(TestCoreBase): "S3_OLCI_L2LRR": [ "cop_dataspace", "creodias", + "creodias_s3", + "onda", + "sara", + "wekeo", + ], + "S3_OLCI_L2WFR": [ + "cop_dataspace", + "creodias", + "creodias_s3", + "onda", + "sara", + "wekeo", + ], + "S3_OLCI_L2WRR": [ + "cop_dataspace", + "creodias", + "creodias_s3", "onda", "sara", "wekeo", ], - "S3_OLCI_L2WFR": ["cop_dataspace", "creodias", "onda", "sara", "wekeo"], - "S3_OLCI_L2WRR": 
["cop_dataspace", "creodias", "onda", "sara", "wekeo"], "S3_RAC": ["sara"], "S3_SLSTR_L1RBT": [ "cop_dataspace", "creodias", + "creodias_s3", "onda", "sara", "wekeo", ], "S3_SLSTR_L2": ["wekeo"], - "S3_SLSTR_L2AOD": ["cop_dataspace", "creodias", "sara", "wekeo"], + "S3_SLSTR_L2AOD": ["cop_dataspace", "creodias", "creodias_s3", "sara", "wekeo"], "S3_SLSTR_L2FRP": [ "cop_dataspace", "creodias", + "creodias_s3", "onda", "sara", "wekeo", ], - "S3_SLSTR_L2LST": ["cop_dataspace", "creodias", "onda", "sara"], - "S3_SLSTR_L2WST": ["cop_dataspace", "creodias", "onda", "sara", "wekeo"], - "S3_SRA": ["cop_dataspace", "creodias", "onda", "sara", "wekeo"], - "S3_SRA_A": ["cop_dataspace", "creodias", "onda", "sara", "wekeo"], - "S3_SRA_BS": ["cop_dataspace", "creodias", "onda", "sara", "wekeo"], - "S3_SY_AOD": ["cop_dataspace", "creodias", "onda", "sara"], - "S3_SY_SYN": ["cop_dataspace", "creodias", "onda", "sara", "wekeo"], - "S3_SY_V10": ["cop_dataspace", "creodias", "onda", "sara"], - "S3_SY_VG1": ["cop_dataspace", "creodias", "onda", "sara"], - "S3_SY_VGP": ["cop_dataspace", "creodias", "onda", "sara"], - "S3_WAT": ["cop_dataspace", "creodias", "onda", "sara", "wekeo"], + "S3_SLSTR_L2LST": ["cop_dataspace", "creodias", "creodias_s3", "onda", "sara"], + "S3_SLSTR_L2WST": [ + "cop_dataspace", + "creodias", + "creodias_s3", + "onda", + "sara", + "wekeo", + ], + "S3_SRA": ["cop_dataspace", "creodias", "creodias_s3", "onda", "sara", "wekeo"], + "S3_SRA_A": [ + "cop_dataspace", + "creodias", + "creodias_s3", + "onda", + "sara", + "wekeo", + ], + "S3_SRA_BS": [ + "cop_dataspace", + "creodias", + "creodias_s3", + "onda", + "sara", + "wekeo", + ], + "S3_SY_AOD": ["cop_dataspace", "creodias", "creodias_s3", "onda", "sara"], + "S3_SY_SYN": [ + "cop_dataspace", + "creodias", + "creodias_s3", + "onda", + "sara", + "wekeo", + ], + "S3_SY_V10": ["cop_dataspace", "creodias", "creodias_s3", "onda", "sara"], + "S3_SY_VG1": ["cop_dataspace", "creodias", "creodias_s3", "onda", "sara"], + "S3_SY_VGP": ["cop_dataspace", "creodias", "creodias_s3", "onda", "sara"], + "S3_WAT": ["cop_dataspace", "creodias", "creodias_s3", "onda", "sara", "wekeo"], "S3_OLCI_L2WFR_BC003": ["wekeo"], "S3_OLCI_L2WRR_BC003": ["wekeo"], "S3_SRA_1A_BC004": ["wekeo"], @@ -272,45 +348,45 @@ class TestCore(TestCoreBase): "S6_P4_L2HR_F06": ["wekeo"], "S6_AMR_L2_F06": ["wekeo"], "S5P_L1B2_IR_ALL": ["wekeo"], - "S5P_L1B_IR_SIR": ["cop_dataspace", "creodias"], - "S5P_L1B_IR_UVN": ["cop_dataspace", "creodias"], - "S5P_L1B_RA_BD1": ["cop_dataspace", "creodias", "onda"], - "S5P_L1B_RA_BD2": ["cop_dataspace", "creodias", "onda"], - "S5P_L1B_RA_BD3": ["cop_dataspace", "creodias", "onda"], - "S5P_L1B_RA_BD4": ["cop_dataspace", "creodias", "onda"], - "S5P_L1B_RA_BD5": ["cop_dataspace", "creodias", "onda"], - "S5P_L1B_RA_BD6": ["cop_dataspace", "creodias", "onda"], - "S5P_L1B_RA_BD7": ["cop_dataspace", "creodias", "onda"], - "S5P_L1B_RA_BD8": ["cop_dataspace", "creodias", "onda"], - "S5P_L2_AER_AI": ["cop_dataspace", "creodias", "onda"], - "S5P_L2_AER_LH": ["cop_dataspace", "creodias", "onda"], - "S5P_L2_CH4": ["cop_dataspace", "creodias", "onda"], - "S5P_L2_CLOUD": ["cop_dataspace", "creodias", "onda"], - "S5P_L2_CO": ["cop_dataspace", "creodias", "onda"], - "S5P_L2_HCHO": ["cop_dataspace", "creodias", "onda"], - "S5P_L2_NO2": ["cop_dataspace", "creodias", "onda"], - "S5P_L2_NP_BD3": ["cop_dataspace", "creodias", "onda"], - "S5P_L2_NP_BD6": ["cop_dataspace", "creodias", "onda"], - "S5P_L2_NP_BD7": ["cop_dataspace", "creodias", "onda"], - "S5P_L2_O3": 
["cop_dataspace", "creodias", "onda"], - "S5P_L2_O3_PR": ["cop_dataspace", "creodias", "onda"], - "S5P_L2_O3_TCL": ["cop_dataspace", "creodias"], - "S5P_L2_SO2": ["cop_dataspace", "creodias", "onda"], - "SATELLITE_CARBON_DIOXIDE": ["wekeo"], - "SATELLITE_METHANE": ["wekeo"], - "SATELLITE_SEA_LEVEL_BLACK_SEA": ["wekeo"], - "SEASONAL_MONTHLY_PL": ["wekeo"], - "SEASONAL_MONTHLY_SL": ["wekeo"], - "SEASONAL_ORIGINAL_PL": ["wekeo"], - "SEASONAL_ORIGINAL_SL": ["wekeo"], - "SEASONAL_POSTPROCESSED_PL": ["wekeo"], - "SEASONAL_POSTPROCESSED_SL": ["wekeo"], - "SIS_HYDRO_MET_PROJ": ["wekeo"], + "S5P_L1B_IR_SIR": ["cop_dataspace", "creodias", "creodias_s3"], + "S5P_L1B_IR_UVN": ["cop_dataspace", "creodias", "creodias_s3"], + "S5P_L1B_RA_BD1": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L1B_RA_BD2": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L1B_RA_BD3": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L1B_RA_BD4": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L1B_RA_BD5": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L1B_RA_BD6": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L1B_RA_BD7": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L1B_RA_BD8": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_AER_AI": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_AER_LH": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_CH4": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_CLOUD": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_CO": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_HCHO": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_NO2": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_NP_BD3": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_NP_BD6": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_NP_BD7": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_O3": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_O3_PR": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "S5P_L2_O3_TCL": ["cop_dataspace", "creodias", "creodias_s3"], + "S5P_L2_SO2": ["cop_dataspace", "creodias", "creodias_s3", "onda"], + "SATELLITE_CARBON_DIOXIDE": ["cop_cds", "wekeo"], + "SATELLITE_METHANE": ["cop_cds", "wekeo"], + "SATELLITE_SEA_LEVEL_BLACK_SEA": ["cop_cds", "wekeo"], + "SEASONAL_MONTHLY_PL": ["cop_cds", "wekeo"], + "SEASONAL_MONTHLY_SL": ["cop_cds", "wekeo"], + "SEASONAL_ORIGINAL_PL": ["cop_cds", "wekeo"], + "SEASONAL_ORIGINAL_SL": ["cop_cds", "wekeo"], + "SEASONAL_POSTPROCESSED_PL": ["cop_cds", "wekeo"], + "SEASONAL_POSTPROCESSED_SL": ["cop_cds", "wekeo"], + "SIS_HYDRO_MET_PROJ": ["cop_cds", "wekeo"], "SPOT5_SPIRIT": ["theia"], "SPOT_SWH": ["theia"], "SPOT_SWH_OLD": ["theia"], "TIGGE_CF_SFC": ["ecmwf"], - "UERRA_EUROPE_SL": ["wekeo"], + "UERRA_EUROPE_SL": ["cop_cds", "wekeo"], "VENUS_L1C": ["theia"], "VENUS_L2A_MAJA": ["theia"], "VENUS_L3A_MAJA": ["theia"], @@ -332,6 +408,7 @@ class TestCore(TestCoreBase): "cop_dataspace", "planetary_computer", "hydroweb_next", + "creodias_s3", ], } SUPPORTED_PROVIDERS = [ @@ -355,6 +432,7 @@ class TestCore(TestCoreBase): "planetary_computer", "hydroweb_next", "wekeo", + "creodias_s3", ] def setUp(self): @@ -378,7 +456,10 @@ def test_supported_providers_in_unit_test(self): def test_supported_product_types_in_unit_test(self): """Every product type must be referenced in the core unit test 
SUPPORTED_PRODUCT_TYPES class attribute""" for product_type in self.dag.list_product_types(fetch_providers=False): - self.assertIn(product_type["ID"], self.SUPPORTED_PRODUCT_TYPES.keys()) + assert ( + product_type["ID"] in self.SUPPORTED_PRODUCT_TYPES.keys() + or product_type["_id"] in self.SUPPORTED_PRODUCT_TYPES.keys() + ) def test_list_product_types_ok(self): """Core api must correctly return the list of supported product types""" @@ -388,6 +469,16 @@ def test_list_product_types_ok(self): self.assertListProductTypesRightStructure(product_type) # There should be no repeated product type in the output self.assertEqual(len(product_types), len(set(pt["ID"] for pt in product_types))) + # add alias for product type - should still work + products = self.dag.product_types_config + products["S2_MSI_L1C"]["alias"] = "S2_MSI_ALIAS" + product_types = self.dag.list_product_types(fetch_providers=False) + for product_type in product_types: + self.assertListProductTypesRightStructure(product_type) + # There should be no repeated product type in the output + self.assertEqual(len(product_types), len(set(pt["ID"] for pt in product_types))) + # use alias as id + self.assertIn("S2_MSI_ALIAS", [pt["ID"] for pt in product_types]) def test_list_product_types_for_provider_ok(self): """Core api must correctly return the list of supported product types for a given provider""" @@ -398,9 +489,14 @@ def test_list_product_types_for_provider_ok(self): self.assertIsInstance(product_types, list) for product_type in product_types: self.assertListProductTypesRightStructure(product_type) - self.assertIn( - provider, self.SUPPORTED_PRODUCT_TYPES[product_type["ID"]] - ) + if product_type["ID"] in self.SUPPORTED_PRODUCT_TYPES: + self.assertIn( + provider, self.SUPPORTED_PRODUCT_TYPES[product_type["ID"]] + ) + else: + self.assertIn( + provider, self.SUPPORTED_PRODUCT_TYPES[product_type["_id"]] + ) def test_list_product_types_for_unsupported_provider(self): """Core api must raise UnsupportedProvider error for list_product_types with unsupported provider""" @@ -532,7 +628,7 @@ def test_discover_product_types(self, mock_plugin_discover_product_types): ) @mock.patch( - "eodag.plugins.search.qssearch.QueryStringSearch.discover_product_types", + "eodag.plugins.apis.ecmwf.EcmwfApi.discover_product_types", autospec=True, return_value={ "providers_config": {"foo": {"productType": "foo"}}, @@ -781,7 +877,10 @@ def assertListProductTypesRightStructure(self, structure): self.assertIn("platformSerialIdentifier", structure) self.assertIn("processingLevel", structure) self.assertIn("sensorType", structure) - self.assertIn(structure["ID"], self.SUPPORTED_PRODUCT_TYPES) + assert ( + structure["ID"] in self.SUPPORTED_PRODUCT_TYPES + or structure["_id"] in self.SUPPORTED_PRODUCT_TYPES + ) @mock.patch("eodag.api.core.open_dir", autospec=True) @mock.patch("eodag.api.core.exists_in", autospec=True, return_value=True) @@ -975,6 +1074,91 @@ def test_update_providers_config(self): # run a 2nd time: check that it does not raise an error self.dag.update_providers_config(new_config) + @mock.patch( + "eodag.plugins.search.qssearch.StacSearch.discover_queryables", autospec=True + ) + @mock.patch( + "eodag.api.core.EODataAccessGateway.fetch_product_types_list", autospec=True + ) + def test_list_queryables( + self, mock_discover_queryables, mock_fetch_product_types_list + ): + """list_queryables must return queryables list adapted to provider and product-type""" + with self.assertRaises(UnsupportedProvider): + 
self.dag.list_queryables(provider="not_existing_provider") + + with self.assertRaises(UnsupportedProductType): + self.dag.list_queryables(productType="not_existing_product_type") + + queryables_none_none = self.dag.list_queryables() + expected_result = model_fields_to_annotated(CommonQueryables.model_fields) + self.assertEqual(len(queryables_none_none), len(expected_result)) + for key, queryable in queryables_none_none.items(): + # compare obj.__repr__ + self.assertEqual(str(expected_result[key]), str(queryable)) + + queryables_peps_none = self.dag.list_queryables(provider="peps") + expected_longer_result = model_fields_to_annotated(Queryables.model_fields) + self.assertGreater(len(queryables_peps_none), len(queryables_none_none)) + self.assertLess(len(queryables_peps_none), len(expected_longer_result)) + for key, queryable in queryables_peps_none.items(): + # compare obj.__repr__ + self.assertEqual(str(expected_longer_result[key]), str(queryable)) + + queryables_peps_s1grd = self.dag.list_queryables( + provider="peps", productType="S1_SAR_GRD" + ) + self.assertGreater(len(queryables_peps_s1grd), len(queryables_none_none)) + self.assertLess(len(queryables_peps_s1grd), len(queryables_peps_none)) + self.assertLess(len(queryables_peps_s1grd), len(expected_longer_result)) + for key, queryable in queryables_peps_s1grd.items(): + # compare obj.__repr__ + if key == "productType": + self.assertEqual("S1_SAR_GRD", queryable.__metadata__[0].get_default()) + else: + self.assertEqual(str(expected_longer_result[key]), str(queryable)) + + @mock.patch("eodag.plugins.apis.cds.CdsApi.discover_queryables", autospec=True) + def test_list_queryables_with_constraints(self, mock_discover_queryables): + plugin = next( + self.dag._plugins_manager.get_search_plugins( + provider="cop_cds", product_type="ERA5_SL" + ) + ) + # default values should be added to params + self.dag.list_queryables(provider="cop_cds", productType="ERA5_SL") + defaults = { + "productType": "ERA5_SL", + "api_product_type": "reanalysis", + "dataset": "reanalysis-era5-single-levels", + "format": "grib", + "time": "00:00", + } + mock_discover_queryables.assert_called_once_with(plugin, **defaults) + mock_discover_queryables.reset_mock() + # default values + additional param + self.dag.list_queryables(provider="cop_cds", productType="ERA5_SL", month="02") + params = { + "productType": "ERA5_SL", + "api_product_type": "reanalysis", + "dataset": "reanalysis-era5-single-levels", + "format": "grib", + "time": "00:00", + "month": "02", + } + mock_discover_queryables.assert_called_once_with(plugin, **params) + mock_discover_queryables.reset_mock() + # unset default values + self.dag.list_queryables(provider="cop_cds", productType="ERA5_SL", format=None) + defaults = { + "productType": "ERA5_SL", + "api_product_type": "reanalysis", + "dataset": "reanalysis-era5-single-levels", + "time": "00:00", + "format": None, + } + mock_discover_queryables.assert_called_once_with(plugin, **defaults) + class TestCoreConfWithEnvVar(TestCoreBase): @classmethod @@ -1039,6 +1223,20 @@ def test_core_object_prioritize_providers_file_in_envvar(self): class TestCoreInvolvingConfDir(unittest.TestCase): + @classmethod + def setUpClass(cls): + super(TestCoreInvolvingConfDir, cls).setUpClass() + cls.dag = EODataAccessGateway() + # mock os.environ to empty env + cls.mock_os_environ = mock.patch.dict(os.environ, {}, clear=True) + cls.mock_os_environ.start() + + @classmethod + def tearDownClass(cls): + super(TestCoreInvolvingConfDir, cls).tearDownClass() + # stop os.environ + 
cls.mock_os_environ.stop() + def setUp(self): super(TestCoreInvolvingConfDir, self).setUp() self.dag = EODataAccessGateway() @@ -1055,14 +1253,15 @@ def tearDown(self): except OSError: shutil.rmtree(old_path) - def execution_involving_conf_dir(self, inspect=None): + def execution_involving_conf_dir(self, inspect=None, conf_dir=None): """Check that the path(s) inspected (str, list) are created after the instantation of EODataAccessGateway. If they were already there, rename them (.old), instantiate, check, delete the new files, and restore the existing files to there previous name.""" if inspect is not None: + if conf_dir is None: + conf_dir = os.path.join(os.path.expanduser("~"), ".config", "eodag") if isinstance(inspect, str): inspect = [inspect] - conf_dir = os.path.join(os.path.expanduser("~"), ".config", "eodag") olds = [] currents = [] for inspected in inspect: @@ -1094,6 +1293,49 @@ def test_core_object_creates_locations_standard_location(self): """The core object must create a locations config file and a shp dir in standard user config location on instantiation""" # noqa self.execution_involving_conf_dir(inspect=["locations.yml", "shp"]) + def test_read_only_home_dir(self): + # standard directory + home_dir = os.path.join(os.path.expanduser("~"), ".config", "eodag") + self.execution_involving_conf_dir(inspect="eodag.yml", conf_dir=home_dir) + + # user defined directory + user_dir = os.path.join(os.path.expanduser("~"), ".config", "another_eodag") + os.environ["EODAG_CFG_DIR"] = user_dir + self.execution_involving_conf_dir(inspect="eodag.yml", conf_dir=user_dir) + shutil.rmtree(user_dir) + del os.environ["EODAG_CFG_DIR"] + + # fallback temporary folder + def makedirs_side_effect(dir): + if dir == os.path.join(os.path.expanduser("~"), ".config", "eodag"): + raise OSError("Mock makedirs error") + else: + return makedirs(dir) + + with mock.patch( + "eodag.api.core.makedirs", side_effect=makedirs_side_effect + ) as mock_makedirs: + # backup temp_dir if exists + temp_dir = temp_dir_old = os.path.join( + tempfile.gettempdir(), ".config", "eodag" + ) + if os.path.exists(temp_dir): + temp_dir_old = f"{temp_dir}.old" + shutil.move(temp_dir, temp_dir_old) + + EODataAccessGateway() + expected = [unittest.mock.call(home_dir), unittest.mock.call(temp_dir)] + mock_makedirs.assert_has_calls(expected) + self.assertTrue(os.path.exists(temp_dir)) + + # restore temp_dir + if temp_dir_old != temp_dir: + try: + shutil.rmtree(temp_dir) + except OSError: + os.unlink(temp_dir) + shutil.move(temp_dir_old, temp_dir) + class TestCoreGeometry(TestCoreBase): @classmethod @@ -1463,6 +1705,19 @@ def test__prepare_search_peps_plugins_product_available(self): finally: self.dag.set_preferred_provider(prev_fav_provider) + def test__prepare_search_peps_plugins_product_available_with_alias(self): + """_prepare_search must return the search plugins when productType is defined and alias is used""" + products = self.dag.product_types_config + products["S2_MSI_L1C"]["alias"] = "S2_MSI_ALIAS" + prev_fav_provider = self.dag.get_preferred_provider()[0] + try: + self.dag.set_preferred_provider("peps") + base = {"productType": "S2_MSI_ALIAS"} + search_plugins, _ = self.dag._prepare_search(**base) + self.assertEqual(search_plugins[0].provider, "peps") + finally: + self.dag.set_preferred_provider(prev_fav_provider) + def test__prepare_search_no_plugins_when_search_by_id(self): """_prepare_search must not return the search and auth plugins for a search by id""" base = {"id": "some_id", "provider": "some_provider"} @@ -1512,6 
+1767,9 @@ def test__search_by_id( found = self.dag._search_by_id(uid="foo", productType="bar", provider="baz") + from eodag.utils.logging import get_logging_verbose + + _ = get_logging_verbose() # get_search_plugins mock_get_search_plugins.assert_called_once_with( self.dag._plugins_manager, product_type="bar", provider="baz" @@ -1839,8 +2097,7 @@ def test_search_iter_page_does_not_handle_query_errors( """search_iter_page must propagate errors""" search_plugin.provider = "peps" search_plugin.query.side_effect = AttributeError() - prepare_seach.return_value = ([search_plugin], {}) - page_iterator = self.dag.search_iter_page_plugin() + page_iterator = self.dag.search_iter_page_plugin(search_plugin=search_plugin) with self.assertRaises(AttributeError): next(page_iterator) @@ -2055,3 +2312,38 @@ def test_download_local_product(self): with self.assertLogs(level="INFO") as cm: self.dag.download(product) self.assertIn("Local product detected. Download skipped", str(cm.output)) + + +class TestCoreProductAlias(TestCoreBase): + @classmethod + def setUpClass(cls): + super(TestCoreProductAlias, cls).setUpClass() + cls.dag = EODataAccessGateway() + products = cls.dag.product_types_config + products["S2_MSI_L1C"]["alias"] = "S2_MSI_ALIAS" + + def test_get_alias_from_product_type(self): + # return product alias + self.assertEqual( + "S2_MSI_ALIAS", self.dag.get_alias_from_product_type("S2_MSI_L1C") + ) + # product type without alias + self.assertEqual( + "S1_SAR_GRD", self.dag.get_alias_from_product_type("S1_SAR_GRD") + ) + # not existing product type + with self.assertRaises(NoMatchingProductType): + self.dag.get_alias_from_product_type("JUST_A_TYPE") + + def test_get_product_type_from_alias(self): + # return product id + self.assertEqual( + "S2_MSI_L1C", self.dag.get_product_type_from_alias("S2_MSI_ALIAS") + ) + # product type without alias + self.assertEqual( + "S1_SAR_GRD", self.dag.get_product_type_from_alias("S1_SAR_GRD") + ) + # not existing product type + with self.assertRaises(NoMatchingProductType): + self.dag.get_product_type_from_alias("JUST_A_TYPE") diff --git a/tests/units/test_download_plugins.py b/tests/units/test_download_plugins.py index a9c3cf43a..d2e6cb608 100644 --- a/tests/units/test_download_plugins.py +++ b/tests/units/test_download_plugins.py @@ -15,6 +15,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
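+# The HTTP download tests below mock `requests.get` as a context manager whose
+# iter_content() returns an in-memory io.BytesIO payload, so the download
+# plugins write real output files without any network access.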
+import io import os import shutil import stat @@ -180,7 +181,11 @@ def test_plugins_download_http_ignore_assets( self.product.remote_location ) = "http://somewhere/dowload_from_location" self.product.properties["id"] = "someproduct" - self.product.assets = {"foo": {"href": "http://somewhere/download_asset"}} + self.product.assets.clear() + self.product.assets.update({"foo": {"href": "http://somewhere/download_asset"}}) + mock_requests_get.return_value.__enter__.return_value.iter_content.side_effect = lambda *x, **y: io.BytesIO( + b"some content" + ) # download asset if ignore_assets = False plugin.config.ignore_assets = False @@ -234,9 +239,13 @@ def test_plugins_download_http_assets_filename_from_href( plugin = self.get_download_plugin(self.product) self.product.location = self.product.remote_location = "http://somewhere" self.product.properties["id"] = "someproduct" - self.product.assets = { - "foo": {"href": "http://somewhere/mal:for;matted/something?foo=bar#baz"} - } + self.product.assets.clear() + self.product.assets.update( + {"foo": {"href": "http://somewhere/mal:for;matted/something?foo=bar#baz"}} + ) + mock_requests_get.return_value.__enter__.return_value.iter_content.return_value = io.BytesIO( + b"some content" + ) mock_requests_get.return_value.__enter__.return_value.headers = { "content-disposition": "" } @@ -282,7 +291,11 @@ def test_plugins_download_http_assets_filename_from_get( plugin = self.get_download_plugin(self.product) self.product.location = self.product.remote_location = "http://somewhere" self.product.properties["id"] = "someproduct" - self.product.assets = {"foo": {"href": "http://somewhere/something"}} + self.product.assets.clear() + self.product.assets.update({"foo": {"href": "http://somewhere/something"}}) + mock_requests_get.return_value.__enter__.return_value.iter_content.return_value = io.BytesIO( + b"some content" + ) mock_requests_get.return_value.__enter__.return_value.headers = { "content-disposition": '; filename = "somethingelse"' } @@ -298,6 +311,47 @@ def test_plugins_download_http_assets_filename_from_get( ) ) + @mock.patch("eodag.plugins.download.http.requests.head", autospec=True) + @mock.patch("eodag.plugins.download.http.requests.get", autospec=True) + def test_plugins_download_http_asset_filter( + self, mock_requests_get, mock_requests_head + ): + """HTTPDownload.download() must create an outputfile""" + + plugin = self.get_download_plugin(self.product) + self.product.location = self.product.remote_location = "http://somewhere" + self.product.properties["id"] = "someproduct" + self.product.assets.clear() + self.product.assets.update( + { + "somewhere": {"href": "http://somewhere/something", "title": "foo"}, + "elsewhere": {"href": "http://elsewhere/anything", "title": "boo"}, + } + ) + mock_requests_get.return_value.__enter__.return_value.iter_content.return_value = io.BytesIO( + b"some content" + ) + mock_requests_get.return_value.__enter__.return_value.headers = { + "content-disposition": '; filename = "somethingelse"' + } + mock_requests_head.return_value.headers = {"content-disposition": ""} + + path = plugin.download( + self.product, outputs_prefix=self.output_dir, asset="else.*" + ) + + self.assertEqual(path, os.path.join(self.output_dir, "dummy_product")) + self.assertTrue(os.path.isdir(path)) + self.assertTrue( + os.path.isfile( + os.path.join(self.output_dir, "dummy_product", "somethingelse") + ) + ) + self.assertEqual(2, mock_requests_get.call_count) + self.product.location = self.product.remote_location = "http://elsewhere" + 
plugin.download(self.product, outputs_prefix=self.output_dir) + self.assertEqual(6, mock_requests_get.call_count) + @mock.patch("eodag.plugins.download.http.requests.head", autospec=True) @mock.patch("eodag.plugins.download.http.requests.get", autospec=True) def test_plugins_download_http_assets_filename_from_head( @@ -308,7 +362,11 @@ def test_plugins_download_http_assets_filename_from_head( plugin = self.get_download_plugin(self.product) self.product.location = self.product.remote_location = "http://somewhere" self.product.properties["id"] = "someproduct" - self.product.assets = {"foo": {"href": "http://somewhere/something"}} + self.product.assets.clear() + self.product.assets.update({"foo": {"href": "http://somewhere/something"}}) + mock_requests_get.return_value.__enter__.return_value.iter_content.return_value = io.BytesIO( + b"some content" + ) mock_requests_get.return_value.__enter__.return_value.headers = { "content-disposition": '; filename = "somethingelse"' } @@ -336,15 +394,21 @@ def test_plugins_download_http_assets_size( plugin = self.get_download_plugin(self.product) self.product.location = self.product.remote_location = "http://somewhere" - self.product.assets = { - "foo": {"href": "http://somewhere/a"}, - "bar": {"href": "http://somewhere/b"}, - } + self.product.assets.clear() + self.product.assets.update( + { + "foo": {"href": "http://somewhere/a"}, + "bar": {"href": "http://somewhere/b"}, + } + ) mock_requests_head.return_value.headers = { "Content-length": "1", "content-disposition": '; size = "2"', } + mock_requests_get.return_value.__enter__.return_value.iter_content.return_value = io.BytesIO( + b"some content" + ) mock_requests_get.return_value.__enter__.return_value.headers = { "Content-length": "3", "content-disposition": '; size = "4"', @@ -359,10 +423,13 @@ def test_plugins_download_http_assets_size( mock_requests_head.return_value.headers.pop("Content-length") mock_progress_callback_reset.reset_mock() self.product.location = "http://somewhere" - self.product.assets = { - "foo": {"href": "http://somewhere/a"}, - "bar": {"href": "http://somewhere/b"}, - } + self.product.assets.clear() + self.product.assets.update( + { + "foo": {"href": "http://somewhere/a"}, + "bar": {"href": "http://somewhere/b"}, + } + ) with TemporaryDirectory() as temp_dir: plugin.download(self.product, outputs_prefix=temp_dir) mock_progress_callback_reset.assert_called_once_with(mock.ANY, total=2 + 2) @@ -371,24 +438,33 @@ def test_plugins_download_http_assets_size( mock_requests_head.return_value.headers.pop("content-disposition") mock_progress_callback_reset.reset_mock() self.product.location = "http://somewhere" - self.product.assets = { - "foo": {"href": "http://somewhere/a"}, - "bar": {"href": "http://somewhere/b"}, - } + self.product.assets.clear() + self.product.assets.update( + { + "foo": {"href": "http://somewhere/a"}, + "bar": {"href": "http://somewhere/b"}, + } + ) with TemporaryDirectory() as temp_dir: plugin.download(self.product, outputs_prefix=temp_dir) mock_progress_callback_reset.assert_called_once_with(mock.ANY, total=3 + 3) # size from GET / content-disposition + mock_requests_get.return_value.__enter__.return_value.iter_content.return_value = io.BytesIO( + b"some content" + ) mock_requests_get.return_value.__enter__.return_value.headers.pop( "Content-length" ) mock_progress_callback_reset.reset_mock() self.product.location = "http://somewhere" - self.product.assets = { - "foo": {"href": "http://somewhere/a"}, - "bar": {"href": "http://somewhere/b"}, - } + 
+        self.product.assets.clear()
+        self.product.assets.update(
+            {
+                "foo": {"href": "http://somewhere/a"},
+                "bar": {"href": "http://somewhere/b"},
+            }
+        )
         with TemporaryDirectory() as temp_dir:
             plugin.download(self.product, outputs_prefix=temp_dir)
         mock_progress_callback_reset.assert_called_once_with(mock.ANY, total=4 + 4)
@@ -401,13 +477,16 @@ def test_plugins_download_http_one_local_asset(
         plugin = self.get_download_plugin(self.product)
         self.product.location = self.product.remote_location = "http://somewhere"
         self.product.properties["id"] = "someproduct"
-        self.product.assets = {
-            "foo": {
-                "href": path_to_uri(
-                    os.path.abspath(os.path.join(os.sep, "somewhere", "something"))
-                )
+        self.product.assets.clear()
+        self.product.assets.update(
+            {
+                "foo": {
+                    "href": path_to_uri(
+                        os.path.abspath(os.path.join(os.sep, "somewhere", "something"))
+                    )
+                }
             }
-        }
+        )

         path = plugin.download(self.product, outputs_prefix=self.output_dir)

@@ -423,32 +502,38 @@ def test_plugins_download_http_several_local_assets(
         plugin = self.get_download_plugin(self.product)
         self.product.location = self.product.remote_location = "http://somewhere"
         self.product.properties["id"] = "someproduct"
-        self.product.assets = {
-            "foo": {
-                "href": path_to_uri(
-                    os.path.abspath(os.path.join(os.sep, "somewhere", "something"))
-                )
-            },
-            "bar": {
-                "href": path_to_uri(
-                    os.path.abspath(
-                        os.path.join(os.sep, "somewhere", "something", "else")
+        self.product.assets.clear()
+        self.product.assets.update(
+            {
+                "foo": {
+                    "href": path_to_uri(
+                        os.path.abspath(os.path.join(os.sep, "somewhere", "something"))
                     )
-                )
-            },
-            "baz": {
-                "href": path_to_uri(
-                    os.path.abspath(
-                        os.path.join(os.sep, "somewhere", "another", "thing")
+                },
+                "bar": {
+                    "href": path_to_uri(
+                        os.path.abspath(
+                            os.path.join(os.sep, "somewhere", "something", "else")
+                        )
                     )
-                )
-            },
-        }
+                },
+                "baz": {
+                    "href": path_to_uri(
+                        os.path.abspath(
+                            os.path.join(os.sep, "somewhere", "another", "thing")
+                        )
+                    )
+                },
+            }
+        )

         path = plugin.download(self.product, outputs_prefix=self.output_dir)

         # assets common path
-        self.assertEqual(path, os.path.abspath(os.path.join(os.sep, "somewhere")))
+        self.assertEqual(
+            os.path.normcase(path),
+            os.path.normcase(os.path.abspath(os.path.join(os.sep, "somewhere"))),
+        )

         # empty product download directory should have been removed
         self.assertFalse(Path(os.path.join(self.output_dir, "dummy_product")).exists())
@@ -976,10 +1061,13 @@ def test_plugins_download_aws_safe_build_assets(
         plugin = self.get_download_plugin(self.product)
         self.product.properties["tileInfo"] = "http://example.com/tileInfo.json"
         self.product.properties["tilePath"] = "http://example.com/tilePath"
-        self.product.assets = {
-            "file1": {"href": "http://example.com/path/to/file1"},
-            "file2": {"href": "http://example.com/path/to/file2"},
-        }
+        self.product.assets.clear()
+        self.product.assets.update(
+            {
+                "file1": {"href": "http://example.com/path/to/file1"},
+                "file2": {"href": "http://example.com/path/to/file2"},
+            }
+        )
         execpected_output = os.path.join(
             self.output_dir, self.product.properties["title"]
         )
@@ -1022,6 +1110,13 @@ def test_plugins_download_aws_safe_build_assets(

         self.assertEqual(path, execpected_output)

+        # with filter for assets
+        self.product.properties["title"] = "newTitle"
+        setattr(self.product, "location", "file://path/to/file")
+        plugin.download(self.product, outputs_prefix=self.output_dir, asset="file1")
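+        # hedged note: only chunks matching "file1" should have been fetched here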
+        # 3 additional calls
+        self.assertEqual(7, mock_get_chunk_dest_path.call_count)
+

 class TestDownloadPluginS3Rest(BaseDownloadPluginTest):
     def setUp(self):
@@ -1256,3 +1351,64 @@ def run():
         run()
         mock_order.assert_called_once_with(mock.ANY, self.product, auth=None)
         self.assertEqual(mock_order_status.call_count, 2)
+
+
+class TestDownloadPluginCreodiasS3(BaseDownloadPluginTest):
+    @mock.patch("eodag.plugins.download.aws.flatten_top_directories", autospec=True)
+    @mock.patch(
+        "eodag.plugins.download.aws.AwsDownload.check_manifest_file_list", autospec=True
+    )
+    @mock.patch(
+        "eodag.plugins.download.aws.AwsDownload.finalize_s2_safe_product", autospec=True
+    )
+    @mock.patch(
+        "eodag.plugins.download.aws.AwsDownload.get_chunk_dest_path", autospec=True
+    )
+    @mock.patch(
+        "eodag.plugins.download.creodias_s3.CreodiasS3Download._get_authenticated_objects_from_auth_keys",
+        autospec=True,
+    )
+    @mock.patch("eodag.plugins.download.aws.requests.get", autospec=True)
+    def test_plugins_download_creodias_s3(
+        self,
+        mock_requests_get,
+        mock_get_authenticated_objects,
+        mock_get_chunk_dest_path,
+        mock_finalize_s2_safe_product,
+        mock_check_manifest_file_list,
+        mock_flatten_top_directories,
+    ):
+        """CreodiasS3Download.download() must fetch the product assets from the eodata S3 bucket"""
+        product = EOProduct(
+            "creodias_s3",
+            dict(
+                geometry="POINT (0 0)",
+                title="dummy_product",
+                id="dummy",
+            ),
+        )
+        product.location = product.remote_location = "a"
+        assets = {
+            "a1": {"title": "a1", "href": "s3://eodata/a/a1"},
+            "a2": {"title": "a2", "href": "s3://eodata/a/a2"},
+        }
+        product.assets = assets
+        plugin = self.get_download_plugin(product)
+        product.properties["tileInfo"] = "http://example.com/tileInfo.json"
+        # authenticated objects mock
+        mock_get_authenticated_objects.return_value.keys.return_value = [
+            "a1",
+            "a2",
+        ]
+        mock_get_authenticated_objects.return_value.filter.side_effect = (
+            lambda *x, **y: [mock.Mock(size=0, key=y["Prefix"])]
+        )

+        plugin.download(product, outputs_prefix=self.output_dir, auth={})
+
+        mock_get_authenticated_objects.assert_called_once_with(
+            plugin, "eodata", "a", {}
+        )
+        self.assertEqual(mock_get_chunk_dest_path.call_count, 2)
+        self.assertEqual(mock_finalize_s2_safe_product.call_count, 0)
+        self.assertEqual(mock_check_manifest_file_list.call_count, 0)
+        self.assertEqual(mock_flatten_top_directories.call_count, 1)
diff --git a/tests/units/test_http_server.py b/tests/units/test_http_server.py
index 538bdc6ee..a7eb76027 100644
--- a/tests/units/test_http_server.py
+++ b/tests/units/test_http_server.py
@@ -28,9 +28,13 @@
 from fastapi.testclient import TestClient
 from shapely.geometry import box

+from eodag.utils import USER_AGENT, MockResponse
+from eodag.utils.exceptions import TimeOutError
 from tests import mock
 from tests.context import (
     DEFAULT_ITEMS_PER_PAGE,
+    HTTP_REQ_TIMEOUT,
+    TEST_RESOURCES_PATH,
     AuthenticationError,
     SearchResult,
     parse_header,
@@ -55,6 +59,18 @@ def setUpClass(cls):
         )
         cls.expanduser_mock.start()

+        # mock os.environ to empty env
+        cls.mock_os_environ = mock.patch.dict(os.environ, {}, clear=True)
+        cls.mock_os_environ.start()
+
+        # disable product types fetch
+        os.environ["EODAG_EXT_PRODUCT_TYPES_CFG_FILE"] = ""
+
+        # load fake credentials to prevent providers needing auth for search to be pruned
+        os.environ["EODAG_CFG_FILE"] = os.path.join(
+            TEST_RESOURCES_PATH, "wrong_credentials_conf.yml"
+        )
+
         # import after having mocked home_dir because it launches http server (and EODataAccessGateway)
         # reload eodag.rest.utils to prevent eodag_api cache conflicts
         import eodag.rest.utils
@@ -64,13 +80,6 @@ def setUpClass(cls):

         cls.eodag_http_server = eodag_http_server

-        # mock os.environ to empty env
-        cls.mock_os_environ = mock.patch.dict(os.environ, {}, clear=True)
-        cls.mock_os_environ.start()
-
-        # disable product types fetch
-        os.environ["EODAG_EXT_PRODUCT_TYPES_CFG_FILE"] = ""
-
     @classmethod
     def tearDownClass(cls):
         super(RequestTestCase, cls).tearDownClass()
@@ -445,7 +454,7 @@ def test_not_found(self):
     @mock.patch(
         "eodag.rest.utils.eodag_api.search",
         autospec=True,
-        side_effect=AuthenticationError("you are no authorized"),
+        side_effect=AuthenticationError("you are not authorized"),
    )
     def test_auth_error(self, mock_search):
         """A request to eodag server raising an Authentication error must return a 500 HTTP error code"""
@@ -458,10 +467,29 @@ def test_auth_error(self, mock_search):
             self.assertIn("description", response_content)

             self.assertIn("AuthenticationError", str(cm_logs.output))
-            self.assertIn("you are no authorized", str(cm_logs.output))
+            self.assertIn("you are not authorized", str(cm_logs.output))

         self.assertEqual(500, response.status_code)

+    @mock.patch(
+        "eodag.rest.utils.eodag_api.search",
+        autospec=True,
+        side_effect=TimeOutError("too long"),
+    )
+    def test_timeout_error(self, mock_search):
+        """A request to eodag server raising a TimeOutError must return a 504 HTTP error code"""
+        with self.assertLogs(level="ERROR") as cm_logs:
+            response = self.app.get(
+                f"search?collections={self.tested_product_type}", follow_redirects=True
+            )
+            response_content = json.loads(response.content.decode("utf-8"))
+
+            self.assertIn("description", response_content)
+            self.assertIn("TimeOutError", str(cm_logs.output))
+            self.assertIn("too long", str(cm_logs.output))
+
+        self.assertEqual(504, response.status_code)
+
     def test_filter(self):
         """latestIntersect filter should only keep the latest products once search area is fully covered"""
         result1 = self._request_valid(
@@ -679,6 +707,7 @@ def test_search_item_id_from_catalog(self):
                 "id": "foo",
                 "provider": None,
                 "productType": self.tested_product_type,
+                "_dc_qs": None,
             },
         )

@@ -690,6 +719,7 @@ def test_search_item_id_from_collection(self):
                 "id": "foo",
                 "provider": None,
                 "productType": self.tested_product_type,
+                "_dc_qs": None,
             },
         )

@@ -829,11 +859,6 @@ def test_search_response_contains_pagination_info(self):
         response = self._request_valid(f"search?collections={self.tested_product_type}")
         self.assertIn("numberMatched", response)
         self.assertIn("numberReturned", response)
-        self.assertIn("context", response)
-        self.assertEqual(1, response["context"]["page"])
-        self.assertEqual(DEFAULT_ITEMS_PER_PAGE, response["context"]["limit"])
-        self.assertIn("matched", response["context"])
-        self.assertIn("returned", response["context"])

     def test_search_provider_in_downloadlink(self):
         """Search through eodag server and check that specified provider appears in downloadLink"""
@@ -972,7 +997,7 @@ def test_download_item_from_catalog(self, mock_download, mock_auth):
         self.assertEqual(response_filename, expected_file)

     @mock.patch(
-        "eodag.plugins.apis.cds.CdsApi.authenticate",
+        "eodag.plugins.apis.usgs.UsgsApi.authenticate",
         autospec=True,
     )
     @mock.patch(
@@ -990,13 +1015,13 @@ def test_download_item_from_collection_api_plugin(self, mock_download, mock_auth):
         # use an external python API provider for this test and reset downloader
         self._request_valid_raw.patchings[0].kwargs["return_value"][0][
             0
-        ].provider = "cop_cds"
+        ].provider = "usgs"
         self._request_valid_raw.patchings[0].kwargs["return_value"][0][
             0
         ].downloader = None

         self._request_valid_raw(
-            "collections/some-collection/items/foo/download?provider=cop_cds"
+            "collections/some-collection/items/foo/download?provider=usgs"
         )
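+        # hedged note: the request should be served by the dynamically
+        # re-registered UsgsApi downloader set up above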
         mock_download.assert_called_once()

         # downloaded file should have been immediately deleted from the server
@@ -1032,18 +1057,156 @@ def test_stac_extension_oseo(self):

     def test_queryables(self):
         """Request to /queryables should return a valid response."""
-        self._request_valid("queryables", check_links=False)
+        resp = self._request_valid("queryables", check_links=False)
+        self.assertListEqual(
+            list(resp.keys()),
+            [
+                "$schema",
+                "$id",
+                "type",
+                "title",
+                "description",
+                "properties",
+                "additionalProperties",
+            ],
+        )
+
+    @mock.patch("eodag.plugins.search.qssearch.requests.get", autospec=True)
+    def test_queryables_with_provider(self, mock_requests_get):
+        """Request to /queryables for a given provider should return a valid response."""
+        resp = self._request_valid(
+            "queryables?provider=planetary_computer", check_links=False
+        )
+        self.assertListEqual(
+            list(resp.keys()),
+            [
+                "$schema",
+                "$id",
+                "type",
+                "title",
+                "description",
+                "properties",
+                "additionalProperties",
+            ],
+        )
+        mock_requests_get.assert_called_once_with(
+            url="https://planetarycomputer.microsoft.com/api/stac/v1/search/../queryables",
+            timeout=HTTP_REQ_TIMEOUT,
+            headers=USER_AGENT,
+        )

     def test_product_type_queryables(self):
         """Request to /collections/{collection_id}/queryables should return a valid response."""
-        self._request_valid(
+        resp = self._request_valid(
             f"collections/{self.tested_product_type}/queryables", check_links=False
         )
+        self.assertListEqual(
+            list(resp.keys()),
+            [
+                "$schema",
+                "$id",
+                "type",
+                "title",
+                "description",
+                "properties",
+                "additionalProperties",
+            ],
+        )

-    def test_product_type_queryables_with_provider(self):
+    @mock.patch("eodag.plugins.search.qssearch.requests.get", autospec=True)
+    def test_product_type_queryables_with_provider(self, mock_requests_get):
         """Request a collection-specific list of queryables for a given provider."""
+        queryables_path = os.path.join(TEST_RESOURCES_PATH, "stac/queryables.json")
+        with open(queryables_path) as f:
+            provider_queryables = json.load(f)
+        mock_requests_get.return_value = MockResponse(
+            provider_queryables, status_code=200
+        )

+        # no provider specified (only 1 available for the moment): queryables intersection returned
-        self._request_valid(
-            f"collections/{self.tested_product_type}/queryables?provider=peps",
+        res_no_provider = self._request_valid(
+            "collections/S1_SAR_GRD/queryables",
             check_links=False,
         )
+        mock_requests_get.assert_called_once_with(
+            url="https://planetarycomputer.microsoft.com/api/stac/v1/search/../collections/"
+            "sentinel-1-grd/queryables",
+            timeout=HTTP_REQ_TIMEOUT,
+            headers=USER_AGENT,
+        )
+        # returned queryables
+        self.assertListEqual(
+            list(res_no_provider.keys()),
+            [
+                "$schema",
+                "$id",
+                "type",
+                "title",
+                "description",
+                "properties",
+                "additionalProperties",
+            ],
+        )
+        self.assertListEqual(
+            list(res_no_provider["properties"].keys()),
+            ["ids", "geometry", "datetime"],
+        )
+        self.assertIn("geometry", res_no_provider["properties"])
+        self.assertNotIn("s1:processing_level", res_no_provider["properties"])
+
+        mock_requests_get.reset_mock()
+
+        # provider specified
+        res = self._request_valid(
+            "collections/S1_SAR_GRD/queryables?provider=planetary_computer",
+            check_links=False,
+        )
+        mock_requests_get.assert_called_once_with(
+            url="https://planetarycomputer.microsoft.com/api/stac/v1/search/../collections/"
+            "sentinel-1-grd/queryables",
+            timeout=HTTP_REQ_TIMEOUT,
+            headers=USER_AGENT,
+        )
+
+        self.assertListEqual(
+            list(res.keys()),
+            [
+                "$schema",
+                "$id",
+                "type",
+                "title",
+                "description",
+                "properties",
+                "additionalProperties",
+            ],
+        )
+
+        # property added from provider queryables
+        self.assertIn("s1:processing_level", res["properties"])
+        # property updated with info from provider queryables
+        self.assertIn("platform", res["properties"])
+        self.assertEqual("string", res["properties"]["platform"]["type"][0])
+
+    @mock.patch("eodag.utils.constraints.requests.get", autospec=True)
+    def test_product_type_queryables_from_constraints(self, mock_requests_constraints):
+        """Collection queryables should be built from the provider constraints file."""
+        constraints_path = os.path.join(TEST_RESOURCES_PATH, "constraints.json")
+        with open(constraints_path) as f:
+            constraints = json.load(f)
+        mock_requests_constraints.return_value = MockResponse(
+            constraints, status_code=200
+        )
+        res = self._request_valid(
+            "collections/ERA5_SL/queryables?provider=cop_cds",
+            check_links=False,
+        )
+
+        mock_requests_constraints.assert_called_once_with(
+            "http://datastore.copernicus-climate.eu/c3s/published-forms/c3sprod/"
+            "reanalysis-era5-single-levels/constraints.json",
+            headers=USER_AGENT,
+            timeout=5,
+        )
+        self.assertEqual(10, len(res["properties"]))
+        self.assertIn("year", res["properties"])
+        self.assertIn("ids", res["properties"])
+        self.assertIn("geometry", res["properties"])
+        self.assertNotIn("collections", res["properties"])
diff --git a/tests/units/test_metadata_mapping.py b/tests/units/test_metadata_mapping.py
index 8b6922440..78aa672ac 100644
--- a/tests/units/test_metadata_mapping.py
+++ b/tests/units/test_metadata_mapping.py
@@ -17,10 +17,12 @@
 # limitations under the License.
 import unittest

+import orjson
 from jsonpath_ng.ext import parse
 from lxml import etree
 from shapely import wkt

+from eodag.api.product.metadata_mapping import get_provider_queryable_key
 from tests.context import (
     NOT_AVAILABLE,
     format_metadata,
@@ -490,6 +492,18 @@ def test_convert_to_datetime_dict(self):
             format_metadata(to_format, date="2023-01-31T00:00"), str(expected_result)
         )

+    def test_convert_interval_to_datetime_dict(self):
+        to_format = "{date#interval_to_datetime_dict}"
+        formated = format_metadata(to_format, date="2023-01-31T00:00/2023-02-03T00:00")
+        expected_result = {
+            "year": ["2023"],
+            "month": ["01", "02"],
+            "day": ["31", "01", "02", "03"],
+        }
+        formated_dict = orjson.loads(formated.replace("'", '"'))
+        for k in expected_result.keys():
+            self.assertCountEqual(formated_dict[k], expected_result[k])
+
     def test_convert_get_ecmwf_time(self):
         to_format = "{date#get_ecmwf_time}"
         self.assertEqual(
@@ -514,3 +528,45 @@ def test_convert_get_dates_from_string(self):
             {"startDate": "2023-10-19T00:00:00Z", "endDate": "2023-10-20T00:00:00Z"}
         ),
     )
+
+
+class TestMetadataMappingFunctions(unittest.TestCase):
+    def test_get_provider_queryable_key(self):
+        metadata_mapping = {
+            "id": "id",
+            "startTimeFromAscendingNode": [
+                "datetime: {startTimeFromAscendingNode}",
+                "$.datetime",
+            ],
+            "api_product_type": ["productType", "$.properties.productType"],
+            "variable": ["variable", "$.variable"],
+            "variable_type": ["variable_type", "$.variable_type"],
+        }
+        provider_queryables = {
+            "datetime": {"type": "str", "description": "datetime"},
+            "id": {"type": "str"},
+            "productType": {"type": "str"},
+            "level": {"type": int},
+            "variable": {"type": "str"},
+            "variable_type": {"type": "str"},
+        }
+        provider_key = get_provider_queryable_key(
+            "startTimeFromAscendingNode", provider_queryables, metadata_mapping
+        )
+        self.assertEqual("datetime", provider_key)
+        provider_key = get_provider_queryable_key(
+            "api_product_type", provider_queryables, metadata_mapping
+        )
+        self.assertEqual("productType", provider_key)
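+        # hedged note: "id" exists verbatim in the provider queryables,
+        # so it should map to itself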
+        provider_key = get_provider_queryable_key(
+            "id", provider_queryables, metadata_mapping
+        )
+        self.assertEqual("id", provider_key)
+        provider_key = get_provider_queryable_key(
+            "variable_type", provider_queryables, metadata_mapping
+        )
+        self.assertEqual("variable_type", provider_key)
+        provider_key = get_provider_queryable_key(
+            "variable", provider_queryables, metadata_mapping
+        )
+        self.assertEqual("variable", provider_key)
diff --git a/tests/units/test_search_plugins.py b/tests/units/test_search_plugins.py
index 1a9db714d..7b9e80a6f 100644
--- a/tests/units/test_search_plugins.py
+++ b/tests/units/test_search_plugins.py
@@ -22,11 +22,15 @@
 from pathlib import Path
 from unittest import mock

+import boto3
 import dateutil
+import requests
 import responses
 import yaml
+from botocore.stub import Stubber
 from requests import RequestException

+from eodag.utils.exceptions import TimeOutError
 from tests.context import (
     DEFAULT_MISSION_START_DATE,
     HTTP_REQ_TIMEOUT,
@@ -34,6 +38,7 @@
     USER_AGENT,
     AuthenticationError,
     EOProduct,
+    MisconfiguredError,
     PluginManager,
     RequestError,
     cached_parse,
@@ -526,6 +531,19 @@ def test_plugins_search_querystringseach_distinct_product_type_mtd_mapping(
         )
         self.assertNotIn("bar", products[0].properties)

+    @mock.patch(
+        "eodag.plugins.search.qssearch.requests.get",
+        autospec=True,
+        side_effect=requests.exceptions.Timeout(),
+    )
+    def test_plugins_search_querystringseach_timeout(self, mock__request):
+        """A provider request timeout must be raised as an eodag TimeOutError"""
+        search_plugin = self.get_search_plugin(self.product_type, "peps")
+        with self.assertRaises(TimeOutError):
+            search_plugin.query(
+                productType="S1_SAR_SLC",
+                auth=None,
+            )
+

 class TestSearchPluginPostJsonSearch(BaseSearchPluginTest):
     def setUp(self):
@@ -1073,7 +1091,6 @@ def test_plugins_search_odatav4search_distinct_product_type_mtd_mapping(
         geojson_geometry = self.search_criteria_s2_msi_l1c["geometry"].__geo_interface__

         mock__request.return_value = mock.Mock()
         result = {
-            "context": {"matched": 1},
             "features": [
                 {
                     "id": "foo",
@@ -1117,27 +1134,26 @@ def test_plugins_search_stacsearch_mapping_earthsearch(self, mock__request):
         mock__request.return_value = mock.Mock()
         mock__request.return_value.json.side_effect = [
             {
-                "context": {"page": 1, "limit": 2, "matched": 1, "returned": 2},
                 "features": [
                     {
                         "id": "foo",
                         "geometry": geojson_geometry,
                         "properties": {
-                            "sentinel:product_id": "S2B_MSIL1C_20201009T012345_N0209_R008_T31TCJ_20201009T123456",
+                            "s2:product_uri": "S2B_MSIL1C_20201009T012345_N0209_R008_T31TCJ_20201009T123456.SAFE",
                         },
                     },
                     {
                         "id": "bar",
                         "geometry": geojson_geometry,
                         "properties": {
-                            "sentinel:product_id": "S2B_MSIL1C_20200910T012345_N0209_R008_T31TCJ_20200910T123456",
+                            "s2:product_uri": "S2B_MSIL1C_20200910T012345_N0209_R008_T31TCJ_20200910T123456.SAFE",
                         },
                     },
                     {
                         "id": "bar",
                         "geometry": geojson_geometry,
                         "properties": {
-                            "sentinel:product_id": "S2B_MSIL1C_20201010T012345_N0209_R008_T31TCJ_20201010T123456",
+                            "s2:product_uri": "S2B_MSIL1C_20201010T012345_N0209_R008_T31TCJ_20201010T123456.SAFE",
                         },
                     },
                 ],
@@ -1171,7 +1187,6 @@ def test_plugins_search_stacsearch_default_geometry(self, mock__request):
         mock__request.return_value = mock.Mock()
         mock__request.return_value.json.side_effect = [
             {
-                "context": {"matched": 3},
                 "features": [
                     {
                         "id": "foo",
@@ -1204,7 +1219,6 @@ def test_plugins_search_stacsearch_distinct_product_type_mtd_mapping(
         """The metadata mapping for a stac provider should not mix specific product-types metadata-mapping"""
         mock__request.return_value = mock.Mock()
         result = {
-            "context": {"matched": 1},
             "features": [
                 {
                     "id": "foo",
@@ -1229,14 +1243,50 @@ def test_plugins_search_stacsearch_distinct_product_type_mtd_mapping(

         # search with another product type
         self.assertNotIn(
-            "bar", search_plugin.config.products["S2_MSI_L2A"]["metadata_mapping"]
+            "metadata_mapping", search_plugin.config.products["S1_SAR_GRD"]
         )
         products, estimate = search_plugin.query(
-            productType="S2_MSI_L2A",
+            productType="S1_SAR_GRD",
             auth=None,
         )
         self.assertNotIn("bar", products[0].properties)

+    @mock.patch("eodag.plugins.search.qssearch.StacSearch._request", autospec=True)
+    def test_plugins_search_stacsearch_distinct_product_type_mtd_mapping_astraea_eod(
+        self, mock__request
+    ):
+        """The metadata mapping for astraea_eod should correctly build assets"""
+        mock__request.return_value = mock.Mock()
+        result = {
+            "features": [
+                {
+                    "id": "foo",
+                    "geometry": None,
+                    "assets": {
+                        "productInfo": {"href": "s3://foo.bar/baz/productInfo.json"}
+                    },
+                },
+            ],
+        }
+        product_type = "S1_SAR_GRD"
+        mock__request.return_value.json.side_effect = [result]
+        search_plugin = self.get_search_plugin(product_type, "astraea_eod")
+
+        products, _ = search_plugin.query(
+            productType=product_type,
+            auth=None,
+        )
+        self.assertIn("productInfo", products[0].assets)
+        self.assertEqual(
+            products[0].assets["productInfo"]["href"],
+            "s3://foo.bar/baz/productInfo.json",
+        )
+        self.assertIn("manifest.safe", products[0].assets)
+        self.assertEqual(
+            products[0].assets["manifest.safe"]["href"],
+            "s3://foo.bar/baz/manifest.safe",
+        )
+

 class TestSearchPluginBuildPostSearchResult(BaseSearchPluginTest):
     @mock.patch("eodag.plugins.authentication.qsauth.requests.get", autospec=True)
@@ -1482,3 +1532,82 @@ def run():
         )

         run()
+
+
+class TestSearchPluginCreodiasS3Search(BaseSearchPluginTest):
+    def setUp(self):
+        super(TestSearchPluginCreodiasS3Search, self).setUp()
+        self.provider = "creodias_s3"
+
+    @mock.patch("eodag.plugins.search.qssearch.requests.get", autospec=True)
+    def test_plugins_search_creodias_s3_links(self, mock_request):
+        # s3 links should be added to products with register_downloader
+        search_plugin = self.get_search_plugin("S1_SAR_GRD", self.provider)
+        client = boto3.client("s3", aws_access_key_id="a", aws_secret_access_key="b")
+        stubber = Stubber(client)
+        s3_response_file = (
+            Path(TEST_RESOURCES_PATH) / "provider_responses/creodias_s3_objects.json"
+        )
+        with open(s3_response_file) as f:
+            list_objects_response = json.load(f)
+        creodias_search_result_file = (
+            Path(TEST_RESOURCES_PATH) / "eodag_search_result_creodias.geojson"
+        )
+        with open(creodias_search_result_file) as f:
+            creodias_search_result = json.load(f)
+        mock_request.return_value = MockResponse(creodias_search_result, 200)
+
+        res = search_plugin.query("S1_SAR_GRD")
+        for product in res[0]:
+            download_plugin = self.plugins_manager.get_download_plugin(product)
+            auth_plugin = self.plugins_manager.get_auth_plugin(self.provider)
+            stubber.add_response("list_objects", list_objects_response)
+            stubber.activate()
+            setattr(auth_plugin, "s3_client", client)
+            # fails if credentials are missing
+            auth_plugin.config.credentials = {
+                "aws_access_key_id": "",
+                "aws_secret_access_key": "",
+            }
+            with self.assertRaisesRegex(
+                MisconfiguredError,
+                r"^Incomplete credentials .* \['aws_access_key_id', 'aws_secret_access_key'\]$",
+            ):
+                product.register_downloader(download_plugin, auth_plugin)
+            auth_plugin.config.credentials = {
+                "aws_access_key_id": "foo",
+                "aws_secret_access_key": "bar",
+            }
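+            # hedged note: with complete credentials, registering the
+            # downloader should now succeed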
+            product.register_downloader(download_plugin, auth_plugin)
+        assets = res[0][0].assets
+        # check if s3 links have been created correctly
+        for asset in assets.values():
+            self.assertIn("s3://eodata/Sentinel-1/SAR/GRD/2014/10/10", asset["href"])
+
+    @mock.patch("eodag.plugins.search.qssearch.requests.get", autospec=True)
+    def test_plugins_search_creodias_s3_client_error(self, mock_request):
+        # a RequestError should be raised when fetching data from S3 fails
+        search_plugin = self.get_search_plugin("S1_SAR_GRD", self.provider)
+        client = boto3.client("s3", aws_access_key_id="a", aws_secret_access_key="b")
+        stubber = Stubber(client)
+
+        creodias_search_result_file = (
+            Path(TEST_RESOURCES_PATH) / "eodag_search_result_creodias.geojson"
+        )
+        with open(creodias_search_result_file) as f:
+            creodias_search_result = json.load(f)
+        mock_request.return_value = MockResponse(creodias_search_result, 200)
+
+        with self.assertRaises(RequestError):
+            res = search_plugin.query("S1_SAR_GRD")
+            for product in res[0]:
+                download_plugin = self.plugins_manager.get_download_plugin(product)
+                auth_plugin = self.plugins_manager.get_auth_plugin(self.provider)
+                auth_plugin.config.credentials = {
+                    "aws_access_key_id": "foo",
+                    "aws_secret_access_key": "bar",
+                }
+                stubber.add_client_error("list_objects")
+                stubber.activate()
+                setattr(auth_plugin, "s3_client", client)
+                product.register_downloader(download_plugin, auth_plugin)
diff --git a/tests/units/test_stac_utils.py b/tests/units/test_stac_utils.py
index f1ea97b59..9ca450a24 100644
--- a/tests/units/test_stac_utils.py
+++ b/tests/units/test_stac_utils.py
@@ -26,7 +26,7 @@
 from eodag.utils.exceptions import ValidationError
 from tests import TEST_RESOURCES_PATH, mock
-from tests.context import SearchResult
+from tests.context import RequestError, SearchResult


 class TestStacUtils(unittest.TestCase):
@@ -418,13 +418,31 @@ def test_search_products(self, mock_count_hits, mock_do_search):
         )
         self.assertEqual(call_kwargs["geometry"].bounds, (0.25, 43.2, 2.8, 43.9))

+    @mock.patch(
+        "eodag.plugins.search.qssearch.QueryStringSearch.do_search",
+        autospec=True,
+        side_effect=RequestError,
+    )
+    @mock.patch(
+        "eodag.plugins.search.qssearch.QueryStringSearch.count_hits",
+        autospec=True,
+        side_effect=RequestError,
+    )
+    def test_search_products_fail(self, mock_count_hits, mock_do_search):
+        """search_products must raise an error when all providers fail"""
+        with self.assertRaisesRegex(
+            RequestError,
+            r"No result could be obtained from any available provider",
+        ):
+            self.rest_utils.search_products("S2_MSI_L1C", {})
+
     @mock.patch(
         "eodag.plugins.search.qssearch.PostJsonSearch._request",
         autospec=True,
     )
     def test_search_stac_items_with_stac_providers(self, mock__request):
         """search_stac_items runs without any error with stac providers"""
-        # mock the PostJsonSearch request with the S2_MSI_L2A earth_search response search dictionary
+        # mock the PostJsonSearch request with the S2_MSI_L1C earth_search response search dictionary
         mock__request.return_value = mock.Mock()
         mock__request.return_value.json.return_value = (
             self.earth_search_resp_search_json
@@ -433,9 +451,9 @@ def test_search_stac_items_with_stac_providers(self, mock__request):

         response = self.rest_utils.search_stac_items(
             url="http://foo/search",
-            arguments={},
+            arguments={"collections": "S2_MSI_L1C"},
             root="http://foo",
-            catalogs=["S2_MSI_L2A"],
+            catalogs=[],
             provider="earth_search",
         )

@@ -445,11 +463,24 @@ def test_search_stac_items_with_stac_providers(self, mock__request):
         self.assertTrue(
             "downloadLink", "thumbnail" in response["features"][0]["assets"].keys()
         )
-        # check that assets from the provider response search are also in the response
+        # check that assets from the provider response search are reformatted in the response
+        product_id = self.earth_search_resp_search_json["features"][0]["properties"][
+            "s2:product_uri"
+        ].replace(".SAFE", "")
         for (k, v) in self.earth_search_resp_search_json["features"][0][
             "assets"
         ].items():
-            self.assertIn((k, v), response["features"][0]["assets"].items())
+            self.assertIn(k, response["features"][0]["assets"].keys())
+            # check asset server-mode download link
+            self.assertEqual(
+                response["features"][0]["assets"][k]["href"],
+                f"http://foo/collections/S2_MSI_L1C/items/{product_id}/download/{k}?provider=earth_search",
+            )
+            # check asset origin download link
+            self.assertEqual(
+                response["features"][0]["assets"][k]["alternate"]["origin"]["href"],
+                self.earth_search_resp_search_json["features"][0]["assets"][k]["href"],
+            )

         # preferred provider should not be changed
         self.assertEqual("peps", self.rest_utils.eodag_api.get_preferred_provider()[0])
@@ -479,3 +510,69 @@ def test_search_stac_items_with_non_stac_providers(self, mock__request):
         )
         # check that no other asset has also been added to the response
         self.assertEqual(len(response["features"][0]["assets"]), 2)
+
+    @mock.patch(
+        "eodag.plugins.search.qssearch.QueryStringSearch._request",
+        autospec=True,
+    )
+    def test_search_stac_items_get(self, mock__request):
+        """search_stac_items runs with GET method"""
+        # mock the QueryStringSearch request with the S2_MSI_L1C peps response search dictionary
+        mock__request.return_value = mock.Mock()
+        mock__request.return_value.json.return_value = self.peps_resp_search_json
+
+        response = self.rest_utils.search_stac_items(
+            url="http://foo/search",
+            arguments={"collections": "S2_MSI_L1C"},
+            root="http://foo/",
+            method="GET",
+        )
+
+        mock__request.assert_called()
+
+        next_link = [link for link in response["links"] if link["rel"] == "next"][0]
+
+        self.assertEqual(
+            next_link,
+            {
+                "method": "GET",
+                "body": None,
+                "rel": "next",
+                "href": "http://foo/search?collections=S2_MSI_L1C&page=2",
+                "title": "Next page",
+                "type": "application/geo+json",
+            },
+        )
+
+    @mock.patch(
+        "eodag.plugins.search.qssearch.QueryStringSearch._request",
+        autospec=True,
+    )
+    def test_search_stac_items_post(self, mock__request):
+        """search_stac_items runs with POST method"""
+        # mock the QueryStringSearch request with the S2_MSI_L1C peps response search dictionary
+        mock__request.return_value = mock.Mock()
+        mock__request.return_value.json.return_value = self.peps_resp_search_json
+
+        response = self.rest_utils.search_stac_items(
+            url="http://foo/search",
+            arguments={"collections": ["S2_MSI_L1C"], "page": 2},
+            root="http://foo/",
+            method="POST",
+        )
+
+        mock__request.assert_called()
+
+        next_link = [link for link in response["links"] if link["rel"] == "next"][0]
+
+        self.assertEqual(
+            next_link,
+            {
+                "method": "POST",
+                "rel": "next",
+                "href": "http://foo/search",
+                "title": "Next page",
+                "type": "application/geo+json",
+                "body": {"collections": ["S2_MSI_L1C"], "page": 3},
+            },
+        )
diff --git a/tests/units/test_utils.py b/tests/units/test_utils.py
index e260061e6..df75f956a 100644
--- a/tests/units/test_utils.py
+++ b/tests/units/test_utils.py
@@ -17,6 +17,7 @@
 # limitations under the License.
 import copy
+import logging
 import os
 import sys
 import unittest
@@ -41,6 +42,17 @@ class TestUtils(unittest.TestCase):
+    def setUp(self) -> None:
+        super(TestUtils, self).setUp()
+        setup_logging(verbose=1)
+
+    def tearDown(self) -> None:
+        super(TestUtils, self).tearDown()
+        # reset logging
+        logger = logging.getLogger("eodag")
+        logger.handlers = []
+        logger.level = 0
+
     def test_utils_get_timestamp(self):
         """get_timestamp must return a UNIX timestamp"""
         # Date to timestamp to date, this assumes the date is in UTC
diff --git a/tox.ini b/tox.ini
index 637acd63a..e5f2973eb 100644
--- a/tox.ini
+++ b/tox.ini
@@ -30,7 +30,7 @@ python =

 [testenv]
 commands =
-    mkdir -p test-reports
+    python -c "import os; os.makedirs('test-reports', exist_ok=True)"
     pytest -v --instafail \
         -n auto --dist loadscope \
         --cov=eodag \
diff --git a/utils/params_mapping_to_csv.py b/utils/params_mapping_to_csv.py
index 437fe40ea..9c4f230df 100644
--- a/utils/params_mapping_to_csv.py
+++ b/utils/params_mapping_to_csv.py
@@ -15,10 +15,13 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from __future__ import annotations
+
 import csv
 import json
 import logging
 import os
+from typing import List

 import requests
 from lxml import html
@@ -47,10 +50,10 @@

 def params_mapping_to_csv(
-    ogc_doc_url=OPENSEARCH_DOC_URL,
-    opensearch_csv_file_path=DEFAULT_OPENSEARCH_CSV_FILE_PATH,
-    extra_csv_file_path=DEFAULT_EXTRA_CSV_FILE_PATH,
-):
+    ogc_doc_url: str = OPENSEARCH_DOC_URL,
+    opensearch_csv_file_path: str = DEFAULT_OPENSEARCH_CSV_FILE_PATH,
+    extra_csv_file_path: str = DEFAULT_EXTRA_CSV_FILE_PATH,
+) -> None:
     """Get providers metadata mapping, with corresponding description from OGC documentation
     and writes it to csv files (for opensearch and extra params)
@@ -76,7 +79,7 @@
     )

     # list of lists of all parameters per provider
-    params_list_of_lists = []
+    params_list_of_lists: List[List[str]] = []
     for p in dag.providers_config.keys():
         if hasattr(dag.providers_config[p], "search") and hasattr(
             dag.providers_config[p].search, "metadata_mapping"
@@ -86,7 +89,7 @@
         )

     # union of params_list_of_lists
-    global_keys = sorted(list(set().union(*(params_list_of_lists))))
+    global_keys: List[str] = sorted(list(set().union(*(params_list_of_lists))))

     # csv fieldnames
     fieldnames = ["parameter"] + sorted(dag.providers_config.keys())
diff --git a/utils/product_types_information_to_csv.py b/utils/product_types_information_to_csv.py
index c423019f0..65dcd1ebb 100644
--- a/utils/product_types_information_to_csv.py
+++ b/utils/product_types_information_to_csv.py
@@ -15,9 +15,12 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from __future__ import annotations
+
 import csv
 import os
 import re
+from typing import Any, Dict, List

 from eodag.api.core import EODataAccessGateway
 from eodag.config import load_default_config
@@ -29,8 +32,8 @@

 def product_types_info_to_csv(
-    product_types_csv_file_path=DEFAULT_PRODUCT_TYPES_CSV_FILE_PATH,
-):
+    product_types_csv_file_path: str = DEFAULT_PRODUCT_TYPES_CSV_FILE_PATH,
+) -> None:
     """Get product types metadata and their availability for providers,
     and writes it to a csv file

     :param product_types_csv_file_path: (optional) Path to product types information csv output file
@@ -53,13 +56,15 @@
     dag = EODataAccessGateway()

     # restore os.environ
-    for k, v in os.environ.items():
+    for k, _ in os.environ.items():
         if eodag_env_pattern.match(k):
             os.environ.pop(k)
     os.environ.update(eodag_env_backup)

     product_types = dag.list_product_types(fetch_providers=False)
-    product_types_names = [product_type["ID"] for product_type in product_types]
+    product_types_names: List[str] = [
+        product_type["ID"] for product_type in product_types
+    ]
     metadata_params = list(k for k in product_types[0].keys() if k != "ID")

     # csv fieldnames
@@ -73,7 +78,7 @@
     product_types_writer.writeheader()

     # create product types table rows
-    product_types_rows = {}
+    product_types_rows: Dict[str, Any] = {}
     for product_type_name in product_types_names:
         product_types_rows[product_type_name] = {"product type": product_type_name}
         for metadata_param in metadata_params: