diff --git a/.circleci/config.yml b/.circleci/config.yml index 89e4f58e..d74b8e9c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -12,7 +12,7 @@ common: &common - run: name: run pre-commit command: | - if [[ "$CIRCLE_JOB" == "python-3.6" ]]; then + if [[ "$CIRCLE_JOB" == "python-3.7" ]]; then ~/.local/bin/pre-commit run --all-files fi - run: @@ -25,13 +25,6 @@ common: &common when: always jobs: - "python-2.7": - <<: *common - docker: - - image: circleci/python:2.7 - environment: - - TOXENV=py27 - "python-3.6": <<: *common docker: @@ -94,7 +87,6 @@ workflows: build_and_deploy: jobs: - benchmark - - "python-2.7" - "python-3.6" - "python-3.7": filters: # required since `deploy` has tag filters AND requires `build` diff --git a/.gitignore b/.gitignore index 7b7e4080..085ebce9 100644 --- a/.gitignore +++ b/.gitignore @@ -104,4 +104,5 @@ ENV/ .pytest_cache .benchmarks/ -tests/benchmarks/data/* \ No newline at end of file +tests/benchmarks/data/* +tests/fixtures/mask* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 140fd1c8..509349ef 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,20 +1,17 @@ - repos: - - repo: 'https://github.com/ambv/black' - # 18.6b1 - rev: ed50737290662f6ef4016a7ea44da78ee1eff1e2 + repo: 'https://github.com/psf/black' + rev: stable hooks: - id: black args: ['--safe'] - language_version: python3.6 + language_version: python3.7 - repo: 'https://github.com/pre-commit/pre-commit-hooks' - # v1.3.0 - rev: a6209d8d4f97a09b61855ea3f1fb250f55147b8b + rev: v2.4.0 hooks: - id: flake8 - language_version: python3.6 + language_version: python3.7 args: [ # E501 let black handle all line length decisions # W503 black conflicts with "line break before operator" rule @@ -26,9 +23,16 @@ repos: rev: 22d3ccf6cf91ffce3b16caa946c155778f0cb20f hooks: - id: pydocstyle - language_version: python3.6 + language_version: python3.7 args: [ # Check for docstring presence only '--select=D1', # Don't require docstrings for 
tests '--match=(?!test).*\.py'] + + - + repo: https://github.com/pre-commit/mirrors-mypy + rev: 'v0.770' + hooks: + - id: mypy + args: [--no-strict-optional, --ignore-missing-imports] \ No newline at end of file diff --git a/CHANGES.txt b/CHANGES.txt index 94607171..bed0b364 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,3 +1,73 @@ +2.0a7 (2020-05-17) +------------------ +- allow reading high resolution part of a raster (by making height, width args optional) +- add `max_size` option in `rio_tiler.reader.part` to set a maximum output size when height and width are not set +- add point and area function in rio_tiler.io.cogeo +- fix width-height height-widht bug in `rio_tiler.reader.part` + +**depreciation** +- deprecated `out_window` option in favor of `window` in rio_tiler.reader._read + +2.0a6 (2020-05-06) +------------------ +- fix unwanted breacking change with `img_profiles.get` not allowing default values + +2.0a5 (2020-05-06) +------------------ +- make `rio_tiler.io.landsat8.tile` return Uint16 data and not float32 (#173) +- `rio_tiler.profiles.img_profiles` item access return `copy` of the items (#177) +- better colormap docs (#176, author @kylebarron) + +2.0a4 (2020-04-08) +------------------ +- add `rio_tiler.io.cogeo.info` to retrieve simple file metadata (no image statistics) +- add band metadata tag info in `rio_tiler.render.metadata` output +- add `rio_tiler.io.stac` STAC compliant rio_tiler.colormap.apply_discrete_cmap + +2.0a3 (2020-03-25) +------------------ +- only use `transform_bounds` when needed in rio_tiler.reader.part + +Breaking Changes: +- switch back to gdal/rasterio calculate_default_transform (#164). Thanks to Terracotta core developper @dionhaefner. +- refactor `rio_tiler.utils.get_vrt_transform` to get width and height input. 
+ +2.0a2 (2020-03-20) +------------------ +- Fall back to gdal/rasterio calculate_default_transform for dateline separation crossing dataset (ref #164) + +2.0a1 (2020-03-19) +------------------ +- added `reader.preview`, `reader.point` methods +- added multi_* functions to rio_tiler.reader to support multiple assets addresses +- added `rio_tiler.utils.has_mask_band` function +- added `rio_tiler.utils.get_overview_level` to calculate the overview level needed for partial reading. +- added type hints +- added scale, offsets, colormap, datatype and colorinterp in reader.metadata output (#158) +- new `rio_tiler.colormap` submodule +- added `unscale` options to rio_tiler.reader._read function apply internal scale/offset (#157) + +Breaking Changes: +- removed python 2 support +- new package architecture (.io submodule) +- introduced new rio_tiler.reader functions (part, preview, metadata...) +- renamed rio_tiler.main to rio_tiler.io.cogeo +- bucket and prefixes are defined in rio_tiler.io.dataset.`{dataset}_parse` function from + AWS supported Public Dataset +- renamed `minimum_tile_cover` to `minimum_overlap` +- renamed `tile_edge_padding` to `padding` +- padding is set to 0 by default. +- use terracotta calculate_default_transform (see https://github.com/cogeotiff/rio-tiler/issues/56#issuecomment-442484999) +- colormaps are now have an alpha value +- `rio_tiler.utils.get_colormap` replaced by `rio_tiler.colormap.get_colormap` +- new `rio_tiler.colormap.get_colormap` supports only GDAL like colormap +- replaced `rio_tiler.utils.array_to_image` by `rio_tiler.utils.render` +- replaced `rio_tiler.utils.apply_cmap` by `rio_tiler.colormap.apply_cmap` +- replaced `rio_tiler.utils._apply_discrete_colormap` by `rio_tiler.colormap.apply_discrete_cmap` +- removed `histogram_bins` and `histogram_range` in options in metadata reader. 
+ Should now be passed in `hist_options` (e.g: hist_options={bins=10, range=(0, 10)}) +- remove alpha band value from output data array in tile/preview/metadata #127 + 1.4.0 (2020-02-19) ------------------ - Add Sentinel2-L2A support (#137) @@ -15,7 +85,7 @@ allow more custom VRT Warping. (ref: https://github.com/OSGeo/gdal/issues/1989#i - add `minimum_tile_cover` option to filter dataset not covering a certain amount of the tile. - add Sentinel-1 submodule -Breacking Changes: +Breaking Changes: - need rasterio>=1.1 1.2.11 (2019-09-18) @@ -35,62 +105,50 @@ in `rio_tiler.utils.get_vrt_transform` 1.2.8 (2019-07-08) ------------------ - - Add kwargs options in landsat8.tile, sentinel2.tile and cbers.tile functions to allow `resampling_method` and `tile_edge_padding` options forwarding to utils._tile_read. - Add Input (bounds_crs) and Output (dst_crs) option to `utils._tile_read` function (#108) 1.2.7 (2019-05-14) ------------------ - - Revert changes introduced in #106 (see #105) - Refactor tests 1.2.6 (2019-05-10) - DELETED ------------------ - - Use same resampling method for mask and for data (#105) - 1.2.5 (2019-05-08) ------------------ - - add tile_edge_padding option to be passed to rio_tiler.utils._tile_read to reduce sharp edges that occur due to resampling (#104) 1.2.4 (2019-04-16) ------------------ - - add histogram_range options to be passed to rio_tiler.{module}.metadata function (#102) 1.2.3 (2019-04-04) ------------------ - - add histogram_bins options to be passed to rio_tiler.{module}.metadata function (#98) Bug fixes: - - return index number with band descriptions (#99) 1.2.2 (2019-04-03) ------------------ - - add mercator min/max zoom info in metadata output from rio_tiler.utils.raster_get_stats (#96) - add band description (band name) in metadata output from rio_tiler.utils.raster_get_stats (#96) 1.2.1 (2019-03-26) ------------------ - - Replace rio-pansharpen dependency with a fork of the brovey function directly into `rio_tiler.utils` 
(rio-pansharpen is unmaintened and not compatible with rasterio>=1) (#94). 1.2.0 (2019-03-26) ------------------ - - `rio_tiler.utils.array_to_image`'s color_map arg can be a dictionary of discrete values (#91) -Breacking Changes: - +Breaking Changes: - `expr` argument is now a required option in `rio_tiler.utils.expression`. (#88) 1.1.4 (2019-03-11) @@ -99,9 +157,7 @@ Breacking Changes: 1.1.3 (2019-03-06) ------------------ - Bug fixes: - - Fix casting to integer for MAX_THREADS environment variable. 1.1.1 (2019-02-21) @@ -109,26 +165,21 @@ Bug fixes: - Minor typo correction and harmonization of the use of src/src_dst/src_path in `rio_tiler.utils` Bug fixes: - - Fix nodata handling in `utils.raster_get_stats` 1.1.0 (2019-02-15) ------------------ - - Allow options forwarding to `tile_read` from `main.tile` function (#86) - Add `resampling_method` options in `rio_tiler.utils.tile_read` to allow user set resampling. Default is now bilinear (#85) Bug fixes: - - Fix nodata option forwarding to tile_read when source is a path (#83) Refactoring: - - Refactor `rio_tiler.utils.tile_read` to reduce code complexity (#84) -Breacking Changes: - +Breaking Changes: - `indexes` options is now set to **None** in `rio_tiler.utils.tile_read`. Default will now be the dataset indexes. 
@@ -151,7 +202,7 @@ and to return more statistics (#66) - add GDAL compatible Image creation profile (#29) - add max threads number settings via "MAX_THREADS" environment variable (#71) -**Breacking Changes** +Breaking Changes: - update `metadata` structure returned by `landsat8.metadata`, `sentinel2.metadata`, `cbers.metadata` - force sentinel, landsat and cbers band names to be string and add validation (#65) - moved landsat utility functions from `rio_tiler.utils` to `rio_tiler.landsat8` @@ -192,7 +243,7 @@ and to return more statistics (#66) - fix custom nodata handling - fix performances issue -:Breacking Changes: +Breaking Changes: - removed alpha band options to select a custom alpha band number 1.0a8 (2018-06-20) @@ -211,7 +262,7 @@ and to return more statistics (#66) ------------------ - adds DatasetReader input option for utils.tile_read (do not close the dataset on each read) -:Breacking Changes: +Breaking Changes: - `utils.tile_band_worker` renamed to `utils.tile_read` - `main.tile` **rgb** option renamed **indexes** - `sentinel2.tile`, `landsat8.tile`, `cbers.tile` **rgb** option renamed **bands** @@ -222,7 +273,7 @@ and to return more statistics (#66) - adds utils.b64_encode_img function to encode an image object into a base64 string - add tiles profiles (jpeg, png, webp) based on https://github.com/mapnik/mapnik/wiki/Image-IO#default-output-details -:Breacking Changes: +Breaking Changes: - Refactor `rio_tiler.utils.array_to_img` to return PIL image object 1.0a3 (2018-02-05) @@ -234,19 +285,17 @@ and to return more statistics (#66) - add "expression" utility function - better nodata/mask/alpha band definition and support -:Breacking Changes: +Breaking Changes: - tile functions now return an associated mask (Landsat, Sentinel, CBERS, main) - remove nodata support in utils.image_to_array function - add mask support in utils.image_to_array function - utils.tile_band_worker will always return a (Band, Width, Height) array (e.g 1x256x256 or 3x256x256) - 
1.0a1 (2018-01-04) ------------------ - remove aws.py sub-module (replaced by main.py) - no default bands value for main.py tiles. - 1.0a.0 (2018-01-03) ------------------ - add colormap option in `utils.array_to_img` @@ -257,7 +306,7 @@ and to return more statistics (#66) - removing internal caching - update to rasterio 1.0a12 -:Breacking Changes: +Breaking Changes: - remove data value rescaling in `landsat8.tile` and `sentinel2.tile` - fix wrong lat/grid_square value in `utils.sentinel_parse_scene_id` - rename `utils.sentinel_min_max_worker` to `utils.band_min_max_worker` diff --git a/README.md b/README.md index cb69eea2..bf469eb7 100644 --- a/README.md +++ b/README.md @@ -13,8 +13,7 @@ Additional support is provided for the following satellite missions hosted on ** - [Landsat8](https://aws.amazon.com/fr/public-datasets/landsat) - [CBERS](https://registry.opendata.aws/cbers/) (**requester-pays**) -Rio-tiler supports Python 2.7 and 3.3-3.7. - +**Starting with version 2.0 rio-tiler only supports Python>=3.** ## Install @@ -22,7 +21,7 @@ You can install rio-tiler using pip ```bash $ pip install -U pip -$ pip install rio-tiler +$ pip install rio-tiler --pre ``` or install from source: @@ -36,9 +35,9 @@ $ pip install -e . ## Overview -Create tiles using one of these rio_tiler modules: `main`, `sentinel2`, `sentinel1`, `landsat8`, `cbers`. +Create tiles using one of these rio_tiler `io` submodules: `cogeo`, `sentinel2`, `sentinel1`, `landsat8`, `cbers`. -The `main` module can create mercator tiles from any raster source supported by Rasterio (i.e. local files, http, s3, gcs etc.). The mission specific modules make it easier to extract tiles from AWS S3 buckets (i.e. only a scene ID is required); They can also be used to return metadata. +The `rio_tiler.io.cogeo` module can create mercator tiles from any raster source supported by Rasterio (i.e. local files, http, s3, gcs etc.). The mission specific modules make it easier to extract tiles from AWS S3 buckets (i.e. 
only a scene ID is required); They can also be used to return metadata. Each tilling modules have a method to return image metadata (e.g bounds). @@ -47,9 +46,9 @@ Each tilling modules have a method to return image metadata (e.g bounds). Read a tile from a file over the internet ```python -from rio_tiler import main +from rio_tiler.io import cogeo -tile, mask = main.tile( +tile, mask = cogeo.tile( 'http://oin-hotosm.s3.amazonaws.com/5a95f32c2553e6000ce5ad2e/0/10edab38-1bdd-4c06-b83d-6e10ac532b7d.tif', 691559, 956905, @@ -66,19 +65,19 @@ print(mask.shape) Create image from tile ```python -from rio_tiler.utils import array_to_image +from rio_tiler.utils import render -buffer = array_to_image(tile, mask=mask) # this returns a buffer (PNG by default) +buffer = render(tile, mask=mask) # this returns a buffer (PNG by default) ``` Use creation options to match `mapnik` default ```python -from rio_tiler.utils import array_to_image +from rio_tiler.utils import render from rio_tiler.profiles import img_profiles options = img_profiles["webp"] -buffer = array_to_image(tile, mask=mask, img_format="webp", **options) +buffer = render(tile, mask=mask, img_format="webp", **options) ``` Write image to file @@ -91,9 +90,9 @@ with open("my.png", "wb") as f: Get a Sentinel2 tile and its nodata mask. ```python -from rio_tiler import sentinel2 +from rio_tiler.io import sentinel2 -tile, mask = sentinel2.tile('S2A_tile_20170729_19UDP_0', 77, 89, 8) +tile, mask = sentinel2.tile('S2A_L1C_20170729_19UDP_0', 77, 89, 8) print(tile.shape) > (3, 256, 256) ``` @@ -101,7 +100,7 @@ print(tile.shape) Get bounds for a Landsat scene (WGS84). ```python -from rio_tiler import landsat8 +from rio_tiler.io import landsat8 landsat8.bounds('LC08_L1TP_016037_20170813_20170814_01_RT') > {'bounds': [-81.30836, 32.10539, -78.82045, 34.22818], @@ -111,15 +110,12 @@ landsat8.bounds('LC08_L1TP_016037_20170813_20170814_01_RT') Get metadata of a Landsat scene (i.e. 
percentiles (pc) min/max values, histograms, and bounds in WGS84) . ```python -from rio_tiler import landsat8 +from rio_tiler.io import landsat8 landsat8.metadata('LC08_L1TP_016037_20170813_20170814_01_RT', pmin=5, pmax=95) { 'sceneid': 'LC08_L1TP_016037_20170813_20170814_01_RT', - 'bounds': { - 'value': (-81.30844102941015, 32.105321365706104, -78.82036599673634, 34.22863519772504), - 'crs': '+init=EPSG:4326' - }, + 'bounds':(-81.30844102941015, 32.105321365706104, -78.82036599673634, 34.22863519772504), 'statistics': { '1': { 'pc': [1251.297607421875, 5142.0126953125], @@ -150,6 +146,41 @@ landsat8.metadata('LC08_L1TP_016037_20170813_20170814_01_RT', pmin=5, pmax=95) The primary purpose for calculating minimum and maximum values of an image is to rescale pixel values from their original range (e.g. 0 to 65,535) to the range used by computer screens (i.e. 0 and 255) through a linear transformation. This will make images look good on display. +#### Working with SpatioTemporal Asset Catalog (STAC) + +In rio-tiler v2, we added a `rio_tiler.io.stac` submodule to allow tile/metadata fetching of assets withing a STAC item. + +```python +from typing import Dict +from rio_tiler.io import stac as STACReader + +item: Dict = ... # a STAC Item + +# Name of assets to read +assets = ["red", "green", "blue"] + +tile, mask = STACReader.tile(item, assets, x, y, z, tilesize=256) + +print(tile.shape) +> (3, 256, 256) +``` + +#### Working with multiple assets + +`rio_tiler.reader` submodule has `multi_*` functions (tile, preview, point, metadata) allowing to fetch and merge info +from multiple dataset (think about multiple bands stored in separated files). 
+ +```python +from typing import Dict +from rio_tiler import reader + +assets = ["b1.tif", "b2.tif", "b3.tif"] +tile, mask = reader.multi_tile(assets, x, y, z, tilesize=256) + +print(tile.shape) +> (3, 256, 256) +``` + ## Requester-pays Buckets On AWS, `sentinel2`, `sentinel1`, and `cbers` dataset are stored in a `requester-pays` bucket, meaning the cost of GET, LIST requests will be charged to the users. For rio-tiler to work with those buckets, you'll need to set `AWS_REQUEST_PAYER="requester"` in your environement. @@ -162,6 +193,38 @@ When performing partial reading of JPEG2000 dataset GDAL (rasterio backend libra Ref: [Do you really want people using your data](https://medium.com/@_VincentS_/do-you-really-want-people-using-your-data-ec94cd94dc3f) blog post. + +## Create an AWS Lambda package + +The easiest way to make sure the package will work on AWS is to use docker + +```dockerfile +FROM lambci/lambda:build-python3.7 + +ENV LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 CFLAGS="--std=c99" + +RUN pip3 install rio-tiler --no-binary numpy -t /tmp/python -U + +RUN cd /tmp/python && zip -r9q /tmp/package.zip * +``` + +Ref: https://github.com/vincentsarago/simple-rio-lambda + + +## Plugins +- [rio-tiler-mosaic](https://github.com/cogeotiff/rio-tiler-mosaic) +- [rio-tiler-mvt](https://github.com/cogeotiff/rio-tiler-mvt) +- [rio-tiler-crs](https://github.com/cogeotiff/rio-tiler-crs) +- [rio-viz](https://github.com/developmentseed/rio-viz) + +## Implementations +- [remotepixel-tiler](https://github.com/RemotePixel/remotepixel-tiler) +- [CosmiQ/solaris](https://github.com/CosmiQ/solaris) +- [lambda-tiler](https://github.com/vincentsarago/lambda-tiler) +- [cogeo-mosaic](https://github.com/developmentseed/cogeo-mosaic) +- [titiler](https://github.com/developmentseed/titiler) + + ## Contribution & Development Issues and pull requests are more than welcome. 
@@ -196,30 +259,3 @@ See [AUTHORS.txt](https://github.com/cogeotiff/rio-tiler/blob/master/AUTHORS.txt See [CHANGES.txt](https://github.com/cogeotiff/rio-tiler/blob/master/CHANGES.txt). - -## Create an AWS Lambda package - -The easiest way to make sure the package will work on AWS is to use docker - -```dockerfile -FROM lambci/lambda:build-python3.6 - -ENV LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 CFLAGS="--std=c99" - -RUN pip3 install rio-tiler --no-binary numpy -t /tmp/python -U - -RUN cd /tmp/python && zip -r9q /tmp/package.zip * -``` - -Ref: https://github.com/vincentsarago/simple-rio-lambda - - -## Plugins -- [rio-tiler-mosaic](https://github.com/cogeotiff/rio-tiler-mosaic) -- [rio-tiler-mvt](https://github.com/cogeotiff/rio-tiler-mvt) - -## Implementations -- [remotepixel-tiler](https://github.com/RemotePixel/remotepixel-tiler) -- [CosmiQ/solaris](https://github.com/CosmiQ/solaris) -- [lambda-tiler](https://github.com/vincentsarago/lambda-tiler) -- [cogeo-mosaic](https://github.com/developmentseed/cogeo-mosaic) diff --git a/docs/colormap.md b/docs/colormap.md new file mode 100644 index 00000000..eeb73bd8 --- /dev/null +++ b/docs/colormap.md @@ -0,0 +1,39 @@ +# Colormaps + +Rio-tiler includes many colormaps, some derived from Matplotlib and some custom +ones that are commonly-used with raster data. 
+ +You can load a colormap with `rio_tiler.colormap.get_colormap`, and then pass it +to `rio_tiler.utils.render`: + +```py +from rio_tiler.colormap import get_colormap +from rio_tiler.utils import render + +colormap = get_colormap('cfastie') +render(tile, mask, colormap=colormap) +``` + +![](img/custom.png) +![](img/perceptually_uniform_sequential.png) +![](img/sequential.png) +![](img/sequential_(2).png) +![](img/diverging.png) +![](img/cyclic.png) +![](img/qualitative.png) +![](img/miscellaneous.png) + +### Refs +- Matplotlib: https://matplotlib.org/3.1.0/tutorials/colors/colormaps.html +- cfastie: http://publiclab.org/notes/cfastie/08-26-2014/new-ndvi-colormap +- rplumbo: https://github.com/cogeotiff/rio-tiler/pull/90 +- schwarzwald: http://soliton.vm.bytemark.co.uk/pub/cpt-city/wkp/schwarzwald/tn/wiki-schwarzwald-cont.png.index.html + +### Update images for new colormaps + +To regenerate these images for new colormaps, update the list of colormaps at +the top of `scripts/colormap_thumb.py` and then run + +```bash +python scripts/colormap_thumb.py +``` diff --git a/docs/img/custom.png b/docs/img/custom.png new file mode 100644 index 00000000..82e668aa Binary files /dev/null and b/docs/img/custom.png differ diff --git a/docs/img/cyclic.png b/docs/img/cyclic.png new file mode 100644 index 00000000..249af0f8 Binary files /dev/null and b/docs/img/cyclic.png differ diff --git a/docs/img/diverging.png b/docs/img/diverging.png new file mode 100644 index 00000000..912a3771 Binary files /dev/null and b/docs/img/diverging.png differ diff --git a/docs/img/miscellaneous.png b/docs/img/miscellaneous.png new file mode 100644 index 00000000..ff4b8c95 Binary files /dev/null and b/docs/img/miscellaneous.png differ diff --git a/docs/img/perceptually_uniform_sequential.png b/docs/img/perceptually_uniform_sequential.png new file mode 100644 index 00000000..f839cdee Binary files /dev/null and b/docs/img/perceptually_uniform_sequential.png differ diff --git a/docs/img/qualitative.png 
b/docs/img/qualitative.png new file mode 100644 index 00000000..eeb772f8 Binary files /dev/null and b/docs/img/qualitative.png differ diff --git a/docs/img/sequential.png b/docs/img/sequential.png new file mode 100644 index 00000000..f2ec3835 Binary files /dev/null and b/docs/img/sequential.png differ diff --git a/docs/img/sequential_(2).png b/docs/img/sequential_(2).png new file mode 100644 index 00000000..1a3bcc5b Binary files /dev/null and b/docs/img/sequential_(2).png differ diff --git a/docs/scripts/colormap_thumb.py b/docs/scripts/colormap_thumb.py new file mode 100644 index 00000000..b1060ca4 --- /dev/null +++ b/docs/scripts/colormap_thumb.py @@ -0,0 +1,183 @@ +""" +colormap_thumb.py: Create colormap thumbnails for documentation + +This file is derived from the matplotlib documentation. +https://matplotlib.org/tutorials/colors/colormaps.html +""" +from pathlib import Path + +import matplotlib.pyplot as plt +import numpy as np +from matplotlib.colors import ListedColormap +from rio_tiler.colormap import get_colormap, make_lut + + +cmaps = [ + ("Custom", ["cfastie", "rplumbo", "schwarzwald"]), + ( + "Perceptually Uniform Sequential", + ["viridis", "plasma", "inferno", "magma", "cividis"], + ), + ( + "Sequential", + [ + "Greys", + "Purples", + "Blues", + "Greens", + "Oranges", + "Reds", + "YlOrBr", + "YlOrRd", + "OrRd", + "PuRd", + "RdPu", + "BuPu", + "GnBu", + "PuBu", + "YlGnBu", + "PuBuGn", + "BuGn", + "YlGn", + ], + ), + ( + "Sequential (2)", + [ + "binary", + "gist_yarg", + "gist_gray", + "gray", + "bone", + "pink", + "spring", + "summer", + "autumn", + "winter", + "cool", + "Wistia", + "hot", + "afmhot", + "gist_heat", + "copper", + ], + ), + ( + "Diverging", + [ + "PiYG", + "PRGn", + "BrBG", + "PuOr", + "RdGy", + "RdBu", + "RdYlBu", + "RdYlGn", + "Spectral", + "coolwarm", + "bwr", + "seismic", + ], + ), + ("Cyclic", ["twilight", "twilight_shifted", "hsv"]), + ( + "Qualitative", + [ + "Pastel1", + "Pastel2", + "Paired", + "Accent", + "Dark2", + "Set1", + 
"Set2", + "Set3", + "tab10", + "tab20", + "tab20b", + "tab20c", + ], + ), + ( + "Miscellaneous", + [ + "flag", + "prism", + "ocean", + "gist_earth", + "terrain", + "gist_stern", + "gnuplot", + "gnuplot2", + "CMRmap", + "cubehelix", + "brg", + "gist_rainbow", + "rainbow", + "jet", + "nipy_spectral", + "gist_ncar", + ], + ), +] + + +gradient = np.linspace(0, 1, 256) +gradient = np.vstack((gradient, gradient)) + + +def make_colormap(name): + """Use rio-tiler colormap to create matplotlib colormap + """ + cmap = make_lut(get_colormap(name)) + # rescale to 0-1 + return ListedColormap(cmap / 255, name=name) + + +def plot_color_gradients(cmap_category, cmap_list): + """Make + """ + # Create figure and adjust figure height to number of colormaps + nrows = len(cmap_list) + figh = 0.35 + 0.15 + (nrows + (nrows - 1) * 0.1) * 0.22 + fig, axes = plt.subplots(nrows=nrows, figsize=(6.4, figh)) + fig.subplots_adjust(top=1 - 0.35 / figh, bottom=0.15 / figh, left=0.2, right=0.99) + + axes[0].set_title(cmap_category + " colormaps", fontsize=14) + + for ax, name in zip(axes, cmap_list): + ax.imshow(gradient, aspect="auto", cmap=make_colormap(name)) + ax.text( + -0.01, + 0.5, + name, + va="center", + ha="right", + fontsize=10, + transform=ax.transAxes, + ) + + # Turn off *all* ticks & spines, not just the ones with colormaps. + for ax in axes: + ax.set_axis_off() + + return fig + + +def main(): + """Create thumbnails""" + for cmap_category, cmap_list in cmaps: + plot_color_gradients(cmap_category, cmap_list) + + # Export fig + out_path = ( + Path(__file__).parents[0] + / ".." 
+ / "img" + / (cmap_category.replace(" ", "_").lower() + ".png") + ) + out_path.parents[0].mkdir(exist_ok=True) + plt.savefig(out_path, dpi=200) + + +if __name__ == "__main__": + main() diff --git a/docs/v2_migration.md b/docs/v2_migration.md new file mode 100644 index 00000000..9f44fc0c --- /dev/null +++ b/docs/v2_migration.md @@ -0,0 +1,267 @@ +# rio-tiler 1.0 to 2.0 migration + +rio-tiler version 2.0 introduced a lot of breaking changes [see](https://github.com/cogeotiff/rio-tiler/blob/f55134b383b14e5ed0a79f3dc27da0d9adbb21a4/CHANGES.txt#L10-L26). This documents aims to help with migrating your code to use rio-tiler 2.0. + +## Python **3** only + +First and main change is the drop of python 2 support. To be honest we first started this lib as a python 3 only and then switch back to support python 2. Now we are in 2020 and python 2 is [officially dead](https://pythonclock.org) we decided to remove python 2 support and to continue with only python 3. + +If you need help moving from python 2 to 3 checkout the official [doc](https://docs.python.org/3/howto/pyporting.html). + +### Type hints + +By switching to python 3 we also embrace new code style with the adoption of type hints: + + Python 3.6+ has support for optional "type hints". + These "type hints" are a new syntax (since Python 3.6+) that allow declaring the type of a variable. + By declaring types for your variables, editors and tools can give you better support. +from: https://fastapi.tiangolo.com/python-types/ + +Other docs: +- https://kishstats.com/python/2019/01/07/python-type-hinting.html +- https://mypy.readthedocs.io/en/stable/cheat_sheet_py3.html + +Example: +```python +def get_vrt_transform( + src_dst: Union[DatasetReader, DatasetWriter, WarpedVRT], + bounds: Tuple[float, float, float, float], + dst_crs: CRS = constants.WEB_MERCATOR_CRS, +) -> Tuple[Affine, int, int]: +``` + +## Rasterio >= **1.1.3** + +Recent changes in rasterio makes masking more reliable. 
+ +## New **rio_tiler.io** submodules + +Mostly to gain in code clarity, we moved the mission specific submodules (e.g. rio_tiler.landsat8) to a `rio_tiler.io` submodule. `rio_tiler.main` has also been renamed `rio_tiler.io.cogeo`. + +```python +# v1 +from rio_tiler.main import tile as cogTiler +tile, mask = cogTiler('my_tif.tif', 691559, 956905, 21, tilesize=256) + +# v2 +from rio_tiler.io.cogeo import tile as cogTiler +tile, mask = cogTiler('my_tif.tif', 691559, 956905, 21, tilesize=256) +``` + +```python +# v1 +from rio_tiler import landsat8 +landsat8.bounds('LC08_L1TP_016037_20170813_20170814_01_RT') + +# v2 +from rio_tiler.io import landsat8 +landsat8.bounds('LC08_L1TP_016037_20170813_20170814_01_RT') +``` + +## **rio_tiler.reader** + +Internal tile/data reading functions have been refactored and moved to a new `rio_tiler.reader` submodule. + +### tile + +In *rio_tiler==1* most of the magic was happening in [`rio_tiler.utils._tile_read`](https://github.com/cogeotiff/rio-tiler/blob/master/rio_tiler/utils.py#L337-L349). In *rio_tiler==2* this function is now split in two, `rio_tiler.reader.part` and `rio_tiler_reader._read`, to reduce code reutilisation and to make the code more robust. The `part` function now takes `height` and `width` instead of a unique `tilesize` to specify the output array size. + +To ease the transition we added a `rio_tiler.reader.tile` function. + +Note: The new `rio_tiler.reader.part` function enables to perform non-squared data cropping by passing output width and height (instead of just tilesize). 
+ +```python +# v1 +with rasterio.open("my_tif.tif") as src_dst: + # get tile bounds and read raster + mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z) + tile_bounds = mercantile.xy_bounds(mercator_tile) + + t, m = rio_tiler.utils._tile_read(src, tile_bounds, 256) + +# v2 +with rasterio.open("my_tif.tif") as src_dst: + t, m = rio_tiler.reader.tile(src_dst, tile_x, tile_y, tile_z, 256) # Will check if tile is valid + +# Or +with rasterio.open("my_tif.tif") as src_dst: + mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z) + tile_bounds = mercantile.xy_bounds(mercator_tile) + + t, m = rio_tiler.reader.part(src_dst, tile_bounds, 256, 256) +``` + +*Options changes* +- `tile_edge_padding` -> `padding`, and set to **0** by default +- `minimum_tile_cover` -> `minimum_overlap` +- `unscale` (**New**): add ability to apply scale and offset to the data (Default: False) + +```python +# v1 +with rasterio.open("my_tif.tif") as src_dst: + mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z) + tile_bounds = mercantile.xy_bounds(mercator_tile) + t, m = rio_tiler.utils._tile_read(src, tile_bounds, 256, tile_edge_padding=4, minimum_tile_cover=0.3) + +# v2 +with rasterio.open("my_tif.tif") as src_dst: + t, m = rio_tiler.reader.tile(src_dst, tile_x, tile_y, tile_z, 256, padding=4, minimum_overlap=0.3) +``` + +#### Alpha band + +Since the first version, rio-tiler returns a tuple of **(data, mask)** in most of the `reading` function. This design was made early and without thinking about datasets with an alpha channel, which resulted in issues like [#126](https://github.com/cogeotiff/rio-tiler/pull/126), where a user gets a 4 bands data array + a mask (instead of 3 bands + mask). In *rio-tiler=2.*, when no `indexes` options are passed, we remove the alpha channel from the output data array. 
+ +```python +# v1 +with rasterio.open("my_tif_alpha.tif") as src_dst: + t, m = rio_tiler.utils._tile_read(src, tile_bounds, 256, indexes=(1,2,3)) + +# v2 +with rasterio.open("my_tif_alpha.tif") as src_dst: + # because rio-tiler will remove the alpha band we don't need to use the indexes option + t, m = rio_tiler.reader.tile(src_dst, tile_x, tile_y, tile_z, 256) +``` + +### metadata + +`rio_tiler.utils._raster_get_stats` has been replaced by `rio_tiler.reader.metadata` which uses the new `reader.part` and `reader.preview` functions. Meaning that now you can get metadata for a specific area by passing a bbox. To limit the data transfer (with the idea of getting the metadata from the COG overviews) we use only the `max_size` options, meaning the `overview_level` options have been removed (at least for version 2.0.0). + +```python +# v1 +import rio_tiler +with rasterio.open("my_tif.tif") as src_dst: + meta = rio_tiler.utils._raster_get_stats(src_dst) + +# v2 +with rasterio.open("my_tif.tif") as src_dst: + rio_tiler.reader.metadata(src_dst) +``` + +*Options changes* +- removed `histogram_bins` and `histogram_range` which should now be passed in `hist_options` (e.g: hist_options={bins=10, range=(0, 10)}) +- removed `overview_level` +- added `bounds` + +*Output* + +The output has also been updated. The new `metadata` output doesn't return min/max zoom and bounds is return in WGS84 by default. + + +```python +# v1 +with rasterio.open("my_tif.tif") as src_dst: + rio_tiler.utils._raster_get_stats(src_dst) + +> { + "bounds": { + "value": [-119.05915661478785, 13.102845359730287, -84.91821332299578, 33.995073647795806], + "crs": "EPSG:4326" + }, + "minzoom": 3, + "maxzoom": 12, + "band_descriptions": [ + [1, "band1"] + ], + "statistics": { + "1": { + "pc": [1, 17], + "min": 1, + "max": 18, + "std": 4.418616203143802, + "histogram": [ + [...], + [...] 
+ ] + } + } +} + +# v2 +with rasterio.open("my_tif.tif") as src_dst: + rio_tiler.reader.metadata(src_dst) + +> { + "bounds": [-119.05915661478785, 13.102845359730287, -84.91821332299578, 33.995073647795806], + "statistics": { + "1": { + "pc": [1, 16], + "min": 1, + "max": 18, + "std": 4.069636227214257, + "histogram": [ + [...], + [...] + ] + } + }, + "nodata_type": "Nodata", + "band_descriptions": [[1, "band1"]], + "dtype": "int8", + "colorinterp": ["palette"], + "colormap": { + "0": [0, 0, 0, 0], + "1": [0, 61, 0, 255], + "2": [148, 56, 112, 255], + ... + } +} +``` + +## colormaps + +In addition to a new colormap specific submodule (`rio_tiler.colormap`), in *rio-tiler==2*, colormaps are now RGBA values. + +We also removed `PIL` colormap compatibility. + +```python +# v1 +cmap = rio_tiler.utils.get_colormap("viridis", format="gdal") +print(cmap[0]) +> [68, 1, 84] + +# v2 +cmap = rio_tiler.colormap.get_colormap("viridis") +print(cmap[0]) +> [68, 1, 84, 255] +``` + +## render + +In *rio-tiler==1*, to create an image blob from an array we used the `rio_tiler.utils.array_to_image` function. We have renamed and slightly refactored the function but it works the same. + +```python +# v1 +img = rio_tiler.utils.array_to_image(tile, mask, img_format="PNG") + +# v2 +img = rio_tiler.utils.render(tile, mask, img_format="PNG") +``` + +## Mission specific changes + +Each `rio_tiler.io.{mission}` **scene id parser** (e.g. cbers_parser) has been refactored and now returns AWS S3 path information. 
+ +```python +rio_tiler.io.landsat8.landsat_parser("LC08_L1TP_016037_20170813_20170814_01_RT")) +{ + "sensor": "C", + "satellite": "08", + "processingCorrectionLevel": "L1TP", + "path": "016", + "row": "037", + "acquisitionYear": "2017", + "acquisitionMonth": "08", + "acquisitionDay": "13", + "processingYear": "2017", + "processingMonth": "08", + "processingDay": "14", + "collectionNumber": "01", + "collectionCategory": "RT", + "scene": "LC08_L1TP_016037_20170813_20170814_01_RT", + "date": "2017-08-13", +--> "scheme": "s3", +--> "bucket": "landsat-pds", +--> "prefix": "c1/L8/016/037/LC08_L1TP_016037_20170813_20170814_01_RT" +} +``` diff --git a/rio_tiler/cbers.py b/rio_tiler/cbers.py deleted file mode 100644 index 433f2930..00000000 --- a/rio_tiler/cbers.py +++ /dev/null @@ -1,268 +0,0 @@ -"""rio_tiler.cbers: cbers processing.""" - -import os -import re -import multiprocessing -from functools import partial -from concurrent import futures - -import numpy as np - -import mercantile -import rasterio -from rasterio.warp import transform_bounds - -from rio_tiler import utils -from rio_tiler.errors import TileOutsideBounds, InvalidBandName, InvalidCBERSSceneId - -CBERS_BUCKET = "s3://cbers-pds" -CBERS_BANDS = ["1", "2", "3", "4", "5", "6", "7", "8", "13", "14", "15", "16"] - -# ref: https://docs.python.org/3/library/concurrent.futures.html#threadpoolexecutor -MAX_THREADS = int(os.environ.get("MAX_THREADS", multiprocessing.cpu_count() * 5)) - - -def _cbers_parse_scene_id(sceneid): - """Parse CBERS scene id. - - Attributes - ---------- - sceneid : str - CBERS sceneid. - - Returns - ------- - out : dict - dictionary with metadata constructed from the sceneid. 
- - e.g: - _cbers_parse_scene_id('CBERS_4_PAN5M_20171121_057_094_L2') - { - "acquisitionDay": "21", - "acquisitionMonth": "11", - "acquisitionYear": "2017", - "instrument": "PAN5M", - "key": "CBERS4/PAN5M/057/094/CBERS_4_PAN5M_20171121_057_094_L2", - "path": "057", - "processingCorrectionLevel": "L2", - "row": "094", - "mission": "4", - "scene": "CBERS_4_PAN5M_20171121_057_094_L2", - "reference_band": "1", - "bands": ["1"], - "rgb": ("1", "1", "1"), - "satellite": "CBERS", - } - - """ - if not re.match(r"^CBERS_4_\w+_[0-9]{8}_[0-9]{3}_[0-9]{3}_L[0-9]$", sceneid): - raise InvalidCBERSSceneId("Could not match {}".format(sceneid)) - - cbers_pattern = ( - r"(?P\w+)_" - r"(?P[0-9]{1})" - r"_" - r"(?P\w+)" - r"_" - r"(?P[0-9]{4})" - r"(?P[0-9]{2})" - r"(?P[0-9]{2})" - r"_" - r"(?P[0-9]{3})" - r"_" - r"(?P[0-9]{3})" - r"_" - r"(?PL[0-9]{1})$" - ) - - meta = None - match = re.match(cbers_pattern, sceneid, re.IGNORECASE) - if match: - meta = match.groupdict() - - path = meta["path"] - row = meta["row"] - instrument = meta["instrument"] - meta["key"] = "CBERS4/{}/{}/{}/{}".format(instrument, path, row, sceneid) - - meta["scene"] = sceneid - - instrument_params = { - "MUX": { - "reference_band": "6", - "bands": ["5", "6", "7", "8"], - "rgb": ("7", "6", "5"), - }, - "AWFI": { - "reference_band": "14", - "bands": ["13", "14", "15", "16"], - "rgb": ("15", "14", "13"), - }, - "PAN10M": { - "reference_band": "4", - "bands": ["2", "3", "4"], - "rgb": ("3", "4", "2"), - }, - "PAN5M": {"reference_band": "1", "bands": ["1"], "rgb": ("1", "1", "1")}, - } - meta["reference_band"] = instrument_params[instrument]["reference_band"] - meta["bands"] = instrument_params[instrument]["bands"] - meta["rgb"] = instrument_params[instrument]["rgb"] - - return meta - - -def bounds(sceneid): - """ - Retrieve image bounds. - - Attributes - ---------- - sceneid : str - CBERS sceneid. - - Returns - ------- - out : dict - dictionary with image bounds. 
- - """ - scene_params = _cbers_parse_scene_id(sceneid) - cbers_address = "{}/{}".format(CBERS_BUCKET, scene_params["key"]) - - with rasterio.open( - "{}/{}_BAND{}.tif".format( - cbers_address, sceneid, scene_params["reference_band"] - ) - ) as src: - bounds = transform_bounds(src.crs, "epsg:4326", *src.bounds, densify_pts=21) - - info = {"sceneid": sceneid} - info["bounds"] = list(bounds) - - return info - - -def metadata(sceneid, pmin=2, pmax=98, **kwargs): - """ - Return band bounds and statistics. - - Attributes - ---------- - sceneid : str - CBERS sceneid. - pmin : int, optional, (default: 2) - Histogram minimum cut. - pmax : int, optional, (default: 98) - Histogram maximum cut. - kwargs : optional - These are passed to 'rio_tiler.utils.raster_get_stats' - e.g: histogram_bins=20, dst_crs='epsg:4326' - - Returns - ------- - out : dict - Dictionary with bounds and bands statistics. - - """ - scene_params = _cbers_parse_scene_id(sceneid) - cbers_address = "{}/{}".format(CBERS_BUCKET, scene_params["key"]) - bands = scene_params["bands"] - ref_band = scene_params["reference_band"] - - info = {"sceneid": sceneid} - - addresses = [ - "{}/{}_BAND{}.tif".format(cbers_address, sceneid, band) for band in bands - ] - _stats_worker = partial( - utils.raster_get_stats, - indexes=[1], - nodata=0, - overview_level=2, - percentiles=(pmin, pmax), - **kwargs - ) - with futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor: - responses = list(executor.map(_stats_worker, addresses)) - - info["bounds"] = [r["bounds"] for b, r in zip(bands, responses) if b == ref_band][0] - info["statistics"] = { - b: v for b, d in zip(bands, responses) for k, v in d["statistics"].items() - } - return info - - -def tile(sceneid, tile_x, tile_y, tile_z, bands=None, tilesize=256, **kwargs): - """ - Create mercator tile from CBERS data. - - Attributes - ---------- - sceneid : str - CBERS sceneid. - tile_x : int - Mercator tile X index. - tile_y : int - Mercator tile Y index. 
- tile_z : int - Mercator tile ZOOM level. - bands : tuple, int, optional (default: None) - Bands index for the RGB combination. If None uses default - defined for the instrument - tilesize : int, optional (default: 256) - Output image size. - kwargs: dict, optional - These will be passed to the 'rio_tiler.utils._tile_read' function. - - Returns - ------- - data : numpy ndarray - mask: numpy array - - """ - scene_params = _cbers_parse_scene_id(sceneid) - - if not bands: - bands = scene_params["rgb"] - - if not isinstance(bands, tuple): - bands = tuple((bands,)) - - for band in bands: - if band not in scene_params["bands"]: - raise InvalidBandName( - "{} is not a valid band name for {} CBERS instrument".format( - band, scene_params["instrument"] - ) - ) - - cbers_address = "{}/{}".format(CBERS_BUCKET, scene_params["key"]) - - with rasterio.open( - "{}/{}_BAND{}.tif".format( - cbers_address, sceneid, scene_params["reference_band"] - ) - ) as src: - bounds = transform_bounds(src.crs, "epsg:4326", *src.bounds, densify_pts=21) - - if not utils.tile_exists(bounds, tile_z, tile_x, tile_y): - raise TileOutsideBounds( - "Tile {}/{}/{} is outside image bounds".format(tile_z, tile_x, tile_y) - ) - - mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z) - tile_bounds = mercantile.xy_bounds(mercator_tile) - - addresses = [ - "{}/{}_BAND{}.tif".format(cbers_address, sceneid, band) for band in bands - ] - - _tiler = partial( - utils.tile_read, bounds=tile_bounds, tilesize=tilesize, nodata=0, **kwargs - ) - with futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor: - data, masks = zip(*list(executor.map(_tiler, addresses))) - mask = np.all(masks, axis=0).astype(np.uint8) * 255 - - return np.concatenate(data), mask diff --git a/rio_tiler/cmap/__init__.py b/rio_tiler/cmap/__init__.py index 4e073f8f..944d1b10 100644 --- a/rio_tiler/cmap/__init__.py +++ b/rio_tiler/cmap/__init__.py @@ -8,7 +8,7 @@ def generate_maps() -> None: x = np.linspace(0, 1, 256) for name, 
colormap in cm.cmap_d.items(): - cmap_vals = colormap(x)[:, :-1] # cut off alpha + cmap_vals = colormap(x)[:, :] cmap_uint8 = (cmap_vals * 255).astype('uint8') np.save(f'{name.lower()}.npy', cmap_uint8) diff --git a/rio_tiler/cmap/accent.npy b/rio_tiler/cmap/accent.npy index 9065ac7d..176e9a7c 100644 Binary files a/rio_tiler/cmap/accent.npy and b/rio_tiler/cmap/accent.npy differ diff --git a/rio_tiler/cmap/accent_r.npy b/rio_tiler/cmap/accent_r.npy index 92506913..d4942a9c 100644 Binary files a/rio_tiler/cmap/accent_r.npy and b/rio_tiler/cmap/accent_r.npy differ diff --git a/rio_tiler/cmap/afmhot.npy b/rio_tiler/cmap/afmhot.npy index aff308c2..3d739b81 100644 Binary files a/rio_tiler/cmap/afmhot.npy and b/rio_tiler/cmap/afmhot.npy differ diff --git a/rio_tiler/cmap/afmhot_r.npy b/rio_tiler/cmap/afmhot_r.npy index 5aa51059..86a54d65 100644 Binary files a/rio_tiler/cmap/afmhot_r.npy and b/rio_tiler/cmap/afmhot_r.npy differ diff --git a/rio_tiler/cmap/autumn.npy b/rio_tiler/cmap/autumn.npy index 5c59e829..8dc7625f 100644 Binary files a/rio_tiler/cmap/autumn.npy and b/rio_tiler/cmap/autumn.npy differ diff --git a/rio_tiler/cmap/autumn_r.npy b/rio_tiler/cmap/autumn_r.npy index caa63508..f10f65d8 100644 Binary files a/rio_tiler/cmap/autumn_r.npy and b/rio_tiler/cmap/autumn_r.npy differ diff --git a/rio_tiler/cmap/binary.npy b/rio_tiler/cmap/binary.npy index ab7c81ad..c97ea783 100644 Binary files a/rio_tiler/cmap/binary.npy and b/rio_tiler/cmap/binary.npy differ diff --git a/rio_tiler/cmap/binary_r.npy b/rio_tiler/cmap/binary_r.npy index 63cf3f7e..a3e1ef7c 100644 Binary files a/rio_tiler/cmap/binary_r.npy and b/rio_tiler/cmap/binary_r.npy differ diff --git a/rio_tiler/cmap/blues.npy b/rio_tiler/cmap/blues.npy index 2d76a48b..6558c871 100644 Binary files a/rio_tiler/cmap/blues.npy and b/rio_tiler/cmap/blues.npy differ diff --git a/rio_tiler/cmap/blues_r.npy b/rio_tiler/cmap/blues_r.npy index edfed862..2aec1984 100644 Binary files a/rio_tiler/cmap/blues_r.npy and 
b/rio_tiler/cmap/blues_r.npy differ diff --git a/rio_tiler/cmap/bone.npy b/rio_tiler/cmap/bone.npy index 461763f8..1b12a909 100644 Binary files a/rio_tiler/cmap/bone.npy and b/rio_tiler/cmap/bone.npy differ diff --git a/rio_tiler/cmap/bone_r.npy b/rio_tiler/cmap/bone_r.npy index 4ace09d5..578a263d 100644 Binary files a/rio_tiler/cmap/bone_r.npy and b/rio_tiler/cmap/bone_r.npy differ diff --git a/rio_tiler/cmap/brbg.npy b/rio_tiler/cmap/brbg.npy index 519738ca..c6aed0b3 100644 Binary files a/rio_tiler/cmap/brbg.npy and b/rio_tiler/cmap/brbg.npy differ diff --git a/rio_tiler/cmap/brbg_r.npy b/rio_tiler/cmap/brbg_r.npy index 4d154c35..a7b28517 100644 Binary files a/rio_tiler/cmap/brbg_r.npy and b/rio_tiler/cmap/brbg_r.npy differ diff --git a/rio_tiler/cmap/brg.npy b/rio_tiler/cmap/brg.npy index 4264144b..c9c3406b 100644 Binary files a/rio_tiler/cmap/brg.npy and b/rio_tiler/cmap/brg.npy differ diff --git a/rio_tiler/cmap/brg_r.npy b/rio_tiler/cmap/brg_r.npy index 859fd806..2bb72dc3 100644 Binary files a/rio_tiler/cmap/brg_r.npy and b/rio_tiler/cmap/brg_r.npy differ diff --git a/rio_tiler/cmap/bugn.npy b/rio_tiler/cmap/bugn.npy index d1624b20..46430c8b 100644 Binary files a/rio_tiler/cmap/bugn.npy and b/rio_tiler/cmap/bugn.npy differ diff --git a/rio_tiler/cmap/bugn_r.npy b/rio_tiler/cmap/bugn_r.npy index e091edb9..c15e5161 100644 Binary files a/rio_tiler/cmap/bugn_r.npy and b/rio_tiler/cmap/bugn_r.npy differ diff --git a/rio_tiler/cmap/bupu.npy b/rio_tiler/cmap/bupu.npy index a796e8ad..a8911303 100644 Binary files a/rio_tiler/cmap/bupu.npy and b/rio_tiler/cmap/bupu.npy differ diff --git a/rio_tiler/cmap/bupu_r.npy b/rio_tiler/cmap/bupu_r.npy index 41ad5be1..f9c7a9d6 100644 Binary files a/rio_tiler/cmap/bupu_r.npy and b/rio_tiler/cmap/bupu_r.npy differ diff --git a/rio_tiler/cmap/bwr.npy b/rio_tiler/cmap/bwr.npy index ac6fba5d..27def091 100644 Binary files a/rio_tiler/cmap/bwr.npy and b/rio_tiler/cmap/bwr.npy differ diff --git a/rio_tiler/cmap/bwr_r.npy 
b/rio_tiler/cmap/bwr_r.npy index c0d9c92c..1de0e90f 100644 Binary files a/rio_tiler/cmap/bwr_r.npy and b/rio_tiler/cmap/bwr_r.npy differ diff --git a/rio_tiler/cmap/cfastie.npy b/rio_tiler/cmap/cfastie.npy index 31f26081..6d2b6dd8 100644 Binary files a/rio_tiler/cmap/cfastie.npy and b/rio_tiler/cmap/cfastie.npy differ diff --git a/rio_tiler/cmap/cividis.npy b/rio_tiler/cmap/cividis.npy index 60757120..c5159da8 100644 Binary files a/rio_tiler/cmap/cividis.npy and b/rio_tiler/cmap/cividis.npy differ diff --git a/rio_tiler/cmap/cividis_r.npy b/rio_tiler/cmap/cividis_r.npy index aba79ce1..6350e5b6 100644 Binary files a/rio_tiler/cmap/cividis_r.npy and b/rio_tiler/cmap/cividis_r.npy differ diff --git a/rio_tiler/cmap/cmrmap.npy b/rio_tiler/cmap/cmrmap.npy index 591cfa04..86fb4e7c 100644 Binary files a/rio_tiler/cmap/cmrmap.npy and b/rio_tiler/cmap/cmrmap.npy differ diff --git a/rio_tiler/cmap/cmrmap_r.npy b/rio_tiler/cmap/cmrmap_r.npy index ba872e89..9d829351 100644 Binary files a/rio_tiler/cmap/cmrmap_r.npy and b/rio_tiler/cmap/cmrmap_r.npy differ diff --git a/rio_tiler/cmap/cool.npy b/rio_tiler/cmap/cool.npy index 8d240325..84e9f89a 100644 Binary files a/rio_tiler/cmap/cool.npy and b/rio_tiler/cmap/cool.npy differ diff --git a/rio_tiler/cmap/cool_r.npy b/rio_tiler/cmap/cool_r.npy index c8d591aa..c17ea4f3 100644 Binary files a/rio_tiler/cmap/cool_r.npy and b/rio_tiler/cmap/cool_r.npy differ diff --git a/rio_tiler/cmap/coolwarm.npy b/rio_tiler/cmap/coolwarm.npy index bf6bfa21..d34b6aaa 100644 Binary files a/rio_tiler/cmap/coolwarm.npy and b/rio_tiler/cmap/coolwarm.npy differ diff --git a/rio_tiler/cmap/coolwarm_r.npy b/rio_tiler/cmap/coolwarm_r.npy index 3b05a2a2..24301a19 100644 Binary files a/rio_tiler/cmap/coolwarm_r.npy and b/rio_tiler/cmap/coolwarm_r.npy differ diff --git a/rio_tiler/cmap/copper.npy b/rio_tiler/cmap/copper.npy index 9c825a4a..b31618fd 100644 Binary files a/rio_tiler/cmap/copper.npy and b/rio_tiler/cmap/copper.npy differ diff --git 
a/rio_tiler/cmap/copper_r.npy b/rio_tiler/cmap/copper_r.npy index d87c9823..5874b20d 100644 Binary files a/rio_tiler/cmap/copper_r.npy and b/rio_tiler/cmap/copper_r.npy differ diff --git a/rio_tiler/cmap/cubehelix.npy b/rio_tiler/cmap/cubehelix.npy index da3c7891..b88cf7f8 100644 Binary files a/rio_tiler/cmap/cubehelix.npy and b/rio_tiler/cmap/cubehelix.npy differ diff --git a/rio_tiler/cmap/cubehelix_r.npy b/rio_tiler/cmap/cubehelix_r.npy index 03f5da68..fc11b82b 100644 Binary files a/rio_tiler/cmap/cubehelix_r.npy and b/rio_tiler/cmap/cubehelix_r.npy differ diff --git a/rio_tiler/cmap/dark2.npy b/rio_tiler/cmap/dark2.npy index 2df9f9ce..abcf6f81 100644 Binary files a/rio_tiler/cmap/dark2.npy and b/rio_tiler/cmap/dark2.npy differ diff --git a/rio_tiler/cmap/dark2_r.npy b/rio_tiler/cmap/dark2_r.npy index 5591a230..7c654063 100644 Binary files a/rio_tiler/cmap/dark2_r.npy and b/rio_tiler/cmap/dark2_r.npy differ diff --git a/rio_tiler/cmap/flag.npy b/rio_tiler/cmap/flag.npy index 42762d16..8431ebb0 100644 Binary files a/rio_tiler/cmap/flag.npy and b/rio_tiler/cmap/flag.npy differ diff --git a/rio_tiler/cmap/flag_r.npy b/rio_tiler/cmap/flag_r.npy index bdbefcca..58f67151 100644 Binary files a/rio_tiler/cmap/flag_r.npy and b/rio_tiler/cmap/flag_r.npy differ diff --git a/rio_tiler/cmap/gist_earth.npy b/rio_tiler/cmap/gist_earth.npy index efc2fcb7..7ea9866b 100644 Binary files a/rio_tiler/cmap/gist_earth.npy and b/rio_tiler/cmap/gist_earth.npy differ diff --git a/rio_tiler/cmap/gist_earth_r.npy b/rio_tiler/cmap/gist_earth_r.npy index 0af548ca..427e994e 100644 Binary files a/rio_tiler/cmap/gist_earth_r.npy and b/rio_tiler/cmap/gist_earth_r.npy differ diff --git a/rio_tiler/cmap/gist_gray.npy b/rio_tiler/cmap/gist_gray.npy index 63cf3f7e..a3e1ef7c 100644 Binary files a/rio_tiler/cmap/gist_gray.npy and b/rio_tiler/cmap/gist_gray.npy differ diff --git a/rio_tiler/cmap/gist_gray_r.npy b/rio_tiler/cmap/gist_gray_r.npy index ab7c81ad..c97ea783 100644 Binary files 
a/rio_tiler/cmap/gist_gray_r.npy and b/rio_tiler/cmap/gist_gray_r.npy differ diff --git a/rio_tiler/cmap/gist_heat.npy b/rio_tiler/cmap/gist_heat.npy index 31416216..dcada459 100644 Binary files a/rio_tiler/cmap/gist_heat.npy and b/rio_tiler/cmap/gist_heat.npy differ diff --git a/rio_tiler/cmap/gist_heat_r.npy b/rio_tiler/cmap/gist_heat_r.npy index af62a47e..9488ab2d 100644 Binary files a/rio_tiler/cmap/gist_heat_r.npy and b/rio_tiler/cmap/gist_heat_r.npy differ diff --git a/rio_tiler/cmap/gist_ncar.npy b/rio_tiler/cmap/gist_ncar.npy index 4ea2eb68..6acae9fc 100644 Binary files a/rio_tiler/cmap/gist_ncar.npy and b/rio_tiler/cmap/gist_ncar.npy differ diff --git a/rio_tiler/cmap/gist_ncar_r.npy b/rio_tiler/cmap/gist_ncar_r.npy index 654d9dcb..f15b84d9 100644 Binary files a/rio_tiler/cmap/gist_ncar_r.npy and b/rio_tiler/cmap/gist_ncar_r.npy differ diff --git a/rio_tiler/cmap/gist_rainbow.npy b/rio_tiler/cmap/gist_rainbow.npy index 063ffc11..9baae586 100644 Binary files a/rio_tiler/cmap/gist_rainbow.npy and b/rio_tiler/cmap/gist_rainbow.npy differ diff --git a/rio_tiler/cmap/gist_rainbow_r.npy b/rio_tiler/cmap/gist_rainbow_r.npy index 70037e93..634646c4 100644 Binary files a/rio_tiler/cmap/gist_rainbow_r.npy and b/rio_tiler/cmap/gist_rainbow_r.npy differ diff --git a/rio_tiler/cmap/gist_stern.npy b/rio_tiler/cmap/gist_stern.npy index 07bdc4ba..94fd9de3 100644 Binary files a/rio_tiler/cmap/gist_stern.npy and b/rio_tiler/cmap/gist_stern.npy differ diff --git a/rio_tiler/cmap/gist_stern_r.npy b/rio_tiler/cmap/gist_stern_r.npy index 29e4bc3b..f20a4ede 100644 Binary files a/rio_tiler/cmap/gist_stern_r.npy and b/rio_tiler/cmap/gist_stern_r.npy differ diff --git a/rio_tiler/cmap/gist_yarg.npy b/rio_tiler/cmap/gist_yarg.npy index ab7c81ad..c97ea783 100644 Binary files a/rio_tiler/cmap/gist_yarg.npy and b/rio_tiler/cmap/gist_yarg.npy differ diff --git a/rio_tiler/cmap/gist_yarg_r.npy b/rio_tiler/cmap/gist_yarg_r.npy index c76faec9..64973cee 100644 Binary files 
a/rio_tiler/cmap/gist_yarg_r.npy and b/rio_tiler/cmap/gist_yarg_r.npy differ diff --git a/rio_tiler/cmap/gnbu.npy b/rio_tiler/cmap/gnbu.npy index 87468761..d8376ec1 100644 Binary files a/rio_tiler/cmap/gnbu.npy and b/rio_tiler/cmap/gnbu.npy differ diff --git a/rio_tiler/cmap/gnbu_r.npy b/rio_tiler/cmap/gnbu_r.npy index 69868bee..2cc103f0 100644 Binary files a/rio_tiler/cmap/gnbu_r.npy and b/rio_tiler/cmap/gnbu_r.npy differ diff --git a/rio_tiler/cmap/gnuplot.npy b/rio_tiler/cmap/gnuplot.npy index 11ee0318..36be5daf 100644 Binary files a/rio_tiler/cmap/gnuplot.npy and b/rio_tiler/cmap/gnuplot.npy differ diff --git a/rio_tiler/cmap/gnuplot2.npy b/rio_tiler/cmap/gnuplot2.npy index a6572180..017d8afb 100644 Binary files a/rio_tiler/cmap/gnuplot2.npy and b/rio_tiler/cmap/gnuplot2.npy differ diff --git a/rio_tiler/cmap/gnuplot2_r.npy b/rio_tiler/cmap/gnuplot2_r.npy index 4c9bfefc..45650044 100644 Binary files a/rio_tiler/cmap/gnuplot2_r.npy and b/rio_tiler/cmap/gnuplot2_r.npy differ diff --git a/rio_tiler/cmap/gnuplot_r.npy b/rio_tiler/cmap/gnuplot_r.npy index 194791b4..598f96c2 100644 Binary files a/rio_tiler/cmap/gnuplot_r.npy and b/rio_tiler/cmap/gnuplot_r.npy differ diff --git a/rio_tiler/cmap/gray.npy b/rio_tiler/cmap/gray.npy index 63cf3f7e..a3e1ef7c 100644 Binary files a/rio_tiler/cmap/gray.npy and b/rio_tiler/cmap/gray.npy differ diff --git a/rio_tiler/cmap/gray_r.npy b/rio_tiler/cmap/gray_r.npy index ab7c81ad..c97ea783 100644 Binary files a/rio_tiler/cmap/gray_r.npy and b/rio_tiler/cmap/gray_r.npy differ diff --git a/rio_tiler/cmap/greens.npy b/rio_tiler/cmap/greens.npy index e2322850..da106347 100644 Binary files a/rio_tiler/cmap/greens.npy and b/rio_tiler/cmap/greens.npy differ diff --git a/rio_tiler/cmap/greens_r.npy b/rio_tiler/cmap/greens_r.npy index ba273f9b..8c32e045 100644 Binary files a/rio_tiler/cmap/greens_r.npy and b/rio_tiler/cmap/greens_r.npy differ diff --git a/rio_tiler/cmap/greys.npy b/rio_tiler/cmap/greys.npy index 4fff6f1d..95a4f108 100644 
Binary files a/rio_tiler/cmap/greys.npy and b/rio_tiler/cmap/greys.npy differ diff --git a/rio_tiler/cmap/greys_r.npy b/rio_tiler/cmap/greys_r.npy index fc55b478..0033f406 100644 Binary files a/rio_tiler/cmap/greys_r.npy and b/rio_tiler/cmap/greys_r.npy differ diff --git a/rio_tiler/cmap/hot.npy b/rio_tiler/cmap/hot.npy index 1d3a371e..1e0a98dd 100644 Binary files a/rio_tiler/cmap/hot.npy and b/rio_tiler/cmap/hot.npy differ diff --git a/rio_tiler/cmap/hot_r.npy b/rio_tiler/cmap/hot_r.npy index 7ab991d2..6447836a 100644 Binary files a/rio_tiler/cmap/hot_r.npy and b/rio_tiler/cmap/hot_r.npy differ diff --git a/rio_tiler/cmap/hsv.npy b/rio_tiler/cmap/hsv.npy index f8ecb9c8..b1edcfe5 100644 Binary files a/rio_tiler/cmap/hsv.npy and b/rio_tiler/cmap/hsv.npy differ diff --git a/rio_tiler/cmap/hsv_r.npy b/rio_tiler/cmap/hsv_r.npy index 9ca27c9c..22a50a77 100644 Binary files a/rio_tiler/cmap/hsv_r.npy and b/rio_tiler/cmap/hsv_r.npy differ diff --git a/rio_tiler/cmap/inferno.npy b/rio_tiler/cmap/inferno.npy index f2248930..961b23ed 100644 Binary files a/rio_tiler/cmap/inferno.npy and b/rio_tiler/cmap/inferno.npy differ diff --git a/rio_tiler/cmap/inferno_r.npy b/rio_tiler/cmap/inferno_r.npy index b16e49d5..4be61358 100644 Binary files a/rio_tiler/cmap/inferno_r.npy and b/rio_tiler/cmap/inferno_r.npy differ diff --git a/rio_tiler/cmap/jet.npy b/rio_tiler/cmap/jet.npy index 3ec70b1a..145d3d69 100644 Binary files a/rio_tiler/cmap/jet.npy and b/rio_tiler/cmap/jet.npy differ diff --git a/rio_tiler/cmap/jet_r.npy b/rio_tiler/cmap/jet_r.npy index fb1b424b..2eaaa6a1 100644 Binary files a/rio_tiler/cmap/jet_r.npy and b/rio_tiler/cmap/jet_r.npy differ diff --git a/rio_tiler/cmap/magma.npy b/rio_tiler/cmap/magma.npy index 194c2247..6f785942 100644 Binary files a/rio_tiler/cmap/magma.npy and b/rio_tiler/cmap/magma.npy differ diff --git a/rio_tiler/cmap/magma_r.npy b/rio_tiler/cmap/magma_r.npy index 5f6aafc2..7acf90ec 100644 Binary files a/rio_tiler/cmap/magma_r.npy and 
b/rio_tiler/cmap/magma_r.npy differ diff --git a/rio_tiler/cmap/nipy_spectral.npy b/rio_tiler/cmap/nipy_spectral.npy index 14524991..04edd628 100644 Binary files a/rio_tiler/cmap/nipy_spectral.npy and b/rio_tiler/cmap/nipy_spectral.npy differ diff --git a/rio_tiler/cmap/nipy_spectral_r.npy b/rio_tiler/cmap/nipy_spectral_r.npy index a44e2ac8..72acc352 100644 Binary files a/rio_tiler/cmap/nipy_spectral_r.npy and b/rio_tiler/cmap/nipy_spectral_r.npy differ diff --git a/rio_tiler/cmap/ocean.npy b/rio_tiler/cmap/ocean.npy index 46b6d156..fdbe3031 100644 Binary files a/rio_tiler/cmap/ocean.npy and b/rio_tiler/cmap/ocean.npy differ diff --git a/rio_tiler/cmap/ocean_r.npy b/rio_tiler/cmap/ocean_r.npy index 643ae888..83fb378a 100644 Binary files a/rio_tiler/cmap/ocean_r.npy and b/rio_tiler/cmap/ocean_r.npy differ diff --git a/rio_tiler/cmap/oranges.npy b/rio_tiler/cmap/oranges.npy index bf340a9b..7a9b754c 100644 Binary files a/rio_tiler/cmap/oranges.npy and b/rio_tiler/cmap/oranges.npy differ diff --git a/rio_tiler/cmap/oranges_r.npy b/rio_tiler/cmap/oranges_r.npy index fe700be1..13a0f2a1 100644 Binary files a/rio_tiler/cmap/oranges_r.npy and b/rio_tiler/cmap/oranges_r.npy differ diff --git a/rio_tiler/cmap/orrd.npy b/rio_tiler/cmap/orrd.npy index 27939d6a..c9a72ba5 100644 Binary files a/rio_tiler/cmap/orrd.npy and b/rio_tiler/cmap/orrd.npy differ diff --git a/rio_tiler/cmap/orrd_r.npy b/rio_tiler/cmap/orrd_r.npy index 22f59f14..5552d963 100644 Binary files a/rio_tiler/cmap/orrd_r.npy and b/rio_tiler/cmap/orrd_r.npy differ diff --git a/rio_tiler/cmap/paired.npy b/rio_tiler/cmap/paired.npy index 1b8c2e91..e130d2f8 100644 Binary files a/rio_tiler/cmap/paired.npy and b/rio_tiler/cmap/paired.npy differ diff --git a/rio_tiler/cmap/paired_r.npy b/rio_tiler/cmap/paired_r.npy index e8bf38ea..5e716baa 100644 Binary files a/rio_tiler/cmap/paired_r.npy and b/rio_tiler/cmap/paired_r.npy differ diff --git a/rio_tiler/cmap/pastel1.npy b/rio_tiler/cmap/pastel1.npy index e27f7ad6..942c019e 
100644 Binary files a/rio_tiler/cmap/pastel1.npy and b/rio_tiler/cmap/pastel1.npy differ diff --git a/rio_tiler/cmap/pastel1_r.npy b/rio_tiler/cmap/pastel1_r.npy index 3b5c3838..8c12bd7f 100644 Binary files a/rio_tiler/cmap/pastel1_r.npy and b/rio_tiler/cmap/pastel1_r.npy differ diff --git a/rio_tiler/cmap/pastel2.npy b/rio_tiler/cmap/pastel2.npy index 9f784c7f..ecd5d483 100644 Binary files a/rio_tiler/cmap/pastel2.npy and b/rio_tiler/cmap/pastel2.npy differ diff --git a/rio_tiler/cmap/pastel2_r.npy b/rio_tiler/cmap/pastel2_r.npy index 3625951d..94f8b6db 100644 Binary files a/rio_tiler/cmap/pastel2_r.npy and b/rio_tiler/cmap/pastel2_r.npy differ diff --git a/rio_tiler/cmap/pink.npy b/rio_tiler/cmap/pink.npy index 28d58ba4..50fe71eb 100644 Binary files a/rio_tiler/cmap/pink.npy and b/rio_tiler/cmap/pink.npy differ diff --git a/rio_tiler/cmap/pink_r.npy b/rio_tiler/cmap/pink_r.npy index 808bbb78..6b0eccad 100644 Binary files a/rio_tiler/cmap/pink_r.npy and b/rio_tiler/cmap/pink_r.npy differ diff --git a/rio_tiler/cmap/piyg.npy b/rio_tiler/cmap/piyg.npy index d25e014c..d9cf1eab 100644 Binary files a/rio_tiler/cmap/piyg.npy and b/rio_tiler/cmap/piyg.npy differ diff --git a/rio_tiler/cmap/piyg_r.npy b/rio_tiler/cmap/piyg_r.npy index be1fa024..59fa36bd 100644 Binary files a/rio_tiler/cmap/piyg_r.npy and b/rio_tiler/cmap/piyg_r.npy differ diff --git a/rio_tiler/cmap/plasma.npy b/rio_tiler/cmap/plasma.npy index ec0e896f..3c61aaba 100644 Binary files a/rio_tiler/cmap/plasma.npy and b/rio_tiler/cmap/plasma.npy differ diff --git a/rio_tiler/cmap/plasma_r.npy b/rio_tiler/cmap/plasma_r.npy index 581e4f4c..7cdb7956 100644 Binary files a/rio_tiler/cmap/plasma_r.npy and b/rio_tiler/cmap/plasma_r.npy differ diff --git a/rio_tiler/cmap/prgn.npy b/rio_tiler/cmap/prgn.npy index ed152496..c7dd9ecc 100644 Binary files a/rio_tiler/cmap/prgn.npy and b/rio_tiler/cmap/prgn.npy differ diff --git a/rio_tiler/cmap/prgn_r.npy b/rio_tiler/cmap/prgn_r.npy index ef988519..c5fa4c1b 100644 Binary 
files a/rio_tiler/cmap/prgn_r.npy and b/rio_tiler/cmap/prgn_r.npy differ diff --git a/rio_tiler/cmap/prism.npy b/rio_tiler/cmap/prism.npy index d83fb561..5c6e75b5 100644 Binary files a/rio_tiler/cmap/prism.npy and b/rio_tiler/cmap/prism.npy differ diff --git a/rio_tiler/cmap/prism_r.npy b/rio_tiler/cmap/prism_r.npy index 48d36478..58bee8a6 100644 Binary files a/rio_tiler/cmap/prism_r.npy and b/rio_tiler/cmap/prism_r.npy differ diff --git a/rio_tiler/cmap/pubu.npy b/rio_tiler/cmap/pubu.npy index bc851ed0..0c1eb99f 100644 Binary files a/rio_tiler/cmap/pubu.npy and b/rio_tiler/cmap/pubu.npy differ diff --git a/rio_tiler/cmap/pubu_r.npy b/rio_tiler/cmap/pubu_r.npy index 7e594eac..774a9c45 100644 Binary files a/rio_tiler/cmap/pubu_r.npy and b/rio_tiler/cmap/pubu_r.npy differ diff --git a/rio_tiler/cmap/pubugn.npy b/rio_tiler/cmap/pubugn.npy index 7f75bed2..589d1306 100644 Binary files a/rio_tiler/cmap/pubugn.npy and b/rio_tiler/cmap/pubugn.npy differ diff --git a/rio_tiler/cmap/pubugn_r.npy b/rio_tiler/cmap/pubugn_r.npy index 689c781d..0e1dca68 100644 Binary files a/rio_tiler/cmap/pubugn_r.npy and b/rio_tiler/cmap/pubugn_r.npy differ diff --git a/rio_tiler/cmap/puor.npy b/rio_tiler/cmap/puor.npy index 45b90921..57899f69 100644 Binary files a/rio_tiler/cmap/puor.npy and b/rio_tiler/cmap/puor.npy differ diff --git a/rio_tiler/cmap/puor_r.npy b/rio_tiler/cmap/puor_r.npy index d622e19d..fbb1acf3 100644 Binary files a/rio_tiler/cmap/puor_r.npy and b/rio_tiler/cmap/puor_r.npy differ diff --git a/rio_tiler/cmap/purd.npy b/rio_tiler/cmap/purd.npy index be673a11..ba174e56 100644 Binary files a/rio_tiler/cmap/purd.npy and b/rio_tiler/cmap/purd.npy differ diff --git a/rio_tiler/cmap/purd_r.npy b/rio_tiler/cmap/purd_r.npy index a900b437..2bd9aec7 100644 Binary files a/rio_tiler/cmap/purd_r.npy and b/rio_tiler/cmap/purd_r.npy differ diff --git a/rio_tiler/cmap/purples.npy b/rio_tiler/cmap/purples.npy index 9ac180f8..0d99c34e 100644 Binary files a/rio_tiler/cmap/purples.npy and 
b/rio_tiler/cmap/purples.npy differ diff --git a/rio_tiler/cmap/purples_r.npy b/rio_tiler/cmap/purples_r.npy index 1800460b..51b6a5cb 100644 Binary files a/rio_tiler/cmap/purples_r.npy and b/rio_tiler/cmap/purples_r.npy differ diff --git a/rio_tiler/cmap/rainbow.npy b/rio_tiler/cmap/rainbow.npy index a0fcfcba..37824031 100644 Binary files a/rio_tiler/cmap/rainbow.npy and b/rio_tiler/cmap/rainbow.npy differ diff --git a/rio_tiler/cmap/rainbow_r.npy b/rio_tiler/cmap/rainbow_r.npy index 6c6c9c2c..6d05da1a 100644 Binary files a/rio_tiler/cmap/rainbow_r.npy and b/rio_tiler/cmap/rainbow_r.npy differ diff --git a/rio_tiler/cmap/rdbu.npy b/rio_tiler/cmap/rdbu.npy index a75efcdb..8ca6f9e0 100644 Binary files a/rio_tiler/cmap/rdbu.npy and b/rio_tiler/cmap/rdbu.npy differ diff --git a/rio_tiler/cmap/rdbu_r.npy b/rio_tiler/cmap/rdbu_r.npy index 39a6aaf1..8e1e50b4 100644 Binary files a/rio_tiler/cmap/rdbu_r.npy and b/rio_tiler/cmap/rdbu_r.npy differ diff --git a/rio_tiler/cmap/rdgy.npy b/rio_tiler/cmap/rdgy.npy index 0be574b9..9dce3e12 100644 Binary files a/rio_tiler/cmap/rdgy.npy and b/rio_tiler/cmap/rdgy.npy differ diff --git a/rio_tiler/cmap/rdgy_r.npy b/rio_tiler/cmap/rdgy_r.npy index 0a7f9dbb..0d23fdd8 100644 Binary files a/rio_tiler/cmap/rdgy_r.npy and b/rio_tiler/cmap/rdgy_r.npy differ diff --git a/rio_tiler/cmap/rdpu.npy b/rio_tiler/cmap/rdpu.npy index 72287e87..0c03ce5b 100644 Binary files a/rio_tiler/cmap/rdpu.npy and b/rio_tiler/cmap/rdpu.npy differ diff --git a/rio_tiler/cmap/rdpu_r.npy b/rio_tiler/cmap/rdpu_r.npy index 97c842ec..0bb049bf 100644 Binary files a/rio_tiler/cmap/rdpu_r.npy and b/rio_tiler/cmap/rdpu_r.npy differ diff --git a/rio_tiler/cmap/rdylbu.npy b/rio_tiler/cmap/rdylbu.npy index 4a42d90d..964d4de9 100644 Binary files a/rio_tiler/cmap/rdylbu.npy and b/rio_tiler/cmap/rdylbu.npy differ diff --git a/rio_tiler/cmap/rdylbu_r.npy b/rio_tiler/cmap/rdylbu_r.npy index 54ffda38..b63b5547 100644 Binary files a/rio_tiler/cmap/rdylbu_r.npy and 
b/rio_tiler/cmap/rdylbu_r.npy differ diff --git a/rio_tiler/cmap/rdylgn.npy b/rio_tiler/cmap/rdylgn.npy index cfa0679f..1b4ded05 100644 Binary files a/rio_tiler/cmap/rdylgn.npy and b/rio_tiler/cmap/rdylgn.npy differ diff --git a/rio_tiler/cmap/rdylgn_r.npy b/rio_tiler/cmap/rdylgn_r.npy index d2569d05..16de8b83 100644 Binary files a/rio_tiler/cmap/rdylgn_r.npy and b/rio_tiler/cmap/rdylgn_r.npy differ diff --git a/rio_tiler/cmap/reds.npy b/rio_tiler/cmap/reds.npy index 936db5e8..e38400bc 100644 Binary files a/rio_tiler/cmap/reds.npy and b/rio_tiler/cmap/reds.npy differ diff --git a/rio_tiler/cmap/reds_r.npy b/rio_tiler/cmap/reds_r.npy index 9315aed5..aafcc08e 100644 Binary files a/rio_tiler/cmap/reds_r.npy and b/rio_tiler/cmap/reds_r.npy differ diff --git a/rio_tiler/cmap/rplumbo.npy b/rio_tiler/cmap/rplumbo.npy index 22a1a21d..23043a93 100644 Binary files a/rio_tiler/cmap/rplumbo.npy and b/rio_tiler/cmap/rplumbo.npy differ diff --git a/rio_tiler/cmap/schwarzwald.npy b/rio_tiler/cmap/schwarzwald.npy index c5a11e08..187d5609 100644 Binary files a/rio_tiler/cmap/schwarzwald.npy and b/rio_tiler/cmap/schwarzwald.npy differ diff --git a/rio_tiler/cmap/seismic.npy b/rio_tiler/cmap/seismic.npy index 9cd114a7..7406e02a 100644 Binary files a/rio_tiler/cmap/seismic.npy and b/rio_tiler/cmap/seismic.npy differ diff --git a/rio_tiler/cmap/seismic_r.npy b/rio_tiler/cmap/seismic_r.npy index 9089c687..d5c605a1 100644 Binary files a/rio_tiler/cmap/seismic_r.npy and b/rio_tiler/cmap/seismic_r.npy differ diff --git a/rio_tiler/cmap/set1.npy b/rio_tiler/cmap/set1.npy index 6af3fd94..e092e4b0 100644 Binary files a/rio_tiler/cmap/set1.npy and b/rio_tiler/cmap/set1.npy differ diff --git a/rio_tiler/cmap/set1_r.npy b/rio_tiler/cmap/set1_r.npy index 849bffb5..6861673e 100644 Binary files a/rio_tiler/cmap/set1_r.npy and b/rio_tiler/cmap/set1_r.npy differ diff --git a/rio_tiler/cmap/set2.npy b/rio_tiler/cmap/set2.npy index 242c56b1..31e6c427 100644 Binary files a/rio_tiler/cmap/set2.npy and 
b/rio_tiler/cmap/set2.npy differ diff --git a/rio_tiler/cmap/set2_r.npy b/rio_tiler/cmap/set2_r.npy index 21c8e230..7a6d6f1d 100644 Binary files a/rio_tiler/cmap/set2_r.npy and b/rio_tiler/cmap/set2_r.npy differ diff --git a/rio_tiler/cmap/set3.npy b/rio_tiler/cmap/set3.npy index 153ea917..b6c7e0df 100644 Binary files a/rio_tiler/cmap/set3.npy and b/rio_tiler/cmap/set3.npy differ diff --git a/rio_tiler/cmap/set3_r.npy b/rio_tiler/cmap/set3_r.npy index b13ee6b0..2aa722cd 100644 Binary files a/rio_tiler/cmap/set3_r.npy and b/rio_tiler/cmap/set3_r.npy differ diff --git a/rio_tiler/cmap/spectral.npy b/rio_tiler/cmap/spectral.npy index dbdb0f0d..086baab0 100644 Binary files a/rio_tiler/cmap/spectral.npy and b/rio_tiler/cmap/spectral.npy differ diff --git a/rio_tiler/cmap/spectral_r.npy b/rio_tiler/cmap/spectral_r.npy index e19acdb8..246c5ae4 100644 Binary files a/rio_tiler/cmap/spectral_r.npy and b/rio_tiler/cmap/spectral_r.npy differ diff --git a/rio_tiler/cmap/spring.npy b/rio_tiler/cmap/spring.npy index 52613f6c..df92484a 100644 Binary files a/rio_tiler/cmap/spring.npy and b/rio_tiler/cmap/spring.npy differ diff --git a/rio_tiler/cmap/spring_r.npy b/rio_tiler/cmap/spring_r.npy index 898ad420..7b9b7856 100644 Binary files a/rio_tiler/cmap/spring_r.npy and b/rio_tiler/cmap/spring_r.npy differ diff --git a/rio_tiler/cmap/summer.npy b/rio_tiler/cmap/summer.npy index 3757840d..f69451fe 100644 Binary files a/rio_tiler/cmap/summer.npy and b/rio_tiler/cmap/summer.npy differ diff --git a/rio_tiler/cmap/summer_r.npy b/rio_tiler/cmap/summer_r.npy index 895a9b20..7732d264 100644 Binary files a/rio_tiler/cmap/summer_r.npy and b/rio_tiler/cmap/summer_r.npy differ diff --git a/rio_tiler/cmap/tab10.npy b/rio_tiler/cmap/tab10.npy index 47184925..c013253a 100644 Binary files a/rio_tiler/cmap/tab10.npy and b/rio_tiler/cmap/tab10.npy differ diff --git a/rio_tiler/cmap/tab10_r.npy b/rio_tiler/cmap/tab10_r.npy index 252ac45b..b3557815 100644 Binary files a/rio_tiler/cmap/tab10_r.npy and 
b/rio_tiler/cmap/tab10_r.npy differ diff --git a/rio_tiler/cmap/tab20.npy b/rio_tiler/cmap/tab20.npy index d1deb3ea..05d9d1a5 100644 Binary files a/rio_tiler/cmap/tab20.npy and b/rio_tiler/cmap/tab20.npy differ diff --git a/rio_tiler/cmap/tab20_r.npy b/rio_tiler/cmap/tab20_r.npy index 12cf6349..779495d1 100644 Binary files a/rio_tiler/cmap/tab20_r.npy and b/rio_tiler/cmap/tab20_r.npy differ diff --git a/rio_tiler/cmap/tab20b.npy b/rio_tiler/cmap/tab20b.npy index 6c1873e7..d63df354 100644 Binary files a/rio_tiler/cmap/tab20b.npy and b/rio_tiler/cmap/tab20b.npy differ diff --git a/rio_tiler/cmap/tab20b_r.npy b/rio_tiler/cmap/tab20b_r.npy index b69c6519..58e8ea27 100644 Binary files a/rio_tiler/cmap/tab20b_r.npy and b/rio_tiler/cmap/tab20b_r.npy differ diff --git a/rio_tiler/cmap/tab20c.npy b/rio_tiler/cmap/tab20c.npy index 3511a4ae..a1222caf 100644 Binary files a/rio_tiler/cmap/tab20c.npy and b/rio_tiler/cmap/tab20c.npy differ diff --git a/rio_tiler/cmap/tab20c_r.npy b/rio_tiler/cmap/tab20c_r.npy index 2773639f..6b1fb540 100644 Binary files a/rio_tiler/cmap/tab20c_r.npy and b/rio_tiler/cmap/tab20c_r.npy differ diff --git a/rio_tiler/cmap/terrain.npy b/rio_tiler/cmap/terrain.npy index 2bfc11b3..1f4567f6 100644 Binary files a/rio_tiler/cmap/terrain.npy and b/rio_tiler/cmap/terrain.npy differ diff --git a/rio_tiler/cmap/terrain_r.npy b/rio_tiler/cmap/terrain_r.npy index cc8c19d6..d75dfd7e 100644 Binary files a/rio_tiler/cmap/terrain_r.npy and b/rio_tiler/cmap/terrain_r.npy differ diff --git a/rio_tiler/cmap/twilight.npy b/rio_tiler/cmap/twilight.npy index a9257886..b043ffb3 100644 Binary files a/rio_tiler/cmap/twilight.npy and b/rio_tiler/cmap/twilight.npy differ diff --git a/rio_tiler/cmap/twilight_r.npy b/rio_tiler/cmap/twilight_r.npy index b8ffd2c3..48f53e5c 100644 Binary files a/rio_tiler/cmap/twilight_r.npy and b/rio_tiler/cmap/twilight_r.npy differ diff --git a/rio_tiler/cmap/twilight_shifted.npy b/rio_tiler/cmap/twilight_shifted.npy index 5a93e642..e4ba7090 
100644 Binary files a/rio_tiler/cmap/twilight_shifted.npy and b/rio_tiler/cmap/twilight_shifted.npy differ diff --git a/rio_tiler/cmap/twilight_shifted_r.npy b/rio_tiler/cmap/twilight_shifted_r.npy index 8a4452b7..f2fb8574 100644 Binary files a/rio_tiler/cmap/twilight_shifted_r.npy and b/rio_tiler/cmap/twilight_shifted_r.npy differ diff --git a/rio_tiler/cmap/viridis.npy b/rio_tiler/cmap/viridis.npy index 920fd9ea..34127f2d 100644 Binary files a/rio_tiler/cmap/viridis.npy and b/rio_tiler/cmap/viridis.npy differ diff --git a/rio_tiler/cmap/viridis_r.npy b/rio_tiler/cmap/viridis_r.npy index 9bb41543..83657573 100644 Binary files a/rio_tiler/cmap/viridis_r.npy and b/rio_tiler/cmap/viridis_r.npy differ diff --git a/rio_tiler/cmap/winter.npy b/rio_tiler/cmap/winter.npy index 5981204f..fdeb935b 100644 Binary files a/rio_tiler/cmap/winter.npy and b/rio_tiler/cmap/winter.npy differ diff --git a/rio_tiler/cmap/winter_r.npy b/rio_tiler/cmap/winter_r.npy index af6bb61c..28169a2b 100644 Binary files a/rio_tiler/cmap/winter_r.npy and b/rio_tiler/cmap/winter_r.npy differ diff --git a/rio_tiler/cmap/wistia.npy b/rio_tiler/cmap/wistia.npy index 0ec1f838..c142e46b 100644 Binary files a/rio_tiler/cmap/wistia.npy and b/rio_tiler/cmap/wistia.npy differ diff --git a/rio_tiler/cmap/wistia_r.npy b/rio_tiler/cmap/wistia_r.npy index 03cfab55..4d32dadd 100644 Binary files a/rio_tiler/cmap/wistia_r.npy and b/rio_tiler/cmap/wistia_r.npy differ diff --git a/rio_tiler/cmap/ylgn.npy b/rio_tiler/cmap/ylgn.npy index dcfde355..763d1e3c 100644 Binary files a/rio_tiler/cmap/ylgn.npy and b/rio_tiler/cmap/ylgn.npy differ diff --git a/rio_tiler/cmap/ylgn_r.npy b/rio_tiler/cmap/ylgn_r.npy index 7e2530fb..f5d0794b 100644 Binary files a/rio_tiler/cmap/ylgn_r.npy and b/rio_tiler/cmap/ylgn_r.npy differ diff --git a/rio_tiler/cmap/ylgnbu.npy b/rio_tiler/cmap/ylgnbu.npy index 87d28aa3..3788e6d6 100644 Binary files a/rio_tiler/cmap/ylgnbu.npy and b/rio_tiler/cmap/ylgnbu.npy differ diff --git 
a/rio_tiler/cmap/ylgnbu_r.npy b/rio_tiler/cmap/ylgnbu_r.npy index 3e904d29..aa44082d 100644 Binary files a/rio_tiler/cmap/ylgnbu_r.npy and b/rio_tiler/cmap/ylgnbu_r.npy differ diff --git a/rio_tiler/cmap/ylorbr.npy b/rio_tiler/cmap/ylorbr.npy index 1e288322..35098d26 100644 Binary files a/rio_tiler/cmap/ylorbr.npy and b/rio_tiler/cmap/ylorbr.npy differ diff --git a/rio_tiler/cmap/ylorbr_r.npy b/rio_tiler/cmap/ylorbr_r.npy index 89584b8d..262b7b43 100644 Binary files a/rio_tiler/cmap/ylorbr_r.npy and b/rio_tiler/cmap/ylorbr_r.npy differ diff --git a/rio_tiler/cmap/ylorrd.npy b/rio_tiler/cmap/ylorrd.npy index 38f334c3..84936a4d 100644 Binary files a/rio_tiler/cmap/ylorrd.npy and b/rio_tiler/cmap/ylorrd.npy differ diff --git a/rio_tiler/cmap/ylorrd_r.npy b/rio_tiler/cmap/ylorrd_r.npy index 7625d211..d861af85 100644 Binary files a/rio_tiler/cmap/ylorrd_r.npy and b/rio_tiler/cmap/ylorrd_r.npy differ diff --git a/rio_tiler/colormap.py b/rio_tiler/colormap.py new file mode 100644 index 00000000..61c38081 --- /dev/null +++ b/rio_tiler/colormap.py @@ -0,0 +1,147 @@ +"""rio-tiler colormap functions.""" + +from typing import Dict, Sequence, Tuple + +import os + +import numpy + + +EMPTY_COLORMAP: Dict = {i: [0, 0, 0, 0] for i in range(256)} + + +def _update_alpha(cmap: Dict, idx: Sequence[int], alpha: int = 0) -> None: + """Update the alpha value of a colormap index.""" + if isinstance(idx, int): + idx = (idx,) + for i in idx: + cmap[i] = cmap[i][0:3] + [alpha] + + +def _remove_value(cmap: Dict, idx: Sequence[int]) -> None: + """Remove value from a colormap dict.""" + if isinstance(idx, int): + idx = (idx,) + + for i in idx: + cmap.pop(i, None) + + +def _update_cmap(cmap: Dict, values: Dict) -> None: + """Update a colormap dict.""" + for i, color in values.items(): + if len(color) == 3: + color += [255] + cmap[i] = color + + +def get_colormap(name: str) -> Dict: + """ + Return colormap dict. 
+ + Attributes + ---------- + name : str, optional + Colormap name (default: cfastie) + + Returns + ------- + colormap : dict + GDAL RGBA Color Table dictionary. + + """ + cmap_file = os.path.join(os.path.dirname(__file__), "cmap", f"{name.lower()}.npy") + cmap = numpy.load(cmap_file) + assert cmap.shape == (256, 4) + assert cmap.dtype == numpy.uint8 + + return {idx: value.tolist() for idx, value in enumerate(cmap)} + + +# From https://github.com/mojodna/marblecutter/blob/5b9040ba6c83562a465eabdbb6e8959e6a8bf041/marblecutter/utils.py#L35 +def make_lut(colormap: Dict) -> numpy.ndarray: + """ + Create a lookup table numpy.ndarray from a GDAL RGBA Color Table dictionary. + + Attributes + ---------- + colormap : dict + GDAL RGBA Color Table dictionary. + + Returns + ------- + lut : numpy.ndarray + colormap lookup table + + """ + lut = numpy.zeros(shape=(256, 4), dtype=numpy.uint8) + for i, color in colormap.items(): + lut[int(i)] = color + + return lut + + +def apply_cmap( + data: numpy.ndarray, colormap: Dict +) -> Tuple[numpy.ndarray, numpy.ndarray]: + """ + Apply colormap on tile data. + + Attributes + ---------- + data : numpy ndarray + 1D image array to translate to RGB. + colormap : dict + GDAL RGBA Color Table dictionary. + + Returns + ------- + data : numpy.ndarray + RGB data. + mask: numpy.ndarray + Alpha band. + + """ + if data.shape[0] > 1: + raise Exception("Source data must be 1 band") + + lookup_table = make_lut(colormap) + data = lookup_table[data[0], :] + + data = numpy.transpose(data, [2, 0, 1]) + + return data[:-1], data[-1] + + +def apply_discrete_cmap( + data: numpy.ndarray, colormap: Dict +) -> Tuple[numpy.ndarray, numpy.ndarray]: + """ + Apply discrete colormap. + + Note: This method is not used by default and left + to users to use within custom render methods. + + Attributes + ---------- + data : numpy ndarray + 1D image array to translate to RGB. 
+ color_map: dict + Discrete ColorMap dictionary + e.g: + { + 1: [255, 255, 255], + 2: [255, 0, 0] + } + Returns + ------- + arr: numpy.ndarray + """ + res = numpy.zeros((data.shape[1], data.shape[2], 4), dtype=numpy.uint8) + + for k, v in colormap.items(): + res[data[0] == k] = v + + data = numpy.transpose(res, [2, 0, 1]) + + return data[:-1], data[-1] diff --git a/rio_tiler/constants.py b/rio_tiler/constants.py new file mode 100644 index 00000000..fd720575 --- /dev/null +++ b/rio_tiler/constants.py @@ -0,0 +1,10 @@ +"""rio-tiler constant values.""" + +import os +import multiprocessing + +from rasterio.crs import CRS + +MAX_THREADS = int(os.environ.get("MAX_THREADS", multiprocessing.cpu_count() * 5)) +WEB_MERCATOR_CRS = CRS.from_epsg(3857) +WGS84_CRS = CRS.from_epsg(4326) diff --git a/rio_tiler/errors.py b/rio_tiler/errors.py index 2e222e59..ca1cb766 100644 --- a/rio_tiler/errors.py +++ b/rio_tiler/errors.py @@ -33,5 +33,9 @@ class DeprecationWarning(UserWarning): """Rio-tiler module deprecations warning.""" +class AlphaBandWarning(UserWarning): + """Automaticaly removed Alpha band from output array.""" + + class NoOverviewWarning(UserWarning): """Dataset has no overviews.""" diff --git a/rio_tiler/io/__init__.py b/rio_tiler/io/__init__.py new file mode 100644 index 00000000..d22870e3 --- /dev/null +++ b/rio_tiler/io/__init__.py @@ -0,0 +1 @@ +"""rio-tiler.io""" diff --git a/rio_tiler/io/cbers.py b/rio_tiler/io/cbers.py new file mode 100644 index 00000000..2dd3266d --- /dev/null +++ b/rio_tiler/io/cbers.py @@ -0,0 +1,239 @@ +"""rio_tiler.cbers: cbers processing.""" + +from typing import Any, Dict, Sequence, Tuple, Union + +import re + +import numpy + +import rasterio +from rasterio.warp import transform_bounds + +from rio_tiler import reader +from rio_tiler import constants +from rio_tiler.utils import tile_exists +from rio_tiler.errors import TileOutsideBounds, InvalidBandName, InvalidCBERSSceneId + + +def cbers_parser(sceneid: str) -> Dict: + """Parse CBERS 
scene id. + + Attributes + ---------- + sceneid : str + CBERS sceneid. + + Returns + ------- + out : dict + dictionary with metadata constructed from the sceneid. + + """ + if not re.match(r"^CBERS_4_\w+_[0-9]{8}_[0-9]{3}_[0-9]{3}_L[0-9]$", sceneid): + raise InvalidCBERSSceneId("Could not match {}".format(sceneid)) + + cbers_pattern = ( + r"(?P\w+)_" + r"(?P[0-9]{1})" + r"_" + r"(?P\w+)" + r"_" + r"(?P[0-9]{4})" + r"(?P[0-9]{2})" + r"(?P[0-9]{2})" + r"_" + r"(?P[0-9]{3})" + r"_" + r"(?P[0-9]{3})" + r"_" + r"(?PL[0-9]{1})$" + ) + + meta: Dict[str, Any] = re.match(cbers_pattern, sceneid, re.IGNORECASE).groupdict() + meta["scene"] = sceneid + + instrument = meta["instrument"] + instrument_params = { + "MUX": { + "reference_band": "6", + "bands": ("5", "6", "7", "8"), + "rgb": ("7", "6", "5"), + }, + "AWFI": { + "reference_band": "14", + "bands": ("13", "14", "15", "16"), + "rgb": ("15", "14", "13"), + }, + "PAN10M": { + "reference_band": "4", + "bands": ("2", "3", "4"), + "rgb": ("3", "4", "2"), + }, + "PAN5M": {"reference_band": "1", "bands": ("1"), "rgb": ("1", "1", "1")}, + } + meta["reference_band"] = instrument_params[instrument]["reference_band"] + meta["bands"] = instrument_params[instrument]["bands"] + meta["rgb"] = instrument_params[instrument]["rgb"] + + meta["scheme"] = "s3" + meta["bucket"] = "cbers-pds" + meta["prefix"] = "CBERS4/{instrument}/{path}/{row}/{scene}".format(**meta) + + return meta + + +def bounds(sceneid: str) -> Dict: + """ + Retrieve image bounds. + + Attributes + ---------- + sceneid : str + CBERS sceneid. + + Returns + ------- + out : dict + dictionary with image bounds. 
+ + """ + scene_params = cbers_parser(sceneid) + cbers_prefix = "{scheme}://{bucket}/{prefix}/{scene}".format(**scene_params) + + with rasterio.open( + "{}_BAND{}.tif".format(cbers_prefix, scene_params["reference_band"]) + ) as src: + bounds = transform_bounds( + src.crs, constants.WGS84_CRS, *src.bounds, densify_pts=21 + ) + + return dict(sceneid=sceneid, bounds=bounds) + + +def metadata( + sceneid: str, + pmin: float = 2.0, + pmax: float = 98.0, + hist_options: Dict = {}, + **kwargs: Any, +) -> Dict: + """ + Return band bounds and statistics. + + Attributes + ---------- + sceneid : str + CBERS sceneid. + pmin : int, optional, (default: 2) + Histogram minimum cut. + pmax : int, optional, (default: 98) + Histogram maximum cut. + hist_options : dict, optional + Options to forward to numpy.histogram function. + e.g: {bins=20, range=(0, 1000)} + kwargs : optional + These are passed to 'rio_tiler.reader.preview' + + Returns + ------- + out : dict + Dictionary with bounds and bands statistics. 
+ + """ + scene_params = cbers_parser(sceneid) + cbers_prefix = "{scheme}://{bucket}/{prefix}/{scene}".format(**scene_params) + + bands = scene_params["bands"] + addresses = [f"{cbers_prefix}_BAND{band}.tif" for band in bands] + + responses = reader.multi_metadata( + addresses, + indexes=[1], + nodata=0, + percentiles=(pmin, pmax), + hist_options=hist_options, + **kwargs, + ) + + info: Dict[str, Any] = dict(sceneid=sceneid) + info["instrument"] = scene_params["instrument"] + info["band_descriptions"] = [(ix + 1, b) for ix, b in enumerate(bands)] + + info["bounds"] = [ + r["bounds"] + for b, r in zip(bands, responses) + if b == scene_params["reference_band"] + ][0] + info["statistics"] = {b: d["statistics"][1] for b, d in zip(bands, responses)} + return info + + +def tile( + sceneid: str, + tile_x: int, + tile_y: int, + tile_z: int, + bands: Union[Sequence[str], str] = None, + tilesize: int = 256, + **kwargs: Dict, +) -> Tuple[numpy.ndarray, numpy.ndarray]: + """ + Create mercator tile from CBERS data. + + Attributes + ---------- + sceneid : str + CBERS sceneid. + tile_x : int + Mercator tile X index. + tile_y : int + Mercator tile Y index. + tile_z : int + Mercator tile ZOOM level. + bands : tuple or list or str, optional + Bands index for the RGB combination. If None uses default + defined for the instrument + tilesize : int, optional + Output image size. Default is 256 + kwargs: dict, optional + These will be passed to the 'rio_tiler.reader.tile' function. 
+ + Returns + ------- + data : numpy ndarray + mask: numpy array + + """ + if isinstance(bands, str): + bands = (bands,) + + scene_params = cbers_parser(sceneid) + + if not bands: + bands = scene_params["rgb"] + + for band in bands: + if band not in scene_params["bands"]: + raise InvalidBandName( + "{} is not a valid band name for {} CBERS instrument".format( + band, scene_params["instrument"] + ) + ) + + cbers_prefix = "{scheme}://{bucket}/{prefix}/{scene}".format(**scene_params) + with rasterio.open( + "{}_BAND{}.tif".format(cbers_prefix, scene_params["reference_band"]) + ) as src_dst: + bounds = transform_bounds( + src_dst.crs, constants.WGS84_CRS, *src_dst.bounds, densify_pts=21 + ) + + if not tile_exists(bounds, tile_z, tile_x, tile_y): + raise TileOutsideBounds( + "Tile {}/{}/{} is outside image bounds".format(tile_z, tile_x, tile_y) + ) + + addresses = [f"{cbers_prefix}_BAND{band}.tif" for band in bands] + return reader.multi_tile( + addresses, tile_x, tile_y, tile_z, tilesize=tilesize, nodata=0 + ) diff --git a/rio_tiler/io/cogeo.py b/rio_tiler/io/cogeo.py new file mode 100644 index 00000000..af3b58f7 --- /dev/null +++ b/rio_tiler/io/cogeo.py @@ -0,0 +1,274 @@ +"""rio_tiler.io.cogeo: raster processing.""" + +from typing import Any, Dict, Tuple, List, Optional + +import numpy + +import rasterio +from rasterio.crs import CRS +from rasterio.warp import transform_bounds + +from rio_tiler import reader +from rio_tiler import constants +from rio_tiler.utils import has_alpha_band, has_mask_band +from rio_tiler.mercator import get_zooms + + +def spatial_info(address: str) -> Dict: + """ + Return COGEO spatial info. + + Attributes + ---------- + address : str or PathLike object + A dataset path or URL. Will be opened in "r" mode. + + Returns + ------- + out : dict. 
+ + """ + with rasterio.open(address) as src_dst: + minzoom, maxzoom = get_zooms(src_dst) + bounds = transform_bounds( + src_dst.crs, constants.WGS84_CRS, *src_dst.bounds, densify_pts=21 + ) + center = [(bounds[0] + bounds[2]) / 2, (bounds[1] + bounds[3]) / 2, minzoom] + + return dict( + address=address, bounds=bounds, center=center, minzoom=minzoom, maxzoom=maxzoom + ) + + +def bounds(address: str) -> Dict: + """ + Retrieve image bounds. + + Attributes + ---------- + address : str + file url. + + Returns + ------- + out : dict + dictionary with image bounds. + + """ + with rasterio.open(address) as src_dst: + bounds = transform_bounds( + src_dst.crs, constants.WGS84_CRS, *src_dst.bounds, densify_pts=21 + ) + return dict(address=address, bounds=bounds) + + +def metadata( + address: str, + pmin: float = 2.0, + pmax: float = 98.0, + hist_options: Dict = {}, + **kwargs: Any, +) -> Dict: + """ + Return image statistics. + + Attributes + ---------- + address : str or PathLike object + A dataset path or URL. Will be opened in "r" mode. + pmin : int, optional, (default: 2) + Histogram minimum cut. + pmax : int, optional, (default: 98) + Histogram maximum cut. + hist_options : dict, optional + Options to forward to numpy.histogram function. + e.g: {bins=20, range=(0, 1000)} + kwargs : optional + These are passed to 'rio_tiler.reader.preview' + + Returns + ------- + out : dict + Dictionary with image bounds and bands statistics. + + """ + with rasterio.open(address) as src_dst: + meta = reader.metadata( + src_dst, percentiles=(pmin, pmax), hist_options=hist_options, **kwargs + ) + + return dict(address=address, **meta) + + +def info(address: str) -> Dict: + """ + Return simple metadata about the file. + + Attributes + ---------- + address : str or PathLike object + A dataset path or URL. Will be opened in "r" mode. + + Returns + ------- + out : dict. 
+ + """ + with rasterio.open(address) as src_dst: + minzoom, maxzoom = get_zooms(src_dst) + bounds = transform_bounds( + src_dst.crs, constants.WGS84_CRS, *src_dst.bounds, densify_pts=21 + ) + center = [(bounds[0] + bounds[2]) / 2, (bounds[1] + bounds[3]) / 2, minzoom] + + def _get_descr(ix): + """Return band description.""" + name = src_dst.descriptions[ix - 1] + if not name: + name = "band{}".format(ix) + return name + + band_descriptions = [(ix, _get_descr(ix)) for ix in src_dst.indexes] + tags = [(ix, src_dst.tags(ix)) for ix in src_dst.indexes] + + other_meta = dict() + if src_dst.scales[0] and src_dst.offsets[0]: + other_meta.update(dict(scale=src_dst.scales[0])) + other_meta.update(dict(offset=src_dst.offsets[0])) + + if has_alpha_band(src_dst): + nodata_type = "Alpha" + elif has_mask_band(src_dst): + nodata_type = "Mask" + elif src_dst.nodata is not None: + nodata_type = "Nodata" + else: + nodata_type = "None" + + try: + cmap = src_dst.colormap(1) + other_meta.update(dict(colormap=cmap)) + except ValueError: + pass + + return dict( + address=address, + bounds=bounds, + center=center, + minzoom=minzoom, + maxzoom=maxzoom, + band_metadata=tags, + band_descriptions=band_descriptions, + dtype=src_dst.meta["dtype"], + colorinterp=[src_dst.colorinterp[ix - 1].name for ix in src_dst.indexes], + nodata_type=nodata_type, + **other_meta, + ) + + +def tile( + address: str, + tile_x: int, + tile_y: int, + tile_z: int, + tilesize: int = 256, + **kwargs: Any, +) -> Tuple[numpy.ndarray, numpy.ndarray]: + """ + Create mercator tile from any images. + + Attributes + ---------- + address : str + file url. + tile_x : int + Mercator tile X index. + tile_y : int + Mercator tile Y index. + tile_z : int + Mercator tile ZOOM level. + tilesize : int, optional (default: 256) + Output image size. + kwargs: dict, optional + These will be passed to the 'rio_tiler.reader.tile' function. 
+ + Returns + ------- + data : numpy ndarray + mask: numpy array + + """ + with rasterio.open(address) as src_dst: + return reader.tile(src_dst, tile_x, tile_y, tile_z, tilesize, **kwargs) + + +def point(address: str, lon: float, lat: float, **kwargs: Any) -> List: + """ + Read point value from a file. + + Attributes + ---------- + address: str + file url. + lon: float + Longitude + lat: float + Latittude. + kwargs: dict, optional + These will be passed to the 'rio_tiler.reader.point' function. + + Returns + ------- + point: list + List of pixel values per bands indexes. + + """ + with rasterio.open(address) as src_dst: + return reader.point(src_dst, (lon, lat), **kwargs) + + +def area( + address: str, + bbox: Tuple[float, float, float, float], + dst_crs: Optional[CRS] = None, + bounds_crs: CRS = constants.WGS84_CRS, + max_size: int = 1024, + **kwargs: Any, +) -> Tuple[numpy.ndarray, numpy.ndarray]: + + """ + Read value from a bbox. + + Attributes + ---------- + address: str + file url. + bbox: tuple + bounds to read (left, bottom, right, top) in "bounds_crs". + dst_crs: CRS or str, optional + Target coordinate reference system, default is the dataset CRS. + bounds_crs: CRS or str, optional + bounds coordinate reference system, default is "epsg:4326" + max_size: int, optional + Limit output size array, default is 1024. + kwargs: dict, optional + These will be passed to the 'rio_tiler.reader.part' function. 
+ + Returns + ------- + data : numpy ndarray + mask: numpy array + + """ + with rasterio.open(address) as src_dst: + if not dst_crs: + dst_crs = src_dst.crs + + return reader.part( + src_dst, + bbox, + max_size=max_size, + bounds_crs=bounds_crs, + dst_crs=dst_crs, + **kwargs, + ) diff --git a/rio_tiler/io/landsat8.py b/rio_tiler/io/landsat8.py new file mode 100644 index 00000000..f7dd5933 --- /dev/null +++ b/rio_tiler/io/landsat8.py @@ -0,0 +1,364 @@ +"""rio_tiler.io.landsat8: Landsat-8 processing.""" + +from typing import Any, Dict, Sequence, Tuple, Union + +import os +import re +import datetime +from concurrent import futures + +from urllib.request import urlopen + +import numpy + +import rasterio +from rasterio.warp import transform_bounds +from rio_toa import reflectance, brightness_temp, toa_utils + +from rio_tiler import reader +from rio_tiler import constants +from rio_tiler.utils import _stats as raster_stats, tile_exists, pansharpening_brovey +from rio_tiler.errors import ( + TileOutsideBounds, + InvalidBandName, + InvalidLandsatSceneId, +) + +LANDSAT_BANDS = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "QA"] + + +def landsat_parser(sceneid: str) -> Dict: + """ + Parse Landsat-8 scene id. + + Author @perrygeo - http://www.perrygeo.com + + Attributes + ---------- + sceneid : str + Landsat sceneid. + + Returns + ------- + out : dict + dictionary with metadata constructed from the sceneid. 
+ + """ + pre_collection = r"(L[COTEM]8\d{6}\d{7}[A-Z]{3}\d{2})" + collection_1 = r"(L[COTEM]08_L\d{1}[A-Z]{2}_\d{6}_\d{8}_\d{8}_\d{2}_(T1|T2|RT))" + if not re.match("^{}|{}$".format(pre_collection, collection_1), sceneid): + raise InvalidLandsatSceneId("Could not match {}".format(sceneid)) + + precollection_pattern = ( + r"^L" + r"(?P\w{1})" + r"(?P\w{1})" + r"(?P[0-9]{3})" + r"(?P[0-9]{3})" + r"(?P[0-9]{4})" + r"(?P[0-9]{3})" + r"(?P\w{3})" + r"(?P[0-9]{2})$" + ) + + collection_pattern = ( + r"^L" + r"(?P\w{1})" + r"(?P\w{2})" + r"_" + r"(?P\w{4})" + r"_" + r"(?P[0-9]{3})" + r"(?P[0-9]{3})" + r"_" + r"(?P[0-9]{4})" + r"(?P[0-9]{2})" + r"(?P[0-9]{2})" + r"_" + r"(?P[0-9]{4})" + r"(?P[0-9]{2})" + r"(?P[0-9]{2})" + r"_" + r"(?P\w{2})" + r"_" + r"(?P\w{2})$" + ) + + for pattern in [collection_pattern, precollection_pattern]: + match = re.match(pattern, sceneid, re.IGNORECASE) + if match: + meta: Dict[str, Any] = match.groupdict() + break + + meta["scene"] = sceneid + if meta.get("acquisitionJulianDay"): + date = datetime.datetime( + int(meta["acquisitionYear"]), 1, 1 + ) + datetime.timedelta(int(meta["acquisitionJulianDay"]) - 1) + + meta["date"] = date.strftime("%Y-%m-%d") + else: + meta["date"] = "{}-{}-{}".format( + meta["acquisitionYear"], meta["acquisitionMonth"], meta["acquisitionDay"] + ) + + collection = meta.get("collectionNumber", "") + if collection != "": + collection = "c{}".format(int(collection)) + + meta["scheme"] = "s3" + meta["bucket"] = "landsat-pds" + meta["prefix"] = os.path.join(collection, "L8", meta["path"], meta["row"], sceneid) + + return meta + + +def _landsat_get_mtl(sceneid: str) -> Dict: + """ + Get Landsat-8 MTL metadata. + + Attributes + ---------- + sceneid : str + Landsat sceneid. For scenes after May 2017, + sceneid have to be LANDSAT_PRODUCT_ID. + + Returns + ------- + out : dict + returns a JSON like object with the metadata. 
+ + """ + scene_params = landsat_parser(sceneid) + meta_file = "http://{bucket}.s3.amazonaws.com/{prefix}/{scene}_MTL.txt".format( + **scene_params + ) + metadata = str(urlopen(meta_file).read().decode()) + return toa_utils._parse_mtl_txt(metadata) + + +def _convert(arr: numpy.ndarray, band: str, metadata: Dict) -> numpy.ndarray: + """Convert DN to TOA or Temp.""" + if band in ["1", "2", "3", "4", "5", "6", "7", "8", "9"]: # OLI + multi_reflect = metadata["RADIOMETRIC_RESCALING"].get( + f"REFLECTANCE_MULT_BAND_{band}" + ) + add_reflect = metadata["RADIOMETRIC_RESCALING"].get( + f"REFLECTANCE_ADD_BAND_{band}" + ) + sun_elev = metadata["IMAGE_ATTRIBUTES"]["SUN_ELEVATION"] + + arr = 10000 * reflectance.reflectance( + arr, multi_reflect, add_reflect, sun_elev, src_nodata=0 + ) + + elif band in ["10", "11"]: # TIRS + multi_rad = metadata["RADIOMETRIC_RESCALING"].get(f"RADIANCE_MULT_BAND_{band}") + add_rad = metadata["RADIOMETRIC_RESCALING"].get(f"RADIANCE_ADD_BAND_{band}") + k1 = metadata["TIRS_THERMAL_CONSTANTS"].get(f"K1_CONSTANT_BAND_{band}") + k2 = metadata["TIRS_THERMAL_CONSTANTS"].get(f"K2_CONSTANT_BAND_{band}") + + arr = brightness_temp.brightness_temp(arr, multi_rad, add_rad, k1, k2) + + # TODO + # elif band == "QA": + + return arr + + +def bounds(sceneid: str) -> Dict: + """ + Retrieve image bounds. + + Attributes + ---------- + sceneid : str + Landsat sceneid. For scenes after May 2017, + sceneid have to be LANDSAT_PRODUCT_ID. + + Returns + ------- + out : dict + dictionary with image bounds. + + """ + meta: Dict = _landsat_get_mtl(sceneid)["L1_METADATA_FILE"] + + return dict( + sceneid=sceneid, + bounds=toa_utils._get_bounds_from_metadata(meta["PRODUCT_METADATA"]), + ) + + +def metadata( + sceneid: str, + pmin: float = 2.0, + pmax: float = 98.0, + hist_options: Dict = {}, + **kwargs: Any, +) -> Dict: + """ + Retrieve image bounds and band statistics. + + Attributes + ---------- + sceneid : str + Landsat sceneid. 
For scenes after May 2017, + sceneid have to be LANDSAT_PRODUCT_ID. + pmin : int, optional, (default: 2) + Histogram minimum cut. + pmax : int, optional, (default: 98) + Histogram maximum cut. + hist_options : dict, optional + Options to forward to numpy.histogram function. + e.g: {bins=20, range=(0, 1000)} + kwargs : optional + These are passed to 'rio_tiler.reader.preview' + + Returns + ------- + out : dict + Dictionary with bounds and bands statistics. + + """ + scene_params = landsat_parser(sceneid) + meta: Dict = _landsat_get_mtl(sceneid)["L1_METADATA_FILE"] + + landsat_prefix = "{scheme}://{bucket}/{prefix}/{scene}".format(**scene_params) + + def worker(band: str): + asset = f"{landsat_prefix}_B{band}.TIF" + + if band == "QA": + nodata = 1 + resamp = "nearest" + else: + nodata = 0 + resamp = "bilinear" + + with rasterio.open(asset) as src_dst: + bounds = transform_bounds( + src_dst.crs, constants.WGS84_CRS, *src_dst.bounds, densify_pts=21 + ) + data, mask = reader.preview( + src_dst, nodata=nodata, resampling_method=resamp, **kwargs + ) + + if band != "QA": + data = data.astype("float32", casting="unsafe") + data = _convert(data, band, meta) + + data = numpy.ma.array(data) + data.mask = mask == 0 + + statistics = raster_stats(data, percentiles=(pmin, pmax), **hist_options) + return dict(bounds=bounds, statistics=statistics) + + with futures.ThreadPoolExecutor(max_workers=constants.MAX_THREADS) as executor: + responses = list(executor.map(worker, LANDSAT_BANDS)) + + info: Dict[str, Any] = dict(sceneid=sceneid) + info["band_descriptions"] = [(ix + 1, b) for ix, b in enumerate(LANDSAT_BANDS)] + info["bounds"] = [ + r["bounds"] for b, r in zip(LANDSAT_BANDS, responses) if b == "8" + ][0] + + info["statistics"] = {b: d["statistics"] for b, d in zip(LANDSAT_BANDS, responses)} + return info + + +def tile( + sceneid: str, + tile_x: int, + tile_y: int, + tile_z: int, + bands: Union[Sequence[str], str] = ["4", "3", "2"], + tilesize: int = 256, + pan: bool = False, + 
**kwargs: Any, +) -> Tuple[numpy.ndarray, numpy.ndarray]: + """ + Create mercator tile from Landsat-8 data. + + Attributes + ---------- + sceneid : str + Landsat sceneid. For scenes after May 2017, + sceneid have to be LANDSAT_PRODUCT_ID. + tile_x : int + Mercator tile X index. + tile_y : int + Mercator tile Y index. + tile_z : int + Mercator tile ZOOM level. + bands : tuple, str, optional (default: ("4", "3", "2")) + Bands index for the RGB combination. + tilesize : int, optional (default: 256) + Output image size. + pan : boolean, optional (default: False) + If True, apply pan-sharpening. + kwargs: dict, optional + These will be passed to the 'rio_tiler.utils._tile_read' function. + + Returns + ------- + data : numpy ndarray + mask: numpy array + + """ + if isinstance(bands, str): + bands = (bands,) + + for band in bands: + if band not in LANDSAT_BANDS: + raise InvalidBandName("{} is not a valid Landsat band name".format(band)) + + scene_params = landsat_parser(sceneid) + + meta: Dict = _landsat_get_mtl(sceneid)["L1_METADATA_FILE"] + + landsat_prefix = "{scheme}://{bucket}/{prefix}/{scene}".format(**scene_params) + + bounds = toa_utils._get_bounds_from_metadata(meta["PRODUCT_METADATA"]) + if not tile_exists(bounds, tile_z, tile_x, tile_y): + raise TileOutsideBounds( + "Tile {}/{}/{} is outside image bounds".format(tile_z, tile_x, tile_y) + ) + + def worker(band: str): + asset = f"{landsat_prefix}_B{band}.TIF" + + if band == "QA": + nodata = 1 + resamp = "nearest" + else: + nodata = 0 + resamp = "bilinear" + + with rasterio.open(asset) as src_dst: + tile, mask = reader.tile( + src_dst, + tile_x, + tile_y, + tile_z, + tilesize=tilesize, + nodata=nodata, + resampling_method=resamp, + ) + + return tile, mask + + with futures.ThreadPoolExecutor(max_workers=constants.MAX_THREADS) as executor: + data, masks = zip(*list(executor.map(worker, bands))) + data = numpy.concatenate(data) + mask = numpy.all(masks, axis=0).astype(numpy.uint8) * 255 + + if pan: + pan_data, mask = 
worker("8") + data = pansharpening_brovey(data, pan_data, 0.2, pan_data.dtype) + + if bands[0] != "QA" or len(bands) != 1: + for bdx, band in enumerate(bands): + data[bdx] = _convert(data[bdx], band, meta) + + return data, mask diff --git a/rio_tiler/io/sentinel1.py b/rio_tiler/io/sentinel1.py new file mode 100644 index 00000000..87ea4f92 --- /dev/null +++ b/rio_tiler/io/sentinel1.py @@ -0,0 +1,280 @@ +"""rio_tiler.io.sentinel1: Sentinel-1 processing.""" + +from typing import Any, Dict, Sequence, Tuple, Union + +import os +import re +import json +from concurrent import futures + +import numpy + +from boto3.session import Session as boto3_session + +import rasterio +from rasterio import transform +from rasterio.vrt import WarpedVRT + +from rio_tiler import reader +from rio_tiler import constants +from rio_tiler.errors import InvalidBandName, InvalidSentinelSceneId + +REGION = os.environ.get("AWS_REGION", "eu-central-1") +SENTINEL_BANDS = ["vv", "vh"] + + +def _aws_get_object( + bucket: str, + key: str, + request_pays: bool = True, + client: boto3_session.client = None, +) -> bytes: + """AWS s3 get object content.""" + if not client: + session = boto3_session(region_name=REGION) + client = session.client("s3") + + params = {"Bucket": bucket, "Key": key} + if request_pays: + params["RequestPayer"] = "requester" + + response = client.get_object(**params) + + return response["Body"].read() + + +def sentinel1_parser(sceneid: str) -> Dict: + """ + Parse Sentinel-1 scene id. + + Attributes + ---------- + sceneid : str + Sentinel-1 sceneid. + + Returns + ------- + out : dict + dictionary with metadata constructed from the sceneid. 
+ + """ + if not re.match( + "^S1[AB]_(IW)|(EW)_[A-Z]{3}[FHM]_[0-9][SA][A-Z]{2}_[0-9]{8}T[0-9]{6}_[0-9]{8}T[0-9]{6}_[0-9A-Z]{6}_[0-9A-Z]{6}_[0-9A-Z]{4}$", + sceneid, + ): + raise InvalidSentinelSceneId("Could not match {}".format(sceneid)) + + sentinel_pattern = ( + r"^S" + r"(?P\w{1})" + r"(?P[AB]{1})" + r"_" + r"(?P[A-Z]{2})" + r"_" + r"(?P[A-Z]{3})" + r"(?P[FHM])" + r"_" + r"(?P[0-9])" + r"(?P[SA])" + r"(?P(SH)|(SV)|(DH)|(DV)|(HH)|(HV)|(VV)|(VH))" + r"_" + r"(?P[0-9]{8}T[0-9]{6})" + r"_" + r"(?P[0-9]{8}T[0-9]{6})" + r"_" + r"(?P[0-9]{6})" + r"_" + r"(?P[0-9A-Z]{6})" + r"_" + r"(?P[0-9A-Z]{4})$" + ) + + meta: Dict[str, Any] = re.match( + sentinel_pattern, sceneid, re.IGNORECASE + ).groupdict() + + meta["scene"] = sceneid + year = meta["startDateTime"][0:4] + month = meta["startDateTime"][4:6].strip("0") + day = meta["startDateTime"][6:8].strip("0") + + meta["scheme"] = "s3" + meta["bucket"] = "sentinel-s1-l1c" + meta["prefix"] = os.path.join( + meta["product"], year, month, day, meta["beam"], meta["polarisation"], sceneid + ) + + return meta + + +def _get_bounds(scene_info: Dict) -> Tuple[float, float, float, float]: + bucket, prefix = scene_info["bucket"], scene_info["prefix"] + product_info = json.loads(_aws_get_object(bucket, f"{prefix}/productInfo.json")) + + xyz = list(zip(*product_info["footprint"]["coordinates"][0])) + return min(xyz[0]), min(xyz[1]), max(xyz[0]), max(xyz[1]) + + +def bounds(sceneid: str) -> Dict: + """ + Retrieve image bounds. + + Attributes + ---------- + sceneid : str + Sentinel-1 sceneid. + + Returns + ------- + out : dict + dictionary with image bounds. + + """ + scene_params = sentinel1_parser(sceneid) + return dict(sceneid=sceneid, bounds=_get_bounds(scene_params)) + + +def metadata( + sceneid: str, + pmin: float = 2.0, + pmax: float = 98.0, + bands: Union[Sequence[str], str] = None, + hist_options: Dict = {}, + **kwargs, +) -> Dict: + """ + Retrieve image bounds and band statistics. 
+ + Attributes + ---------- + sceneid : str + Sentinel-1 sceneid. + pmin : float, optional, (default: 2.) + Histogram minimum cut. + pmax : float, optional, (default: 98.) + Histogram maximum cut. + bands: tuple, str, required + Bands name (e.g vv, vh). + kwargs : optional + These are passed to 'rio_tiler.utils._stats' + e.g: bins=20, range=(0, 1000) + + Returns + ------- + out : dict + Dictionary with image bounds and bands statistics. + + """ + if not bands: + raise InvalidBandName("bands is required") + + if isinstance(bands, str): + bands = (bands,) + + for band in bands: + if band not in SENTINEL_BANDS: + raise InvalidBandName("{} is not a valid Sentinel band name".format(band)) + + scene_params = sentinel1_parser(sceneid) + sentinel_prefix = "{scheme}://{bucket}/{prefix}/measurement".format(**scene_params) + + def worker(band: str): + asset = "{}/{}-{}.tiff".format( + sentinel_prefix, scene_params["beam"].lower(), band + ) + with rasterio.open(asset) as src_dst: + with WarpedVRT( + src_dst, + src_crs=src_dst.gcps[1], + src_transform=transform.from_gcps(src_dst.gcps[0]), + src_nodata=0, + ) as vrt_dst: + return reader.metadata( + vrt_dst, + percentiles=(pmin, pmax), + hist_options=hist_options, + **kwargs, + ) + + with futures.ThreadPoolExecutor(max_workers=constants.MAX_THREADS) as executor: + responses = list(executor.map(worker, bands)) + + info = dict( + sceneid=sceneid, + bounds=responses[0]["bounds"], + band_descriptions=[(ix + 1, b) for ix, b in enumerate(bands)], + ) + + info["statistics"] = { + b: v for b, d in zip(bands, responses) for _, v in d["statistics"].items() + } + return info + + +def tile( + sceneid: str, + tile_x: int, + tile_y: int, + tile_z: int, + bands: Union[Sequence[str], str] = None, + tilesize: int = 256, + **kwargs: Any, +) -> Tuple[numpy.ndarray, numpy.ndarray]: + """ + Create mercator tile from Sentinel-1 data. + + Attributes + ---------- + sceneid : str + Sentinel-2 sceneid. + tile_x : int + Mercator tile X index. 
+ tile_y : int + Mercator tile Y index. + tile_z : int + Mercator tile ZOOM level. + bands: tuple, str, required + Bands name (e.g vv, vh). + tilesize : int, optional (default: 256) + Output image size. + + Returns + ------- + data : numpy ndarray + mask: numpy array + + """ + if not bands: + raise InvalidBandName("bands is required") + + if isinstance(bands, str): + bands = (bands,) + + for band in bands: + if band not in SENTINEL_BANDS: + raise InvalidBandName("{} is not a valid Sentinel band name".format(band)) + + scene_params = sentinel1_parser(sceneid) + sentinel_prefix = "{scheme}://{bucket}/{prefix}/measurement".format(**scene_params) + + def worker(band: str): + asset = "{}/{}-{}.tiff".format( + sentinel_prefix, scene_params["beam"].lower(), band + ) + with rasterio.open(asset) as src_dst: + with WarpedVRT( + src_dst, + src_crs=src_dst.gcps[1], + src_transform=transform.from_gcps(src_dst.gcps[0]), + src_nodata=0, + ) as vrt_dst: + return reader.tile( + vrt_dst, tile_x, tile_y, tile_z, tilesize=tilesize, **kwargs + ) + + with futures.ThreadPoolExecutor(max_workers=constants.MAX_THREADS) as executor: + data, masks = zip(*list(executor.map(worker, bands))) + data = numpy.concatenate(data) + mask = numpy.all(masks, axis=0).astype(numpy.uint8) * 255 + + return data, mask diff --git a/rio_tiler/io/sentinel2.py b/rio_tiler/io/sentinel2.py new file mode 100644 index 00000000..7e516ec1 --- /dev/null +++ b/rio_tiler/io/sentinel2.py @@ -0,0 +1,280 @@ +"""rio_tiler.reader.sentinel2: Sentinel-2 processing.""" + +from typing import Any, Dict, Sequence, Tuple, Union + +import os +import re +import itertools +from collections import OrderedDict + +import numpy + +import rasterio +from rasterio.warp import transform_bounds + +from rio_tiler import reader +from rio_tiler import constants +from rio_tiler.utils import tile_exists +from rio_tiler.errors import TileOutsideBounds, InvalidBandName, InvalidSentinelSceneId + + +SENTINEL_L1_BANDS = OrderedDict( + [ + ("10", ["02", 
"03", "04", "08"]), + ("20", ["05", "06", "07", "11", "12", "8A"]), + ("60", ["01", "09", "10"]), + ] +) + +SENTINEL_L2_BANDS = OrderedDict( + [ + ("10", ["02", "03", "04", "08"]), + ("20", ["02", "03", "04", "05", "06", "07", "08", "11", "12", "8A"]), + ( + "60", + ["01", "02", "03", "04", "05", "06", "07", "08", "09", "11", "12", "8A"], + ), + ] +) + +SENTINEL_L2_PRODUCTS = OrderedDict( + [ + ("10", ["AOT", "WVP"]), + ("20", ["AOT", "SCL", "WVP"]), + ("60", ["AOT", "SCL", "WVP"]), + ] +) + + +def sentinel2_parser(sceneid: str) -> Dict: + """ + Parse Sentinel-2 scene id. + + Attributes + ---------- + sceneid : str + Sentinel-2 sceneid. + + Returns + ------- + out : dict + dictionary with metadata constructed from the sceneid. + + """ + + if not re.match("^S2[AB]_L[0-2][A-C]_[0-9]{8}_[0-9]{2}[A-Z]{3}_[0-9]$", sceneid): + raise InvalidSentinelSceneId("Could not match {}".format(sceneid)) + + sentinel_pattern = ( + r"^S" + r"(?P\w{1})" + r"(?P[AB]{1})" + r"_" + r"(?PL[0-2][ABC])" + r"_" + r"(?P[0-9]{4})" + r"(?P[0-9]{2})" + r"(?P[0-9]{2})" + r"_" + r"(?P[0-9]{2})" + r"(?P\w{1})" + r"(?P\w{2})" + r"_" + r"(?P[0-9]{1})$" + ) + + meta: Dict[str, Any] = re.match( + sentinel_pattern, sceneid, re.IGNORECASE + ).groupdict() + meta["scene"] = sceneid + + utm_zone = meta["utm"].lstrip("0") + grid_square = meta["sq"] + latitude_band = meta["lat"] + year = meta["acquisitionYear"] + month = meta["acquisitionMonth"].lstrip("0") + day = meta["acquisitionDay"].lstrip("0") + img_num = meta["num"] + + meta["scheme"] = "s3" + meta["bucket"] = "sentinel-s2-" + meta["processingLevel"].lower() + meta["prefix"] = os.path.join( + "tiles", utm_zone, latitude_band, grid_square, year, month, day, img_num + ) + + if meta["processingLevel"] == "L1C": + meta["preview_file"] = "preview.jp2" + meta["preview_prefix"] = "" + meta["bands"] = list( + itertools.chain.from_iterable( + [bands for _, bands in SENTINEL_L1_BANDS.items()] + ) + ) + meta["valid_bands"] = meta["bands"] + else: + 
meta["preview_file"] = "R60m/TCI.jp2" + meta["preview_prefix"] = "R60m" + meta["bands"] = SENTINEL_L2_BANDS["60"] + meta["valid_bands"] = meta["bands"] + SENTINEL_L2_PRODUCTS["60"] + + return meta + + +def _l2_prefixed_band(band: str) -> str: + """Return L2A prefixed bands name.""" + if band in SENTINEL_L2_BANDS["60"]: + for res, bands in SENTINEL_L2_BANDS.items(): + if band in bands: + return "R{}m/B{}".format(res, band) + elif band in SENTINEL_L2_PRODUCTS["60"]: + for res, bands in SENTINEL_L2_PRODUCTS.items(): + if band in bands: + return "R{}m/{}".format(res, band) + + raise InvalidBandName("{} is not a valid Sentinel band name".format(band)) + + +def bounds(sceneid: str) -> Dict: + """ + Retrieve image bounds. + + Attributes + ---------- + sceneid : str + Sentinel-2 sceneid. + + Returns + ------- + out : dict + dictionary with image bounds. + + """ + scene_params = sentinel2_parser(sceneid) + preview_file = "{scheme}://{bucket}/{prefix}/{preview_file}".format(**scene_params) + with rasterio.open(preview_file) as src_dst: + bounds = transform_bounds( + src_dst.crs, constants.WGS84_CRS, *src_dst.bounds, densify_pts=21 + ) + + return dict(sceneid=sceneid, bounds=bounds) + + +def metadata( + sceneid: str, + pmin: float = 2.0, + pmax: float = 98.0, + hist_options: Dict = {}, + **kwargs: Any, +) -> Dict: + """ + Retrieve image bounds and band statistics. + + Attributes + ---------- + sceneid : str + Sentinel-2 sceneid. + pmin : float, optional, (default: 2.) + Histogram minimum cut. + pmax : float, optional, (default: 98.) + Histogram maximum cut. + hist_options : dict, optional + Options to forward to numpy.histogram function. + e.g: {bins=20, range=(0, 1000)} + kwargs : optional + These are passed to 'rio_tiler.reader.preview' + + Returns + ------- + out : dict + Dictionary with image bounds and bands statistics. 
+ + """ + scene_params = sentinel2_parser(sceneid) + sentinel_prefix = "{scheme}://{bucket}/{prefix}/{preview_prefix}".format( + **scene_params + ) + bands = scene_params["bands"] + + addresses = [f"{sentinel_prefix}/B{band}.jp2" for band in bands] + + responses = reader.multi_metadata( + addresses, + indexes=[1], + nodata=0, + percentiles=(pmin, pmax), + hist_options=hist_options, + **kwargs, + ) + info: Dict[str, Any] = dict(sceneid=sceneid) + info["band_descriptions"] = [(ix + 1, b) for ix, b in enumerate(bands)] + info["bounds"] = responses[0]["bounds"] + info["statistics"] = { + b: v for b, d in zip(bands, responses) for k, v in d["statistics"].items() + } + return info + + +def tile( + sceneid: str, + tile_x: int, + tile_y: int, + tile_z: int, + bands: Union[Sequence[str], str] = ("04", "03", "02"), + tilesize: int = 256, + **kwargs: Dict, +) -> Tuple[numpy.ndarray, numpy.ndarray]: + """ + Create mercator tile from Sentinel-2 data. + + Attributes + ---------- + sceneid : str + Sentinel-2 sceneid. + tile_x : int + Mercator tile X index. + tile_y : int + Mercator tile Y index. + tile_z : int + Mercator tile ZOOM level. + bands : tuple, str, optional (default: ('04', '03', '02')) + Bands index for the RGB combination. + tilesize : int, optional (default: 256) + Output image size. + kwargs: dict, optional + These will be passed to the 'rio_tiler.utils._tile_read' function. 
+ + Returns + ------- + data : numpy ndarray + mask: numpy array + + """ + if isinstance(bands, str): + bands = (bands,) + + scene_params = sentinel2_parser(sceneid) + for band in bands: + if band not in scene_params["valid_bands"]: + raise InvalidBandName("{} is not a valid Sentinel band name".format(band)) + + sentinel_prefix = "{scheme}://{bucket}/{prefix}".format(**scene_params) + + preview_file = os.path.join(sentinel_prefix, scene_params["preview_file"]) + with rasterio.open(preview_file) as src: + bounds = transform_bounds( + src.crs, constants.WGS84_CRS, *src.bounds, densify_pts=21 + ) + if not tile_exists(bounds, tile_z, tile_x, tile_y): + raise TileOutsideBounds( + "Tile {}/{}/{} is outside image bounds".format(tile_z, tile_x, tile_y) + ) + + if scene_params["processingLevel"] == "L2A": + bands = [_l2_prefixed_band(b) for b in bands] + else: + bands = ["B{}".format(b) for b in bands] + + addresses = [f"{sentinel_prefix}/{band}.jp2" for band in bands] + return reader.multi_tile( + addresses, tile_x, tile_y, tile_z, tilesize=tilesize, nodata=0 + ) diff --git a/rio_tiler/io/stac.py b/rio_tiler/io/stac.py new file mode 100644 index 00000000..36568d29 --- /dev/null +++ b/rio_tiler/io/stac.py @@ -0,0 +1,151 @@ +"""rio_tiler.io.stac: STAC reader.""" + +from typing import Any, Dict, Sequence, Tuple + +import numpy + +from rio_tiler import reader +from rio_tiler.utils import tile_exists +from rio_tiler.errors import InvalidBandName, TileOutsideBounds + + +def _get_href(stac: Dict, assets: Sequence[str]) -> Sequence[str]: + """Validate asset names and return asset's url.""" + _assets = list(stac["assets"].keys()) + for asset in assets: + if asset not in _assets: + raise InvalidBandName(f"{asset} is not a valid asset name.") + + return [stac["assets"][asset]["href"] for asset in assets] + + +def spatial_info(stac: Dict) -> Dict: + """ + Return STAC spatial info. + + Attributes + ---------- + stac : dict + STAC item. + + Returns + ------- + out : dict. 
+ + """ + raise Exception("Not implemented") + + +def bounds(stac: Dict) -> Dict: + """ + Return STAC bounds. + + Attributes + ---------- + stac : dict + STAC item. + + Returns + ------- + out : dict + dictionary with image bounds. + + """ + return dict(id=stac["id"], bounds=stac["bbox"]) + + +def metadata( + stac: Dict, + assets: Sequence[str], + pmin: float = 2.0, + pmax: float = 98.0, + hist_options: Dict = {}, + **kwargs: Any, +) -> Dict: + """ + Return STAC assets statistics. + + Attributes + ---------- + stac : dict + STAC item. + assets : list + Asset names. + pmin : int, optional, (default: 2) + Histogram minimum cut. + pmax : int, optional, (default: 98) + Histogram maximum cut. + hist_options : dict, optional + Options to forward to numpy.histogram function. + e.g: {bins=20, range=(0, 1000)} + kwargs : optional + These are passed to 'rio_tiler.reader.preview' + + Returns + ------- + out : dict + Dictionary with image bounds and bands statistics. + + """ + if isinstance(assets, str): + assets = (assets,) + + assets_url = _get_href(stac, assets) + responses = reader.multi_metadata( + assets_url, percentiles=(pmin, pmax), hist_options=hist_options, **kwargs + ) + + info: Dict[str, Any] = dict(id=stac["id"]) + info["band_descriptions"] = [(ix + 1, b) for ix, b in enumerate(assets)] + info["bounds"] = stac["bbox"] + info["statistics"] = {b: d["statistics"][1] for b, d in zip(assets, responses)} + info["dtypes"] = {b: d["dtype"] for b, d in zip(assets, responses)} + info["nodata_types"] = {b: d["nodata_type"] for b, d in zip(assets, responses)} + return info + + +def tile( + stac: Dict, + assets: Sequence[str], + tile_x: int, + tile_y: int, + tile_z: int, + tilesize: int = 256, + **kwargs: Any, +) -> Tuple[numpy.ndarray, numpy.ndarray]: + """ + Create mercator tile from any images. + + Attributes + ---------- + stac : dict + STAC item. + assets : list + Asset names. + tile_x : int + Mercator tile X index. + tile_y : int + Mercator tile Y index. 
+ tile_z : int + Mercator tile ZOOM level. + tilesize : int, optional (default: 256) + Output image size. + kwargs: dict, optional + These will be passed to the 'rio_tiler.reader.tile' function. + + Returns + ------- + data : numpy ndarray + mask: numpy array + + """ + if isinstance(assets, str): + assets = (assets,) + + if not tile_exists(stac["bbox"], tile_z, tile_x, tile_y): + raise TileOutsideBounds( + f"Tile {tile_z}/{tile_x}/{tile_y} is outside item bounds" + ) + + assets_url = _get_href(stac, assets) + return reader.multi_tile(assets_url, tile_x, tile_y, tile_z, **kwargs) diff --git a/rio_tiler/landsat8.py b/rio_tiler/landsat8.py deleted file mode 100644 index 7a2ef7dd..00000000 --- a/rio_tiler/landsat8.py +++ /dev/null @@ -1,473 +0,0 @@ -"""rio_tiler.landsat8: Landsat-8 processing.""" - -import os -import re -import warnings -import datetime -import multiprocessing -from functools import partial -from concurrent import futures - -import numpy as np - -import mercantile - -import rasterio -from rasterio.crs import CRS -from rasterio.vrt import WarpedVRT -from rasterio.warp import transform_bounds -from rio_toa import reflectance, brightness_temp, toa_utils - -from rio_tiler import utils -from rio_tiler.errors import ( - TileOutsideBounds, - InvalidBandName, - InvalidLandsatSceneId, - NoOverviewWarning, -) - -# Python 2/3 -try: - from urllib.request import urlopen -except ImportError: - from urllib2 import urlopen - - -LANDSAT_BUCKET = "s3://landsat-pds" -LANDSAT_BANDS = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "QA"] - -# ref: https://docs.python.org/3/library/concurrent.futures.html#threadpoolexecutor -MAX_THREADS = int(os.environ.get("MAX_THREADS", multiprocessing.cpu_count() * 5)) - - -def _landsat_get_mtl(sceneid): - """ - Get Landsat-8 MTL metadata. - - Attributes - ---------- - sceneid : str - Landsat sceneid. For scenes after May 2017, - sceneid have to be LANDSAT_PRODUCT_ID. 
- - Returns - ------- - out : dict - returns a JSON like object with the metadata. - - """ - scene_params = _landsat_parse_scene_id(sceneid) - meta_file = "http://landsat-pds.s3.amazonaws.com/{}_MTL.txt".format( - scene_params["key"] - ) - metadata = str(urlopen(meta_file).read().decode()) - return toa_utils._parse_mtl_txt(metadata) - - -def _landsat_parse_scene_id(sceneid): - """ - Parse Landsat-8 scene id. - - Author @perrygeo - http://www.perrygeo.com - - """ - pre_collection = r"(L[COTEM]8\d{6}\d{7}[A-Z]{3}\d{2})" - collection_1 = r"(L[COTEM]08_L\d{1}[A-Z]{2}_\d{6}_\d{8}_\d{8}_\d{2}_(T1|T2|RT))" - if not re.match("^{}|{}$".format(pre_collection, collection_1), sceneid): - raise InvalidLandsatSceneId("Could not match {}".format(sceneid)) - - precollection_pattern = ( - r"^L" - r"(?P\w{1})" - r"(?P\w{1})" - r"(?P[0-9]{3})" - r"(?P[0-9]{3})" - r"(?P[0-9]{4})" - r"(?P[0-9]{3})" - r"(?P\w{3})" - r"(?P[0-9]{2})$" - ) - - collection_pattern = ( - r"^L" - r"(?P\w{1})" - r"(?P\w{2})" - r"_" - r"(?P\w{4})" - r"_" - r"(?P[0-9]{3})" - r"(?P[0-9]{3})" - r"_" - r"(?P[0-9]{4})" - r"(?P[0-9]{2})" - r"(?P[0-9]{2})" - r"_" - r"(?P[0-9]{4})" - r"(?P[0-9]{2})" - r"(?P[0-9]{2})" - r"_" - r"(?P\w{2})" - r"_" - r"(?P\w{2})$" - ) - - meta = None - for pattern in [collection_pattern, precollection_pattern]: - match = re.match(pattern, sceneid, re.IGNORECASE) - if match: - meta = match.groupdict() - break - - if meta.get("acquisitionJulianDay"): - date = datetime.datetime( - int(meta["acquisitionYear"]), 1, 1 - ) + datetime.timedelta(int(meta["acquisitionJulianDay"]) - 1) - - meta["date"] = date.strftime("%Y-%m-%d") - else: - meta["date"] = "{}-{}-{}".format( - meta["acquisitionYear"], meta["acquisitionMonth"], meta["acquisitionDay"] - ) - - collection = meta.get("collectionNumber", "") - if collection != "": - collection = "c{}".format(int(collection)) - - meta["key"] = os.path.join( - collection, "L8", meta["path"], meta["row"], sceneid, sceneid - ) - - meta["scene"] = sceneid - - 
return meta - - -def _landsat_stats( - band, - address_prefix, - metadata, - overview_level=None, - max_size=1024, - percentiles=(2, 98), - dst_crs=CRS({"init": "EPSG:4326"}), - histogram_bins=10, - histogram_range=None, -): - """ - Retrieve landsat dataset statistics. - - Attributes - ---------- - band : str - Landsat band number - address_prefix : str - A Landsat AWS S3 dataset prefix. - metadata : dict - Landsat metadata - overview_level : int, optional - Overview (decimation) level to fetch. - max_size: int, optional - Maximum size of dataset to retrieve - (will be used to calculate the overview level to fetch). - percentiles : tulple, optional - Percentile or sequence of percentiles to compute, - which must be between 0 and 100 inclusive (default: (2, 98)). - dst_crs: CRS or dict - Target coordinate reference system (default: EPSG:4326). - histogram_bins: int, optional - Defines the number of equal-width histogram bins (default: 10). - histogram_range: tuple or list, optional - The lower and upper range of the bins. If not provided, range is simply - the min and max of the array. - - Returns - ------- - out : dict - (percentiles), min, max, stdev, histogram for each band, - e.g. - { - "4": { - 'pc': [15, 121], - 'min': 1, - 'max': 162, - 'std': 27.22067722127997, - 'histogram': [ - [102934, 135489, 20981, 13548, 11406, 8799, 7351, 5622, 2985, 662] - [1., 17.1, 33.2, 49.3, 65.4, 81.5, 97.6, 113.7, 129.8, 145.9, 162.] 
- ] - } - } - """ - src_path = "{}_B{}.TIF".format(address_prefix, band) - with rasterio.open(src_path) as src: - levels = src.overviews(1) - width = src.width - height = src.height - bounds = transform_bounds(src.crs, dst_crs, *src.bounds, densify_pts=21) - - if len(levels): - if overview_level: - decim = levels[overview_level] - else: - # determine which zoom level to read - for ii, decim in enumerate(levels): - if max(width // decim, height // decim) < max_size: - break - else: - decim = 1 - warnings.warn( - "Dataset has no overviews, reading the full dataset", NoOverviewWarning - ) - - out_shape = (height // decim, width // decim) - - if band == "QA": - nodata = 1 - else: - nodata = 0 - - vrt_params = dict( - nodata=nodata, add_alpha=False, src_nodata=nodata, init_dest_nodata=False - ) - with WarpedVRT(src, **vrt_params) as vrt: - arr = vrt.read(out_shape=out_shape, indexes=[1], masked=True) - - if band in ["1", "2", "3", "4", "5", "6", "7", "8", "9"]: # OLI - multi_reflect = metadata["RADIOMETRIC_RESCALING"].get( - "REFLECTANCE_MULT_BAND_{}".format(band) - ) - add_reflect = metadata["RADIOMETRIC_RESCALING"].get( - "REFLECTANCE_ADD_BAND_{}".format(band) - ) - sun_elev = metadata["IMAGE_ATTRIBUTES"]["SUN_ELEVATION"] - - arr = 10000 * reflectance.reflectance( - arr, multi_reflect, add_reflect, sun_elev, src_nodata=0 - ) - elif band in ["10", "11"]: # TIRS - multi_rad = metadata["RADIOMETRIC_RESCALING"].get( - "RADIANCE_MULT_BAND_{}".format(band) - ) - add_rad = metadata["RADIOMETRIC_RESCALING"].get( - "RADIANCE_ADD_BAND_{}".format(band) - ) - k1 = metadata["TIRS_THERMAL_CONSTANTS"].get("K1_CONSTANT_BAND_{}".format(band)) - k2 = metadata["TIRS_THERMAL_CONSTANTS"].get("K2_CONSTANT_BAND_{}".format(band)) - - arr = brightness_temp.brightness_temp(arr, multi_rad, add_rad, k1, k2) - - params = {} - if histogram_bins: - params.update(dict(bins=histogram_bins)) - if histogram_range: - params.update(dict(range=histogram_range)) - - stats = {band: utils._stats(arr, 
percentiles=percentiles, **params)} - - return { - "bounds": { - "value": bounds, - "crs": dst_crs.to_string() if isinstance(dst_crs, CRS) else dst_crs, - }, - "statistics": stats, - } - - -def bounds(sceneid): - """ - Retrieve image bounds. - - Attributes - ---------- - sceneid : str - Landsat sceneid. For scenes after May 2017, - sceneid have to be LANDSAT_PRODUCT_ID. - - Returns - ------- - out : dict - dictionary with image bounds. - - """ - meta_data = _landsat_get_mtl(sceneid).get("L1_METADATA_FILE") - - info = {"sceneid": sceneid} - info["bounds"] = toa_utils._get_bounds_from_metadata(meta_data["PRODUCT_METADATA"]) - - return info - - -def metadata(sceneid, pmin=2, pmax=98, **kwargs): - """ - Retrieve image bounds and band statistics. - - Attributes - ---------- - sceneid : str - Landsat sceneid. For scenes after May 2017, - sceneid have to be LANDSAT_PRODUCT_ID. - pmin : int, optional, (default: 2) - Histogram minimum cut. - pmax : int, optional, (default: 98) - Histogram maximum cut. - kwargs : optional - These are passed to 'rio_tiler.landsat8._landsat_stats' - e.g: histogram_bins=20, dst_crs='epsg:4326' - - Returns - ------- - out : dict - Dictionary with bounds and bands statistics. 
- - """ - scene_params = _landsat_parse_scene_id(sceneid) - meta_data = _landsat_get_mtl(sceneid).get("L1_METADATA_FILE") - path_prefix = "{}/{}".format(LANDSAT_BUCKET, scene_params["key"]) - - info = {"sceneid": sceneid} - - _stats_worker = partial( - _landsat_stats, - address_prefix=path_prefix, - metadata=meta_data, - overview_level=1, - percentiles=(pmin, pmax), - **kwargs - ) - - with futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor: - responses = list(executor.map(_stats_worker, LANDSAT_BANDS)) - - info["bounds"] = [ - r["bounds"] for b, r in zip(LANDSAT_BANDS, responses) if b == "8" - ][0] - - info["statistics"] = { - b: v - for b, d in zip(LANDSAT_BANDS, responses) - for k, v in d["statistics"].items() - } - return info - - -def tile( - sceneid, - tile_x, - tile_y, - tile_z, - bands=("4", "3", "2"), - tilesize=256, - pan=False, - **kwargs -): - """ - Create mercator tile from Landsat-8 data. - - Attributes - ---------- - sceneid : str - Landsat sceneid. For scenes after May 2017, - sceneid have to be LANDSAT_PRODUCT_ID. - tile_x : int - Mercator tile X index. - tile_y : int - Mercator tile Y index. - tile_z : int - Mercator tile ZOOM level. - bands : tuple, str, optional (default: ("4", "3", "2")) - Bands index for the RGB combination. - tilesize : int, optional (default: 256) - Output image size. - pan : boolean, optional (default: False) - If True, apply pan-sharpening. - kwargs: dict, optional - These will be passed to the 'rio_tiler.utils._tile_read' function. 
- - Returns - ------- - data : numpy ndarray - mask: numpy array - - """ - if not isinstance(bands, tuple): - bands = tuple((bands,)) - - for band in bands: - if band not in LANDSAT_BANDS: - raise InvalidBandName("{} is not a valid Landsat band name".format(band)) - - scene_params = _landsat_parse_scene_id(sceneid) - meta_data = _landsat_get_mtl(sceneid).get("L1_METADATA_FILE") - landsat_address = "{}/{}".format(LANDSAT_BUCKET, scene_params["key"]) - - wgs_bounds = toa_utils._get_bounds_from_metadata(meta_data["PRODUCT_METADATA"]) - - if not utils.tile_exists(wgs_bounds, tile_z, tile_x, tile_y): - raise TileOutsideBounds( - "Tile {}/{}/{} is outside image bounds".format(tile_z, tile_x, tile_y) - ) - - mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z) - tile_bounds = mercantile.xy_bounds(mercator_tile) - - def _tiler(band): - address = "{}_B{}.TIF".format(landsat_address, band) - if band == "QA": - nodata = 1 - else: - nodata = 0 - - return utils.tile_read( - address, bounds=tile_bounds, tilesize=tilesize, nodata=nodata, **kwargs - ) - - with futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor: - data, masks = zip(*list(executor.map(_tiler, bands))) - data = np.concatenate(data) - mask = np.all(masks, axis=0).astype(np.uint8) * 255 - - if pan: - pan_address = "{}_B8.TIF".format(landsat_address) - matrix_pan, mask = utils.tile_read( - pan_address, tile_bounds, tilesize, nodata=0 - ) - data = utils.pansharpening_brovey(data, matrix_pan, 0.2, matrix_pan.dtype) - - sun_elev = meta_data["IMAGE_ATTRIBUTES"]["SUN_ELEVATION"] - - for bdx, band in enumerate(bands): - if band in ["1", "2", "3", "4", "5", "6", "7", "8", "9"]: # OLI - multi_reflect = meta_data["RADIOMETRIC_RESCALING"].get( - "REFLECTANCE_MULT_BAND_{}".format(band) - ) - - add_reflect = meta_data["RADIOMETRIC_RESCALING"].get( - "REFLECTANCE_ADD_BAND_{}".format(band) - ) - - data[bdx] = 10000 * reflectance.reflectance( - data[bdx], multi_reflect, add_reflect, sun_elev - ) - - elif band in 
["10", "11"]: # TIRS - multi_rad = meta_data["RADIOMETRIC_RESCALING"].get( - "RADIANCE_MULT_BAND_{}".format(band) - ) - - add_rad = meta_data["RADIOMETRIC_RESCALING"].get( - "RADIANCE_ADD_BAND_{}".format(band) - ) - - k1 = meta_data["TIRS_THERMAL_CONSTANTS"].get( - "K1_CONSTANT_BAND_{}".format(band) - ) - - k2 = meta_data["TIRS_THERMAL_CONSTANTS"].get( - "K2_CONSTANT_BAND_{}".format(band) - ) - - data[bdx] = brightness_temp.brightness_temp( - data[bdx], multi_rad, add_rad, k1, k2 - ) - - return data, mask diff --git a/rio_tiler/main.py b/rio_tiler/main.py deleted file mode 100644 index a9d24d57..00000000 --- a/rio_tiler/main.py +++ /dev/null @@ -1,94 +0,0 @@ -"""rio_tiler.main: raster processing.""" - -import mercantile -import rasterio -from rasterio.warp import transform_bounds - -from rio_tiler import utils -from rio_tiler.errors import TileOutsideBounds - - -def bounds(address): - """ - Retrieve image bounds. - - Attributes - ---------- - address : str - file url. - - Returns - ------- - out : dict - dictionary with image bounds. - - """ - with rasterio.open(address) as src: - bounds = transform_bounds(src.crs, "epsg:4326", *src.bounds, densify_pts=21) - - return {"url": address, "bounds": list(bounds)} - - -def metadata(address, pmin=2, pmax=98, **kwargs): - """ - Return image bounds and band statistics. - - Attributes - ---------- - address : str or PathLike object - A dataset path or URL. Will be opened in "r" mode. - pmin : int, optional, (default: 2) - Histogram minimum cut. - pmax : int, optional, (default: 98) - Histogram maximum cut. - kwargs : optional - These are passed to 'rio_tiler.utils.raster_get_stats' - e.g: overview_level=2, dst_crs='epsg:4326' - - Returns - ------- - out : dict - Dictionary with image bounds and bands statistics. 
- - """ - info = {"address": address} - info.update(utils.raster_get_stats(address, percentiles=(pmin, pmax), **kwargs)) - return info - - -def tile(address, tile_x, tile_y, tile_z, tilesize=256, **kwargs): - """ - Create mercator tile from any images. - - Attributes - ---------- - address : str - file url. - tile_x : int - Mercator tile X index. - tile_y : int - Mercator tile Y index. - tile_z : int - Mercator tile ZOOM level. - tilesize : int, optional (default: 256) - Output image size. - kwargs: dict, optional - These will be passed to the 'rio_tiler.utils._tile_read' function. - - Returns - ------- - data : numpy ndarray - mask: numpy array - - """ - with rasterio.open(address) as src: - bounds = transform_bounds(src.crs, "epsg:4326", *src.bounds, densify_pts=21) - - if not utils.tile_exists(bounds, tile_z, tile_x, tile_y): - raise TileOutsideBounds( - "Tile {}/{}/{} is outside image bounds".format(tile_z, tile_x, tile_y) - ) - - mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z) - tile_bounds = mercantile.xy_bounds(mercator_tile) - return utils.tile_read(src, tile_bounds, tilesize, **kwargs) diff --git a/rio_tiler/mercator.py b/rio_tiler/mercator.py index 0fc0ed1e..c3f7899e 100644 --- a/rio_tiler/mercator.py +++ b/rio_tiler/mercator.py @@ -1,25 +1,31 @@ """rio-tiler: mercator utility functions.""" +from typing import Tuple, Union + import math +from rasterio.io import DatasetReader, DatasetWriter +from rasterio.vrt import WarpedVRT from rasterio.warp import transform_bounds, calculate_default_transform +from rio_tiler import constants + -def _meters_per_pixel(zoom, lat=0.0, tilesize=256): +def _meters_per_pixel(zoom: int, lat: float = 0.0, tilesize: int = 256) -> float: """ Return the pixel resolution for a given mercator tile zoom and lattitude. Parameters ---------- - zoom: int - Mercator zoom level - lat: float, optional - Latitude in decimal degree (default: 0) - tilesize: int, optional - Mercator tile size (default: 256). 
+ zoom: int + Mercator zoom level + lat: float, optional + Latitude in decimal degree (default: 0) + tilesize: int, optional + Mercator tile size (default: 256). Returns ------- - Pixel resolution in meters + Pixel resolution in meters """ return (math.cos(lat * math.pi / 180.0) * 2 * math.pi * 6378137) / ( @@ -27,7 +33,7 @@ def _meters_per_pixel(zoom, lat=0.0, tilesize=256): ) -def zoom_for_pixelsize(pixel_size, max_z=24, tilesize=256): +def zoom_for_pixelsize(pixel_size: float, max_z: int = 24, tilesize: int = 256) -> int: """ Get mercator zoom level corresponding to a pixel resolution. @@ -36,16 +42,16 @@ def zoom_for_pixelsize(pixel_size, max_z=24, tilesize=256): Parameters ---------- - pixel_size: float - Pixel size - max_z: int, optional (default: 24) - Max mercator zoom level allowed - tilesize: int, optional - Mercator tile size (default: 256). + pixel_size: float + Pixel size + max_z: int, optional (default: 24) + Max mercator zoom level allowed + tilesize: int, optional + Mercator tile size (default: 256). Returns ------- - Mercator zoom level corresponding to the pixel resolution + Mercator zoom level corresponding to the pixel resolution """ for z in range(max_z): @@ -55,7 +61,11 @@ def zoom_for_pixelsize(pixel_size, max_z=24, tilesize=256): return max_z - 1 -def get_zooms(src_dst, ensure_global_max_zoom=False, tilesize=256): +def get_zooms( + src_dst: Union[DatasetReader, DatasetWriter, WarpedVRT], + ensure_global_max_zoom: bool = False, + tilesize: int = 256, +) -> Tuple[int, int]: """ Calculate raster min/max mercator zoom level. @@ -75,12 +85,18 @@ def get_zooms(src_dst, ensure_global_max_zoom=False, tilesize=256): Min/Max Mercator zoom levels. 
""" - bounds = transform_bounds(src_dst.crs, "epsg:4326", *src_dst.bounds, densify_pts=21) + bounds = transform_bounds( + src_dst.crs, constants.WGS84_CRS, *src_dst.bounds, densify_pts=21 + ) center = [(bounds[0] + bounds[2]) / 2, (bounds[1] + bounds[3]) / 2] lat = center[1] if ensure_global_max_zoom else 0 dst_affine, w, h = calculate_default_transform( - src_dst.crs, "epsg:3857", src_dst.width, src_dst.height, *src_dst.bounds + src_dst.crs, + constants.WEB_MERCATOR_CRS, + src_dst.width, + src_dst.height, + *src_dst.bounds, ) mercator_resolution = max(abs(dst_affine[0]), abs(dst_affine[4])) diff --git a/rio_tiler/profiles.py b/rio_tiler/profiles.py index 29f075e5..51808621 100644 --- a/rio_tiler/profiles.py +++ b/rio_tiler/profiles.py @@ -1,6 +1,8 @@ """Image file profiles.""" -from rasterio.profiles import Profile +from collections import UserDict + +from rasterio.profiles import Profile # type: ignore class JPEGProfile(Profile): @@ -27,7 +29,7 @@ class WEBPProfile(Profile): defaults = {"quality": 75, "lossless": False} -class ImagesProfiles(dict): +class ImagesProfiles(UserDict): """ GDAL Image creation options. 
def _read(
    src_dst: Union[DatasetReader, DatasetWriter, WarpedVRT],
    height: Optional[int] = None,
    width: Optional[int] = None,
    indexes: Optional[Union[Sequence[int], int]] = None,
    out_window: Optional[windows.Window] = None,  # DEPRECATED
    window: Optional[windows.Window] = None,
    nodata: Optional[Union[float, int, str]] = None,
    resampling_method: str = "bilinear",
    force_binary_mask: bool = True,
    unscale: bool = False,
    vrt_options: Optional[Dict] = None,
) -> Tuple[numpy.ndarray, numpy.ndarray]:
    """
    Create WarpedVRT and read data and mask.

    Attributes
    ----------
    src_dst: rasterio.io.DatasetReader
        rasterio.io.DatasetReader object
    height: int, optional
        Output height of the array.
    width: int, optional
        Output width of the array.
    indexes: list of ints or a single int, optional
        Band indexes
    out_window: rasterio.windows.Window, optional DEPRECATED
        Output window to read.
    window: rasterio.windows.Window, optional
        Window to read.
    nodata: int or float, optional
        Overwrite the dataset's internal nodata value.
    resampling_method: str, optional
        Resampling algorithm name (a key of rasterio.enums.Resampling).
        Default is "bilinear".
    force_binary_mask: bool, optional
        If True, rio-tiler makes sure mask has only 0 or 255 values.
        Default is set to True.
    unscale: bool, optional
        If True, apply scale and offset to the data array.
        Default is set to False.
    vrt_options: dict, optional
        These will be passed to the rasterio.warp.WarpedVRT class.

    Returns
    -------
    data : numpy ndarray
    mask: numpy array

    """
    if isinstance(indexes, int):
        indexes = (indexes,)

    # `out_window` is deprecated in favor of `window`; honor it only when
    # `window` was not also provided.
    if out_window is not None:
        warnings.warn(
            "out_window will be removed in 2.0, use window", DeprecationWarning
        )
        if window is None:
            window = out_window

    # Default changed from a shared mutable `{}` to None to avoid the
    # mutable-default-argument pitfall; behavior is unchanged for callers.
    if vrt_options is None:
        vrt_options = {}

    vrt_params = dict(add_alpha=True, resampling=Resampling[resampling_method])
    nodata = nodata if nodata is not None else src_dst.nodata
    if nodata is not None:
        vrt_params.update(dict(nodata=nodata, add_alpha=False, src_nodata=nodata))

    if has_alpha_band(src_dst):
        vrt_params.update(dict(add_alpha=False))

    if indexes is None:
        indexes = non_alpha_indexes(src_dst)
        if indexes != src_dst.indexes:
            warnings.warn(
                "Alpha band was removed from the output data array", AlphaBandWarning
            )

    # Only force an output shape when both dimensions were requested.
    out_shape = (len(indexes), height, width) if height and width else None
    mask_out_shape = (height, width) if height and width else None
    resampling = Resampling[resampling_method]

    # Caller-provided options override the computed defaults.
    vrt_params.update(vrt_options)
    with WarpedVRT(src_dst, **vrt_params) as vrt:
        data = vrt.read(
            indexes=indexes, window=window, out_shape=out_shape, resampling=resampling,
        )
        if ColorInterp.alpha in vrt.colorinterp:
            # Use the alpha band (1-based index) as the mask.
            idx = vrt.colorinterp.index(ColorInterp.alpha) + 1
            mask = vrt.read(
                indexes=idx,
                window=window,
                out_shape=mask_out_shape,
                resampling=resampling,
                out_dtype="uint8",
            )
        else:
            mask = vrt.dataset_mask(
                window=window, out_shape=mask_out_shape, resampling=resampling,
            )

        if force_binary_mask:
            mask = numpy.where(mask != 0, numpy.uint8(255), numpy.uint8(0))

        if unscale:
            # Apply the dataset's scale/offset in-place on a float copy.
            data = data.astype("float32", casting="unsafe")
            numpy.multiply(data, vrt.scales[0], out=data, casting="unsafe")
            numpy.add(data, vrt.offsets[0], out=data, casting="unsafe")

    return data, mask
def part(
    src_dst: Union[DatasetReader, DatasetWriter, WarpedVRT],
    bounds: Tuple[float, float, float, float],
    height: Optional[int] = None,
    width: Optional[int] = None,
    padding: int = 0,
    dst_crs: CRS = constants.WEB_MERCATOR_CRS,
    bounds_crs: Optional[CRS] = None,
    minimum_overlap: Optional[float] = None,
    warp_vrt_option: Optional[Dict] = None,
    max_size: Optional[int] = None,
    **kwargs: Any,
) -> Tuple[numpy.ndarray, numpy.ndarray]:
    """
    Read part of an image.

    Attributes
    ----------
    src_dst: rasterio.io.DatasetReader
        rasterio.io.DatasetReader object
    bounds: tuple
        Output bounds (left, bottom, right, top) in target crs ("dst_crs").
    height: int, optional
        Output height of the array.
    width: int, optional
        Output width of the array.
    padding: int, optional
        Padding to apply to each edge of the tile when retrieving data
        to assist in reducing resampling artefacts along edges.
    dst_crs: CRS or str, optional
        Target coordinate reference system, default is "epsg:3857".
    bounds_crs: CRS or str, optional
        Overwrite bounds coordinate reference system, default is equal
        to the output CRS (dst_crs).
    minimum_overlap: float, optional
        Minimum % overlap for which to raise an error when the dataset
        does not cover enough of the tile.
    warp_vrt_option: dict, optional
        These will be passed to the rasterio.warp.WarpedVRT class.
    max_size: int, optional
        Limit output size array if width and height are not set.
    kwargs: Any, optional
        Additional options to forward to reader._read()

    Returns
    -------
    data : numpy ndarray
    mask: numpy array

    """
    if max_size and width and height:
        warnings.warn(
            "'max_size' will be ignored with 'height' and 'width' set.",
            UserWarning,
        )

    # Normalize VRT options; avoids the shared mutable-default pitfall.
    if warp_vrt_option is None:
        warp_vrt_option = {}

    if bounds_crs:
        bounds = transform_bounds(bounds_crs, dst_crs, *bounds, densify_pts=21)

    if minimum_overlap:
        # Fraction of the requested bounds actually covered by the dataset.
        src_bounds = transform_bounds(
            src_dst.crs, dst_crs, *src_dst.bounds, densify_pts=21
        )
        x_overlap = max(
            0, min(src_bounds[2], bounds[2]) - max(src_bounds[0], bounds[0])
        )
        y_overlap = max(
            0, min(src_bounds[3], bounds[3]) - max(src_bounds[1], bounds[1])
        )
        cover_ratio = (x_overlap * y_overlap) / (
            (bounds[2] - bounds[0]) * (bounds[3] - bounds[1])
        )

        if cover_ratio < minimum_overlap:
            raise TileOutsideBounds(
                "Dataset covers less than {:.0f}% of tile".format(cover_ratio * 100)
            )

    vrt_transform, vrt_width, vrt_height = get_vrt_transform(
        src_dst, bounds, height, width, dst_crs=dst_crs
    )

    window = windows.Window(col_off=0, row_off=0, width=vrt_width, height=vrt_height)

    if max_size and not (width and height):
        # Constrain the longest output dimension while keeping aspect ratio.
        if max(vrt_width, vrt_height) > max_size:
            ratio = vrt_height / vrt_width
            if ratio > 1:
                height = max_size
                width = math.ceil(height / ratio)
            else:
                width = max_size
                height = math.ceil(width * ratio)

    out_height = height or vrt_height
    out_width = width or vrt_width
    if padding > 0 and not is_aligned(src_dst, bounds, out_height, out_width):
        # Grow the VRT by `padding` pixels on each side, then read the
        # original extent back out of the middle of the enlarged VRT.
        vrt_transform = vrt_transform * Affine.translation(-padding, -padding)
        orig_vrt_height = vrt_height
        orig_vrt_width = vrt_width
        vrt_height = vrt_height + 2 * padding
        vrt_width = vrt_width + 2 * padding
        window = windows.Window(
            col_off=padding,
            row_off=padding,
            width=orig_vrt_width,
            height=orig_vrt_height,
        )

    return _read(
        src_dst,
        out_height,
        out_width,
        window=window,
        vrt_options=dict(
            crs=dst_crs,
            transform=vrt_transform,
            width=vrt_width,
            height=vrt_height,
            **warp_vrt_option,
        ),
        **kwargs,
    )
+ + """ + if isinstance(indexes, int): + indexes = (indexes,) + + lon, lat = transform(coord_crs, src_dst.crs, [coordinates[0]], [coordinates[1]]) + if not ( + (src_dst.bounds[0] < lon[0] < src_dst.bounds[2]) + and (src_dst.bounds[1] < lat[0] < src_dst.bounds[3]) + ): + raise Exception("Point is outside dataset bounds") + + indexes = indexes if indexes is not None else src_dst.indexes + + point_values = list(src_dst.sample([(lon[0], lat[0])], indexes=indexes))[0] + + if unscale: + point_values = point_values.astype("float32", casting="unsafe") + numpy.multiply( + point_values, src_dst.scales[0], out=point_values, casting="unsafe" + ) + numpy.add(point_values, src_dst.offsets[0], out=point_values, casting="unsafe") + + return point_values.tolist() + + +def metadata( + src_dst: Union[DatasetReader, DatasetWriter, WarpedVRT], + bounds: Optional[Tuple[float, float, float, float]] = None, + indexes: Optional[Union[Sequence[int], int]] = None, + max_size: int = 1024, + bounds_crs: CRS = constants.WGS84_CRS, + percentiles: Tuple[float, float] = (2.0, 98.0), + hist_options: Dict = {}, + **kwargs: Any, +) -> Dict: + """ + Retrieve statistics from multiple sentinel bands. + + Attributes + ---------- + src_dst : rasterio.io.DatasetReader + rasterio.io.DatasetReader object + bounds : tuple, optional + Bounding box coordinates from which to calculate image statistics. + max_size : int + `max_size` of the longest dimension, respecting + bounds X/Y aspect ratio. + indexes : list of ints or a single int, optional + Band indexes. + bounds_crs: CRS or str, optional + Specify bounds coordinate reference system, default WGS84/EPSG4326. + percentiles: tuple, optional + Tuple of Min/Max percentiles to compute. Default is (2, 98). + hist_options : dict, optional + Options to forward to numpy.histogram function. 
+ kwargs : Any, optional + Additional options to forward to part or preview + + Returns + ------- + dict + + """ + if isinstance(indexes, int): + indexes = (indexes,) + + if indexes is None: + indexes = non_alpha_indexes(src_dst) + if indexes != src_dst.indexes: + warnings.warn( + "Alpha band was removed from the output data array", AlphaBandWarning + ) + + if bounds: + data, mask = part( + src_dst, + bounds, + max_size=max_size, + indexes=indexes, + dst_crs=src_dst.crs, + bounds_crs=bounds_crs, + **kwargs, + ) + bounds = transform_bounds( + bounds_crs, constants.WGS84_CRS, *bounds, densify_pts=21 + ) + + else: + data, mask = preview(src_dst, max_size=max_size, indexes=indexes, **kwargs) + bounds = transform_bounds( + src_dst.crs, constants.WGS84_CRS, *src_dst.bounds, densify_pts=21 + ) + + data = numpy.ma.array(data) + data.mask = mask == 0 + + statistics = { + indexes[b]: raster_stats(data[b], percentiles=percentiles, **hist_options) + for b in range(data.shape[0]) + } + + def _get_descr(ix): + """Return band description.""" + name = src_dst.descriptions[ix - 1] + if not name: + name = "band{}".format(ix) + return name + + band_descriptions = [(ix, _get_descr(ix)) for ix in indexes] + tags = [(ix, src_dst.tags(ix)) for ix in indexes] + + other_meta = dict() + if src_dst.scales[0] and src_dst.offsets[0]: + other_meta.update(dict(scale=src_dst.scales[0])) + other_meta.update(dict(offset=src_dst.offsets[0])) + + if has_alpha_band(src_dst): + nodata_type = "Alpha" + elif has_mask_band(src_dst): + nodata_type = "Mask" + elif src_dst.nodata is not None: + nodata_type = "Nodata" + else: + nodata_type = "None" + + try: + cmap = src_dst.colormap(1) + other_meta.update(dict(colormap=cmap)) + except ValueError: + pass + + return dict( + bounds=bounds, + statistics=statistics, + band_metadata=tags, + band_descriptions=band_descriptions, + dtype=src_dst.meta["dtype"], + colorinterp=[src_dst.colorinterp[ix - 1].name for ix in indexes], + nodata_type=nodata_type, + 
**other_meta, + ) + + +def tile( + src_dst: Union[DatasetReader, DatasetWriter, WarpedVRT], + x: int, + y: int, + z: int, + tilesize: int = 256, + **kwargs, +) -> Tuple[numpy.ndarray, numpy.ndarray]: + """ + Read mercator tile from an image. + + Attributes + ---------- + src_dst : rasterio.io.DatasetReader + rasterio.io.DatasetReader object + x : int + Mercator tile X index. + y : int + Mercator tile Y index. + z : int + Mercator tile ZOOM level. + tilesize : int, optional + Output tile size. Default is 256. + kwargs : Any, optional + Additional options to forward to part() + + Returns + ------- + data : numpy ndarray + mask: numpy array + + """ + bounds = transform_bounds( + src_dst.crs, constants.WGS84_CRS, *src_dst.bounds, densify_pts=21 + ) + if not tile_exists(bounds, z, x, y): + raise TileOutsideBounds(f"Tile {z}/{x}/{y} is outside image bounds") + + tile_bounds = mercantile.xy_bounds(mercantile.Tile(x=x, y=y, z=z)) + return part(src_dst, tile_bounds, tilesize, tilesize, **kwargs) + + +def multi_tile( + assets: Sequence[str], *args: Any, **kwargs: Any +) -> Tuple[numpy.ndarray, numpy.ndarray]: + """Assemble multiple rio_tiler.reader.tile.""" + + def worker(asset: str): + with rasterio.open(asset) as src_dst: + return tile(src_dst, *args, **kwargs) + + with futures.ThreadPoolExecutor(max_workers=constants.MAX_THREADS) as executor: + data, masks = zip(*list(executor.map(worker, assets))) + data = numpy.concatenate(data) + mask = numpy.all(masks, axis=0).astype(numpy.uint8) * 255 + return data, mask + + +def multi_preview( + assets: Sequence[str], *args: Any, **kwargs: Any +) -> Tuple[numpy.ndarray, numpy.ndarray]: + """Assemble multiple rio_tiler.reader.preview.""" + + def worker(asset: str): + with rasterio.open(asset) as src_dst: + return preview(src_dst, *args, **kwargs) + + with futures.ThreadPoolExecutor(max_workers=constants.MAX_THREADS) as executor: + data, masks = zip(*list(executor.map(worker, assets))) + data = numpy.concatenate(data) + mask = 
numpy.all(masks, axis=0).astype(numpy.uint8) * 255 + return data, mask + + +def multi_point(assets: Sequence[str], *args: Any, **kwargs: Any) -> Sequence: + """Assemble multiple rio_tiler.reader.point.""" + + def worker(asset: str): + with rasterio.open(asset) as src_dst: + return point(src_dst, *args, **kwargs) + + with futures.ThreadPoolExecutor(max_workers=constants.MAX_THREADS) as executor: + return list(executor.map(worker, assets)) + + +def multi_metadata(assets: Sequence[str], *args: Any, **kwargs: Any) -> Sequence: + """Assemble multiple rio_tiler.reader.metadata.""" + + def worker(asset: str): + with rasterio.open(asset) as src_dst: + return metadata(src_dst, *args, **kwargs) + + with futures.ThreadPoolExecutor(max_workers=constants.MAX_THREADS) as executor: + return list(executor.map(worker, assets)) diff --git a/rio_tiler/sentinel1.py b/rio_tiler/sentinel1.py deleted file mode 100644 index 64f6d55b..00000000 --- a/rio_tiler/sentinel1.py +++ /dev/null @@ -1,283 +0,0 @@ -"""rio_tiler.sentinel1: Sentinel-1 processing.""" - -import os -import re -import json -from functools import partial -from concurrent import futures - -import numpy - -from boto3.session import Session as boto3_session - -import mercantile - -import rasterio -from rasterio.vrt import WarpedVRT -from rasterio import transform - -from rio_tiler import utils -from rio_tiler.errors import TileOutsideBounds, InvalidBandName, InvalidSentinelSceneId - -REGION = os.environ.get("AWS_REGION", "eu-central-1") -SENTINEL_BUCKET = "s3://sentinel-s1-l1c" -SENTINEL_BANDS = ["vv", "vh"] - - -def _aws_get_object(bucket, key, request_pays=True, client=None): - """AWS s3 get object content.""" - if not client: - session = boto3_session(region_name=REGION) - client = session.client("s3") - - params = {"Bucket": bucket, "Key": key} - if request_pays: - params["RequestPayer"] = "requester" - response = client.get_object(**params) - return response["Body"].read() - - -def _sentinel_parse_scene_id(sceneid): - 
"""Parse Sentinel-1 scene id. - - Attributes - ---------- - sceneid : str - Sentinel-1 sceneid. - - Returns - ------- - out : dict - dictionary with metadata constructed from the sceneid. - - e.g: - _sentinel_parse_scene_id('S1A_IW_GRDH_1SDV_20180716T004042_20180716T004107_022812_02792A_FD5B') - { - "sensor": "1", - "satellite": "A", - "beam": "IW", - "product": "GRD", - "resolution": "H", - "processing_level": "1", - "product_class": "S", - "polarisation": "DV", - "startDateTime": "20180716T004042", - "stopDateTime": "20180716T004107", - "absolute_orbit": "022812", - "mission_task": "02792A", - "product_id": "FD5B", - "key": "GRD/2018/7/16/IW/DV/S1A_IW_GRDH_1SDV_20180716T004042_20180716T004107_022812_02792A_FD5B", - "scene": "S1A_IW_GRDH_1SDV_20180716T004042_20180716T004107_022812_02792A_FD5B", - } - - """ - if not re.match( - "^S1[AB]_(IW)|(EW)_[A-Z]{3}[FHM]_[0-9][SA][A-Z]{2}_[0-9]{8}T[0-9]{6}_[0-9]{8}T[0-9]{6}_[0-9A-Z]{6}_[0-9A-Z]{6}_[0-9A-Z]{4}$", - sceneid, - ): - raise InvalidSentinelSceneId("Could not match {}".format(sceneid)) - - sentinel_pattern = ( - r"^S" - r"(?P\w{1})" - r"(?P[AB]{1})" - r"_" - r"(?P[A-Z]{2})" - r"_" - r"(?P[A-Z]{3})" - r"(?P[FHM])" - r"_" - r"(?P[0-9])" - r"(?P[SA])" - r"(?P(SH)|(SV)|(DH)|(DV)|(HH)|(HV)|(VV)|(VH))" - r"_" - r"(?P[0-9]{8}T[0-9]{6})" - r"_" - r"(?P[0-9]{8}T[0-9]{6})" - r"_" - r"(?P[0-9]{6})" - r"_" - r"(?P[0-9A-Z]{6})" - r"_" - r"(?P[0-9A-Z]{4})$" - ) - - meta = re.match(sentinel_pattern, sceneid, re.IGNORECASE).groupdict() - year = meta["startDateTime"][0:4] - month = meta["startDateTime"][4:6].strip("0") - day = meta["startDateTime"][6:8].strip("0") - meta["key"] = "{}/{}/{}/{}/{}/{}/{}".format( - meta["product"], year, month, day, meta["beam"], meta["polarisation"], sceneid - ) - - meta["scene"] = sceneid - - return meta - - -def _get_bounds(scene_info): - bucket = SENTINEL_BUCKET.replace("s3://", "") - product_info = json.loads( - _aws_get_object(bucket, "{}/productInfo.json".format(scene_info["key"])) - ) - geom = 
product_info["footprint"] - xyz = list(zip(*geom["coordinates"][0])) - return min(xyz[0]), min(xyz[1]), max(xyz[0]), max(xyz[1]) - - -def bounds(sceneid): - """ - Retrieve image bounds. - - Attributes - ---------- - sceneid : str - Sentinel-1 sceneid. - - Returns - ------- - out : dict - dictionary with image bounds. - - """ - scene_params = _sentinel_parse_scene_id(sceneid) - return {"sceneid": sceneid, "bounds": list(_get_bounds(scene_params))} - - -def metadata(sceneid, pmin=2, pmax=98, bands=None, **kwargs): - """ - Retrieve image bounds and band statistics. - - Attributes - ---------- - sceneid : str - Sentinel-1 sceneid. - pmin : int, optional, (default: 2) - Histogram minimum cut. - pmax : int, optional, (default: 98) - Histogram maximum cut. - bands: tuple, str, required - Bands name (e.g vv, vh). - kwargs : optional - These are passed to 'rio_tiler.sentinel1._stats' - e.g: histogram_bins=20' - - Returns - ------- - out : dict - Dictionary with image bounds and bands statistics. 
- - """ - if not bands: - raise InvalidBandName("bands is required") - - if not isinstance(bands, tuple): - bands = tuple((bands,)) - - for band in bands: - if band not in SENTINEL_BANDS: - raise InvalidBandName("{} is not a valid Sentinel band name".format(band)) - - scene_params = _sentinel_parse_scene_id(sceneid) - sentinel_address = "{}/{}/measurement".format(SENTINEL_BUCKET, scene_params["key"]) - - addresses = [ - "{}/{}-{}.tiff".format(sentinel_address, scene_params["beam"].lower(), band) - for band in bands - ] - - def _s1_metadata(src_path, percentiles, **kwarg): - with rasterio.open(src_path) as src_dst: - with WarpedVRT( - src_dst, - src_crs=src_dst.gcps[1], - src_transform=transform.from_gcps(src_dst.gcps[0]), - src_nodata=0, - ) as vrt_dst: - return utils.raster_get_stats(vrt_dst, percentiles=percentiles, **kwarg) - - _stats_worker = partial(_s1_metadata, percentiles=(pmin, pmax), **kwargs) - with futures.ThreadPoolExecutor() as executor: - responses = list(executor.map(_stats_worker, addresses)) - - info = { - "sceneid": sceneid, - "bounds": responses[0]["bounds"], - "minzoom": responses[0]["minzoom"], - "maxzoom": responses[0]["maxzoom"], - } - - info["statistics"] = { - b: v for b, d in zip(bands, responses) for k, v in d["statistics"].items() - } - return info - - -def tile(sceneid, tile_x, tile_y, tile_z, bands=None, tilesize=256, **kwargs): - """ - Create mercator tile from Sentinel-1 data. - - Attributes - ---------- - sceneid : str - Sentinel-2 sceneid. - tile_x : int - Mercator tile X index. - tile_y : int - Mercator tile Y index. - tile_z : int - Mercator tile ZOOM level. - bands: tuple, str, required - Bands name (e.g vv, vh). - tilesize : int, optional (default: 256) - Output image size. 
- - Returns - ------- - data : numpy ndarray - mask: numpy array - - """ - if not bands: - raise InvalidBandName("bands is required") - - if not isinstance(bands, tuple): - bands = tuple((bands,)) - - for band in bands: - if band not in SENTINEL_BANDS: - raise InvalidBandName("{} is not a valid Sentinel band name".format(band)) - - scene_params = _sentinel_parse_scene_id(sceneid) - sentinel_address = "{}/{}/measurement".format(SENTINEL_BUCKET, scene_params["key"]) - - mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z) - tile_bounds = mercantile.xy_bounds(mercator_tile) - - addresses = [ - "{}/{}-{}.tiff".format(sentinel_address, scene_params["beam"].lower(), band) - for band in bands - ] - - def _s1_tiler(src_path): - with rasterio.open(src_path) as src_dst: - with WarpedVRT( - src_dst, - src_crs=src_dst.gcps[1], - src_transform=transform.from_gcps(src_dst.gcps[0]), - src_nodata=0, - ) as vrt_dst: - if not utils.tile_exists(vrt_dst.bounds, tile_z, tile_x, tile_y): - raise TileOutsideBounds( - "Tile {}/{}/{} is outside image bounds".format( - tile_z, tile_x, tile_y - ) - ) - - return utils._tile_read(vrt_dst, bounds=tile_bounds, tilesize=tilesize) - - with futures.ThreadPoolExecutor() as executor: - data, masks = zip(*list(executor.map(_s1_tiler, addresses))) - mask = numpy.all(masks, axis=0).astype(numpy.uint8) * 255 - - return numpy.concatenate(data), mask diff --git a/rio_tiler/sentinel2.py b/rio_tiler/sentinel2.py deleted file mode 100644 index 2e86c7a5..00000000 --- a/rio_tiler/sentinel2.py +++ /dev/null @@ -1,367 +0,0 @@ -"""rio_tiler.sentinel2: Sentinel-2 processing.""" - -import os -import re -import warnings -import itertools -import multiprocessing -from collections import OrderedDict -from functools import partial -from concurrent import futures - -import numpy as np - -import mercantile -import rasterio -from rasterio.crs import CRS -from rasterio.warp import transform_bounds - -from rio_tiler import utils -from rio_tiler.errors import ( - 
DeprecationWarning, - TileOutsideBounds, - InvalidBandName, - InvalidSentinelSceneId, -) - -AWS_SENTINEL_BUCKET = "s3://sentinel-s2-" - -SENTINEL_L1_BANDS = OrderedDict( - [ - ("10m", ["02", "03", "04", "08"]), - ("20m", ["05", "06", "07", "11", "12", "8A"]), - ("60m", ["01", "09", "10"]), - ] -) - -SENTINEL_L2_BANDS = OrderedDict( - [ - ("10m", ["02", "03", "04", "08"]), - ("20m", ["02", "03", "04", "05", "06", "07", "08", "11", "12", "8A"]), - ( - "60m", - ["01", "02", "03", "04", "05", "06", "07", "08", "09", "11", "12", "8A"], - ), - ] -) - -SENTINEL_L2_PRODUCTS = OrderedDict( - [ - ("10m", ["AOT", "WVP"]), - ("20m", ["AOT", "SCL", "WVP"]), - ("60m", ["AOT", "SCL", "WVP"]), - ] -) - -# ref: https://docs.python.org/3/library/concurrent.futures.html#threadpoolexecutor -MAX_THREADS = int(os.environ.get("MAX_THREADS", multiprocessing.cpu_count() * 5)) - - -def _sentinel_parse_scene_id(sceneid): - """ - Parse Sentinel-2 scene id. - - Attributes - ---------- - sceneid : str - Sentinel-2 sceneid. - - Returns - ------- - out : dict - dictionary with metadata constructed from the sceneid. 
- - e.g: - _sentinel_parse_scene_id('S2A_tile_20170323_07SNC_0') - { - "acquisitionDay": "23", - "acquisitionMonth": "03", - "acquisitionYear": "2017", - "key": "tiles/7/S/NC/2017/3/23/0", - "lat": "S", - "num": "0", - "satellite": "A", - "scene": "S2A_tile_20170323_07SNC_0", - "sensor": "2", - "sq": "NC", - "utm": "07", - } - - """ - old_sceneid = "S2[AB]_tile_[0-9]{8}_[0-9]{2}[A-Z]{3}_[0-9]" - new_sceneid = "S2[AB]_L[0-2][A-C]_[0-9]{8}_[0-9]{2}[A-Z]{3}_[0-9]" - if not re.match("^{}|{}$".format(old_sceneid, new_sceneid), sceneid): - raise InvalidSentinelSceneId("Could not match {}".format(sceneid)) - - if re.match(old_sceneid, sceneid): - warnings.warn( - "Old Sentinel-2 scene id will be deprecated starting in rio-tiler v2.0.0" - "Processing level is set to L1A.", - DeprecationWarning, - ) - - sentinel_pattern_old = ( - r"^S" - r"(?P\w{1})" - r"(?P[AB]{1})" - r"_tile_" - r"(?P[0-9]{4})" - r"(?P[0-9]{2})" - r"(?P[0-9]{2})" - r"_" - r"(?P[0-9]{2})" - r"(?P\w{1})" - r"(?P\w{2})" - r"_" - r"(?P[0-9]{1})$" - ) - - sentinel_pattern_new = ( - r"^S" - r"(?P\w{1})" - r"(?P[AB]{1})" - r"_" - r"(?PL[0-2][ABC])" - r"_" - r"(?P[0-9]{4})" - r"(?P[0-9]{2})" - r"(?P[0-9]{2})" - r"_" - r"(?P[0-9]{2})" - r"(?P\w{1})" - r"(?P\w{2})" - r"_" - r"(?P[0-9]{1})$" - ) - - meta = None - for pattern in [sentinel_pattern_old, sentinel_pattern_new]: - match = re.match(pattern, sceneid, re.IGNORECASE) - if match: - meta = match.groupdict() - break - - if not meta.get("processingLevel"): - meta["processingLevel"] = "L1C" - - utm_zone = meta["utm"].lstrip("0") - grid_square = meta["sq"] - latitude_band = meta["lat"] - year = meta["acquisitionYear"] - month = meta["acquisitionMonth"].lstrip("0") - day = meta["acquisitionDay"].lstrip("0") - img_num = meta["num"] - - meta["scene"] = sceneid - meta["aws_bucket"] = AWS_SENTINEL_BUCKET + meta["processingLevel"].lower() - meta["aws_prefix"] = "tiles/{}/{}/{}/{}/{}/{}/{}".format( - utm_zone, latitude_band, grid_square, year, month, day, img_num - ) - 
meta["key"] = meta["aws_prefix"] # Will be deprecated in rio-tiler v2.0.0 - - if meta["processingLevel"] == "L1C": - meta["preview_file"] = "preview.jp2" - meta["preview_prefix"] = "preview" - meta["bands"] = list( - itertools.chain.from_iterable( - [bands for _, bands in SENTINEL_L1_BANDS.items()] - ) - ) - meta["valid_bands"] = meta["bands"] - else: - meta["preview_file"] = "R60m/TCI.jp2" - meta["preview_prefix"] = "R60m" - meta["bands"] = SENTINEL_L2_BANDS["60m"] - meta["valid_bands"] = meta["bands"] + SENTINEL_L2_PRODUCTS["60m"] - - return meta - - -def _l2_prefixed_band(band): - """Return L2A prefixed bands name.""" - if band in SENTINEL_L2_BANDS["60m"]: - for res, bands in SENTINEL_L2_BANDS.items(): - if band in bands: - return "R{}/B{}".format(res, band) - elif band in SENTINEL_L2_PRODUCTS["60m"]: - for res, bands in SENTINEL_L2_PRODUCTS.items(): - if band in bands: - return "R{}/{}".format(res, band) - else: - raise InvalidBandName("{} is not a valid Sentinel band name".format(band)) - - -def bounds(sceneid): - """ - Retrieve image bounds. - - Attributes - ---------- - sceneid : str - Sentinel-2 sceneid. - - Returns - ------- - out : dict - dictionary with image bounds. - - """ - scene_params = _sentinel_parse_scene_id(sceneid) - preview_file = os.path.join( - scene_params["aws_bucket"], - scene_params["aws_prefix"], - scene_params["preview_file"], - ) - with rasterio.open(preview_file) as src: - wgs_bounds = transform_bounds( - *[src.crs, "epsg:4326"] + list(src.bounds), densify_pts=21 - ) - - info = {"sceneid": sceneid} - info["bounds"] = list(wgs_bounds) - - return info - - -def _sentinel_stats( - src_path, percentiles=(2, 98), histogram_bins=10, histogram_range=None -): - """ - src_path : str or PathLike object - A dataset path or URL. Will be opened in "r" mode. 
- """ - - with rasterio.open(src_path) as src: - arr = src.read(indexes=[1], masked=True) - arr[arr == 0] = np.ma.masked - - params = {} - if histogram_bins: - params.update(dict(bins=histogram_bins)) - if histogram_range: - params.update(dict(range=histogram_range)) - - return {1: utils._stats(arr, percentiles=percentiles, **params)} - - -def metadata(sceneid, pmin=2, pmax=98, **kwargs): - """ - Retrieve image bounds and band statistics. - - Attributes - ---------- - sceneid : str - Sentinel-2 sceneid. - pmin : int, optional, (default: 2) - Histogram minimum cut. - pmax : int, optional, (default: 98) - Histogram maximum cut. - kwargs : optional - These are passed to 'rio_tiler.sentinel2._sentinel_stats' - e.g: histogram_bins=20' - - Returns - ------- - out : dict - Dictionary with image bounds and bands statistics. - - """ - scene_params = _sentinel_parse_scene_id(sceneid) - path_prefix = os.path.join(scene_params["aws_bucket"], scene_params["aws_prefix"]) - preview_file = os.path.join(path_prefix, scene_params["preview_file"]) - - dst_crs = CRS({"init": "EPSG:4326"}) - with rasterio.open(preview_file) as src: - bounds = transform_bounds( - *[src.crs, dst_crs] + list(src.bounds), densify_pts=21 - ) - - info = {"sceneid": sceneid} - info["bounds"] = {"value": bounds, "crs": dst_crs.to_string()} - - addresses = [ - "{}/{}/B{}.jp2".format(path_prefix, scene_params["preview_prefix"], band) - for band in scene_params["bands"] - ] - _stats_worker = partial(_sentinel_stats, percentiles=(pmin, pmax), **kwargs) - with futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor: - responses = executor.map(_stats_worker, addresses) - - info["statistics"] = { - b: v for b, d in zip(scene_params["bands"], responses) for k, v in d.items() - } - return info - - -def tile( - sceneid, tile_x, tile_y, tile_z, bands=("04", "03", "02"), tilesize=256, **kwargs -): - """ - Create mercator tile from Sentinel-2 data. - - Attributes - ---------- - sceneid : str - Sentinel-2 sceneid. 
- tile_x : int - Mercator tile X index. - tile_y : int - Mercator tile Y index. - tile_z : int - Mercator tile ZOOM level. - bands : tuple, str, optional (default: ('04', '03', '02')) - Bands index for the RGB combination. - tilesize : int, optional (default: 256) - Output image size. - kwargs: dict, optional - These will be passed to the 'rio_tiler.utils._tile_read' function. - - Returns - ------- - data : numpy ndarray - mask: numpy array - - """ - scene_params = _sentinel_parse_scene_id(sceneid) - - if not isinstance(bands, tuple): - bands = tuple((bands,)) - - for band in bands: - if band not in scene_params["valid_bands"]: - raise InvalidBandName("{} is not a valid Sentinel band name".format(band)) - - preview_file = os.path.join( - scene_params["aws_bucket"], - scene_params["aws_prefix"], - scene_params["preview_file"], - ) - with rasterio.open(preview_file) as src: - bounds = transform_bounds(src.crs, "epsg:4326", *src.bounds, densify_pts=21) - - if not utils.tile_exists(bounds, tile_z, tile_x, tile_y): - raise TileOutsideBounds( - "Tile {}/{}/{} is outside image bounds".format(tile_z, tile_x, tile_y) - ) - - mercator_tile = mercantile.Tile(x=tile_x, y=tile_y, z=tile_z) - tile_bounds = mercantile.xy_bounds(mercator_tile) - - path_prefix = os.path.join(scene_params["aws_bucket"], scene_params["aws_prefix"]) - if scene_params["processingLevel"] == "L2A": - bands = [_l2_prefixed_band(b) for b in bands] - else: - bands = ["B{}".format(b) for b in bands] - - def _read_tile(path): - with rasterio.open(path) as src_dst: - return utils.tile_read( - src_dst, bounds=tile_bounds, tilesize=tilesize, nodata=0, **kwargs - ) - - addresses = ["{}/{}.jp2".format(path_prefix, band) for band in bands] - with futures.ThreadPoolExecutor(max_workers=MAX_THREADS) as executor: - data, masks = zip(*list(executor.map(_read_tile, addresses))) - mask = np.all(masks, axis=0).astype(np.uint8) * 255 - - return np.concatenate(data), mask diff --git a/rio_tiler/utils.py b/rio_tiler/utils.py 
index 2a483dd4..8ddf196b 100644 --- a/rio_tiler/utils.py +++ b/rio_tiler/utils.py @@ -1,40 +1,37 @@ """rio_tiler.utils: utility functions.""" -import os +from typing import Any, Dict, Optional, Sequence, Tuple, Union + import re import math -import logging -import warnings -import numpy as np -import numexpr as ne +import numpy +import numexpr +from affine import Affine import mercantile -import rasterio +from rasterio import windows from rasterio.crs import CRS from rasterio.vrt import WarpedVRT -from rasterio.enums import Resampling, MaskFlags, ColorInterp +from rasterio.enums import MaskFlags, ColorInterp from rasterio.io import DatasetReader, DatasetWriter, MemoryFile -from rasterio import transform -from rasterio import windows -from rasterio.warp import calculate_default_transform, transform_bounds +from rasterio.transform import from_bounds +from rasterio.warp import calculate_default_transform -from rio_tiler.mercator import get_zooms +from rio_tiler import constants +from rio_tiler.colormap import apply_cmap -from rio_tiler.errors import NoOverviewWarning, DeprecationWarning, TileOutsideBounds -from affine import Affine - -logger = logging.getLogger(__name__) - -def _chunks(l, n): +def _chunks(my_list: Sequence, chuck_size: int): """Yield successive n-sized chunks from l.""" - for i in range(0, len(l), n): - yield l[i : i + n] + for i in range(0, len(my_list), chuck_size): + yield my_list[i : i + chuck_size] -def _stats(arr, percentiles=(2, 98), **kwargs): +def _stats( + arr: numpy.ma.array, percentiles: Tuple[float, float] = (2, 98), **kwargs: Any +) -> Dict: """ Calculate array statistics. 
@@ -64,9 +61,9 @@ def _stats(arr, percentiles=(2, 98), **kwargs): ] } """ - sample, edges = np.histogram(arr[~arr.mask], **kwargs) + sample, edges = numpy.histogram(arr[~arr.mask], **kwargs) return { - "pc": np.percentile(arr[~arr.mask], percentiles).astype(arr.dtype).tolist(), + "pc": numpy.percentile(arr[~arr.mask], percentiles).astype(arr.dtype).tolist(), "min": arr.min().item(), "max": arr.max().item(), "std": arr.std().item(), @@ -75,200 +72,75 @@ def _stats(arr, percentiles=(2, 98), **kwargs): # https://github.com/OSGeo/gdal/blob/b1c9c12ad373e40b955162b45d704070d4ebf7b0/gdal/frmts/ingr/IngrTypes.cpp#L191 -def _div_round_up(a, b): +def _div_round_up(a: int, b: int) -> int: return (a // b) if (a % b) == 0 else (a // b) + 1 -def _raster_get_stats( - src_dst, - indexes=None, - nodata=None, - overview_level=None, - max_size=1024, - percentiles=(2, 98), - dst_crs=CRS({"init": "EPSG:4326"}), - histogram_bins=10, - histogram_range=None, - resampling_method="bilinear", - warp_vrt_option={}, -): +def get_overview_level( + src_dst: Union[DatasetReader, DatasetWriter, WarpedVRT], + bounds: Tuple[float, float, float, float], + height: int, + width: int, + dst_crs: CRS = constants.WEB_MERCATOR_CRS, +) -> int: """ - Retrieve dataset statistics. + Return the overview level corresponding to the tile resolution. + + Freely adapted from https://github.com/OSGeo/gdal/blob/41993f127e6e1669fbd9e944744b7c9b2bd6c400/gdal/apps/gdalwarp_lib.cpp#L2293-L2362 Attributes ---------- - src_dst : rasterio.io.DatasetReader - rasterio.io.DatasetReader object - indexes : tuple, list, int, optional - Dataset band indexes. - nodata, int, optional - Custom nodata value if not preset in dataset. - overview_level : int, optional - Overview (decimation) level to fetch. - max_size: int, optional - Maximum size of dataset to retrieve - (will be used to calculate the overview level to fetch). 
- percentiles : tulple, optional - Percentile or sequence of percentiles to compute, - which must be between 0 and 100 inclusive (default: (2, 98)). - dst_crs: CRS or dict - Target coordinate reference system (default: EPSG:4326). - histogram_bins: int, optional - Defines the number of equal-width histogram bins (default: 10). - histogram_range: tuple or list, optional - The lower and upper range of the bins. If not provided, range is simply - the min and max of the array. - resampling_method : str, optional (default: "bilinear") - Resampling algorithm. - warp_vrt_option: dict, optional (default: {}) - These will be passed to the rasterio.warp.WarpedVRT class. + src_dst : rasterio.io.DatasetReader + Rasterio io.DatasetReader object + bounds : list + Bounds (left, bottom, right, top) in target crs ("dst_crs"). + height : int + Output height. + width : int + Output width. + dst_crs: CRS or str, optional + Target coordinate reference system (default "epsg:3857"). Returns ------- - out : dict - bounds, mercator zoom range, band descriptions - and band statistics: (percentiles), min, max, stdev, histogram - - e.g. - { - 'bounds': { - 'value': (145.72265625, 14.853515625, 145.810546875, 14.94140625), - 'crs': '+init=EPSG:4326' - }, - 'minzoom': 8, - 'maxzoom': 12, - 'band_descriptions': [(1, 'red'), (2, 'green'), (3, 'blue'), (4, 'nir')] - 'statistics': { - 1: { - 'pc': [38, 147], - 'min': 20, - 'max': 180, - 'std': 28.123562304138662, - 'histogram': [ - [1625, 219241, 28344, 15808, 12325, 10687, 8535, 7348, 4656, 1208], - [20.0, 36.0, 52.0, 68.0, 84.0, 100.0, 116.0, 132.0, 148.0, 164.0, 180.0] - ] - } - ... 
- 3: {...} - 4: {...} - } - } + ovr_idx: Int or None + Overview level """ - if isinstance(indexes, int): - indexes = [indexes] - elif isinstance(indexes, tuple): - indexes = list(indexes) - - levels = src_dst.overviews(1) - width = src_dst.width - height = src_dst.height - indexes = indexes if indexes else src_dst.indexes - nodata = nodata if nodata is not None else src_dst.nodata - bounds = transform_bounds(src_dst.crs, dst_crs, *src_dst.bounds, densify_pts=21) - - minzoom, maxzoom = get_zooms(src_dst) - - def _get_descr(ix): - """Return band description.""" - name = src_dst.descriptions[ix - 1] - if not name: - name = "band{}".format(ix) - return name - - band_descriptions = [(ix, _get_descr(ix)) for ix in indexes] - - if len(levels): - if overview_level: - decim = levels[overview_level] - else: - # determine which zoom level to read - for ii, decim in enumerate(levels): - if ( - max(_div_round_up(width, decim), _div_round_up(height, decim)) - < max_size - ): - break - else: - decim = 1 - warnings.warn( - "Dataset has no overviews, reading the full dataset", NoOverviewWarning - ) - - out_shape = ( - len(indexes), - _div_round_up(height, decim), - _div_round_up(width, decim), + dst_transform, _, _ = calculate_default_transform( + src_dst.crs, dst_crs, src_dst.width, src_dst.height, *src_dst.bounds ) + src_res = dst_transform.a + + # Compute what the "natural" output resolution + # (in pixels) would be for this input dataset + vrt_transform = from_bounds(*bounds, width, height) + target_res = vrt_transform.a + + ovr_idx = -1 + if target_res > src_res: + res = [src_res * decim for decim in src_dst.overviews(1)] + + for ovr_idx in range(ovr_idx, len(res) - 1): + ovrRes = src_res if ovr_idx < 0 else res[ovr_idx] + nextRes = res[ovr_idx + 1] + if (ovrRes < target_res) and (nextRes > target_res): + break + if abs(ovrRes - target_res) < 1e-1: + break + else: + ovr_idx = len(res) - 1 - vrt_params = dict(add_alpha=True) - if has_alpha_band(src_dst): - 
vrt_params.update(dict(add_alpha=False)) - - if nodata is not None: - vrt_params.update(dict(nodata=nodata, add_alpha=False, src_nodata=nodata)) - - vrt_params.update(warp_vrt_option) - with WarpedVRT(src_dst, **vrt_params) as vrt: - arr = vrt.read( - out_shape=out_shape, - indexes=indexes, - resampling=Resampling[resampling_method], - masked=True, - ) - - params = {} - if histogram_bins: - params.update(dict(bins=histogram_bins)) - if histogram_range: - params.update(dict(range=histogram_range)) - - stats = { - indexes[b]: _stats(arr[b], percentiles=percentiles, **params) - for b in range(arr.shape[0]) - if vrt.colorinterp[b] != ColorInterp.alpha - } - - return { - "bounds": { - "value": bounds, - "crs": dst_crs.to_string() if isinstance(dst_crs, CRS) else dst_crs, - }, - "minzoom": minzoom, - "maxzoom": maxzoom, - "band_descriptions": band_descriptions, - "statistics": stats, - } - - -def raster_get_stats(source, **kwargs): - """ - Read data and mask. - - Attributes - ---------- - source : str or rasterio.io.DatasetReader - input file path or rasterio.io.DatasetReader object - kwargs: dict, optional - These will be passed to the _raster_get_stats function. - - Returns - ------- - out : array, int - returns pixel value. - - """ - if isinstance(source, (DatasetReader, DatasetWriter, WarpedVRT)): - return _raster_get_stats(source, **kwargs) - else: - with rasterio.open(source) as src_dst: - return _raster_get_stats(src_dst, **kwargs) + return ovr_idx def get_vrt_transform( - src_dst, bounds, bounds_crs=None, dst_crs=CRS({"init": "EPSG:3857"}) -): + src_dst: Union[DatasetReader, DatasetWriter, WarpedVRT], + bounds: Tuple[float, float, float, float], + height: Optional[int] = None, + width: Optional[int] = None, + dst_crs: CRS = constants.WEB_MERCATOR_CRS, +) -> Tuple[Affine, int, int]: """ Calculate VRT transform. @@ -278,9 +150,10 @@ def get_vrt_transform( Rasterio io.DatasetReader object bounds : list Bounds (left, bottom, right, top) in target crs ("dst_crs"). 
- bounds_crs : str - Coordinate reference system string (default "epsg:3857") - Replaced by "dst_crs" and will be deprecated in 1.3.0. + height : int, optional + Desired output height of the array for the bounds. + width : int, optional + Desired output width of the array for the bounds. dst_crs: CRS or str, optional Target coordinate reference system (default "epsg:3857"). @@ -292,25 +165,37 @@ def get_vrt_transform( Output dimensions """ - if bounds_crs is not None: - warnings.warn( - "bounds_crs will be removed in 1.3.0, use dst_crs", DeprecationWarning - ) - dst_crs = bounds_crs - dst_transform, _, _ = calculate_default_transform( src_dst.crs, dst_crs, src_dst.width, src_dst.height, *src_dst.bounds ) w, s, e, n = bounds - vrt_width = math.ceil((e - w) / dst_transform.a) - vrt_height = math.ceil((s - n) / dst_transform.e) - vrt_transform = transform.from_bounds(w, s, e, n, vrt_width, vrt_height) + if not height or not width: + vrt_width = math.ceil((e - w) / dst_transform.a) + vrt_height = math.ceil((s - n) / dst_transform.e) + vrt_transform = from_bounds(w, s, e, n, vrt_width, vrt_height) + return vrt_transform, vrt_width, vrt_height + + tile_transform = from_bounds(w, s, e, n, width, height) + w_res = ( + tile_transform.a + if abs(tile_transform.a) < abs(dst_transform.a) + else dst_transform.a + ) + h_res = ( + tile_transform.e + if abs(tile_transform.e) < abs(dst_transform.e) + else dst_transform.e + ) + + vrt_width = math.ceil((e - w) / w_res) + vrt_height = math.ceil((s - n) / h_res) + vrt_transform = from_bounds(w, s, e, n, vrt_width, vrt_height) return vrt_transform, vrt_width, vrt_height -def has_alpha_band(src_dst): +def has_alpha_band(src_dst: Union[DatasetReader, DatasetWriter, WarpedVRT]) -> bool: """Check for alpha band or mask in source.""" if ( any([MaskFlags.alpha in flags for flags in src_dst.mask_flag_enums]) @@ -320,217 +205,76 @@ def has_alpha_band(src_dst): return False -def non_alpha_indexes(src_dst): +def has_mask_band(src_dst: 
Union[DatasetReader, DatasetWriter, WarpedVRT]) -> bool: + """Check for mask band in source.""" + if any([MaskFlags.per_dataset in flags for flags in src_dst.mask_flag_enums]): + return True + return False + + +def non_alpha_indexes(src_dst: Union[DatasetReader, DatasetWriter, WarpedVRT]) -> Tuple: """Return indexes of non-alpha bands.""" return tuple( - ( - b - for ix, b in enumerate(src_dst.indexes) - if ( - src_dst.mask_flag_enums[ix] is not MaskFlags.alpha - and src_dst.colorinterp[ix] is not ColorInterp.alpha - ) + b + for ix, b in enumerate(src_dst.indexes) + if ( + src_dst.mask_flag_enums[ix] is not MaskFlags.alpha + and src_dst.colorinterp[ix] is not ColorInterp.alpha ) ) -def _tile_read( - src_dst, - bounds, - tilesize, - indexes=None, - nodata=None, - resampling_method="bilinear", - tile_edge_padding=2, - dst_crs=CRS({"init": "EPSG:3857"}), - bounds_crs=None, - minimum_tile_cover=None, - warp_vrt_option={}, -): - """ - Read data and mask. - - Attributes - ---------- - src_dst : rasterio.io.DatasetReader - rasterio.io.DatasetReader object - bounds : list - Output bounds (left, bottom, right, top) in target crs ("dst_crs"). - tilesize : int - Output image size - indexes : list of ints or a single int, optional, (defaults: None) - If `indexes` is a list, the result is a 3D array, but is - a 2D array if it is a band index number. - nodata: int or float, optional (defaults: None) - resampling_method : str, optional (default: "bilinear") - Resampling algorithm. - tile_edge_padding : int, optional (default: 2) - Padding to apply to each edge of the tile when retrieving data - to assist in reducing resampling artefacts along edges. - dst_crs: CRS or str, optional - Target coordinate reference system (default "epsg:3857"). - bounds_crs: CRS or str, optional - Overwrite bounds coordinate reference system (default None, equal to dst_crs). 
- minimum_tile_cover: float, optional (default: None) - Minimum % overlap for which to raise an error with dataset not - covering enought of the tile. - warp_vrt_option: dict, optional (default: {}) - These will be passed to the rasterio.warp.WarpedVRT class. - - Returns - ------- - data : numpy ndarray - mask: numpy array - - """ - if isinstance(indexes, int): - indexes = [indexes] - elif isinstance(indexes, tuple): - indexes = list(indexes) - - if not bounds_crs: - bounds_crs = dst_crs - - bounds = transform_bounds(bounds_crs, dst_crs, *bounds, densify_pts=21) - - vrt_params = dict( - add_alpha=True, crs=dst_crs, resampling=Resampling[resampling_method] - ) - - vrt_transform, vrt_width, vrt_height = get_vrt_transform( - src_dst, bounds, dst_crs=dst_crs - ) - - out_window = windows.Window( - col_off=0, row_off=0, width=vrt_width, height=vrt_height - ) - - src_bounds = transform_bounds(src_dst.crs, dst_crs, *src_dst.bounds, densify_pts=21) - x_overlap = max(0, min(src_bounds[2], bounds[2]) - max(src_bounds[0], bounds[0])) - y_overlap = max(0, min(src_bounds[3], bounds[3]) - max(src_bounds[1], bounds[1])) - cover_ratio = (x_overlap * y_overlap) / ( - (bounds[2] - bounds[0]) * (bounds[3] - bounds[1]) - ) - if minimum_tile_cover and cover_ratio < minimum_tile_cover: - raise TileOutsideBounds( - "Dataset covers less than {:.0f}% of tile".format(cover_ratio * 100) - ) - - if tile_edge_padding > 0 and not _requested_tile_aligned_with_internal_tile( - src_dst, bounds, tilesize - ): - vrt_transform = vrt_transform * Affine.translation( - -tile_edge_padding, -tile_edge_padding - ) - orig_vrt_height = vrt_height - orig_vrt_width = vrt_width - vrt_height = vrt_height + 2 * tile_edge_padding - vrt_width = vrt_width + 2 * tile_edge_padding - out_window = windows.Window( - col_off=tile_edge_padding, - row_off=tile_edge_padding, - width=orig_vrt_width, - height=orig_vrt_height, - ) - - vrt_params.update(dict(transform=vrt_transform, width=vrt_width, height=vrt_height)) - - 
indexes = indexes if indexes is not None else src_dst.indexes - out_shape = (len(indexes), tilesize, tilesize) - - nodata = nodata if nodata is not None else src_dst.nodata - if nodata is not None: - vrt_params.update(dict(nodata=nodata, add_alpha=False, src_nodata=nodata)) - - if has_alpha_band(src_dst): - vrt_params.update(dict(add_alpha=False)) - - vrt_params.update(warp_vrt_option) - with WarpedVRT(src_dst, **vrt_params) as vrt: - data = vrt.read( - out_shape=out_shape, - indexes=indexes, - window=out_window, - resampling=Resampling[resampling_method], - ) - mask = vrt.dataset_mask(out_shape=(tilesize, tilesize), window=out_window) - - return data, mask - - -def tile_read(source, bounds, tilesize, **kwargs): - """ - Read data and mask. - - Attributes - ---------- - source : str or rasterio.io.DatasetReader - input file path or rasterio.io.DatasetReader object - bounds : list - Mercator tile bounds (left, bottom, right, top) - tilesize : int - Output image size - kwargs: dict, optional - These will be passed to the _tile_read function. - - Returns - ------- - out : array, int - returns pixel value. - - """ - if isinstance(source, (DatasetReader, DatasetWriter, WarpedVRT)): - return _tile_read(source, bounds, tilesize, **kwargs) - else: - with rasterio.open(source) as src_dst: - return _tile_read(src_dst, bounds, tilesize, **kwargs) - - -def linear_rescale(image, in_range=(0, 1), out_range=(1, 255)): +def linear_rescale( + image: numpy.ndarray, + in_range: Tuple[Union[int, float], Union[int, float]] = (0, 1), + out_range: Tuple[Union[int, float], Union[int, float]] = (1, 255), +) -> numpy.ndarray: """ Linear rescaling. Attributes ---------- - image : numpy ndarray - Image array to rescale. - in_range : list, int, optional, (default: [0,1]) - Image min/max value to rescale. - out_range : list, int, optional, (default: [1,255]) - output min/max bounds to rescale to. + image : numpy ndarray + Image array to rescale. 
+ in_range : list, int, optional, (default: [0,1]) + Image min/max value to rescale. + out_range : list, int, optional, (default: [1,255]) + output min/max bounds to rescale to. Returns ------- - out : numpy ndarray - returns rescaled image array. + out : numpy ndarray + returns rescaled image array. """ imin, imax = in_range omin, omax = out_range - image = np.clip(image, imin, imax) - imin - image = image / np.float(imax - imin) + image = numpy.clip(image, imin, imax) - imin + image = image / numpy.float(imax - imin) return image * (omax - omin) + omin -def tile_exists(bounds, tile_z, tile_x, tile_y): +def tile_exists( + bounds: Tuple[float, float, float, float], tile_z: int, tile_x: int, tile_y: int +) -> bool: """ Check if a mercatile tile is inside a given bounds. Attributes ---------- - bounds : list - WGS84 bounds (left, bottom, right, top). - x : int - Mercator tile Y index. - y : int - Mercator tile Y index. - z : int - Mercator tile ZOOM level. + bounds : list + WGS84 bounds (left, bottom, right, top). + z : int + Mercator tile ZOOM level. + y : int + Mercator tile Y index. + x : int + Mercator tile Y index. Returns ------- - out : boolean - if True, the z-x-y mercator tile in inside the bounds. + out : boolean + if True, the z-x-y mercator tile in inside the bounds. 
""" mintile = mercantile.tile(bounds[0], bounds[3], tile_z) @@ -544,13 +288,21 @@ def tile_exists(bounds, tile_z, tile_x, tile_y): ) -def _requested_tile_aligned_with_internal_tile(src_dst, bounds, tilesize): +def _requested_tile_aligned_with_internal_tile( + src_dst: Union[DatasetReader, DatasetWriter, WarpedVRT], + bounds: Tuple[float, float, float, float], + height: int, + width: int, +) -> bool: """Check if tile is aligned with internal tiles.""" - if src_dst.crs != CRS.from_epsg(3857): + if not src_dst.is_tiled: + return False + + if src_dst.crs != constants.WEB_MERCATOR_CRS: return False col_off, row_off, w, h = windows.from_bounds( - *bounds, height=tilesize, transform=src_dst.transform, width=tilesize + *bounds, height=height, transform=src_dst.transform, width=width ).flatten() if round(w) % 64 and round(h) % 64: @@ -565,212 +317,185 @@ def _requested_tile_aligned_with_internal_tile(src_dst, bounds, tilesize): return True -def _apply_discrete_colormap(arr, cmap): +def geotiff_options( + x: int, + y: int, + z: int, + tilesize: int = 256, + dst_crs: CRS = constants.WEB_MERCATOR_CRS, +) -> Dict: """ - Apply discrete colormap. + GeoTIFF options. Attributes ---------- - arr : numpy.ndarray - 1D image array to convert. - color_map: dict - Discrete ColorMap dictionary - e.g: - { - 1: [255, 255, 255], - 2: [255, 0, 0] - } + x : int + Mercator tile X index. + y : int + Mercator tile Y index. + z : int + Mercator tile ZOOM level. + tilesize : int, optional + Output tile size. Default is 256. + dst_crs: CRS, optional + Target coordinate reference system, default is "epsg:3857". 
Returns ------- - arr: numpy.ndarray + dict """ - res = np.zeros((arr.shape[1], arr.shape[2], 3), dtype=np.uint8) - for k, v in cmap.items(): - res[arr[0] == k] = v - return np.transpose(res, [2, 0, 1]) - - -def array_to_image( - arr, mask=None, img_format="png", color_map=None, **creation_options -): + bounds = mercantile.xy_bounds(mercantile.Tile(x=x, y=y, z=z)) + dst_transform = from_bounds(*bounds, tilesize, tilesize) + return dict(crs=dst_crs, transform=dst_transform) + + +def render( + tile: numpy.ndarray, + mask: Optional[numpy.ndarray] = None, + img_format: str = "PNG", + colormap: Optional[Dict] = None, + **creation_options: Any, +) -> bytes: """ Translate numpy ndarray to image buffer using GDAL. Usage ----- - tile, mask = rio_tiler.utils.tile_read(......) - with open('test.jpg', 'wb') as f: - f.write(array_to_image(tile, mask, img_format="jpeg")) + tile, mask = rio_tiler.utils.tile_read(......) + with open('test.jpg', 'wb') as f: + f.write(render(tile, mask, img_format="jpeg")) Attributes ---------- - arr : numpy ndarray - Image array to encode. - mask: numpy ndarray, optional - Mask array - img_format: str, optional - Image format to return (default: 'png'). - List of supported format by GDAL: https://www.gdal.org/formats_list.html - color_map: numpy.ndarray or dict, optional - color_map can be either a (256, 3) array or RGB triplet - (e.g. [[255, 255, 255],...]) mapping each 1D pixel value rescaled - from 0 to 255 - OR - it can be a dictionary of discrete values - (e.g. { 1.3: [255, 255, 255], 2.5: [255, 0, 0]}) mapping any pixel value to a triplet - creation_options: dict, optional - Image driver creation options to pass to GDAL + tile : numpy ndarray + Image array to encode. + mask: numpy ndarray, optional + Mask array + img_format: str, optional + Image format to return (default: 'png'). + List of supported format by GDAL: https://www.gdal.org/formats_list.html + colormap: dict, optional + GDAL RGBA Color Table dictionary. 
+ creation_options: dict, optional + Image driver creation options to pass to GDAL Returns ------- - bytes + bytes: BytesIO + Reurn image body. """ - img_format = img_format.lower() + img_format = img_format.upper() - if len(arr.shape) < 3: - arr = np.expand_dims(arr, axis=0) + if len(tile.shape) < 3: + tile = numpy.expand_dims(tile, axis=0) - if color_map is not None and isinstance(color_map, dict): - arr = _apply_discrete_colormap(arr, color_map) - elif color_map is not None: - arr = np.transpose(color_map[arr][0], [2, 0, 1]).astype(np.uint8) + if colormap: + tile, alpha = apply_cmap(tile, colormap) + if mask is not None: + mask = ( + mask * alpha * 255 + ) # This is a special case when we want to mask some valid data # WEBP doesn't support 1band dataset so we must hack to create a RGB dataset - if img_format == "webp" and arr.shape[0] == 1: - arr = np.repeat(arr, 3, axis=0) + if img_format == "WEBP" and tile.shape[0] == 1: + tile = numpy.repeat(tile, 3, axis=0) + elif img_format == "JPEG": + mask = None - if mask is not None and img_format != "jpeg": - nbands = arr.shape[0] + 1 - else: - nbands = arr.shape[0] + count, height, width = tile.shape output_profile = dict( driver=img_format, - dtype=arr.dtype, - count=nbands, - height=arr.shape[1], - width=arr.shape[2], + dtype=tile.dtype, + count=count + 1 if mask is not None else count, + height=height, + width=width, ) output_profile.update(creation_options) with MemoryFile() as memfile: with memfile.open(**output_profile) as dst: - dst.write(arr, indexes=list(range(1, arr.shape[0] + 1))) - + dst.write(tile, indexes=list(range(1, count + 1))) # Use Mask as an alpha band - if mask is not None and img_format != "jpeg": - dst.write(mask.astype(arr.dtype), indexes=nbands) + if mask is not None: + dst.write(mask.astype(tile.dtype), indexes=count + 1) return memfile.read() -def get_colormap(name="cfastie", format="pil"): - """ - Return Pillow or GDAL compatible colormap array. 
- - Attributes - ---------- - name : str, optional - Colormap name (default: cfastie) - format: str, optional - Compatiblity library, should be "pil" or "gdal" (default: pil). - - Returns - ------- - colormap : list or numpy.array - Color map list in a Pillow friendly format - more info: http://pillow.readthedocs.io/en/3.4.x/reference/Image.html#PIL.Image.Image.putpalette - or - Color map array in GDAL friendly format - - """ - cmap_file = os.path.join(os.path.dirname(__file__), "cmap", "{0}.npy".format(name)) - cmap = list(np.load(cmap_file).flatten()) - - if format.lower() == "pil": - return cmap - elif format.lower() == "gdal": - return np.array(list(_chunks(cmap, 3))) - else: - raise Exception("Unsupported {} colormap format".format(format)) - - -def mapzen_elevation_rgb(arr): +def mapzen_elevation_rgb(arr: numpy.ndarray) -> numpy.ndarray: """ Encode elevation value to RGB values compatible with Mapzen tangram. Attributes ---------- - arr : numpy ndarray - Image array to encode. + arr : numpy ndarray + Image array to encode. Returns ------- - out : numpy ndarray - RGB array (3, h, w) + out : numpy ndarray + RGB array (3, h, w) """ - arr = np.clip(arr + 32768.0, 0.0, 65535.0) + arr = numpy.clip(arr + 32768.0, 0.0, 65535.0) r = arr / 256 g = arr % 256 b = (arr * 256) % 256 - return np.stack([r, g, b]).astype(np.uint8) + return numpy.stack([r, g, b]).astype(numpy.uint8) -def expression(sceneid, tile_x, tile_y, tile_z, expr=None, **kwargs): +def expression( + sceneid: str, tile_x: int, tile_y: int, tile_z: int, expr: str, **kwargs: Any, +) -> Tuple[numpy.ndarray, numpy.ndarray]: """ Apply expression on data. Attributes ---------- - sceneid : str - Landsat id, Sentinel id, CBERS ids or file url. - - tile_x : int - Mercator tile X index. - tile_y : int - Mercator tile Y index. - tile_z : int - Mercator tile ZOOM level. - expr : str, required - Expression to apply (e.g '(b5+b4)/(b5-b4)') - Band name should start with 'b'. 
+ sceneid : str + Landsat id, Sentinel id, CBERS ids or file url. + tile_x : int + Mercator tile X index. + tile_y : int + Mercator tile Y index. + tile_z : int + Mercator tile ZOOM level. + expr : str, required + Expression to apply (e.g '(b5+b4)/(b5-b4)') + Band name should start with 'b'. Returns ------- - out : ndarray - Returns processed pixel value. + out : ndarray + Returns processed pixel value. """ - if not expr: - raise Exception("Missing expression") - - bands_names = tuple(set(re.findall(r"b(?P[0-9A]{1,2})", expr))) + bands_names = list(set(re.findall(r"b(?P[0-9A]{1,2})", expr))) rgb = expr.split(",") if sceneid.startswith("L"): - from rio_tiler.landsat8 import tile as l8_tile + from rio_tiler.io.landsat8 import tile as l8_tile arr, mask = l8_tile( sceneid, tile_x, tile_y, tile_z, bands=bands_names, **kwargs ) elif sceneid.startswith("S2"): - from rio_tiler.sentinel2 import tile as s2_tile + from rio_tiler.io.sentinel2 import tile as s2_tile arr, mask = s2_tile( sceneid, tile_x, tile_y, tile_z, bands=bands_names, **kwargs ) elif sceneid.startswith("CBERS"): - from rio_tiler.cbers import tile as cbers_tile + from rio_tiler.io.cbers import tile as cbers_tile arr, mask = cbers_tile( sceneid, tile_x, tile_y, tile_z, bands=bands_names, **kwargs ) else: - from rio_tiler.main import tile as main_tile + from rio_tiler.io.cogeo import tile as main_tile bands = tuple(map(int, bands_names)) arr, mask = main_tile(sceneid, tile_x, tile_y, tile_z, indexes=bands, **kwargs) @@ -779,14 +504,19 @@ def expression(sceneid, tile_x, tile_y, tile_z, expr=None, **kwargs): arr = dict(zip(bands_names, arr)) return ( - np.array( - [np.nan_to_num(ne.evaluate(bloc.strip(), local_dict=arr)) for bloc in rgb] + numpy.array( + [ + numpy.nan_to_num(numexpr.evaluate(bloc.strip(), local_dict=arr)) + for bloc in rgb + ] ), mask, ) -def pansharpening_brovey(rgb, pan, weight, pan_dtype): +def pansharpening_brovey( + rgb: numpy.ndarray, pan: numpy.ndarray, weight: float, pan_dtype: str +) -> 
numpy.ndarray: """ Brovey Method: Each resampled, multispectral pixel is multiplied by the ratio of the corresponding @@ -796,9 +526,11 @@ def pansharpening_brovey(rgb, pan, weight, pan_dtype): Original code from https://github.com/mapbox/rio-pansharpen """ - def _calculateRatio(rgb, pan, weight): + def _calculateRatio( + rgb: numpy.ndarray, pan: numpy.ndarray, weight: float + ) -> numpy.ndarray: return pan / ((rgb[0] + rgb[1] + rgb[2] * weight) / (2 + weight)) - with np.errstate(invalid="ignore", divide="ignore"): + with numpy.errstate(invalid="ignore", divide="ignore"): ratio = _calculateRatio(rgb, pan, weight) - return np.clip(ratio * rgb, 0, np.iinfo(pan_dtype).max).astype(pan_dtype) + return numpy.clip(ratio * rgb, 0, numpy.iinfo(pan_dtype).max).astype(pan_dtype) diff --git a/setup.py b/setup.py index 04bc13a0..eaf65504 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ readme = f.read() # Runtime requirements. -inst_reqs = ["numpy", "numexpr", "mercantile", "boto3", "rasterio[s3]>=1.1", "rio-toa"] +inst_reqs = ["numpy", "numexpr", "mercantile", "rasterio[s3]>=1.1.3", "rio-toa"] extra_reqs = { "test": ["mock", "pytest", "pytest-benchmark", "pytest-cov", "rio-cogeo"], @@ -22,7 +22,8 @@ setup( name="rio-tiler", - version="1.4.0", + version="2.0a7", + python_requires=">=3", description=u"""Get mercator tile from CloudOptimized GeoTIFF and other cloud hosted raster such as CBERS-4, Sentinel-2, Sentinel-1 and Landsat-8 AWS PDS""", long_description=readme, long_description_content_type="text/markdown", @@ -30,8 +31,8 @@ "Intended Audience :: Information Technology", "Intended Audience :: Science/Research", "License :: OSI Approved :: BSD License", + "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 2.7", "Topic :: Scientific/Engineering :: GIS", ], keywords="COG cogeo raster aws map tiler gdal rasterio", diff --git a/tests/benchmarks/test_benchmarks.py b/tests/benchmarks/test_benchmarks.py index 
435e7f7f..f9f1603a 100644 --- a/tests/benchmarks/test_benchmarks.py +++ b/tests/benchmarks/test_benchmarks.py @@ -5,7 +5,7 @@ import rasterio import mercantile -from rio_tiler import utils +from rio_tiler import reader from . import benchmark_dataset, benchmark_tiles @@ -18,7 +18,9 @@ def read_tile(src_path, tile): # We make sure to not store things in cache. with rasterio.Env(GDAL_CACHEMAX=0, NUM_THREADS="all"): with rasterio.open(src_path) as src_dst: - return utils._tile_read(src_dst, tile_bounds, 256) + return reader.part( + src_dst, tile_bounds, 256, 256, resampling_method="nearest" + ) @pytest.mark.parametrize("tile_name", ["full", "boundless"]) diff --git a/tests/fixtures/blue.tif b/tests/fixtures/blue.tif new file mode 100644 index 00000000..ab527cb4 Binary files /dev/null and b/tests/fixtures/blue.tif differ diff --git a/tests/fixtures/cog_cmap.tif b/tests/fixtures/cog_cmap.tif new file mode 100644 index 00000000..7cb1ffc5 Binary files /dev/null and b/tests/fixtures/cog_cmap.tif differ diff --git a/tests/fixtures/cog_dateline.tif b/tests/fixtures/cog_dateline.tif new file mode 100644 index 00000000..f146136e Binary files /dev/null and b/tests/fixtures/cog_dateline.tif differ diff --git a/tests/fixtures/cog_scale.tif b/tests/fixtures/cog_scale.tif new file mode 100644 index 00000000..f0408030 Binary files /dev/null and b/tests/fixtures/cog_scale.tif differ diff --git a/tests/fixtures/cog_tags.tif b/tests/fixtures/cog_tags.tif new file mode 100644 index 00000000..056ea816 Binary files /dev/null and b/tests/fixtures/cog_tags.tif differ diff --git a/tests/fixtures/green.tif b/tests/fixtures/green.tif new file mode 100644 index 00000000..002ed054 Binary files /dev/null and b/tests/fixtures/green.tif differ diff --git a/tests/fixtures/nocog.tif b/tests/fixtures/nocog.tif new file mode 100644 index 00000000..a7dfd5d1 Binary files /dev/null and b/tests/fixtures/nocog.tif differ diff --git a/tests/fixtures/red.tif b/tests/fixtures/red.tif new file mode 100644 index 
00000000..933bbc0b Binary files /dev/null and b/tests/fixtures/red.tif differ diff --git a/tests/fixtures/stac.json b/tests/fixtures/stac.json new file mode 100644 index 00000000..c01f7081 --- /dev/null +++ b/tests/fixtures/stac.json @@ -0,0 +1,67 @@ +{ + "stac_version": "0.9.0", + "stac_extensions": [], + "type": "Feature", + "id": "JQT-123456789", + "bbox": [-81.3085227080129, 32.10817938759764, -78.81735409341113, 34.22870275071835], + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + -81.3085227080129, + 32.10817938759764 + ], + [ + -78.81735409341113, + 32.10817938759764 + ], + [ + -78.81735409341113, + 34.22870275071835 + ], + [ + -81.3085227080129, + 34.22870275071835 + ], + [ + -81.3085227080129, + 32.10817938759764 + ] + ] + ] + }, + "properties": { + "datetime": "2016-05-03T13:21:30.040Z", + "collection": "JQT" + }, + "links": [ + { + "rel": "self", + "href": "http://cool-sat.com/catalog/JQT/a-fake-item.json" + }, + { + "rel": "collection", + "href": "http://cool-sat.com/catalog.json" + } + ], + "assets": { + "red": { + "href": "http://somewhere-over-the-rainbow.io/red.tif", + "title": "red" + }, + "green": { + "href": "http://somewhere-over-the-rainbow.io/green.tif", + "title": "green" + }, + "blue": { + "href": "http://somewhere-over-the-rainbow.io/blue.tif", + "title": "blue" + }, + "thumbnail": { + "href": "http://cool-sat.com/catalog/a-fake-item/thumbnail.png", + "title": "Thumbnail", + "roles": [ "thumbnail" ] + } + } +} \ No newline at end of file diff --git a/tests/test_cmap.py b/tests/test_cmap.py index fa66d3e2..e6e690f7 100644 --- a/tests/test_cmap.py +++ b/tests/test_cmap.py @@ -1,12 +1,107 @@ -"""tests rio_tiler.utils""" +"""tests rio_tiler colormaps""" -from rio_tiler import cmap, utils +import pytest + +import numpy + +from rio_tiler import colormap +from rio_tiler.cmap import cmap_list def test_get_cmaplist(): - """Should work as expected return cmap names.""" - assert len(cmap.cmap_list) == 167 - for name in cmap.cmap_list: 
- cm = utils.get_colormap(name, format="gdal") - assert cm.shape == (256, 3) - assert cm.dtype == "uint8" + """Should work as expected return all rio-tiler colormaps.""" + assert len(cmap_list) == 167 + + +def test_valid_cmaps(): + """Make sure all colormaps have 4 values and 256 items.""" + for c in cmap_list: + cm = colormap.get_colormap(c) + assert len(cm[0]) == 4 + assert len(cm.items()) == 256 + + +def test_update_alpha(): + """Should update the alpha channel.""" + cm = colormap.get_colormap("viridis") + idx = 1 + assert cm[idx][-1] == 255 + colormap._update_alpha(cm, idx) + assert cm[idx][-1] == 0 + + idx = [2, 3] + colormap._update_alpha(cm, idx) + assert cm[idx[0]][-1] == 0 + assert cm[idx[1]][-1] == 0 + + idx = 1 + assert cm[idx][-1] == 0 + colormap._update_alpha(cm, idx, alpha=255) + assert cm[idx][-1] == 255 + + +def test_remove_value(): + """Should remove cmap value.""" + cm = colormap.get_colormap("viridis") + idx = 1 + colormap._remove_value(cm, idx) + assert not cm.get(1) + + idx = [2, 3] + colormap._remove_value(cm, idx) + assert not cm.get(2) + assert not cm.get(3) + + +def test_update_cmap(): + """Should update the colormap.""" + cm = colormap.get_colormap("viridis") + val = {1: [0, 0, 0], 2: [255, 255, 255, 255]} + colormap._update_cmap(cm, val) + assert cm[1] == [0, 0, 0, 255] + assert cm[2] == [255, 255, 255, 255] + + +def test_make_lut(): + """Should create valid lookup table.""" + cm = {1: [100, 100, 100, 255], 2: [255, 255, 255, 255]} + lut = colormap.make_lut(cm) + assert len(lut) == 256 + assert lut[0].tolist() == [0, 0, 0, 0] + assert lut[1].tolist() == [100, 100, 100, 255] + assert lut[4].tolist() == [0, 0, 0, 0] + + +def test_apply_cmap(): + """Should return valid data and mask.""" + cm = {1: [0, 0, 0, 255], 2: [255, 255, 255, 255]} + data = numpy.zeros(shape=(1, 10, 10), dtype=numpy.uint8) + data[0, 2:5, 2:5] = 1 + data[0, 5:, 5:] = 2 + d, m = colormap.apply_cmap(data, cm) + assert d.shape == (3, 10, 10) + assert m.shape == (10, 10) + 
mask = numpy.zeros(shape=(10, 10), dtype=numpy.uint8) + mask[2:5, 2:5] = 255 + mask[5:, 5:] = 255 + numpy.testing.assert_array_equal(m, mask) + + with pytest.raises(Exception): + data = numpy.repeat(data, 3, axis=0) + colormap.apply_cmap(data, cm) + + +def test_apply_discrete_cmap(): + """Should return valid data and mask.""" + cm = {1: [0, 0, 0, 255], 2: [255, 255, 255, 255]} + data = numpy.zeros(shape=(1, 10, 10), dtype=numpy.uint8) + data[0, 0:2, 0:2] = 1000 + data[0, 2:5, 2:5] = 1 + data[0, 5:, 5:] = 2 + d, m = colormap.apply_discrete_cmap(data, cm) + assert d.shape == (3, 10, 10) + assert m.shape == (10, 10) + mask = numpy.zeros(shape=(10, 10), dtype=numpy.uint8) + mask[2:5, 2:5] = 255 + mask[5:, 5:] = 255 + numpy.testing.assert_array_equal(m, mask) diff --git a/tests/test_cbers.py b/tests/test_io_cbers.py similarity index 69% rename from tests/test_cbers.py rename to tests/test_io_cbers.py index 32d2fcd6..da3b8f05 100644 --- a/tests/test_cbers.py +++ b/tests/test_io_cbers.py @@ -1,9 +1,12 @@ """tests rio_tiler.sentinel2""" import os + import pytest +from mock import patch -from rio_tiler import cbers +import rasterio +from rio_tiler.io import cbers from rio_tiler.errors import TileOutsideBounds, InvalidBandName, InvalidCBERSSceneId CBERS_BUCKET = os.path.join(os.path.dirname(__file__), "fixtures", "cbers-pds") @@ -28,12 +31,17 @@ def testing_env_var(monkeypatch): monkeypatch.setenv("GDAL_DISABLE_READDIR_ON_OPEN", "TRUE") -def test_bounds_valid(monkeypatch): - """ - Should work as expected (get bounds) - """ +def mock_rasterio_open(asset): + """Mock rasterio Open.""" + assert asset.startswith("s3://cbers-pds") + asset = asset.replace("s3://cbers-pds", CBERS_BUCKET) + return rasterio.open(asset) + - monkeypatch.setattr(cbers, "CBERS_BUCKET", CBERS_BUCKET) +@patch("rio_tiler.io.cbers.rasterio") +def test_bounds_valid(rio): + """Should work as expected (get bounds)""" + rio.open = mock_rasterio_open meta = cbers.bounds(CBERS_MUX_SCENE) assert meta.get("sceneid") 
== CBERS_MUX_SCENE @@ -52,63 +60,65 @@ def test_bounds_valid(monkeypatch): assert len(meta.get("bounds")) == 4 -def test_metadata_valid_default(monkeypatch): - """Should work as expected. +@patch("rio_tiler.reader.rasterio") +def test_metadata_valid_default(rio): + """ + Should work as expected. Get bounds and get histogram cuts values for all bands """ - monkeypatch.setattr(cbers, "CBERS_BUCKET", CBERS_BUCKET) + rio.open = mock_rasterio_open meta = cbers.metadata(CBERS_MUX_SCENE) assert meta["sceneid"] == CBERS_MUX_SCENE - assert len(meta["bounds"]["value"]) == 4 + assert len(meta["bounds"]) == 4 assert len(meta["statistics"].items()) == 4 - assert meta["statistics"]["5"]["pc"] == [28, 91] + assert meta["statistics"]["5"]["pc"] == [28, 98] - meta = cbers.metadata(CBERS_MUX_SCENE, histogram_bins=20) + meta = cbers.metadata(CBERS_MUX_SCENE, hist_options=dict(bins=20)) assert meta["sceneid"] == CBERS_MUX_SCENE - assert len(meta["bounds"]["value"]) == 4 + assert len(meta["bounds"]) == 4 assert len(meta["statistics"].items()) == 4 assert len(meta["statistics"]["5"]["histogram"][0]) == 20 - assert meta["statistics"]["5"]["pc"] == [28, 91] + assert meta["statistics"]["5"]["pc"] == [28, 98] meta = cbers.metadata(CBERS_AWFI_SCENE) assert meta["sceneid"] == CBERS_AWFI_SCENE - assert len(meta["bounds"]["value"]) == 4 + assert len(meta["bounds"]) == 4 assert len(meta["statistics"].items()) == 4 meta = cbers.metadata(CBERS_PAN10M_SCENE) assert meta["sceneid"] == CBERS_PAN10M_SCENE - assert len(meta["bounds"]["value"]) == 4 + assert len(meta["bounds"]) == 4 assert len(meta["statistics"].items()) == 3 meta = cbers.metadata(CBERS_PAN5M_SCENE) assert meta["sceneid"] == CBERS_PAN5M_SCENE - assert len(meta["bounds"]["value"]) == 4 + assert len(meta["bounds"]) == 4 assert len(meta["statistics"].items()) == 1 -def test_metadata_valid_custom(monkeypatch): +@patch("rio_tiler.reader.rasterio") +def test_metadata_valid_custom(rio): """ Should work as expected (get bounds and get histogram 
cuts values for all bands) """ - - monkeypatch.setattr(cbers, "CBERS_BUCKET", CBERS_BUCKET) + rio.open = mock_rasterio_open meta = cbers.metadata(CBERS_MUX_SCENE, pmin=5, pmax=95) assert meta.get("sceneid") == CBERS_MUX_SCENE - assert len(meta["bounds"]["value"]) == 4 - assert meta["statistics"]["5"]["pc"] == [29, 60] + assert len(meta["bounds"]) == 4 + assert meta["statistics"]["5"]["pc"] == [30, 61] -def test_tile_valid_default(monkeypatch): - """ - Should work as expected - """ - - monkeypatch.setattr(cbers, "CBERS_BUCKET", CBERS_BUCKET) +@patch("rio_tiler.reader.rasterio") +@patch("rio_tiler.io.cbers.rasterio") +def test_tile_valid_default(crio, rio): + """Should work as expected""" + crio.open = mock_rasterio_open + rio.open = mock_rasterio_open tile_z = 10 tile_x = 664 @@ -139,9 +149,12 @@ def test_tile_valid_default(monkeypatch): assert mask.shape == (256, 256) -def test_tile_valid_custom(monkeypatch): +@patch("rio_tiler.reader.rasterio") +@patch("rio_tiler.io.cbers.rasterio") +def test_tile_valid_custom(crio, rio): """Should return custom band combination tiles.""" - monkeypatch.setattr(cbers, "CBERS_BUCKET", CBERS_BUCKET) + crio.open = mock_rasterio_open + rio.open = mock_rasterio_open tile_z = 10 tile_x = 664 @@ -176,9 +189,12 @@ def test_tile_valid_custom(monkeypatch): assert mask.shape == (256, 256) -def test_tile_valid_oneband(monkeypatch): +@patch("rio_tiler.reader.rasterio") +@patch("rio_tiler.io.cbers.rasterio") +def test_tile_valid_oneband(crio, rio): """Test when passing a string instead of a tuple.""" - monkeypatch.setattr(cbers, "CBERS_BUCKET", CBERS_BUCKET) + crio.open = mock_rasterio_open + rio.open = mock_rasterio_open tile_z = 10 tile_x = 390 @@ -190,9 +206,12 @@ def test_tile_valid_oneband(monkeypatch): assert mask.shape == (256, 256) -def test_tile_invalid_band(monkeypatch): +@patch("rio_tiler.reader.rasterio") +@patch("rio_tiler.io.cbers.rasterio") +def test_tile_invalid_band(crio, rio): """Should raise an error on invalid band name.""" - 
monkeypatch.setattr(cbers, "CBERS_BUCKET", CBERS_BUCKET) + crio.open = mock_rasterio_open + rio.open = mock_rasterio_open tile_z = 10 tile_x = 390 @@ -200,15 +219,15 @@ def test_tile_invalid_band(monkeypatch): bands = "21" with pytest.raises(InvalidBandName): - data, mask = cbers.tile(CBERS_PAN5M_SCENE, tile_x, tile_y, tile_z, bands=bands) - + cbers.tile(CBERS_PAN5M_SCENE, tile_x, tile_y, tile_z, bands=bands) -def test_tile_invalid_bounds(monkeypatch): - """ - Should raise an error with invalid tile - """ - monkeypatch.setattr(cbers, "CBERS_BUCKET", CBERS_BUCKET) +@patch("rio_tiler.reader.rasterio") +@patch("rio_tiler.io.cbers.rasterio") +def test_tile_invalid_bounds(crio, rio): + """Should raise an error with invalid tile.""" + crio.open = mock_rasterio_open + rio.open = mock_rasterio_open tile_z = 10 tile_x = 694 @@ -222,87 +241,95 @@ def test_cbers_id_invalid(): """Raises error on invalid cbers sceneid.""" scene = "CBERS_4_MUX_20171121_057_094" with pytest.raises(InvalidCBERSSceneId): - cbers._cbers_parse_scene_id(scene) + cbers.cbers_parser(scene) def test_cbers_id_valid(): """Parse valid CBERS sceneids and return metadata.""" scene = "CBERS_4_MUX_20171121_057_094_L2" expected_content = { - "acquisitionDay": "21", - "acquisitionMonth": "11", - "acquisitionYear": "2017", + "satellite": "CBERS", + "mission": "4", "instrument": "MUX", - "key": "CBERS4/MUX/057/094/CBERS_4_MUX_20171121_057_094_L2", + "acquisitionYear": "2017", + "acquisitionMonth": "11", + "acquisitionDay": "21", "path": "057", - "processingCorrectionLevel": "L2", "row": "094", - "mission": "4", + "processingCorrectionLevel": "L2", "scene": "CBERS_4_MUX_20171121_057_094_L2", "reference_band": "6", - "bands": ["5", "6", "7", "8"], + "bands": ("5", "6", "7", "8"), "rgb": ("7", "6", "5"), - "satellite": "CBERS", + "scheme": "s3", + "bucket": "cbers-pds", + "prefix": "CBERS4/MUX/057/094/CBERS_4_MUX_20171121_057_094_L2", } - assert cbers._cbers_parse_scene_id(scene) == expected_content + assert 
cbers.cbers_parser(scene) == expected_content scene = "CBERS_4_AWFI_20171121_057_094_L2" expected_content = { - "acquisitionDay": "21", - "acquisitionMonth": "11", - "acquisitionYear": "2017", + "satellite": "CBERS", + "mission": "4", "instrument": "AWFI", - "key": "CBERS4/AWFI/057/094/CBERS_4_AWFI_20171121_057_094_L2", + "acquisitionYear": "2017", + "acquisitionMonth": "11", + "acquisitionDay": "21", "path": "057", - "processingCorrectionLevel": "L2", "row": "094", - "mission": "4", + "processingCorrectionLevel": "L2", "scene": "CBERS_4_AWFI_20171121_057_094_L2", "reference_band": "14", - "bands": ["13", "14", "15", "16"], + "bands": ("13", "14", "15", "16"), "rgb": ("15", "14", "13"), - "satellite": "CBERS", + "scheme": "s3", + "bucket": "cbers-pds", + "prefix": "CBERS4/AWFI/057/094/CBERS_4_AWFI_20171121_057_094_L2", } - assert cbers._cbers_parse_scene_id(scene) == expected_content + assert cbers.cbers_parser(scene) == expected_content scene = "CBERS_4_PAN10M_20171121_057_094_L2" expected_content = { - "acquisitionDay": "21", - "acquisitionMonth": "11", - "acquisitionYear": "2017", + "satellite": "CBERS", + "mission": "4", "instrument": "PAN10M", - "key": "CBERS4/PAN10M/057/094/CBERS_4_PAN10M_20171121_057_094_L2", + "acquisitionYear": "2017", + "acquisitionMonth": "11", + "acquisitionDay": "21", "path": "057", - "processingCorrectionLevel": "L2", "row": "094", - "mission": "4", + "processingCorrectionLevel": "L2", "scene": "CBERS_4_PAN10M_20171121_057_094_L2", "reference_band": "4", - "bands": ["2", "3", "4"], + "bands": ("2", "3", "4"), "rgb": ("3", "4", "2"), - "satellite": "CBERS", + "scheme": "s3", + "bucket": "cbers-pds", + "prefix": "CBERS4/PAN10M/057/094/CBERS_4_PAN10M_20171121_057_094_L2", } - assert cbers._cbers_parse_scene_id(scene) == expected_content + assert cbers.cbers_parser(scene) == expected_content scene = "CBERS_4_PAN5M_20171121_057_094_L2" expected_content = { - "acquisitionDay": "21", - "acquisitionMonth": "11", - "acquisitionYear": "2017", + 
"satellite": "CBERS", + "mission": "4", "instrument": "PAN5M", - "key": "CBERS4/PAN5M/057/094/CBERS_4_PAN5M_20171121_057_094_L2", + "acquisitionYear": "2017", + "acquisitionMonth": "11", + "acquisitionDay": "21", "path": "057", - "processingCorrectionLevel": "L2", "row": "094", - "mission": "4", + "processingCorrectionLevel": "L2", "scene": "CBERS_4_PAN5M_20171121_057_094_L2", "reference_band": "1", - "bands": ["1"], + "bands": ("1"), "rgb": ("1", "1", "1"), - "satellite": "CBERS", + "scheme": "s3", + "bucket": "cbers-pds", + "prefix": "CBERS4/PAN5M/057/094/CBERS_4_PAN5M_20171121_057_094_L2", } - assert cbers._cbers_parse_scene_id(scene) == expected_content + assert cbers.cbers_parser(scene) == expected_content diff --git a/tests/test_io_cogeo.py b/tests/test_io_cogeo.py new file mode 100644 index 00000000..2141ec44 --- /dev/null +++ b/tests/test_io_cogeo.py @@ -0,0 +1,129 @@ +"""tests rio_tiler.base""" + +import os +import pytest + +from rio_tiler.io import cogeo +from rio_tiler.errors import TileOutsideBounds +from rio_tiler import constants + +PREFIX = os.path.join(os.path.dirname(__file__), "fixtures") +ADDRESS = "{}/my-bucket/hro_sources/colorado/201404_13SED190110_201404_0x1500m_CL_1.tif".format( + PREFIX +) +COG_TAGS = os.path.join(os.path.dirname(__file__), "fixtures", "cog_tags.tif") + + +@pytest.fixture(autouse=True) +def testing_env_var(monkeypatch): + """Set fake env to make sure we don't hit AWS services.""" + monkeypatch.setenv("AWS_ACCESS_KEY_ID", "jqt") + monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "rde") + monkeypatch.delenv("AWS_PROFILE", raising=False) + monkeypatch.setenv("AWS_CONFIG_FILE", "/tmp/noconfigheere") + monkeypatch.setenv("AWS_SHARED_CREDENTIALS_FILE", "/tmp/noconfighereeither") + monkeypatch.setenv("GDAL_DISABLE_READDIR_ON_OPEN", "EMPTY_DIR") + + +def test_spatial_info_valid(): + """Should work as expected (get spatial info)""" + meta = cogeo.spatial_info(ADDRESS) + assert meta.get("address") + assert meta.get("minzoom") + assert 
meta.get("maxzoom") + assert meta.get("center") + assert len(meta.get("bounds")) == 4 + + +def test_bounds_valid(): + """Should work as expected (get bounds)""" + meta = cogeo.bounds(ADDRESS) + assert meta.get("address") == ADDRESS + assert len(meta.get("bounds")) == 4 + + +def test_info_valid(): + """Should work as expected (get file info)""" + meta = cogeo.info(COG_TAGS) + assert meta.get("address") == COG_TAGS + assert meta.get("bounds") + assert meta.get("minzoom") + assert meta.get("maxzoom") + assert meta.get("band_descriptions") + assert meta.get("dtype") == "int16" + assert meta.get("colorinterp") == ["gray"] + assert meta.get("nodata_type") == "Nodata" + assert meta.get("scale") + assert meta.get("offset") + assert meta.get("band_metadata") + bmeta = meta.get("band_metadata")[0][1] + assert bmeta.get("STATISTICS_MAXIMUM") + assert bmeta.get("STATISTICS_MEAN") + assert bmeta.get("STATISTICS_MINIMUM") + + +def test_metadata_valid(): + """Get bounds and get stats for all bands.""" + meta = cogeo.metadata(ADDRESS) + assert meta["address"] == ADDRESS + assert len(meta["band_descriptions"]) == 3 + assert (1, "band1") == meta["band_descriptions"][0] + assert len(meta["statistics"].items()) == 3 + assert meta["statistics"][1]["pc"] == [12, 198] + + +def test_metadata_valid_custom(): + """Get bounds and get stats for all bands with custom percentiles.""" + meta = cogeo.metadata( + ADDRESS, pmin=5, pmax=90, hist_options=dict(bins=20), max_size=128 + ) + assert meta["address"] == ADDRESS + assert len(meta["statistics"].items()) == 3 + assert len(meta["statistics"][1]["histogram"][0]) == 20 + assert meta["statistics"][1]["pc"] == [41, 184] + + +def test_tile_valid_default(): + """Should return a 3 bands array and a full valid mask.""" + tile_z = 21 + tile_x = 438217 + tile_y = 801835 + + data, mask = cogeo.tile(ADDRESS, tile_x, tile_y, tile_z) + assert data.shape == (3, 256, 256) + assert mask.all() + + +def test_tile_invalid_bounds(): + """Should raise an error with 
invalid tile.""" + tile_z = 19 + tile_x = 554 + tile_y = 200458 + + with pytest.raises(TileOutsideBounds): + cogeo.tile(ADDRESS, tile_x, tile_y, tile_z) + + +def test_point_valid(): + """Read point.""" + lon = -104.77499638118547 + lat = 38.953606785685125 + assert cogeo.point(ADDRESS, lon, lat) + + +def test_area_valid(): + """Read part of an image.""" + bbox = ( + -104.77506637573242, + 38.95353532141205, + -104.77472305297852, + 38.95366881479647, + ) + data, mask = cogeo.area(ADDRESS, bbox) + assert data.shape == (3, 100, 199) + + data, mask = cogeo.area(ADDRESS, bbox, max_size=100) + assert data.shape == (3, 51, 100) + + data, mask = cogeo.area(ADDRESS, bbox, dst_crs=constants.WGS84_CRS) + assert data.shape == (3, 82, 210) diff --git a/tests/test_landsat.py b/tests/test_io_landsat.py similarity index 51% rename from tests/test_landsat.py rename to tests/test_io_landsat.py index 04ed2f56..84d0909d 100644 --- a/tests/test_landsat.py +++ b/tests/test_io_landsat.py @@ -3,18 +3,15 @@ import os import pytest +import numpy from mock import patch -from rasterio.crs import CRS from rio_toa import toa_utils -from rio_tiler import landsat8 -from rio_tiler.errors import ( - TileOutsideBounds, - InvalidBandName, - NoOverviewWarning, - InvalidLandsatSceneId, -) +import rasterio +from rio_tiler.io import landsat8 +from rio_tiler.errors import TileOutsideBounds, InvalidBandName, InvalidLandsatSceneId + LANDSAT_SCENE_C1 = "LC08_L1TP_016037_20170813_20170814_01_RT" LANDSAT_BUCKET = os.path.join(os.path.dirname(__file__), "fixtures", "landsat-pds") @@ -30,6 +27,13 @@ LANDSAT_METADATA_RAW = f.read().encode("utf-8") +def mock_rasterio_open(asset): + """Mock rasterio Open.""" + assert asset.startswith("s3://landsat-pds") + asset = asset.replace("s3://landsat-pds", LANDSAT_BUCKET) + return rasterio.open(asset) + + @pytest.fixture(autouse=True) def testing_env_var(monkeypatch): """Set fake env to make sure we don't hit AWS services.""" @@ -41,12 +45,9 @@ def 
testing_env_var(monkeypatch): monkeypatch.setenv("GDAL_DISABLE_READDIR_ON_OPEN", "TRUE") -@patch("rio_tiler.landsat8._landsat_get_mtl") +@patch("rio_tiler.io.landsat8._landsat_get_mtl") def test_bounds_valid(landsat_get_mtl): - """ - Should work as expected (get and parse metadata) - """ - + """Should work as expected (get and parse metadata).""" landsat_get_mtl.return_value = LANDSAT_METADATA meta = landsat8.bounds(LANDSAT_SCENE_C1) @@ -54,51 +55,49 @@ def test_bounds_valid(landsat_get_mtl): assert len(meta.get("bounds")) == 4 -@patch("rio_tiler.landsat8._landsat_get_mtl") -def test_metadata_valid_default(landsat_get_mtl, monkeypatch): +@patch("rio_tiler.io.landsat8.rasterio") +@patch("rio_tiler.io.landsat8._landsat_get_mtl") +def test_metadata_valid_default(landsat_get_mtl, rio): """Get bounds and get stats for all bands.""" - monkeypatch.setattr(landsat8, "LANDSAT_BUCKET", LANDSAT_BUCKET) landsat_get_mtl.return_value = LANDSAT_METADATA + rio.open = mock_rasterio_open meta = landsat8.metadata(LANDSAT_SCENE_C1) assert meta["sceneid"] == LANDSAT_SCENE_C1 - assert len(meta["bounds"]["value"]) == 4 + assert len(meta["bounds"]) == 4 assert len(meta["statistics"].items()) == 12 assert len(meta["statistics"]["1"]["histogram"][0]) == 10 - assert list(map(int, meta["statistics"]["1"]["pc"])) == [1210, 7046] + assert list(map(int, meta["statistics"]["1"]["pc"])) == [1206, 6957] - meta = landsat8.metadata(LANDSAT_SCENE_C1, histogram_bins=20) + meta = landsat8.metadata(LANDSAT_SCENE_C1, hist_options=dict(bins=20)) assert meta["sceneid"] == LANDSAT_SCENE_C1 assert len(meta["statistics"]["1"]["histogram"][0]) == 20 - meta = landsat8.metadata( - LANDSAT_SCENE_C1, histogram_bins=None, histogram_range=[1000, 4000] - ) + meta = landsat8.metadata(LANDSAT_SCENE_C1, hist_options=dict(range=[1000, 4000])) assert meta["sceneid"] == LANDSAT_SCENE_C1 assert len(meta["statistics"]["1"]["histogram"][0]) == 10 -@patch("rio_tiler.landsat8._landsat_get_mtl") -def 
test_metadata_valid_custom(landsat_get_mtl, monkeypatch): +@patch("rio_tiler.io.landsat8.rasterio") +@patch("rio_tiler.io.landsat8._landsat_get_mtl") +def test_metadata_valid_custom(landsat_get_mtl, rio): """Get bounds and get stats for all bands with custom percentiles.""" - monkeypatch.setattr(landsat8, "LANDSAT_BUCKET", LANDSAT_BUCKET) landsat_get_mtl.return_value = LANDSAT_METADATA + rio.open = mock_rasterio_open meta = landsat8.metadata(LANDSAT_SCENE_C1, pmin=10, pmax=90) assert meta["sceneid"] == LANDSAT_SCENE_C1 - assert len(meta["bounds"]["value"]) == 4 + assert len(meta["bounds"]) == 4 assert len(meta["statistics"].items()) == 12 - assert list(map(int, meta["statistics"]["1"]["pc"])) == [1275, 3918] + assert list(map(int, meta["statistics"]["1"]["pc"])) == [1274, 3964] -@patch("rio_tiler.landsat8._landsat_get_mtl") -def test_tile_valid_default(landsat_get_mtl, monkeypatch): - """ - Should work as expected - """ - - monkeypatch.setattr(landsat8, "LANDSAT_BUCKET", LANDSAT_BUCKET) +@patch("rio_tiler.io.landsat8.rasterio") +@patch("rio_tiler.io.landsat8._landsat_get_mtl") +def test_tile_valid(landsat_get_mtl, rio): + """Should work as expected.""" landsat_get_mtl.return_value = LANDSAT_METADATA + rio.open = mock_rasterio_open tile_z = 8 tile_x = 71 @@ -106,99 +105,54 @@ def test_tile_valid_default(landsat_get_mtl, monkeypatch): data, mask = landsat8.tile(LANDSAT_SCENE_C1, tile_x, tile_y, tile_z) assert data.shape == (3, 256, 256) + assert data.dtype == numpy.uint16 assert mask.shape == (256, 256) + assert not mask.all() - -@patch("rio_tiler.landsat8._landsat_get_mtl") -def test_tile_valid_nrg(landsat_get_mtl, monkeypatch): - """Should return a custom band combination tile.""" - monkeypatch.setattr(landsat8, "LANDSAT_BUCKET", LANDSAT_BUCKET) - landsat_get_mtl.return_value = LANDSAT_METADATA - - tile_z = 8 - tile_x = 71 - tile_y = 102 - bands = ("5", "4", "3") - data, mask = landsat8.tile(LANDSAT_SCENE_C1, tile_x, tile_y, tile_z, bands=bands) + data, mask = 
landsat8.tile( + LANDSAT_SCENE_C1, tile_x, tile_y, tile_z, bands=("5", "4", "3") + ) assert data.shape == (3, 256, 256) assert mask.shape == (256, 256) - -@patch("rio_tiler.landsat8._landsat_get_mtl") -def test_tile_valid_tir(landsat_get_mtl, monkeypatch): - """Should return a tile and mask from TIR band.""" - monkeypatch.setattr(landsat8, "LANDSAT_BUCKET", LANDSAT_BUCKET) - landsat_get_mtl.return_value = LANDSAT_METADATA - - tile_z = 8 - tile_x = 71 - tile_y = 102 - bands = "10" - - data, mask = landsat8.tile(LANDSAT_SCENE_C1, tile_x, tile_y, tile_z, bands=bands) + data, mask = landsat8.tile(LANDSAT_SCENE_C1, tile_x, tile_y, tile_z, bands="10") assert data.shape == (1, 256, 256) + assert data.dtype == numpy.uint16 assert mask.shape == (256, 256) - -@patch("rio_tiler.landsat8._landsat_get_mtl") -def test_tile_valid_qa(landsat_get_mtl, monkeypatch): - """Should return a tile and mask from TIR band.""" - monkeypatch.setattr(landsat8, "LANDSAT_BUCKET", LANDSAT_BUCKET) - landsat_get_mtl.return_value = LANDSAT_METADATA - - tile_z = 8 - tile_x = 71 - tile_y = 102 - bands = "QA" - - data, mask = landsat8.tile(LANDSAT_SCENE_C1, tile_x, tile_y, tile_z, bands=bands) + data, mask = landsat8.tile(LANDSAT_SCENE_C1, tile_x, tile_y, tile_z, bands="QA") assert data.shape == (1, 256, 256) + assert data.dtype == numpy.uint16 assert mask.shape == (256, 256) + assert not mask.all() + data, mask = landsat8.tile(LANDSAT_SCENE_C1, tile_x, tile_y, tile_z, pan=True) + assert data.shape == (3, 256, 256) + assert data.dtype == numpy.uint16 + assert mask.shape == (256, 256) -@patch("rio_tiler.landsat8._landsat_get_mtl") -def test_tile_invalidband(landsat_get_mtl, monkeypatch): - """Should raise an error on invalid band name.""" - monkeypatch.setattr(landsat8, "LANDSAT_BUCKET", LANDSAT_BUCKET) +@patch("rio_tiler.io.landsat8.rasterio") +@patch("rio_tiler.io.landsat8._landsat_get_mtl") +def test_tile_invalidband(landsat_get_mtl, rio): + """Should raise an error on invalid band name.""" tile_z = 
8 tile_x = 71 tile_y = 102 bands = "25" with pytest.raises(InvalidBandName): - data, mask = landsat8.tile( - LANDSAT_SCENE_C1, tile_x, tile_y, tile_z, bands=bands - ) - landsat_get_mtl.assert_not_called() - - -@patch("rio_tiler.landsat8._landsat_get_mtl") -def test_tile_valid_pan(landsat_get_mtl, monkeypatch): - """ - Should work as expected - """ - - monkeypatch.setattr(landsat8, "LANDSAT_BUCKET", LANDSAT_BUCKET) - landsat_get_mtl.return_value = LANDSAT_METADATA - - tile_z = 8 - tile_x = 71 - tile_y = 102 - - data, mask = landsat8.tile(LANDSAT_SCENE_C1, tile_x, tile_y, tile_z, pan=True) - assert data.shape == (3, 256, 256) - assert mask.shape == (256, 256) + landsat8.tile(LANDSAT_SCENE_C1, tile_x, tile_y, tile_z, bands=bands) + landsat_get_mtl.assert_not_called() -@patch("rio_tiler.landsat8._landsat_get_mtl") -def test_tile_invalid_bounds(landsat_get_mtl, monkeypatch): - """ - Should raise an error with invalid tile - """ - monkeypatch.setattr(landsat8, "LANDSAT_BUCKET", LANDSAT_BUCKET) +@patch("rio_tiler.io.landsat8.rasterio") +@patch("rio_tiler.io.landsat8._landsat_get_mtl") +def test_tile_invalid_bounds(landsat_get_mtl, rio): + """Should raise an error with invalid tile""" landsat_get_mtl.return_value = LANDSAT_METADATA + rio.open = mock_rasterio_open tile_z = 8 tile_x = 701 @@ -212,63 +166,66 @@ def test_landsat_id_pre_invalid(): """Raises error on invalid pre-collection.""" scene = "L0300342017083LGN00" with pytest.raises(InvalidLandsatSceneId): - landsat8._landsat_parse_scene_id(scene) + landsat8.landsat_parser(scene) def test_landsat_id_c1_invalid(): """Raises error on invalid collection1 sceneid.""" scene = "LC08_005004_20170410_20170414_01_T1" with pytest.raises(InvalidLandsatSceneId): - landsat8._landsat_parse_scene_id(scene) + landsat8.landsat_parser(scene) def test_landsat_id_pre_valid(): """Parse landsat valid pre-collection sceneid and return metadata.""" scene = "LC80300342017083LGN00" expected_content = { - "acquisitionJulianDay": "083", - 
"acquisitionYear": "2017", - "archiveVersion": "00", - "date": "2017-03-24", - "groundStationIdentifier": "LGN", - "key": "L8/030/034/LC80300342017083LGN00/LC80300342017083LGN00", + "sensor": "C", + "satellite": "8", "path": "030", "row": "034", - "satellite": "8", + "acquisitionYear": "2017", + "acquisitionJulianDay": "083", + "groundStationIdentifier": "LGN", + "archiveVersion": "00", "scene": "LC80300342017083LGN00", - "sensor": "C", + "date": "2017-03-24", + "scheme": "s3", + "bucket": "landsat-pds", + "prefix": "L8/030/034/LC80300342017083LGN00", } - assert landsat8._landsat_parse_scene_id(scene) == expected_content + assert landsat8.landsat_parser(scene) == expected_content def test_landsat_id_c1_valid(): """Parse landsat valid collection1 sceneid and return metadata.""" scene = "LC08_L1TP_005004_20170410_20170414_01_T1" expected_content = { - "acquisitionDay": "10", - "acquisitionMonth": "04", - "acquisitionYear": "2017", - "collectionCategory": "T1", - "collectionNumber": "01", - "date": "2017-04-10", - "key": "c1/L8/005/004/LC08_L1TP_005004_20170410_\ -20170414_01_T1/LC08_L1TP_005004_20170410_20170414_01_T1", - "path": "005", + "sensor": "C", + "satellite": "08", "processingCorrectionLevel": "L1TP", - "processingDay": "14", - "processingMonth": "04", - "processingYear": "2017", + "path": "005", "row": "004", - "satellite": "08", + "acquisitionYear": "2017", + "acquisitionMonth": "04", + "acquisitionDay": "10", + "processingYear": "2017", + "processingMonth": "04", + "processingDay": "14", + "collectionNumber": "01", + "collectionCategory": "T1", "scene": "LC08_L1TP_005004_20170410_20170414_01_T1", - "sensor": "C", + "date": "2017-04-10", + "scheme": "s3", + "bucket": "landsat-pds", + "prefix": "c1/L8/005/004/LC08_L1TP_005004_20170410_20170414_01_T1", } - assert landsat8._landsat_parse_scene_id(scene) == expected_content + assert landsat8.landsat_parser(scene) == expected_content -@patch("rio_tiler.landsat8.urlopen") +@patch("rio_tiler.io.landsat8.urlopen") 
def test_landsat_get_mtl_valid(urlopen): """Return MTL metadata.""" urlopen.return_value.read.return_value = LANDSAT_METADATA_RAW @@ -279,47 +236,9 @@ def test_landsat_get_mtl_valid(urlopen): ) -@patch("rio_tiler.landsat8.urlopen") +@patch("rio_tiler.io.landsat8.urlopen") def test_landsat_get_mtl_invalid(urlopen): """Raises error when MTL file not found or empty.""" urlopen.return_value.read.return_value = {} with pytest.raises(Exception): landsat8._landsat_get_mtl(LANDSAT_SCENE_C1) - - -def test_landsat_get_stats_valid(): - """Should return a valid dict with array statistics.""" - stats = landsat8._landsat_stats( - "4", LANDSAT_PATH, LANDSAT_METADATA["L1_METADATA_FILE"] - ) - assert stats["bounds"] - assert stats["bounds"]["crs"] == CRS({"init": "EPSG:4326"}) - assert stats["statistics"]["4"] - assert isinstance(stats["statistics"]["4"]["pc"][0], float) - assert list(map(int, stats["statistics"]["4"]["pc"])) == [423, 7028] - - -def test_landsat_get_stats_validOptions(): - """Should return a valid dict with array statistics.""" - stats = landsat8._landsat_stats( - "10", - LANDSAT_PATH, - LANDSAT_METADATA["L1_METADATA_FILE"], - overview_level=2, - percentiles=(5, 95), - dst_crs="epsg:3857", - ) - assert stats["bounds"] - assert stats["bounds"]["crs"] == "epsg:3857" - assert stats["statistics"]["10"] - assert list(map(int, stats["statistics"]["10"]["pc"])) == [281, 297] - - -def test_landsat_get_stats_noOverviews(monkeypatch): - """Should return a valid dict with array statistics and warns about missing overviews.""" - monkeypatch.setenv("GDAL_DISABLE_READDIR_ON_OPEN", "EMPTY_DIR") - with pytest.warns(NoOverviewWarning): - stats = landsat8._landsat_stats( - "5", LANDSAT_PATH, LANDSAT_METADATA["L1_METADATA_FILE"] - ) - assert stats["statistics"]["5"] diff --git a/tests/test_sentinel1.py b/tests/test_io_sentinel1.py similarity index 74% rename from tests/test_sentinel1.py rename to tests/test_io_sentinel1.py index ae767016..9e92aacc 100644 --- a/tests/test_sentinel1.py 
+++ b/tests/test_io_sentinel1.py @@ -1,4 +1,4 @@ -"""tests rio_tiler.sentinel2""" +"""tests rio_tiler.io.sentinel1""" import os import pytest @@ -6,7 +6,8 @@ from mock import patch -from rio_tiler import sentinel1 +import rasterio +from rio_tiler.io import sentinel1 from rio_tiler.errors import TileOutsideBounds, InvalidBandName, InvalidSentinelSceneId SENTINEL_SCENE = "S1A_IW_GRDH_1SDV_20180716T004042_20180716T004107_022812_02792A_FD5B" @@ -21,6 +22,13 @@ SENTINEL_METADATA = f.read().encode("utf-8") +def mock_rasterio_open(asset): + """Mock rasterio Open.""" + assert asset.startswith("s3://sentinel-s1-l1c") + asset = asset.replace("s3://sentinel-s1-l1c", SENTINEL_BUCKET) + return rasterio.open(asset) + + @pytest.fixture(autouse=True) def testing_env_var(monkeypatch): """Set fake env to make sure we don't hit AWS services.""" @@ -32,11 +40,9 @@ def testing_env_var(monkeypatch): monkeypatch.setenv("GDAL_DISABLE_READDIR_ON_OPEN", "EMPTY_DIR") -@patch("rio_tiler.sentinel1.boto3_session") -def test_bounds_valid(session, monkeypatch): +@patch("rio_tiler.io.sentinel1.boto3_session") +def test_bounds_valid(session): """Should work as expected (get bounds)""" - monkeypatch.setattr(sentinel1, "SENTINEL_BUCKET", SENTINEL_BUCKET) - session.return_value.client.return_value.get_object.return_value = { "Body": BytesIO(SENTINEL_METADATA) } @@ -47,31 +53,33 @@ def test_bounds_valid(session, monkeypatch): == "S1A_IW_GRDH_1SDV_20180716T004042_20180716T004107_022812_02792A_FD5B" ) assert len(meta.get("bounds")) == 4 + calls = session.return_value.client.return_value.get_object.call_args + assert calls[1]["Bucket"] == "sentinel-s1-l1c" + assert calls[1]["Key"].endswith("productInfo.json") def test_parse_sceneid(): """Test sentinel1._sentinel_parse_scene_id.""" - meta = sentinel1._sentinel_parse_scene_id(SENTINEL_SCENE) + meta = sentinel1.sentinel1_parser(SENTINEL_SCENE) meta[ "key" ] = "GRD/2018/7/16/IW/DV/S1A_IW_GRDH_1SDV_20180716T004042_20180716T004107_022812_02792A_FD5B" with 
pytest.raises(InvalidSentinelSceneId): - sentinel1._sentinel_parse_scene_id("S2A_tile_20170729_19UDP_0") + sentinel1.sentinel1_parser("S2A_tile_20170729_19UDP_0") -def test_metadata(monkeypatch): +@patch("rio_tiler.io.sentinel1.rasterio") +def test_metadata(rio): """Test sentinel1.metadata.""" - monkeypatch.setattr(sentinel1, "SENTINEL_BUCKET", SENTINEL_BUCKET) + rio.open = mock_rasterio_open meta = sentinel1.metadata(SENTINEL_SCENE, bands=("vv", "vh")) assert meta["sceneid"] == SENTINEL_SCENE - assert len(meta["bounds"]["value"]) == 4 + assert len(meta["bounds"]) == 4 assert len(meta["statistics"].items()) == 2 - assert meta["statistics"]["vv"]["min"] == 4 - assert meta["statistics"]["vh"]["max"] == 730 - assert meta["minzoom"] == 7 - assert meta["maxzoom"] == 9 + assert meta["statistics"]["vv"]["min"] == 1 + assert meta["statistics"]["vh"]["max"] == 507 meta = sentinel1.metadata(SENTINEL_SCENE, bands="vv") assert len(meta["statistics"].items()) == 1 @@ -83,9 +91,10 @@ def test_metadata(monkeypatch): sentinel1.metadata(SENTINEL_SCENE) -def test_tile_valid_default(monkeypatch): +@patch("rio_tiler.io.sentinel1.rasterio") +def test_tile_valid_default(rio): """Test tile reading.""" - monkeypatch.setattr(sentinel1, "SENTINEL_BUCKET", SENTINEL_BUCKET) + rio.open = mock_rasterio_open tile_z = 8 tile_x = 183 diff --git a/tests/test_sentinel2.py b/tests/test_io_sentinel2.py similarity index 54% rename from tests/test_sentinel2.py rename to tests/test_io_sentinel2.py index 6c4eb893..2a73dfda 100644 --- a/tests/test_sentinel2.py +++ b/tests/test_io_sentinel2.py @@ -6,9 +6,9 @@ from mock import patch import rasterio -from rio_tiler import sentinel2 +from rio_tiler.io import sentinel2 + from rio_tiler.errors import ( - DeprecationWarning, TileOutsideBounds, InvalidBandName, InvalidSentinelSceneId, @@ -32,12 +32,12 @@ def testing_env_var(monkeypatch): def mock_rasterio_open(asset): """Mock rasterio Open for Sentinel2 dataset.""" - assert asset.startswith("s3://sentinel-s2") + 
assert asset.startswith("s3://sentinel-s2-l") asset = asset.replace("s3://sentinel-s2", SENTINEL_BUCKET) return rasterio.open(asset) -@patch("rio_tiler.sentinel2.rasterio") +@patch("rio_tiler.io.sentinel2.rasterio") def test_bounds_valid(rio): """Should work as expected (get bounds).""" rio.open = mock_rasterio_open @@ -47,47 +47,49 @@ def test_bounds_valid(rio): assert len(meta.get("bounds")) == 4 -@patch("rio_tiler.sentinel2.rasterio") +@patch("rio_tiler.reader.rasterio") def test_metadata_valid_default(rio): """Get bounds and get stats for all bands.""" rio.open = mock_rasterio_open meta = sentinel2.metadata(SENTINEL_SCENE) assert meta["sceneid"] == SENTINEL_SCENE - assert len(meta["bounds"]["value"]) == 4 + assert len(meta["bounds"]) == 4 assert len(meta["statistics"].items()) == 13 - assert meta["statistics"]["01"]["pc"] == [1088, 8235] + assert meta["statistics"]["01"]["pc"] == [1094, 8170] -@patch("rio_tiler.sentinel2.rasterio") +@patch("rio_tiler.reader.rasterio") def test_metadata_valid_custom(rio): """Get bounds and get stats for all bands with custom percentiles.""" rio.open = mock_rasterio_open meta = sentinel2.metadata(SENTINEL_SCENE, pmin=5, pmax=95) assert meta["sceneid"] == SENTINEL_SCENE - assert len(meta["bounds"]["value"]) == 4 + assert len(meta["bounds"]) == 4 assert len(meta["statistics"].items()) == 13 - assert meta["statistics"]["01"]["pc"] == [1110, 7236] + assert meta["statistics"]["01"]["pc"] == [1116, 7166] - meta = sentinel2.metadata(SENTINEL_SCENE, pmin=5, pmax=95, histogram_bins=20) + meta = sentinel2.metadata( + SENTINEL_SCENE, pmin=5, pmax=95, hist_options=dict(bins=20) + ) assert meta["sceneid"] == SENTINEL_SCENE - assert len(meta["bounds"]["value"]) == 4 + assert len(meta["bounds"]) == 4 assert len(meta["statistics"].items()) == 13 - assert meta["statistics"]["01"]["pc"] == [1110, 7236] + assert meta["statistics"]["01"]["pc"] == [1116, 7166] assert len(meta["statistics"]["01"]["histogram"][0]) == 20 - meta = sentinel2.metadata( - 
SENTINEL_SCENE, histogram_bins=None, histogram_range=[1000, 4000] - ) + meta = sentinel2.metadata(SENTINEL_SCENE, hist_options=dict(range=[1000, 4000])) assert meta["sceneid"] == SENTINEL_SCENE assert len(meta["statistics"]["01"]["histogram"][0]) == 10 -@patch("rio_tiler.sentinel2.rasterio") -def test_tile_valid_default(rio): +@patch("rio_tiler.io.sentinel2.rasterio") +@patch("rio_tiler.reader.rasterio") +def test_tile_valid_default(rio, srio): """Should work as expected.""" rio.open = mock_rasterio_open + srio.open = mock_rasterio_open tile_z = 8 tile_x = 77 @@ -97,56 +99,18 @@ def test_tile_valid_default(rio): assert data.shape == (3, 256, 256) assert mask.shape == (256, 256) - -@patch("rio_tiler.sentinel2.rasterio") -def test_tile_valid_nrg(rio): - """Should work as expected""" - rio.open = mock_rasterio_open - - tile_z = 8 - tile_x = 77 - tile_y = 89 - bands = ("08", "04", "03") - - data, mask = sentinel2.tile(SENTINEL_SCENE, tile_x, tile_y, tile_z, bands=bands) + data, mask = sentinel2.tile( + SENTINEL_SCENE, tile_x, tile_y, tile_z, bands=("08", "04", "03") + ) assert data.shape == (3, 256, 256) assert mask.shape == (256, 256) - -@patch("rio_tiler.sentinel2.rasterio") -def test_tile_valid_oneband(rio): - """Test when passing a string instead of a tuple.""" - rio.open = mock_rasterio_open - - tile_z = 8 - tile_x = 77 - tile_y = 89 - bands = "08" - - data, mask = sentinel2.tile(SENTINEL_SCENE, tile_x, tile_y, tile_z, bands=bands) + data, mask = sentinel2.tile(SENTINEL_SCENE, tile_x, tile_y, tile_z, bands="08") assert data.shape == (1, 256, 256) assert mask.shape == (256, 256) - -@patch("rio_tiler.sentinel2.rasterio") -def test_tile_invalid_band(rio): - """Should raise an error on invalid band name.""" - rio.open = mock_rasterio_open - - tile_z = 8 - tile_x = 77 - tile_y = 89 - bands = "9A" - with pytest.raises(InvalidBandName): - data, mask = sentinel2.tile(SENTINEL_SCENE, tile_x, tile_y, tile_z, bands=bands) - rio.assert_not_called() - - 
-@patch("rio_tiler.sentinel2.rasterio") -def test_tile_invalid_bounds(rio): - """Should raise an error with invalid tile.""" - rio.open = mock_rasterio_open + sentinel2.tile(SENTINEL_SCENE, tile_x, tile_y, tile_z, bands="9A") tile_z = 8 tile_x = 177 @@ -154,149 +118,33 @@ def test_tile_invalid_bounds(rio): with pytest.raises(TileOutsideBounds): sentinel2.tile(SENTINEL_SCENE, tile_x, tile_y, tile_z) - assert rio.called_once() def test_sentinel_id_invalid(): """Raises error on invalid sentinel-2 sceneid.""" with pytest.raises(InvalidSentinelSceneId): - sentinel2._sentinel_parse_scene_id("S2A_tile_20170323_17SNC") + sentinel2.sentinel2_parser("S2A_tile_20170323_17SNC") -def test_sentinel_id_valid(): +def test_sentinel_newid_valid(): """Parse sentinel-2 valid sceneid and return metadata.""" expected_content = { - "acquisitionDay": "23", - "acquisitionMonth": "03", - "acquisitionYear": "2017", - "aws_bucket": "s3://sentinel-s2-l1c", - "aws_prefix": "tiles/17/S/NC/2017/3/23/0", - "key": "tiles/17/S/NC/2017/3/23/0", - "lat": "S", - "num": "0", - "satellite": "A", - "scene": "S2A_tile_20170323_17SNC_0", "sensor": "2", - "sq": "NC", - "utm": "17", - "processingLevel": "L1C", - "preview_file": "preview.jp2", - "preview_prefix": "preview", - "bands": [ - "02", - "03", - "04", - "08", - "05", - "06", - "07", - "11", - "12", - "8A", - "01", - "09", - "10", - ], - "valid_bands": [ - "02", - "03", - "04", - "08", - "05", - "06", - "07", - "11", - "12", - "8A", - "01", - "09", - "10", - ], - } - with pytest.warns(DeprecationWarning): - assert ( - sentinel2._sentinel_parse_scene_id("S2A_tile_20170323_17SNC_0") - == expected_content - ) - - -def test_sentinel_id_valid_strip(): - """Parse sentinel-2 valid sceneid with leading 0 and return metadata.""" - expected_content = { - "acquisitionDay": "23", - "acquisitionMonth": "03", - "acquisitionYear": "2017", - "aws_bucket": "s3://sentinel-s2-l1c", - "aws_prefix": "tiles/7/S/NC/2017/3/23/0", - "key": "tiles/7/S/NC/2017/3/23/0", - "lat": 
"S", - "num": "0", "satellite": "A", - "scene": "S2A_tile_20170323_07SNC_0", - "sensor": "2", - "sq": "NC", - "utm": "07", "processingLevel": "L1C", - "preview_file": "preview.jp2", - "preview_prefix": "preview", - "bands": [ - "02", - "03", - "04", - "08", - "05", - "06", - "07", - "11", - "12", - "8A", - "01", - "09", - "10", - ], - "valid_bands": [ - "02", - "03", - "04", - "08", - "05", - "06", - "07", - "11", - "12", - "8A", - "01", - "09", - "10", - ], - } - - with pytest.warns(DeprecationWarning): - assert ( - sentinel2._sentinel_parse_scene_id("S2A_tile_20170323_07SNC_0") - == expected_content - ) - - -def test_sentinel_newid_valid(): - """Parse sentinel-2 valid sceneid and return metadata.""" - expected_content = { - "acquisitionDay": "29", - "acquisitionMonth": "07", "acquisitionYear": "2017", - "aws_bucket": "s3://sentinel-s2-l1c", - "aws_prefix": "tiles/19/U/DP/2017/7/29/0", - "key": "tiles/19/U/DP/2017/7/29/0", + "acquisitionMonth": "07", + "acquisitionDay": "29", + "utm": "19", "lat": "U", + "sq": "DP", "num": "0", - "satellite": "A", "scene": "S2A_L1C_20170729_19UDP_0", - "sensor": "2", - "sq": "DP", - "utm": "19", - "processingLevel": "L1C", + "scheme": "s3", + "bucket": "sentinel-s2-l1c", + "prefix": "tiles/19/U/DP/2017/7/29/0", "preview_file": "preview.jp2", - "preview_prefix": "preview", + "preview_prefix": "", "bands": [ "02", "03", @@ -328,26 +176,26 @@ def test_sentinel_newid_valid(): "10", ], } - assert sentinel2._sentinel_parse_scene_id(SENTINEL_SCENE) == expected_content + assert sentinel2.sentinel2_parser(SENTINEL_SCENE) == expected_content def test_sentinel_newidl2a_valid(): """Parse sentinel-2 valid sceneid and return metadata.""" expected_content = { - "acquisitionDay": "29", - "acquisitionMonth": "07", + "sensor": "2", + "satellite": "A", + "processingLevel": "L2A", "acquisitionYear": "2017", - "aws_bucket": "s3://sentinel-s2-l2a", - "aws_prefix": "tiles/19/U/DP/2017/7/29/0", - "key": "tiles/19/U/DP/2017/7/29/0", + "acquisitionMonth": 
"07", + "acquisitionDay": "29", + "utm": "19", "lat": "U", + "sq": "DP", "num": "0", - "satellite": "A", "scene": "S2A_L2A_20170729_19UDP_0", - "sensor": "2", - "sq": "DP", - "utm": "19", - "processingLevel": "L2A", + "scheme": "s3", + "bucket": "sentinel-s2-l2a", + "prefix": "tiles/19/U/DP/2017/7/29/0", "preview_file": "R60m/TCI.jp2", "preview_prefix": "R60m", "bands": [ @@ -382,10 +230,10 @@ def test_sentinel_newidl2a_valid(): "WVP", ], } - assert sentinel2._sentinel_parse_scene_id(SENTINEL_SCENE_L2) == expected_content + assert sentinel2.sentinel2_parser(SENTINEL_SCENE_L2) == expected_content -@patch("rio_tiler.sentinel2.rasterio") +@patch("rio_tiler.io.sentinel2.rasterio") def test_boundsl2_valid(rio): """Should work as expected (get bounds).""" rio.open = mock_rasterio_open @@ -395,22 +243,24 @@ def test_boundsl2_valid(rio): assert len(meta.get("bounds")) == 4 -@patch("rio_tiler.sentinel2.rasterio") +@patch("rio_tiler.reader.rasterio") def test_metadatal2_valid_default(rio): """Get bounds and get stats for all bands.""" rio.open = mock_rasterio_open meta = sentinel2.metadata(SENTINEL_SCENE_L2) assert meta["sceneid"] == SENTINEL_SCENE_L2 - assert len(meta["bounds"]["value"]) == 4 + assert len(meta["bounds"]) == 4 assert len(meta["statistics"].items()) == 12 assert meta["statistics"]["01"]["pc"] == [1094, 8170] -@patch("rio_tiler.sentinel2.rasterio") -def test_tile_validl2_default(rio): +@patch("rio_tiler.io.sentinel2.rasterio") +@patch("rio_tiler.reader.rasterio") +def test_tile_validl2_default(rio, srio): """Should work as expected.""" rio.open = mock_rasterio_open + srio.open = mock_rasterio_open tile_z = 8 tile_x = 77 diff --git a/tests/test_io_stac.py b/tests/test_io_stac.py new file mode 100644 index 00000000..495462b6 --- /dev/null +++ b/tests/test_io_stac.py @@ -0,0 +1,90 @@ +"""tests rio_tiler.io.stac""" + +import os +import json +import pytest +from mock import patch + +import rasterio +from rio_tiler.errors import InvalidBandName, TileOutsideBounds + 
+PREFIX = os.path.join(os.path.dirname(__file__), "fixtures") + +with open(os.path.join(PREFIX, "stac.json")) as f: + stac_item = json.loads(f.read()) + + +def mock_rasterio_open(asset): + """Mock rasterio Open.""" + assert asset.startswith("http://somewhere-over-the-rainbow.io") + asset = asset.replace("http://somewhere-over-the-rainbow.io", PREFIX) + return rasterio.open(asset) + + +@pytest.fixture(autouse=True) +def app(monkeypatch): + """Set fake env to make sure we don't hit AWS services.""" + monkeypatch.setenv("AWS_ACCESS_KEY_ID", "jqt") + monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "rde") + monkeypatch.delenv("AWS_PROFILE", raising=False) + monkeypatch.setenv("AWS_CONFIG_FILE", "/tmp/noconfigheere") + monkeypatch.setenv("AWS_SHARED_CREDENTIALS_FILE", "/tmp/noconfighereeither") + monkeypatch.setenv("GDAL_DISABLE_READDIR_ON_OPEN", "EMPTY_DIR") + + from rio_tiler.io import stac + + return stac + + +def test_spatial_info_valid(app): + """Should raise an exception.""" + with pytest.raises(Exception): + app.spatial_info(stac_item) + + +def test_bounds_valid(app): + """Should work as expected (get bounds)""" + meta = app.bounds(stac_item) + assert meta["id"] == stac_item["id"] + assert len(meta["bounds"]) == 4 + + +@patch("rio_tiler.reader.rasterio") +def test_metadata_valid(rio, app): + """Get bounds and get stats for all bands.""" + rio.open = mock_rasterio_open + + with pytest.raises(InvalidBandName): + app.metadata(stac_item, "vert") + + meta = app.metadata(stac_item, "green") + assert meta["id"] == stac_item["id"] + assert len(meta["bounds"]) == 4 + assert meta["band_descriptions"][0] == (1, "green") + assert len(meta["statistics"].items()) == 1 + assert meta["nodata_types"] == {"green": "Nodata"} + assert meta["dtypes"] == {"green": "uint16"} + + meta = app.metadata(stac_item, ["green", "red", "blue"]) + assert meta["id"] == stac_item["id"] + assert len(meta["bounds"]) == 4 + assert meta["band_descriptions"] == [(1, "green"), (2, "red"), (3, "blue")] + 
assert len(meta["statistics"].items()) == 3 + assert meta["nodata_types"] == { + "green": "Nodata", + "red": "Nodata", + "blue": "Nodata", + } + + +@patch("rio_tiler.reader.rasterio") +def test_tile_valid(rio, app): + """Should raise or return tiles.""" + rio.open = mock_rasterio_open + + with pytest.raises(TileOutsideBounds): + app.tile(stac_item, "green", 701, 102, 8) + + data, mask = app.tile(stac_item, "green", 71, 102, 8) + assert data.shape == (1, 256, 256) + assert mask.shape == (256, 256) diff --git a/tests/test_main.py b/tests/test_main.py deleted file mode 100644 index 15a0dc31..00000000 --- a/tests/test_main.py +++ /dev/null @@ -1,80 +0,0 @@ -"""tests rio_tiler.base""" - -import os -import pytest - -from rio_tiler import main -from rio_tiler.errors import TileOutsideBounds - -PREFIX = os.path.join(os.path.dirname(__file__), "fixtures") -ADDRESS = "{}/my-bucket/hro_sources/colorado/201404_13SED190110_201404_0x1500m_CL_1.tif".format( - PREFIX -) - - -@pytest.fixture(autouse=True) -def testing_env_var(monkeypatch): - """Set fake env to make sure we don't hit AWS services.""" - monkeypatch.setenv("AWS_ACCESS_KEY_ID", "jqt") - monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "rde") - monkeypatch.delenv("AWS_PROFILE", raising=False) - monkeypatch.setenv("AWS_CONFIG_FILE", "/tmp/noconfigheere") - monkeypatch.setenv("AWS_SHARED_CREDENTIALS_FILE", "/tmp/noconfighereeither") - monkeypatch.setenv("GDAL_DISABLE_READDIR_ON_OPEN", "TRUE") - - -def test_bounds_valid(): - """ - Should work as expected (get bounds) - """ - - meta = main.bounds(ADDRESS) - assert meta.get("url") == ADDRESS - assert len(meta.get("bounds")) == 4 - - -def test_metadata_valid(): - """Get bounds and get stats for all bands.""" - meta = main.metadata(ADDRESS) - assert meta["address"] == ADDRESS - assert len(meta["bounds"]["value"]) == 4 - assert meta["minzoom"] - assert meta["maxzoom"] - assert len(meta["band_descriptions"]) == 3 - assert (1, "band1") == meta["band_descriptions"][0] - assert 
len(meta["statistics"].items()) == 3 - assert meta["statistics"][1]["pc"] == [11, 199] - - -def test_metadata_valid_custom(): - """Get bounds and get stats for all bands with custom percentiles.""" - meta = main.metadata( - ADDRESS, pmin=5, pmax=90, dst_crs="epsg:3857", histogram_bins=20 - ) - assert meta["address"] == ADDRESS - assert meta["bounds"]["crs"] == "epsg:3857" - assert len(meta["bounds"]["value"]) == 4 - assert len(meta["statistics"].items()) == 3 - assert len(meta["statistics"][1]["histogram"][0]) == 20 - assert meta["statistics"][1]["pc"] == [28, 192] - - -def test_tile_valid_default(): - """Should return a 3 bands array and a full valid mask.""" - tile_z = 21 - tile_x = 438217 - tile_y = 801835 - - data, mask = main.tile(ADDRESS, tile_x, tile_y, tile_z) - assert data.shape == (3, 256, 256) - assert mask.all() - - -def test_tile_invalid_bounds(): - """Should raise an error with invalid tile.""" - tile_z = 19 - tile_x = 554 - tile_y = 200458 - - with pytest.raises(TileOutsideBounds): - main.tile(ADDRESS, tile_x, tile_y, tile_z) diff --git a/tests/test_mask.py b/tests/test_mask.py new file mode 100644 index 00000000..0c26f0e1 --- /dev/null +++ b/tests/test_mask.py @@ -0,0 +1,94 @@ +"""test masks.""" + +import os +import pytest + +import numpy +import mercantile + +import rasterio +from rasterio.crs import CRS +from rasterio.coords import BoundingBox + +from rio_tiler import reader + + +tiles = { + "masked": mercantile.Tile(x=535, y=498, z=10), + "boundless": mercantile.Tile(x=540, y=497, z=10), +} +equator = { + "name": "equator", + "bounds": BoundingBox(left=382792.5, bottom=362992.5, right=610507.5, top=595207.5), + "crs": CRS.from_epsg(32632), +} + +dataset = [ + dict(equator, dtype="uint8", nodata_type="alpha"), + dict(equator, dtype="uint8", nodata_type="nodata"), + dict(equator, dtype="uint8", nodata_type="mask"), + dict(equator, dtype="int8", nodata_type="alpha"), + dict(equator, dtype="int8", nodata_type="nodata"), + dict(equator, dtype="int8", 
nodata_type="mask"), + # dict(equator, dtype="uint16", nodata_type="alpha"), #fail + dict(equator, dtype="uint16", nodata_type="nodata"), + dict(equator, dtype="uint16", nodata_type="mask"), + # dict(equator, dtype="int16", nodata_type="alpha"), # Fail + dict(equator, dtype="int16", nodata_type="nodata"), + # dict(equator, dtype="int16", nodata_type="mask"), # Fail +] + +cog_path = os.path.join(os.path.dirname(__file__), "fixtures", "mask") + + +def test_mask_bilinear(cloudoptimized_geotiff): + """Test mask read with bilinear resampling""" + src_path = cloudoptimized_geotiff( + cog_path, **equator, dtype="uint8", nodata_type="mask" + ) + with rasterio.open(src_path) as src_dst: + data, mask = reader.tile( + src_dst, + 535, + 498, + 10, + tilesize=256, + resampling_method="bilinear", + force_binary_mask=True, + ) + masknodata = (data[0] != 0).astype(numpy.uint8) * 255 + numpy.testing.assert_array_equal(mask, masknodata) + + data, mask = reader.tile( + src_dst, + 535, + 498, + 10, + tilesize=256, + resampling_method="bilinear", + force_binary_mask=False, + ) + masknodata = (data[0] != 0).astype(numpy.uint8) * 255 + assert not numpy.array_equal(mask, masknodata) + + +@pytest.mark.parametrize("resampling", ["bilinear", "nearest"]) +@pytest.mark.parametrize("tile_name", ["masked"]) +@pytest.mark.parametrize("dataset_info", dataset) +def test_mask(dataset_info, tile_name, resampling, cloudoptimized_geotiff): + """Test tile read for multiple combination of datatype/mask/tile extent.""" + src_path = cloudoptimized_geotiff(cog_path, **dataset_info) + + tile = tiles[tile_name] + with rasterio.open(src_path) as src_dst: + data, mask = reader.tile( + src_dst, + tile.x, + tile.y, + tile.z, + tilesize=256, + resampling_method=resampling, + force_binary_mask=True, + ) + masknodata = (data[0] != 0).astype(numpy.uint8) * 255 + numpy.testing.assert_array_equal(mask, masknodata) diff --git a/tests/test_profiles.py b/tests/test_profiles.py index a4fb9a4e..20bfd7c3 100644 --- 
a/tests/test_profiles.py +++ b/tests/test_profiles.py @@ -2,14 +2,27 @@ import pytest -from rio_tiler import profiles +from rio_tiler.profiles import img_profiles def test_gdal_profiles(): """Return GDAL compatible profiles.""" - assert profiles.img_profiles["jpeg"] - assert profiles.img_profiles["png"] - assert profiles.img_profiles["pngraw"] - assert profiles.img_profiles["webp"] + assert img_profiles["jpeg"] + assert img_profiles["png"] + assert img_profiles["pngraw"] + assert img_profiles["webp"] with pytest.raises(KeyError): - profiles.img_profiles["wepc"] + img_profiles["wepc"] + + prof = img_profiles.get("jpeg") + prof["test"] = True + new_prof = img_profiles.get("jpeg") + assert not new_prof.get("test") + + prof = img_profiles["jpeg"] + prof["test"] = True + new_prof = img_profiles["jpeg"] + assert not new_prof.get("test") + + prof = img_profiles.get("jpe", {"a": "b"}) + assert prof == {"a": "b"} diff --git a/tests/test_reader.py b/tests/test_reader.py new file mode 100644 index 00000000..e9648b8c --- /dev/null +++ b/tests/test_reader.py @@ -0,0 +1,555 @@ +"""tests rio_tiler.reader""" + +import os +import pytest + +import numpy +import mercantile + +import rasterio + +from rio_tiler import reader +from rio_tiler import constants +from rio_tiler.errors import TileOutsideBounds, AlphaBandWarning + + +LANDSAT_SCENE_C1 = "LC08_L1TP_016037_20170813_20170814_01_RT" +LANDSAT_BUCKET = os.path.join(os.path.dirname(__file__), "fixtures", "landsat-pds") +LANDSAT_PATH = os.path.join( + LANDSAT_BUCKET, "c1", "L8", "016", "037", LANDSAT_SCENE_C1, LANDSAT_SCENE_C1 +) + +S3_KEY = "hro_sources/colorado/201404_13SED190110_201404_0x1500m_CL_1.tif" +S3_KEY_ALPHA = "hro_sources/colorado/201404_13SED190110_201404_0x1500m_CL_1_alpha.tif" +S3_KEY_NODATA = "hro_sources/colorado/201404_13SED190110_201404_0x1500m_CL_1_nodata.tif" +S3_KEY_MASK = "hro_sources/colorado/201404_13SED190110_201404_0x1500m_CL_1_mask.tif" +S3_KEY_EXTMASK = ( + 
"hro_sources/colorado/201404_13SED190110_201404_0x1500m_CL_1_extmask.tif" +) + +S3_LOCAL = PREFIX = os.path.join(os.path.dirname(__file__), "fixtures", "my-bucket") +S3_PATH = os.path.join(S3_LOCAL, S3_KEY) +S3_ALPHA_PATH = os.path.join(S3_LOCAL, S3_KEY_ALPHA) +S3_NODATA_PATH = os.path.join(S3_LOCAL, S3_KEY_NODATA) +S3_MASK_PATH = os.path.join(S3_LOCAL, S3_KEY_MASK) +S3_EXTMASK_PATH = os.path.join(S3_LOCAL, S3_KEY_EXTMASK) + +KEY_PIX4D = "pix4d/pix4d_alpha_nodata.tif" +PIX4D_PATH = os.path.join(S3_LOCAL, KEY_PIX4D) + +COG_WEB_TILED = os.path.join(os.path.dirname(__file__), "fixtures", "web.tif") +COG_SCALE = os.path.join(os.path.dirname(__file__), "fixtures", "cog_scale.tif") +COG_CMAP = os.path.join(os.path.dirname(__file__), "fixtures", "cog_cmap.tif") +COG_DLINE = os.path.join(os.path.dirname(__file__), "fixtures", "cog_dateline.tif") + + +@pytest.fixture(autouse=True) +def testing_env_var(monkeypatch): + """Set fake env to make sure we don't hit AWS services.""" + monkeypatch.setenv("AWS_ACCESS_KEY_ID", "jqt") + monkeypatch.setenv("AWS_SECRET_ACCESS_KEY", "rde") + monkeypatch.delenv("AWS_PROFILE", raising=False) + monkeypatch.setenv("AWS_CONFIG_FILE", "/tmp/noconfigheere") + monkeypatch.setenv("AWS_SHARED_CREDENTIALS_FILE", "/tmp/noconfighereeither") + monkeypatch.setenv("GDAL_DISABLE_READDIR_ON_OPEN", "EMPTY_DIR") + + +def test_tile_read_valid(): + """Should work as expected (read landsat band).""" + bounds = ( + -8844681.416934313, + 3757032.814272982, + -8766409.899970293, + 3835304.331237001, + ) + with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") as src_dst: + arr, mask = reader.part(src_dst, bounds, 16, 16) + assert arr.shape == (1, 16, 16) + assert mask.shape == (16, 16) + + # Read bounds at full resolution + with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") as src_dst: + arr, mask = reader.part(src_dst, bounds) + assert arr.shape == (1, 73, 73) + assert mask.shape == (73, 73) + + # set max_size for the returned array + with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") 
as src_dst: + arr, mask = reader.part(src_dst, bounds, max_size=50) + assert arr.shape == (1, 50, 50) + assert mask.shape == (50, 50) + + # If max_size is bigger than actual size, there is no effect + with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") as src_dst: + arr, mask = reader.part(src_dst, bounds, max_size=80) + assert arr.shape == (1, 73, 73) + assert mask.shape == (73, 73) + + # Incompatible max_size with height and width + with pytest.warns(UserWarning): + with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") as src_dst: + arr, mask = reader.part(src_dst, bounds, max_size=50, width=25, height=25) + assert arr.shape == (1, 25, 25) + assert mask.shape == (25, 25) + + +def test_tile_read_validResampling(): + """Should return a 1 band array and a mask.""" + bounds = ( + -8844681.416934313, + 3757032.814272982, + -8766409.899970293, + 3835304.331237001, + ) + with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") as src_dst: + arr, mask = reader.part(src_dst, bounds, 16, 16, resampling_method="nearest") + assert arr.shape == (1, 16, 16) + assert mask.shape == (16, 16) + + +def test_resampling_returns_different_results(): + bounds = ( + -8844681.416934313, + 3757032.814272982, + -8766409.899970293, + 3835304.331237001, + ) + with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") as src_dst: + arr, _ = reader.part(src_dst, bounds, 16, 16) + arr2, _ = reader.part(src_dst, bounds, 16, 16, resampling_method="nearest") + + assert not numpy.array_equal(arr, arr2) + + +def test_resampling_with_diff_padding_returns_different_results(): + bounds = ( + -8844681.416934313, + 3757032.814272982, + -8766409.899970293, + 3835304.331237001, + ) + with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") as src_dst: + arr, _ = reader.part(src_dst, bounds, 32, 32, nodata=0) + arr2, _ = reader.part(src_dst, bounds, 32, 32, nodata=0, padding=2) + + assert not numpy.array_equal(arr, arr2) + + +# This is NOT TRUE, padding affects the whole array not just the border. 
+# def test_tile_padding_only_effects_edge_pixels(): +# """Adding tile padding should effect edge pixels only.""" +# bounds = ( +# -8844681.416934313, +# 3757032.814272982, +# -8766409.899970293, +# 3835304.331237001, +# ) +# with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") as src_dst: +# arr, _ = reader.part(src_dst, bounds, 32, 32, nodata=0) +# arr2, _ = reader.part(src_dst, bounds, 32, 32, nodata=0, padding=2) +# assert not np.array_equal(arr[0][0], arr2[0][0]) +# assert np.array_equal(arr[0][5:-5][5:-5], arr2[0][5:-5][5:-5]) + + +def test_that_tiling_ignores_padding_if_web_friendly_internal_tiles_exist(): + """Ignore Padding when COG is aligned.""" + with rasterio.open(COG_WEB_TILED) as src_dst: + arr, _ = reader.tile(src_dst, 147, 182, 9, tilesize=256, padding=0) + arr2, _ = reader.tile(src_dst, 147, 182, 9, tilesize=256, padding=100) + assert numpy.array_equal(arr, arr2) + + +def test_tile_read_invalidResampling(): + """Should raise an error on invalid resampling method name.""" + bounds = ( + -8844681.416934313, + 3757032.814272982, + -8766409.899970293, + 3835304.331237001, + ) + with pytest.raises(KeyError): + with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") as src_dst: + reader.part(src_dst, bounds, 16, 16, resampling_method="jacques") + + +def test_tile_read_tuple_index(): + """Should work as expected""" + bounds = ( + -11663507.036777973, + 4715018.0897710975, + -11663487.927520901, + 4715037.199028169, + ) + with rasterio.open(S3_PATH) as src_dst: + arr, mask = reader.part(src_dst, bounds, 16, 16, indexes=(1,)) + assert arr.shape == (1, 16, 16) + assert mask.shape == (16, 16) + + +def test_tile_read_int_index(): + """Should work as expected.""" + bounds = ( + -11663507.036777973, + 4715018.0897710975, + -11663487.927520901, + 4715037.199028169, + ) + with rasterio.open(S3_PATH) as src_dst: + arr, mask = reader.part(src_dst, bounds, 16, 16, indexes=1) + assert arr.shape == (1, 16, 16) + assert mask.shape == (16, 16) + + +def test_tile_read_bgr(): + """Should 
work as expected (read rgb)""" + bounds = ( + -11663507.036777973, + 4715018.0897710975, + -11663487.927520901, + 4715037.199028169, + ) + with rasterio.open(S3_PATH) as src_dst: + arr, mask = reader.part(src_dst, bounds, 16, 16, indexes=(3, 2, 1)) + assert arr.shape == (3, 16, 16) + assert mask.shape == (16, 16) + + +def test_tile_read_alpha(): + """Read masked area.""" + # non-boundless tile covering the alpha masked part + with rasterio.open(S3_ALPHA_PATH) as src_dst: + arr, mask = reader.tile( + src_dst, 876432, 1603670, 22, tilesize=256, indexes=(1, 2, 3) + ) + assert arr.shape == (3, 256, 256) + assert not mask.all() + + with pytest.warns(AlphaBandWarning): + with rasterio.open(S3_ALPHA_PATH) as src_dst: + nb = src_dst.count + arr, mask = reader.tile(src_dst, 876432, 1603670, 22, tilesize=256) + assert not nb == arr.shape[0] + assert arr.shape == (3, 256, 256) + assert not mask.all() + + +def test_tile_read_internal_nodata(): + """Read masked area.""" + # non-boundless tile covering the nodata part + with rasterio.open(S3_NODATA_PATH) as src_dst: + arr, mask = reader.tile( + src_dst, 876431, 1603670, 22, tilesize=256, indexes=(1, 2, 3) + ) + assert arr.shape == (3, 256, 256) + assert not mask.all() + + +def test_tile_read_wrong_nodata(): + """Return empty mask on wrong nodata.""" + # non-boundless tile covering the nodata part + with rasterio.open(S3_NODATA_PATH) as src_dst: + arr, mask = reader.tile( + src_dst, 438217, 801835, 21, tilesize=256, indexes=(1, 2, 3), nodata=1000 + ) + assert arr.shape == (3, 256, 256) + assert mask.all() + + # Mask boundless values + arr, mask = reader.tile( + src_dst, 109554, 200458, 19, tilesize=256, indexes=(1, 2, 3), nodata=1000 + ) + assert arr.shape == (3, 256, 256) + assert not mask.all() + + +def test_tile_read_mask(): + """Read masked area.""" + with rasterio.Env(GDAL_DISABLE_READDIR_ON_OPEN="EMPTY_DIR"): + # non-boundless tile covering the masked part + with rasterio.open(S3_MASK_PATH) as src_dst: + arr, mask = 
reader.tile(src_dst, 876431, 1603669, 22, tilesize=16) + assert arr.shape == (3, 16, 16) + assert mask.shape == (16, 16) + assert not mask.all() + + # boundless tile covering the masked part + with rasterio.open(S3_MASK_PATH) as src_dst: + arr, mask = reader.tile(src_dst, 876431, 1603668, 22, tilesize=256) + assert arr.shape == (3, 256, 256) + assert not mask.all() + + +def test_tile_read_extmask(): + """Read masked area.""" + # non-boundless tile covering the masked part + mercator_tile = mercantile.Tile(x=876431, y=1603669, z=22) + bounds = mercantile.xy_bounds(mercator_tile) + with rasterio.Env(GDAL_DISABLE_READDIR_ON_OPEN="TRUE"): + with rasterio.open(S3_EXTMASK_PATH) as src_dst: + arr, mask = reader.part(src_dst, bounds, 256, 256) + assert arr.shape == (3, 256, 256) + assert mask.shape == (256, 256) + assert not mask.all() + + # boundless tile covering the masked part + mercator_tile = mercantile.Tile(x=876431, y=1603668, z=22) + bounds = mercantile.xy_bounds(mercator_tile) + with rasterio.Env(GDAL_DISABLE_READDIR_ON_OPEN="EMPTY_DIR"): + with rasterio.open(S3_MASK_PATH) as src_dst: + arr, mask = reader.part(src_dst, bounds, 256, 256) + assert arr.shape == (3, 256, 256) + assert not mask.all() + + +def test_tile_read_nodata(): + """Should work as expected when forcing nodata value.""" + bounds = ( + -9040360.209344367, + 3991847.365165044, + -9001224.450862356, + 4030983.1236470537, + ) + + tilesize = 16 + with rasterio.open(f"{LANDSAT_PATH}_B4.TIF") as src_dst: + arr, mask = reader.part(src_dst, bounds, tilesize, tilesize, nodata=0) + assert arr.shape == (1, 16, 16) + assert mask.shape == (16, 16) + assert not mask.all() + + +def test_tile_read_nodata_and_alpha(): + """Should work as expected when forcing nodata value""" + bounds = ( + 13604568.04230881, + -333876.9395496497, + 13605791.034761373, + -332653.9470970885, + ) + + tilesize = 16 + with rasterio.open(PIX4D_PATH) as src_dst: + arr, mask = reader.part(src_dst, bounds, tilesize, tilesize, indexes=[1, 
2, 3]) + assert arr.shape == (3, 16, 16) + assert mask.shape == (16, 16) + assert not mask.all() + + +def test_tile_read_dataset(): + """Should work as expected""" + address = f"{LANDSAT_PATH}_B2.TIF" + bounds = ( + -8844681.416934313, + 3757032.814272982, + -8766409.899970293, + 3835304.331237001, + ) + tilesize = 16 + with rasterio.open(address) as src_dst: + arr, mask = reader.part(src_dst, bounds, tilesize, tilesize) + assert arr.shape == (1, 16, 16) + assert mask.shape == (16, 16) + + +def test_tile_read_dataset_nodata(): + """Should work as expected (read rgb)""" + # non-boundless tile covering the nodata part 22-876431-1603670 + bounds = ( + -11663535.70066358, + 4715027.644399633, + -11663526.146035044, + 4715037.199028169, + ) + tilesize = 16 + with rasterio.open(S3_NODATA_PATH) as src_dst: + arr, mask = reader.part(src_dst, bounds, tilesize, tilesize) + assert arr.shape == (3, 16, 16) + assert not mask.all() + + +def test_tile_read_not_covering_the_whole_tile(): + """Should raise an error when dataset doesn't cover more than 50% of the tile.""" + bounds = ( + -9079495.967826376, + 3991847.365165044, + -9001224.450862356, + 4070118.882129065, + ) + tilesize = 16 + with pytest.raises(TileOutsideBounds): + with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") as src_dst: + reader.part(src_dst, bounds, tilesize, tilesize, minimum_overlap=0.6) + + +# See https://github.com/cogeotiff/rio-tiler/issues/105#issuecomment-492268836 +def test_tile_read_validMask(): + """Dataset mask should be the same as the actual mask.""" + bounds = ( + -8844681.416934313, + 3757032.814272982, + -8766409.899970293, + 3835304.331237001, + ) + tilesize = 128 + with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") as src_dst: + arr, mask = reader.part(src_dst, bounds, tilesize, tilesize, nodata=0) + + masknodata = (arr[0] != 0).astype(numpy.uint8) * 255 + numpy.testing.assert_array_equal(mask, masknodata) + + +def test_tile_read_crs(): + """Read tile using different target CRS and bounds CRS.""" + 
bounds = ( + -11663507.036777973, + 4715018.0897710975, + -11663487.927520901, + 4715037.199028169, + ) + tilesize = 16 + with rasterio.open(S3_PATH) as src_dst: + # Test target CRS with input bounds in bounds_crs + arr, mask = reader.part( + src_dst, + bounds, + tilesize, + tilesize, + indexes=(3, 2, 1), + dst_crs=constants.WGS84_CRS, + bounds_crs=constants.WEB_MERCATOR_CRS, + ) + assert arr.shape == (3, 16, 16) + assert mask.shape == (16, 16) + + # Test target CRS with input bounds in target CRS + bounds = ( + -104.7750663757324, + 38.95353532141203, + -104.77489471435543, + 38.95366881479646, + ) + arr_crs, _ = reader.part( + src_dst, + bounds, + tilesize, + tilesize, + indexes=(3, 2, 1), + dst_crs=constants.WGS84_CRS, + ) + + assert numpy.array_equal(arr, arr_crs) + + +def test_tile_read_vrt_option(): + """Should work as expected (read landsat band).""" + bounds = ( + -8844681.416934313, + 3757032.814272982, + -8766409.899970293, + 3835304.331237001, + ) + tilesize = 16 + with rasterio.open(f"{LANDSAT_PATH}_B2.TIF") as src_dst: + arr, mask = reader.part( + src_dst, + bounds, + tilesize, + tilesize, + warp_vrt_option=dict(source_extra=10, num_threads=10), + ) + assert arr.shape == (1, 16, 16) + assert mask.shape == (16, 16) + + +def test_read_unscale(): + """Should or Shouldn't apply scale and offset to a data.""" + with rasterio.open(COG_SCALE) as src_dst: + arr, mask = reader.tile(src_dst, 218, 99, 8, tilesize=128) + arrS, maskS = reader.tile(src_dst, 218, 99, 8, tilesize=128, unscale=True) + + assert arr.dtype == "int16" + assert arrS.dtype == "float32" + assert not numpy.array_equal(arr, arrS) + numpy.testing.assert_array_equal(mask, maskS) + + meta = reader.metadata(src_dst) + assert isinstance(meta["statistics"][1]["min"], int) + + meta = reader.metadata(src_dst, unscale=True) + assert isinstance(meta["statistics"][1]["min"], float) + + p = reader.point(src_dst, [310000, 4100000], coord_crs=src_dst.crs) + assert p == [8917] + + p = reader.point( + src_dst, 
[310000, 4100000], coord_crs=src_dst.crs, unscale=True + ) + assert round(p[0], 3) == 1000.892 + + +def test_point(): + """Read point values""" + with rasterio.open(COG_SCALE) as src_dst: + p = reader.point(src_dst, [310000, 4100000], coord_crs=src_dst.crs, indexes=1) + assert p == [8917] + + p = reader.point(src_dst, [310000, 4100000], coord_crs=src_dst.crs) + assert p == [8917] + + with pytest.raises(Exception): + reader.point(src_dst, [810000, 4100000], coord_crs=src_dst.crs) + + +def test_metadata(): + """Should return correct metadata.""" + with rasterio.open(COG_CMAP) as src_dst: + meta = reader.metadata(src_dst) + assert meta["dtype"] == "int8" + assert meta["colorinterp"] == ["palette"] + assert not meta.get("scale") + assert not meta.get("offset") + assert meta.get("colormap") + + with rasterio.open(COG_SCALE) as src_dst: + meta = reader.metadata(src_dst) + assert meta["dtype"] == "int16" + assert meta["colorinterp"] == ["gray"] + assert meta["scale"] == 0.0001 + assert meta["offset"] == 1000.0 + assert meta["band_descriptions"] == [(1, "Green")] + assert not meta.get("colormap") + assert meta["nodata_type"] == "Nodata" + + meta = reader.metadata(src_dst, indexes=1) + assert meta["colorinterp"] == ["gray"] + + bounds = mercantile.bounds(mercantile.Tile(x=218, y=99, z=8)) + meta = reader.metadata(src_dst, bounds) + assert meta["colorinterp"] == ["gray"] + assert meta["bounds"] == bounds + + with rasterio.open(S3_ALPHA_PATH) as src_dst: + with pytest.warns(AlphaBandWarning): + meta = reader.metadata(src_dst) + assert len(meta["band_descriptions"]) == 3 + assert meta["colorinterp"] == ["red", "green", "blue"] + assert meta["nodata_type"] == "Alpha" + + meta = reader.metadata(src_dst, indexes=(1, 2, 3, 4)) + assert len(meta["band_descriptions"]) == 4 + assert meta["colorinterp"] == ["red", "green", "blue", "alpha"] + assert meta["nodata_type"] == "Alpha" + + with rasterio.open(S3_MASK_PATH) as src_dst: + meta = reader.metadata(src_dst) + assert 
meta["nodata_type"] == "Mask" + + +def test_dateline(): + """Should return correct metadata.""" + with rasterio.open(COG_DLINE) as src_dst: + tile, _ = reader.tile(src_dst, 1, 42, 7, tilesize=64) + assert tile.shape == (1, 64, 64) + + tile, _ = reader.tile(src_dst, 127, 42, 7, tilesize=64) + assert tile.shape == (1, 64, 64) diff --git a/tests/test_utils.py b/tests/test_utils.py index ad103e82..e348f22e 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -8,42 +8,23 @@ import numpy as np import mercantile -from rio_toa import toa_utils - import rasterio -from rasterio.crs import CRS -from rasterio.enums import Resampling from rio_tiler import utils -from rio_tiler.errors import NoOverviewWarning, DeprecationWarning, TileOutsideBounds +from rio_tiler import colormap +from rio_tiler import constants from .conftest import requires_webp -SENTINEL_SCENE = "S2A_tile_20170729_19UDP_0" -SENTINEL_BUCKET = os.path.join(os.path.dirname(__file__), "fixtures", "sentinel-s2-l1c") -SENTINEL_PATH = os.path.join(SENTINEL_BUCKET, "tiles/19/U/DP/2017/7/29/0/") - -LANDSAT_SCENE_C1 = "LC08_L1TP_016037_20170813_20170814_01_RT" -LANDSAT_BUCKET = os.path.join(os.path.dirname(__file__), "fixtures", "landsat-pds") -LANDSAT_PATH = os.path.join( - LANDSAT_BUCKET, "c1", "L8", "016", "037", LANDSAT_SCENE_C1, LANDSAT_SCENE_C1 -) - S3_KEY = "hro_sources/colorado/201404_13SED190110_201404_0x1500m_CL_1.tif" S3_KEY_ALPHA = "hro_sources/colorado/201404_13SED190110_201404_0x1500m_CL_1_alpha.tif" -S3_KEY_NODATA = "hro_sources/colorado/201404_13SED190110_201404_0x1500m_CL_1_nodata.tif" S3_KEY_MASK = "hro_sources/colorado/201404_13SED190110_201404_0x1500m_CL_1_mask.tif" -S3_KEY_EXTMASK = ( - "hro_sources/colorado/201404_13SED190110_201404_0x1500m_CL_1_extmask.tif" -) S3_LOCAL = PREFIX = os.path.join(os.path.dirname(__file__), "fixtures", "my-bucket") S3_PATH = os.path.join(S3_LOCAL, S3_KEY) S3_ALPHA_PATH = os.path.join(S3_LOCAL, S3_KEY_ALPHA) -S3_NODATA_PATH = os.path.join(S3_LOCAL, 
S3_KEY_NODATA) S3_MASK_PATH = os.path.join(S3_LOCAL, S3_KEY_MASK) -S3_EXTMASK_PATH = os.path.join(S3_LOCAL, S3_KEY_EXTMASK) KEY_PIX4D = "pix4d/pix4d_alpha_nodata.tif" PIX4D_PATH = os.path.join(S3_LOCAL, KEY_PIX4D) @@ -51,10 +32,7 @@ COG_DST = os.path.join(os.path.dirname(__file__), "fixtures", "cog_name.tif") COG_WEB_TILED = os.path.join(os.path.dirname(__file__), "fixtures", "web.tif") COG_NOWEB = os.path.join(os.path.dirname(__file__), "fixtures", "noweb.tif") - - -with open("{}_MTL.txt".format(LANDSAT_PATH), "r") as f: - LANDSAT_METADATA = toa_utils._parse_mtl_txt(f.read()) +NOCOG = os.path.join(os.path.dirname(__file__), "fixtures", "nocog.tif") @pytest.fixture(autouse=True) @@ -65,340 +43,11 @@ def testing_env_var(monkeypatch): monkeypatch.delenv("AWS_PROFILE", raising=False) monkeypatch.setenv("AWS_CONFIG_FILE", "/tmp/noconfigheere") monkeypatch.setenv("AWS_SHARED_CREDENTIALS_FILE", "/tmp/noconfighereeither") - monkeypatch.setenv("GDAL_DISABLE_READDIR_ON_OPEN", "TRUE") - - -def test_tile_read_valid(): - """Should work as expected (read landsat band).""" - address = "{}_B2.TIF".format(LANDSAT_PATH) - bounds = ( - -8844681.416934313, - 3757032.814272982, - -8766409.899970293, - 3835304.331237001, - ) - tilesize = 16 - - arr, mask = utils.tile_read(address, bounds, tilesize) - assert arr.shape == (1, 16, 16) - assert mask.shape == (16, 16) - - -def test_tile_read_validResampling(): - """Should return a 1 band array and a mask.""" - address = "{}_B2.TIF".format(LANDSAT_PATH) - bounds = ( - -8844681.416934313, - 3757032.814272982, - -8766409.899970293, - 3835304.331237001, - ) - tilesize = 16 - - arr, mask = utils.tile_read(address, bounds, tilesize, resampling_method="nearest") - assert arr.shape == (1, 16, 16) - assert mask.shape == (16, 16) - - -def test_resampling_returns_different_results(): - address = "{}_B2.TIF".format(LANDSAT_PATH) - bounds = ( - -8844681.416934313, - 3757032.814272982, - -8766409.899970293, - 3835304.331237001, - ) - tilesize = 16 - - 
arr, mask = utils.tile_read(address, bounds, tilesize) - arr2, mask2 = utils.tile_read( - address, bounds, tilesize, resampling_method="nearest" - ) - assert not np.array_equal(arr, arr2) - - -def test_resampling_with_diff_padding_returns_different_results(): - address = "{}_B2.TIF".format(LANDSAT_PATH) - bounds = ( - -8844681.416934313, - 3757032.814272982, - -8766409.899970293, - 3835304.331237001, - ) - tilesize = 16 - - arr, mask = utils.tile_read(address, bounds, tilesize) - arr2, mask2 = utils.tile_read(address, bounds, tilesize, tile_edge_padding=0) - assert not np.array_equal(arr, arr2) - - -def test_tile_padding_only_effects_edge_pixels(): - """Adding tile padding should effect edge pixels only.""" - address = "{}_B2.TIF".format(LANDSAT_PATH) - bounds = ( - -8844681.416934313, - 3757032.814272982, - -8766409.899970293, - 3835304.331237001, - ) - tilesize = 16 - - arr, mask = utils.tile_read(address, bounds, tilesize) - arr2, mask2 = utils.tile_read(address, bounds, tilesize, tile_edge_padding=0) - assert not np.array_equal(arr[0][0], arr2[0][0]) - assert np.array_equal(arr[0][5:-5][5:-5], arr2[0][5:-5][5:-5]) - - -def test_that_tiling_ignores_padding_if_web_friendly_internal_tiles_exist(): - address = COG_WEB_TILED - bounds = mercantile.bounds(147, 182, 9) - tilesize = 256 - - arr, mask = utils.tile_read(address, bounds, tilesize) - arr2, mask2 = utils.tile_read(address, bounds, tilesize, tile_edge_padding=0) - assert np.array_equal(arr, arr2) - - -def test_tile_read_invalidResampling(): - """Should raise an error on invalid resampling method name.""" - address = "{}_B2.TIF".format(LANDSAT_PATH) - bounds = ( - -8844681.416934313, - 3757032.814272982, - -8766409.899970293, - 3835304.331237001, - ) - tilesize = 16 - with pytest.raises(KeyError): - arr, mask = utils.tile_read( - address, bounds, tilesize, resampling_method="jacques" - ) - - -def test_tile_read_list_index(): - """ - Should work as expected - """ - bounds = ( - -11663507.036777973, - 
4715018.0897710975, - -11663487.927520901, - 4715037.199028169, - ) - tilesize = 16 - - arr, mask = utils.tile_read(S3_PATH, bounds, tilesize, indexes=(1)) - assert arr.shape == (1, 16, 16) - assert mask.shape == (16, 16) - - -def test_tile_read_int_index(): - """ - Should work as expected - """ - bounds = ( - -11663507.036777973, - 4715018.0897710975, - -11663487.927520901, - 4715037.199028169, - ) - tilesize = 16 - - arr, mask = utils.tile_read(S3_PATH, bounds, tilesize, indexes=1) - assert arr.shape == (1, 16, 16) - assert mask.shape == (16, 16) - - -def test_tile_read_rgb(): - """ - Should work as expected (read rgb) - """ - bounds = ( - -11663507.036777973, - 4715018.0897710975, - -11663487.927520901, - 4715037.199028169, - ) - tilesize = 16 - - arr, mask = utils.tile_read(S3_PATH, bounds, tilesize, indexes=(3, 2, 1)) - assert arr.shape == (3, 16, 16) - assert mask.shape == (16, 16) - - -def test_tile_read_alpha(): - """Read masked area.""" - # non-boundless tile covering the alpha masked part - mercator_tile = mercantile.Tile(x=876432, y=1603670, z=22) - bounds = mercantile.xy_bounds(mercator_tile) - arr, mask = utils.tile_read(S3_ALPHA_PATH, bounds, 256, indexes=(1, 2, 3)) - assert arr.shape == (3, 256, 256) - assert not mask.all() - - -def test_tile_read_internal_nodata(): - """Read masked area.""" - # non-boundless tile covering the nodata part - mercator_tile = mercantile.Tile(x=876431, y=1603670, z=22) - bounds = mercantile.xy_bounds(mercator_tile) - arr, mask = utils.tile_read(S3_NODATA_PATH, bounds, 256, indexes=(1, 2, 3)) - assert arr.shape == (3, 256, 256) - assert not mask.all() - - -def test_tile_read_wrong_nodata(): - """Return empty mask on wrong nodata.""" - # non-boundless tile covering the nodata part - mercator_tile = mercantile.Tile(x=438217, y=801835, z=21) - bounds = mercantile.xy_bounds(mercator_tile) - arr, mask = utils.tile_read( - S3_NODATA_PATH, bounds, 256, indexes=(1, 2, 3), nodata=1000 - ) - assert arr.shape == (3, 256, 256) - 
assert mask.all() - - # Mask boundless values - mercator_tile = mercantile.Tile(x=109554, y=200458, z=19) - bounds = mercantile.xy_bounds(mercator_tile) - arr, mask = utils.tile_read( - S3_NODATA_PATH, bounds, 256, indexes=(1, 2, 3), nodata=1000 - ) - assert arr.shape == (3, 256, 256) - assert not mask.all() - - -def test_tile_read_mask(): - """Read masked area.""" - # non-boundless tile covering the masked part - mercator_tile = mercantile.Tile(x=876431, y=1603669, z=22) - bounds = mercantile.xy_bounds(mercator_tile) - arr, mask = utils.tile_read(S3_MASK_PATH, bounds, 256) - assert arr.shape == (3, 256, 256) - assert mask.shape == (256, 256) - assert not mask.all() - - # boundless tile covering the masked part - mercator_tile = mercantile.Tile(x=876431, y=1603668, z=22) - bounds = mercantile.xy_bounds(mercator_tile) - arr, mask = utils.tile_read(S3_MASK_PATH, bounds, 256) - assert arr.shape == (3, 256, 256) - assert not mask.all() - - -def test_tile_read_extmask(): - """Read masked area.""" - # non-boundless tile covering the masked part - mercator_tile = mercantile.Tile(x=876431, y=1603669, z=22) - bounds = mercantile.xy_bounds(mercator_tile) - arr, mask = utils.tile_read(S3_EXTMASK_PATH, bounds, 256) - assert arr.shape == (3, 256, 256) - assert mask.shape == (256, 256) - assert not mask.all() - - # boundless tile covering the masked part - mercator_tile = mercantile.Tile(x=876431, y=1603668, z=22) - bounds = mercantile.xy_bounds(mercator_tile) - arr, mask = utils.tile_read(S3_MASK_PATH, bounds, 256) - assert arr.shape == (3, 256, 256) - assert not mask.all() - - -def test_tile_read_nodata(): - """Should work as expected when forcing nodata value.""" - address = "{}_B4.TIF".format(LANDSAT_PATH) - bounds = ( - -9040360.209344367, - 3991847.365165044, - -9001224.450862356, - 4030983.1236470537, - ) - - tilesize = 16 - nodata = 0 - - arr, mask = utils.tile_read(address, bounds, tilesize, nodata=nodata) - assert arr.shape == (1, 16, 16) - assert mask.shape == (16, 
16) - assert not mask.all() - - -def test_tile_read_nodata_and_alpha(): - """Should work as expected when forcing nodata value""" - bounds = ( - 13604568.04230881, - -333876.9395496497, - 13605791.034761373, - -332653.9470970885, - ) - - tilesize = 16 - arr, mask = utils.tile_read(PIX4D_PATH, bounds, tilesize, indexes=[1, 2, 3]) - assert arr.shape == (3, 16, 16) - assert mask.shape == (16, 16) - assert not mask.all() - - -def test_tile_read_dataset(): - """ - Should work as expected (read rgb) - """ - - address = "{}_B2.TIF".format(LANDSAT_PATH) - bounds = ( - -8844681.416934313, - 3757032.814272982, - -8766409.899970293, - 3835304.331237001, - ) - tilesize = 16 - - with rasterio.open(address) as src: - arr, mask = utils.tile_read(src, bounds, tilesize) - assert arr.shape == (1, 16, 16) - assert mask.shape == (16, 16) - assert src.closed - - -def test_tile_read_dataset_nodata(): - """ - Should work as expected (read rgb) - """ - # non-boundless tile covering the nodata part 22-876431-1603670 - bounds = ( - -11663535.70066358, - 4715027.644399633, - -11663526.146035044, - 4715037.199028169, - ) - tilesize = 16 - - with rasterio.open(S3_NODATA_PATH) as src: - arr, mask = utils.tile_read(src, bounds, tilesize) - assert arr.shape == (3, 16, 16) - assert not mask.all() - assert src.closed - - -def test_tile_read_not_covering_the_whole_tile(): - """Should raise an error when dataset doesn't cover more than 50% of the tile.""" - address = "{}_B2.TIF".format(LANDSAT_PATH) - - bounds = ( - -9079495.967826376, - 3991847.365165044, - -9001224.450862356, - 4070118.882129065, - ) - tilesize = 16 - with pytest.raises(TileOutsideBounds): - utils.tile_read(address, bounds, tilesize, minimum_tile_cover=0.6) + monkeypatch.setenv("GDAL_DISABLE_READDIR_ON_OPEN", "EMPTY_DIR") def test_linear_rescale_valid(): - """ - Should work as expected (read data band) - """ - + """Should work as expected (read data band).""" data = np.zeros((1, 1), dtype=np.int16) + 1000 expected_value = 
np.zeros((1, 1), dtype=np.int16) + 25.5 assert ( @@ -408,57 +57,22 @@ def test_linear_rescale_valid(): def test_tile_exists_valid(): - """ - Should work as expected (return true) - """ - + """Should work as expected (return true).""" tile = "7-36-50" tile_z, tile_x, tile_y = map(int, tile.split("-")) bounds = [-78.75, 34.30714385628803, -75.93749999999999, 36.59788913307021] assert utils.tile_exists(bounds, tile_z, tile_x, tile_y) -def test_get_colormap_valid(): - """Returns 'cfastie' colormap in a PIL friendly format.""" - assert len(utils.get_colormap()) == 768 # 3 x256 - - -def test_get_colormap_schwarzwald(): - """Returns 'schwarzwald' colormap in a GDAL friendly format.""" - assert len(utils.get_colormap(name="schwarzwald")) == 768 # 3 x256 - - -def test_get_colormap_rplumbo(): - """Returns 'rplumbo' colormap in a GDAL friendly format.""" - assert len(utils.get_colormap(name="rplumbo")) == 768 # 3 x256 - - -def test_get_colormap_gdal(): - """Returns 'cfastie' colormap in a GDAL friendly format.""" - assert len(utils.get_colormap(format="gdal")) == 256 # 256 x 3 - - -def test_get_colormap_unsupported(): - """Raise error on unsupported format.""" - with pytest.raises(Exception): - utils.get_colormap(format="gal") - - def test_mapzen_elevation_rgb(): - """ - Should work as expected - """ - + """Should work as expected.""" arr = np.random.randint(0, 3000, size=(512, 512)) assert utils.mapzen_elevation_rgb(arr).shape == (3, 512, 512) -@patch("rio_tiler.landsat8.tile") +@patch("rio_tiler.io.landsat8.tile") def test_expression_ndvi(landsat_tile): - """ - Should work as expected - """ - + """Should work as expected""" landsat_tile.return_value = [ np.random.randint(0, 255, size=(2, 256, 256), dtype=np.uint8), np.random.randint(0, 1, size=(256, 256), dtype=np.uint8) * 255, @@ -477,12 +91,9 @@ def test_expression_ndvi(landsat_tile): assert len(landsat_tile.call_args[1].get("bands")) == 2 -@patch("rio_tiler.sentinel2.tile") +@patch("rio_tiler.io.sentinel2.tile") def 
test_expression_sentinel2(sentinel2): - """ - Should work as expected - """ - + """Should work as expected.""" sentinel2.return_value = [ np.random.randint(0, 255, size=(2, 256, 256), dtype=np.uint8), np.random.randint(0, 1, size=(256, 256), dtype=np.uint8) * 255, @@ -501,12 +112,9 @@ def test_expression_sentinel2(sentinel2): assert sorted(list(sentinel2.call_args[1].get("bands"))) == ["12", "8A"] -@patch("rio_tiler.landsat8.tile") +@patch("rio_tiler.io.landsat8.tile") def test_expression_landsat_rgb(landsat_tile): - """ - Should work as expected - """ - + """Should work as expected.""" landsat_tile.return_value = [ np.random.randint(0, 255, size=(3, 256, 256), dtype=np.uint8), np.random.randint(0, 1, size=(256, 256), dtype=np.uint8) * 255, @@ -524,7 +132,7 @@ def test_expression_landsat_rgb(landsat_tile): assert len(landsat_tile.call_args[1].get("bands")) == 3 -@patch("rio_tiler.cbers.tile") +@patch("rio_tiler.io.cbers.tile") def test_expression_cbers_rgb(cbers_tile): """Should read tile from CBERS data.""" cbers_tile.return_value = [ @@ -565,10 +173,7 @@ def test_expression_main_ratio(): def test_expression_main_rgb(): - """ - Should work as expected - """ - + """Should work as expected.""" expr = "b1*0.8, b2*1.1, b3*0.8" tile_z = 19 tile_x = 109554 @@ -584,10 +189,7 @@ def test_expression_main_rgb(): def test_expression_main_kwargs(): - """ - Should work as expected - """ - + """Should work as expected.""" expr = "(b3 - b2) / (b3 + b2)" tile_z = 19 tile_x = 109554 @@ -603,7 +205,7 @@ def test_expression_main_kwargs(): def test_expression_missing(): - """Should raise an exception on missing expression""" + """Should raise an exception on missing expression.""" tile_z = 19 tile_x = 109554 tile_y = 200458 @@ -625,31 +227,21 @@ def test_get_vrt_transform_valid(): ) with rasterio.open(S3_PATH) as src: - vrt_transform, vrt_width, vrt_height = utils.get_vrt_transform(src, bounds) - assert vrt_transform[2] == -11663507.036777973 - assert vrt_transform[5] == 
4715037.199028169 - assert vrt_width == 100 - assert vrt_height == 100 - - -def test_get_vrt_transform_deprWarning(): - """Should Warn user for bounds_crs depreciation.""" - bounds = ( - -104.77523803710938, - 38.95353532141205, - -104.77455139160156, - 38.954069293441066, - ) - with pytest.warns(DeprecationWarning): - with rasterio.open(S3_PATH) as src: - vrt_transform, vrt_width, vrt_height = utils.get_vrt_transform( - src, bounds, bounds_crs="epsg:4326" - ) + vrt_transform, vrt_width, vrt_height = utils.get_vrt_transform( + src, bounds, 64, 64 + ) + assert vrt_transform[2] == -11663507.036777973 + assert vrt_transform[5] == 4715037.199028169 + assert vrt_width == 100 + assert vrt_height == 100 - assert vrt_transform[2] == -104.77523803710938 - assert vrt_transform[5] == 38.954069293441066 - assert vrt_width == 420 - assert vrt_height == 327 + vrt_transform, vrt_width, vrt_height = utils.get_vrt_transform( + src, bounds, 256, 256 + ) + assert vrt_transform[2] == -11663507.036777973 + assert vrt_transform[5] == 4715037.199028169 + assert vrt_width == 256 + assert vrt_height == 256 def test_get_vrt_transform_valid4326(): @@ -662,7 +254,7 @@ def test_get_vrt_transform_valid4326(): ) with rasterio.open(S3_PATH) as src: vrt_transform, vrt_width, vrt_height = utils.get_vrt_transform( - src, bounds, dst_crs="epsg:4326" + src, bounds, 256, 256, dst_crs=constants.WGS84_CRS ) assert vrt_transform[2] == -104.77523803710938 @@ -675,6 +267,7 @@ def test_statsFunction_valid(): """Should return a valid dict with array statistics.""" with rasterio.open(S3_ALPHA_PATH) as src: arr = src.read(indexes=[1], masked=True) + stats = utils._stats(arr) assert stats["pc"] == [10, 200] assert stats["min"] == 0 @@ -690,178 +283,67 @@ def test_statsFunction_valid(): assert len(stats["histogram"][0]) == 20 -def test_raster_get_stats_valid(): - """Should return a valid dict with array statistics.""" - stats = utils.raster_get_stats(S3_PATH) - assert stats["bounds"] - assert 
stats["bounds"]["crs"] == CRS({"init": "EPSG:4326"}) - assert len(stats["statistics"]) == 3 - assert stats["statistics"][1]["pc"] == [11, 199] - assert stats["statistics"][2]["pc"] == [26, 201] - assert stats["statistics"][3]["pc"] == [54, 192] - assert stats["minzoom"] - assert stats["maxzoom"] - assert len(stats["band_descriptions"]) == 3 - assert (1, "band1") == stats["band_descriptions"][0] - - with rasterio.open(S3_PATH) as src_dst: - stats = utils.raster_get_stats(src_dst) - assert stats["bounds"] - assert stats["bounds"]["crs"] == CRS({"init": "EPSG:4326"}) - assert len(stats["statistics"]) == 3 - assert stats["statistics"][1]["pc"] == [11, 199] - assert stats["statistics"][2]["pc"] == [26, 201] - assert stats["statistics"][3]["pc"] == [54, 192] - assert stats["minzoom"] - assert stats["maxzoom"] - assert len(stats["band_descriptions"]) == 3 - assert (1, "band1") == stats["band_descriptions"][0] - - stats = utils.raster_get_stats(COG_DST) - assert stats["minzoom"] - assert stats["maxzoom"] - assert len(stats["band_descriptions"]) == 1 - assert (1, "b1") == stats["band_descriptions"][0] - - stats = utils.raster_get_stats(S3_PATH, histogram_bins=20) - assert len(stats["statistics"][1]["histogram"][0]) == 20 - - stats = utils.raster_get_stats( - S3_PATH, histogram_bins=None, histogram_range=[30, 70] - ) - assert len(stats["statistics"][1]["histogram"][0]) == 10 - - stats = utils.raster_get_stats( - S3_PATH, - histogram_bins=None, - histogram_range=[30, 70], - warp_vrt_option=dict(source_extra=10, num_threads=10), - ) - assert len(stats["statistics"][1]["histogram"][0]) == 10 - - -def test_raster_get_stats_validAlpha(): - """Should return a valid dict with array statistics.""" - with pytest.warns(NoOverviewWarning): - stats = utils.raster_get_stats(S3_ALPHA_PATH) - assert len(stats["statistics"]) == 3 - assert stats["statistics"][1]["pc"] == [10, 200] - assert stats["statistics"][2]["pc"] == [27, 202] - assert stats["statistics"][3]["pc"] == [55, 193] - - -def 
test_raster_get_stats_validNodata(): - """Should return a valid dict with array statistics.""" - with pytest.warns(NoOverviewWarning): - stats = utils.raster_get_stats(S3_NODATA_PATH) - assert stats["bounds"] - assert len(stats["statistics"]) == 3 - assert stats["statistics"][1]["pc"] == [13, 199] - assert stats["statistics"][2]["pc"] == [27, 202] - assert stats["statistics"][3]["pc"] == [56, 192] - - with pytest.warns(NoOverviewWarning): - stats = utils.raster_get_stats(S3_NODATA_PATH, nodata=0) - assert stats["bounds"] - assert len(stats["statistics"]) == 3 - assert stats["statistics"][1]["pc"] == [13, 199] - assert stats["statistics"][2]["pc"] == [27, 202] - assert stats["statistics"][3]["pc"] == [56, 192] - - -def test_raster_get_stats_validOptions(): - """Should return a valid dict with array statistics.""" - stats = utils.raster_get_stats( - S3_PATH, indexes=3, overview_level=1, percentiles=(10, 90), dst_crs="epsg:3857" - ) - assert stats["bounds"]["crs"] == "epsg:3857" - assert len(stats["statistics"]) == 1 - assert stats["statistics"][3]["pc"] == [77, 178] - - stats = utils.raster_get_stats(S3_PATH, indexes=(3,)) - assert len(stats["statistics"]) == 1 - assert stats["statistics"][3]["pc"] == [54, 192] - - -def test_raster_get_stats_ovr(): - """Validate that overview level return the same result than reeading the overview.""" - resampling_method = "bilinear" - rio_stats = utils.raster_get_stats( - S3_PATH, overview_level=1, resampling_method=resampling_method - ) - - with rasterio.open(S3_PATH, overview_level=1) as src_dst: - indexes = src_dst.indexes - arr = src_dst.read(resampling=Resampling[resampling_method], masked=True) - stats = {indexes[b]: utils._stats(arr[b], bins=10) for b in range(arr.shape[0])} - assert rio_stats["statistics"] == stats - - -def test_array_to_image_valid_1band(): +def test_render_valid_1band(): """Creates PNG image buffer from one band array.""" arr = np.random.randint(0, 255, size=(512, 512), dtype=np.uint8) - assert 
utils.array_to_image(arr) + assert utils.render(arr) -def test_array_to_image_valid_colormap(): +def test_render_valid_colormap(): """Creates 'colormaped' PNG image buffer from one band array.""" arr = np.random.randint(0, 255, size=(1, 512, 512), dtype=np.uint8) - cmap = utils.get_colormap(name="cfastie", format="gdal") - assert utils.array_to_image(arr, color_map=cmap) + mask = np.zeros((512, 512), dtype=np.uint8) + cmap = colormap.get_colormap("cfastie") + assert utils.render(arr, mask, colormap=cmap, img_format="jpeg") -def test_array_to_image_valid_colormapDict(): +def test_render_valid_colormapDict(): """Create 'colormaped' PNG image buffer from one band array using discrete cmap.""" arr = np.random.randint(0, 255, size=(1, 512, 512), dtype=np.uint8) - cmap = {1: [255, 255, 255], 50: [255, 255, 0], 100: [255, 0, 0], 150: [0, 0, 255]} - assert utils.array_to_image(arr, color_map=cmap) + cmap = { + 1: [255, 255, 255, 255], + 50: [255, 255, 0, 255], + 100: [255, 0, 0, 255], + 150: [0, 0, 255, 255], + } + assert utils.render(arr, colormap=cmap) -def test_apply_discrete_colormap_valid(): - """Apply discrete colormap to array.""" - arr = np.random.randint(0, 255, size=(1, 512, 512), dtype=np.uint8) - arr[0, 0, 0] = 1 - arr[0, 1, 1] = 100 - cmap = {1: [255, 255, 255], 50: [255, 255, 0], 100: [255, 0, 0], 150: [0, 0, 255]} - res = utils._apply_discrete_colormap(arr, cmap) - assert res[:, 0, 0].tolist() == [255, 255, 255] - assert res[:, 1, 1].tolist() == [255, 0, 0] - - -def test_array_to_image_valid_mask(): +def test_render_valid_mask(): """Creates image buffer from 3 bands array and mask.""" arr = np.random.randint(0, 255, size=(3, 512, 512), dtype=np.uint8) mask = np.zeros((512, 512), dtype=np.uint8) - assert utils.array_to_image(arr, mask=mask) - assert utils.array_to_image(arr, mask=mask, img_format="jpeg") + assert utils.render(arr, mask=mask) + assert utils.render(arr, mask=mask, img_format="jpeg") -def test_array_to_image_valid_options(): +def 
test_render_valid_options(): """Creates image buffer with driver options.""" arr = np.random.randint(0, 255, size=(3, 512, 512), dtype=np.uint8) mask = np.zeros((512, 512), dtype=np.uint8) + 255 - assert utils.array_to_image(arr, mask=mask, img_format="png", ZLEVEL=9) + assert utils.render(arr, mask=mask, img_format="png", ZLEVEL=9) -def test_array_to_image_geotiff16Bytes(): +def test_render_geotiff16Bytes(): """Creates GeoTIFF image buffer from 3 bands array.""" arr = np.random.randint(0, 255, size=(3, 512, 512), dtype=np.uint16) mask = np.zeros((512, 512), dtype=np.uint8) + 255 - assert utils.array_to_image(arr, mask=mask, img_format="GTiff") + assert utils.render(arr, mask=mask, img_format="GTiff") -def test_array_to_image_geotiff(): +def test_render_geotiff(): """Creates GeoTIFF image buffer from 3 bands array.""" arr = np.random.randint(0, 255, size=(3, 512, 512), dtype=np.uint8) mask = np.zeros((512, 512), dtype=np.uint8) + 255 - assert utils.array_to_image(arr, mask=mask, img_format="GTiff") + ops = utils.geotiff_options(1, 0, 0) + assert utils.render(arr, mask=mask, img_format="GTiff", **ops) @requires_webp -def test_array_to_image_valid_1bandWebp(): +def test_render_valid_1bandWebp(): """Creates WEBP image buffer from 1 band array.""" arr = np.random.randint(0, 255, size=(1, 512, 512), dtype=np.uint8) - assert utils.array_to_image(arr, img_format="WEBP") + assert utils.render(arr, img_format="WEBP") def test_aligned_with_internaltile(): @@ -869,93 +351,102 @@ def test_aligned_with_internaltile(): bounds = mercantile.bounds(43, 25, 7) with rasterio.open(COG_DST) as src_dst: assert not utils._requested_tile_aligned_with_internal_tile( - src_dst, bounds, 256 + src_dst, bounds, 256, 256 + ) + + with rasterio.open(NOCOG) as src_dst: + assert not utils._requested_tile_aligned_with_internal_tile( + src_dst, bounds, 256, 256 ) bounds = mercantile.bounds(147, 182, 9) with rasterio.open(COG_NOWEB) as src_dst: assert not 
utils._requested_tile_aligned_with_internal_tile( - src_dst, bounds, 256 + src_dst, bounds, 256, 256 ) with rasterio.open(COG_WEB_TILED) as src_dst: - assert utils._requested_tile_aligned_with_internal_tile(src_dst, bounds, 256) + assert utils._requested_tile_aligned_with_internal_tile( + src_dst, bounds, 256, 256 + ) -# See https://github.com/cogeotiff/rio-tiler/issues/105#issuecomment-492268836 -# def test_tile_read_validMask(): -# """Dataset mask should be the same as the actual mask.""" -# address = "{}_B2.TIF".format(LANDSAT_PATH) +def test_find_non_alpha(): + """Return valid indexes.""" + with rasterio.open(S3_ALPHA_PATH) as src_dst: + assert utils.non_alpha_indexes(src_dst) == (1, 2, 3) -# bounds = ( -# -8844681.416934313, -# 3757032.814272982, -# -8766409.899970293, -# 3835304.331237001, -# ) -# tilesize = 128 -# arr, mask = utils.tile_read(address, bounds, tilesize, nodata=0) -# masknodata = (arr[0] != 0).astype(np.uint8) * 255 -# np.testing.assert_array_equal(mask, masknodata) + with rasterio.open(PIX4D_PATH) as src_dst: + assert utils.non_alpha_indexes(src_dst) == (1, 2, 3) -def test_tile_read_crs(): - """Read tile using different target CRS and bounds CRS.""" - bounds = ( - -11663507.036777973, - 4715018.0897710975, - -11663487.927520901, - 4715037.199028169, - ) - tilesize = 16 - - # Test target CRS with input bounds in bounds_crs - arr, mask = utils.tile_read( - S3_PATH, - bounds, - tilesize, - indexes=(3, 2, 1), - dst_crs="epsg:4326", - bounds_crs="epsg:3857", - ) - assert arr.shape == (3, 16, 16) - assert mask.shape == (16, 16) +def test_has_alpha(): + """Check if rasters have alpha bands.""" + with rasterio.open(S3_ALPHA_PATH) as src_dst: + assert utils.has_alpha_band(src_dst) - # Test target CRS with input bounds in target CRS - bounds = ( - -104.7750663757324, - 38.95353532141203, - -104.77489471435543, - 38.95366881479646, - ) - arr_crs, mask_crs = utils.tile_read( - S3_PATH, bounds, tilesize, indexes=(3, 2, 1), dst_crs="epsg:4326" - ) - assert 
np.array_equal(arr, arr_crs) + with rasterio.open(COG_DST) as src_dst: + assert not utils.has_alpha_band(src_dst) -def test_tile_read_vrt_option(): - """Should work as expected (read landsat band).""" - address = "{}_B2.TIF".format(LANDSAT_PATH) - bounds = ( - -8844681.416934313, - 3757032.814272982, - -8766409.899970293, - 3835304.331237001, - ) - tilesize = 16 +def test_has_mask(): + """Should return True.""" + with rasterio.open(S3_MASK_PATH) as src_dst: + assert utils.has_mask_band(src_dst) - arr, mask = utils.tile_read( - address, bounds, tilesize, warp_vrt_option=dict(source_extra=10, num_threads=10) - ) - assert arr.shape == (1, 16, 16) - assert mask.shape == (16, 16) + with rasterio.open(COG_DST) as src_dst: + assert not utils.has_mask_band(src_dst) -def test_find_non_alpha(): - """Return valid indexes.""" - with rasterio.open(S3_ALPHA_PATH) as src_dst: - assert utils.non_alpha_indexes(src_dst) == (1, 2, 3) +def test_chunck(): + """Should split a list in multiple chunks.""" + chuncks = list(utils._chunks(list(range(10)), 3)) + assert len(chuncks) == 4 - with rasterio.open(PIX4D_PATH) as src_dst: - assert utils.non_alpha_indexes(src_dst) == (1, 2, 3) + +def test_div(): + """Should return up rounded value.""" + assert utils._div_round_up(3, 2) == 2 + assert utils._div_round_up(2, 2) == 1 + + +def test_ovr_level(): + """Should return the correct overview level.""" + with rasterio.open(COG_DST) as src_dst: + # raw/-1: 2667x2658 0: 1329x1334, 1: 665x667, 2: 333x334, 3: 167x167 + assert ( + utils.get_overview_level( + src_dst, src_dst.bounds, 100, 100, dst_crs=src_dst.crs + ) + == 3 + ) + assert ( + utils.get_overview_level( + src_dst, src_dst.bounds, 200, 200, dst_crs=src_dst.crs + ) + == 2 + ) + assert ( + utils.get_overview_level( + src_dst, src_dst.bounds, 500, 500, dst_crs=src_dst.crs + ) + == 1 + ) + assert ( + utils.get_overview_level( + src_dst, src_dst.bounds, 800, 800, dst_crs=src_dst.crs + ) + == 0 + ) + assert ( + utils.get_overview_level( + src_dst, 
src_dst.bounds, 1500, 1500, dst_crs=src_dst.crs + ) + == -1 + ) + assert ( + utils.get_overview_level( + src_dst, src_dst.bounds, 3000, 3000, dst_crs=src_dst.crs + ) + == -1 + ) diff --git a/tox.ini b/tox.ini index dab55c2a..27f8c8ae 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py27,py36,py37 +envlist = py36,py37 [testenv] extras = test @@ -13,6 +13,10 @@ deps= exclude = .git,__pycache__,docs/source/conf.py,old,build,dist max-line-length = 90 +[mypy] +no_strict_optional = True +ignore_missing_imports = True + # Release tooling [testenv:build] basepython = python3