def __init__(self, uri, rgb=None, nodata=None, linear_stretch=None, band1=None, band2=None):
    """Initialize a virtual catalog wrapping a single COG.

    :param uri: URL of a valid Cloud-Optimized GeoTIFF. Required.
    :param rgb: optional band selection for RGB rendering.
    :param nodata: optional custom NODATA value.
    :param linear_stretch: when set, apply a per-band linear stretch.
    :param band1: first band of the normalized-difference recipe (optional).
    :param band2: second band of the normalized-difference recipe (optional).
    """
    self._uri = uri
    self._rgb = rgb
    self._nodata = nodata
    self._linear_stretch = linear_stretch
    self._meta = {}
    # BUG FIX: int(None) raises TypeError, so every catalog construction
    # without band1/band2 (i.e. all plain /tiles requests) failed. Only
    # coerce to int when a value was actually supplied.
    self._band1 = int(band1) if band1 is not None else None
    self._band2 = int(band2) if band2 is not None else None

    # lazy %-style logging args instead of eager str.format
    LOG.info("band1: %s", self._band1)
    LOG.info("band2: %s", self._band2)

    # NOTE(review): reconstructed from a collapsed diff; the original
    # __init__ may continue past this point — confirm against catalogs.py.
    with get_source(self._uri) as src:
        self._bounds = warp.transform_bounds(src.crs, WGS84_CRS, *src.bounds)
def performBandMath(pixels, catalog):
    """Compute a normalized-difference band from two bands of *pixels*.

    nd = (band2 - band1) / (band2 + band1); the result is broadcast into
    every channel of a copy of the input so downstream image formatting
    still sees the same channel count.

    :param pixels: PixelCollection; assumes .data is (rows, cols, bands),
        channel-last — TODO confirm, mosaic.composite produces band-first.
    :param catalog: catalog carrying the band1/band2 selections.
    :raises NoDataAvailable: when either band selection is missing.
    :returns: new PixelCollection with the ND values.
    """
    # BUG FIX: VirtualCatalog stores the selections as _band1/_band2; the
    # original read catalog.band1 directly, which raises AttributeError
    # unless the base Catalog exposes properties. Accept either spelling.
    band1 = getattr(catalog, "band1", None)
    if band1 is None:
        band1 = getattr(catalog, "_band1", None)
    band2 = getattr(catalog, "band2", None)
    if band2 is None:
        band2 = getattr(catalog, "_band2", None)

    if band1 is None or band2 is None:
        # TODO introduce a dedicated error type
        raise NoDataAvailable()

    band1 = int(band1)
    band2 = int(band2)

    data = np.asarray(pixels.data)
    LOG.info("pixels.data shape = %s", data.shape)

    tile1 = data[:, :, band1].astype(float)
    tile2 = data[:, :, band2].astype(float)

    # BUG FIX: np.seterr was called *after* the division it was meant to
    # silence, and it mutated process-global state; np.errstate scopes the
    # suppression to exactly this computation.
    with np.errstate(divide="ignore", invalid="ignore"):
        nd = (tile2 - tile1) / (tile2 + tile1)

    # BUG FIX: the original wrote through `newData = data`, mutating the
    # caller's array in place (and truncating floats into an int dtype).
    out = data.astype(float, copy=True)
    for channel in range(out.shape[2]):
        out[:, :, channel] = nd

    return PixelCollection(out, pixels.bounds, pixels.band)
def render_nd(
    bounds,
    shape,
    target_crs,
    format,
    data_band_count,
    catalog=None,
    sources=None,
    transformation=None,
):
    """Render data intersecting *bounds* into *shape*, applying the
    normalized-difference recipe configured on the catalog's sources.

    :param bounds: marblecutter Bounds to render.
    :param shape: output (height, width).
    :param target_crs: CRS of the output raster.
    :param format: callable(pixels, data_format) -> (content_type, bytes).
        (Name shadows the builtin; kept for interface compatibility.)
    :param data_band_count: number of bands to composite.
    :param catalog: catalog to pull sources from (if *sources* not given).
    :param sources: explicit iterable of sources (optional).
    :param transformation: optional image transformation pipeline.
    :raises NoDataAvailable: when no sources or no pixels intersect.
    :returns: (headers dict with Server-Timing, formatted bytes).
    """
    resolution_m = get_resolution_in_meters(bounds, shape)
    stats = []

    if sources is None and catalog is None:
        raise Exception("Either sources or a catalog must be provided.")

    if transformation:
        bounds, shape, offsets = transformation.expand(bounds, shape)

    if sources is None:
        with Timer() as t:
            sources = catalog.get_sources(bounds, resolution_m)
        stats.append(("Get Sources", t.elapsed))

    # TODO try to avoid materializing the iterator
    sources = list(sources)

    if not sources:
        raise NoDataAvailable()

    with Timer() as t:
        sources_used, pixels = mosaic.composite(
            sources, bounds, shape, target_crs, data_band_count
        )
    stats.append(("Composite", t.elapsed))

    if pixels.data is None:
        raise NoDataAvailable()

    data_format = "raw"

    if transformation:
        with Timer() as t:
            pixels, data_format = transformation.transform(pixels)
        stats.append(("Transform", t.elapsed))

        with Timer() as t:
            pixels = transformation.postprocess(pixels, data_format, offsets)
        stats.append(("Post-process", t.elapsed))

    with Timer() as t:
        (content_type, formatted) = format(pixels, data_format)
    stats.append(("Format", t.elapsed))

    headers = {
        "Content-Type": content_type,
        "Server-Timing": [
            'op{};desc="{}";dur={:0.2f}'.format(i, name, time)
            for (i, (name, time)) in enumerate(stats)
        ]
        + [
            'src{};desc="{} - {}"'.format(
                i,
                # BUG FIX: `unicode` does not exist on Python 3 (NameError),
                # and the encoded bytes were passed to str.format. Normalize
                # via str and round-trip through ASCII to strip accents.
                unicodedata.normalize("NFKD", str(name))
                .encode("ascii", "ignore")
                .decode("ascii")
                .replace('"', '\\"'),
                url,
            )
            for (i, (name, url)) in enumerate(sources_used)
        ],
    }

    return (headers, formatted)
def composite(sources, bounds, shape, target_crs, band_count):
    """Composite data from *sources* into a single raster covering *bounds*
    in the target CRS.

    :returns: ([(name, url), ...] for sources actually used, PixelCollection
        canvas as a float32 masked array of shape (band_count,) + shape).
    """
    # avoid circular dependencies
    from marblecutter import _nodata, get_resolution_in_meters, get_source, read_window

    # TODO this belongs in render
    if bounds.crs == target_crs:
        canvas_bounds = bounds
    else:
        canvas_bounds = Bounds(
            warp.transform_bounds(bounds.crs, target_crs, *bounds.bounds), target_crs
        )

    resolution = get_resolution_in_meters(bounds, shape)
    sources = recipes.preprocess(sources, resolution=resolution)

    def _read_window(source):
        """Read one source's window; returns None on read failure so the
        caller can skip it."""
        with get_source(source.url) as src:
            LOG.info(
                "Compositing %s (%s) as band %s", source.url, source.name, source.band
            )

            # read a window from the source data
            # TODO ask for a buffer here, get back an updated bounding box
            # reflecting it
            try:
                window_data = read_window(src, canvas_bounds, shape, source.recipes)
            except Exception as e:
                LOG.exception("Error reading %s: %s", source.url, e)
                return

            return source, PixelCollection(
                window_data.data, window_data.bounds, source.band
            )

    # iterate over available sources, sorted by decreasing "quality"
    with futures.ThreadPoolExecutor(
        max_workers=multiprocessing.cpu_count() * 5
    ) as executor:
        # NOTE(review): executor.map is lazy; consume it before the pool is
        # torn down. Reconstructed from a collapsed diff — confirm the loop
        # nesting against the original mosaic.py.
        windows = recipes.postprocess(executor.map(_read_window, sources))

        sources_used = []

        canvas_data = np.ma.zeros(
            (band_count,) + shape, dtype=np.float32, fill_value=_nodata(np.float32)
        )
        canvas_data.mask = True

        canvas = PixelCollection(canvas_data, canvas_bounds)

        for source, window_data in filter(None, windows):
            window_data = recipes.apply(source.recipes, window_data, source=source)

            if window_data.data is None:
                continue

            # paste the resulting data onto the canvas
            canvas = paste(
                PixelCollection(
                    window_data.data.astype(np.float32),
                    window_data.bounds,
                    window_data.band,
                ),
                canvas,
            )
            sources_used.append(source)

            if not canvas.data.mask.any():
                # stop once every pixel is valid
                break

    # BUG FIX: the original returned a one-shot `map` iterator; a concrete
    # list survives repeated iteration by callers.
    return [(s.name, s.url) for s in sources_used], canvas
def Optimalnd():
    """Format factory for normalized-difference tiles.

    Returns a callable(pixels, data_format) -> (content_type, bytes) that
    emits JPEG for fully-opaque tiles and PNG when transparency is present.
    """

    def _format(pixels, data_format):
        if not _isimage(data_format):
            raise Exception("Must be an image format")

        data = pixels.data

        # BUG FIX: ND output can carry fewer than 4 channels, so indexing
        # channel 3 unconditionally raised IndexError. Alpha-less data is
        # treated as fully opaque.
        if data.ndim < 3 or data.shape[2] < 4:
            return JPEG_FORMAT(pixels, data_format)

        alpha = data[:, :, 3]

        if np.equal(alpha, 255).all():
            # solid: drop alpha and emit JPEG
            rgb_pixels = PixelCollection(
                data[:, :, 0:3], pixels.bounds, pixels.band
            )
            return JPEG_FORMAT(rgb_pixels, "RGB")

        # partially transparent
        return PNG_FORMAT(pixels, data_format)

    return _format
def apply(recipes, pixels, source=None):
    """Apply processing recipes to *pixels*.

    Supported recipe keys: "landsat8" and "imagery" (with optional
    "rgb_bands", "nd_bands" and "linear_stretch" refinements).

    :param recipes: dict of recipe name -> options.
    :param pixels: PixelCollection; .data is a band-first masked array.
    :param source: Source providing per-band metadata for rescaling.
    :returns: PixelCollection with processed data.
    """
    data = pixels.data

    # BUG FIX: np.iinfo raises ValueError on floating dtypes (e.g. the
    # float32 canvases produced by composite); fall back to finfo.
    if np.issubdtype(data.dtype, np.integer):
        dtype_info = np.iinfo(data.dtype)
    else:
        dtype_info = np.finfo(data.dtype)
    dtype_min = dtype_info.min
    dtype_max = dtype_info.max

    if "landsat8" in recipes:
        LOG.info("Applying landsat 8 recipe")

        out = np.ma.empty(shape=(data.shape), dtype=np.float32)

        for bdx, source_band in enumerate((4, 3, 2)):
            sun_elev = source.meta["L1_METADATA_FILE"]["IMAGE_ATTRIBUTES"][
                "SUN_ELEVATION"
            ]
            multi_reflect = source.meta["L1_METADATA_FILE"][
                "RADIOMETRIC_RESCALING"
            ].get("REFLECTANCE_MULT_BAND_{}".format(source_band))
            add_reflect = source.meta["L1_METADATA_FILE"]["RADIOMETRIC_RESCALING"].get(
                "REFLECTANCE_ADD_BAND_{}".format(source_band)
            )

            min_val = source.meta.get("values", {}).get(str(source_band), {}).get(
                "min", dtype_min
            )
            max_val = source.meta.get("values", {}).get(str(source_band), {}).get(
                "max", dtype_max
            )

            band_data = 10000 * reflectance.reflectance(
                data[bdx], multi_reflect, add_reflect, sun_elev, src_nodata=0
            )

            # calculate local min/max as fallbacks
            if (
                min_val == dtype_min
                and max_val == dtype_max
                and len(data.compressed()) > 0
            ):
                local_min, local_max = np.percentile(band_data.compressed(), (2, 98))
                min_val = max(min_val, local_min)
                max_val = min(max_val, local_max)

            out[bdx] = np.ma.where(
                band_data > 0,
                utils.linear_rescale(
                    band_data, in_range=[min_val, max_val], out_range=[0, 1]
                ),
                0,
            )

        data = out

    if "imagery" in recipes:
        LOG.info("Applying imagery recipe")

        if "rgb_bands" in recipes:
            data = np.ma.array([data[i - 1] for i in recipes["rgb_bands"]])

        if "nd_bands" in recipes:
            # normalized difference (b2 - b1) / (b2 + b1); band numbers in
            # the recipe are 1-based
            band1_tile = np.ma.array(data[recipes["nd_bands"][0] - 1]).astype(float)
            band2_tile = np.ma.array(data[recipes["nd_bands"][1] - 1]).astype(float)

            # BUG FIX: guard the divide locally instead of leaving global
            # numpy error state mutated elsewhere
            with np.errstate(divide="ignore", invalid="ignore"):
                data = np.ma.array(
                    [(band2_tile - band1_tile) / (band2_tile + band1_tile)]
                )

        if data.shape[0] > 3:
            # alpha band (and beyond) present; drop it (them)
            # TODO use band 4 as a mask instead
            data = data[0:3]

        if "linear_stretch" in recipes:
            if recipes["linear_stretch"] == "global":
                data = utils.linear_rescale(
                    data,
                    in_range=(np.min(data), np.max(data)),
                    out_range=(dtype_min, dtype_max),
                )
            elif recipes["linear_stretch"] == "per_band":
                # BUG FIX: xrange is Python 2 only -> NameError on Python 3
                for band in range(0, data.shape[0]):
                    min_val = source.meta.get("values", {}).get(band, {}).get(
                        "min", np.min(data[band])
                    )
                    max_val = source.meta.get("values", {}).get(band, {}).get(
                        "max", np.max(data[band])
                    )
                    data[band] = np.ma.where(
                        data[band] > 0,
                        utils.linear_rescale(
                            data[band],
                            in_range=(min_val, max_val),
                            out_range=(dtype_min, dtype_max),
                        ),
                        0,
                    )
        else:
            # rescale after reducing and before increasing dimensionality
            if data.dtype != np.uint8 and not np.issubdtype(data.dtype, np.floating):
                # rescale non-8-bit sources (assuming they're raw sensor data)
                # BUG FIX: xrange -> range (Python 3)
                for band in range(0, data.shape[0]):
                    min_val = source.meta.get("values", {}).get(band, {}).get(
                        "min", dtype_min
                    )
                    max_val = source.meta.get("values", {}).get(band, {}).get(
                        "max", dtype_max
                    )

                    if (
                        min_val == dtype_min
                        and max_val == dtype_max
                        and len(data.compressed()) > 0
                    ):
                        local_min, local_max = np.percentile(
                            data[band].compressed(), (2, 98)
                        )
                        min_val = max(min_val, local_min)
                        max_val = min(max_val, local_max)

                    data[band] = np.ma.where(
                        data[band] > 0,
                        utils.linear_rescale(
                            data[band],
                            in_range=(min_val, max_val),
                            out_range=(dtype_min, dtype_max),
                        ),
                        0,
                    )

        # NOTE(review): the collapsed diff makes the nesting of the two
        # blocks below ambiguous; placed inside the imagery branch to match
        # upstream marblecutter — confirm against recipes.py.
        if data.shape[0] == 1:
            # likely greyscale image; use the same band on all channels
            data = np.ma.array([data[0], data[0], data[0]])

        # normalize to 0..1 based on the range of the source type (only
        # for int*s)
        if not np.issubdtype(data.dtype, np.floating):
            data = data.astype(np.float32) / np.iinfo(data.dtype).max

    return PixelCollection(data, pixels.bounds)
def preprocess(sources, resolution=None, max_sources=15):
    """Filter and expand *sources* before compositing.

    Landsat 8 sources are expanded into one Source per mapped band (the pan
    band only when *resolution* benefits from pansharpening); all other
    sources pass through unchanged.

    :param sources: iterable of Source-like objects (needs .recipes).
    :param resolution: (x, y) target resolution in meters; consulted only
        for Landsat pan bands.
    :param max_sources: cap on input sources considered (generalized from a
        hard-coded limit of 15; default preserves the old behavior).
    :returns: generator of sources.
    """
    for idx, source in enumerate(sources):
        # limit the number of sources used
        if idx == max_sources:
            return

        if "landsat8" in source.recipes:
            for target_band, source_band in source.band_info.items():
                band = BAND_MAPPING.get(target_band)
                expanded = Source(
                    source.url.replace("{band}", str(source_band)),
                    source.name,
                    source.resolution,
                    source.band_info,
                    source.meta,
                    source.recipes,
                    source.acquired_at,
                    band,
                    source.priority,
                )
                if target_band == "pan":
                    # only worth fetching pan when the target resolution is
                    # within 2x of the source's
                    if min(resolution) < source.resolution * 2:
                        yield expanded
                elif band is not None:
                    yield expanded
        else:
            yield source
# BUG FIX(review): the diff defines Optimalnd() in nd_formats yet bound
# IMAGE_FORMAT_ND = Optimal(); the ND-aware format is needed so single-band
# normalized-difference tiles render — TODO confirm intent with the author.
IMAGE_FORMAT_ND = Optimalnd()


# NOTE(review): the route strings lost their converter segments in transit
# ("/ndtiles///"); reconstructed to mirror the existing /tiles routes.
@app.route("/ndtiles/<int:z>/<int:x>/<int:y>")
@app.route("/ndtiles/<int:z>/<int:x>/<int:y>@<int:scale>x")
@app.route("/<prefix>/ndtiles/<int:z>/<int:x>/<int:y>")
@app.route("/<prefix>/ndtiles/<int:z>/<int:x>/<int:y>@<int:scale>x")
def render_nd_png(z, x, y, scale=1, prefix=None):
    """Render a normalized-difference tile (PNG or JPEG depending on
    transparency) for the COG identified by the request's query args.

    :param z, x, y: tile coordinates.
    :param scale: retina scale factor (1 or 2).
    :param prefix: optional URL prefix segment (unused beyond routing).
    :returns: (body bytes, 200, headers) Flask response tuple.
    """
    catalog = make_catalog(request.args)
    tile = Tile(x, y, z)
    headers, data = marblecutter_nd.render_nd_tile(
        tile,
        catalog,
        format=IMAGE_FORMAT_ND,
        transformation=IMAGE_TRANSFORMATION,
        scale=scale,
    )

    headers.update(catalog.headers)

    return data, 200, headers