0. Overhaul of how datasets work

This commit is contained in:
Caleb Robinson 2019-08-04 22:24:29 +00:00
Родитель 9b416febc1
Коммит a07f3eee11
6 изменённых файлов: 739 добавлений и 524 удалений

Просмотреть файл

@ -32,6 +32,8 @@ import glob
from web_tool import ROOT_DIR
from DataLoaderAbstract import DataLoader
# ------------------------------------------------------
# Miscellaneous methods
# ------------------------------------------------------
@ -49,251 +51,7 @@ def extent_to_transformed_geom(extent, dest_crs="EPSG:4269"):
return fiona.transform.transform_geom(src_crs, dest_crs, geom)
# ------------------------------------------------------
# Methods for DataLayerTypes.CUSTOM
# ------------------------------------------------------
def lookup_shape_by_extent(extent, shapes, shapes_crs):
    """Return (index, shape) of the first shape in `shapes` that fully
    contains `extent` (after reprojecting the extent into `shapes_crs`).

    Raises:
        ValueError: if no shape contains the extent.
    """
    # Dropped the unused `mask_geom = None` local from the original.
    transformed_geom = extent_to_transformed_geom(extent, shapes_crs)
    transformed_shape = shapely.geometry.shape(transformed_geom)
    for i, shape in enumerate(shapes):
        if shape.contains(transformed_shape):
            return i, shape
    raise ValueError("No shape contains the extent")
def download_custom_data_by_extent(extent, shapes, shapes_crs, data_fn):
    """Crop the raster `data_fn` to the shape (from `shapes`) that contains
    `extent`.

    Returns:
        (image, profile, transform). The image is padded to 4 bands by
        duplicating band 0 when the source raster only has 3.
    """
    # First, figure out which shape the extent is in
    _, shape = lookup_shape_by_extent(extent, shapes, shapes_crs)
    mask_geom = shapely.geometry.mapping(shape)

    # Second, crop out that area for running the entire model on.
    # `with` guarantees the dataset is closed even if masking raises
    # (the original open()/close() pair leaked on error).
    with rasterio.open(os.path.join(ROOT_DIR, data_fn), "r") as f:
        src_profile = f.profile
        src_crs = f.crs.to_string()
        transformed_mask_geom = fiona.transform.transform_geom(shapes_crs, src_crs, mask_geom)
        src_image, src_transform = rasterio.mask.mask(f, [transformed_mask_geom], crop=True)

    if src_image.shape[0] == 3:
        # Pad to 4 bands by duplicating band 0 — presumably a stand-in for a
        # missing NIR band; TODO confirm intended semantics.
        src_image = np.concatenate([
            src_image,
            src_image[0][np.newaxis]
        ], axis=0)
    return src_image, src_profile, src_transform
def get_custom_data_by_extent(extent, padding, data_fn):
    """Read a window of `data_fn` covering `extent`, grown by `padding`
    (in the raster's CRS units).

    Returns:
        (image, crs, transform, bounds, index). The image is padded to 4
        bands by duplicating band 0 when the source only has 3.
    """
    # Context manager closes the dataset even when masking raises
    # (the original open()/close() pair leaked on error).
    with rasterio.open(os.path.join(ROOT_DIR, data_fn), "r") as f:
        src_index = f.index
        src_crs = f.crs
        transformed_geom = extent_to_transformed_geom(extent, f.crs.to_dict())
        transformed_geom = shapely.geometry.shape(transformed_geom)

        # Buffer, then mask with the buffered geometry's bounding box.
        buffed_geom = transformed_geom.buffer(padding)
        geom = shapely.geometry.mapping(shapely.geometry.box(*buffed_geom.bounds))
        src_image, src_transform = rasterio.mask.mask(f, [geom], crop=True)

    if src_image.shape[0] == 3:
        # Pad to 4 bands by duplicating band 0 — TODO confirm intended
        # semantics of the 4th band.
        src_image = np.concatenate([
            src_image,
            src_image[0][np.newaxis]
        ], axis=0)
    return src_image, src_crs, src_transform, buffed_geom.bounds, src_index
# ------------------------------------------------------
# Methods for DataLayerTypes.USA_NAIP_LIST
# ------------------------------------------------------
# Functional Enum API: members are auto-numbered 1..N in listed order, which
# matches the original explicit values (NAIP=1 ... LANDCOVER=6).
GeoDataTypes = Enum("GeoDataTypes", [
    "NAIP",
    "NLCD",
    "LANDSAT_LEAFON",
    "LANDSAT_LEAFOFF",
    "BUILDINGS",
    "LANDCOVER",
])
class LookupNAIP(object):
    """Lazy lookup from an extent to the NAIP tile filename containing it.

    TILES is loaded once, on first use, from data/tiles.p (a pickled sequence
    of (filename, geometry) pairs) and cached at class level.
    """
    TILES = None

    @staticmethod
    def lookup(extent):
        """Return the filename of the NAIP tile that fully contains `extent`."""
        if LookupNAIP.TILES is None:
            assert all([os.path.exists(fn) for fn in [
                ROOT_DIR + "/data/tile_index.dat",
                ROOT_DIR + "/data/tile_index.idx",
                ROOT_DIR + "/data/tiles.p"
            ]]), "You do not have the correct files, did you setup the project correctly"
            # `with` closes the pickle file handle promptly; the original
            # pickle.load(open(...)) left it open until GC.
            with open(ROOT_DIR + "/data/tiles.p", "rb") as f:
                LookupNAIP.TILES = pickle.load(f)
        return LookupNAIP.lookup_naip_tile_by_geom(extent)

    @staticmethod
    def lookup_naip_tile_by_geom(extent):
        """R-tree query for candidate tiles intersecting `extent`; return the
        first tile whose geometry fully contains the extent.

        Raises:
            ValueError: when no tile contains (or even intersects) the extent.
        """
        tile_index = rtree.index.Index(ROOT_DIR + "/data/tile_index")

        # Work in EPSG:4269 (NAD83), the CRS of the tile index.
        geom = extent_to_transformed_geom(extent, "EPSG:4269")
        minx, miny, maxx, maxy = shapely.geometry.shape(geom).bounds
        geom = shapely.geometry.mapping(shapely.geometry.box(minx, miny, maxx, maxy, ccw=True))
        geom = shapely.geometry.shape(geom)

        intersected_indices = list(tile_index.intersection(geom.bounds))
        for idx in intersected_indices:
            intersected_fn = LookupNAIP.TILES[idx][0]
            intersected_geom = LookupNAIP.TILES[idx][1]
            if intersected_geom.contains(geom):
                print("Found %d intersections, returning at %s" % (len(intersected_indices), intersected_fn))
                return intersected_fn

        if len(intersected_indices) > 0:
            raise ValueError("Error, there are overlaps with tile index, but no tile completely contains selection")
        else:
            raise ValueError("No tile intersections")
def get_fn_by_geo_data_type(naip_fn, geo_data_type):
    """Derive the filename of the co-registered layer `geo_data_type` from the
    NAIP tile filename `naip_fn`.

    Raises:
        ValueError: for an unrecognized `geo_data_type`.
    """
    # Each rule is (old path fragment, new path fragment, filename suffix);
    # derived layers live in parallel directory trees with a layer suffix.
    # NAIP itself maps to the input filename unchanged.
    rules = {
        GeoDataTypes.NAIP: None,
        GeoDataTypes.NLCD: ("/esri-naip/", "/resampled-nlcd/", "_nlcd.tif"),
        GeoDataTypes.LANDSAT_LEAFON: ("/esri-naip/data/v1/", "/resampled-landsat8/data/leaf_on/", "_landsat.tif"),
        GeoDataTypes.LANDSAT_LEAFOFF: ("/esri-naip/data/v1/", "/resampled-landsat8/data/leaf_off/", "_landsat.tif"),
        GeoDataTypes.BUILDINGS: ("/esri-naip/", "/resampled-buildings/", "_building.tif"),
        GeoDataTypes.LANDCOVER: ("/esri-naip/", "/resampled-lc/", "_lc.tif"),
    }
    if geo_data_type not in rules:
        raise ValueError("GeoDataType not recognized")
    rule = rules[geo_data_type]
    if rule is None:
        return naip_fn
    old_part, new_part, suffix = rule
    # Strip the ".tif" extension before appending the layer suffix.
    return naip_fn.replace(old_part, new_part)[:-4] + suffix
def download_usa_data_by_extent(extent, geo_data_type=GeoDataTypes.NAIP):
    """Read the full tile (all bands) covering `extent` for `geo_data_type`.

    Returns:
        (data, profile, transform).
    """
    naip_fn = LookupNAIP.lookup(extent)
    fn = get_fn_by_geo_data_type(naip_fn, geo_data_type)

    # Context manager closes the dataset even if read() raises; the leftover
    # debug prints of the profile/transform were removed.
    with rasterio.open(fn, "r") as f:
        src_profile = f.profile
        src_transform = f.profile["transform"]
        data = f.read()
    return data, src_profile, src_transform
def get_usa_data_by_extent(extent, padding=20, geo_data_type=GeoDataTypes.NAIP):
    """Read a window of the layer `geo_data_type` covering `extent`, grown by
    `padding` (in the raster's CRS units).

    Returns:
        (image, crs, transform, bounds, index).
    """
    naip_fn = LookupNAIP.lookup(extent)
    fn = get_fn_by_geo_data_type(naip_fn, geo_data_type)

    # Context manager closes the dataset even when masking raises
    # (the original open()/close() pair leaked on error).
    with rasterio.open(fn, "r") as f:
        src_index = f.index
        src_crs = f.crs
        transformed_geom = extent_to_transformed_geom(extent, f.crs.to_dict())
        transformed_geom = shapely.geometry.shape(transformed_geom)

        # Buffer, then mask with the buffered geometry's bounding box.
        buffed_geom = transformed_geom.buffer(padding)
        geom = shapely.geometry.mapping(shapely.geometry.box(*buffed_geom.bounds))
        src_image, src_transform = rasterio.mask.mask(f, [geom], crop=True)

    return src_image, src_crs, src_transform, buffed_geom.bounds, src_index
# ------------------------------------------------------
# Methods for DataLayerTypes.ESRI_WORLD_IMAGERY
# ------------------------------------------------------
def get_image_by_xyz_from_url(tile):
    '''Fetch one ESRI World Imagery tile and return it as an RGB numpy array.

    NOTE: Here "tile" refers to a mercantile "Tile" object.
    '''
    url = "https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/%d/%d/%d" % (
        tile.z, tile.y, tile.x
    )
    # `with` closes the HTTP response even on error (the original leaked it).
    with urlopen(url) as req:
        arr = np.asarray(bytearray(req.read()), dtype=np.uint8)
    img = cv2.imdecode(arr, -1)  # OpenCV decodes to BGR channel order
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    return img
def get_tile_as_virtual_raster(tile):
    '''NOTE: Here "tile" refers to a mercantile "Tile" object.'''
    # Download the RGB tile image, then wrap it in an in-memory GeoTIFF whose
    # transform georeferences the 256x256 pixels to the tile's lat/lon bounds.
    img = get_image_by_xyz_from_url(tile)
    geom = shapely.geometry.shape(mercantile.feature(tile)["geometry"])
    minx, miny, maxx, maxy = geom.bounds
    dst_transform = rasterio.transform.from_bounds(minx, miny, maxx, maxy, 256, 256)
    dst_profile = {
        "driver": "GTiff",
        "width": 256,
        "height": 256,
        "transform": dst_transform,
        "crs": "epsg:4326",
        "count": 3,
        "dtype": "uint8"
    }
    # The MemoryFile is returned still open; the caller is responsible for
    # opening datasets from it and for closing it when done.
    test_f = rasterio.io.MemoryFile()
    with test_f.open(**dst_profile) as test_d:
        # Write the R, G, B channels as bands 1-3.
        test_d.write(img[:,:,0], 1)
        test_d.write(img[:,:,1], 2)
        test_d.write(img[:,:,2], 3)
    test_f.seek(0)
    return test_f
def get_esri_data_by_extent(extent, padding=0.0001, zoom_level=17):
    # Mosaic all ESRI basemap tiles overlapping the (buffered) extent into one
    # image, returning (image, crs, transform, bounds, index).
    transformed_geom = extent_to_transformed_geom(extent, "epsg:4326")
    transformed_geom = shapely.geometry.shape(transformed_geom)
    buffed_geom = transformed_geom.buffer(padding)  # padding in degrees here
    minx, miny, maxx, maxy = buffed_geom.bounds

    # Each tile becomes an in-memory raster so rasterio.merge can mosaic them.
    virtual_files = [] # this is nutty
    virtual_datasets = []
    for i, tile in enumerate(mercantile.tiles(minx, miny, maxx, maxy, zoom_level)):
        f = get_tile_as_virtual_raster(tile)
        virtual_files.append(f)
        virtual_datasets.append(f.open())
    out_image, out_transform = rasterio.merge.merge(virtual_datasets, bounds=(minx, miny, maxx, maxy))
    for ds in virtual_datasets:
        ds.close()
    for f in virtual_files:
        f.close()

    dst_crs = rasterio.crs.CRS.from_epsg(4326)
    # NOTE(review): `r,g,b = out_image` below implies out_image is
    # (bands, height, width), but width/height here are taken from
    # shape[1]/shape[0] and the writes index out_image[:,:,c] — these look
    # mutually inconsistent; confirm the intended axis layout.
    dst_profile = {
        "driver": "GTiff",
        "width": out_image.shape[1],
        "height": out_image.shape[0],
        "transform": out_transform,
        "crs": "epsg:4326",
        "count": 3,
        "dtype": "uint8"
    }
    # Round-trip through an in-memory GTiff only to recover a dataset index.
    test_f = rasterio.io.MemoryFile()
    with test_f.open(**dst_profile) as test_d:
        test_d.write(out_image[:,:,0], 1)
        test_d.write(out_image[:,:,1], 2)
        test_d.write(out_image[:,:,2], 3)
    test_f.seek(0)
    with test_f.open() as test_d:
        dst_index = test_d.index
    test_f.close()

    # Pad to 4 bands by repeating the red band as the 4th.
    r,g,b = out_image
    out_image = np.stack([r,g,b,r])

    return out_image, dst_crs, out_transform, (minx, miny, maxx, maxy), dst_index
# ------------------------------------------------------
# Methods for getting ouputs ready to display on the frontend
# ------------------------------------------------------
def warp_data_to_3857(src_img, src_crs, src_transform, src_bounds):
def warp_data_to_3857(src_img, src_crs, src_transform, src_bounds, resolution=1):
''' Assume that src_img is (height, width, channels)
'''
assert len(src_img.shape) == 3
@ -311,7 +69,7 @@ def warp_data_to_3857(src_img, src_crs, src_transform, src_bounds):
bottom=src_bounds[1],
right=src_bounds[2],
top=src_bounds[3],
resolution=1
resolution=resolution
)
dst_image = np.zeros((num_channels, height, width), np.float32)
@ -335,4 +93,287 @@ def crop_data_by_extent(src_img, src_bounds, extent):
new_bounds = np.array(src_bounds)
diff = np.round(original_bounds - new_bounds).astype(int)
return src_img[diff[1]:diff[3], diff[0]:diff[2], :]
return src_img[diff[1]:diff[3], diff[0]:diff[2], :]
# ------------------------------------------------------
# DataLoader for arbitrary GeoTIFFs
# ------------------------------------------------------
class DataLoaderCustom(DataLoader):
    """DataLoader backed by a single local GeoTIFF plus optional shape layers.

    `shapes` maps a layer name to a dict with "geoms" (shapely geometries),
    "areas" (km^2) and "crs", as built by Datasets.py at import time.
    """

    def __init__(self, data_fn, shapes, padding):
        # data_fn: raster path relative to ROOT_DIR.
        # padding: buffer (in the raster's CRS units) applied around extents.
        self.data_fn = data_fn
        self.shapes = shapes
        self.padding = padding

    def get_data_from_extent(self, extent):
        """Crop a padded window around `extent` out of the raster.

        Returns:
            (image, crs, transform, bounds, index). The image is padded to 4
            bands by duplicating band 0 when the source only has 3.
        """
        # `with` guarantees the dataset is closed even if masking raises
        # (the original open()/close() pair leaked on error).
        with rasterio.open(os.path.join(ROOT_DIR, self.data_fn), "r") as f:
            src_index = f.index
            src_crs = f.crs
            transformed_geom = extent_to_transformed_geom(extent, f.crs.to_dict())
            transformed_geom = shapely.geometry.shape(transformed_geom)

            buffed_geom = transformed_geom.buffer(self.padding)
            geom = shapely.geometry.mapping(shapely.geometry.box(*buffed_geom.bounds))
            src_image, src_transform = rasterio.mask.mask(f, [geom], crop=True)

        if src_image.shape[0] == 3:
            # Pad to 4 bands by duplicating band 0 — TODO confirm intended
            # semantics of the 4th band.
            src_image = np.concatenate([
                src_image,
                src_image[0][np.newaxis]
            ], axis=0)

        return src_image, src_crs, src_transform, buffed_geom.bounds, src_index

    def get_area_from_shape_by_extent(self, extent, shape_layer):
        """Return the precomputed area (km^2) of the shape containing `extent`."""
        i, _ = self.get_shape_by_extent(extent, shape_layer)
        return self.shapes[shape_layer]["areas"][i]

    def get_data_from_shape_by_extent(self, extent, shape_layer):
        """Crop the raster to the whole shape (from `shape_layer`) whose
        centroid test matches `extent`.

        Returns:
            (image, profile, transform, bounds, crs).
        """
        # First, figure out which shape the extent is in
        _, shape = self.get_shape_by_extent(extent, shape_layer)
        mask_geom = shapely.geometry.mapping(shape)

        # Second, crop out that area for running the entire model on.
        # (Dropped the original's unused `src_bounds = f.bounds` local.)
        with rasterio.open(os.path.join(ROOT_DIR, self.data_fn), "r") as f:
            src_profile = f.profile
            src_crs = f.crs.to_string()
            transformed_mask_geom = fiona.transform.transform_geom(self.shapes[shape_layer]["crs"], src_crs, mask_geom)
            src_image, src_transform = rasterio.mask.mask(f, [transformed_mask_geom], crop=True, all_touched=True, pad=False)

        if src_image.shape[0] == 3:
            src_image = np.concatenate([
                src_image,
                src_image[0][np.newaxis]
            ], axis=0)

        return src_image, src_profile, src_transform, shapely.geometry.shape(transformed_mask_geom).bounds, src_crs

    def get_shape_by_extent(self, extent, shape_layer):
        """Return (index, shape) of the first shape in `shape_layer` whose
        geometry contains the centroid of `extent`.

        Raises:
            ValueError: if no shape contains the centroid.
        """
        # Dropped the unused `mask_geom = None` local from the original.
        transformed_geom = extent_to_transformed_geom(extent, self.shapes[shape_layer]["crs"])
        transformed_shape = shapely.geometry.shape(transformed_geom)
        for i, shape in enumerate(self.shapes[shape_layer]["geoms"]):
            if shape.contains(transformed_shape.centroid):
                return i, shape
        raise ValueError("No shape contains the centroid")
# ------------------------------------------------------
# DataLoader for US NAIP data and other aligned layers
# ------------------------------------------------------
class NAIPTileIndex(object):
    """Lazy lookup from an extent to the NAIP tile filename containing it.

    TILES is loaded once, on first use, from data/tiles.p (a pickled sequence
    of (filename, geometry) pairs) and cached at class level.
    """
    TILES = None

    @staticmethod
    def lookup(extent):
        """Return the filename of the NAIP tile that fully contains `extent`."""
        if NAIPTileIndex.TILES is None:
            assert all([os.path.exists(fn) for fn in [
                ROOT_DIR + "/data/tile_index.dat",
                ROOT_DIR + "/data/tile_index.idx",
                ROOT_DIR + "/data/tiles.p"
            ]]), "You do not have the correct files, did you setup the project correctly"
            # `with` closes the pickle file handle promptly; the original
            # pickle.load(open(...)) left it open until GC.
            with open(ROOT_DIR + "/data/tiles.p", "rb") as f:
                NAIPTileIndex.TILES = pickle.load(f)
        return NAIPTileIndex.lookup_naip_tile_by_geom(extent)

    @staticmethod
    def lookup_naip_tile_by_geom(extent):
        """R-tree query for candidate tiles intersecting `extent`; return the
        first tile whose geometry fully contains the extent.

        Raises:
            ValueError: when no tile contains (or even intersects) the extent.
        """
        tile_index = rtree.index.Index(ROOT_DIR + "/data/tile_index")

        # Work in EPSG:4269 (NAD83), the CRS of the tile index.
        geom = extent_to_transformed_geom(extent, "EPSG:4269")
        minx, miny, maxx, maxy = shapely.geometry.shape(geom).bounds
        geom = shapely.geometry.mapping(shapely.geometry.box(minx, miny, maxx, maxy, ccw=True))
        geom = shapely.geometry.shape(geom)

        intersected_indices = list(tile_index.intersection(geom.bounds))
        for idx in intersected_indices:
            intersected_fn = NAIPTileIndex.TILES[idx][0]
            intersected_geom = NAIPTileIndex.TILES[idx][1]
            if intersected_geom.contains(geom):
                print("Found %d intersections, returning at %s" % (len(intersected_indices), intersected_fn))
                return intersected_fn

        if len(intersected_indices) > 0:
            raise ValueError("Error, there are overlaps with tile index, but no tile completely contains selection")
        else:
            raise ValueError("No tile intersections")
# Functional Enum API: members are auto-numbered 1..N in listed order, which
# matches the original explicit values (NAIP=1 ... LANDCOVER=6).
USALayerGeoDataTypes = Enum("USALayerGeoDataTypes", [
    "NAIP",
    "NLCD",
    "LANDSAT_LEAFON",
    "LANDSAT_LEAFOFF",
    "BUILDINGS",
    "LANDCOVER",
])
class DataLoaderUSALayer(DataLoader):
    """DataLoader for the USA-wide NAIP tile stack and its aligned layers.

    `shapes` maps a layer name to a dict with "geoms", "areas" and "crs", as
    built by Datasets.py (may be None for datasets without shape layers).
    """

    def __init__(self, shapes, padding):
        self.shapes = shapes
        print("Loading US layer with shapes", self.shapes)
        self.padding = padding

    def get_fn_by_geo_data_type(self, naip_fn, geo_data_type):
        """Derive the filename of the co-registered layer `geo_data_type` from
        the NAIP tile filename `naip_fn`.

        Raises:
            ValueError: for an unrecognized `geo_data_type`.
        """
        fn = None
        if geo_data_type == USALayerGeoDataTypes.NAIP:
            fn = naip_fn
        elif geo_data_type == USALayerGeoDataTypes.NLCD:
            fn = naip_fn.replace("/esri-naip/", "/resampled-nlcd/")[:-4] + "_nlcd.tif"
        elif geo_data_type == USALayerGeoDataTypes.LANDSAT_LEAFON:
            fn = naip_fn.replace("/esri-naip/data/v1/", "/resampled-landsat8/data/leaf_on/")[:-4] + "_landsat.tif"
        elif geo_data_type == USALayerGeoDataTypes.LANDSAT_LEAFOFF:
            fn = naip_fn.replace("/esri-naip/data/v1/", "/resampled-landsat8/data/leaf_off/")[:-4] + "_landsat.tif"
        elif geo_data_type == USALayerGeoDataTypes.BUILDINGS:
            fn = naip_fn.replace("/esri-naip/", "/resampled-buildings/")[:-4] + "_building.tif"
        elif geo_data_type == USALayerGeoDataTypes.LANDCOVER:
            fn = naip_fn.replace("/esri-naip/", "/resampled-lc/")[:-4] + "_lc.tif"
        else:
            raise ValueError("GeoDataType not recognized")
        return fn

    def get_shape_by_extent(self, extent, shape_layer):
        """Return (index, shape) of the first shape in `shape_layer` whose
        geometry contains the centroid of `extent`.

        BUG FIX: the original referenced undefined names `shapes_crs` and
        `shapes` (a guaranteed NameError); use this instance's shape layers,
        mirroring DataLoaderCustom.get_shape_by_extent.

        Raises:
            ValueError: if no shape contains the centroid.
        """
        transformed_geom = extent_to_transformed_geom(extent, self.shapes[shape_layer]["crs"])
        transformed_shape = shapely.geometry.shape(transformed_geom)
        for i, shape in enumerate(self.shapes[shape_layer]["geoms"]):
            if shape.contains(transformed_shape.centroid):
                return i, shape
        raise ValueError("No shape contains the centroid")

    def get_data_from_extent(self, extent, geo_data_type=USALayerGeoDataTypes.NAIP):
        """Crop a padded window around `extent` from the tile containing it.

        Returns:
            (image, crs, transform, bounds, index).
        """
        naip_fn = NAIPTileIndex.lookup(extent)
        fn = self.get_fn_by_geo_data_type(naip_fn, geo_data_type)

        # `with` guarantees the dataset is closed even if masking raises
        # (the original open()/close() pair leaked on error).
        with rasterio.open(fn, "r") as f:
            src_index = f.index
            src_crs = f.crs
            transformed_geom = extent_to_transformed_geom(extent, f.crs.to_dict())
            transformed_geom = shapely.geometry.shape(transformed_geom)

            buffed_geom = transformed_geom.buffer(self.padding)
            geom = shapely.geometry.mapping(shapely.geometry.box(*buffed_geom.bounds))
            src_image, src_transform = rasterio.mask.mask(f, [geom], crop=True)

        return src_image, src_crs, src_transform, buffed_geom.bounds, src_index

    def get_area_from_shape_by_extent(self, extent, shape_layer):
        raise NotImplementedError()

    def get_data_from_shape_by_extent(self, extent, shape_layer, geo_data_type=USALayerGeoDataTypes.NAIP):
        """Read the full tile containing `extent` for `geo_data_type`.

        NOTE(review): despite the name, this reads the whole tile and does not
        mask by `shape_layer` — confirm whether that is intended.

        Returns:
            (data, profile, transform, bounds, crs).
        """
        naip_fn = NAIPTileIndex.lookup(extent)
        fn = self.get_fn_by_geo_data_type(naip_fn, geo_data_type)

        with rasterio.open(fn, "r") as f:
            src_profile = f.profile
            src_transform = f.profile["transform"]
            src_bounds = f.bounds
            src_crs = f.crs
            data = f.read()
        return data, src_profile, src_transform, src_bounds, src_crs
# ------------------------------------------------------
# DataLoader for loading RGB data from arbitrary basemaps
# ------------------------------------------------------
class DataLoaderBasemap(DataLoader):
    """DataLoader that mosaics RGB imagery from an arbitrary XYZ tile basemap.

    `data_url` is a URL template with `{z}`, `{y}`, `{x}` placeholders.
    """

    def __init__(self, data_url, padding):
        self.data_url = data_url
        self.padding = padding  # buffer in degrees (EPSG:4326)
        self.zoom_level = 17    # fixed tile zoom used for all requests

    def get_image_by_xyz_from_url(self, tile):
        '''Fetch one basemap tile and return it as an RGB numpy array.

        NOTE: Here "tile" refers to a mercantile "Tile" object.
        '''
        # `with` closes the HTTP response even on error (the original leaked it).
        with urlopen(self.data_url.format(z=tile.z, y=tile.y, x=tile.x)) as req:
            arr = np.asarray(bytearray(req.read()), dtype=np.uint8)
        img = cv2.imdecode(arr, -1)  # OpenCV decodes to BGR channel order
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        return img

    def get_tile_as_virtual_raster(self, tile):
        '''Wrap one downloaded tile in an in-memory GeoTIFF georeferenced to
        the tile's lat/lon bounds.

        NOTE: Here "tile" refers to a mercantile "Tile" object.  The returned
        MemoryFile is still open; the caller must close it.
        '''
        img = self.get_image_by_xyz_from_url(tile)
        geom = shapely.geometry.shape(mercantile.feature(tile)["geometry"])
        minx, miny, maxx, maxy = geom.bounds
        dst_transform = rasterio.transform.from_bounds(minx, miny, maxx, maxy, 256, 256)
        dst_profile = {
            "driver": "GTiff",
            "width": 256,
            "height": 256,
            "transform": dst_transform,
            "crs": "epsg:4326",
            "count": 3,
            "dtype": "uint8"
        }
        test_f = rasterio.io.MemoryFile()
        with test_f.open(**dst_profile) as test_d:
            # Write the R, G, B channels as bands 1-3.
            test_d.write(img[:,:,0], 1)
            test_d.write(img[:,:,1], 2)
            test_d.write(img[:,:,2], 3)
        test_f.seek(0)
        return test_f

    def get_shape_by_extent(self, extent, shape_layer):
        raise NotImplementedError()

    def get_data_from_extent(self, extent):
        """Mosaic all basemap tiles overlapping the (buffered) extent.

        Returns:
            (image, crs, transform, bounds, index).
        """
        transformed_geom = extent_to_transformed_geom(extent, "epsg:4326")
        transformed_geom = shapely.geometry.shape(transformed_geom)
        buffed_geom = transformed_geom.buffer(self.padding)
        minx, miny, maxx, maxy = buffed_geom.bounds

        # Each tile becomes an in-memory raster so rasterio.merge can mosaic
        # them; both the datasets and their MemoryFiles must be closed after.
        virtual_files = [] # this is nutty
        virtual_datasets = []
        for i, tile in enumerate(mercantile.tiles(minx, miny, maxx, maxy, self.zoom_level)):
            f = self.get_tile_as_virtual_raster(tile)
            virtual_files.append(f)
            virtual_datasets.append(f.open())
        out_image, out_transform = rasterio.merge.merge(virtual_datasets, bounds=(minx, miny, maxx, maxy))
        for ds in virtual_datasets:
            ds.close()
        for f in virtual_files:
            f.close()

        dst_crs = rasterio.crs.CRS.from_epsg(4326)
        # NOTE(review): `r,g,b = out_image` below implies out_image is
        # (bands, height, width), but width/height here come from
        # shape[1]/shape[0] and the writes index out_image[:,:,c] — these look
        # mutually inconsistent; confirm the intended axis layout.
        dst_profile = {
            "driver": "GTiff",
            "width": out_image.shape[1],
            "height": out_image.shape[0],
            "transform": out_transform,
            "crs": "epsg:4326",
            "count": 3,
            "dtype": "uint8"
        }
        # Round-trip through an in-memory GTiff only to recover a dataset index.
        test_f = rasterio.io.MemoryFile()
        with test_f.open(**dst_profile) as test_d:
            test_d.write(out_image[:,:,0], 1)
            test_d.write(out_image[:,:,1], 2)
            test_d.write(out_image[:,:,2], 3)
        test_f.seek(0)
        with test_f.open() as test_d:
            dst_index = test_d.index
        test_f.close()

        # Pad to 4 bands by repeating the red band as the 4th.
        r,g,b = out_image
        out_image = np.stack([r,g,b,r])

        return out_image, dst_crs, out_transform, (minx, miny, maxx, maxy), dst_index

    def get_area_from_shape_by_extent(self, extent, shape_layer):
        raise NotImplementedError()

    def get_data_from_shape_by_extent(self, extent, shape_layer):
        raise NotImplementedError()

Просмотреть файл

@ -0,0 +1,29 @@
from abc import ABC, abstractmethod
class DataLoader(ABC):
    """Interface for dataset-specific data access used by the web tool.

    Concrete implementations load imagery from a local GeoTIFF, the USA NAIP
    layer stack, or an XYZ tile basemap.  (The commented-out abstract
    `padding`/`shapes` property stubs from the original were removed as dead
    code; implementations simply set those attributes in __init__.)
    """

    @abstractmethod
    def get_shape_by_extent(self, extent, shape_layer):
        """Return (index, shape) of the shape in `shape_layer` matching `extent`."""
        raise NotImplementedError()

    @abstractmethod
    def get_data_from_extent(self, extent):
        """Return imagery (plus georeferencing info) covering `extent`."""
        raise NotImplementedError()

    @abstractmethod
    def get_area_from_shape_by_extent(self, extent, shape_layer):
        """Return the area of the shape in `shape_layer` matching `extent`."""
        raise NotImplementedError()

    @abstractmethod
    def get_data_from_shape_by_extent(self, extent, shape_layer):
        """Return imagery cropped to the shape in `shape_layer` matching `extent`."""
        raise NotImplementedError()

366
web_tool/Datasets.py Normal file
Просмотреть файл

@ -0,0 +1,366 @@
import os
import utm
import fiona
import fiona.transform
import shapely
import shapely.geometry
from enum import Enum
from web_tool import ROOT_DIR
from DataLoaderNew import DataLoaderCustom, DataLoaderUSALayer, DataLoaderBasemap
# Functional Enum API: members are auto-numbered 1..N in listed order, which
# matches the original explicit values (CUSTOM=1, USA_LAYER=2, BASEMAP=3).
DatasetTypes = Enum("DatasetTypes", [
    "CUSTOM",
    "USA_LAYER",
    "BASEMAP",
])
'''
This dictionary defines how the backend tool will return data to the frontend.
An entry is formatted like below:

"LAYER NAME": {
    "data_layer_type": DatasetTypes.BASEMAP,
    "shapes_fn": None,
    "data_fn": None,
    "shapes": None,  # NOTE: this is always `None` and populated automatically when this file loads (see code at bottom of file)
    "shapes_crs": None,  # NOTE: this is always `None` and populated automatically when this file loads (see code at bottom of file)
    "padding": None  # NOTE: this is optional and only used in DatasetTypes.CUSTOM
}

LAYER_NAME - should correspond to an entry in js/tile_layers.js
data_layer_type - should be an item from the DatasetTypes enum and describes where the data comes from.
  - If BASEMAP then the backend will look up imagery from the configured basemap tile URL and not respond to requests for downloading
  - If USA_LAYER then the backend will look up imagery from the full USA tile_index (i.e. how we usually do it) and requests for downloading will be executed on the same tiles
  - If CUSTOM then the backend will query the "shapes_fn" and "data_fn" files for how/what to download, and downloading will happen similarly
shapes_fn - should be a path, relative to `frontend_server.ROOT_DIR`, of a geojson defining shapes over which the data_fn file is valid. When a "download" happens the raster specified by "data_fn" will be masked with one of these shapes.
data_fn - should be a path, relative to `frontend_server.ROOT_DIR`, of a raster file defining the imagery to use
shapes - list of `shapely.geometry.shape` objects created from the shapes in "shapes_fn".
shapes_crs - the CRS of the shapes_fn
padding - NOTE: Optional, only used in DatasetTypes.CUSTOM - defines the padding used in raster extraction, used to get the required 240x240 input
'''
DATASETS = {} # This dictionary should be the only thing imported from other files

# Static definitions of every dataset the tool can serve.  The loader code at
# the bottom of this file reads each entry at import time, loads its shape
# layers from disk, and registers usable datasets in DATASETS.
DATASET_DEFINITIONS = {
    # Plain ESRI World Imagery basemap; no shape layers and no local raster.
    "esri_world_imagery": {
        "name": "ESRI World Imagery",
        "imagery_metadata": "ESRI World Imagery",
        "data_layer_type": DatasetTypes.BASEMAP,
        "data_url": 'https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}',
        "data_padding": 0.0005,  # degrees (EPSG:4326)
        "leafletTileLayer": {
            "url": 'https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}',
            "args": {
                "minZoom": 4,
                "maxZoom": 20,
                "attribution": 'Tiles © Esri — Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community'
            }
        },
        "shape_layers": None,
        "location": {
            "center": [38, -88],
            "initialZoom": 4,
            "bounds": None
        }
    },
    # ESRI World Imagery display, but data served from the USA NAIP layers.
    "esri_world_imagery_naip": {
        "name": "ESRI World Imagery",
        "imagery_metadata": "ESRI World Imagery",
        "data_layer_type": DatasetTypes.USA_LAYER,
        "data_padding": 20,  # raster CRS units
        "leafletTileLayer": {
            "url": 'https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}',
            "args": {
                "minZoom": 4,
                "maxZoom": 20,
                "attribution": 'Tiles © Esri — Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community'
            }
        },
        "shape_layers": None,
        "location": {
            "center": [38, -88],
            "initialZoom": 4,
            "bounds": None
        }
    },
    # Custom GeoTIFF over a user-study area in New York state.
    "user_study_5": {
        "name": "User Study Area 5",
        "imagery_metadata": "NAIP Imagery",
        "data_layer_type": DatasetTypes.CUSTOM,
        "data_fn": "tiles/user_study_5.tif",
        "data_padding": 20,
        "leafletTileLayer": {
            "url": 'tiles/user_study_5/{z}/{x}/{y}.png',
            "args": {
                "tms": True,
                "minZoom": 13,
                "maxNativeZoom": 18,
                "maxZoom": 20,
                "attribution": 'Georeferenced Image'
            }
        },
        "shape_layers": [
            {"name": "Area boundary", "shapes_fn": "shapes/user_study_5_outline.geojson", "zone_name_key": None}
        ],
        "location": {
            "center": [42.448269618302362, -75.110429001207137],
            "initialZoom": 13,
            "bounds": None
        }
    },
    # Sentinel imagery over Yangon with four administrative shape layers.
    "yangon_sentinel": {
        "name": "Yangon, Myanmar",
        "imagery_metadata": "Sentinel Imagery",
        "data_layer_type": DatasetTypes.CUSTOM,
        "data_fn": "tiles/yangon.tif",
        "data_padding": 1100,
        "leafletTileLayer": {
            "url": 'tiles/yangon/{z}/{x}/{y}.png',
            "args": {
                "tms": True,
                "minZoom": 10,
                "maxNativeZoom": 16,
                "maxZoom": 20,
                "attribution": 'Georeferenced Image'
            }
        },
        "shape_layers": [
            {"name": "States", "shapes_fn": "shapes/yangon_sentinel_admin_1_clipped.geojson", "zone_name_key": "ST"},
            {"name": "Districts", "shapes_fn": "shapes/yangon_sentinel_admin_2_clipped.geojson", "zone_name_key": "DT"},
            {"name": "Townships", "shapes_fn": "shapes/yangon_sentinel_admin_3_clipped.geojson", "zone_name_key": "TS"},
            {"name": "Wards", "shapes_fn": "shapes/yangon_sentinel_admin_4_clipped.geojson", "zone_name_key": "Ward"}
        ],
        "location": {
            "center": [16.66177, 96.326427],
            "initialZoom": 10,
            "bounds": None
        }
    },
    # Sentinel imagery over Ho Chi Minh City with three admin shape layers.
    "hcmc_sentinel": {
        "name": "Hồ Chí Minh City, Vietnam",
        "imagery_metadata": "Sentinel Imagery",
        "data_layer_type": DatasetTypes.CUSTOM,
        "data_fn": "tiles/hcmc_sentinel.tif",
        "data_padding": 1100,
        "leafletTileLayer": {
            "url": 'tiles/hcmc_sentinel_tiles/{z}/{x}/{y}.png',
            "args": {
                "tms": True,
                "minZoom": 10,
                "maxNativeZoom": 16,
                "maxZoom": 20,
                "attribution": 'Georeferenced Image'
            }
        },
        "shape_layers": [
            {"name": "Provinces", "shapes_fn": "shapes/hcmc_sentinel_admin_1_clipped.geojson", "zone_name_key": "NAME_1"},
            {"name": "Districts", "shapes_fn": "shapes/hcmc_sentinel_admin_2_clipped.geojson", "zone_name_key": "NAME_2"},
            {"name": "Wards", "shapes_fn": "shapes/hcmc_sentinel_admin_3_clipped.geojson", "zone_name_key": "NAME_3"}
        ],
        "location": {
            "center": [10.682, 106.752],
            "initialZoom": 11,
            "bounds": None
        }
    },
    # LiDAR-derived imagery over Yangon with four admin shape layers.
    "yangon_lidar": {
        "name": "Yangon, Myanmar",
        "imagery_metadata": "LiDAR Imagery",
        "data_layer_type": DatasetTypes.CUSTOM,
        "data_fn": "tiles/yangon_lidar.tif",
        "data_padding": 20,
        "leafletTileLayer": {
            "url": 'tiles/yangon_lidar/{z}/{x}/{y}.png',
            "args": {
                "tms": True,
                "minZoom": 10,
                "maxNativeZoom": 20,
                "maxZoom": 21,
                "attribution": 'Georeferenced Image'
            }
        },
        "shape_layers": [
            {"name": "States", "shapes_fn": "shapes/yangon_lidar_admin_1_clipped.geojson", "zone_name_key": "ST"},
            {"name": "Districts", "shapes_fn": "shapes/yangon_lidar_admin_2_clipped.geojson", "zone_name_key": "DT"},
            {"name": "Townships", "shapes_fn": "shapes/yangon_lidar_admin_3_clipped.geojson", "zone_name_key": "TS"},
            {"name": "Wards", "shapes_fn": "shapes/yangon_lidar_admin_4_clipped.geojson", "zone_name_key": "Ward"}
        ],
        "location": {
            "center": [16.7870, 96.1450],
            "initialZoom": 15,
            "bounds": None
        }
    },
    # Digital Globe imagery over the Thu Duc district of Ho Chi Minh City.
    "hcmc_dg": {
        "name": "Thủ Đức District, Hồ Chí Minh City, Vietnam",
        "imagery_metadata": "Digital Globe Imagery",
        "data_layer_type": DatasetTypes.CUSTOM,
        "data_fn": "tiles/HCMC.tif",
        "data_padding": 0,
        "leafletTileLayer": {
            "url": 'tiles/HCMC/{z}/{x}/{y}.png',
            "args": {
                "tms": True,
                "minZoom": 14,
                "maxNativeZoom": 18,
                "maxZoom": 21,
                "attribution": 'Georeferenced Image'
            }
        },
        "shape_layers": [
            {"name": "Provinces", "shapes_fn": "shapes/hcmc_digital-globe_admin_1_clipped.geojson", "zone_name_key": "NAME_1"},
            {"name": "Districts", "shapes_fn": "shapes/hcmc_digital-globe_admin_2_clipped.geojson", "zone_name_key": "NAME_2"},
            {"name": "Wards", "shapes_fn": "shapes/hcmc_digital-globe_admin_3_clipped.geojson", "zone_name_key": "NAME_3"}
        ],
        "location": {
            "center": [10.838, 106.750],
            "initialZoom": 14,
            "bounds": None
        }
    },
    # Airbus imagery over Virginia with a grid shape layer and explicit bounds.
    "airbus": {
        "name": "Virginia, USA",
        "data_layer_type": DatasetTypes.CUSTOM,
        "imagery_metadata": "Airbus Imagery",
        "data_fn": "tiles/airbus_epsg4326.tif",
        "data_padding": 0.003,  # degrees (raster is EPSG:4326)
        "leafletTileLayer": {
            "url": 'tiles/airbus/{z}/{x}/{y}.png',
            "args": {
                "tms": True,
                "minZoom": 13,
                "maxNativeZoom": 18,
                "maxZoom": 21,
                "attribution": 'Georeferenced Image',
            }
        },
        "shape_layers": [
            {"name": "Grid", "shapes_fn": "shapes/airbus-data-grid-epsg4326.geojson", "zone_name_key": None}
        ],
        "location": {
            "center": [36.80, -76.12],
            "initialZoom": 14,
            "bounds": [[36.882932, -76.2623637], [36.7298842, -76.0249016]]
        }
    },
    # NAIP imagery over Maryland served via the USA layer stack.
    # NOTE(review): this entry's "location" has no "bounds" key, unlike the
    # others — the javascript-string builder does not read it, but confirm.
    "chesapeake": {
        "name": "Maryland, USA",
        "data_layer_type": DatasetTypes.USA_LAYER,
        "imagery_metadata": "NAIP Imagery",
        "data_padding": 20,
        "leafletTileLayer": {
            "url": 'tiles/chesapeake_test/{z}/{x}/{y}.png',
            "args": {
                "tms": True,
                "minZoom": 2,
                "maxNativeZoom": 18,
                "maxZoom": 20,
                "attribution": 'Georeferenced Image',
            }
        },
        "shape_layers": None,
        "location": {
            "center": [38.11437, -75.99980],
            "initialZoom": 10,
        }
    }
}
def load_geojson_as_list(fn):
    """Load polygons from the vector file `fn`.

    Returns:
        (shapes, areas, src_crs): shapely geometries in the file's native CRS,
        their areas in square kilometers (computed in the local UTM zone), and
        the file's CRS.

    Raises:
        ValueError: for geometry types other than Polygon/MultiPolygon.
    """
    # Dropped the unused `crs = None` local from the original.
    shapes = []
    areas = []
    with fiona.open(fn) as f:
        src_crs = f.crs
        for row in f:
            geom = row["geometry"]
            # Grab one vertex to decide which UTM zone to project into for
            # metric area computation.
            if geom["type"] == "Polygon":
                lon, lat = geom["coordinates"][0][0]
            elif geom["type"] == "MultiPolygon":
                lon, lat = geom["coordinates"][0][0][0]
            else:
                raise ValueError("Polygons and MultiPolygons only")

            zone_number = utm.latlon_to_zone_number(lat, lon)
            hemisphere = "+north" if lat > 0 else "+south"
            dest_crs = "+proj=utm +zone=%d %s +datum=WGS84 +units=m +no_defs" % (zone_number, hemisphere)
            projected_geom = fiona.transform.transform_geom(src_crs, dest_crs, geom)

            # we calculate the area in square meters then convert to square kilometers
            area = shapely.geometry.shape(projected_geom).area / 1000000.0

            areas.append(area)
            shapes.append(shapely.geometry.shape(geom))
    return shapes, areas, src_crs
def get_javascript_string_from_dataset(dataset):
    """Render a dataset definition as the javascript object literal consumed
    by the frontend (see js/tile_layers.js).

    Python `True`/`False`/`None` are rewritten to their javascript
    equivalents.  BUG FIX: the original only translated `True` in the leaflet
    args, so any `False` or `None` there would have produced invalid
    javascript; `False` and `None` are now translated as well (no output
    change for the current definitions, which contain neither).
    """
    shape_layers = dataset["shape_layers"]
    if shape_layers is not None:
        shapes_str = str([
            {"name": layer["name"], "shapes_fn": layer["shapes_fn"], "zone_name_key": layer["zone_name_key"]}
            for layer in shape_layers
        ]).replace("None", "null")
    else:
        shapes_str = "null"

    outputs = {
        "center": dataset["location"]["center"],
        "initialZoom": dataset["location"]["initialZoom"],
        "name": dataset["name"],
        "imageMetadata": dataset["imagery_metadata"],
        "url": dataset["leafletTileLayer"]["url"],
        "kwargs": str(dataset["leafletTileLayer"]["args"]).replace("True", "true").replace("False", "false").replace("None", "null"),
        "shapes": shapes_str
    }
    return '''{{
    "location": [{center}, {initialZoom}, "{name}", "{imageMetadata}"],
    "tileObject": L.tileLayer("{url}", {kwargs}),
    "shapes": {shapes}
}}'''.format(**outputs)
# When this module is imported we eagerly process every dataset definition:
# shape layers are read from disk, a missing file disables the dataset, and a
# DataLoader plus frontend javascript snippet are built for each usable one.
# (Dropped the original's unused `javascript_string = ""` local.)
for dataset_key, dataset in DATASET_DEFINITIONS.items():
    loaded = True  # flips to False if any required file is missing
    shape_layers = {}

    # Load shapes first
    if dataset["shape_layers"] is not None:
        for shape_layer in dataset["shape_layers"]:
            fn = os.path.join(ROOT_DIR, shape_layer["shapes_fn"])
            if os.path.exists(fn):
                shapes, areas, crs = load_geojson_as_list(fn)
                # Augment the definition dict in place with the loaded data.
                shape_layer["geoms"] = shapes
                shape_layer["areas"] = areas
                shape_layer["crs"] = crs["init"]
                shape_layers[shape_layer["name"]] = shape_layer
            else:
                print("WARNING: %s doesn't exist, this server will not be able to serve the '%s' dataset" % (fn, dataset_key))
                loaded = False

    # Check to see if data_fn exists
    if "data_fn" in dataset:
        fn = os.path.join(ROOT_DIR, dataset["data_fn"])
        if not os.path.exists(fn):
            print("WARNING: %s doesn't exist, this server will not be able to serve the '%s' dataset" % (fn, dataset_key))
            loaded = False

    if loaded:
        # Pick the DataLoader implementation for this dataset type.
        if dataset["data_layer_type"] == DatasetTypes.CUSTOM:
            data_loader = DataLoaderCustom(dataset["data_fn"], shape_layers, dataset["data_padding"])
        elif dataset["data_layer_type"] == DatasetTypes.USA_LAYER:
            data_loader = DataLoaderUSALayer(shape_layers, dataset["data_padding"])
        elif dataset["data_layer_type"] == DatasetTypes.BASEMAP:
            data_loader = DataLoaderBasemap(dataset["data_url"], dataset["data_padding"])
        else:
            raise ValueError("DatasetType not recognized")

        DATASETS[dataset_key] = {
            "data_loader": data_loader,
            "shape_layers": shape_layers,
            "javascript_string": get_javascript_string_from_dataset(dataset)
        }
    else:
        pass  # we are missing some files needed to load this dataset

Просмотреть файл

@ -1,194 +0,0 @@
import os
import utm
import fiona
import fiona.transform
import shapely
import shapely.geometry
from enum import Enum
from web_tool import ROOT_DIR
def load_geojson_as_list(fn):
    # Load polygons from the vector file `fn`, returning (shapes, areas,
    # src_crs): shapely geometries in the file's native CRS, their areas in
    # square kilometers (computed in the local UTM zone), and the file's CRS.
    # Raises ValueError for geometry types other than Polygon/MultiPolygon.
    shapes = []
    areas = []
    crs = None  # NOTE(review): never used; the function returns src_crs below
    with fiona.open(fn) as f:
        src_crs = f.crs
        for row in f:
            geom = row["geometry"]
            # Grab one vertex to decide which UTM zone to project into for
            # metric area computation.
            if geom["type"] == "Polygon":
                lon, lat = geom["coordinates"][0][0]
            elif geom["type"] == "MultiPolygon":
                lon, lat = geom["coordinates"][0][0][0]
            else:
                raise ValueError("Polygons and MultiPolygons only")
            zone_number = utm.latlon_to_zone_number(lat, lon)
            hemisphere = "+north" if lat > 0 else "+south"
            dest_crs = "+proj=utm +zone=%d %s +datum=WGS84 +units=m +no_defs" % (zone_number, hemisphere)
            projected_geom = fiona.transform.transform_geom(src_crs, dest_crs, geom)
            area = shapely.geometry.shape(projected_geom).area / 1000000.0 # we calculate the area in square meters then convert to square kilometers
            shape = shapely.geometry.shape(geom)
            areas.append(area)
            shapes.append(shape)
    return shapes, areas, src_crs
class DataLayerTypes(Enum):
    """Describes where a data layer's imagery comes from."""

    ESRI_WORLD_IMAGERY = 1  # ESRI World Imagery basemap; no download support
    USA_NAIP_LIST = 2       # full USA NAIP tile index lookup
    CUSTOM = 3              # user-supplied raster masked by "shapes_fn" zones
'''
This dictionary defines how the backend tool will return data to the frontend.
An entry is formatted like below:
"LAYER NAME": {
"data_layer_type": DataLayerTypes.ESRI_WORLD_IMAGERY,
"shapes_fn": None,
"data_fn": None,
"shapes": None, # NOTE: this is always `None` and populated automatically when this file loads (see code at bottom of file)
"shapes_crs": None # NOTE: this is always `None` and populated automatically when this file loads (see code at bottom of file)
"padding": None # NOTE: this is optional and only used in DataLayerTypes.CUSTOM
}
LAYER_NAME - should correspond to an entry in js/tile_layers.js
data_layer_type - should be an item from the DataLayerTypes enum and describes where the data comes from.
- If ESRI_WORLD_IMAGERY then the backend will lookup imagery from the ESRI World Imagery basemap and not respond to requests for downloading
- If USA_NAIP_LIST then the backend will lookup imagery from the full USA tile_index (i.e. how we usually do it) and requests for downloading will be executed on the same tiles
- If CUSTOM then the backend will query the "shapes_fn" and "data_fn" files for how/what to download, downloading will happen similarly
shapes_fn - should be a path, relative to `frontend_server.ROOT_DIR`, of a geojson defining shapes over which the data_fn file is valid. When a "download" happens the raster specified by "data_fn" will be masked with one of these shapes.
data_fn - should be a path, relative to `frontend_server.ROOT_DIR`, of a raster file defining the imagery to use
shapes - list of `shapely.geometry.shape` objects created from the shapes in "shapes_fn".
shapes_crs - the CRS of the shapes_fn
padding - NOTE: Optional, only used in DataLayerTypes.CUSTOM - defines the padding used in raster extraction, used to get the required 240x240 input
'''
# NOTE(review): "shapes" layer dicts are mutated in-place at import time by the
# loop at the bottom of this file, which adds "shapes_geoms", "shapes_areas",
# and "shapes_crs" keys loaded from each layer's "shapes_fn" geojson.
DATA_LAYERS = {
    # --- Basemap-only datasets: no local raster ("data_fn" is None) ---
    "esri_world_imagery": {
        "data_layer_type": DataLayerTypes.ESRI_WORLD_IMAGERY,
        "shapes": None,
        "data_fn": None,
    },
    "esri_world_imagery_naip": {
        "data_layer_type": DataLayerTypes.USA_NAIP_LIST,
        "shapes": None,
        "data_fn": None,
    },
    "osm": {
        "data_layer_type": DataLayerTypes.ESRI_WORLD_IMAGERY,
        "shapes": None,
        "data_fn": None,
    },
    # --- NAIP-index datasets: imagery served from the USA tile index ---
    "chesapeake": {
        "data_layer_type": DataLayerTypes.USA_NAIP_LIST,
        "shapes": None,
        "data_fn": None,
    },
    "demo_set_1": {
        "data_layer_type": DataLayerTypes.USA_NAIP_LIST,
        "shapes": None,
        "data_fn": None,
    },
    "user_study_1": {
        "data_layer_type": DataLayerTypes.USA_NAIP_LIST,
        "shapes": None,
        "data_fn": None,
    },
    "user_study_2": {
        "data_layer_type": DataLayerTypes.USA_NAIP_LIST,
        "shapes": None,
        "data_fn": None,
    },
    "user_study_3": {
        "data_layer_type": DataLayerTypes.USA_NAIP_LIST,
        "shapes": None,
        "data_fn": None,
    },
    "user_study_4": {
        "data_layer_type": DataLayerTypes.USA_NAIP_LIST,
        "shapes": None,
        "data_fn": None,
    },
    # --- CUSTOM datasets: local raster "data_fn" masked by "shapes" layers ---
    "user_study_5": {
        "data_layer_type": DataLayerTypes.CUSTOM,
        "shapes": [
            {"name": "Area boundary", "shapes_fn": "shapes/user_study_5_outline.geojson", "zone_name_key": None}
        ],
        "data_fn": "tiles/user_study_5.tif",
        "padding": 20
    },
    "philipsburg_mt": {
        "data_layer_type": DataLayerTypes.USA_NAIP_LIST,
        "shapes": None,
        "data_fn": None,
    },
    # NOTE(review): marked CUSTOM but has no shapes/data_fn configured yet.
    "aceh": {
        "data_layer_type": DataLayerTypes.CUSTOM,
        "shapes": None,
        "data_fn": None,
    },
    "yangon_sentinel": {
        "data_layer_type": DataLayerTypes.CUSTOM,
        "shapes": [
            {"name": "States", "shapes_fn": "shapes/yangon_sentinel_admin_1_clipped.geojson", "zone_name_key": "ST"},
            {"name": "Districts", "shapes_fn": "shapes/yangon_sentinel_admin_2_clipped.geojson", "zone_name_key": "DT"},
            {"name": "Townships", "shapes_fn": "shapes/yangon_sentinel_admin_3_clipped.geojson", "zone_name_key": "TS"},
            {"name": "Wards", "shapes_fn": "shapes/yangon_sentinel_admin_4_clipped.geojson", "zone_name_key": "Ward"}
        ],
        "data_fn": "tiles/yangon.tif",
        "padding": 1100
    },
    "hcmc_sentinel": {
        "data_layer_type": DataLayerTypes.CUSTOM,
        "shapes": [
            {"name": "Provinces", "shapes_fn": "shapes/hcmc_sentinel_admin_1_clipped.geojson", "zone_name_key": "NAME_1"},
            {"name": "Districts", "shapes_fn": "shapes/hcmc_sentinel_admin_2_clipped.geojson", "zone_name_key": "NAME_2"},
            {"name": "Wards", "shapes_fn": "shapes/hcmc_sentinel_admin_3_clipped.geojson", "zone_name_key": "NAME_3"}
        ],
        "data_fn": "tiles/hcmc_sentinel.tif",
        "padding": 1100
    },
    # "padding" omitted here — callers fall back to a default padding value.
    "yangon_lidar": {
        "data_layer_type": DataLayerTypes.CUSTOM,
        "shapes": [
            {"name": "States", "shapes_fn": "shapes/yangon_sentinel_admin_1_clipped.geojson", "zone_name_key": "ST"},
            {"name": "Districts", "shapes_fn": "shapes/yangon_sentinel_admin_2_clipped.geojson", "zone_name_key": "DT"},
            {"name": "Townships", "shapes_fn": "shapes/yangon_sentinel_admin_3_clipped.geojson", "zone_name_key": "TS"},
            {"name": "Wards", "shapes_fn": "shapes/yangon_sentinel_admin_4_clipped.geojson", "zone_name_key": "Ward"}
        ],
        "data_fn": "tiles/yangon_lidar.tif",
    },
    "hcmc_dg": {
        "data_layer_type": DataLayerTypes.CUSTOM,
        "shapes": [
            {"name": "Provinces", "shapes_fn": "shapes/hcmc_digital-globe_admin_1_clipped.geojson", "zone_name_key": "NAME_1"},
            {"name": "Districts", "shapes_fn": "shapes/hcmc_digital-globe_admin_2_clipped.geojson", "zone_name_key": "NAME_2"},
            {"name": "Wards", "shapes_fn": "shapes/hcmc_digital-globe_admin_3_clipped.geojson", "zone_name_key": "NAME_3"}
        ],
        "data_fn": "tiles/HCMC.tif",
        "padding": 0
    },
    "airbus": {
        "data_layer_type": DataLayerTypes.CUSTOM,
        "shapes": [
            {"name": "Grid", "shapes_fn": "shapes/airbus-data-grid-epsg4326.geojson", "zone_name_key": "id"}
        ],
        "data_fn": "tiles/airbus_epsg26918.tif",
        "padding": 150
    }
}
# At import time, load the geojson zones for every dataset that declares a
# "shapes" list, attaching the parsed geometries, areas, and CRS to each
# layer dict so the server can serve that dataset.
for k, data_layer in DATA_LAYERS.items():
    if data_layer["shapes"] is None:
        continue
    print("Loading shapes for the %s dataset" % (k))
    for zone_layer in data_layer["shapes"]:
        fn = os.path.join(ROOT_DIR, zone_layer["shapes_fn"])
        if not os.path.exists(fn):
            print("WARNING: %s doesn't exist, this server will not be able to serve the '%s' dataset" % (fn, k))
            continue
        shapes, areas, crs = load_geojson_as_list(fn)
        zone_layer["shapes_geoms"] = shapes
        zone_layer["shapes_areas"] = areas
        zone_layer["shapes_crs"] = crs["init"]

Просмотреть файл

@ -127,7 +127,7 @@
================================================== -->
<script src="js/main.js" type="text/javascript"></script>
<script src="js/utils.js" type="text/javascript"></script>
<script src="js/tile_layers.js" type="text/javascript"></script>
<script src="js/datasets.js" type="text/javascript"></script>
<script src="js/globals.js" type="text/javascript"></script>
<!-- List of backend URLS to query

Просмотреть файл

@ -32,9 +32,9 @@ import joblib
from azure.cosmosdb.table.tableservice import TableService
from azure.cosmosdb.table.models import Entity
import DataLoader
from DataLoaderNew import warp_data_to_3857, crop_data_by_extent
from Heatmap import Heatmap
from TileLayers import DataLayerTypes, DATA_LAYERS
from Datasets import DATASETS
from Utils import get_random_string, class_prediction_to_img, get_shape_layer_by_name, AtomicCounter
import ServerModelsNIPS
@ -250,7 +250,7 @@ def record_correction():
Heatmap.increment(tile.z, tile.y, tile.x)
#
naip_crs, naip_transform, naip_index, padding = Session.current_transform
naip_crs, naip_transform, naip_index = Session.current_transform
xs, ys = fiona.transform.transform(origin_crs, naip_crs.to_dict(), [tlon,blon], [tlat,blat])
@ -319,24 +319,12 @@ def pred_patch():
# Load the input data sources for the given tile
# ------------------------------------------------------
if dataset not in DATA_LAYERS:
if dataset not in DATASETS:
raise ValueError("Dataset doesn't seem to be valid, do the datasets in js/tile_layers.js correspond to those in TileLayers.py")
if DATA_LAYERS[dataset]["data_layer_type"] == DataLayerTypes.ESRI_WORLD_IMAGERY:
padding = 0.0005
naip_data, naip_crs, naip_transform, naip_bounds, naip_index = DataLoader.get_esri_data_by_extent(extent, padding=padding)
elif DATA_LAYERS[dataset]["data_layer_type"] == DataLayerTypes.USA_NAIP_LIST:
padding = 20
naip_data, naip_crs, naip_transform, naip_bounds, naip_index = DataLoader.get_usa_data_by_extent(extent, padding=padding, geo_data_type=DataLoader.GeoDataTypes.NAIP)
elif DATA_LAYERS[dataset]["data_layer_type"] == DataLayerTypes.CUSTOM:
if "padding" in DATA_LAYERS[dataset]:
padding = DATA_LAYERS[dataset]["padding"]
else:
padding = 20
naip_data, naip_crs, naip_transform, naip_bounds, naip_index = DataLoader.get_custom_data_by_extent(extent, padding=padding, data_fn=DATA_LAYERS[dataset]["data_fn"])
naip_data, naip_crs, naip_transform, naip_bounds, naip_index = DATASETS[dataset]["data_loader"].get_data_from_extent(extent)
naip_data = np.rollaxis(naip_data, 0, 3) # we do this here instead of get_data_by_extent because not all GeoDataTypes will have a channel dimension
Session.current_transform = (naip_crs, naip_transform, naip_index, padding)
Session.current_transform = (naip_crs, naip_transform, naip_index)
# ------------------------------------------------------
# Step 3
@ -352,8 +340,8 @@ def pred_patch():
# Step 4
# Warp output to EPSG:3857 and crop off the padded area
# ------------------------------------------------------
output, output_bounds = DataLoader.warp_data_to_3857(output, naip_crs, naip_transform, naip_bounds)
output = DataLoader.crop_data_by_extent(output, output_bounds, extent)
output, output_bounds = warp_data_to_3857(output, naip_crs, naip_transform, naip_bounds)
output = crop_data_by_extent(output, output_bounds, extent)
# ------------------------------------------------------
# Step 5
@ -387,30 +375,16 @@ def pred_tile():
dataset = data["dataset"]
zone_layer_name = data["zoneLayerName"]
if dataset not in DATA_LAYERS:
raise ValueError("Dataset doesn't seem to be valid, do the datasets in js/tile_layers.js correspond to those in TileLayers.py")
shape_area = None
if DATA_LAYERS[dataset]["data_layer_type"] == DataLayerTypes.ESRI_WORLD_IMAGERY:
if dataset not in DATASETS:
raise ValueError("Dataset doesn't seem to be valid, do the datasets in js/tile_layers.js correspond to those in TileLayers.py")
try:
naip_data, raster_profile, raster_transform, raster_bounds, raster_crs = DATASETS[dataset]["data_loader"].get_data_from_shape_by_extent(extent, zone_layer_name)
naip_data = np.rollaxis(naip_data, 0, 3)
shape_area = DATASETS[dataset]["data_loader"].get_area_from_shape_by_extent(extent, zone_layer_name)
except NotImplementedError as e:
bottle.response.status = 400
return json.dumps({"error": "Cannot currently download with ESRI World Imagery"})
elif DATA_LAYERS[dataset]["data_layer_type"] == DataLayerTypes.USA_NAIP_LIST:
naip_data, raster_profile, raster_transform, raster_bounds, raster_crs = DataLoader.download_usa_data_by_extent(extent, geo_data_type=DataLoader.GeoDataTypes.NAIP)
elif DATA_LAYERS[dataset]["data_layer_type"] == DataLayerTypes.CUSTOM:
dl = DATA_LAYERS[dataset]
layer = get_shape_layer_by_name(dl["shapes"], zone_layer_name)
if layer is None:
bottle.response.status = 400
return json.dumps({"error": "You have not selected a set of zones to use"})
print("Downloading using shapes from layer: %s" % (layer["name"]))
shape_idx, _ = DataLoader.lookup_shape_by_extent(extent, layer["shapes_geoms"], layer["shapes_crs"])
shape_area = layer["shapes_areas"][shape_idx]
naip_data, raster_profile, raster_transform, raster_bounds, raster_crs = DataLoader.download_custom_data_by_extent(extent, shapes=layer["shapes_geoms"], shapes_crs=layer["shapes_crs"], data_fn=dl["data_fn"])
naip_data = np.rollaxis(naip_data, 0, 3)
return json.dumps({"error": "Cannot currently download imagery with 'Basemap' based datasets"})
output = Session.model.run(naip_data, extent, True)
@ -421,7 +395,6 @@ def pred_tile():
nodata_mask = np.sum(naip_data == 0, axis=2) == 4
output_hard[nodata_mask] = 255
vals, counts = np.unique(output_hard[~nodata_mask], return_counts=True)
# ------------------------------------------------------
# Step 4
@ -432,7 +405,7 @@ def pred_tile():
img_hard = cv2.cvtColor(img_hard, cv2.COLOR_RGB2BGRA)
img_hard[nodata_mask] = [0,0,0,0]
img_hard, img_hard_bounds = DataLoader.warp_data_to_3857(img_hard, raster_crs, raster_transform, raster_bounds, resolution=10)
img_hard, img_hard_bounds = warp_data_to_3857(img_hard, raster_crs, raster_transform, raster_bounds, resolution=10)
cv2.imwrite(os.path.join(ROOT_DIR, "downloads/%s.png" % (tmp_id)), img_hard)
data["downloadPNG"] = "downloads/%s.png" % (tmp_id)
@ -478,35 +451,14 @@ def get_input():
extent = data["extent"]
dataset = data["dataset"]
# ------------------------------------------------------
# Step 1
# Transform the input extent into a shapely geometry
# Find the tile associated with the geometry
# ------------------------------------------------------
# ------------------------------------------------------
# Step 2
# Load the input data sources for the given tile
# ------------------------------------------------------
if dataset not in DATA_LAYERS:
raise ValueError("Dataset doesn't seem to be valid, do the datasets in js/tile_layers.js correspond to those in TileLayers.py")
if dataset not in DATASETS:
raise ValueError("Dataset doesn't seem to be valid, please check Datasets.py")
if DATA_LAYERS[dataset]["data_layer_type"] == DataLayerTypes.ESRI_WORLD_IMAGERY:
padding = 0.0005
naip_data, naip_crs, naip_transform, naip_bounds, naip_index = DataLoader.get_esri_data_by_extent(extent, padding=padding)
elif DATA_LAYERS[dataset]["data_layer_type"] == DataLayerTypes.USA_NAIP_LIST:
padding = 20
naip_data, naip_crs, naip_transform, naip_bounds, naip_index = DataLoader.get_usa_data_by_extent(extent, padding=padding, geo_data_type=DataLoader.GeoDataTypes.NAIP)
elif DATA_LAYERS[dataset]["data_layer_type"] == DataLayerTypes.CUSTOM:
if "padding" in DATA_LAYERS[dataset]:
padding = DATA_LAYERS[dataset]["padding"]
else:
padding = 20
naip_data, naip_crs, naip_transform, naip_bounds, naip_index = DataLoader.get_custom_data_by_extent(extent, padding=padding, data_fn=DATA_LAYERS[dataset]["data_fn"])
naip_data, naip_crs, naip_transform, naip_bounds, naip_index = DATASETS[dataset]["data_loader"].get_data_from_extent(extent)
naip_data = np.rollaxis(naip_data, 0, 3)
naip_data, new_bounds = DataLoader.warp_data_to_3857(naip_data, naip_crs, naip_transform, naip_bounds)
naip_data = DataLoader.crop_data_by_extent(naip_data, new_bounds, extent)
naip_data, new_bounds = warp_data_to_3857(naip_data, naip_crs, naip_transform, naip_bounds)
naip_data = crop_data_by_extent(naip_data, new_bounds, extent)
naip_img = naip_data[:,:,:3].copy().astype(np.uint8) # keep the RGB channels to save as a color image
@ -517,21 +469,41 @@ def get_input():
bottle.response.status = 200
return json.dumps(data)
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
@bottle.get("/")
def root_app():
def get_root_app():
return bottle.static_file("index.html", root="./" + ROOT_DIR + "/")
@bottle.get("/favicon.ico")
def favicon():
def get_datasets():
tile_layers = "var tileLayers = {\n"
for dataset_name, dataset in DATASETS.items():
tile_layers += '"%s": %s,\n' % (dataset_name, dataset["javascript_string"])
tile_layers += "};"
interesting_locations = '''var interestingLocations = [
L.marker([47.60, -122.15]).bindPopup('Bellevue, WA'),
L.marker([39.74, -104.99]).bindPopup('Denver, CO'),
L.marker([37.53, -77.44]).bindPopup('Richmond, VA'),
L.marker([39.74, -104.99]).bindPopup('Denver, CO'),
L.marker([37.53, -77.44]).bindPopup('Richmond, VA'),
L.marker([33.746526, -84.387522]).bindPopup('Atlanta, GA'),
L.marker([32.774250, -96.796122]).bindPopup('Dallas, TX'),
L.marker([40.106675, -88.236409]).bindPopup('Champaign, IL'),
L.marker([38.679485, -75.874667]).bindPopup('Dorchester County, MD'),
L.marker([34.020618, -118.464412]).bindPopup('Santa Monica, CA'),
L.marker([37.748517, -122.429771]).bindPopup('San Fransisco, CA'),
L.marker([38.601951, -98.329227]).bindPopup('Ellsworth County, KS')
];'''
return tile_layers + '\n\n' + interesting_locations
def get_favicon():
return
@bottle.get("/<filepath:re:.*>")
def everything_else(filepath):
def get_everything_else(filepath):
return bottle.static_file(filepath, root="./" + ROOT_DIR + "/")
#---------------------------------------------------------------------------------------
#---------------------------------------------------------------------------------------
@ -668,9 +640,10 @@ def main():
app.route("/heatmap/<z>/<y>/<x>", method="GET", callback=do_heatmap)
app.route("/", method="GET", callback=root_app)
app.route("/favicon.ico", method="GET", callback=favicon)
app.route("/<filepath:re:.*>", method="GET", callback=everything_else)
app.route("/", method="GET", callback=get_root_app)
app.route("/js/datasets.js", method="GET", callback=get_datasets)
app.route("/favicon.ico", method="GET", callback=get_favicon)
app.route("/<filepath:re:.*>", method="GET", callback=get_everything_else)
bottle_server_kwargs = {
"host": args.host,