Commit afcb0fa5 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

Bugfix for "missing L2B mask files" during continued processing that earlier...

Bugfix for "missing L2B mask files" during continued processing that earlier stopped in L2B processing; implemented proper MGRS tiling scheme according to project agreements
GEOP:
- updated some deprecated 3rd-party-library references
- added transform_any_prj(): a function for transforming XY-coordinates from any source to any target projection
- warp_ndarray():
    - implemented extra option 'outExtent_within' allowing to output arrays with a larger geographical extent than the input image (needed for MGRS tiling scheme)
- improved some docstrings
- added prj_equal(): a function to quickly check if two projections are equal
- added snap_bounds_to_pixGrid(): a function to snap map bounds to a given pixel grid
- added clip_array_using_mapBounds(): a function for clipping arrays with a map info using a given bounding box
L0A_P:
- add_local_availability():
    - replaced a hard coded list of processing levels
    - bugfix for returning an invalid processing level if processing is already done
L1A_P:
- fill_from_disk(): bugfix for copying memory addresses  when fill_from_disk() is called in multiprocessing
- log_for_fullArr_or_firstTile(): bugfix for not handling empty arr_pos
- added get_subset_obj(), based on older code from helper_functions: a function to generate subsets of GMS objects based on bounding box that can contain image AND map coordinates whereas map coordinates can also have a different projection (needed for proper generation of MGRS tiles)
 - added to_MGRS_tiles(), based on older code from helper_functions: a function for cutting a GMS object into MGRS tiles
 L1B_P:
 - L1B_object.apply_deshift_results():
     - bugfix for not updating geoinformations of 'masks' attribute after having applied spatial shift corrections
META:
- added 'data ignore value' to metadata
IO:
- refactored module 'gms_io' to 'io'
ARR:
- added submodule 'GeoArray' to io:
    - added class 'GeoArray': a class for simplifying array access, regardless of their actual memory location (in memory or on disk)
    - added _clip_array_at_mapPos(): a function for clipping a geocoded array using a given bounding box in the same projection like the array itself
    - added  get_array_at_mapPos(): a function for clipping a geocoded array using a given bounding box that can have any projection
OUT_W:
- added 'data ignore value' to enviHdr_keyOrder
- added 'data ignore value' to mask_to_ENVI_Classification()
- added set_output_nodataVal(): a function for adding a data ignore value to an already written file
- Obj2ENVI: bugfix for not handling empty arr_pos
DB_T:
- delete_processing_results(), GMS_JOB.__delete_procdata(), GMS_JOB.delete_procdata_of_entire_job(), GMS_JOB.delete_procdata_of_failed_sceneIDs(), added functionality to delete a specific processing level from disk
HLP_F:
- revised find_nearest(): it now supports an automatic rounding algorithm that rounds a value to the nearest neighbour
- replaced a deprecated 3rd-party function reference
- get_arrSubsetBounds_from_shapelyPolyLonLat(): added flag 'ensure_valid_coords'
MRGS_tile:
- added submodule "mgrs_tile" for easily retrieving information about MGRS tiles (bounding box, projection conversions, buffering, etc.)
SpatialIndexMediator:
- replaced wrong host address
PC:
- updated calls for MGRS tiling within L2C_map_1()
parent 0d17e8bb
...@@ -8,11 +8,11 @@ ...@@ -8,11 +8,11 @@
############################################################################### ###############################################################################
from . import algorithms from . import algorithms
from . import gms_io from . import io
from . import misc from . import misc
__all__=['algorithms', __all__=['algorithms',
'gms_io', 'io',
'misc'] 'misc']
__version__ = '20160905.01' __version__ = '20160905.01'
......
...@@ -47,7 +47,7 @@ except ImportError: ...@@ -47,7 +47,7 @@ except ImportError:
from gdalconst import * from gdalconst import *
from rasterio.warp import reproject as rio_reproject from rasterio.warp import reproject as rio_reproject
from rasterio.warp import calculate_default_transform as rio_calc_transform from rasterio.warp import calculate_default_transform as rio_calc_transform
from rasterio.warp import RESAMPLING from rasterio.warp import Resampling
import pyproj import pyproj
from pyorbital import astronomy from pyorbital import astronomy
import ephem import ephem
...@@ -55,9 +55,9 @@ from spectral.io import envi ...@@ -55,9 +55,9 @@ from spectral.io import envi
from shapely.geometry import Polygon from shapely.geometry import Polygon
from shapely.geometry import shape from shapely.geometry import shape
from shapely.geometry import MultiPoint from shapely.geometry import MultiPoint
from numba import jit, autojit from numba import jit
from ..gms_io import envifilehandling_BD as ef from ..io import envifilehandling_BD as ef
from ..misc import helper_functions as HLP_F from ..misc import helper_functions as HLP_F
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller) job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
...@@ -2751,41 +2751,51 @@ def transform_wgs84_to_utm(lon, lat, zone=None): ...@@ -2751,41 +2751,51 @@ def transform_wgs84_to_utm(lon, lat, zone=None):
UTM = pyproj.Proj(proj='utm', zone=get_utm_zone(lon) if zone is None else zone, ellps='WGS84') UTM = pyproj.Proj(proj='utm', zone=get_utm_zone(lon) if zone is None else zone, ellps='WGS84')
return UTM(lon, lat) return UTM(lon, lat)
def transform_any_prj(prj_src, prj_tgt, x, y):
    """Transform X/Y coordinates from any source to any target projection.

    :param prj_src: source projection (any format accepted by get_proj4info)
    :param prj_tgt: target projection (any format accepted by get_proj4info)
    :param x:       X coordinate(s) in the source projection
    :param y:       Y coordinate(s) in the source projection
    :return:        transformed (x, y) in the target projection
    """
    # normalize both projections to PROJ.4 strings before building pyproj objects
    proj_in = pyproj.Proj(get_proj4info(proj=prj_src))
    proj_out = pyproj.Proj(get_proj4info(proj=prj_tgt))
    return pyproj.transform(proj_in, proj_out, x, y)
def get_point_bounds(points): def get_point_bounds(points):
mp_points = MultiPoint(points) mp_points = MultiPoint(points)
(min_lon, min_lat, max_lon, max_lat) = mp_points.bounds (min_lon, min_lat, max_lon, max_lat) = mp_points.bounds
return min_lon, min_lat, max_lon, max_lat return min_lon, min_lat, max_lon, max_lat
def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None, outUL=None,
out_res=None, out_extent=None, out_dtype=None, rsp_alg=0, in_nodata=None, out_nodata=None): def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None, outUL=None, out_res=None,
out_extent=None, out_dtype=None, rsp_alg=0, in_nodata=None, out_nodata=None, outExtent_within=True):
"""Reproject / warp a numpy array with given geo information to target coordinate system. """Reproject / warp a numpy array with given geo information to target coordinate system.
:param ndarray: numpy.ndarray [rows,cols,bands]
:param in_gt: input gdal GeoTransform :param ndarray: numpy.ndarray [rows,cols,bands]
:param in_prj: input projection as WKT string :param in_gt: input gdal GeoTransform
:param out_prj: output projection as WKT string :param in_prj: input projection as WKT string
:param out_gt: output gdal GeoTransform as float tuple in the source coordinate system (optional) :param out_prj: output projection as WKT string
:param outUL: [X,Y] output upper left coordinates as floats in the source coordinate system :param out_gt: output gdal GeoTransform as float tuple in the source coordinate system (optional)
(requires outRowsCols) :param outUL: [X,Y] output upper left coordinates as floats in the source coordinate system
:param outRowsCols: [rows, cols] (optional) (requires outRowsCols)
:param out_res: output resolution as tuple of floats (x,y) in the TARGET coordinate system :param outRowsCols: [rows, cols] (optional)
:param out_extent: [left, bottom, right, top] as floats in the source coordinate system :param out_res: output resolution as tuple of floats (x,y) in the TARGET coordinate system
:param out_dtype: output data type as numpy data type :param out_extent: [left, bottom, right, top] as floats in the source coordinate system
:param rsp_alg: Resampling method to use. One of the following (int, default is 0): :param out_dtype: output data type as numpy data type
0 = nearest neighbour, 1 = bilinear, 2 = cubic, 3 = cubic spline, 4 = lanczos, :param rsp_alg: Resampling method to use. One of the following (int, default is 0):
5 = average, 6 = mode 0 = nearest neighbour, 1 = bilinear, 2 = cubic, 3 = cubic spline, 4 = lanczos,
:param in_nodata no data value of the input image 5 = average, 6 = mode
:param out_nodata no data value of the output image :param in_nodata: no data value of the input image
:return out_arr: warped numpy array :param out_nodata: no data value of the output image
:return out_gt: warped gdal GeoTransform :param outExtent_within: Ensures that the output extent is within the input extent.
:return out_prj: warped projection as WKT string Otherwise an exception is raised.
:return out_arr: warped numpy array
:return out_gt: warped gdal GeoTransform
:return out_prj: warped projection as WKT string
""" """
if not ndarray.flags['OWNDATA']: if not ndarray.flags['OWNDATA']:
temp = np.empty_like(ndarray) temp = np.empty_like(ndarray)
temp[:] = ndarray temp[:] = ndarray
ndarray = temp # deep copy: converts view to its own array in order to avoid wrong output ndarray = temp # deep copy: converts view to its own array in order to avoid wrong output
with rasterio.drivers(): with rasterio.env.Env():
if outUL is not None: if outUL is not None:
assert outRowsCols is not None, 'outRowsCols must be given if outUL is given.' assert outRowsCols is not None, 'outRowsCols must be given if outUL is given.'
outUL = [in_gt[0], in_gt[3]] if outUL is None else outUL outUL = [in_gt[0], in_gt[3]] if outUL is None else outUL
...@@ -2803,7 +2813,7 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None, ...@@ -2803,7 +2813,7 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
# convert input array axis order to rasterio axis order # convert input array axis order to rasterio axis order
ndarray = np.swapaxes(np.swapaxes(ndarray, 0, 2), 1, 2) ndarray = np.swapaxes(np.swapaxes(ndarray, 0, 2), 1, 2)
bands, rows, cols = ndarray.shape bands, rows, cols = ndarray.shape
rows, cols = outRowsCols if outRowsCols else rows, cols rows, cols = outRowsCols if outRowsCols else (rows, cols)
else: else:
rows, cols = ndarray.shape if outRowsCols is None else outRowsCols rows, cols = ndarray.shape if outRowsCols is None else outRowsCols
...@@ -2818,22 +2828,23 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None, ...@@ -2818,22 +2828,23 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
# get dst_transform # get dst_transform
if out_gt is None and out_extent is None: if out_gt is None and out_extent is None:
if outRowsCols: if outRowsCols:
outUL = [in_gt[0], in_gt[3]] if outUL is None else outUL outUL = [in_gt[0], in_gt[3]] if outUL is None else outUL
ulRC2bounds = lambda ul, r, c: [ul[0], ul[1] + r * in_gt[5], ul[0] + c * in_gt[1], ulRC2bounds = lambda ul, r, c: [ul[0], ul[1] + r * in_gt[5], ul[0] + c * in_gt[1], ul[1]] # left, bottom, right, top
ul[1]] # left, bottom, right, top
left, bottom, right, top = ulRC2bounds(outUL, rows, cols) left, bottom, right, top = ulRC2bounds(outUL, rows, cols)
else: # outRowsCols is None and outUL is None: use in_gt else: # outRowsCols is None and outUL is None: use in_gt
left, bottom, right, top = gt2bounds(in_gt, rows, cols) left, bottom, right, top = gt2bounds(in_gt, rows, cols)
# ,im_xmax,im_ymin,im_ymax = corner_coord_to_minmax(get_corner_coordinates(self.ds_im2shift)) # ,im_xmax,im_ymin,im_ymax = corner_coord_to_minmax(get_corner_coordinates(self.ds_im2shift))
elif out_extent: elif out_extent:
left, bottom, right, top = out_extent left, bottom, right, top = out_extent
# input array is only a window of the actual input array
assert left >= in_gt[0] and right <= (in_gt[0] + (cols + 1) * in_gt[1]) and \
bottom >= in_gt[3] + (rows + 1) * in_gt[5] and top <= in_gt[3], \
"out_extent has to be completely within the input image bounds."
else: # out_gt is given else: # out_gt is given
left, bottom, right, top = gt2bounds(in_gt, rows, cols) left, bottom, right, top = gt2bounds(in_gt, rows, cols)
if outExtent_within:
# input array is only a window of the actual input array
assert left >= in_gt[0] and right <= (in_gt[0] + (cols + 1) * in_gt[1]) and \
bottom >= in_gt[3] + (rows + 1) * in_gt[5] and top <= in_gt[3], \
"out_extent has to be completely within the input image bounds."
if out_res is None: if out_res is None:
# get pixel resolution in target coord system # get pixel resolution in target coord system
prj_in_out = (isProjectedOrGeographic(in_prj), isProjectedOrGeographic(out_prj)) prj_in_out = (isProjectedOrGeographic(in_prj), isProjectedOrGeographic(out_prj))
...@@ -2861,8 +2872,9 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None, ...@@ -2861,8 +2872,9 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
src_transform = rasterio.transform.from_origin(in_gt[0], in_gt[3], in_gt[1], in_gt[5]) src_transform = rasterio.transform.from_origin(in_gt[0], in_gt[3], in_gt[1], in_gt[5])
dict_rspInt_rspAlg = {0: RESAMPLING.nearest, 1: RESAMPLING.bilinear, 2: RESAMPLING.cubic, dict_rspInt_rspAlg = \
3: RESAMPLING.cubic_spline, 4: RESAMPLING.lanczos, 5: RESAMPLING.average, 6: RESAMPLING.mode} {0: Resampling.nearest, 1: Resampling.bilinear, 2: Resampling.cubic,
3: Resampling.cubic_spline, 4: Resampling.lanczos, 5: Resampling.average, 6: Resampling.mode}
out_arr = np.zeros((bands, out_rows, out_cols), out_dtype) \ out_arr = np.zeros((bands, out_rows, out_cols), out_dtype) \
if len(ndarray.shape) == 3 else np.zeros((out_rows, out_cols), out_dtype) if len(ndarray.shape) == 3 else np.zeros((out_rows, out_cols), out_dtype)
...@@ -2874,9 +2886,22 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None, ...@@ -2874,9 +2886,22 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
with warnings.catch_warnings(): with warnings.catch_warnings():
warnings.simplefilter('ignore') # FIXME supresses: FutureWarning: GDAL-style transforms are deprecated and will not be supported in Rasterio 1.0. warnings.simplefilter('ignore') # FIXME supresses: FutureWarning: GDAL-style transforms are deprecated and will not be supported in Rasterio 1.0.
try: try:
#print('INPUTS')
#print(ndarray.shape, ndarray.dtype, out_arr.shape, out_arr.dtype)
#print(in_gt)
#print(src_crs)
#print(out_gt)
#print(dst_crs)
#print(dict_rspInt_rspAlg[rsp_alg])
#print(in_nodata)
#print(out_nodata)
rio_reproject(ndarray, out_arr, rio_reproject(ndarray, out_arr,
src_transform=in_gt, src_crs=src_crs, dst_transform=out_gt, dst_crs=dst_crs, src_transform=in_gt, src_crs=src_crs, dst_transform=out_gt, dst_crs=dst_crs,
resampling=dict_rspInt_rspAlg[rsp_alg], src_nodata=in_nodata, dst_nodata=out_nodata) resampling=dict_rspInt_rspAlg[rsp_alg], src_nodata=in_nodata, dst_nodata=out_nodata)
from matplotlib import pyplot as plt
#print(out_arr.shape)
#plt.figure()
#plt.imshow(out_arr[:,:,1])
except KeyError: except KeyError:
print(in_dtype, str(in_dtype)) print(in_dtype, str(in_dtype))
print(ndarray.dtype) print(ndarray.dtype)
...@@ -2884,12 +2909,11 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None, ...@@ -2884,12 +2909,11 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
# convert output array axis order to GMS axis order [rows,cols,bands] # convert output array axis order to GMS axis order [rows,cols,bands]
out_arr = out_arr if len(ndarray.shape) == 2 else np.swapaxes(np.swapaxes(out_arr, 0, 1), 1, 2) out_arr = out_arr if len(ndarray.shape) == 2 else np.swapaxes(np.swapaxes(out_arr, 0, 1), 1, 2)
if outRowsCols:
out_arr = out_arr[:outRowsCols[0], :outRowsCols[1]]
return out_arr, out_gt, out_prj return out_arr, out_gt, out_prj
def warp_mp(args):
    """Multiprocessing-friendly wrapper around warp_ndarray().

    Takes a single argument tuple so it can be passed to e.g. Pool.map().

    :param args: tuple of (input array, geotransform, input projection, output projection,
                 output extent, resampling algorithm, input nodata value)
    :return:     the warped numpy array (first element of the warp_ndarray() result)
    """
    in_arr, in_gt, in_prj, out_prj, out_extent, rsp_alg, in_nodata = args
    out_arr = warp_ndarray(in_arr, in_gt, in_prj, out_prj,
                           out_extent=out_extent, rsp_alg=rsp_alg, in_nodata=in_nodata)[0]
    return out_arr
def reproject_shapelyPoly(shapelyPoly, tgt_prj): def reproject_shapelyPoly(shapelyPoly, tgt_prj):
# type: (shapely.Polygon, str) -> shapely.Polygon # type: (shapely.Polygon, str) -> shapely.Polygon
...@@ -3065,16 +3089,20 @@ def pixelToMapYX(pixelCoords, path_im=None, geotransform=None, projection=None): ...@@ -3065,16 +3089,20 @@ def pixelToMapYX(pixelCoords, path_im=None, geotransform=None, projection=None):
def mapXY2imXY(mapXY, gt): def mapXY2imXY(mapXY, gt):
# type: (tuple,list) -> tuple # type: (tuple,list) -> tuple
"""Translates given map coordinates into pixel locations according to the given image geotransform. """Translates given map coordinates into pixel locations according to the given image geotransform.
:param mapXY: <tuple> The map coordinates to be translated in the form (x,y). :param mapXY: <tuple> The map coordinates to be translated in the form (x,y).
:param gt: <list> GDAL geotransform :param gt: <list> GDAL geotransform
:returns: <tuple> image coordinate tuple X/Y (column, row)
""" """
return (mapXY[0]-gt[0])/gt[1],(mapXY[1]-gt[3])/gt[5] return (mapXY[0]-gt[0])/gt[1],(mapXY[1]-gt[3])/gt[5]
def imXY2mapXY(imXY, gt): def imXY2mapXY(imXY, gt):
# type: (tuple,list) -> tuple # type: (tuple,list) -> tuple
"""Translates given pixel locations into map coordinates according to the given image geotransform. """Translates given pixel locations into map coordinates according to the given image geotransform.
:param imXY: <tuple> The image coordinates to be translated in the form (x,y). :param imXY: <tuple> The image coordinates to be translated in the form (x,y).
:param gt: <list> GDAL geotransform :param gt: <list> GDAL geotransform
:returns: <tuple> map coordinate tuple X/Y (mapX, mapY)
""" """
return (gt[0] + imXY[0] * abs(gt[1])), (gt[3] - imXY[1] * abs(gt[5])) return (gt[0] + imXY[0] * abs(gt[1])), (gt[3] - imXY[1] * abs(gt[5]))
...@@ -3173,7 +3201,7 @@ def mapinfo2geotransform(map_info): ...@@ -3173,7 +3201,7 @@ def mapinfo2geotransform(map_info):
"""Builds GDAL GeoTransform tuple from an ENVI map info. """Builds GDAL GeoTransform tuple from an ENVI map info.
:param map_info: ENVI map info (list), e.g. ['UTM', 1, 1, 192585.0, 5379315.0, 30.0, 30.0, 41, 'North', 'WGS-84'] :param map_info: ENVI map info (list), e.g. ['UTM', 1, 1, 192585.0, 5379315.0, 30.0, 30.0, 41, 'North', 'WGS-84']
:returns: GDAL GeoTransform, e.g. [249885.0, 30.0, 0.0, 4578615.0, 0.0, -30.0] :returns: GDAL GeoTransform, e.g. [249885.0, 30.0, 0.0, 4578615.0, 0.0, -30.0]
:rtype: tuple :rtype: list
""" """
# FIXME rotated datasets are currently not supported -> function must return rotation at gt[2] and gt[4] # FIXME rotated datasets are currently not supported -> function must return rotation at gt[2] and gt[4]
if map_info[1]==1 and map_info[2]==1: if map_info[1]==1 and map_info[2]==1:
...@@ -3185,9 +3213,8 @@ def mapinfo2geotransform(map_info): ...@@ -3185,9 +3213,8 @@ def mapinfo2geotransform(map_info):
"The map info argument has to be a list of length 10. Got %s." %map_info "The map info argument has to be a list of length 10. Got %s." %map_info
return [ULmapX,float(map_info[5]),0.,ULmapY,0.,-float(map_info[6])] return [ULmapX,float(map_info[5]),0.,ULmapY,0.,-float(map_info[6])]
def get_corner_coordinates(gdal_ds=None, gt=None, cols=None, rows=None): def get_corner_coordinates(gdal_ds=None, gt=None, cols=None, rows=None):
"""Returns (UL, LL, LR, UR) in the same coordinate units like the given geotransform.""" """Returns (ULxy, LLxy, LRxy, URxy) in the same coordinate units like the given geotransform."""
assert gdal_ds or (gt and cols and rows), \ assert gdal_ds or (gt and cols and rows), \
"GEOP.get_corner_coordinates: Missing argument! Please provide either 'gdal_ds' or 'gt', 'cols' AND 'rows'." "GEOP.get_corner_coordinates: Missing argument! Please provide either 'gdal_ds' or 'gt', 'cols' AND 'rows'."
gdal_ds_GT = gdal_ds.GetGeoTransform() if gdal_ds else gt gdal_ds_GT = gdal_ds.GetGeoTransform() if gdal_ds else gt
...@@ -3233,6 +3260,11 @@ def proj4_to_dict(proj4): ...@@ -3233,6 +3260,11 @@ def proj4_to_dict(proj4):
items = [item for item in proj4.replace('+','').split(' ') if '=' in item] items = [item for item in proj4.replace('+','').split(' ') if '=' in item]
return {k:v for k,v in [kv.split('=') for kv in items]} return {k:v for k,v in [kv.split('=') for kv in items]}
def prj_equal(prj1, prj2):
    # type: (str,str) -> bool
    """Quickly check whether two projections are equal.

    Both inputs are normalized to PROJ.4 strings and compared textually.

    :param prj1: first projection (any format accepted by get_proj4info)
    :param prj2: second projection (any format accepted by get_proj4info)
    :return:     True if both projections resolve to the same PROJ.4 string
    """
    proj4_first = get_proj4info(proj=prj1)
    proj4_second = get_proj4info(proj=prj2)
    return proj4_first == proj4_second
def corner_coord_to_minmax(corner_coords): def corner_coord_to_minmax(corner_coords):
# type: (list) -> (float,float,float,float) # type: (list) -> (float,float,float,float)
"""Converts [[x1,y1],[x2,y2],[]...] to (xmin,xmax,ymin,ymax) """Converts [[x1,y1],[x2,y2],[]...] to (xmin,xmax,ymin,ymax)
...@@ -3902,4 +3934,50 @@ def is_coord_grid_equal(gt,xgrid,ygrid): ...@@ -3902,4 +3934,50 @@ def is_coord_grid_equal(gt,xgrid,ygrid):
if is_point_on_grid((ULx,ULy),xgrid,ygrid) and xgsd==abs(xgrid[1]-xgrid[0]) and abs(ygsd)==abs(ygrid[1]-ygrid[0]): if is_point_on_grid((ULx,ULy),xgrid,ygrid) and xgsd==abs(xgrid[1]-xgrid[0]) and abs(ygsd)==abs(ygrid[1]-ygrid[0]):
return True return True
else: else:
return False return False
\ No newline at end of file
def snap_bounds_to_pixGrid(bounds, gt, roundAlg='auto'):
    """Snap the given map bounds to the pixel grid defined by the given geotransform.

    :param bounds:   (xmin, ymin, xmax, ymax) map bounds to be snapped
    :param gt:       GDAL geotransform defining the pixel grid
    :param roundAlg: rounding algorithm passed through to HLP_F.find_nearest (default: 'auto')
    :return:         snapped (xmin, ymin, xmax, ymax)
    """
    in_xmin, in_ymin, in_xmax, in_ymax = bounds

    # two grid nodes suffice; find_nearest extrapolates the grid beyond them
    xgrid = np.arange(gt[0], gt[0] + 2 * gt[1], gt[1])
    ygrid = np.arange(gt[3], gt[3] + 2 * abs(gt[5]), abs(gt[5]))

    snap_x = lambda val: HLP_F.find_nearest(xgrid, val, roundAlg, extrapolate=True)
    snap_y = lambda val: HLP_F.find_nearest(ygrid, val, roundAlg, extrapolate=True)

    return snap_x(in_xmin), snap_y(in_ymin), snap_x(in_xmax), snap_y(in_ymax)
def clip_array_using_mapBounds(array, bounds, im_prj, im_gt, fillVal=0):
    """Clip a geocoded array to the given map bounds.

    The bounds are first snapped to the pixel grid of the input array. Output areas
    outside the input array are filled with fillVal (i.e. the output may also be
    LARGER than the input if the bounds exceed the input extent).

    :param array:   numpy array to be clipped ([rows, cols] or [rows, cols, bands])
    :param bounds:  (xmin, ymin, xmax, ymax) target bounds in the projection of the array
    :param im_prj:  projection of the input array (returned unchanged)
    :param im_gt:   GDAL geotransform of the input array
    :param fillVal: pixel value for output areas outside the input array (default: 0)
    :return:        tuple of (clipped array, output geotransform, output projection)
    """
    # FIX: removed leftover debug print() statements

    # get target bounds on the same pixel grid as the input array
    tgt_xmin, tgt_ymin, tgt_xmax, tgt_ymax = snap_bounds_to_pixGrid(bounds, im_gt)

    # get target dims
    tgt_rows = int(abs((tgt_ymax - tgt_ymin) / im_gt[5])) + 1
    tgt_cols = int(abs((tgt_xmax - tgt_xmin) / im_gt[1])) + 1
    tgt_bands = array.shape[2] if len(array.shape) == 3 else None
    tgt_shape = (tgt_rows, tgt_cols, tgt_bands) if len(array.shape) == 3 else (tgt_rows, tgt_cols)

    # get array pos to fill: map the input corners into the output pixel grid
    R, C = array.shape[:2]
    tgt_gt = [tgt_xmin, im_gt[1], 0., tgt_ymax, 0., im_gt[5]]
    cS, rS = [int(i) for i in mapXY2imXY(imXY2mapXY((0, 0), im_gt), tgt_gt)]
    cE, rE = [int(i) for i in mapXY2imXY(imXY2mapXY((C - 1, R - 1), im_gt), tgt_gt)]

    # create target array and fill it with the given pixel values at the correct geo position
    tgt_arr = np.full(tgt_shape, fillVal, array.dtype)
    tgt_arr[rS:rE + 1, cS:cE + 1] = array

    return tgt_arr, tgt_gt, im_prj
...@@ -20,7 +20,7 @@ import glob ...@@ -20,7 +20,7 @@ import glob
import builtins import builtins
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller) job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
from ..gms_io import Input_reader as INP_R from ..io import Input_reader as INP_R
from ..misc import helper_functions as HLP_F from ..misc import helper_functions as HLP_F
from ..misc import database_tools as DB_T from ..misc import database_tools as DB_T
from ..misc import path_generator as PG from ..misc import path_generator as PG
...@@ -183,9 +183,12 @@ def add_local_availability(dataset): ...@@ -183,9 +183,12 @@ def add_local_availability(dataset):
DB_T.SQL_DB_to_csv() DB_T.SQL_DB_to_csv()
dataset['proc_level'] = ProcL dataset['proc_level'] = ProcL
elif len(DB_match) == 1: elif len(DB_match) == 1:
proc_chain = ['L0A','L0B','L1A','L1B','L1C','L2A','L2B','L2C','L2D'] try:
print('Found a matching %s dataset for %s. Processing skipped until %s.' \ print('Found a matching %s dataset for %s. Processing skipped until %s.' \
%(ProcL,dataset['entity_ID'],proc_chain[proc_chain.index(ProcL)+1])) %(ProcL,dataset['entity_ID'],HLP_F.proc_chain[HLP_F.proc_chain.index(ProcL)+1]))
except IndexError:
print('Found a matching %s dataset for %s. Processing already done.' \
%(ProcL,dataset['entity_ID']))
if DB_match[0][0] == ProcL: if DB_match[0][0] == ProcL:
dataset['proc_level'] = DB_match[0][0] dataset['proc_level'] = DB_match[0][0]
else: else:
......
...@@ -34,19 +34,23 @@ import datetime ...@@ -34,19 +34,23 @@ import datetime
import builtins import builtins
import time import time
import warnings import warnings
import copy
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
from pyhdf import SD from pyhdf import SD
from spectral.io import envi from spectral.io import envi
from shapely.geometry import box
from . import METADATA as META
from . import GEOPROCESSING as GEOP from . import METADATA as META
from . import gms_cloud_classifier as CLD_P # Cloud Processor from . import GEOPROCESSING as GEOP
from . import py_tools_ah from . import gms_cloud_classifier as CLD_P # Cloud Processor
from ..misc import helper_functions as HLP_F from . import py_tools_ah
from ..gms_io import Input_reader as INP_R from ..misc import helper_functions as HLP_F
from ..gms_io import Output_writer as OUT_W from ..io import Input_reader as INP_R
from ..misc import path_generator as PG from ..io import Output_writer as OUT_W
from ..misc import database_tools as DB_T from ..misc import path_generator as PG
from ..misc import database_tools as DB_T
from ..misc.mgrs_tile import MGRS_tile
from ..io.GeoArray import GeoArray
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller) job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
...@@ -214,7 +218,7 @@ class L1A_object(object): ...@@ -214,7 +218,7 @@ class L1A_object(object):
""" """
path_GMS_file = tuple_GMS_subset[0] path_GMS_file = tuple_GMS_subset[0]
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file) GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
[setattr(self, key, value) for key, value in GMSfileDict.items()] # copy all attributes from GMS file self.__dict__ = GMSfileDict.copy() # copy all attributes from GMS file
self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1) self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
self.acquisition_date = datetime.datetime.strptime(self.acquisition_date,'%Y-%m-%d') self.acquisition_date = datetime.datetime.strptime(self.acquisition_date,'%Y-%m-%d')
self.arr_shape, self.arr_pos = tuple_GMS_subset[1] self.arr_shape, self.arr_pos = tuple_GMS_subset[1]
...@@ -237,7 +241,7 @@ class L1A_object(object): ...@@ -237,7 +241,7 @@ class L1A_object(object):
self.masks = path_masks self.masks = path_masks
self.GMS_identifier['logger'] = 'not set' self.GMS_identifier['logger'] = 'not set'
return self return copy.copy(self)
########################### core functions #################################### ########################### core functions ####################################
...@@ -637,7 +641,8 @@ class L1A_object(object): ...@@ -637,7 +641,8 @@ class L1A_object(object):
:param logger: a logging.logger object :param logger: a logging.logger object
""" """
logger = logger if logger else self.logger logger = logger if logger else self.logger
if subset is None and (self.arr_shape=='cube' or [self.arr_pos[0][0], self.arr_pos[1][0]] == [0,0]) or \ if subset is None and\
(self.arr_shape=='cube' or self.arr_pos is None or [self.arr_pos[0][0],self.arr_pos[1][0]]==[0,0]) or \
subset==['cube',None] or (subset and [subset[1][0][0], subset[1][1][0]]==[0,0]) or \ subset==['cube',None] or (subset and [subset[1][0][0], subset[1][1][0]]==[0,0]) or \
hasattr(self,'logAtThisTile') and getattr(self,'logAtThisTile'): # cube or 1st tile hasattr(self,'logAtThisTile') and getattr(self,'logAtThisTile'): # cube or 1st tile
logger.info(log_msg) logger.info(log_msg)
...@@ -962,6 +967,35 @@ class L1A_object(object): ...@@ -962,6 +967,35 @@ class L1A_object(object):
return {'desc':'mask_clouds','row_start':rS,'row_end':rE,'col_start':cS,'col_end':cE,'data':mask_clouds} return {'desc':'mask_clouds','row_start':rS,'row_end':rE,'col_start':cS,'col_end':cE,'data':mask_clouds}
def build_combined_masks_array(self):
    # type: () -> dict
    """Generate the self.masks attribute (uint8) by concatenating all masks of the GMS object.

    The corresponding metadata (including a 'data ignore value') is assigned to
    self.masks_meta. Empty mask attributes are skipped.

    :return: dict describing the combined mask array and its position within the full
             array in the form {'desc', 'row_start', 'row_end', 'col_start', 'col_end', 'data'}
             (usually not needed; None if no mask attributes are present)
    """
    arrays2combine = [aN for aN in ['mask_1bit', 'mask_clouds']
                      if hasattr(self, aN) and isinstance(getattr(self, aN), np.ndarray)]
    if arrays2combine:
        self.log_for_fullArr_or_firstTile('Combining masks for output writer...')
        get_data = lambda arrName: getattr(self, arrName).astype(np.uint8)[:, :, None]

        for aN in arrays2combine:
            # warn if the conversion to uint8 is lossy for this mask
            if False in np.equal(getattr(self, aN), getattr(self, aN).astype(np.uint8)):
                warnings.warn('Some pixel values of attribute %s changed during data type '
                              'conversion within build_combined_masks_array().'
                              % aN)  # FIX: the %s placeholder was never substituted

        self.masks = get_data(arrays2combine[0]) if len(arrays2combine) == 1 else \
            np.concatenate([get_data(aN) for aN in arrays2combine], axis=2)

        mapI, CSS = (self.meta['map info'], self.meta['coordinate system string']) \
            if hasattr(self, 'meta') and self.meta else (self.MetaObj.map_info, self.MetaObj.projection)
        nodataVal = HLP_F.get_outFillZeroSaturated(self.masks.dtype)[0]
        self.masks_meta = {'map info': mapI, 'coordinate system string': CSS,
                           'bands': len(arrays2combine), 'band names': arrays2combine,
                           'data ignore value': nodataVal}

        return {'desc': 'masks', 'row_start': 0, 'row_end': self.shape_fullArr[0],
                'col_start': 0, 'col_end': self.shape_fullArr[1], 'data': self.masks}  # usually not needed
def calc_corner_positions(self): def calc_corner_positions(self):
"""Get true corner positions in the form """Get true corner positions in the form
[UL, UR, LL, LR] as [(ULrow,ULcol),(URrow,URcol),...],[(ULLon,ULLat),(URLon,URLat),..]""" [UL, UR, LL, LR] as [(ULrow,ULcol),(URrow,URcol),...],[(ULLon,ULLat),(URLon,URLat),..]"""
...@@ -1097,33 +1131,6 @@ class L1A_object(object): ...@@ -1097,33 +1131,6 @@ class L1A_object(object):
if update_spec_vals: self.MetaObj.spec_vals['fill'] = nodata_val if update_spec_vals: self.MetaObj.spec_vals['fill'] = nodata_val
def build_combined_masks_array(self):
# type: () -> dict
"""Generates self.masks attribute (unsigned integer 8bit) from by concatenating all masks included in GMS obj.
The corresponding metadata is assigned to L1A_obj.masks_meta. Empty mask attributes are skipped."""
arrays2combine = [aN for aN in ['mask_1bit', 'mask_clouds'] \
if hasattr(self, aN) and isinstance(getattr(self, aN), np.ndarray)]
if arrays2combine:
self.log_for_fullArr_or_firstTile('Combining masks for output writer...')
get_data = lambda arrName: getattr(self,arrName).astype(np.uint8)[:,:,None]
for aN in arrays2combine:
if False in np.equal(getattr(self,aN),getattr(self,aN).astype(np.uint8)):
warnings.warn('Some pixel values of attribute %s changed during data type '
'conversion within build_combined_masks_array().')
self.masks = get_data(arrays2combine[0]) if len(arrays2combine)==1 else \
np.concatenate([get_data(aN) for aN in arrays2combine], axis=2)
mapI, CSS = (self.meta['map info'], self.meta['coordinate system string']) \
if hasattr(self, 'meta') and self.meta else (self.MetaObj.map_info, self.MetaObj.projection)
self.masks_meta = {'map info': mapI, 'coordinate system string': CSS,
'bands':len(arrays2combine),'band names':arrays2combine}
return {'desc': 'masks', 'row_start': 0, 'row_end': self.shape_fullArr[0],
'col_start': 0, 'col_end': self.shape_fullArr[1], 'data': self.masks} # usually not needed