Commit 12d16dde authored by Daniel Scheffler

Bugfix for "missing L2B mask files" during continued processing that earlier...

Bugfix for "missing L2B mask files" during continued processing that earlier stopped in L2B processing; implemented proper MGRS tiling scheme according to project agreements

GEOP:
- updated some deprecated 3rd-party-library references
- added transform_any_prj(): a function for transforming X/Y coordinates from any source to any target projection (sketched below)
- warp_ndarray():
    - implemented the extra option 'outExtent_within', allowing output arrays with a larger geographical extent than the input image (needed for the MGRS tiling scheme)
- improved some docstrings
- added prj_equal(): a function to quickly check whether two projections are equal (sketched below)
- added snap_bounds_to_pixGrid(): a function to snap map bounds to a given pixel grid
- added clip_array_using_mapBounds(): a function for clipping a geocoded array (array plus geotransform/projection) to a given bounding box
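
A minimal, standalone sketch of what transform_any_prj() and prj_equal() provide, assuming pyproj as used in GEOPROCESSING (the actual GEOP versions accept WKT/proj4 input via get_proj4info(); EPSG codes are used here only for brevity):

    import pyproj

    def transform_any_prj(prj_src, prj_tgt, x, y):
        # transform X/Y coordinates from any source to any target projection
        src = pyproj.Proj(init='epsg:%s' % prj_src)
        tgt = pyproj.Proj(init='epsg:%s' % prj_tgt)
        return pyproj.transform(src, tgt, x, y)

    def prj_equal(prj1, prj2):
        # two projections count as equal if their proj4 representations match
        return pyproj.Proj(init='epsg:%s' % prj1).srs == pyproj.Proj(init='epsg:%s' % prj2).srs

    # example: WGS84 lon/lat -> UTM zone 32N
    utm_x, utm_y = transform_any_prj(4326, 32632, 11.57, 48.13)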

L0A_P:
- add_local_availability():
    - replaced a hard-coded list of processing levels (now uses the central HLP_F.proc_chain; see the sketch below)
    - bugfix for returning an invalid processing level if processing is already done
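
The gist of the fix as a small standalone sketch; the processing-level chain now lives centrally in helper_functions (skip_message() is a hypothetical name used only for illustration):

    proc_chain = ['L0A', 'L0B', 'L1A', 'L1B', 'L1C', 'L2A', 'L2B', 'L2C', 'L2D']

    def skip_message(ProcL, entity_ID):
        # if ProcL is the last level of the chain, there is no 'next' level
        # to skip to -> the dataset is already fully processed
        try:
            return 'Found a matching %s dataset for %s. Processing skipped until %s.' \
                   % (ProcL, entity_ID, proc_chain[proc_chain.index(ProcL) + 1])
        except IndexError:
            return 'Found a matching %s dataset for %s. Processing already done.' % (ProcL, entity_ID)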

L1A_P:
- fill_from_disk(): bugfix for attributes sharing memory addresses when fill_from_disk() is called during multiprocessing
- log_for_fullArr_or_firstTile(): bugfix for not handling empty arr_pos
- added get_subset_obj(), based on older code from helper_functions: a function that generates a subset of a GMS object from a bounding box given in image or map coordinates; map coordinates may also be in a different projection than the GMS object (needed for proper generation of MGRS tiles; see the coordinate sketch after this list)
- added to_MGRS_tiles(), based on older code from helper_functions: a function for cutting a GMS object into MGRS tiles
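
The coordinate logic these two methods rely on, as a self-contained sketch for the same-projection case (bounds in a different projection would first be converted via transform_any_prj(); the real code additionally snaps the bounds to the pixel grid):

    import numpy as np

    def clip_to_mapBounds(arr, gt, bounds, fillVal=0):
        # gt = GDAL geotransform (ULx, xres, 0, ULy, 0, -yres); bounds = (xmin, ymin, xmax, ymax)
        xmin, ymin, xmax, ymax = bounds
        # map coordinates -> pixel window (columns/rows of the UL and LR corners)
        cS, rS = int((xmin - gt[0]) / gt[1]), int((ymax - gt[3]) / gt[5])
        cE, rE = int((xmax - gt[0]) / gt[1]), int((ymin - gt[3]) / gt[5])
        out = np.full((rE - rS, cE - cS) + arr.shape[2:], fillVal, arr.dtype)
        # copy only the part of the requested window that overlaps the input array
        rS_in, cS_in = max(rS, 0), max(cS, 0)
        rE_in, cE_in = min(rE, arr.shape[0]), min(cE, arr.shape[1])
        if rE_in > rS_in and cE_in > cS_in:
            out[rS_in - rS:rE_in - rS, cS_in - cS:cE_in - cS] = arr[rS_in:rE_in, cS_in:cE_in]
        out_gt = [gt[0] + cS * gt[1], gt[1], 0., gt[3] + rS * gt[5], 0., gt[5]]
        return out, out_gt

    # example: clip a 40 x 40 pixel window out of a 100 x 100 pixel array with 30 m resolution
    arr = np.random.rand(100, 100)
    gt = [300000.0, 30.0, 0.0, 5400000.0, 0.0, -30.0]
    sub, sub_gt = clip_to_mapBounds(arr, gt, (300300.0, 5398500.0, 301500.0, 5399700.0))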

L1B_P:
- L1B_object.apply_deshift_results():
    - bugfix for not updating the geoinformation of the 'masks' attribute after applying spatial shift corrections

META:
- added 'data ignore value' to metadata

IO:
- renamed module 'gms_io' to 'io'

ARR:
- added submodule 'GeoArray' to io:
    - added class 'GeoArray': a class for simplifying array access, regardless of the actual memory location (in memory or on disk); see the sketch after this list
    - added _clip_array_at_mapPos(): a function for clipping a geocoded array using a given bounding box in the same projection as the array itself
    - added get_array_at_mapPos(): a function for clipping a geocoded array using a given bounding box that can have any projection
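
The idea behind GeoArray, as a strongly simplified sketch (the real class wraps GDAL/ENVI datasets and also carries geotransform, projection and band information; the .npy fallback below is only for illustration):

    import numpy as np

    class GeoArraySketch(object):
        # accepts either an in-memory numpy array or a path to a file on disk
        # and exposes the same numpy-style access in both cases
        def __init__(self, arr_or_path):
            self._arr = arr_or_path if isinstance(arr_or_path, np.ndarray) else None
            self._path = arr_or_path if isinstance(arr_or_path, str) else None

        @property
        def arr(self):
            if self._arr is None:
                self._arr = np.load(self._path)  # the real GeoArray reads via GDAL instead
            return self._arr

        @property
        def shape(self):
            return self.arr.shape

        def __getitem__(self, given):
            return self.arr[given]

    # both behave the same way:
    # GeoArraySketch(np.zeros((10, 10)))[2:5, 2:5]
    # GeoArraySketch('/path/to/array.npy')[2:5, 2:5]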

OUT_W:
- added 'data ignore value' to enviHdr_keyOrder
- added 'data ignore value' to mask_to_ENVI_Classification()
- added set_output_nodataVal(): a function for adding a data ignore value to an already written file (sketched below)
- Obj2ENVI: bugfix for not handling empty arr_pos
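
A sketch of what set_output_nodataVal() achieves, written with plain GDAL (assumption: the actual implementation may instead patch the 'data ignore value' entry of the ENVI header directly):

    from osgeo import gdal

    def set_output_nodataVal_sketch(path_file, nodata_val):
        # open the already written file in update mode and attach the no-data value to all bands
        ds = gdal.Open(path_file, gdal.GA_Update)
        for b in range(1, ds.RasterCount + 1):
            ds.GetRasterBand(b).SetNoDataValue(float(nodata_val))
        ds.FlushCache()
        del ds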

DB_T:
- delete_processing_results(), GMS_JOB.__delete_procdata(), GMS_JOB.delete_procdata_of_entire_job(), GMS_JOB.delete_procdata_of_failed_sceneIDs(): added functionality to delete a specific processing level from disk

HLP_F:
- revised find_nearest(): it now supports selectable rounding, including an automatic mode that snaps a value to its nearest neighbour within the given array (sketched below)
- replaced a deprecated 3rd-party function reference
- get_arrSubsetBounds_from_shapelyPolyLonLat(): added flag 'ensure_valid_coords'
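
A standalone sketch of the rounding behaviour; the mode names are assumptions derived from the calls visible in the diff (e.g. find_nearest(xgrid, in_xmin, roundAlg, extrapolate=True)), and extrapolation beyond the given array is omitted here:

    import numpy as np

    def find_nearest_sketch(array, value, roundAlg='auto'):
        array = np.asarray(array)
        if roundAlg == 'auto':  # snap to the closest grid value
            return array[np.abs(array - value).argmin()]
        if roundAlg == 'off':   # round down to the next smaller grid value
            return array[array <= value].max()
        if roundAlg == 'on':    # round up to the next larger grid value
            return array[array >= value].min()
        raise ValueError("Unexpected rounding algorithm '%s'." % roundAlg)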

MGRS_tile:
- added submodule "mgrs_tile" for easily retrieving information about MGRS tiles (bounding box, projection conversions, buffering, etc.)

SpatialIndexMediator:
- replaced a wrong host address

PC:
- updated calls for MGRS tiling within L2C_map_1()
parent 8f79d577
......@@ -8,11 +8,11 @@
###############################################################################
from . import algorithms
from . import gms_io
from . import io
from . import misc
__all__=['algorithms',
'gms_io',
'io',
'misc']
__version__ = '20160905.01'
......
......@@ -47,7 +47,7 @@ except ImportError:
from gdalconst import *
from rasterio.warp import reproject as rio_reproject
from rasterio.warp import calculate_default_transform as rio_calc_transform
from rasterio.warp import RESAMPLING
from rasterio.warp import Resampling
import pyproj
from pyorbital import astronomy
import ephem
......@@ -55,9 +55,9 @@ from spectral.io import envi
from shapely.geometry import Polygon
from shapely.geometry import shape
from shapely.geometry import MultiPoint
from numba import jit, autojit
from numba import jit
from ..gms_io import envifilehandling_BD as ef
from ..io import envifilehandling_BD as ef
from ..misc import helper_functions as HLP_F
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
......@@ -2751,41 +2751,51 @@ def transform_wgs84_to_utm(lon, lat, zone=None):
UTM = pyproj.Proj(proj='utm', zone=get_utm_zone(lon) if zone is None else zone, ellps='WGS84')
return UTM(lon, lat)
def transform_any_prj(prj_src,prj_tgt,x,y):
prj_src = pyproj.Proj(get_proj4info(proj=prj_src))
prj_tgt = pyproj.Proj(get_proj4info(proj=prj_tgt))
x,y = pyproj.transform(prj_src,prj_tgt,x,y)
return x,y
def get_point_bounds(points):
mp_points = MultiPoint(points)
(min_lon, min_lat, max_lon, max_lat) = mp_points.bounds
return min_lon, min_lat, max_lon, max_lat
def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None, outUL=None,
out_res=None, out_extent=None, out_dtype=None, rsp_alg=0, in_nodata=None, out_nodata=None):
def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None, outUL=None, out_res=None,
out_extent=None, out_dtype=None, rsp_alg=0, in_nodata=None, out_nodata=None, outExtent_within=True):
"""Reproject / warp a numpy array with given geo information to target coordinate system.
:param ndarray: numpy.ndarray [rows,cols,bands]
:param in_gt: input gdal GeoTransform
:param in_prj: input projection as WKT string
:param out_prj: output projection as WKT string
:param out_gt: output gdal GeoTransform as float tuple in the source coordinate system (optional)
:param outUL: [X,Y] output upper left coordinates as floats in the source coordinate system
(requires outRowsCols)
:param outRowsCols: [rows, cols] (optional)
:param out_res: output resolution as tuple of floats (x,y) in the TARGET coordinate system
:param out_extent: [left, bottom, right, top] as floats in the source coordinate system
:param out_dtype: output data type as numpy data type
:param rsp_alg: Resampling method to use. One of the following (int, default is 0):
0 = nearest neighbour, 1 = bilinear, 2 = cubic, 3 = cubic spline, 4 = lanczos,
5 = average, 6 = mode
:param in_nodata no data value of the input image
:param out_nodata no data value of the output image
:return out_arr: warped numpy array
:return out_gt: warped gdal GeoTransform
:return out_prj: warped projection as WKT string
:param ndarray: numpy.ndarray [rows,cols,bands]
:param in_gt: input gdal GeoTransform
:param in_prj: input projection as WKT string
:param out_prj: output projection as WKT string
:param out_gt: output gdal GeoTransform as float tuple in the source coordinate system (optional)
:param outUL: [X,Y] output upper left coordinates as floats in the source coordinate system
(requires outRowsCols)
:param outRowsCols: [rows, cols] (optional)
:param out_res: output resolution as tuple of floats (x,y) in the TARGET coordinate system
:param out_extent: [left, bottom, right, top] as floats in the source coordinate system
:param out_dtype: output data type as numpy data type
:param rsp_alg: Resampling method to use. One of the following (int, default is 0):
0 = nearest neighbour, 1 = bilinear, 2 = cubic, 3 = cubic spline, 4 = lanczos,
5 = average, 6 = mode
:param in_nodata: no data value of the input image
:param out_nodata: no data value of the output image
:param outExtent_within: Ensures that the output extent is within the input extent.
Otherwise an exception is raised.
:return out_arr: warped numpy array
:return out_gt: warped gdal GeoTransform
:return out_prj: warped projection as WKT string
"""
if not ndarray.flags['OWNDATA']:
temp = np.empty_like(ndarray)
temp[:] = ndarray
ndarray = temp # deep copy: converts view to its own array in order to avoid wrong output
with rasterio.drivers():
with rasterio.env.Env():
if outUL is not None:
assert outRowsCols is not None, 'outRowsCols must be given if outUL is given.'
outUL = [in_gt[0], in_gt[3]] if outUL is None else outUL
......@@ -2803,7 +2813,7 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
# convert input array axis order to rasterio axis order
ndarray = np.swapaxes(np.swapaxes(ndarray, 0, 2), 1, 2)
bands, rows, cols = ndarray.shape
rows, cols = outRowsCols if outRowsCols else rows, cols
rows, cols = outRowsCols if outRowsCols else (rows, cols)
else:
rows, cols = ndarray.shape if outRowsCols is None else outRowsCols
......@@ -2818,22 +2828,23 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
# get dst_transform
if out_gt is None and out_extent is None:
if outRowsCols:
outUL = [in_gt[0], in_gt[3]] if outUL is None else outUL
ulRC2bounds = lambda ul, r, c: [ul[0], ul[1] + r * in_gt[5], ul[0] + c * in_gt[1],
ul[1]] # left, bottom, right, top
outUL = [in_gt[0], in_gt[3]] if outUL is None else outUL
ulRC2bounds = lambda ul, r, c: [ul[0], ul[1] + r * in_gt[5], ul[0] + c * in_gt[1], ul[1]] # left, bottom, right, top
left, bottom, right, top = ulRC2bounds(outUL, rows, cols)
else: # outRowsCols is None and outUL is None: use in_gt
left, bottom, right, top = gt2bounds(in_gt, rows, cols)
# ,im_xmax,im_ymin,im_ymax = corner_coord_to_minmax(get_corner_coordinates(self.ds_im2shift))
elif out_extent:
left, bottom, right, top = out_extent
# input array is only a window of the actual input array
assert left >= in_gt[0] and right <= (in_gt[0] + (cols + 1) * in_gt[1]) and \
bottom >= in_gt[3] + (rows + 1) * in_gt[5] and top <= in_gt[3], \
"out_extent has to be completely within the input image bounds."
else: # out_gt is given
left, bottom, right, top = gt2bounds(in_gt, rows, cols)
if outExtent_within:
# input array is only a window of the actual input array
assert left >= in_gt[0] and right <= (in_gt[0] + (cols + 1) * in_gt[1]) and \
bottom >= in_gt[3] + (rows + 1) * in_gt[5] and top <= in_gt[3], \
"out_extent has to be completely within the input image bounds."
if out_res is None:
# get pixel resolution in target coord system
prj_in_out = (isProjectedOrGeographic(in_prj), isProjectedOrGeographic(out_prj))
......@@ -2861,8 +2872,9 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
src_transform = rasterio.transform.from_origin(in_gt[0], in_gt[3], in_gt[1], in_gt[5])
dict_rspInt_rspAlg = {0: RESAMPLING.nearest, 1: RESAMPLING.bilinear, 2: RESAMPLING.cubic,
3: RESAMPLING.cubic_spline, 4: RESAMPLING.lanczos, 5: RESAMPLING.average, 6: RESAMPLING.mode}
dict_rspInt_rspAlg = \
{0: Resampling.nearest, 1: Resampling.bilinear, 2: Resampling.cubic,
3: Resampling.cubic_spline, 4: Resampling.lanczos, 5: Resampling.average, 6: Resampling.mode}
out_arr = np.zeros((bands, out_rows, out_cols), out_dtype) \
if len(ndarray.shape) == 3 else np.zeros((out_rows, out_cols), out_dtype)
......@@ -2874,9 +2886,22 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
with warnings.catch_warnings():
warnings.simplefilter('ignore') # FIXME supresses: FutureWarning: GDAL-style transforms are deprecated and will not be supported in Rasterio 1.0.
try:
#print('INPUTS')
#print(ndarray.shape, ndarray.dtype, out_arr.shape, out_arr.dtype)
#print(in_gt)
#print(src_crs)
#print(out_gt)
#print(dst_crs)
#print(dict_rspInt_rspAlg[rsp_alg])
#print(in_nodata)
#print(out_nodata)
rio_reproject(ndarray, out_arr,
src_transform=in_gt, src_crs=src_crs, dst_transform=out_gt, dst_crs=dst_crs,
resampling=dict_rspInt_rspAlg[rsp_alg], src_nodata=in_nodata, dst_nodata=out_nodata)
from matplotlib import pyplot as plt
#print(out_arr.shape)
#plt.figure()
#plt.imshow(out_arr[:,:,1])
except KeyError:
print(in_dtype, str(in_dtype))
print(ndarray.dtype)
......@@ -2884,12 +2909,11 @@ def warp_ndarray(ndarray, in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
# convert output array axis order to GMS axis order [rows,cols,bands]
out_arr = out_arr if len(ndarray.shape) == 2 else np.swapaxes(np.swapaxes(out_arr, 0, 1), 1, 2)
if outRowsCols:
out_arr = out_arr[:outRowsCols[0], :outRowsCols[1]]
return out_arr, out_gt, out_prj
def warp_mp(args):
in_arr,gt,prj,out_prj,out_ext,rsp,in_nd = args
warped = warp_ndarray(in_arr,gt,prj,out_prj,out_extent=out_ext,rsp_alg=rsp,in_nodata=in_nd)[0]
return warped
def reproject_shapelyPoly(shapelyPoly, tgt_prj):
# type: (shapely.Polygon, str) -> shapely.Polygon
......@@ -3065,16 +3089,20 @@ def pixelToMapYX(pixelCoords, path_im=None, geotransform=None, projection=None):
def mapXY2imXY(mapXY, gt):
# type: (tuple,list) -> tuple
"""Translates given map coordinates into pixel locations according to the given image geotransform.
:param mapXY: <tuple> The map coordinates to be translated in the form (x,y).
:param gt: <list> GDAL geotransform
:param gt: <list> GDAL geotransform
:returns: <tuple> image coordinate tuple X/Y (column, row)
"""
return (mapXY[0]-gt[0])/gt[1],(mapXY[1]-gt[3])/gt[5]
def imXY2mapXY(imXY, gt):
# type: (tuple,list) -> tuple
"""Translates given pixel locations into map coordinates according to the given image geotransform.
:param imXY: <tuple> The image coordinates to be translated in the form (x,y).
:param gt: <list> GDAL geotransform
:param gt: <list> GDAL geotransform
:returns: <tuple> map coordinate tuple X/Y (mapX, mapY)
"""
return (gt[0] + imXY[0] * abs(gt[1])), (gt[3] - imXY[1] * abs(gt[5]))
......@@ -3173,7 +3201,7 @@ def mapinfo2geotransform(map_info):
"""Builds GDAL GeoTransform tuple from an ENVI map info.
:param map_info: ENVI map info (list), e.g. ['UTM', 1, 1, 192585.0, 5379315.0, 30.0, 30.0, 41, 'North', 'WGS-84']
:returns: GDAL GeoTransform, e.g. [249885.0, 30.0, 0.0, 4578615.0, 0.0, -30.0]
:rtype: tuple
:rtype: list
"""
# FIXME rotated datasets are currently not supported -> function must return rotation at gt[2] and gt[4]
if map_info[1]==1 and map_info[2]==1:
......@@ -3185,9 +3213,8 @@ def mapinfo2geotransform(map_info):
"The map info argument has to be a list of length 10. Got %s." %map_info
return [ULmapX,float(map_info[5]),0.,ULmapY,0.,-float(map_info[6])]
def get_corner_coordinates(gdal_ds=None, gt=None, cols=None, rows=None):
"""Returns (UL, LL, LR, UR) in the same coordinate units like the given geotransform."""
"""Returns (ULxy, LLxy, LRxy, URxy) in the same coordinate units like the given geotransform."""
assert gdal_ds or (gt and cols and rows), \
"GEOP.get_corner_coordinates: Missing argument! Please provide either 'gdal_ds' or 'gt', 'cols' AND 'rows'."
gdal_ds_GT = gdal_ds.GetGeoTransform() if gdal_ds else gt
......@@ -3233,6 +3260,11 @@ def proj4_to_dict(proj4):
items = [item for item in proj4.replace('+','').split(' ') if '=' in item]
return {k:v for k,v in [kv.split('=') for kv in items]}
def prj_equal(prj1, prj2):
#type: (str,str) -> bool
"""Checks if the given two projections are equal."""
return get_proj4info(proj=prj1)==get_proj4info(proj=prj2)
def corner_coord_to_minmax(corner_coords):
# type: (list) -> (float,float,float,float)
"""Converts [[x1,y1],[x2,y2],[]...] to (xmin,xmax,ymin,ymax)
......@@ -3902,4 +3934,50 @@ def is_coord_grid_equal(gt,xgrid,ygrid):
if is_point_on_grid((ULx,ULy),xgrid,ygrid) and xgsd==abs(xgrid[1]-xgrid[0]) and abs(ygsd)==abs(ygrid[1]-ygrid[0]):
return True
else:
return False
\ No newline at end of file
return False
def snap_bounds_to_pixGrid(bounds, gt, roundAlg='auto'):
in_xmin, in_ymin, in_xmax, in_ymax = bounds
xgrid = np.arange(gt[0], gt[0] + 2 * gt[1], gt[1])
ygrid = np.arange(gt[3], gt[3] + 2 * abs(gt[5]), abs(gt[5]))
xmin = HLP_F.find_nearest(xgrid, in_xmin, roundAlg, extrapolate=True)
ymax = HLP_F.find_nearest(ygrid, in_ymax, roundAlg, extrapolate=True)
xmax = HLP_F.find_nearest(xgrid, in_xmax, roundAlg, extrapolate=True)
ymin = HLP_F.find_nearest(ygrid, in_ymin, roundAlg, extrapolate=True)
return xmin, ymin, xmax, ymax
def clip_array_using_mapBounds(array, bounds, im_prj, im_gt, fillVal=0):
"""
:param array:
:param bounds:
:param im_prj:
:param im_gt:
:param fillVal:
"""
print(bounds)
# get target bounds on the same grid like the input array
tgt_xmin, tgt_ymin, tgt_xmax, tgt_ymax = snap_bounds_to_pixGrid(bounds,im_gt)
print(tgt_xmin,tgt_ymax,tgt_xmax,tgt_ymin)
# get target dims
tgt_rows = int(abs((tgt_ymax - tgt_ymin) / im_gt[5])) + 1
tgt_cols = int(abs((tgt_xmax - tgt_xmin) / im_gt[1])) + 1
tgt_bands = array.shape[2] if len(array.shape) == 3 else None
tgt_shape = (tgt_rows, tgt_cols, tgt_bands) if len(array.shape) == 3 else (tgt_rows, tgt_cols)
# get array pos to fill
R, C = array.shape[:2]
tgt_gt = [tgt_xmin, im_gt[1], 0., tgt_ymax, 0., im_gt[5]]
cS, rS = [int(i) for i in mapXY2imXY(imXY2mapXY((0 , 0 ), im_gt), tgt_gt)]
cE, rE = [int(i) for i in mapXY2imXY(imXY2mapXY((C-1, R-1), im_gt), tgt_gt)]
print(rS,rE,cS,cE)
# create target array and fill it with the given pixel values at the correct geo position
tgt_arr = np.full(tgt_shape, fillVal, array.dtype)
tgt_arr[rS:rE + 1, cS:cE + 1] = array
return tgt_arr, tgt_gt, im_prj
......@@ -20,7 +20,7 @@ import glob
import builtins
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
from ..gms_io import Input_reader as INP_R
from ..io import Input_reader as INP_R
from ..misc import helper_functions as HLP_F
from ..misc import database_tools as DB_T
from ..misc import path_generator as PG
......@@ -183,9 +183,12 @@ def add_local_availability(dataset):
DB_T.SQL_DB_to_csv()
dataset['proc_level'] = ProcL
elif len(DB_match) == 1:
proc_chain = ['L0A','L0B','L1A','L1B','L1C','L2A','L2B','L2C','L2D']
print('Found a matching %s dataset for %s. Processing skipped until %s.' \
%(ProcL,dataset['entity_ID'],proc_chain[proc_chain.index(ProcL)+1]))
try:
print('Found a matching %s dataset for %s. Processing skipped until %s.' \
%(ProcL,dataset['entity_ID'],HLP_F.proc_chain[HLP_F.proc_chain.index(ProcL)+1]))
except IndexError:
print('Found a matching %s dataset for %s. Processing already done.' \
%(ProcL,dataset['entity_ID']))
if DB_match[0][0] == ProcL:
dataset['proc_level'] = DB_match[0][0]
else:
......
......@@ -34,19 +34,23 @@ import datetime
import builtins
import time
import warnings
import copy
import matplotlib.pyplot as plt
from pyhdf import SD
from spectral.io import envi
from . import METADATA as META
from . import GEOPROCESSING as GEOP
from . import gms_cloud_classifier as CLD_P # Cloud Processor
from . import py_tools_ah
from ..misc import helper_functions as HLP_F
from ..gms_io import Input_reader as INP_R
from ..gms_io import Output_writer as OUT_W
from ..misc import path_generator as PG
from ..misc import database_tools as DB_T
from shapely.geometry import box
from . import METADATA as META
from . import GEOPROCESSING as GEOP
from . import gms_cloud_classifier as CLD_P # Cloud Processor
from . import py_tools_ah
from ..misc import helper_functions as HLP_F
from ..io import Input_reader as INP_R
from ..io import Output_writer as OUT_W
from ..misc import path_generator as PG
from ..misc import database_tools as DB_T
from ..misc.mgrs_tile import MGRS_tile
from ..io.GeoArray import GeoArray
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
......@@ -214,7 +218,7 @@ class L1A_object(object):
"""
path_GMS_file = tuple_GMS_subset[0]
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
[setattr(self, key, value) for key, value in GMSfileDict.items()] # copy all attributes from GMS file
self.__dict__ = GMSfileDict.copy() # copy all attributes from GMS file
self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
self.acquisition_date = datetime.datetime.strptime(self.acquisition_date,'%Y-%m-%d')
self.arr_shape, self.arr_pos = tuple_GMS_subset[1]
......@@ -237,7 +241,7 @@ class L1A_object(object):
self.masks = path_masks
self.GMS_identifier['logger'] = 'not set'
return self
return copy.copy(self)
########################### core functions ####################################
......@@ -637,7 +641,8 @@ class L1A_object(object):
:param logger: a logging.logger object
"""
logger = logger if logger else self.logger
if subset is None and (self.arr_shape=='cube' or [self.arr_pos[0][0], self.arr_pos[1][0]] == [0,0]) or \
if subset is None and\
(self.arr_shape=='cube' or self.arr_pos is None or [self.arr_pos[0][0],self.arr_pos[1][0]]==[0,0]) or \
subset==['cube',None] or (subset and [subset[1][0][0], subset[1][1][0]]==[0,0]) or \
hasattr(self,'logAtThisTile') and getattr(self,'logAtThisTile'): # cube or 1st tile
logger.info(log_msg)
......@@ -962,6 +967,35 @@ class L1A_object(object):
return {'desc':'mask_clouds','row_start':rS,'row_end':rE,'col_start':cS,'col_end':cE,'data':mask_clouds}
def build_combined_masks_array(self):
# type: () -> dict
"""Generates self.masks attribute (unsigned integer 8bit) from by concatenating all masks included in GMS obj.
The corresponding metadata is assigned to L1A_obj.masks_meta. Empty mask attributes are skipped."""
arrays2combine = [aN for aN in ['mask_1bit', 'mask_clouds'] \
if hasattr(self, aN) and isinstance(getattr(self, aN), np.ndarray)]
if arrays2combine:
self.log_for_fullArr_or_firstTile('Combining masks for output writer...')
get_data = lambda arrName: getattr(self,arrName).astype(np.uint8)[:,:,None]
for aN in arrays2combine:
if False in np.equal(getattr(self,aN),getattr(self,aN).astype(np.uint8)):
warnings.warn('Some pixel values of attribute %s changed during data type '
'conversion within build_combined_masks_array().')
self.masks = get_data(arrays2combine[0]) if len(arrays2combine)==1 else \
np.concatenate([get_data(aN) for aN in arrays2combine], axis=2)
mapI, CSS = (self.meta['map info'], self.meta['coordinate system string']) \
if hasattr(self, 'meta') and self.meta else (self.MetaObj.map_info, self.MetaObj.projection)
nodataVal = HLP_F.get_outFillZeroSaturated(self.masks.dtype)[0]
self.masks_meta = {'map info': mapI, 'coordinate system string': CSS,
'bands':len(arrays2combine),'band names':arrays2combine, 'data ignore value':nodataVal}
return {'desc': 'masks', 'row_start': 0, 'row_end': self.shape_fullArr[0],
'col_start': 0, 'col_end': self.shape_fullArr[1], 'data': self.masks} # usually not needed
def calc_corner_positions(self):
"""Get true corner positions in the form
[UL, UR, LL, LR] as [(ULrow,ULcol),(URrow,URcol),...],[(ULLon,ULLat),(URLon,URLat),..]"""
......@@ -1097,33 +1131,6 @@ class L1A_object(object):
if update_spec_vals: self.MetaObj.spec_vals['fill'] = nodata_val
def build_combined_masks_array(self):
# type: () -> dict
"""Generates self.masks attribute (unsigned integer 8bit) from by concatenating all masks included in GMS obj.
The corresponding metadata is assigned to L1A_obj.masks_meta. Empty mask attributes are skipped."""
arrays2combine = [aN for aN in ['mask_1bit', 'mask_clouds'] \
if hasattr(self, aN) and isinstance(getattr(self, aN), np.ndarray)]
if arrays2combine:
self.log_for_fullArr_or_firstTile('Combining masks for output writer...')
get_data = lambda arrName: getattr(self,arrName).astype(np.uint8)[:,:,None]
for aN in arrays2combine:
if False in np.equal(getattr(self,aN),getattr(self,aN).astype(np.uint8)):
warnings.warn('Some pixel values of attribute %s changed during data type '
'conversion within build_combined_masks_array().')
self.masks = get_data(arrays2combine[0]) if len(arrays2combine)==1 else \
np.concatenate([get_data(aN) for aN in arrays2combine], axis=2)
mapI, CSS = (self.meta['map info'], self.meta['coordinate system string']) \
if hasattr(self, 'meta') and self.meta else (self.MetaObj.map_info, self.MetaObj.projection)
self.masks_meta = {'map info': mapI, 'coordinate system string': CSS,
'bands':len(arrays2combine),'band names':arrays2combine}
return {'desc': 'masks', 'row_start': 0, 'row_end': self.shape_fullArr[0],
'col_start': 0, 'col_end': self.shape_fullArr[1], 'data': self.masks} # usually not needed
def calc_mean_VAA(self):
"""Calculates mean viewing azimuth angle using sensor flight line derived from corner coordinates."""
......@@ -1204,6 +1211,171 @@ class L1A_object(object):
OUT_W.Tiles_Writer(tiles, outpath, out_shape, tiles[0]['data'].dtype, self.outInterleave, self.meta, overwrite=overwrite)
def get_subset_obj(self, imBounds=None, mapBounds=None, mapBounds_prj=None, logmsg=None, v=False):
# type: (L1A_object,tuple) -> L1A_object
"""Returns a subset of the given GMS object, based on the given bounds coordinates.
Array attributes are clipped and relevant metadata keys are updated according to new extent.
:param imBounds: <tuple> tuple of image coordinates in the form (xmin,xmax,ymin,ymax)
:param mapBounds: <tuple> tuple of map coordinates in the form (xmin,xmax,ymin,ymax)
:param mapBounds_prj: <str> a WKT string containing projection of the given map bounds
(can be different to projection of the GMS object; ignored if map bounds not given)
:param logmsg: <str> a message to be logged when this method is called
:param v: <bool> verbose mode (default: False)
:return: <GMS_object> the GMS object subset
"""
if logmsg:
self.logger.info(logmsg)
# copy object
sub_GMS_obj = HLP_F.parentObjDict[self.proc_level](*HLP_F.initArgsDict[self.proc_level])
sub_GMS_obj.__dict__ = {k: getattr(self,k) for k in self.__dict__.keys() if not isinstance(getattr(self,k), np.ndarray)}.copy()
sub_GMS_obj = copy.deepcopy(sub_GMS_obj)
# clip all array attributes using the given bounds
#list_arraynames = [i for i in self.__dict__ if not callable(getattr(self, i)) and \
# isinstance(getattr(self, i), np.ndarray)]
list_arraynames = [i for i in ['arr','masks']
if hasattr(self,i) and getattr(self,i) is not None] # FIXME hardcoded
assert list_arraynames
assert imBounds or mapBounds, "Either 'imBounds' or 'mapBounds' must be given. Got nothing."
# calculate mapBounds if not already given
# TODO this would make this method callable with imBounds
# avoid disk IO if requested area is within the input array # TODO
for arrname in list_arraynames:
# get input data for array subsetting
geoArr = getattr(self,arrname) if isinstance(getattr(self,arrname),GeoArray) else getattr(self,arrname)
nodata = HLP_F.get_outFillZeroSaturated(geoArr)[0]
meta2update = sub_GMS_obj.meta if arrname == 'arr' else sub_GMS_obj.masks_meta
gt = GEOP.mapinfo2geotransform(meta2update['map info'])
prj = meta2update['coordinate system string']
# get subsetted and (possibly) reprojected array
xmin,xmax,ymin,ymax = mapBounds
sub_arr,sub_gt,sub_prj = geoArr.get_mapPos((xmin,ymin,xmax,ymax), mapBounds_prj,
fillVal=nodata, arr_gt=gt, arr_prj=prj)
# show result
if v: GeoArray(sub_arr).show()
# update array-related attributes of sub_GMS_obj
setattr(sub_GMS_obj, arrname, sub_arr)
meta2update['map info'] = GEOP.geotransform2mapinfo(sub_gt,sub_prj)
meta2update['coordinate system string'] = sub_prj
meta2update['lines'], meta2update['samples'] = sub_arr.shape[:2]
meta2update['CS_UTM_ZONE'] = GEOP.get_UTMzone(prj=sub_prj) # FIXME only works for UTM
meta2update['CS_EPSG'] = GEOP.WKT2EPSG(sub_prj)
# copy subset mask data to mask_1bit and mask_clouds
sub_GMS_obj.mask_1bit = sub_GMS_obj.masks[:,:,0]
sub_GMS_obj.mask_clouds = sub_GMS_obj.masks[:,:,1] # FIXME not dynamic
# update arr_pos
#arr_pos = ((rS, rE), (cS, cE))
#print(arr_pos)
rows, cols, bands = sub_GMS_obj.arr.shape # FIXME
sub_GMS_obj.arr_shape = 'block'
#sub_GMS_obj.arr_pos = arr_pos
# calculate new attributes 'corner_utm' and 'corner_lonlat'
ULxy, URxy, LLxy, LRxy = [[0, 0], [cols - 1, 0], [0, rows - 1], [cols - 1, rows - 1]]
utm_coord_YX = GEOP.pixelToMapYX ([ULxy, URxy, LLxy, LRxy], geotransform=sub_gt, projection=sub_prj) # FIXME asserts gt in UTM coordinates
lonlat_coord = GEOP.pixelToLatLon([ULxy, URxy, LLxy, LRxy], geotransform=sub_gt, projection=sub_prj) # ULyx,URyx,LLyx,LRyx
sub_GMS_obj.corner_utm = [[YX[1], YX[0]] for YX in utm_coord_YX] # ULxy,URxy,LLxy,LRxy
sub_GMS_obj.corner_lonlat = [[YX[1], YX[0]] for YX in lonlat_coord] # ULxy,URxy,LLxy,LRxy
# calculate 'bounds_LonLat' and 'bounds_utm'
sub_GMS_obj.bounds_LonLat = HLP_F.corner_coord_to_minmax(sub_GMS_obj.corner_lonlat)
sub_GMS_obj.bounds_utm = HLP_F.corner_coord_to_minmax(sub_GMS_obj.corner_utm)
# calculate data_corners_imXY
if isinstance(sub_GMS_obj.mask_1bit, np.ndarray):
corners_imYX = GEOP.calc_FullDataset_corner_positions(
sub_GMS_obj.mask_1bit, assert_four_corners=False, algorithm='shapely')
else: # str # FIXME not needed anymore because geoArr.get_mapPos always returns an array?
from ..io.Input_reader import read_mask_subset
subset = ('block', ((rS, rE), (cS, cE)))
mask_1bit = read_mask_subset(sub_GMS_obj.mask_1bit, 'mask_1bit', sub_GMS_obj.logger, subset)
corners_imYX = GEOP.calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=False, algorithm='shapely')
sub_GMS_obj.data_corners_imXY = [(YX[1], YX[0]) for YX in corners_imYX]
# calculate data_corners_LonLat
data_corners_LatLon = GEOP.pixelToLatLon(sub_GMS_obj.data_corners_imXY, geotransform=sub_gt, projection=sub_prj)
sub_GMS_obj.data_corners_LonLat = [(YX[1], YX[0]) for YX in data_corners_LatLon]
# calculate data_corners_utm
data_corners_utmYX = GEOP.pixelToMapYX([ULxy, URxy, LLxy, LRxy], geotransform=sub_gt, projection=sub_prj) # FIXME asserts gt in UTM coordinates
sub_GMS_obj.data_corners_utm = [(YX[1], YX[0]) for YX in data_corners_utmYX]
# rename trueDataCornerLonLat to fullScene_corner_lonlat
sub_GMS_obj.fullScene_corner_lonlat = sub_GMS_obj.trueDataCornerLonLat
del sub_GMS_obj.trueDataCornerLonLat
del sub_GMS_obj.trueDataCornerPos # does not make sense anymore in the subset
return sub_GMS_obj
def to_MGRS_tiles(self, pixbuffer=10):
# type: (int) -> list
"""Returns a list of GMS objects representing the MGRS tiles for the given GMS object.
:param pixbuffer: <int> a buffer in pixel values used to generate an overlap between the returned MGRS tiles
:return:
"""
assert self.arr_shape == 'cube', "Only 'cube' objects can be cut into MGRS tiles. Got %s." % self.arr_shape
self.logger.info(
'Cutting scene %s (entity ID %s) into MGRS tiles...' % (self.scene_ID, self.entity_ID))
# get GeoDataFrame containing all overlapping MGRS tiles (MGRS geometries completely within nodata area are excluded)
GDF_MGRS_tiles = DB_T.get_overlapping_MGRS_tiles(job.conn_db_meta,
trueDataCornerLonLat=self.trueDataCornerLonLat)