Commit b4d7b1f0 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

major revision of previous commit; module gms_object now fully implemented;...

major revision of previous commit; module gms_object now fully implemented; fixed differences in results between Flink and Python execution mode

algorithms.GEOPROCESSING.GEOPROCESSING:
- Layerstacking(): added possible bug hint; update_dataset_related_attributes() now always executed
- get_lonlat_coord_array(): adjusted to handle GeoArray instances
- calc_VZA_array(): adjusted to handle GeoArray instances
- calc_SZA_SAA_array(): adjusted to handle GeoArray instances
- calc_RAA_array(): adjusted to handle GeoArray instances

algorithms.gms_object:
- __init__(): added attribute 'MGRS_info'
- renamed attribute mask_1bit to 'mask_nodata'
- revised set_pathes()
- revised __getstate__()
- revised __deepcopy__()
- revised coreg_needed()
- added property 'resamp_needed'
- added property 'arr'
- added property 'mask_nodata'
- added property 'mask_clouds'
- added property 'masks'
- added property 'pathGen'
- revised attributes2dict()
- revised from_disk()
- revised calc_mask_nodata()
- revised build_combined_masks_array()
- combine_tiles_to_ObjAttr(): added DeprecationWarning
- get_subset_obj(): adjusted to properly handle GeoArray instances
- added to_ENVI(), based on Obj2ENVI from OUT_W

algorithms.L1A_P.L1A_object:
- import_rasterdata() now also sets shape_fullArr
- archive_to_rasObj(): bugfix for overwriting Layerstacking result
- revised get_MetaObj()
- removed deprecated get_shape_fullArr()
- calc_TOARadRefTemp(): revised to properly handle GeoArray instances
- reference_data(): revised to properly handle GeoArray instances
- calc_cloud_mask(): revised to properly handle GeoArray instances
- calc_corner_positions(): added assertion; now robust to different WKT string formats
- revised add_rasterInfo_to_MetaObj()
- update_spec_vals_according_to_dtype(): now does not set nodata value anymore (property)

algorithms.L1B_P:
- Scene_finder:
    - ensured Python 2.7 compatibility
- L1B_object:
    - correct_spatial_shifts(): major revision fixing some bugs
    - removed deprecated join_deshift_results()
    - removed deprecated apply_deshift_results()

algorithms.L1C_P.L1C_Object:
- get_lonlat_coord_array(): commented deprecated code out
- calc_acquisition_illumination_geometry(): commented deprecated code out

algorithms.L2A_P.L2A_Object:
- get_DESHIFTER_configs(): some simplifications
- removed deprecated class DESHIFTER (now fully imported from external library CoReg_Sat)

algorithms.L2B_P.L2B_Object:
- revised __init__()

io.Output_writer:
- moved Obj2ENVI to gms_object

misc.helper_functions:
- get_job_summary(): bugfix: inputs given as multiple tiles of the same scene ID are no longer counted multiple times

misc.path_generator:
- added __getstate__() that automatically closes loggers
- added __setstate__()

processing.pipeline:
- adjusted output writer calls
- L1A_map(): removed calls to get_shape_fullArr() and calc_mask_nodata()
- L1A_map_1(): removed calls to get_shape_fullArr() and calc_mask_nodata()
- L2A_map(): added overwrite kwarg to calc_mask_nodata()
- removed deprecated L2A_map_1()
- removed deprecated L2A_map_2()
- renamed L2B_map_1 to L2B_map
- renamed L2C_map_1 to L2C_map

processing.process_controller.process_controller:
- added keyword 'db_host'
- simplified L2A_processing()
- create_job_summary(): bugfix: get_job_summary() now receives objects at the highest requested processing level instead of always receiving L2C objects

config:
- set_config(): added keyword 'db_host'
- Job:
    - added keyword 'db_host'
    - added attribute 'allow_subMultiprocessing'
    - database connection string is now dynamically adjusted to database host

- updated __version__
parent 5710555c
......@@ -15,7 +15,7 @@ from . import config
from .processing.process_controller import process_controller
__version__ = '20161207.01'
__version__ = '20170104.01'
__author__ = 'Daniel Scheffler'
__all__ = ['algorithms',
'io',
......
......@@ -49,6 +49,7 @@ from pyorbital import astronomy
import ephem
from shapely.geometry import MultiPoint
from py_tools_ds.ptds import GeoArray
from py_tools_ds.ptds.geo.coord_grid import snap_bounds_to_pixGrid
from py_tools_ds.ptds.geo.coord_trafo import transform_utm_to_wgs84, transform_wgs84_to_utm, mapXY2imXY, imXY2mapXY
from py_tools_ds.ptds.geo.projection import get_UTMzone, EPSG2WKT, isProjectedOrGeographic
......@@ -107,8 +108,8 @@ class GEOPROCESSING(object):
assert self.inDs is not None, "ERROR: Could not open %s!" %self.filename
elif isinstance(geodata,gdal.Dataset):
self.inDs = geodata
geodata = None
self.filepath, self.filename = None, self.inDs.GetDescription()
geodata = None
self.filepath, self.filename = None, self.inDs.GetDescription()
self.shortname, self.extension = os.path.splitext(self.filename)
# ****OBJECT ATTRIBUTES***************************************************
......@@ -180,9 +181,10 @@ class GEOPROCESSING(object):
self.subset = self.subset if self.subset[0]!='cube' else None
def update_dataset_related_attributes(self):
self.desc = self.inDs.GetDescription()
self.filepath, self.filename = os.path.split(self.desc)
self.desc = self.inDs.GetDescription()
self.filepath, self.filename = os.path.split(self.desc)
self.shortname, self.extension = os.path.splitext(self.filename)
if self.subset is None or self.subset == 'cube':
self.cols = self.inDs.RasterXSize
self.rows = self.inDs.RasterYSize
......@@ -196,6 +198,7 @@ class GEOPROCESSING(object):
self.bandsList = range(self.bandStart,self.bandEnd+1)
else:
self.subset_kwargs_to_cols_rows_bands_colStart_rowStart_bandStart()
self.DataProp = [self.cols, self.rows, self.bands, self.colStart, self.rowStart, self.bandStart]
self.drname_s = self.inDs.GetDriver().ShortName
self.drname_l = self.inDs.GetDriver().LongName
......@@ -1662,22 +1665,27 @@ class GEOPROCESSING(object):
if path_output is None:
dtype = gdalnumeric.LoadFile(layers_pathlist[0],0,0,1,1).dtype
stacked = np.empty((self.rows,self.cols,len(layers_pathlist)),dtype)
for i,p in enumerate(layers_pathlist):
self.logger.info('Adding band %s to Layerstack..' %os.path.basename(p))
if self.subset is None or self.subset[0] == 'cube':
stacked[:,:,i] = gdalnumeric.LoadFile(p)
else:
stacked[:,:,i] = gdalnumeric.LoadFile(p,self.colStart,self.rowStart,cols,rows)
return stacked
elif path_output == 'MEMORY':
elif path_output == 'MEMORY': # exec_mode == 'Flink'
stack_in_mem = gdal.GetDriverByName("MEM").Create("stack.mem", cols, rows, bands)
for idx,layer in enumerate(layers_pathlist):
current_band = gdal.Open(layer,GA_ReadOnly)
stack_in_mem.AddBand() ## data type is dynamically assigned
band_in_stack = stack_in_mem.GetRasterBand(idx+1)
if [self.rows,self.cols] == [current_band.RasterYSize,current_band.RasterXSize]:
self.logger.info('Adding band %s to Layerstack..' %os.path.basename(layer))
if self.subset is None or self.subset[0] == 'cube':
band_in_stack.WriteArray(current_band.GetRasterBand(1).ReadAsArray(), 0, 0)
else:
......@@ -1686,18 +1694,27 @@ class GEOPROCESSING(object):
else:
self.logger.info('Band %s skipped due to incompatible geometrical resolution.' %os.path.basename(layer))
current_band = band_in_stack = None
self.bands = bands
self.inDs = stack_in_mem
else:
else: # exec_mode == 'Python'
self.logger.info('Adding the following bands to Layerstack:')
[self.logger.info(os.path.basename(i)) for i in layers_pathlist]
if not os.path.isdir(os.path.dirname(path_output)): os.makedirs(os.path.dirname(path_output))
if os.path.exists(path_output): os.remove(path_output)
if os.path.exists(os.path.splitext(path_output)[0]+'.hdr'): os.remove(os.path.splitext(path_output)[0]+'.hdr')
if not os.path.isdir(os.path.dirname(path_output)):
os.makedirs(os.path.dirname(path_output))
if os.path.exists(path_output):
os.remove(path_output)
if os.path.exists(os.path.splitext(path_output)[0]+'.hdr'):
os.remove(os.path.splitext(path_output)[0]+'.hdr')
str_layers_pathlist = ' '.join(layers_pathlist)
if self.subset is None:
os.system("gdal_merge.py -q -o %s -of ENVI -seperate %s" %(path_output,str_layers_pathlist))
# FIXME this changes the format of the projection (maybe a GDAL bug?)
# FIXME normalize by EPSG2WKT(WKT2EPSG(WKT))
else:
# convert Pixel Coords to Geo Coords
orig_ds = gdal.Open(layers_pathlist[0],GA_ReadOnly)
......@@ -1711,21 +1728,45 @@ class GEOPROCESSING(object):
os.system("gdal_merge.py -q -o %s -ul_lr %s -of ENVI -seperate %s" %(path_output,ullr,str_layers_pathlist))
if [GT, PR] == [(0.0, 1.0, 0.0, 0.0, 0.0, 1.0),'']:
# delete output map info in case of arbitray coordinate system
with open (os.path.splitext(path_output)[0]+'.hdr','r') as inF: lines = inF.readlines()
# delete output map info in case of arbitrary coordinate system
with open (os.path.splitext(path_output)[0]+'.hdr','r') as inF:
lines = inF.readlines()
outContent = ''.join([i for i in lines if not re.search('map info', i, re.I)])
with open (os.path.splitext(path_output)[0]+'.hdr','w') as outF: outF.write(outContent)
with open (os.path.splitext(path_output)[0]+'.hdr','w') as outF:
outF.write(outContent)
assert os.path.exists(path_output) and os.path.exists(os.path.splitext(path_output)[0]+'.hdr'), \
"Layerstacking failed because output cannot be found."
self.inDs=gdal.Open(path_output)
self.update_dataset_related_attributes()
# writing via envi memmap (tiling-fähig)
# FileObj = envi.create_image(os.path.splitext(path_output)[0]+'.hdr',shape=(self.rows,self.cols,
# len(layers_pathlist)),dtype='int16',interleave='bsq',ext='.bsq', force=True) # 8bit for muliple masks
# in one file
# File_memmap = FileObj.open_memmap(writable=True)
# File_memmap[:,:,idx] = current_band.GetRasterBand(1).ReadAsArray()
# # validate the written format of the projection and change it if needed ## NOT NEEDED ANYMORE (GeoArray always validates projection when reading from disk)
# prj_written = GeoArray(path_output).prj
# if self.projection != prj_written and WKT2EPSG(prj_written) == WKT2EPSG(self.projection):
#
# with open(os.path.splitext(path_output)[0] + '.hdr', 'r') as inF:
# lines = inF.readlines()
# outContent = ''.join([line if not re.search('coordinate system string', line, re.I) else
# 'coordinate system string = %s' % self.projection for line in lines])
#
# with open(os.path.splitext(path_output)[0] + '.hdr', 'w') as outF:
# outF.write(outContent)
#
# self.inDs = gdal.Open(path_output)
count_outBands = GeoArray(path_output).bands
assert count_outBands == len(layers_pathlist), "Layerstacking failed because its output has only %s " \
"instead of %s bands." % (
count_outBands, len(layers_pathlist))
self.update_dataset_related_attributes()
# writing via envi memmap (tiling-fähig)
# FileObj = envi.create_image(os.path.splitext(path_output)[0]+'.hdr',shape=(self.rows,self.cols,
# len(layers_pathlist)),dtype='int16',interleave='bsq',ext='.bsq', force=True) # 8bit for muliple masks
# in one file
# File_memmap = FileObj.open_memmap(writable=True)
# File_memmap[:,:,idx] = current_band.GetRasterBand(1).ReadAsArray()
"""********Help_functions**************************************************"""
def GetBandProps(gdalBand):
......@@ -2977,7 +3018,7 @@ def get_lonlat_coord_array(shape_fullArr, arr_pos, geotransform, projection, nod
if nodata_mask is not None:
outFill = outFill if outFill else HLP_F.get_outFillZeroSaturated('float32')[0]
xcoord_ycoord_arr[nodata_mask==False] = outFill
xcoord_ycoord_arr[nodata_mask.astype(np.int8)==False] = outFill
UL_lonlat = (round(xcoord_ycoord_arr[0, 0, 0], 10), round(xcoord_ycoord_arr[0, 0, 1], 10))
UR_lonlat = (round(xcoord_ycoord_arr[0, cols - 1, 0], 10), round(xcoord_ycoord_arr[0, cols - 1, 1], 10))
......@@ -3014,8 +3055,8 @@ def calc_VZA_array(shape_fullArr, arr_pos, trueDataCornerPos, viewing_angle, FOV
:param nodata_mask: <numpy array>, used for declaring nodata values in the output VZA array
:param outFill: the value that is assigned to nodata area in the output VZA array"""
if nodata_mask is not None: assert isinstance(nodata_mask, np.ndarray), \
"'nodata_mask' must be a numpy array. Got %s" % type(nodata_mask)
if nodata_mask is not None: assert isinstance(nodata_mask, (GeoArray,np.ndarray)), \
"'nodata_mask' must be a numpy array or an instance of GeoArray. Got %s" % type(nodata_mask)
colsFullArr, rowsFullArr, bandsFullArr = shape_fullArr
rows,cols,bands,rowStart,rowEnd,colStart,colEnd = get_subsetProps_from_shapeFullArr_arrPos(shape_fullArr, arr_pos)
ul, ur, ll, lr = trueDataCornerPos
......@@ -3044,7 +3085,7 @@ def calc_VZA_array(shape_fullArr, arr_pos, trueDataCornerPos, viewing_angle, FOV
if nodata_mask is not None:
outFill = outFill if outFill else HLP_F.get_outFillZeroSaturated('float32')[0]
VZA_array[nodata_mask == False] = outFill
VZA_array[nodata_mask.astype(np.int8) == False] = outFill
return VZA_array
......@@ -3116,8 +3157,8 @@ def calc_SZA_SAA_array(shape_fullArr, arr_pos, AcqDate, CenterAcqTime, trueDataC
SZA/SAA = a + b*col + c*row + d*col*row.
:param lonlat_arr: """
if nodata_mask is not None: assert isinstance(nodata_mask, np.ndarray), \
"'nodata_mask' must be a numpy array. Got %s" % type(nodata_mask)
if nodata_mask is not None: assert isinstance(nodata_mask, (GeoArray, np.ndarray)), \
"'nodata_mask' must be a numpy array or an instance of GeoArray. Got %s" % type(nodata_mask)
if accurracy == 'fine': assert lonlat_arr is not None, "Calculating SZA/SAA with accurracy set to 'fine' " \
"requires lonlat_arr to be specified. SZA/SAA is calculated with accurracy = 'coarse'."
assert accurracy in ['fine', 'coarse'], "The keyword accuracy accepts only 'fine' or 'coarse'. Got %s" % accurracy
......@@ -3175,20 +3216,20 @@ def calc_SZA_SAA_array(shape_fullArr, arr_pos, AcqDate, CenterAcqTime, trueDataC
SAA_factors[3] * cols_arr * rows_arr).astype(np.float32)
if nodata_mask is not None:
SZA_array[nodata_mask == False] = outFill
SAA_array[nodata_mask == False] = outFill
SZA_array[nodata_mask.astype(np.int8) == False] = outFill
SAA_array[nodata_mask.astype(np.int8) == False] = outFill
return SZA_array, SAA_array
def calc_RAA_array(trueDataCornerLonLat, SAA_array, VAA_mean=None, nodata_mask=None, outFill=None):
if nodata_mask is not None: assert isinstance(nodata_mask, np.ndarray), \
"'nodata_mask' must be a numpy array. Got %s" % type(nodata_mask)
if nodata_mask is not None: assert isinstance(nodata_mask, (GeoArray, np.ndarray)), \
"'nodata_mask' must be a numpy array or an instance of GeoArray. Got %s" % type(nodata_mask)
if VAA_mean is None:
VAA_mean = calc_VAA_using_trueCornerLonLat(trueDataCornerLonLat)
RAA_array = SAA_array - VAA_mean
if nodata_mask is not None:
RAA_array[nodata_mask == False] = outFill
RAA_array[nodata_mask.astype(np.int8) == False] = outFill
return RAA_array
......
This diff is collapsed.
......@@ -30,9 +30,8 @@ from shapely.geometry import box
from CoReg_Sat import COREG, DESHIFTER
from py_tools_ds.ptds import GeoArray
from py_tools_ds.ptds.geo.coord_calc import corner_coord_to_minmax
from py_tools_ds.ptds.geo.coord_trafo import reproject_shapelyPoly, transform_any_prj
from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform
from py_tools_ds.ptds.geo.projection import prj_equal, EPSG2WKT
from py_tools_ds.ptds.geo.coord_trafo import reproject_shapelyGeometry, transform_any_prj
from py_tools_ds.ptds.geo.projection import prj_equal, EPSG2WKT, WKT2EPSG
from py_tools_ds.ptds.geo.vector.topology import get_overlap_polygon
from ..config import GMS_config as CFG
......@@ -246,7 +245,7 @@ class Scene_finder(object):
GDF['acquisitiondate'] = [*GDF['object'].map(lambda scene: scene.acquisitiondate)]
GDF['cloudcover'] = [*GDF['object'].map(lambda scene: scene.cloudcover)]
GDF['polyLonLat'] = [*GDF['object'].map(lambda scene: scene.polyLonLat)]
LonLat2UTM = lambda polyLL: reproject_shapelyPoly(polyLL, self.src_prj)
LonLat2UTM = lambda polyLL: reproject_shapelyGeometry(polyLL, 4326, self.src_prj)
GDF['polyUTM'] = [*GDF['polyLonLat'].map(LonLat2UTM)]
self.GDF_ref_scenes = GDF
......@@ -279,10 +278,10 @@ class Scene_finder(object):
if not GDF.empty:
#get overlap parameter
get_OL_prms = lambda poly: get_overlap_polygon(poly, self.src_footprint_poly)
GDF['overlapParams'] = [*GDF['polyLonLat'].map(get_OL_prms)]
GDF['overlap area'] = [*GDF['overlapParams'].map(lambda OL_prms: OL_prms['overlap area'])]
GDF['overlap percentage'] = [*GDF['overlapParams'].map(lambda OL_prms: OL_prms['overlap percentage'])]
GDF['overlap poly'] = [*GDF['overlapParams'].map(lambda OL_prms: OL_prms['overlap poly'])]
GDF['overlapParams'] = list(GDF['polyLonLat'].map(get_OL_prms))
GDF['overlap area'] = list(GDF['overlapParams'].map(lambda OL_prms: OL_prms['overlap area']))
GDF['overlap percentage'] = list(GDF['overlapParams'].map(lambda OL_prms: OL_prms['overlap percentage']))
GDF['overlap poly'] = list(GDF['overlapParams'].map(lambda OL_prms: OL_prms['overlap poly']))
del GDF['overlapParams']
# filter scenes out that have too less overlap
......@@ -295,8 +294,8 @@ class Scene_finder(object):
# get processing level of refernce scenes
query_procL = lambda sceneID: \
DB_T.get_info_from_postgreSQLdb(CFG.job.conn_database, 'scenes_proc', ['proc_level'], {'sceneid': sceneID})
GDF['temp_queryRes'] = [*GDF['sceneid'] .map(query_procL)]
GDF['proc_level'] = [*GDF['temp_queryRes'].map(lambda queryRes: queryRes[0][0] if queryRes else None)]
GDF['temp_queryRes'] = list(GDF['sceneid'] .map(query_procL))
GDF['proc_level'] = list(GDF['temp_queryRes'].map(lambda queryRes: queryRes[0][0] if queryRes else None))
GDF.drop('temp_queryRes',axis=1,inplace=True)
# filter scenes out that have not yet been processed
......@@ -312,7 +311,7 @@ class Scene_finder(object):
PG.path_generator(scene_ID=GDF_row['sceneid'], proc_level=GDF_row['proc_level']).get_path_imagedata()
check_exists = lambda path: os.path.exists(path)
GDF['path_ref'] = GDF.apply(lambda GDF_row: get_path_binary(GDF_row), axis=1)
GDF['refDs_exists'] = [*GDF['path_ref'].map(check_exists)]
GDF['refDs_exists'] = list(GDF['path_ref'].map(check_exists))
# filter scenes out where the corresponding dataset does not exist on fileserver
self.GDF_ref_scenes = GDF[GDF['refDs_exists'] == True]
......@@ -324,8 +323,8 @@ class Scene_finder(object):
# check if a proper entity ID can be gathered from database
query_eID = lambda sceneID: DB_T.get_info_from_postgreSQLdb(CFG.job.conn_database, 'scenes', ['entityid'],
{'id': sceneID}, records2fetch=1)
GDF['temp_queryRes'] = [*GDF['sceneid'] .map(query_eID)]
GDF['entityid'] = [*GDF['temp_queryRes'].map(lambda queryRes: queryRes[0][0] if queryRes else None)]
GDF['temp_queryRes'] = list(GDF['sceneid'] .map(query_eID))
GDF['entityid'] = list(GDF['temp_queryRes'].map(lambda queryRes: queryRes[0][0] if queryRes else None))
GDF.drop('temp_queryRes', axis=1, inplace=True)
# filter scenes out that have no entity ID (database errors)
......@@ -340,7 +339,7 @@ class Scene_finder(object):
get_prj = lambda path_binary: \
read_ENVIhdr_to_dict(os.path.splitext(path_binary)[0] + '.hdr')['coordinate system string']
is_prj_equal = lambda path_binary: prj_equal(self.src_prj, get_prj(path_binary))
GDF['prj_equal'] = [*GDF['path_ref'].map(is_prj_equal)]
GDF['prj_equal'] = list(GDF['path_ref'].map(is_prj_equal))
# filter scenes out that have a different projection
self.GDF_ref_scenes = GDF[GDF['prj_equal'] == True]
......@@ -664,6 +663,7 @@ class L1B_object(L1A_object):
if COREG_obj.success:
self.logger.info("Calculated map shifts (X,Y): %s / %s"
% (COREG_obj.x_shift_map, COREG_obj.y_shift_map)) # FIXME direkt in calculate_spatial_shifts loggen
else:
# TODO add database entry with error hint
[self.logger.error('ERROR during coregistration of scene %s (entity ID %s):\n%s'
......@@ -690,119 +690,66 @@ class L1B_object(L1A_object):
self.coreg_info.update({'success' : True if not self.coreg_needed else False}) # False means spatRef not available
def correct_spatial_shifts(self):
def correct_spatial_shifts(self, cliptoextent=True, v=False):
"""Corrects the spatial shifts calculated by self.coregister_spatially()."""
## get target bounds # TODO implement boxObj call instead here
trueDataCornerUTM = [transform_any_prj(EPSG2WKT(4326), self.meta_odict['coordinate system string'], x, y)
for x, y in self.trueDataCornerLonLat]
xmin, xmax, ymin, ymax = corner_coord_to_minmax(trueDataCornerUTM)
mapBounds = box(xmin, ymin, xmax, ymax).bounds
# correct shifts and clip to extent
for attrname in ['arr', 'masks']:
self.logger.info("Correcting spatial shifts for attribute '%s'..." %attrname)
# correct shifts
meta = self.meta_odict if attrname == 'arr' else self.masks_meta
gt, prj = mapinfo2geotransform(meta['map info']), meta['coordinate system string']
geoArr = GeoArray(getattr(self,attrname), tuple(gt), prj)
rspAlg = 'cubic' if attrname=='arr' else 'nearest'
DS = DESHIFTER(geoArr, self.coreg_info,
target_xyGrid=[CFG.usecase.spatial_ref_gridx, CFG.usecase.spatial_ref_gridy],
cliptoextent=True, clipextent=mapBounds, align_grids=True, resamp_alg=rspAlg, v=False)
DS.correct_shifts()
setattr(self,attrname, DS.arr_shifted)
# update coreg_info
if attrname=='arr':
self.coreg_info['is shifted'] = DS.is_shifted
self.coreg_info['is resampled'] = DS.is_resampled
# update geoinformations and array shape related attributes
self.logger.info("Updating geoinformations of '%s' attribute..." %attrname)
if attrname=='arr':
self.meta_odict['map info'] = DS.updated_map_info
self.meta_odict['coordinate system string'] = DS.updated_projection
self.shape_fullArr = DS.arr_shifted.shape # TODO move this into a property
self.meta_odict['lines'], self.meta_odict['samples'] = DS.arr_shifted.shape[:2]
if DS.arr_shifted.ndim == 2:
self.meta_odict['bands'] = DS.arr_shifted.shape[2]
else:
self.masks_meta['map info'] = DS.updated_map_info
self.masks_meta['coordinate system string'] = DS.updated_projection
self.masks_meta['lines'], self.masks_meta['samples'] = DS.arr_shifted.shape[:2]
def join_deshift_results(self, list_deshiftRes_groupedBySceneID):
"""
:param list_deshiftRes_groupedBySceneID: a list of deshift_results returned by L1B_P.DESHIFTER.correct_shifts
in which every item belongs to the same attribute
:type list_deshiftRes_groupedBySceneID: [collections.OrderedDict(),collections.OrderedDict(),...]
"""
list_deshiftRes_groupedBySceneID = [list_deshiftRes_groupedBySceneID]\
if not isinstance(list_deshiftRes_groupedBySceneID, list) else list_deshiftRes_groupedBySceneID
deshiftRes_groupedByAttr = HLP_F.group_dicts_by_key(list_deshiftRes_groupedBySceneID, 'attrname')
self.deshift_results = collections.OrderedDict()
for deshiftRes_group in deshiftRes_groupedByAttr:
OrdDic0 = deshiftRes_group[0]
attrname = OrdDic0['attrname']
bandsNrs = sorted([OrdDic['band'] for OrdDic in deshiftRes_group])
bandsDic = collections.OrderedDict()
for band in bandsNrs:
OrdDic = [OrdDic for OrdDic in deshiftRes_group if OrdDic['band']==band][0]
bandsDic.update({band:OrdDic})
self.deshift_results.update({attrname:bandsDic})
def apply_deshift_results(self):
"""Applies the deshifting results to the corresponding attributes of the GMS object overwriting existing arrays.
"""
#if job.exec_mode == 'Flink':
assert hasattr(self,'deshift_results'), "%s has no attribute 'deshift_results'." \
"Please execute the method 'join_deshift_results()' first!"
deshiftRes_groupedByAttr = self.deshift_results
for attrname in deshiftRes_groupedByAttr:
OrdDicsAllBands = deshiftRes_groupedByAttr[attrname]
bandsNrs = list(OrdDicsAllBands.keys()) # TODO Layerbandsassignment beachten
OrdDic0 = OrdDicsAllBands[bandsNrs[0]]
if OrdDic0['arr_shifted'] is not None:
# overwrite array, only if it contains new data (if it has been clipped or resampled by DESHIFTER)
self.logger.info("Applying results of shift-correction to attribute '%s' of entity ID %s..."
%(attrname,self.scene_ID))
compl_arr = np.dstack(tuple([OrdDicsAllBands[bandNr]['arr_shifted'] for bandNr in bandsNrs])) \
if len(bandsNrs)>1 else OrdDic0['arr_shifted']
setattr(self,attrname,compl_arr)
if cliptoextent or self.resamp_needed or (self.coreg_needed and self.coreg_info['success']):
## get target bounds # TODO implement boxObj call instead here
trueDataCornerUTM = [transform_any_prj(EPSG2WKT(4326), self.meta_odict['coordinate system string'], x, y)
for x, y in self.trueDataCornerLonLat]
xmin, xmax, ymin, ymax = corner_coord_to_minmax(trueDataCornerUTM)
mapBounds = box(xmin, ymin, xmax, ymax).bounds
# correct shifts and clip to extent
for attrname in ['arr', 'masks']:
if self.coreg_needed:
if self.coreg_info['success']:
self.logger.info("Correcting spatial shifts for attribute '%s'..." %attrname)
elif cliptoextent:
self.logger.info("Attribute '%s' has only been clipped to it's extent because no valid "
"shifts have been detected and the pixel grid equals the target grid." % attrname)
elif self.resamp_needed:
self.logger.info("Resampling attribute '%s' to target grid..." % attrname)
# correct shifts
#print(attrname)
geoArr = getattr(self, attrname)
#print('L1B718 geoArr', geoArr)
DS = DESHIFTER(geoArr, self.coreg_info,
target_xyGrid = [CFG.usecase.spatial_ref_gridx, CFG.usecase.spatial_ref_gridy],
cliptoextent = cliptoextent,
clipextent = mapBounds,
align_grids = True,
resamp_alg = 'cubic' if attrname=='arr' else 'nearest',
CPUs = None if CFG.job.allow_subMultiprocessing else 1,
progress = True if v else False,
q = True,
v = v)
DS.correct_shifts()
setattr(self,attrname, DS.arr_shifted)
# update coreg_info
if attrname=='arr':
self.coreg_info['is shifted'] = DS.is_shifted
self.coreg_info['is resampled'] = DS.is_resampled
# update geoinformations and array shape related attributes
self.logger.info("Updating geoinformations of '%s' attribute..." %attrname)
if attrname=='arr':
self.shape_fullArr = compl_arr.shape
self.meta_odict['lines'], self.meta_odict['samples'] = compl_arr.shape[:2]
if len(compl_arr.shape)==2: self.meta_odict['bands'] = compl_arr.shape[2]
elif attrname=='masks':
self.masks_meta['lines'], self.masks_meta['samples'] = compl_arr.shape[:2]
# overwrite geoinformations
if attrname == 'arr':
self.logger.info("Updating geoinformations of 'arr' attribute...")
self.coreg_info['is shifted'] = OrdDic0['is shifted']
self.coreg_info['is resampled'] = OrdDic0['is resampled']
if OrdDic0['updated map info']:
self.meta_odict['map info'] = OrdDic0['updated map info']
if OrdDic0['updated projection']:
self.meta_odict['projection'] = OrdDic0['updated projection']
elif attrname == 'masks':
self.logger.info("Updating geoinformations of 'masks' attribute...")
if OrdDic0['updated map info']:
self.masks_meta['map info'] = OrdDic0['updated map info']
if OrdDic0['updated projection']:
self.masks_meta['projection'] = OrdDic0['updated projection']
del self.deshift_results
self.meta_odict['map info'] = DS.updated_map_info
self.meta_odict['coordinate system string'] = EPSG2WKT(WKT2EPSG(DS.updated_projection))
self.shape_fullArr = DS.arr_shifted.shape
self.meta_odict['lines'], self.meta_odict['samples'] = DS.arr_shifted.shape[:2]
if DS.arr_shifted.ndim == 2:
self.meta_odict['bands'] = DS.arr_shifted.shape[2]
else:
self.masks_meta['map info'] = DS.updated_map_info
self.masks_meta['coordinate system string'] = EPSG2WKT(WKT2EPSG(DS.updated_projection))
self.masks_meta['lines'], self.masks_meta['samples'] = DS.arr_shifted.shape[:2]
self.resamp_needed = False
self.coreg_needed = False
......
......@@ -32,8 +32,6 @@ from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform
from ..config import GMS_config as CFG
from ..misc import helper_functions as HLP_F
from . import GEOPROCESSING as GEOP
from ..io import Input_reader as INP_R
from ..misc import path_generator as PG
from .L1B_P import L1B_object
......@@ -64,16 +62,16 @@ class L1C_object(L1B_object):
rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[3:7]
self.log_for_fullArr_or_firstTile('Calculating lonlat array.',subset)
if hasattr(self, 'mask_nodata') and isinstance(self.mask_nodata, np.ndarray):
mask_1bit_temp = self.mask_nodata[rS:rE + 1, cS:cE + 1]
else:
path_masks = PG.path_generator(self.__dict__.copy(),proc_level='L1B').get_path_maskdata()
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_nodata', self.logger, subset)
#if hasattr(self, 'mask_nodata') and isinstance(self.mask_nodata, np.ndarray):
# mask_1bit_temp = self.mask_nodata[rS:rE + 1, cS:cE + 1]
#else:
# path_masks = PG.path_generator(self.__dict__.copy(),proc_level='L1B').get_path_maskdata()
# mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_nodata', self.logger, subset)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
lonlat_arr = GEOP.get_lonlat_coord_array(self.shape_fullArr, subset[1],
mapinfo2geotransform(self.meta_odict['map info']),
self.meta_odict['coordinate system string'], mask_1bit_temp, fillVal)[0]
self.meta_odict['coordinate system string'], self.mask_nodata, fillVal)[0]
if CFG.job.exec_mode == 'Flink' and subset[0]=='cube':
self.lonlat_arr = lonlat_arr
......@@ -94,11 +92,11 @@ class L1C_object(L1B_object):
rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[3:7]
self.log_for_fullArr_or_firstTile('Calculating acquisition and illumination geometry arrays.',subset)
if hasattr(self, 'mask_nodata') and isinstance(self.mask_nodata, np.ndarray):
mask_1bit_temp = self.mask_nodata[rS:rE + 1, cS:cE + 1]
else:
path_masks = PG.path_generator(self.__dict__.copy(),proc_level='L1B').get_path_maskdata()
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_nodata', self.logger, subset)
#if hasattr(self, 'mask_nodata') and isinstance(self.mask_nodata, np.ndarray):
# mask_1bit_temp = self.mask_nodata[rS:rE + 1, cS:cE + 1]
#else:
# path_masks = PG.path_generator(self.__dict__.copy(),proc_level='L1B').get_path_maskdata()
# mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_nodata', self.logger, subset)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
assert self.meta_odict, "Missing 'meta' attribute. Run self.Meta2ODict() first!"
......@@ -109,17 +107,17 @@ class L1C_object(L1B_object):
else:
VZA_arr = GEOP.calc_VZA_array(self.shape_fullArr, subset[1], self.trueDataCornerPos,
float(self.meta_odict['ViewingAngle']), float(self.meta_odict['FieldOfView']),
self.logger, mask_1bit_temp, fillVal)
self.logger, self.mask_nodata, fillVal)
SZA_arr, SAA_arr = GEOP.calc_SZA_SAA_array(self.shape_fullArr, subset[1], self.meta_odict['AcqDate'],
self.meta_odict['AcqTime'], self.trueDataCornerPos,
self.trueDataCornerLonLat, self.meta_odict['overpass duraction sec'],
self.logger, mask_1bit_temp, fillVal,
self.logger, self.mask_nodata, fillVal,
accurracy=CFG.job.SZA_SAA_calculation_accurracy,
lonlat_arr=self.lonlat_arr
if CFG.job.SZA_SAA_calculation_accurracy == 'fine' else None)
if 'IncidenceAngle_arrProv' in self.meta_odict and self.meta_odict['IncidenceAngle_arrProv']:
pass # FIXME Sentinel-2s viewing azimuth array is currently not used (instead a static VAA is used)
RAA_arr = GEOP.calc_RAA_array(self.trueDataCornerLonLat, SAA_arr, self.VAA_mean, mask_1bit_temp, fillVal)
RAA_arr = GEOP.calc_RAA_array(self.trueDataCornerLonLat, SAA_arr, self.VAA_mean, self.mask_nodata, fillVal)
if CFG.job.exec_mode == 'Flink' and subset is None:
self.VZA_arr, self.SZA_arr, self.SAA_arr, self.RAA_arr = VZA_arr, SZA_arr, SAA_arr, RAA_arr
......
This diff is collapsed.
......@@ -12,7 +12,6 @@ from scipy.interpolate import interp1d
from ..config import GMS_config as CFG
from .L2A_P import L2A_object
from ..io import Input_reader as INP_R
class L2B_object(L2A_object):
......@@ -34,9 +33,7 @@ class L2B_object(L2A_object):
self.log_for_fullArr_or_firstTile('Performing spectral homogenization (%s) with target wavelength '
'positions at %s nm.' %(kind,', '.join(np.array(tgt_cwls[:-1]).astype(str))+' and %s' %tgt_cwls[-1]))
inArray = self.arr if isinstance(self.arr,np.ndarray) else \
INP_R.read_ENVIfile(self.arr, self.arr_shape, self.arr_pos, self.logger, q=1)
self.arr = self.interpolate_cube_linear(inArray,src_cwls,tgt_cwls) if kind=='linear' else self.arr
self.arr = self.interpolate_cube_linear(self.arr,src_cwls,tgt_cwls) if kind=='linear' else self.arr