Commit 222775c8 authored by Daniel Scheffler

implemented GMS_object as subclass of GeoArray

parent 007965bf
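
In short, GMS_object now inherits from GeoArray instead of plain object: the constructor calls super().__init__() with an empty placeholder array, self.meta is renamed to self.meta_odict, self.mask_1bit is renamed to self.mask_nodata, and calc_mask_nodata() is reimplemented on top of the inherited array interface. A minimal sketch of the inheritance pattern, using a stand-in GeoArray class (anything not visible in the diff below is an assumption, not the project API):

    import numpy as np

    class GeoArray:  # minimal stand-in for the real GeoArray base class
        def __init__(self, arr, geotransform=None, projection=None):
            self.arr = np.asarray(arr)

        @property
        def ndim(self):
            return self.arr.ndim

    class GMS_object(GeoArray):  # pattern introduced by this commit
        def __init__(self):
            super().__init__(np.zeros((0,)))  # empty placeholder array, as in the diff below
            self.meta_odict = None            # renamed from self.meta
            self.mask_clouds = None

    obj = GMS_object()
    print(obj.ndim)  # array-related properties are now available directly on the GMS object
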
......@@ -1218,7 +1218,7 @@ class GEOPROCESSING(object):
LR = corners[distLR.index(min(distLR))]
# print([UL, UR, LL, LR])
# if UL[0] == 0 and UR[0] == 0:
# envi.save_image(os.path.abspath('./testing/out/__faulty_mask_1bit.hdr'), self.mask_1bit, dtype= 'uint8', interleave='bsq', ext='.bsq',force=True)
# envi.save_image(os.path.abspath('./testing/out/__faulty_mask_1bit.hdr'), self.mask_nodata, dtype= 'uint8', interleave='bsq', ext='.bsq',force=True)
UL_LonLat = self.pixelToWorldCoord_using_geotransform_and_projection(UL, 'LonLat')
UR_LonLat = self.pixelToWorldCoord_using_geotransform_and_projection(UR, 'LonLat')
LL_LonLat = self.pixelToWorldCoord_using_geotransform_and_projection(LL, 'LonLat')
......
......@@ -444,12 +444,15 @@ class L1A_object(GMS_object):
self.logger.info("The following metadata have been read:")
[self.logger.info("%20s : %-4s" % (key, val)) for key, val in self.MetaObj.get_meta_overview().items()]
# set some object attributes directly linked to metadata
self.subsystem = self.MetaObj.Subsystem
self.nodata = self.MetaObj.spec_vals['fill']
def set_arr_desc_from_MetaObj(self):
"""Sets self.arr_desc according to MetaObj.PhysUnit. To be called directly after getting MetaObj.
"""
# TODO property
self.arr_desc = 'DN' if self.MetaObj.PhysUnit=='DN' else \
'Rad' if self.MetaObj.PhysUnit=="W * m-2 * sr-1 * micrometer-1" else \
'Ref' if re.search('TOA_Reflectance',self.MetaObj.PhysUnit,re.I) else \
......@@ -634,7 +637,7 @@ class L1A_object(GMS_object):
if rasObj.get_projection_type() == 'UTM':
self.MetaObj.CornerTieP_UTM = rasObj.get_corner_coordinates('UTM')
self.meta = self.MetaObj.Meta2ODict() # important in order to keep geotransform/projection
self.meta_odict = self.MetaObj.Meta2ODict() # important in order to keep geotransform/projection
if CFG.job.exec_mode=='Flink':
self.delete_tempFiles() # these files are needed later in Python execution mode
self.MetaObj.Dataname = previous_dataname # /vsi.. pointing directly to raw data archive (which exists)
......@@ -653,7 +656,7 @@ class L1A_object(GMS_object):
bands, rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[2:7]
arr_isPath = isinstance(self.arr, str) and os.path.isfile(self.arr) # FIXME
inPath = self.arr if arr_isPath else self.MetaObj.Dataname if \
(hasattr(self,'MetaObj') and self.MetaObj) else self.meta['Dataname'] # FIXME ersetzen durch path generator?
(hasattr(self,'MetaObj') and self.MetaObj) else self.meta_odict['Dataname'] # FIXME ersetzen durch path generator?
if not self.path_cloud_class_obj:
self.log_for_fullArr_or_firstTile('Cloud masking is not yet implemented for %s %s...'
......@@ -757,19 +760,19 @@ class L1A_object(GMS_object):
"""Get true corner positions in the form
[UL, UR, LL, LR] as [(ULrow,ULcol),(URrow,URcol),...],[(ULLon,ULLat),(URLon,URLat),..]"""
assert hasattr(self,'mask_1bit') and self.mask_1bit is not None, "The L1A object needs to have a nodata mask."
assert hasattr(self,'mask_nodata') and self.mask_nodata is not None, "The L1A object needs to have a nodata mask."
self.logger.info('Calculating true data corner positions (image and world coordinates)...')
if re.search('ETM+', self.sensor) and self.acquisition_date > datetime.datetime(year=2003, month=5, day=31): # FIXME add time
self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_1bit,algorithm='numpy')
self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, algorithm='numpy')
else:
self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_1bit)
self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_nodata)
trueCorPosXY = [tuple(reversed(i)) for i in self.trueDataCornerPos]
gt = mapinfo2geotransform(self.MetaObj.map_info
if hasattr(self,'MetaObj') and self.MetaObj else self.meta['map info'])
if hasattr(self,'MetaObj') and self.MetaObj else self.meta_odict['map info'])
prj = self.MetaObj.projection if hasattr(self,'MetaObj') and self.MetaObj else \
self.meta['projection'] if 'projection' in self.meta else self.meta['coordinate system string']
self.meta_odict['projection'] if 'projection' in self.meta_odict else self.meta_odict['coordinate system string']
trueCorLatLon = pixelToLatLon(trueCorPosXY,geotransform=gt,projection=prj)
self.trueDataCornerLonLat = [tuple(reversed(i)) for i in trueCorLatLon]
......@@ -795,7 +798,7 @@ class L1A_object(GMS_object):
def add_rasterInfo_to_MetaObj(self, custom_rasObj=None):
"""Add the attributes 'rows','cols','bands','map_info','projection' and 'physUnit' to self.MetaObj"""
# TODO is this info still needed in MetaObj?
project_dir = os.path.abspath(os.curdir)
if self.MetaObj.Dataname.startswith('/vsi'): os.chdir(os.path.dirname(self.path_archive))
rasObj = custom_rasObj if custom_rasObj else GEOP.GEOPROCESSING(self.MetaObj.Dataname, self.logger)
......@@ -810,6 +813,9 @@ class L1A_object(GMS_object):
os.chdir(project_dir)
self.MetaObj.bands = 1 if len(self.arr.shape)==2 else self.arr.shape[2]
self.geotransform = mapinfo2geotransform(self.MetaObj.map_info)
self.projection = self.MetaObj.projection
def update_spec_vals_according_to_dtype(self,dtype=None):
"""Update self.MetaObj.spec_vals.
......@@ -827,6 +833,7 @@ class L1A_object(GMS_object):
dtype = arr.dtype
self.MetaObj.spec_vals['fill'],self.MetaObj.spec_vals['zero'],self.MetaObj.spec_vals['saturated'] = \
HLP_F.get_outFillZeroSaturated(dtype)
self.nodata = self.MetaObj.spec_vals['fill']
def calc_mean_VAA(self):
......
......@@ -549,7 +549,7 @@ class L1B_object(L1A_object):
def get_spatial_reference_scene(self):
boundsLonLat = corner_coord_to_minmax(self.trueDataCornerLonLat)
footprint_poly = HLP_F.CornerLonLat_to_shapelyPoly(self.trueDataCornerLonLat)
RSF = Scene_finder(boundsLonLat, self.acquisition_date, self.meta['coordinate system string'],
RSF = Scene_finder(boundsLonLat, self.acquisition_date, self.meta_odict['coordinate system string'],
footprint_poly, 20, 0, 20, 30, 10)
# run spatial query
......@@ -584,8 +584,8 @@ class L1B_object(L1A_object):
:param v: verbose mode
"""
# get spectral characteristics
shift_cwl = [float(i) for i in self.meta['wavelength']]
shift_fwhm = [float(i) for i in self.meta['bandwidths']]
shift_cwl = [float(i) for i in self.meta_odict['wavelength']]
shift_fwhm = [float(i) for i in self.meta_odict['bandwidths']]
with open(PG.path_generator(scene_ID=self.spatRef_scene.scene_ID).get_path_gmsfile()) as inF:
ref_gmsDict = json.load(inF)
ref_cwl = [float(i) for i in ref_gmsDict['meta']['wavelength']]
......@@ -645,8 +645,8 @@ class L1B_object(L1A_object):
'align_grids' : True, # FIXME not needed here
'match_gsd' : True, # FIXME not needed here
'data_corners_ref' : [[x,y] for x,y in self.spatRef_scene.polyUTM.convex_hull.exterior.coords],
'data_corners_tgt' : [transform_any_prj(EPSG2WKT(4326),self.meta['coordinate system string'],
x,y) for x,y in HLP_F.reorder_CornerLonLat(self.trueDataCornerLonLat)],
'data_corners_tgt' : [transform_any_prj(EPSG2WKT(4326), self.meta_odict['coordinate system string'],
x, y) for x,y in HLP_F.reorder_CornerLonLat(self.trueDataCornerLonLat)],
'nodata' : (HLP_F.get_outFillZeroSaturated(geoArr_ref .dtype)[0],
HLP_F.get_outFillZeroSaturated(geoArr_shift.dtype)[0]),
'ignore_errors' : True,
......@@ -678,12 +678,12 @@ class L1B_object(L1A_object):
self.coreg_info.update({'corrected_shifts_px' : {'x':0, 'y':0 }})
self.coreg_info.update({'corrected_shifts_map' : {'x':0, 'y':0 }})
self.coreg_info.update({'original map info' : self.meta['map info']})
self.coreg_info.update({'original map info' : self.meta_odict['map info']})
self.coreg_info.update({'updated map info' : None})
self.coreg_info.update({'reference scene ID' : None})
self.coreg_info.update({'reference entity ID' : None})
self.coreg_info.update({'reference geotransform': None})
self.coreg_info.update({'reference projection' : self.meta['coordinate system string']}) # must be the own projection in order to avoid overwriting with a wring EPSG
self.coreg_info.update({'reference projection' : self.meta_odict['coordinate system string']}) # must be the scene's own projection in order to avoid overwriting it with a wrong EPSG
self.coreg_info.update({'reference extent' : {'rows':None, 'cols':None }})
self.coreg_info.update({'reference grid' : [list(CFG.usecase.spatial_ref_gridx),
list(CFG.usecase.spatial_ref_gridy)]})
......@@ -694,7 +694,7 @@ class L1B_object(L1A_object):
"""Corrects the spatial shifts calculated by self.coregister_spatially()."""
## get target bounds # TODO implement boxObj call instead here
trueDataCornerUTM = [transform_any_prj(EPSG2WKT(4326), self.meta['coordinate system string'], x, y)
trueDataCornerUTM = [transform_any_prj(EPSG2WKT(4326), self.meta_odict['coordinate system string'], x, y)
for x, y in self.trueDataCornerLonLat]
xmin, xmax, ymin, ymax = corner_coord_to_minmax(trueDataCornerUTM)
mapBounds = box(xmin, ymin, xmax, ymax).bounds
......@@ -704,7 +704,7 @@ class L1B_object(L1A_object):
self.logger.info("Correcting spatial shifts for attribute '%s'..." %attrname)
# correct shifts
meta = self.meta if attrname=='arr' else self.masks_meta
meta = self.meta_odict if attrname == 'arr' else self.masks_meta
gt, prj = mapinfo2geotransform(meta['map info']), meta['coordinate system string']
geoArr = GeoArray(getattr(self,attrname), tuple(gt), prj)
rspAlg = 'cubic' if attrname=='arr' else 'nearest'
......@@ -722,12 +722,12 @@ class L1B_object(L1A_object):
# update geoinformations and array shape related attributes
self.logger.info("Updating geoinformations of '%s' attribute..." %attrname)
if attrname=='arr':
self.meta['map info'] = DS.updated_map_info
self.meta['coordinate system string'] = DS.updated_projection
self.meta_odict['map info'] = DS.updated_map_info
self.meta_odict['coordinate system string'] = DS.updated_projection
self.shape_fullArr = DS.arr_shifted.shape # TODO move this into a property
self.meta['lines'], self.meta['samples'] = DS.arr_shifted.shape[:2]
self.meta_odict['lines'], self.meta_odict['samples'] = DS.arr_shifted.shape[:2]
if DS.arr_shifted.ndim == 2:
self.meta['bands'] = DS.arr_shifted.shape[2]
self.meta_odict['bands'] = DS.arr_shifted.shape[2]
else:
self.masks_meta['map info'] = DS.updated_map_info
self.masks_meta['coordinate system string'] = DS.updated_projection
......@@ -781,8 +781,8 @@ class L1B_object(L1A_object):
if attrname=='arr':
self.shape_fullArr = compl_arr.shape
self.meta['lines'],self.meta['samples'] = compl_arr.shape[:2]
if len(compl_arr.shape)==2: self.meta['bands'] = compl_arr.shape[2]
self.meta_odict['lines'], self.meta_odict['samples'] = compl_arr.shape[:2]
if len(compl_arr.shape)==3: self.meta_odict['bands'] = compl_arr.shape[2]
elif attrname=='masks':
self.masks_meta['lines'], self.masks_meta['samples'] = compl_arr.shape[:2]
......@@ -792,9 +792,9 @@ class L1B_object(L1A_object):
self.coreg_info['is shifted'] = OrdDic0['is shifted']
self.coreg_info['is resampled'] = OrdDic0['is resampled']
if OrdDic0['updated map info']:
self.meta['map info'] = OrdDic0['updated map info']
self.meta_odict['map info'] = OrdDic0['updated map info']
if OrdDic0['updated projection']:
self.meta['projection'] = OrdDic0['updated projection']
self.meta_odict['projection'] = OrdDic0['updated projection']
elif attrname == 'masks':
self.logger.info("Updating geoinformations of 'masks' attribute...")
if OrdDic0['updated map info']:
......
......@@ -55,8 +55,8 @@ class L1C_object(L1B_object):
def get_lonlat_coord_array(self, subset=None):
"""Calculates pixelwise 2D-array with longitude and latitude coordinates. Supports 3 modes for subsetting:
(1) called with given subset argument if self.mask_1bit is an array: passes array subset
(2) called with given subset argument if self.mask_1bit is NO array: reads mask subset from disk
(1) called with given subset argument if self.mask_nodata is an array: passes array subset
(2) called with given subset argument if self.mask_nodata is not an array: reads the mask subset from disk
(3) called without subset argument but L1A obj represents a block subset: self.arr_pos is passed for subsetting"""
subset = subset if subset else [self.arr_shape, self.arr_pos]
......@@ -64,16 +64,16 @@ class L1C_object(L1B_object):
rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[3:7]
self.log_for_fullArr_or_firstTile('Calculating lonlat array.',subset)
if hasattr(self, 'mask_1bit') and isinstance(self.mask_1bit, np.ndarray):
mask_1bit_temp = self.mask_1bit[rS:rE + 1, cS:cE + 1]
if hasattr(self, 'mask_nodata') and isinstance(self.mask_nodata, np.ndarray):
mask_1bit_temp = self.mask_nodata[rS:rE + 1, cS:cE + 1]
else:
path_masks = PG.path_generator(self.__dict__.copy(),proc_level='L1B').get_path_maskdata()
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_1bit', self.logger, subset)
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_nodata', self.logger, subset)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
lonlat_arr = GEOP.get_lonlat_coord_array(self.shape_fullArr, subset[1],
mapinfo2geotransform(self.meta['map info']),
self.meta['coordinate system string'], mask_1bit_temp, fillVal)[0]
mapinfo2geotransform(self.meta_odict['map info']),
self.meta_odict['coordinate system string'], mask_1bit_temp, fillVal)[0]
if CFG.job.exec_mode == 'Flink' and subset[0]=='cube':
self.lonlat_arr = lonlat_arr
......@@ -85,8 +85,8 @@ class L1C_object(L1B_object):
"""Calculates pixelwise arrays for viewing zenith angle (VZA), sun zenith angle (SZA),
sun azimuth angle (SAA) and relative azimuth angle (RAA).
Supports 3 modes for subsetting:
(1) called with given subset argument if self.mask_1bit is an array: passes array subset
(2) called with given subset argument if self.mask_1bit is NO array: reads mask subset from disk
(1) called with given subset argument if self.mask_nodata is an array: passes array subset
(2) called with given subset argument if self.mask_nodata is not an array: reads the mask subset from disk
(3) called without subset argument but L1A obj represents a block subset: self.arr_pos is passed for subsetting"""
subset = subset if subset else [self.arr_shape, self.arr_pos]
......@@ -94,30 +94,30 @@ class L1C_object(L1B_object):
rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[3:7]
self.log_for_fullArr_or_firstTile('Calculating acquisition and illumination geometry arrays.',subset)
if hasattr(self, 'mask_1bit') and isinstance(self.mask_1bit, np.ndarray):
mask_1bit_temp = self.mask_1bit[rS:rE + 1, cS:cE + 1]
if hasattr(self, 'mask_nodata') and isinstance(self.mask_nodata, np.ndarray):
mask_1bit_temp = self.mask_nodata[rS:rE + 1, cS:cE + 1]
else:
path_masks = PG.path_generator(self.__dict__.copy(),proc_level='L1B').get_path_maskdata()
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_1bit', self.logger, subset)
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_nodata', self.logger, subset)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
assert self.meta, "Missing 'meta' attribute. Run self.Meta2ODict() first!"
assert self.meta_odict, "Missing 'meta_odict' attribute. Run self.MetaObj2ODict() first!"
if 'ViewingAngle_arrProv' in self.meta and self.meta['ViewingAngle_arrProv']:
VZA_arr = GEOP.adjust_acquisArrProv_to_shapeFullArr(self.meta['ViewingAngle_arrProv'],self.shape_fullArr,
subset,bandwise=0)
if 'ViewingAngle_arrProv' in self.meta_odict and self.meta_odict['ViewingAngle_arrProv']:
VZA_arr = GEOP.adjust_acquisArrProv_to_shapeFullArr(self.meta_odict['ViewingAngle_arrProv'], self.shape_fullArr,
subset, bandwise=0)
else:
VZA_arr = GEOP.calc_VZA_array(self.shape_fullArr, subset[1], self.trueDataCornerPos,
float(self.meta['ViewingAngle']), float(self.meta['FieldOfView']),
float(self.meta_odict['ViewingAngle']), float(self.meta_odict['FieldOfView']),
self.logger, mask_1bit_temp, fillVal)
SZA_arr, SAA_arr = GEOP.calc_SZA_SAA_array(self.shape_fullArr, subset[1], self.meta['AcqDate'],
self.meta['AcqTime'], self.trueDataCornerPos,
self.trueDataCornerLonLat, self.meta['overpass duraction sec'],
SZA_arr, SAA_arr = GEOP.calc_SZA_SAA_array(self.shape_fullArr, subset[1], self.meta_odict['AcqDate'],
self.meta_odict['AcqTime'], self.trueDataCornerPos,
self.trueDataCornerLonLat, self.meta_odict['overpass duraction sec'],
self.logger, mask_1bit_temp, fillVal,
accurracy=CFG.job.SZA_SAA_calculation_accurracy,
lonlat_arr=self.lonlat_arr
if CFG.job.SZA_SAA_calculation_accurracy == 'fine' else None)
if 'IncidenceAngle_arrProv' in self.meta and self.meta['IncidenceAngle_arrProv']:
if 'IncidenceAngle_arrProv' in self.meta_odict and self.meta_odict['IncidenceAngle_arrProv']:
pass # FIXME Sentinel-2s viewing azimuth array is currently not used (instead a static VAA is used)
RAA_arr = GEOP.calc_RAA_array(self.trueDataCornerLonLat, SAA_arr, self.VAA_mean, mask_1bit_temp, fillVal)
......
......@@ -27,7 +27,7 @@ class L2B_object(L2A_object):
self.proc_level = 'L2B'
def spectral_homogenization(self, subset=None, kind='linear'):
src_cwls = self.meta['wavelength']
src_cwls = self.meta_odict['wavelength']
tgt_cwls = CFG.usecase.target_CWL # FIXME exclude or include thermal bands; respect sorted CWLs in context of LayerBandsAssignment
if src_cwls!=tgt_cwls:
assert kind in ['linear',], "%s is not a supported kind of homogenization." %kind
......@@ -38,11 +38,11 @@ class L2B_object(L2A_object):
INP_R.read_ENVIfile(self.arr, self.arr_shape, self.arr_pos, self.logger, q=1)
self.arr = self.interpolate_cube_linear(inArray,src_cwls,tgt_cwls) if kind=='linear' else self.arr
self.meta['wavelength'] = list(tgt_cwls)
self.meta['bands'] = len(tgt_cwls)
self.meta['LayerBandsAssignment'] = [] # TODO
if 'band names' in self.meta: # FIXME bug workaround
del self.meta['band names'] # TODO
self.meta_odict['wavelength'] = list(tgt_cwls)
self.meta_odict['bands'] = len(tgt_cwls)
self.meta_odict['LayerBandsAssignment'] = [] # TODO
if 'band names' in self.meta_odict: # FIXME bug workaround
del self.meta_odict['band names'] # TODO
else:
pass # FIXME log spec homo skipped
......
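The call to self.interpolate_cube_linear(inArray, src_cwls, tgt_cwls) above resamples each pixel spectrum from the source center wavelengths to the target ones; its implementation is not part of this diff. A hedged sketch of such a linear band-axis interpolation with scipy (only the call signature is taken from the hunk above, the function body is an assumption):

    import numpy as np
    from scipy.interpolate import interp1d

    def interpolate_cube_linear(cube, src_cwls, tgt_cwls):
        # cube: (rows, cols, bands) array; src_cwls/tgt_cwls: center wavelengths
        # linear interpolation along the band axis, evaluated at the target wavelengths
        return interp1d(np.asarray(src_cwls), cube, axis=2, kind='linear',
                        bounds_error=False, fill_value='extrapolate')(np.asarray(tgt_cwls))

    cube = np.random.rand(4, 4, 6)  # toy 6-band image
    out = interpolate_cube_linear(cube, [450, 520, 590, 660, 730, 800], [500, 600, 700])
    print(out.shape)                # (4, 4, 3)
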
......@@ -41,8 +41,10 @@ from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG
class GMS_object(object):
class GMS_object(GeoArray):
def __init__(self):
super().__init__(np.zeros((0,)))
# protected attributes
self._logger = None
self._loggers_disabled = False
......@@ -50,6 +52,7 @@ class GMS_object(object):
self._LayerBandsAssignment = None
self._dict_LayerOptTherm = None
self._coreg_needed = None
self._arr = None
# defaults
self.proc_level = 'init'
......@@ -78,10 +81,9 @@ class GMS_object(object):
self.arr_pos = None # <tuple> in the form ((row_start,row_end),(col_start,col_end))
self.tile_pos = None # <list>, filled by self.get_tilepos()
self.MetaObj = None # set by self.get_MetaObj()
self.meta = None # set by self.MetaObj2ODict()
self.meta_odict = None # set by self.MetaObj2ODict()
self.GeoTransProj_ok = True # set by self.validate_GeoTransProj_GeoAlign()
self.GeoAlign_ok = True # set by self.validate_GeoTransProj_GeoAlign()
self.mask_1bit = None # set by self.calc_mask_nodata()
self.mask_clouds = None # set by self.calc_cloud_mask()
self.masks = None # set by self.build_L1A_masks()
self.masks_meta = {} # set by self.build_L1A_masks()
......@@ -146,6 +148,7 @@ class GMS_object(object):
"""Defines how the attributes of GMS object are pickled."""
self.close_GMS_loggers()
self.flush_cache() # clean array cache in order to avoid cache pickling
return self.__dict__
......@@ -230,8 +233,8 @@ class GMS_object(object):
@property
def coreg_needed(self):
if self.meta:
gt = mapinfo2geotransform(self.meta['map info'])
if self.meta_odict:
gt = mapinfo2geotransform(self.meta_odict['map info'])
self.coreg_needed = (is_coord_grid_equal(gt, CFG.usecase.spatial_ref_gridx, CFG.usecase.spatial_ref_gridy)
and self.dataset_ID == CFG.usecase.datasetid_spatial_ref) is False
return self._coreg_needed
......@@ -244,9 +247,19 @@ class GMS_object(object):
self._coreg_needed = value
def get_copied_dict_and_props(self, remove_privates=False):
#@property
#def arr(self):
# return self._arr
#@arr.setter
#def arr(self, geoArr_initArgs):
# self._arr = GeoArray(geoArr_initArgs)
def get_copied_dict_and_props(self, remove_privates=False): # FIXME deprecated
# type: (bool) -> dict
"""Returns a copy of the current object dictionary including the curren values of all object properties."""
"""Returns a copy of the current object dictionary including the current values of all object properties."""
# loggers must be closed
self.close_GMS_loggers()
......@@ -267,6 +280,27 @@ class GMS_object(object):
return out_dict
def attributes2dict(self, remove_privates=False):
# type: (bool) -> dict
"""Returns a copy of the current object dictionary including the curren values of all object properties."""
# loggers must be closed
self.close_GMS_loggers()
out_dict = self.__dict__.copy()
# add some selected property values
for i in ['GMS_identifier', 'LayerBandsAssignment', 'bandnames', 'coreg_needed', 'dict_LayerOptTherm',
'epsg', 'georef', 'geotransform', 'nodata', 'projection', 'shape']:
out_dict[i] = getattr(self,i)
# remove private attributes
if remove_privates:
out_dict = {k: v for k, v in out_dict.items() if not k.startswith('_')}
return out_dict
def _data_downloader(self,sensor, entity_ID):
self.logger.info('Data downloader started.')
success = False
......@@ -281,8 +315,51 @@ class GMS_object(object):
:param tuple_GMS_subset: <tuple> e.g. ('/path/gms_file.gms', ['cube', None])
"""
path_GMS_file = tuple_GMS_subset[0]
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
path_GMS_file, (self.arr_shape, self.arr_pos) = tuple_GMS_subset
PG_obj = PG.path_generator(self.__dict__)
path_arr = PG_obj.get_path_imagedata()
path_masks = PG_obj.get_path_maskdata()
path_maskClouds = PG_obj.get_path_cloudmaskdata()
if CFG.job.exec_mode=='Flink':
self.arr = INP_R.read_ENVIfile(path_arr,self.arr_shape,self.arr_pos,self.logger,q=1)
self.mask_nodata = INP_R.read_mask_subset(path_masks, 'mask_nodata', self.logger, tuple_GMS_subset[1])
self.mask_clouds = INP_R.read_mask_subset(path_masks,'mask_clouds',self.logger,tuple_GMS_subset[1])
self.log_for_fullArr_or_firstTile('Reading file %s as tiles...' %self.baseN \
if self.arr_pos else 'Reading file %s...' %self.baseN)
# self.masks is only needed by the output writer, where the masks are combined -> generated there on demand
else: # CFG.job.exec_mode=='Python'
self.arr = path_arr
self.mask_nodata = path_masks
self.mask_clouds = path_maskClouds
self.masks = path_masks
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
# copy all attributes from GMS file (private attributes are not touched since they are not included in GMS file)
for key, value in GMSfileDict.items():
if key in ['GMS_identifier', 'georef', 'dict_LayerOptTherm']:
continue # properties that should better be created on the fly
try:
setattr(self, key, value)
except:
raise AttributeError("Can't set attribute %s." %key)
self.acquisition_date = datetime.datetime.strptime(self.acquisition_date,'%Y-%m-%d')
return copy.copy(self)
def from_disk(self, tuple_GMS_subset):
"""Fills an already instanced GMS object with data from disk. Excludes array attributes in Python mode.
:param tuple_GMS_subset: <tuple> e.g. ('/path/gms_file.gms', ['cube', None])
"""
path_GMS_file = tuple_GMS_subset[0]
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
# copy all attributes from GMS file (private attributes are not touched since they are not included in GMS file)
for key, value in GMSfileDict.items():
......@@ -290,7 +367,7 @@ class GMS_object(object):
continue # properties that should better be created on the fly
try:
setattr(self, key, value)
except AttributeError:
except:
raise AttributeError("Can't set attribute %s." %key)
self.acquisition_date = datetime.datetime.strptime(self.acquisition_date,'%Y-%m-%d')
......@@ -302,14 +379,14 @@ class GMS_object(object):
path_maskClouds = PG_obj.get_path_cloudmaskdata()
if CFG.job.exec_mode=='Flink':
self.arr = INP_R.read_ENVIfile(path_arr,self.arr_shape,self.arr_pos,self.logger,q=1)
self.mask_1bit = INP_R.read_mask_subset(path_masks,'mask_1bit', self.logger,tuple_GMS_subset[1])
self.mask_nodata = INP_R.read_mask_subset(path_masks, 'mask_nodata', self.logger, tuple_GMS_subset[1])
self.mask_clouds = INP_R.read_mask_subset(path_masks,'mask_clouds',self.logger,tuple_GMS_subset[1])
self.log_for_fullArr_or_firstTile('Reading file %s as tiles...' %self.baseN \
if self.arr_pos else 'Reading file %s...' %self.baseN)
# self.masks is only needed by the output writer, where the masks are combined -> generated there on demand
else: # CFG.job.exec_mode=='Python'
self.arr = path_arr
self.mask_1bit = path_masks
self.mask_nodata = path_masks
self.mask_clouds = path_maskClouds
self.masks = path_masks
......@@ -402,20 +479,25 @@ class GMS_object(object):
return inArray.astype(np.int16) * (outScaleFactor / inScaleFactor)
def calc_mask_nodata(self, custom_nodataVal=None):
"""Calculate nodata mask using the nodata value specified in self.MetaObj.spec_vals by default."""
def calc_mask_nodata(self, fromBand=None, overwrite=False):
"""Calculates a no data mask with (values: 0=nodata; 1=data)
:param fromBand: <int> index of the band to be used (if None, all bands are used)
:param overwrite: <bool> whether to overwrite an already calculated nodata mask
:return:
"""
self.logger.info('Calculating nodata mask...') # FIXME this is the only difference to the base method
if self._mask_nodata is None or overwrite:
assert self.ndim in [2, 3], "Only 2D or 3D arrays are supported. Got a %sD array." % self.ndim
arr = self[:,:,fromBand] if self.ndim==3 and fromBand is not None else self[:]
self.logger.info('Calculating nodata mask...')
nodataVal = custom_nodataVal if custom_nodataVal else \
self.MetaObj.spec_vals['fill'] if hasattr(self,'MetaObj') and self.MetaObj else \
HLP_F.get_outFillZeroSaturated(np.int16)[0] # -9999
get_mask = lambda arr,nodata: np.all(np.where(arr==nodata,0,1),axis=2)
if self.nodata is None:
self.mask_nodata = np.ones((self.rows, self.cols), np.bool)
else:
self.mask_nodata = np.where(arr == self.nodata, 0, 1).astype(np.bool) if arr.ndim == 2 else \
np.all(np.where(arr == self.nodata, 0, 1), axis=2).astype(np.bool)
if hasattr(self,'arr') and isinstance(self.arr,np.ndarray):
self.mask_1bit = get_mask(self.arr,nodataVal)
else:
in_arr = np.swapaxes(np.swapaxes(gdalnumeric.LoadFile(self.arr),0,2),0,1)
self.mask_1bit = get_mask(in_arr,nodataVal)
def calc_mask_nodataOLD(self, subset):
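
In the 3D branch of the new calc_mask_nodata() above, a pixel counts as data (1) only if none of its bands equals the nodata value; if any band holds the nodata value, the pixel is masked out (0). A small worked example of that expression with toy values (not project code):

    import numpy as np

    nodata = -9999
    arr = np.array([[[-9999, -9999], [10, -9999]],
                    [[20, 30], [40, 50]]])  # shape (2, 2, 2): rows, cols, bands

    # 0 where any band equals the nodata value, 1 elsewhere (cf. calc_mask_nodata)
    mask_nodata = np.all(np.where(arr == nodata, 0, 1), axis=2).astype(bool)
    print(mask_nodata)
    # [[False False]
    #  [ True  True]]
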
......@@ -429,13 +511,13 @@ class GMS_object(object):
"""Convert self.MetaObj to an OrderedDict."""
self.logger.info('Preparing extracted metadata to be written to disk...')
self.meta = self.MetaObj.Meta2ODict()
self.meta_odict = self.MetaObj.Meta2ODict()
del self.MetaObj # FIXME MetaObj should have its json encoder
def apply_nodata_mask_to_ObjAttr(self, attrname, out_nodata_val=None):
# type: (str,int)
"""Applies self.mask_1bit to the specified array attribute by setting all values where mask_1bit is 0 to the
"""Applies self.mask_nodata to the specified array attribute by setting all values where mask_nodata is 0 to the
given nodata value.
:param attrname: The attribute to apply the nodata mask to. Must be an array attribute or
......@@ -451,11 +533,11 @@ class GMS_object(object):
else:
assert isinstance(getattr(self,attrname),np.ndarray), \
'L1A_obj.%s must be a numpy array. Got type %s.' %(attrname,type(getattr(self,attrname)))
assert hasattr(self,'mask_1bit') and self.mask_1bit is not None
assert hasattr(self,'mask_nodata') and self.mask_nodata is not None
self.log_for_fullArr_or_firstTile('Applying nodata mask to L1A_object.%s...' %attrname)
nodata_val = out_nodata_val if out_nodata_val else \
HLP_F.get_outFillZeroSaturated(getattr(self, attrname).dtype)[0]
getattr(self,attrname)[self.mask_1bit==0] = nodata_val
getattr(self,attrname)[self.mask_nodata == 0] = nodata_val
if attrname=='arr': self.MetaObj.spec_vals['fill']=nodata_val
......@@ -464,7 +546,7 @@ class GMS_object(object):
"""Generates self.masks attribute (unsigned integer 8bit) from by concatenating all masks included in GMS obj.
The corresponding metadata is assigned to L1A_obj.masks_meta. Empty mask attributes are skipped."""
arrays2combine = [aN for aN in ['mask_1bit', 'mask_clouds'] \
arrays2combine = [aN for aN in ['mask_nodata', 'mask_clouds'] \
if hasattr(self, aN) and isinstance(getattr(self, aN), np.ndarray)]
if arrays2combine:
self.log_for_fullArr_or_firstTile('Combining masks...')
......@@ -477,8 +559,8 @@ class GMS_object(object):
self.masks = get_data(arrays2combine[0]) if len(arrays2combine)==1 else \
np.concatenate([get_data(aN) for aN in arrays2combine], axis=2)
mapI, CSS = (self.meta['map info'], self.meta['coordinate system string']) \
if hasattr(self, 'meta') and self.meta else (self.MetaObj.map_info, self.MetaObj.projection)
mapI, CSS = (self.meta_odict['map info'], self.meta_odict['coordinate system string']) \
if hasattr(self, 'meta_odict') and self.meta_odict else (self.MetaObj.map_info, self.MetaObj.projection)
nodataVal = HLP_F.get_outFillZeroSaturated(self.masks.dtype)[0]
self.masks_meta = {'map info': mapI, 'coordinate system string': CSS,
......@@ -490,8 +572,8 @@ class GMS_object(object):
def apply_nodata_mask_to_saved_ENVIfile(self, path_saved_ENVIhdr, custom_nodata_val=None, update_spec_vals=False):
# type: (str,int,bool)
"""Applies self.mask_1bit to a saved ENVI file with the same X/Y dimensions like self.mask_1bit by setting all
values where mask_1bit is 0 to the given nodata value.
"""Applies self.mask_nodata to a saved ENVI file with the same X/Y dimensions like self.mask_nodata by setting all
values where mask_nodata is 0 to the given nodata value.
:param path_saved_ENVIhdr: <str> The path of the ENVI file to apply the nodata mask to.
:param custom_nodata_val: <int> set the values of the given attribute to this value.
......@@ -500,7 +582,7 @@ class GMS_object(object):
self.log_for_fullArr_or_firstTile('Applying nodata mask to saved ENVI file...')
assert os.path.isfile(path_saved_ENVIhdr)