Commit 487c9e2e authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

fixed an issue during calculation of true data corners during L2A, Fix for...

fixed an issue during calculation of true data corners during L2A, Fix for deadlock during DEM calculation at 180 degrees meridian

algorithms.L1A_P:
- calc_corner_positions(): bugfix

algorithms.L1C_P.AtmCorr:
- _join_results_to_inObjs(): split into four separate functions
- added _join_data_ac(): based on _join_results_to_inObjs(); AC nodata values within dataset are no longer overwritten with fill values but with outFill
- added _join_data_errors(): based on _join_results_to_inObjs()
- added _join_mask_clouds(): based on _join_results_to_inObjs()
- added _join_mask_confidence_array(): based on _join_results_to_inObjs()

io.Input_reader:
- get_dem_by_extent(): fixed an issue in case of a scene directly at 180 degrees meridian

misc.definition_dicts:
- get_outFillZeroSaturated(): changed outZero value of uint16

- updated __version__


Former-commit-id: ab3aa1a5
parent 2cbb0083
......@@ -15,7 +15,7 @@ from . import config
from .processing.process_controller import process_controller
__version__ = '20170329.01'
__version__ = '20170330.01'
__author__ = 'Daniel Scheffler'
__all__ = ['algorithms',
'io',
......
......@@ -837,8 +837,9 @@ class L1A_object(GMS_object):
# set full scene corner positions (image and lonlat coordinates)
if is_dataset_provided_as_fullScene(self.GMS_identifier):
assert len(self.trueDataCornerLonLat)==4, "A %s %s dataset is provided as full scene. Thus all 4 image " \
"corners must be present within the dataset. Found %s." %len(self.trueDataCornerLonLat)
assert len(self.trueDataCornerLonLat)==4, "Dataset %s (entity ID %s) is provided as full scene. " \
"Thus exactly 4 image corners must be present within the dataset. Found %s corners instead." \
%(self.scene_ID, self.entity_ID, len(self.trueDataCornerLonLat))
self.fullSceneCornerPos = self.trueDataCornerPos
self.fullSceneCornerLonLat = self.trueDataCornerLonLat
else:
......
......@@ -224,7 +224,7 @@ class AtmCorr(object):
:param L1C_objs: one or more instances of L1C_object belonging to the same scene ID
"""
# FIXME not yet usable for data < 2012 due to missing ECMWF archive
L1C_objs = L1C_objs if isinstance(L1C_objs, tuple) else (L1C_objs,)
# hidden attributes
......@@ -783,17 +783,35 @@ class AtmCorr(object):
def _join_results_to_inObjs(self):
"""
Join results of atmospheric correction to the input GMS objects.
"""
if self.results.data_ac is not None:
self.logger.info('Joining results of atmospheric correction to input GMS objects.')
del self.logger # otherwise logging in inObjs would open a second FileHandler to the same file (which is permitted)
self.logger.info('Joining results of atmospheric correction to input GMS objects.')
del self.logger # otherwise logging in inObjs would open a second FileHandler to the same file (which is permitted)
self._join_data_ac()
self._join_data_errors()
self._join_mask_clouds()
self._join_mask_confidence_array()
# update masks (always do that because masks can also only contain one layer)
[inObj.build_combined_masks_array() for inObj in self.inObjs]
self.outObjs = self.inObjs
def _join_data_ac(self):
"""
Join ATMOSPHERICALLY CORRECTED ARRAY as 3D int8 or int16 BOA reflectance array, scaled to scale factor from config.
"""
if self.results.data_ac is not None:
for inObj in self.inObjs:
#assert isinstance(inObj.arr, (GeoArray, np.ndarray)), print(type(inObj.arr))
nodata = self.results.nodata[inObj.arr.xgsd] # 2D mask with True outside of image coverage
# assert isinstance(inObj.arr, (GeoArray, np.ndarray)), print(type(inObj.arr))
nodata = self.results.nodata[inObj.arr.xgsd] # 2D mask with True outside of image coverage
ac_bandNs = [bandN for bandN in inObj.arr.bandnames if bandN in self.results.data_ac.keys()]
out_LBA = [bN.split('B0')[1] if bN.startswith('B0') else bN.split('B')[1] for bN in ac_bandNs]
out_LBA = [bN.split('B0')[1] if bN.startswith('B0') else bN.split('B')[1] for bN in ac_bandNs]
# update metadata
inObj.arr_desc = 'BOA_Ref'
......@@ -801,49 +819,61 @@ class AtmCorr(object):
inObj.MetaObj.PhysUnit = 'BOA_Reflectance in [0-%d]' % CFG.usecase.scale_factor_BOARef
inObj.MetaObj.LayerBandsAssignment = out_LBA
inObj.MetaObj.filter_layerdependent_metadata()
inObj.meta_odict = inObj.MetaObj.to_odict()
inObj.meta_odict = inObj.MetaObj.to_odict() # actually auto-updated by getter
# join SURFACE REFLECTANCE as 3D int16 array, scaled to scale factor from config
# FIXME AC output nodata values = 0 -> new nodata areas but mask not updated
oF_refl, oZ_refl, oS_refl = get_outFillZeroSaturated(inObj.arr.dtype)
surf_refl = np.dstack((self.results.data_ac[bandN] for bandN in ac_bandNs))
surf_refl *= CFG.usecase.scale_factor_BOARef # scale using scale factor (output is float16)
surf_refl[nodata] = get_outFillZeroSaturated(inObj.arr.dtype)[0] # FIXME AC output nodata values = 0 -> new nodata areas but mask not updated
surf_refl *= CFG.usecase.scale_factor_BOARef # scale using scale factor (output is float16)
# FIXME really set AC nodata values to GMS outZero?
surf_refl[nodata] = oZ_refl # overwrite AC nodata values with GMS outZero
surf_refl[np.array(inObj.mask_nodata) == False] = oF_refl # apply the original nodata mask (indicating background values)
if self.results.bad_data_value is np.nan:
surf_refl[np.isnan(surf_refl)] = get_outFillZeroSaturated(inObj.arr.dtype)[0]
surf_refl[np.isnan(surf_refl)] = oF_refl
else:
surf_refl[surf_refl==self.results.bad_data_value] = get_outFillZeroSaturated(inObj.arr.dtype)[0]
surf_refl[surf_refl == self.results.bad_data_value] = oF_refl # FIXME meaningful to set AC nans to -9999?
# overwrite LayerBandsAssignment and use inObj.arr setter to generate a GeoArray
inObj.LayerBandsAssignment = out_LBA
inObj.arr = surf_refl.astype(inObj.arr.dtype) # -> int16 (also converts NaNs to 0 if needed
# join ERRORS ARRAY as 3D int8 or int16 BOA reflectance array, scaled to scale factor from config
if self.results.data_errors is not None:
ac_errors = np.dstack((self.results.data_errors[bandN] for bandN in ac_bandNs))
ac_errors *= CFG.usecase.scale_factor_errors_ac # scale using scale factor (output is float16)
out_dtype = np.int8 if CFG.usecase.scale_factor_errors_ac<=255 else np.int16
ac_errors[nodata] = get_outFillZeroSaturated(out_dtype)[0]
ac_errors = ac_errors.astype(out_dtype)
inObj.ac_errors = ac_errors # setter generates a GeoArray with the same bandnames like inObj.arr
# TODO how to handle nans?
else:
self.logger.warning("Atmospheric correction did not provide a 'data_errors' array. Maybe due to "
"missing SNR model? GMS_object.ac_errors kept None.")
inObj.arr = surf_refl.astype(inObj.arr.dtype) # -> int16 (also converts NaNs to 0 if needed
else:
self.logger.warning('Atmospheric correction did not return a result for the input array. '
'Thus the output keeps NOT atmospherically corrected.')
# join CLOUD MASK as 2D uint8 array
# NOTE: mask_clouds has also methods 'export_mask_rgb()', 'export_confidence_to_jpeg2000()', ...
mask_clouds_ac = self.results.mask_clouds.mask_array # uint8 2D array
# join confidence array for mask_clouds
if self.results.mask_clouds.mask_confidence_array is not None:
cfd_arr = self.results.mask_clouds.mask_confidence_array # float32 2D array, scaled [0-1, nodata 255]
cfd_arr[cfd_arr == self.ac_input['options']['cld_mask']['nodata_value_mask']] = -1
cfd_arr = (cfd_arr * CFG.usecase.scale_factor_BOARef).astype(np.int16)
cfd_arr[cfd_arr == -CFG.usecase.scale_factor_BOARef] = get_outFillZeroSaturated(cfd_arr.dtype)[0]
else:
cfd_arr = None
self.logger.warning("Atmospheric correction did not provide a 'mask_confidence_array' array for "
"attribute 'mask_clouds. GMS_object.mask_clouds_confidence kept None.")
def _join_data_errors(self):
    """
    Join the errors array of the atmospheric correction to each input GMS object.

    The errors are appended as a 3D int8 or int16 array (dtype chosen from the
    configured error scale factor) via the GMS_object.ac_errors setter. If the
    AC did not return an errors array, GMS_object.ac_errors is left None and a
    warning is logged.
    """
    if self.results.data_errors is not None:
        for inObj in self.inObjs:
            nodata = self.results.nodata[inObj.arr.xgsd]  # 2D mask with True outside of image coverage

            # select the bands for which the AC returned an errors layer
            # NOTE: filter against data_errors (not data_ac) so this does not
            #       raise AttributeError when data_ac is None but errors exist
            ac_bandNs = [bandN for bandN in inObj.arr.bandnames
                         if bandN in self.results.data_errors.keys()]

            # stack the per-band error layers; pass a list — np.dstack does not
            # accept a bare generator in recent numpy versions
            ac_errors = np.dstack([self.results.data_errors[bandN] for bandN in ac_bandNs])
            ac_errors *= CFG.usecase.scale_factor_errors_ac  # scale using scale factor (output is float16)

            # choose the smallest integer dtype that can hold the scaled errors
            out_dtype = np.int8 if CFG.usecase.scale_factor_errors_ac <= 255 else np.int16
            ac_errors[nodata] = get_outFillZeroSaturated(out_dtype)[0]
            ac_errors = ac_errors.astype(out_dtype)

            inObj.ac_errors = ac_errors  # setter generates a GeoArray with the same bandnames like inObj.arr
            # TODO how to handle nans?
    else:
        self.logger.warning("Atmospheric correction did not provide a 'data_errors' array. Maybe due to "
                            "missing SNR model? GMS_object.ac_errors kept None.")
def _join_mask_clouds(self):
"""
Join CLOUD MASK as 2D uint8 array.
NOTE: mask_clouds has also methods 'export_mask_rgb()', 'export_confidence_to_jpeg2000()', ...
"""
if self.results.mask_clouds.mask_array is not None:
mask_clouds_ac = self.results.mask_clouds.mask_array # uint8 2D array
joined = False
for inObj in self.inObjs:
......@@ -853,41 +883,59 @@ class AtmCorr(object):
# append mask_clouds only to the input GMS object with the same dimensions
if inObj.arr.shape[:2] == mask_clouds_ac.shape:
inObj.mask_clouds = mask_clouds_ac
inObj.mask_clouds.legend = self.results.mask_clouds.mask_legend # dict(value=string, string=value)) # # FIXME this is not used later
inObj.mask_clouds.legend = self.results.mask_clouds.mask_legend # dict(value=string, string=value))
# FIXME legend is not used later
# set cloud mask nodata value
tgt_nodata = get_outFillZeroSaturated(mask_clouds_ac.dtype)[0]
tgt_nodata = get_outFillZeroSaturated(mask_clouds_ac.dtype)[0]
ac_out_nodata = self.ac_input['options']['cld_mask']['nodata_value_mask']
if tgt_nodata not in self.results.mask_clouds.mask_legend.keys():
inObj.mask_clouds[inObj.mask_clouds[:]==ac_out_nodata] = tgt_nodata
inObj.mask_clouds[inObj.mask_clouds[:] == ac_out_nodata] = tgt_nodata
mask_clouds_nodata = tgt_nodata
else:
warnings.warn('The cloud mask from AC output already uses the desired nodata value %s for the '
'class %s. Using AC output nodata value %s.'
%(tgt_nodata, self.results.mask_clouds.mask_legend[tgt_nodata], ac_out_nodata))
% (tgt_nodata, self.results.mask_clouds.mask_legend[tgt_nodata], ac_out_nodata))
mask_clouds_nodata = ac_out_nodata
inObj.mask_clouds.nodata = mask_clouds_nodata
# set cloud mask confidence array
if cfd_arr is not None:
inObj.mask_clouds_confidence = GeoArray(cfd_arr, inObj.arr.gt, inObj.arr.prj,
nodata=get_outFillZeroSaturated(cfd_arr.dtype)[0])
joined=True
# update masks (always do that because masks can also only contain one layer)
inObj.build_combined_masks_array()
joined = True
if not joined:
self.logger.warning('Cloud mask has not been appended to one of the AC inputs because there was no'
'input GMS object with the same dimensions.')
else:
# TODO: check if nevertheless a cloud mask is returned!
self.logger.warning('Atmospheric correction did not return a result for the input array. '
'Thus the output keeps NOT atmospherically corrected.')
self.logger.warning("Atmospheric correction did not provide a 'mask_clouds.mask_array' array. "
"GMS_object.mask_clouds kept None.")
self.outObjs = self.inObjs
def _join_mask_confidence_array(self):
    """
    Join the confidence array belonging to mask_clouds to each input GMS object.

    The float32 confidence array ([0-1], nodata 255) is rescaled to int16 using
    the BOA reflectance scale factor and attached as a GeoArray to every input
    object whose dimensions match. If the AC did not return a confidence array,
    GMS_object.mask_clouds_confidence is left None and a warning is logged.
    """
    if self.results.mask_clouds.mask_confidence_array is not None:
        # work on a copy — the in-place edits below must not mutate the AC results object
        cfd_arr = self.results.mask_clouds.mask_confidence_array.copy()  # float32 2D array, scaled [0-1, nodata 255]

        # map the AC nodata value to -1 so it survives the scaling below, then
        # rescale [0-1] confidences to int16 and replace the marker with the GMS fill value
        cfd_arr[cfd_arr == self.ac_input['options']['cld_mask']['nodata_value_mask']] = -1
        cfd_arr = (cfd_arr * CFG.usecase.scale_factor_BOARef).astype(np.int16)
        cfd_arr[cfd_arr == -CFG.usecase.scale_factor_BOARef] = get_outFillZeroSaturated(cfd_arr.dtype)[0]

        joined = False
        for inObj in self.inObjs:
            # append the confidence array only to input GMS objects with the same dimensions
            if inObj.arr.shape[:2] == cfd_arr.shape:
                inObj.mask_clouds_confidence = GeoArray(cfd_arr, inObj.arr.gt, inObj.arr.prj,
                                                        nodata=get_outFillZeroSaturated(cfd_arr.dtype)[0])
                joined = True

        if not joined:
            self.logger.warning('Cloud mask confidence array has not been appended to one of the AC inputs because '
                                'there was no input GMS object with the same dimensions.')
    else:
        self.logger.warning("Atmospheric correction did not provide a 'mask_confidence_array' array for "
                            "attribute 'mask_clouds'. GMS_object.mask_clouds_confidence kept None.")
\ No newline at end of file
......@@ -265,7 +265,7 @@ class GMS_object(object):
@property
def meta_odict(self):
if self._MetaObj:
# if there is already a MetaObj -> create meta_odict from it (ensures synchronization!)
# if there is already a MetaObj -> create new meta_odict from it (ensures synchronization!)
self._meta_odict = self._MetaObj.to_odict()
del self.MetaObj
elif not self._meta_odict:
......@@ -570,7 +570,7 @@ class GMS_object(object):
@pathGen.setter
def pathGen(self, pathGen):
assert isinstance(pathGen, PG.path_generator), 'GMS_object.pathGen can only be set to an instance of ' \
'path_generator. Got %s.' %type(pathGen)
'path_generator. Got %s.' %type(pathGen)
self._pathGen = pathGen
......
......@@ -369,9 +369,18 @@ def get_dem_by_extent(cornerCoords_tgt, prj, tgt_xgsd, tgt_ygsd):
:param tgt_ygsd: output Y GSD
:return:
"""
#print(cornerCoords_tgt, prj, tgt_xgsd, tgt_ygsd)
# handle corrdinate infos
# handle coordinate infos
tgt_corner_coord_lonlat = [transform_any_prj(prj, 4326, X,Y) for X,Y in cornerCoords_tgt]
#tgt_corner_coord_lonlat = [(-114, 52), (-117, 52), (-117, 50), (-114, 50)] # this is a test
#from py_tools_ds.ptds.geo.projection import EPSG2WKT
#prj = EPSG2WKT(32612)
# handle coordinates directly above the 180 degrees meridian
xvals = [x for x,y in tgt_corner_coord_lonlat]
if max(xvals) - min(xvals) >180:
tgt_corner_coord_lonlat = [(x, y) if x > 0 else (x + 360, y) for x, y in tgt_corner_coord_lonlat]
# get overlapping SRTM scene IDs
dsID_srtm = 225
......@@ -380,7 +389,7 @@ def get_dem_by_extent(cornerCoords_tgt, prj, tgt_xgsd, tgt_ygsd):
tgt_corners_lonlat= tgt_corner_coord_lonlat,
conditions = ['datasetid=%s' % dsID_srtm])
sceneIDs_srtm = [i[0] for i in sceneIDs_srtm]
assert sceneIDs_srtm, 'No matching SRTM scene IDs found.'
assert sceneIDs_srtm, 'No matching SRTM scene IDs for DEM generation found.'
# get corresponding filenames on disk and generate GDALvsiPathes pointing to raster files within archives
archivePaths = [path_generator(scene_ID=ID).get_local_archive_path_baseN() for ID in sceneIDs_srtm]
......@@ -389,11 +398,13 @@ def get_dem_by_extent(cornerCoords_tgt, prj, tgt_xgsd, tgt_ygsd):
gdalHdrPaths = [os.path.join(archP, hdrP.split('.zip/')[1]) for archP, hdrP in zip(gdalArchivePaths, hdrPaths)]
gdalBILPaths = [os.path.splitext(hdrP)[0] + '.bil' for hdrP in gdalHdrPaths]
# run gdal merge command with temporary disk output
# run gdal merge command with temporary disk output # TODO what about using a VRT here?
t_xmin, t_xmax, t_ymin, t_ymax = corner_coord_to_minmax(tgt_corner_coord_lonlat)
project_dir = os.path.abspath(os.curdir)
os.chdir(os.path.dirname(archivePaths[0]))
path_tmp_merged = get_tempfile(ext='_dem_merged.tif')
#for i in [path_tmp_merged, t_xmin, t_ymax, t_xmax, t_ymin, ' '.join(gdalBILPaths)]:
# print(i)
cmd = 'gdal_merge.py -o %s -of Gtiff -ul_lr %s %s %s %s %s' % \
(path_tmp_merged, t_xmin, t_ymax, t_xmax, t_ymin, ' '.join(gdalBILPaths))
out, exitcode, err = HLP_F.subcall_with_output(cmd)
......
......@@ -118,7 +118,7 @@ def get_outFillZeroSaturated(dtype):
assert dtype in ['bool', 'int8', 'uint8', 'int16', 'uint16','float32'], \
"get_outFillZeroSaturated: Unknown dType: '%s'." %dtype
dict_outFill = {'bool':None, 'int8':-128, 'uint8':0 , 'int16':-9999, 'uint16':9999 , 'float32':-9999.}
dict_outZero = {'bool':None, 'int8':0 , 'uint8':1 , 'int16':0 , 'uint16':1 , 'float32':0.}
dict_outZero = {'bool':None, 'int8':0 , 'uint8':1 , 'int16':0 , 'uint16':0 , 'float32':0.}
dict_outSaturated = {'bool':None, 'int8':127 , 'uint8':256, 'int16':32767, 'uint16':65535, 'float32':65535.}
return dict_outFill[dtype], dict_outZero[dtype], dict_outSaturated[dtype]
......
......@@ -302,8 +302,8 @@ def get_path_ac_options(GMS_identifier):
ac_options_file_dic = {
'AVNIR-2': None,
'TM4': 'l8_options.json', # FIXME not yet usable due to missing ECMWF archive < 2012
'TM5': 'l8_options.json', # FIXME not yet usable due to missing ECMWF archive < 2012
'TM4': 'l8_options.json',
'TM5': 'l8_options.json',
'TM7': 'l8_options.json', # AC uses Landsat-8 options for L7 but reads only a subset of the options
'LDCM': 'l8_options.json',
'SPOT1a': None,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment