Commit 42865754 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

Fix for missing ac_errors and mask_clouds_confidence arrays in processing...

Fix for missing ac_errors and mask_clouds_confidence arrays in processing levels L2B, L2C and any MGRS tiles.
parent e19e86d8
......@@ -440,7 +440,7 @@ class Dataset(object):
self._spec_homo_errors = errArr
else:
del self.ac_errors
del self.spec_homo_errors
@spec_homo_errors.deleter
def spec_homo_errors(self):
......@@ -520,7 +520,7 @@ class Dataset(object):
# clip all array attributes using the given bounds
# list_arraynames = [i for i in self.__dict__ if not callable(getattr(self, i)) and \
# isinstance(getattr(self, i), np.ndarray)]
list_arraynames = [i for i in ['arr', 'masks']
list_arraynames = [i for i in ['arr', 'masks', 'ac_errors', 'mask_clouds_confidence']
if hasattr(self, i) and getattr(self, i) is not None] # FIXME hardcoded
assert list_arraynames
assert imBounds or mapBounds, "Either 'imBounds' or 'mapBounds' must be given. Got nothing."
......@@ -536,7 +536,10 @@ class Dataset(object):
# avoid disk IO if requested area is within the input array # TODO
# subset all array attributes and update directly related metadata
####################################################################
# subset all array attributes and update directly related metadata #
####################################################################
for arrname in list_arraynames:
# get input data for array subsetting
meta2update = sub_GMS_obj.meta_odict if arrname == 'arr' else sub_GMS_obj.masks_meta
......@@ -568,6 +571,10 @@ class Dataset(object):
# FIXME not dynamic:
sub_GMS_obj.mask_clouds = sub_GMS_obj.masks[:, :, 1] if sub_GMS_obj.masks.bands > 1 else None
###################
# update metadata #
###################
# update arr_pos
sub_GMS_obj.arr_shape = 'block'
if imBounds is not None:
......
......@@ -589,7 +589,7 @@ class GMS_object(Dataset):
for geoArr in all_arrays:
if geoArr is not None:
# FIXME mask_clouds_confidence is until here no GeoArray
# FIXME mask_clouds_confidence is no GeoArray until here
# FIXME -> has no nodata value -> calculation throws a warning
geoArr_same_extent = \
GeoArray(*geoArr.get_mapPos(
......@@ -1151,6 +1151,10 @@ class GMS_object(Dataset):
if not is_tempfile:
self.log_for_fullArr_or_firstTile('Writing %s.' % print_dict[descriptor])
#########################
# GeoArray in disk mode #
#########################
if isinstance(arrayval, GeoArray) and not arrayval.is_inmem:
# object attribute contains GeoArray in disk mode. This is usually the case if the attribute has
# been read in Python exec mode from previous processing level and has NOT been modified during
......@@ -1160,6 +1164,10 @@ class GMS_object(Dataset):
"file path: %s" % (arrayname, arrayval.filePath)
path_to_array = arrayval.filePath
#############
# full cube #
#############
if self.arr_shape == 'cube':
# image data can just be copied
outpath_arr = os.path.splitext(outpath_hdr)[0] + (os.path.splitext(path_to_array)[1]
......@@ -1181,6 +1189,7 @@ class GMS_object(Dataset):
envi.write_envi_header(outpath_hdr, meta2write)
HLP_F.silentremove(path_to_array)
HLP_F.silentremove(os.path.splitext(path_to_array)[0] + '.hdr')
else:
try:
shutil.copy(path_to_array, outpath_arr) # copies file + permissions
......@@ -1188,6 +1197,7 @@ class GMS_object(Dataset):
# prevents permission error if outputfile already exists and is owned by another user
HLP_F.silentremove(outpath_arr)
shutil.copy(path_to_array, outpath_arr)
envi.write_envi_header(outpath_hdr, meta2write)
assert OUT_W.check_header_not_empty(outpath_hdr), "HEADER EMPTY: %s" % outpath_hdr
......@@ -1195,7 +1205,11 @@ class GMS_object(Dataset):
if arrayname == 'masks':
setattr(self, 'mask_nodata', outpath_arr)
else: # 'block' or 'MGRS_tile
#########################
# 'block' or 'MGRS_tile' #
#########################
else:
# data have to be read in subset and then be written
if self.arr_pos:
(rS, rE), (cS, cE) = self.arr_pos
......@@ -1212,6 +1226,7 @@ class GMS_object(Dataset):
rows) # bands, rows, columns OR rows, columns
arr2write = tempArr if len(tempArr.shape) == 2 else \
np.swapaxes(np.swapaxes(tempArr, 0, 2), 0, 1) # rows, columns, (bands)
else:
# read mask data in subset
previous_procL = DEF_D.proc_chain[DEF_D.proc_chain.index(self.proc_level) - 1]
......@@ -1224,6 +1239,10 @@ class GMS_object(Dataset):
arrayval = getattr(self, arrayname) # can be a GeoArray (in mem / not in mem) or a numpy.ndarray
####################################
# np.ndarray or GeoArray in memory #
####################################
if isinstance(arrayval, np.ndarray) or isinstance(arrayval, GeoArray) and arrayval.is_inmem:
# must be an if-condition because arrayval can change attribute type from not-inmem-GeoArray
# to np.ndarray
......@@ -1235,27 +1254,36 @@ class GMS_object(Dataset):
arr2write = arr2write.arr if isinstance(arr2write, GeoArray) else arr2write
assert isinstance(arr2write, np.ndarray), 'Expected a numpy ndarray. Got %s.' % type(arr2write)
##########################
# full cube or MGRS_tile #
##########################
if self.arr_shape in ['cube', 'MGRS_tile']:
# TODO write a function that implements the algorithm from Tiles_Writer for writing cubes
# TODO -> no need for Spectral Python
# write cube-like attributes
meta2write = metaDict_to_metaODict(meta2write, self.logger)
success = 1
if arrayname not in ['mask_clouds', ]:
if compression:
success = OUT_W.write_ENVI_compressed(outpath_hdr, arr2write, meta2write)
if not success:
warnings.warn('Written compressed ENVI file is not GDAL readable! '
'Writing uncompressed file.')
if not compression or not success:
envi.save_image(outpath_hdr, arr2write, metadata=meta2write, dtype=out_dtype,
interleave=self.outInterleave, ext=self.outInterleave, force=True)
else:
if compression:
success = OUT_W.write_ENVI_compressed(outpath_hdr, arr2write, meta2write)
if not success:
warnings.warn('Written compressed ENVI file is not GDAL readable! '
'Writing uncompressed file.')
self.logger.warning('Written compressed ENVI file is not GDAL readable! '
'Writing uncompressed file.')
if not compression or not success:
class_names = meta2write['class names']
class_colors = meta2write['class lookup']
......@@ -1265,6 +1293,10 @@ class GMS_object(Dataset):
if os.path.exists(outpath_hdr):
OUT_W.reorder_ENVI_header(outpath_hdr, OUT_W.enviHdr_keyOrder)
#########################
# block-like attributes #
#########################
else:
if compression: # FIXME
warnings.warn(
......@@ -1292,16 +1324,19 @@ class GMS_object(Dataset):
self.logger.warning(
"%s can not be written, because there is no corresponding attribute." % print_dict[descriptor])
# write GMS-file and update database
######################################
# write GMS-file and update database #
######################################
# IMPORTANT: DO NOT pass the complete object but only a copy of the dictionary in order to prevent ASCII_writer
# and data_DB_updater from modifying the attributes of the object!!
if self.arr_shape in ['cube', 'MGRS_tile'] or [self.arr_pos[0][0], self.arr_pos[1][0]] == [0, 0]:
# cube or 1st tile
# write GMS file
# WRITE GMS FILE
self.to_GMS_file()
# create/update database
# CREATE/UPDATE DATABASE ENTRY
if not is_tempfile:
DB_T.data_DB_updater(self.attributes2dict())
......@@ -1322,7 +1357,7 @@ class GMS_object(Dataset):
'Found multiple database records for the last updated record. sceneid: %s' % self.scene_ID
self.scenes_proc_ID = res[0][0]
# copy logfile to MGRS output directory
# COPY LOGFILE TO MGRS OUTPUT DIRECTORY
if self.arr_shape == 'MGRS_tile':
shutil.copy(self.path_logfile, os.path.join(self.pathGen.get_path_procdata(),
os.path.basename(self.path_logfile)))
......
......@@ -101,12 +101,16 @@ class BaseTestCases:
# update attributes of DB_job_record and related DB entry
cls.PC.config.DB_job_record.reset_job_progress()
GMS_object.proc_status_all_GMSobjs.clear() # reset
cls.PC.config.data_list = cls.PC.add_local_availability(cls.PC.config.data_list)
[cls.validate_db_entry(ds['filename']) for ds in cls.PC.config.data_list]
cls.PC.config.ac_estimate_accuracy = True
cls.PC.config.spechomo_estimate_accuracy = True
def check_availability(self, GMS_objs, tgt_procL):
dss = self.PC.add_local_availability([GMS_object_2_dataset_dict(obj) for obj in GMS_objs])
for ds in dss:
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment