Commit 9e7bd1f6 authored by Daniel Scheffler's avatar Daniel Scheffler

Fixed issue #69 (Spatial homogenization leaves resampling artifacts at the image edges).

Converted GMS_object.from_sensor_subsystems, GMS_object.from_tiles and GMS_object.from_disk to class methods.
parent 3cb42d55
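In essence, the refactor replaces the old mutate-an-instance-and-return-a-copy pattern with true alternative constructors. A minimal illustrative sketch of the pattern (simplified names, not the actual GMS code):

import copy

class GMS_like(object):
    # before: instance method that mutated a pre-built object and returned a copy
    def from_disk_old(self, tuple_GMS_subset):
        self.path = tuple_GMS_subset[0]
        return copy.copy(self)

    # after: classmethod that builds and returns a fresh, correctly typed instance
    @classmethod
    def from_disk(cls, tuple_GMS_subset):
        obj = cls()  # cls may also be a subclass such as L1A_object or L2A_object
        obj.path = tuple_GMS_subset[0]
        return obj

This also removes the need for callers to instantiate a dummy object first (GMS_object().from_disk(...) becomes GMS_object.from_disk(...)).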
......@@ -477,7 +477,7 @@ class L1B_object(L1A_object):
"""
# get GMS_object for reference scene
path_gmsFile = PG.path_generator(scene_ID=self.spatRef_scene.scene_ID).get_path_gmsfile()
ref_obj = GMS_object().from_disk((path_gmsFile, ['cube', None]))
ref_obj = GMS_object.from_disk((path_gmsFile, ['cube', None]))
# get spectral characteristics
ref_cwl, shift_cwl = [[float(i) for i in GMS_obj.meta_odict['wavelength']] for GMS_obj in [ref_obj, self]]
......
......@@ -955,7 +955,6 @@ class AtmCorr(object):
# join SURFACE REFLECTANCE as 3D int16 array, scaled to scale factor from config #
##################################################################################
# FIXME AC output nodata values = 0 -> new nodata areas but mask not updated
oF_refl, oZ_refl, oS_refl = get_outFillZeroSaturated(inObj.arr.dtype)
surf_refl = np.dstack((self.results.data_ac[bandN] for bandN in ac_bandNs))
surf_refl *= CFG.scale_factor_BOARef # scale using scale factor (output is float16)
......@@ -1040,7 +1039,7 @@ class AtmCorr(object):
for inObj in self.inObjs:
# delete all previous cloud masks
del inObj.mask_clouds
del inObj.mask_clouds # FIXME validate if FMask product is within AC results
# append mask_clouds only to the input GMS object with the same dimensions
if inObj.arr.shape[:2] == mask_clouds_ac.shape:
......
......@@ -368,16 +368,6 @@ class Dataset(object):
"""Convert LayerbandsAssignment from format ['1','2',...] to bandnames like this: [B01, .., B8A,]."""
return ['B%s' % i if len(i) == 2 else 'B0%s' % i for i in LayerBandsAssignment]
def from_disk(self, tuple_GMS_subset):
"""Fills an already instanced GMS object with data from disk. Excludes array attributes in Python mode.
:param tuple_GMS_subset: <tuple> e.g. ('/path/gms_file.gms', ['cube', None])
"""
# TODO
return copy.copy(self)
def get_tilepos(self, target_tileshape, target_tilesize):
self.tile_pos = [[target_tileshape, tb]
for tb in get_array_tilebounds(array_shape=self.shape_fullArr, tile_shape=target_tilesize)]
......
......@@ -72,9 +72,9 @@ class GMS_object(Dataset):
self._coreg_info = None
self.job_ID = CFG.ID
# FIXME not needed anymore?:
# self.dataset_ID = int(DB_T.get_info_from_postgreSQLdb(CFG.conn_database, 'scenes', ['datasetid'],
# {'id': self.scene_ID})[0][0]) if self.scene_ID !=-9999 else -9999
self.dataset_ID = -9999 if self.scene_ID == -9999 else \
int(DB_T.get_info_from_postgreSQLdb(CFG.conn_database, 'scenes', ['datasetid'],
{'id': self.scene_ID})[0][0])
self.scenes_proc_ID = None # set by Output writer after creation/update of db record in table scenes_proc
self.mgrs_tiles_proc_ID = None # set by Output writer after creation/update of db rec in table mgrs_tiles_proc
self.MGRS_info = None
......@@ -99,9 +99,7 @@ class GMS_object(Dataset):
# => (avoids MaybeEncodingError: Error sending result: '[<gms_preprocessing.algorithms.L2C_P.L2C_object
# object at 0x7fc44f6399e8>]'. Reason: 'error("'i' format requires -2147483648 <= number <= 2147483647",)')
if self.proc_level == 'L2C' and CFG.inmem_serialization:
# FIXME check by bandname
if self.mask_nodata is not None and self.masks.bands > 1 and self.mask_clouds is not None:
del self.masks
self.flush_array_data()
return self.__dict__
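For context, a minimal sketch (hypothetical class and attribute names) of the mechanism: multiprocessing pickles returned objects via __getstate__, so dropping the heavy array attributes there keeps the payload below the 32-bit pickle size limit that raises the MaybeEncodingError quoted above:

import numpy as np

class BigResult(object):
    def __init__(self):
        self.arr = np.zeros((10000, 10000, 8), np.int16)  # ~1.5 GB in memory
        self.scene_ID = 123

    def flush_array_data(self):
        self.arr = None  # hypothetical stand-in for flushing all array attributes

    def __getstate__(self):
        self.flush_array_data()
        return self.__dict__  # only lightweight attributes get pickled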
......@@ -357,7 +355,7 @@ class GMS_object(Dataset):
self._mask_clouds_confidence = cnfArr
else:
del self._mask_clouds_confidence
del self.mask_clouds_confidence
@mask_clouds_confidence.deleter
def mask_clouds_confidence(self):
......@@ -370,7 +368,7 @@ class GMS_object(Dataset):
:return:
"""
return self._ac_errors # FIXME should give a warning if None
return self._ac_errors
@ac_errors.setter
def ac_errors(self, *geoArr_initArgs):
......@@ -426,7 +424,7 @@ class GMS_object(Dataset):
:return:
"""
return self._spec_homo_errors # FIXME should give a warning if None
return self._spec_homo_errors
@spec_homo_errors.setter
def spec_homo_errors(self, *geoArr_initArgs):
......@@ -625,41 +623,44 @@ class GMS_object(Dataset):
raise RuntimeError('Archive download failed.')
return success
def from_disk(self, tuple_GMS_subset):
@classmethod
def from_disk(cls, tuple_GMS_subset):
"""Fills an already instanced GMS object with data from disk. Excludes array attributes in Python mode.
:param tuple_GMS_subset: <tuple> e.g. ('/path/gms_file.gms', ['cube', None])
"""
GMS_obj = cls()
path_GMS_file = tuple_GMS_subset[0]
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
# copy all attributes from GMS file (private attributes are not touched since they are not included in GMS file)
self.meta_odict = GMSfileDict['meta_odict'] # set that first in order to make some getters and setters work
GMS_obj.meta_odict = GMSfileDict['meta_odict'] # set that first in order to make some getters and setters work
for key, value in GMSfileDict.items():
if key in ['GMS_identifier', 'georef', 'dict_LayerOptTherm']:
continue # properties that should better be created on the fly
try:
setattr(self, key, value)
setattr(GMS_obj, key, value)
except Exception:
raise AttributeError("Can't set attribute %s." % key)
self.arr_shape, self.arr_pos = tuple_GMS_subset[1]
GMS_obj.arr_shape, GMS_obj.arr_pos = tuple_GMS_subset[1]
self.arr = self.pathGen.get_path_imagedata()
GMS_obj.arr = GMS_obj.pathGen.get_path_imagedata()
# GMS_obj.mask_nodata and GMS_obj.mask_clouds are auto-synchronized via GMS_obj.masks (see their getters):
self.masks = self.pathGen.get_path_maskdata()
GMS_obj.masks = GMS_obj.pathGen.get_path_maskdata()
return copy.copy(self)
return GMS_obj
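Call sites (updated further below in this commit) now use from_disk as an alternative constructor, and subclasses inherit correctly typed behaviour, e.g. (paths illustrative):

obj = GMS_object.from_disk(('/path/gms_file.gms', ['cube', None]))
l1a_obj = L1A_P.L1A_object.from_disk((GMSfile, ['cube', None]))  # returns an L1A_object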
def from_sensor_subsystems(self, list_GMS_objs):
@classmethod
def from_sensor_subsystems(cls, list_GMS_objs):
# type: (List[GMS_object]) -> GMS_object
# TODO convert to classmethod
"""Merge separate GMS objects belonging to the same scene-ID into ONE GMS object.
:param list_GMS_objs: <list> of GMS objects covering the same geographic area but representing different
sensor subsystems (e.g. 3 GMS_objects for Sentinel-2 10m/20m/60m bands)
"""
GMS_obj_merged = cls()
# assertions
assert len(list_GMS_objs) > 1, "'GMS_object.from_sensor_subsystems()' expects multiple input GMS objects. " \
......@@ -690,17 +691,17 @@ class GMS_object(Dataset):
allLogs_df = allLogs_df.sort_values(0)
# set common metadata, needed for logfile
self.baseN = list_GMS_objs[0].pathGen.get_baseN(merged_subsystems=True)
self.path_logfile = list_GMS_objs[0].pathGen.get_path_logfile(merged_subsystems=True)
self.scene_ID = list_GMS_objs[0].scene_ID
GMS_obj_merged.baseN = list_GMS_objs[0].pathGen.get_baseN(merged_subsystems=True)
GMS_obj_merged.path_logfile = list_GMS_objs[0].pathGen.get_path_logfile(merged_subsystems=True)
GMS_obj_merged.scene_ID = list_GMS_objs[0].scene_ID
# write the merged logfile and flush previous logger
np.savetxt(self.path_logfile, np.array(allLogs_df), delimiter=': ', fmt="%s")
self.close_GMS_loggers()
np.savetxt(GMS_obj_merged.path_logfile, np.array(allLogs_df), delimiter=': ', fmt="%s")
GMS_obj_merged.close_GMS_loggers()
# log
self.logger.info('Merging the subsystems %s to a single GMS object...'
% ', '.join([GMS_obj.subsystem for GMS_obj in list_GMS_objs]))
GMS_obj_merged.logger.info('Merging the subsystems %s to a single GMS object...'
% ', '.join([GMS_obj.subsystem for GMS_obj in list_GMS_objs]))
##################
# MERGE METADATA #
......@@ -713,7 +714,7 @@ class GMS_object(Dataset):
elif key in ['baseN', 'path_logfile', 'scene_ID', 'subsystem']:
continue # either previously set with common values or not needed for merged GMS_object
try:
setattr(self, key, value)
setattr(GMS_obj_merged, key, value)
except Exception:
raise AttributeError("Can't set attribute %s." % key)
......@@ -721,19 +722,19 @@ class GMS_object(Dataset):
from .metadata import get_LayerBandsAssignment
# use identifier of first input GMS object for getting LBA (respects current proc_level):
gms_idf = list_GMS_objs[0].GMS_identifier
self.LayerBandsAssignment = get_LayerBandsAssignment(gms_idf, return_fullLBA=True)
bandnames = ['B%s' % i if len(i) == 2 else 'B0%s' % i for i in self.LayerBandsAssignment]
GMS_obj_merged.LayerBandsAssignment = get_LayerBandsAssignment(gms_idf, return_fullLBA=True)
bandnames = ['B%s' % i if len(i) == 2 else 'B0%s' % i for i in GMS_obj_merged.LayerBandsAssignment]
# update layer-dependent metadata with respect to remaining input GMS objects
self.meta_odict.update({
'band names': [('Band %s' % i) for i in self.LayerBandsAssignment],
'LayerBandsAssignment': self.LayerBandsAssignment,
GMS_obj_merged.meta_odict.update({
'band names': [('Band %s' % i) for i in GMS_obj_merged.LayerBandsAssignment],
'LayerBandsAssignment': GMS_obj_merged.LayerBandsAssignment,
'Subsystem': '',
'PhysUnit': self.meta_odict['PhysUnit'], # TODO can contain val_optical / val_thermal
'PhysUnit': GMS_obj_merged.meta_odict['PhysUnit'], # TODO can contain val_optical / val_thermal
})
self.subsystem = ''
del self.pathGen # must be refreshed because subsystem is now ''
self.close_GMS_loggers() # must also be refreshed because it depends on pathGen
GMS_obj_merged.subsystem = ''
del GMS_obj_merged.pathGen # must be refreshed because subsystem is now ''
GMS_obj_merged.close_GMS_loggers() # must also be refreshed because it depends on pathGen
for attrN in layerdependent_metadata:
# combine values from separate subsystems to a single value
......@@ -749,9 +750,9 @@ class GMS_object(Dataset):
# update the attribute in self.MetaObj
if attrDic_fullLBA:
val2set = [attrDic_fullLBA[bN] for bN in self.LayerBandsAssignment] \
val2set = [attrDic_fullLBA[bN] for bN in GMS_obj_merged.LayerBandsAssignment] \
if isinstance(getattr(list_GMS_objs[0].MetaObj, attrN), list) else attrDic_fullLBA
setattr(self.MetaObj, attrN, val2set)
setattr(GMS_obj_merged.MetaObj, attrN, val2set)
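Conceptually (an illustrative sketch with made-up values), the per-band metadata of all subsystems is collected into one dict keyed by the layer identifier and then re-ordered along the merged LayerBandsAssignment:

# illustrative only: merging one per-band attribute from two Sentinel-2 subsystems
attrDic_fullLBA = {}
attrDic_fullLBA.update({'2': 0.98, '3': 0.95})    # e.g. contributed by the 10m subsystem
attrDic_fullLBA.update({'5': 0.91, '8A': 0.89})   # e.g. contributed by the 20m subsystem

LayerBandsAssignment = ['2', '3', '5', '8A']
val2set = [attrDic_fullLBA[bN] for bN in LayerBandsAssignment]  # -> [0.98, 0.95, 0.91, 0.89]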
####################
# MERGE ARRAY DATA #
......@@ -827,7 +828,7 @@ class GMS_object(Dataset):
geoArrs_same_extent[0].gt, geoArrs_same_extent[0].prj,
bandnames=bandnames,
nodata=geoArrs_same_extent[0].nodata)
setattr(self, attrname, full_geoArr)
setattr(GMS_obj_merged, attrname, full_geoArr)
# handle the remaining arrays
else:
......@@ -835,18 +836,18 @@ class GMS_object(Dataset):
if attrname == 'dem':
# use the DEM of the first input object
# (if the grid is the same, the DEMs should be the same anyway)
self.dem = geoArrs_same_extent[0]
GMS_obj_merged.dem = geoArrs_same_extent[0]
elif attrname == 'mask_nodata':
# must not be merged -> GMS_obj_merged.arr is already merged, so just recalculate it (np.all)
self.mask_nodata = self.calc_mask_nodata(overwrite=True)
GMS_obj_merged.mask_nodata = GMS_obj_merged.calc_mask_nodata(overwrite=True)
elif attrname == 'mask_clouds':
# possibly only present in ONE subsystem (set by atm. Corr.)
mask_clouds = [msk for msk in geoArrs_same_extent if msk is not None]
if len(mask_clouds) > 1:
raise ValueError('Expected mask clouds in only one subsystem. Got %s.' % len(mask_clouds))
self.mask_clouds = mask_clouds[0] if mask_clouds else None
GMS_obj_merged.mask_clouds = mask_clouds[0] if mask_clouds else None
elif attrname == 'mask_clouds_confidence':
# possibly only present in ONE subsystem (set by atm. Corr.)
......@@ -854,11 +855,11 @@ class GMS_object(Dataset):
if len(mask_clouds_conf) > 1:
raise ValueError(
'Expected mask_clouds_conf in only one subsystem. Got %s.' % len(mask_clouds_conf))
self.mask_clouds_confidence = mask_clouds_conf[0] if mask_clouds_conf else None
GMS_obj_merged.mask_clouds_confidence = mask_clouds_conf[0] if mask_clouds_conf else None
elif attrname == 'masks':
# GMS_obj_merged.mask_nodata and GMS_obj_merged.mask_clouds will already be set here -> so just recreate it from there
self.masks = None
GMS_obj_merged.masks = None
# avoid unequal nodata edges between individual layers (resampling artifacts etc.) #
###################################################################################
......@@ -866,51 +867,66 @@ class GMS_object(Dataset):
# apply cloud mask to image data and all products derived from image data
# (only if image data represents BOA-Ref and cloud areas are not to be filled with TOA-Ref)
if re.search('BOA_Reflectance', self.MetaObj.PhysUnit, re.I) and not CFG.ac_fillnonclear_areas:
if re.search('BOA_Reflectance', GMS_obj_merged.MetaObj.PhysUnit, re.I) and not CFG.ac_fillnonclear_areas:
# get pixel values of areas that have not been atmospherically corrected (non-clear)
nonclear_labels = [lbl for lbl in ["Clear", "Snow", "Water", "Shadow", "Cirrus", "Cloud"]
if lbl not in CFG.ac_clear_area_labels]
cloud_mask_legend = DEF_D.get_mask_classdefinition('mask_clouds', self.satellite)
cloud_mask_legend = DEF_D.get_mask_classdefinition('mask_clouds', GMS_obj_merged.satellite)
nonclear_pixVals = [cloud_mask_legend[lbl] for lbl in nonclear_labels]
# fill non-clear areas with no data values (for all bands)
for pixVal in nonclear_pixVals:
mask_nonclear = self.mask_clouds[:] == pixVal
self.arr[mask_nonclear] = DEF_D.get_outFillZeroSaturated(self.arr.dtype)[0]
mask_nonclear = GMS_obj_merged.mask_clouds[:] == pixVal
GMS_obj_merged.arr[mask_nonclear] = DEF_D.get_outFillZeroSaturated(GMS_obj_merged.arr.dtype)[0]
if self.ac_errors:
self.ac_errors[mask_nonclear] = DEF_D.get_outFillZeroSaturated(self.ac_errors.dtype)[0]
if GMS_obj_merged.ac_errors:
GMS_obj_merged.ac_errors[mask_nonclear] = \
DEF_D.get_outFillZeroSaturated(GMS_obj_merged.ac_errors.dtype)[0]
# update no data mask
self.calc_mask_nodata(overwrite=True)
GMS_obj_merged.calc_mask_nodata(overwrite=True)
# apply updated nodata mask to array data
for attrname in ['arr', 'ac_errors', 'dem', 'mask_clouds', 'mask_clouds_confidence']:
attr_val = getattr(GMS_obj_merged, attrname)
if attr_val is not None:
attr_val[GMS_obj_merged.mask_nodata[:] == 0] = DEF_D.get_outFillZeroSaturated(attr_val.dtype)[0]
setattr(GMS_obj_merged, attrname, attr_val)
# recreate self.masks
self.build_combined_masks_array()
GMS_obj_merged.build_combined_masks_array()
# update array-dependent metadata
self.meta_odict.update({
'samples': self.arr.cols, 'lines': self.arr.rows, 'bands': self.arr.bands,
'map info': geotransform2mapinfo(self.arr.gt, self.arr.prj), 'coordinate system string': self.arr.prj, })
self.calc_corner_positions() # update corner coordinates
GMS_obj_merged.meta_odict.update({
'samples': GMS_obj_merged.arr.cols, 'lines': GMS_obj_merged.arr.rows,
'bands': GMS_obj_merged.arr.bands,
'map info': geotransform2mapinfo(GMS_obj_merged.arr.gt, GMS_obj_merged.arr.prj),
'coordinate system string': GMS_obj_merged.arr.prj, })
# update corner coordinates # (calc_corner_positions is a method of L1A_object)
GMS_obj_merged.calc_corner_positions()
# set shape of full array
self.shape_fullArr = self.arr.shape
GMS_obj_merged.shape_fullArr = GMS_obj_merged.arr.shape
return copy.copy(self)
return GMS_obj_merged
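The loop above over ['arr', 'ac_errors', 'dem', 'mask_clouds', 'mask_clouds_confidence'] is the core fix for issue #69: every raster attribute is forced onto the common nodata footprint, so resampling artifacts at the image edges cannot survive in individual layers only. A standalone sketch of the idea (assuming, as suggested by the tuple unpacking seen earlier, that get_outFillZeroSaturated returns the dtype-specific fill value as its first element):

import numpy as np

def harmonize_nodata_edges(layers, mask_nodata, get_fill):
    # fill all pixels outside the common data area in every layer
    for arr in layers:
        if arr is not None:
            arr[mask_nodata == 0] = get_fill(arr.dtype)

# usage sketch with dummy data
mask_nodata = np.array([[1, 1], [1, 0]], np.uint8)
band = np.full((2, 2), 500, np.int16)
harmonize_nodata_edges([band], mask_nodata, lambda dt: -9999)
assert band[1, 1] == -9999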
def from_tiles(self, list_GMS_tiles):
# type: (list) -> self
@classmethod
def from_tiles(cls, list_GMS_tiles):
# type: (list) -> cls
"""Merge separate GMS objects with different spatial coverage but belonging to the same scene-ID to ONE GMS object.
:param list_GMS_tiles: <list> of GMS objects that have been created by cut_GMS_obj_into_blocks()
"""
GMS_obj = cls()
if 'IMapUnorderedIterator' in str(type(list_GMS_tiles)):
list_GMS_tiles = list(list_GMS_tiles)
# copy all attributes except of array attributes
tile1 = list_GMS_tiles[0]
[setattr(self, i, getattr(tile1, i)) for i in tile1.__dict__
[setattr(GMS_obj, i, getattr(tile1, i)) for i in tile1.__dict__
if not callable(getattr(tile1, i)) and not isinstance(getattr(tile1, i), (np.ndarray, GeoArray))]
# MERGE ARRAY-ATTRIBUTES
......@@ -926,38 +942,43 @@ class GMS_object(Dataset):
% (type(samplearray), arrname)
is_3d = samplearray.ndim == 3
bands = (samplearray.shape[2],) if is_3d else () # dynamic -> works for arr, cld_arr,...
target_shape = tuple(self.shape_fullArr[:2]) + bands
target_shape = tuple(GMS_obj.shape_fullArr[:2]) + bands
target_dtype = samplearray.dtype
merged_array = self._numba_array_merger(list_GMS_tiles, arrname, target_shape, target_dtype)
merged_array = GMS_obj._numba_array_merger(list_GMS_tiles, arrname, target_shape, target_dtype)
setattr(self, arrname if not arrname.startswith('_') else arrname[1:],
setattr(GMS_obj, arrname if not arrname.startswith('_') else arrname[1:],
merged_array) # use setters if possible
# NOTE: this asserts that each attribute starting with '_' has also a property with a setter!
# UPDATE ARRAY-DEPENDENT ATTRIBUTES
self.arr_shape = 'cube'
self.arr_pos = None
GMS_obj.arr_shape = 'cube'
GMS_obj.arr_pos = None
# update MetaObj attributes
self.meta_odict.update({
'samples': self.arr.cols, 'lines': self.arr.rows, 'bands': self.arr.bands,
'map info': geotransform2mapinfo(self.arr.gt, self.arr.prj), 'coordinate system string': self.arr.prj, })
GMS_obj.meta_odict.update({
'samples': GMS_obj.arr.cols,
'lines': GMS_obj.arr.rows,
'bands': GMS_obj.arr.bands,
'map info': geotransform2mapinfo(GMS_obj.arr.gt, GMS_obj.arr.prj),
'coordinate system string': GMS_obj.arr.prj
})
# calculate data_corners_imXY (mask_nodata is always an array here because get_mapPos always returns an array)
corners_imYX = calc_FullDataset_corner_positions(
self.mask_nodata, assert_four_corners=False, algorithm='shapely')
self.trueDataCornerPos = [(YX[1], YX[0]) for YX in corners_imYX] # [UL, UR, LL, LR]
GMS_obj.mask_nodata, assert_four_corners=False, algorithm='shapely')
GMS_obj.trueDataCornerPos = [(YX[1], YX[0]) for YX in corners_imYX] # [UL, UR, LL, LR]
# calculate trueDataCornerLonLat
data_corners_LatLon = pixelToLatLon(self.trueDataCornerPos, geotransform=self.arr.gt, projection=self.arr.prj)
self.trueDataCornerLonLat = [(YX[1], YX[0]) for YX in data_corners_LatLon]
data_corners_LatLon = pixelToLatLon(GMS_obj.trueDataCornerPos,
geotransform=GMS_obj.arr.gt, projection=GMS_obj.arr.prj)
GMS_obj.trueDataCornerLonLat = [(YX[1], YX[0]) for YX in data_corners_LatLon]
# calculate trueDataCornerUTM
data_corners_utmYX = pixelToMapYX(self.trueDataCornerPos, geotransform=self.arr.gt,
projection=self.arr.prj) # FIXME asserts gt in UTM coordinates
self.trueDataCornerUTM = [(YX[1], YX[0]) for YX in data_corners_utmYX]
data_corners_utmYX = pixelToMapYX(GMS_obj.trueDataCornerPos, geotransform=GMS_obj.arr.gt,
projection=GMS_obj.arr.prj) # FIXME asserts gt in UTM coordinates
GMS_obj.trueDataCornerUTM = [(YX[1], YX[0]) for YX in data_corners_utmYX]
return copy.copy(self)
return GMS_obj
@staticmethod
@jit
......@@ -1062,8 +1083,8 @@ class GMS_object(Dataset):
def apply_nodata_mask_to_saved_ENVIfile(self, path_saved_ENVIhdr, custom_nodata_val=None, update_spec_vals=False):
# type: (str,int,bool) -> None
"""Applies self.mask_nodata to a saved ENVI file with the same X/Y dimensions like self.mask_nodata by setting all
values where mask_nodata is 0 to the given nodata value.
"""Applies self.mask_nodata to a saved ENVI file with the same X/Y dimensions like self.mask_nodata by setting
all values where mask_nodata is 0 to the given nodata value.
:param path_saved_ENVIhdr: <str> The path of the ENVI file to apply the nodata mask to.
:param custom_nodata_val: <int> set the values of the given attribute to this value.
......@@ -1153,7 +1174,7 @@ class GMS_object(Dataset):
elif tiles[0]['desc'] == 'lonlat_arr':
# outpath = os.path.join(os.path.abspath('./testing/out/'),'%s__%s.%s'
# %(self.baseN, tiles[0]['desc'], self.outInterleave))
self.lonlat_arr = outpath # FIXME
self.lonlat_arr = outpath
outpath = os.path.splitext(outpath)[0] + '.hdr' if not outpath.endswith('.hdr') else outpath
out_shape = self.shape_fullArr[:2] + ([tiles[0]['data'].shape[2]] if len(tiles[0]['data'].shape) == 3 else [1])
OUT_W.Tiles_Writer(tiles, outpath, out_shape, tiles[0]['data'].dtype, self.outInterleave, self.meta_odict,
......@@ -1256,7 +1277,7 @@ class GMS_object(Dataset):
if k == 'MetaObj':
continue # make sure MetaObj getter is not called -> would delete meta_odict
elif isinstance(v, datetime.datetime):
dict2write[k] = v.strftime('%Y-%m-%d %H:%M:%S.%f%z') # FIXME
dict2write[k] = v.strftime('%Y-%m-%d %H:%M:%S.%f%z')
elif isinstance(v, DatasetLogger):
if hasattr(v, 'handlers') and v.handlers[:]:
warnings.warn('Not properly closed logger at GMS_obj.logger pointing to %s.' % v.path_logfile)
......@@ -1630,7 +1651,7 @@ class GMS_object(Dataset):
if self.proc_level != 'L2A':
pathGenCurr = self.pathGen
else:
# after geometric homogenization and subsystems merging (L2A) a path generator without subsystem
# after geometric homogenization and subsystem merging (L2A) a path generator without subsystem
# is needed
dict4pathGen = self.__dict__.copy()
dict4pathGen['subsystem'] = ''
......
......@@ -169,7 +169,7 @@ def L2A_map(L1C_objs, block_size=None, return_tiles=True):
L2A_obj.correct_spatial_shifts(cliptoextent=CFG.clip_to_extent, clipextent=common_extent, clipextent_prj=4326)
# merge multiple subsystems belonging to the same scene ID to a single GMS object
L2A_obj = L2A_P.L2A_object().from_sensor_subsystems(L2A_objs) if len(L2A_objs) > 1 else L2A_objs[0]
L2A_obj = L2A_P.L2A_object.from_sensor_subsystems(L2A_objs) if len(L2A_objs) > 1 else L2A_objs[0]
# write output
if CFG.exec_L2AP[1]:
......@@ -255,7 +255,7 @@ def run_complete_preprocessing(list_dataset_dicts_per_scene): # map (scene-wise
if input_proc_level == 'L1A':
for ds in list_dataset_dicts_per_scene:
GMSfile = path_generator(ds, proc_level='L1A').get_path_gmsfile()
L1A_objects.append(L1A_P.L1A_object().from_disk([GMSfile, ['cube', None]]))
L1A_objects.append(L1A_P.L1A_object.from_disk([GMSfile, ['cube', None]]))
L1B_objects = [L1B_map(L1A_obj) for L1A_obj in L1A_objects]
del L1A_objects
......@@ -273,7 +273,7 @@ def run_complete_preprocessing(list_dataset_dicts_per_scene): # map (scene-wise
if input_proc_level == 'L1B':
for ds in list_dataset_dicts_per_scene:
GMSfile = path_generator(ds, proc_level='L1B').get_path_gmsfile()
L1B_objects.append(L1B_P.L1B_object().from_disk([GMSfile, ['cube', None]]))
L1B_objects.append(L1B_P.L1B_object.from_disk([GMSfile, ['cube', None]]))
L1C_objects = L1C_map(L1B_objects)
del L1B_objects
......@@ -292,7 +292,7 @@ def run_complete_preprocessing(list_dataset_dicts_per_scene): # map (scene-wise
if input_proc_level == 'L1C':
for ds in list_dataset_dicts_per_scene:
GMSfile = path_generator(ds, proc_level='L1C').get_path_gmsfile()
L1C_objects.append(L1C_P.L1C_object().from_disk([GMSfile, ['cube', None]]))
L1C_objects.append(L1C_P.L1C_object.from_disk([GMSfile, ['cube', None]]))
L2A_obj = L2A_map(L1C_objects, return_tiles=False)
del L1C_objects
......@@ -309,7 +309,7 @@ def run_complete_preprocessing(list_dataset_dicts_per_scene): # map (scene-wise
assert len(list_dataset_dicts_per_scene) == 1, \
'Expected only a single L2A dataset since subsystems are merged.'
GMSfile = path_generator(list_dataset_dicts_per_scene[0], proc_level='L2A').get_path_gmsfile()
L2A_obj = L2A_P.L2A_object().from_disk([GMSfile, ['cube', None]])
L2A_obj = L2A_P.L2A_object.from_disk([GMSfile, ['cube', None]])
L2B_obj = L2B_map(L2A_obj)
del L2A_obj
......@@ -326,7 +326,7 @@ def run_complete_preprocessing(list_dataset_dicts_per_scene): # map (scene-wise
assert len(list_dataset_dicts_per_scene) == 1, \
'Expected only a single L2B dataset since subsystems are merged.'
GMSfile = path_generator(list_dataset_dicts_per_scene[0], proc_level='L2B').get_path_gmsfile()
L2B_obj = L2B_P.L2B_object().from_disk([GMSfile, ['cube', None]])
L2B_obj = L2B_P.L2B_object.from_disk([GMSfile, ['cube', None]])
L2C_obj = L2C_map(L2B_obj) # type: Union[GMS_object, failed_GMS_object, List]
del L2B_obj
......
......@@ -341,8 +341,7 @@ class process_controller(object):
# NOTE: DON'T multiprocess that with MAP(GMS_object(*initargs).from_disk, work)
# in case of multiple subsystems GMS_object(*initargs) would always point to the same object in memory
# -> subsystem attribute will be overwritten each time
def init_GMS_obj(): return HLP_F.parentObjDict[prevLvl](*HLP_F.initArgsDict[prevLvl])
DB_objs = [init_GMS_obj().from_disk(tuple_GMS_subset=w) for w in work] # init
DB_objs = [HLP_F.parentObjDict[prevLvl].from_disk(tuple_GMS_subset=w) for w in work]
if DB_objs:
DB_objs = list(chain.from_iterable(DB_objs)) if list in [type(i) for i in DB_objs] else list(DB_objs)
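The NOTE above describes a genuine aliasing pitfall; a minimal illustrative sketch of why mapping over one pre-built instance corrupts per-subsystem state, while the classmethod creates a fresh object per work item:

class Obj(object):
    subsystem = None

    @classmethod
    def from_disk(cls, work_item):
        obj = cls()               # fresh instance per work item
        obj.subsystem = work_item
        return obj

shared = Obj()

def bad_restore(work_item):       # every result aliases the same object
    shared.subsystem = work_item
    return shared

bad = [bad_restore(w) for w in ['S2A10', 'S2A20']]
assert bad[0].subsystem == 'S2A20'  # the first result was silently overwritten
good = [Obj.from_disk(w) for w in ['S2A10', 'S2A20']]
assert [o.subsystem for o in good] == ['S2A10', 'S2A20']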
......@@ -567,7 +566,7 @@ class process_controller(object):
grouped_L1A_Tiles = HLP_F.group_objects_by_attributes(
L1A_obj_tiles, 'scene_ID', 'subsystem') # group results
L1A_objects = MAP(L1A_P.L1A_object().from_tiles, grouped_L1A_Tiles) # reduce
L1A_objects = MAP(L1A_P.L1A_object.from_tiles, grouped_L1A_Tiles) # reduce
L1A_resObjects = MAP(L1A_map_3, L1A_objects) # map_3
......@@ -678,7 +677,7 @@ class process_controller(object):
grouped_L2A_Tiles = HLP_F.group_objects_by_attributes(self.L2A_tiles, 'scene_ID') # group results
# reduce # will be too slow because it has to pickle back really large L2A_newObjects
# L2A_newObjects = MAP(HLP_F.merge_GMS_tiles_to_GMS_obj, grouped_L2A_Tiles)
L2A_newObjects = [L2A_P.L2A_object().from_tiles(tileList) for tileList in grouped_L2A_Tiles]
L2A_newObjects = [L2A_P.L2A_object.from_tiles(tileList) for tileList in grouped_L2A_Tiles]
"""combine newly and earlier processed L2A data"""
L2A_DBObjects = self.get_DB_objects('L2B', self.L2A_tiles)
......@@ -696,12 +695,11 @@ class process_controller(object):
L2B_tiles = MAP(L2B_map, L2A_tiles)
grouped_L2B_Tiles = \
HLP_F.group_objects_by_attributes(L2B_tiles,
'scene_ID') # group results # FIXME necessary at this point?
# group results # FIXME necessary at this point?
grouped_L2B_Tiles = HLP_F.group_objects_by_attributes(L2B_tiles, 'scene_ID')
[L2B_tiles_group[0].delete_tempFiles() for L2B_tiles_group in grouped_L2B_Tiles]
L2B_resObjects = [L2B_P.L2B_object().from_tiles(tileList) for tileList in grouped_L2B_Tiles]
L2B_resObjects = [L2B_P.L2B_object.from_tiles(tileList) for tileList in grouped_L2B_Tiles]
self.L2B_newObjects = [obj for obj in L2B_resObjects if isinstance(obj, L2B_P.L2B_object)]
self.failed_objects += [obj for obj in L2B_resObjects if isinstance(obj, failed_GMS_object) and
......