Commit cef2a152 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

algorithms.gms_object.GMS_object:

- from_tiles(): bugfix for not updating array-dependent attributes
- from_sensor_subsystems(): bugfix for not recreating loggers after subsystem merging
- removed deprecated MetaObj2ODict()
- get_subset_obj(): now operable from imBounds; fixed inconsistent attribute names
- added to_tiles()
algorithms.L2B_P.L2B_object:
- added log message
algorithms.METADATA:
- get_LayerBandsAssignment(): bugfix
misc.helper_functions:
- cut_GMS_obj_into_blocks(): now calls GMS_object.to_tiles()
- revised cornerLonLat_to_postgreSQL_poly()
misc.mgrs_tile:
- EPSG: bugfix
processing.pipeline:
- L1A_map_1() and L2A_map(): changed call for object conversion into tiles
- removed /database/cloud_classifier from version control -> now external resource
- updated __version__
Former-commit-id: c9f38cb9
Former-commit-id: 9b805282
parent 5338816e
......@@ -15,7 +15,7 @@ from . import config
from .processing.process_controller import process_controller
__version__ = '20170120.02'
__version__ = '20170124.01'
__author__ = 'Daniel Scheffler'
__all__ = ['algorithms',
'io',
......
......@@ -462,14 +462,9 @@ class AtmCorr(object):
"""
# TODO add SRF object
metadata = {}
#from datetime import datetime
if not self._metadata:
del self.logger # otherwise each input object would have multiple fileHandlers
print('passing', self.inObjs[0].acq_datetime)
#print(type(self.inObjs[0].acq_datetime))
#print(datetime.strptime('2015-08-12 10:40:21 +0000', '%Y-%m-%d %H:%M:%S %z'))
#raise SystemExit
metadata['U'] = self.inObjs[0].meta_odict['EarthSunDist']
metadata['SENSING_TIME'] = self.inObjs[0].acq_datetime
#metadata['SENSING_TIME'] = datetime.strptime('2015-08-12 10:40:21 +0000', '%Y-%m-%d %H:%M:%S %z')
......
......@@ -41,7 +41,8 @@ class L2B_object(L2A_object):
if 'band names' in self.meta_odict: # FIXME bug workaround
del self.meta_odict['band names'] # TODO
else:
pass # FIXME log spec homo skipped
self.logger.info("Spectral homogenization has been skipped because the current spectral characteristics "
"are already equal to the target sensor's.")
@staticmethod
def interpolate_cube_linear(arrcube, source_CWLs, target_CWLs):
......
......@@ -1776,7 +1776,7 @@ def get_LayerBandsAssignment(GMS_identifier, nBands=None, ignore_usecase=False,
else:
# TODO check if dataset belongs to a known datasetid and use LBA of this dataset
# return a LBA matching the number of bands after spectral homogenization
LayerBandsAssignment = [str(i+1) for i in range(len(CFG.job.target_CWL))]
LayerBandsAssignment = [str(i+1) for i in range(len(CFG.usecase.target_CWL))]
return LayerBandsAssignment
......
......@@ -29,7 +29,7 @@ from py_tools_ds.ptds.io.raster.GeoArray import GeoArray, NoDataMask, CloudMas
from py_tools_ds.ptds.geo.coord_grid import is_coord_grid_equal
from py_tools_ds.ptds.geo.projection import WKT2EPSG
from py_tools_ds.ptds.geo.coord_calc import calc_FullDataset_corner_positions
from py_tools_ds.ptds.geo.coord_trafo import pixelToLatLon, pixelToMapYX
from py_tools_ds.ptds.geo.coord_trafo import pixelToLatLon, pixelToMapYX, imXY2mapXY
from py_tools_ds.ptds.geo.map_info import geotransform2mapinfo, mapinfo2geotransform
from ..misc.logging import GMS_logger
......@@ -93,7 +93,7 @@ class GMS_object(object):
self.trueDataCornerLonLat = [] # set by self.calc_corner_positions()
self.fullSceneCornerPos = [] # set by self.calc_corner_positions()
self.fullSceneCornerLonLat = [] # set by self.calc_corner_positions()
self.shape_fullArr = [None, None, None] # rows,cols,bands
self.shape_fullArr = [None, None, None] # rows,cols,bands of the full scene (not of the subset as possibly represented by self.arr.shape)
self.arr_shape = 'cube'
self.arr_desc = '' # description of data units for self.arr
self.arr_pos = None # <tuple> in the form ((row_start,row_end),(col_start,col_end))
......@@ -711,6 +711,7 @@ class GMS_object(object):
:param tuple_GMS_subset: <tuple> e.g. ('/path/gms_file.gms', ['cube', None])
"""
path_GMS_file = tuple_GMS_subset[0]
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
......@@ -735,7 +736,7 @@ class GMS_object(object):
def from_tiles(self, list_GMS_tiles):
# type: (list) -> GMS_object
"""Merge separate GMS objects belonging to the same scene-ID to ONE GMS object.
"""Merge separate GMS objects with different spatial coverage but belonging to the same scene-ID to ONE GMS object.
:param list_GMS_tiles: <list> of GMS objects that have been created by cut_GMS_obj_into_blocks()
"""
......@@ -748,10 +749,11 @@ class GMS_object(object):
[setattr(self, i, getattr(tile1, i)) for i in tile1.__dict__ \
if not callable(getattr(tile1, i)) and not isinstance(getattr(tile1, i), (np.ndarray, GeoArray))]
# MERGE ARRAY-ATTRIBUTES
list_arraynames = [i for i in tile1.__dict__ if not callable(getattr(tile1, i)) and \
isinstance(getattr(tile1, i), (np.ndarray, GeoArray))]
self.arr_shape = 'cube'
self.arr_pos = None
list_arraynames = ['_arr'] + [i for i in list_arraynames if i !='_arr'] # list must start with _arr, otherwise setters will not work
for arrname in list_arraynames:
samplearray = getattr(tile1, arrname)
......@@ -764,7 +766,31 @@ class GMS_object(object):
target_dtype = samplearray.dtype
merged_array = self._numba_array_merger(list_GMS_tiles, arrname, target_shape, target_dtype)
setattr(self, arrname, merged_array)
setattr(self, arrname if not arrname.startswith('_') else arrname[1:], merged_array) # use setters if possible
# NOTE: this asserts that each attribute starting with '_' has also a property with a setter!
# UPDATE ARRAY-DEPENDENT ATTRIBUTES
self.arr_shape = 'cube'
self.arr_pos = None
# update MetaObj attributes
self.meta_odict.update({
'samples':self.arr.cols, 'lines':self.arr.rows, 'bands':self.arr.bands,
'map info':geotransform2mapinfo(self.arr.gt, self.arr.prj), 'coordinate system string':self.arr.prj,})
# calculate data_corners_imXY (mask_nodata is always an array here because get_mapPos always returns an array)
corners_imYX = calc_FullDataset_corner_positions(
self.mask_nodata, assert_four_corners=False, algorithm='shapely')
self.trueDataCornerPos = [(YX[1], YX[0]) for YX in corners_imYX] # [UL, UR, LL, LR]
# calculate trueDataCornerLonLat
data_corners_LatLon = pixelToLatLon(self.trueDataCornerPos, geotransform=self.arr.gt, projection=self.arr.prj)
self.trueDataCornerLonLat = [(YX[1], YX[0]) for YX in data_corners_LatLon]
# calculate trueDataCornerUTM
data_corners_utmYX = pixelToMapYX(self.trueDataCornerPos, geotransform = self.arr.gt, projection = self.arr.prj) # FIXME asserts gt in UTM coordinates
self.trueDataCornerUTM = [(YX[1], YX[0]) for YX in data_corners_utmYX]
return copy.copy(self)
......@@ -827,6 +853,7 @@ class GMS_object(object):
})
self.subsystem = ''
del self.pathGen # must be refreshed because subsystem is now ''
self.close_GMS_loggers() # must also be refreshed because it depends on pathGen
for attrN in ['SolIrradiance','CWL','FWHM','Offsets','OffsetsRef','Gains','GainsRef',
'ThermalConstK1','ThermalConstK2', 'ViewingAngle_arrProv', 'IncidenceAngle_arrProv']:
......@@ -953,6 +980,8 @@ class GMS_object(object):
self.meta_odict.update({
'samples':self.arr.cols, 'lines':self.arr.rows, 'bands':self.arr.bands,
'map info':geotransform2mapinfo(self.arr.gt, self.arr.prj), 'coordinate system string':self.arr.prj,})
# set shape of full array
self.shape_fullArr = self.arr.shape
return copy.copy(self)
......@@ -1025,7 +1054,7 @@ class GMS_object(object):
if self._mask_nodata is None or overwrite:
self.arr.calc_mask_nodata(fromBand=fromBand, overwrite=overwrite)
self._mask_nodata = self.arr.mask_nodata
self.mask_nodata = self.arr.mask_nodata
return self.mask_nodata
......@@ -1036,14 +1065,6 @@ class GMS_object(object):
'col_start': rasObj.colStart, 'col_end': rasObj.colEnd,'data': data}
def MetaObj2ODict(self): # FIXME deprecated
"""Convert self.MetaObj to an OrderedDict."""
self.logger.info('Preparing extracted metadata to be written to disk...')
self.meta_odict = self.MetaObj.to_odict()
del self.MetaObj # FIXME MetaObj should have its json encoder
def apply_nodata_mask_to_ObjAttr(self, attrname, out_nodata_val=None):
# type: (str,int) -> None
"""Applies self.mask_nodata to the specified array attribute by setting all values where mask_nodata is 0 to the
......@@ -1235,7 +1256,12 @@ class GMS_object(object):
assert imBounds or mapBounds, "Either 'imBounds' or 'mapBounds' must be given. Got nothing."
# calculate mapBounds if not already given
# TODO this would make this method callable with imBounds
if not mapBounds:
rS, rE, cS, cE = imBounds
(xmin, ymax), (xmax, ymin) = [imXY2mapXY((imX,imY), self.arr.gt) for imX,imY in [(cS,rS), (cE+1,rE+1)]]
mapBounds_prj = self.arr.projection
else:
xmin, xmax, ymin, ymax = mapBounds
# avoid disk IO if requested area is within the input array # TODO
......@@ -1246,7 +1272,6 @@ class GMS_object(object):
geoArr = getattr(self,arrname)
# get subsetted and (possibly) reprojected array
xmin,xmax,ymin,ymax = mapBounds
rspAlg = 'near' if arrname=='masks' else 'cubic'
subArr = GeoArray(*geoArr.get_mapPos((xmin,ymin,xmax,ymax), mapBounds_prj, rspAlg=rspAlg),
bandnames = list(geoArr.bandnames),
......@@ -1269,13 +1294,15 @@ class GMS_object(object):
sub_GMS_obj.mask_clouds = sub_GMS_obj.masks[:,:,1] if sub_GMS_obj.masks.bands>1 else None # FIXME not dynamic
# update arr_pos
#arr_pos = ((rS, rE), (cS, cE))
#print(arr_pos)
rows, cols, bands = sub_GMS_obj.arr.shape # FIXME
sub_GMS_obj.arr_shape = 'block'
#sub_GMS_obj.arr_pos = arr_pos
sub_GMS_obj.arr_shape = 'block'
if imBounds is not None:
rS, rE, cS, cE = imBounds
sub_GMS_obj.arr_pos = ((rS, rE), (cS, cE))
else:
pass # FIXME how to set arr_pos in that case?
# calculate new attributes 'corner_utm' and 'corner_lonlat'
rows, cols, bands = sub_GMS_obj.arr.shape
ULxy, URxy, LLxy, LRxy = [[0, 0], [cols - 1, 0], [0, rows - 1], [cols - 1, rows - 1]]
utm_coord_YX = pixelToMapYX ([ULxy, URxy, LLxy, LRxy], geotransform=subArr.gt, projection=subArr.prj) # FIXME asserts gt in UTM coordinates
lonlat_coord = pixelToLatLon([ULxy, URxy, LLxy, LRxy], geotransform=subArr.gt, projection=subArr.prj) # ULyx,URyx,LLyx,LRyx
......@@ -1289,26 +1316,43 @@ class GMS_object(object):
# calculate data_corners_imXY (mask_nodata is always an array here because get_mapPos always returns an array)
corners_imYX = calc_FullDataset_corner_positions(
sub_GMS_obj.mask_nodata, assert_four_corners=False, algorithm='shapely')
sub_GMS_obj.data_corners_imXY = [(YX[1], YX[0]) for YX in corners_imYX]
del sub_GMS_obj.trueDataCornerPos # FIXME this should actually be updated
sub_GMS_obj.trueDataCornerPos = [(YX[1], YX[0]) for YX in corners_imYX]
# calculate data_corners_LonLat
data_corners_LatLon = pixelToLatLon(sub_GMS_obj.data_corners_imXY,
geotransform=subArr.gt, projection=subArr.prj)
sub_GMS_obj.data_corners_LonLat = [(YX[1], YX[0]) for YX in data_corners_LatLon] # FIXME "data_corners_LonLat" not consistent to earlier trueDataCornerLonLat
del sub_GMS_obj.trueDataCornerLonLat # FIXME this should actually be updated
# calculate trueDataCornerLonLat
data_corners_LatLon = pixelToLatLon(sub_GMS_obj.trueDataCornerPos,
geotransform=subArr.gt, projection=subArr.prj)
sub_GMS_obj.trueDataCornerLonLat = [(YX[1], YX[0]) for YX in data_corners_LatLon]
# calculate data_corners_utm
data_corners_utmYX = pixelToMapYX([ULxy, URxy, LLxy, LRxy],
geotransform=subArr.gt, projection=subArr.prj) # FIXME asserts gt in UTM coordinates
sub_GMS_obj.data_corners_utm = [(YX[1], YX[0]) for YX in data_corners_utmYX]
# calculate trueDataCornerUTM
data_corners_utmYX = pixelToMapYX([ULxy, URxy, LLxy, LRxy],
geotransform=subArr.gt, projection=subArr.prj) # FIXME asserts gt in UTM coordinates
sub_GMS_obj.trueDataCornerUTM = [(YX[1], YX[0]) for YX in data_corners_utmYX]
return sub_GMS_obj
def to_tiles(self, blocksize=(2048, 2048)):
    """Yield tiles of the given block size covering the full extent of the GMS object.

    :param blocksize: <tuple> target dimensions of each generated tile as (rows, columns)
    :return:          generator of GMS_object tiles, each created via self.get_subset_obj()
    """
    assert isinstance(blocksize, (list, tuple)) and len(blocksize) == 2, \
        "The argument 'blocksize' must represent a tuple of size 2."

    # tile positions come back as ((row_start, row_end), (col_start, col_end)),
    # end-INCLUSIVE pixel bounds, e.g. ((0, 1999), (0, 999)) at a blocksize of 2000*1000
    tilepos = HLP_F.get_image_tileborders('block', blocksize, shape_fullArr=self.shape_fullArr)

    for (rS, rE), (cS, cE) in tilepos:
        # get_subset_obj expects end-inclusive image bounds (row_start, row_end, col_start,
        # col_end); it internally adds +1 to the end indices when converting to map bounds
        yield self.get_subset_obj(imBounds=(rS, rE, cS, cE))
def to_MGRS_tiles(self, pixbuffer=10, v=False):
# type: (int) -> self
"""Returns a generator object where items represent the MGRS tiles for the given GMS object.
"""Returns a generator object where items represent the MGRS tiles for the GMS object.
:param pixbuffer: <int> a buffer in pixel values used to generate an overlap between the returned MGRS tiles
:param v: <bool> verbose mode
......@@ -1623,9 +1667,8 @@ class GMS_object(object):
else:
if not is_tempfile:
# TODO add loglevel to log_for_fullArr_or_firstTile and turn this message to a warning
self.log_for_fullArr_or_firstTile("%s can not be written, because there is no "
"corresponding attribute." % print_dict[descriptor])
self.logger.warning(
"%s can not be written, because there is no corresponding attribute." % print_dict[descriptor])
# write GMS-file and update database
# IMPORTANT: DO NOT pass the complete object but only a copy of the dictionary in order to prevent ASCII_writer
......
......@@ -1078,7 +1078,7 @@ def data_DB_updater(obj_dict):
'proc_level' : obj_dict['proc_level'],
'coreg_success' : obj_dict['coreg_info']['success'],
'tile_bounds' : get_tile_bounds_box(obj_dict['bounds_LonLat']),
'data_corners' : Polygon(obj_dict['data_corners_LonLat'])}
'data_corners' : Polygon(obj_dict['trueDataCornerLonLat'])}
matchExp = 'WHERE ' + ' AND '.join([get_postgreSQL_matchingExp(k, dict_dbkey_objkey[k])
for k in ['sceneid', 'mgrs_code', 'virtual_sensor_id']])
......
......@@ -48,7 +48,6 @@ from ..algorithms.L2C_P import L2C_object
from ..misc.definition_dicts import proc_chain
from py_tools_ds.ptds import GeoArray
from py_tools_ds.ptds.geo.coord_trafo import mapXY2imXY, reproject_shapelyGeometry
from py_tools_ds.ptds.geo.coord_calc import corner_coord_to_minmax
......@@ -171,7 +170,7 @@ def get_image_tileborders(target_tileShape, target_tileSize, path_GMS_file=None,
for r in range(0, len(row_bounds), 2) for c in range(0, len(col_bounds), 2)]
def cut_GMS_obj_into_blocks(tuple__In_obj__blocksize_RowsCols):
    # type: (tuple) -> list
    """Cut a GMS object into tiles with respect to raster attributes as well as scene wide attributes.

    :param tuple__In_obj__blocksize_RowsCols: <tuple> of the GMS object to cut and the target
                                              block size as (rows, columns)
    :return: <list> of GMS_object tiles
    """
    In_obj, blocksize_RowsCols = tuple__In_obj__blocksize_RowsCols
    assert isinstance(blocksize_RowsCols, (list, tuple)) and len(blocksize_RowsCols) == 2, \
        "The argument 'blocksize_RowsCols' must represent a list of size 2."

    # delegate to GMS_object.to_tiles(): unlike the former plain array-slicing approach,
    # get_subset_obj() also takes care of updating the map info of each block
    return list(In_obj.to_tiles(blocksize=blocksize_RowsCols))
def merge_GMS_tiles_to_GMS_obj(list_GMS_tiles):
......@@ -319,12 +322,9 @@ def make_global(var_names,var_vals):
def cornerLonLat_to_postgreSQL_poly(CornerLonLat):
    """Convert a list of corner coordinates to a postgreSQL-compatible polygon (WKT) string.

    :param CornerLonLat: list of XY (Lon/Lat) coordinate tuples, e.g. [UL, UR, LL, LR]
    :return:             <str> WKT polygon string, e.g. 'POLYGON ((10 50, 11 50, ...))'
    """
    # shapely closes the ring automatically, so the manual string formatting of the
    # previous implementation (explicitly repeating the first vertex) is not needed
    return str(Polygon(CornerLonLat))
def postgreSQL_poly_to_cornerLonLat(pGSQL_poly):
......
......@@ -8,7 +8,7 @@ from shapely.geometry import Polygon
from ..config import GMS_config as CFG
from . import database_tools as DB_T
from . import helper_functions as HLP_F
from ..algorithms import GEOPROCESSING as GEOP # FIXME import functions directly as soon as GEOPROCESSING is included in algorithms.__init__.__all__
from ..algorithms import GEOPROCESSING as GEOP
class MGRS_tile(object):
......@@ -55,7 +55,7 @@ class MGRS_tile(object):
@property
def EPSG(self):
    """Return the EPSG code of the UTM projection matching this MGRS tile.

    EPSG codes are 326xx for the northern and 327xx for the southern hemisphere,
    where xx is the UTM zone.
    """
    # southern hemisphere if the latitude of the tile centroid is below the equator
    is_south = self.poly_lonlat.centroid.xy[1][0] < 0
    # NOTE: the parentheses around the conditional are essential — without them the
    # zone string would only be concatenated in the northern branch (precedence bug)
    return int(('327' if is_south else '326') + str(self.UTMzone))
@property
def geom_wkb(self):
......@@ -101,7 +101,6 @@ class MGRS_tile(object):
buffMap = im_gt[1] * pixbuffer
mgrs_bounds = self.poly_specPrj(im_prj).buffer(buffMap).bounds
print('mgrs_bounds',mgrs_bounds)
tgt_arr, tgt_gt, im_prj = GEOP.clip_array_using_mapBounds(array, mgrs_bounds, im_prj, im_gt, nodataVal)
return tgt_arr, tgt_gt, im_prj
......
......@@ -7,7 +7,7 @@ from itertools import chain
from ..config import GMS_config as CFG
def MAP(func, args, CPUs=None, flatten_ouput=False):
def MAP(func, args, CPUs=None, flatten_output=False):
# type: (any, list, int, bool) -> list
"""Parallelize the execution of the given function.
NOTE: if Job.CPUs in config is 1, execution is not parallelized.
......@@ -15,7 +15,7 @@ def MAP(func, args, CPUs=None, flatten_ouput=False):
:param func: function to parallelize
:param args: function arguments
:param CPUs: number of CPUs to use
:param flatten_ouput: whether to flatten output list,
:param flatten_output: whether to flatten output list,
e.g. [ [ Tile1Scene1, Tile2Scene1], Tile1Scene2, Tile2Scene2] ] to
[ Tile1Scene1, Tile2Scene1, Tile1Scene2, Tile2Scene2 ]
"""
......@@ -26,7 +26,7 @@ def MAP(func, args, CPUs=None, flatten_ouput=False):
else:
results = [func(argset) for argset in args] # generator does not always work properly here
if flatten_ouput:
if flatten_output:
try:
ch = chain.from_iterable(results)
return list(ch)
......
......@@ -39,7 +39,6 @@ def L1A_map(dataset_dict): #map (scene-wise parallelization)
L1A_obj.calc_mean_VAA()
L1A_obj.calc_orbit_overpassParams() # requires corner positions
L1A_obj.apply_nodata_mask_to_ObjAttr('mask_clouds',0)
L1A_obj.MetaObj2ODict()
if CFG.job.exec__L1AP[1]:
L1A_obj.to_ENVI()
L1A_obj.delete_tempFiles()
......@@ -68,7 +67,7 @@ def L1A_map_1(dataset_dict, block_size=None): #map (scene-wise parallelization)
L1A_obj.apply_nodata_mask_to_ObjAttr('arr') # nodata mask is automatically calculated
L1A_obj.add_rasterInfo_to_MetaObj()
L1A_obj.reference_data('UTM')
tiles = HLP_F.cut_GMS_obj_into_blocks((L1A_obj, block_size if block_size else CFG.job.tiling_block_size_XY)) # cut (block-wise parallelization)
tiles = list(L1A_obj.to_tiles(block_size if block_size else CFG.job.tiling_block_size_XY)) # cut (block-wise parallelization)
return tiles
......@@ -90,7 +89,6 @@ def L1A_map_3(L1A_obj): #map (scene-wise parallelization)
L1A_obj.calc_mean_VAA()
L1A_obj.calc_orbit_overpassParams() # requires corner positions
L1A_obj.apply_nodata_mask_to_ObjAttr('mask_clouds',0)
L1A_obj.MetaObj2ODict() # requires Meta dict
if CFG.job.exec__L1AP[1]:
L1A_obj.to_ENVI()
L1A_obj.delete_tempFiles()
......@@ -184,7 +182,7 @@ def L2A_map(L1C_objs, block_size=None):
# delete tempfiles of separate subsystem GMS objects
[L2A_obj.delete_tempFiles() for L2A_obj in L2A_objs]
L2A_tiles = HLP_F.cut_GMS_obj_into_blocks((L2A_obj, block_size if block_size else CFG.job.tiling_block_size_XY))
L2A_tiles = L2A_obj.to_tiles(blocksize=block_size if block_size else CFG.job.tiling_block_size_XY)
return L2A_tiles
......
......@@ -424,7 +424,7 @@ class process_controller(object):
# map
L1A_resObjects = MAP(L1A_map, datalist_L1A_P, CPUs=12)
else: # tiles
all_L1A_tiles_map1 = MAP(L1A_map_1, datalist_L1A_P, flatten_ouput=True) # map_1 # merge results to new list of splits
all_L1A_tiles_map1 = MAP(L1A_map_1, datalist_L1A_P, flatten_output=True) # map_1 # merge results to new list of splits
L1A_obj_tiles = MAP(L1A_map_2, all_L1A_tiles_map1) # map_2
grouped_L1A_Tiles = HLP_F.group_objects_by_attributes(
......@@ -477,7 +477,7 @@ class process_controller(object):
# group by scene ID (all subsystems belonging to the same scene ID must be processed together)
grouped_L1B_Instances = HLP_F.group_objects_by_attributes(L1B_Instances, 'scene_ID')
L1C_resObjects = MAP(L1C_map, grouped_L1B_Instances, flatten_ouput=True)
L1C_resObjects = MAP(L1C_map, grouped_L1B_Instances, flatten_output=True)
else: # tiles
blocksize = (5000, 5000)
......@@ -485,7 +485,7 @@ class process_controller(object):
L1B_newTiles = []
if self.L1B_newObjects:
tuples_obj_blocksize = [(obj, blocksize) for obj in self.L1B_newObjects]
L1B_newTiles = MAP(HLP_F.cut_GMS_obj_into_blocks, tuples_obj_blocksize, flatten_ouput=True)
L1B_newTiles = MAP(HLP_F.cut_GMS_obj_into_blocks, tuples_obj_blocksize, flatten_output=True)
"""combine newly and earlier processed L1B data"""
L1B_newDBTiles = self.get_DB_objects('L1C', self.L1B_newObjects, blocksize=blocksize)
......@@ -521,7 +521,7 @@ class process_controller(object):
# group by scene ID (all subsystems belonging to the same scene ID must be processed together)
grouped_L1C_Instances = HLP_F.group_objects_by_attributes(L1C_Instances, 'scene_ID')
L2A_resTiles = MAP(L2A_map, grouped_L1C_Instances, flatten_ouput=True)
L2A_resTiles = MAP(L2A_map, grouped_L1C_Instances, flatten_output=True)
self.L2A_tiles = [obj for obj in L2A_resTiles if isinstance(obj, L2A_P.L2A_object)]
self.failed_objects += [obj for obj in L2A_resTiles if isinstance(obj, failed_GMS_object) and
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment