Commit 7df60bbb authored by Daniel Scheffler

Config revision - intermediate state: Tests are running again.


Former-commit-id: 64594529
parent a16e19c1
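
The change applied across every file below is mechanical: the former CFG.job.* and CFG.usecase.* sub-namespaces of the global GMS configuration are flattened into a single CFG object, so all call sites drop the intermediate attribute. A minimal sketch of the before/after access pattern (both stand-in classes and all option values here are hypothetical; only the option names are taken from the hunks below):

# Sketch of the config flattening this commit applies (illustrative only;
# the two classes and every option value are hypothetical stand-ins).

class OldGMSConfig:
    """Pre-revision layout: options split across CFG.job and CFG.usecase."""
    class job:
        exec_mode = 'Python'                       # accessed as CFG.job.exec_mode
        conn_database = 'postgresql://<host>/<db>' # hypothetical value
    class usecase:
        scale_factor_TOARef = 10000                # accessed as CFG.usecase.scale_factor_TOARef

class NewGMSConfig:
    """Post-revision layout: one flat namespace, as used in the hunks below."""
    exec_mode = 'Python'
    conn_database = 'postgresql://<host>/<db>'
    scale_factor_TOARef = 10000

CFG = NewGMSConfig
# Call sites therefore change from CFG.job.<option> / CFG.usecase.<option> to CFG.<option>:
assert CFG.exec_mode == 'Python'
assert CFG.scale_factor_TOARef == 10000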
@@ -20,8 +20,8 @@ def run_from_jobid(args):
     # set up process controller instance
     PC = process_controller(args.jobid, parallelization_level='scenes', db_host='geoms')  # FIXME hardcoded host
-    # PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
-    # PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
+    # PC.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
+    # PC.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'

     # run the job
     PC.run_all_processors()
@@ -112,8 +112,8 @@ def _run_job(dbJob, parallelization_level='scenes'):
     warnings.warn("Currently the console argument parser sets the parallelization level to 'scenes'.")  # TODO
     PC = process_controller(jobid, parallelization_level=parallelization_level)
-    # PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
-    # PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
+    # PC.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
+    # PC.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'

     # run the job
     PC.run_all_processors()
...
@@ -143,7 +143,7 @@ class L1A_object(GMS_object):
         subset = ['block', [[sub_dim[0], sub_dim[1] + 1], [sub_dim[2], sub_dim[3] + 1]]]
         rasObj = GEOP.GEOPROCESSING(paths_files2stack[0], self.logger, subset=subset)
-        if CFG.job.exec_mode == 'Flink' and path_output is None:  # numpy array output
+        if CFG.exec_mode == 'Flink' and path_output is None:  # numpy array output
             self.arr = rasObj.Layerstacking(paths_files2stack)
             self.path_InFilePreprocessor = paths_files2stack[0]
         else:  # 'MEMORY' or physical output
@@ -162,7 +162,7 @@ class L1A_object(GMS_object):
         subset = ['block', [[sub_dim[0], sub_dim[1] + 1], [sub_dim[2], sub_dim[3] + 1]]]
         rasObj = GEOP.GEOPROCESSING(path_file2load, self.logger, subset=subset)
-        if CFG.job.exec_mode == 'Flink' and path_output is None:  # numpy array output
+        if CFG.exec_mode == 'Flink' and path_output is None:  # numpy array output
             self.arr = gdalnumeric.LoadFile(path_file2load) if subset is None else \
                 gdalnumeric.LoadFile(path_file2load, rasObj.colStart, rasObj.rowStart, rasObj.cols, rasObj.rows)
             self.path_InFilePreprocessor = path_file2load
@@ -190,7 +190,7 @@ class L1A_object(GMS_object):
         data_arr = np.empty(data.shape + (len(self.LayerBandsAssignment),), data.dtype)
         data_arr[:, :, bidx] = data
-        if CFG.job.exec_mode == 'Flink' and path_output is None:  # numpy array output
+        if CFG.exec_mode == 'Flink' and path_output is None:  # numpy array output
             self.arr = data_arr
         else:
             GEOP.ndarray2gdal(data_arr, path_output, geotransform=ds.GetGeoTransform(),
@@ -221,7 +221,7 @@ class L1A_object(GMS_object):
         data_arr = np.empty(data.shape + (len(self.LayerBandsAssignment),), data.dtype)
         data_arr[:, :, i] = data
-        if CFG.job.exec_mode == 'Flink' and path_output is None:  # numpy array output
+        if CFG.exec_mode == 'Flink' and path_output is None:  # numpy array output
             self.arr = data_arr
         else:
             GEOP.ndarray2gdal(data_arr, path_output, direction=3)
@@ -271,7 +271,7 @@ class L1A_object(GMS_object):
     def calc_TOARadRefTemp(self, subset=None):
         """Convert DN, Rad or TOA_Ref data to TOA Reflectance, to Radiance or to Surface Temperature
-        (depending on CFG.usecase.conversion_type_optical and conversion_type_thermal).
+        (depending on CFG.conversion_type_optical and conversion_type_thermal).
         The function can be executed by a L1A_object representing a full scene or a tile. To process a file from disk
         in tiles, provide an item of self.tile_pos as the 'subset' argument."""
@@ -305,7 +305,7 @@ class L1A_object(GMS_object):
         for optical_thermal in ['optical', 'thermal']:
             if optical_thermal not in self.dict_LayerOptTherm.values():
                 continue
-            conv = getattr(CFG.usecase, 'conversion_type_%s' % optical_thermal)
+            conv = getattr(CFG, 'conversion_type_%s' % optical_thermal)
             conv = conv if conv != 'BOA_Ref' else 'TOA_Ref'
             assert conv in ['Rad', 'TOA_Ref', 'Temp'], 'Unsupported conversion type: %s' % conv
             arr_desc = self.arr_desc.split('/')[0] if optical_thermal == 'optical' else self.arr_desc.split('/')[-1]
@@ -330,7 +330,7 @@ class L1A_object(GMS_object):
                                                    inSaturated) if conv == 'TOA_Ref' else \
                     GEOP.DN2DegreesCelsius_fastforward(inArray, OFF, GAI, K1, K2, 0.95, inFill, inZero, inSaturated)
                 if conv == 'TOA_Ref':
-                    self.MetaObj.ScaleFactor = CFG.usecase.scale_factor_TOARef
+                    self.MetaObj.ScaleFactor = CFG.scale_factor_TOARef
             elif arr_desc == 'Rad':
                 raise NotImplementedError("Conversion Rad to %s is currently not supported." % conv)
...@@ -349,16 +349,16 @@ class L1A_object(GMS_object): ...@@ -349,16 +349,16 @@ class L1A_object(GMS_object):
'13 bands and it not clear for which bands the gains are provided.') '13 bands and it not clear for which bands the gains are provided.')
raise NotImplementedError("Conversion TOA_Ref to %s is currently not supported." % conv) raise NotImplementedError("Conversion TOA_Ref to %s is currently not supported." % conv)
else: # conv=='TOA_Ref' else: # conv=='TOA_Ref'
if self.MetaObj.ScaleFactor != CFG.usecase.scale_factor_TOARef: if self.MetaObj.ScaleFactor != CFG.scale_factor_TOARef:
res = self.rescale_array(inArray, CFG.usecase.scale_factor_TOARef, self.MetaObj.ScaleFactor) res = self.rescale_array(inArray, CFG.scale_factor_TOARef, self.MetaObj.ScaleFactor)
self.MetaObj.ScaleFactor = CFG.usecase.scale_factor_TOARef self.MetaObj.ScaleFactor = CFG.scale_factor_TOARef
self.log_for_fullArr_or_firstTile( self.log_for_fullArr_or_firstTile(
'Rescaling Ref data to scaling factor %d.' % CFG.usecase.scale_factor_TOARef) 'Rescaling Ref data to scaling factor %d.' % CFG.scale_factor_TOARef)
else: else:
res = inArray res = inArray
self.log_for_fullArr_or_firstTile('The input data already represents TOA ' self.log_for_fullArr_or_firstTile('The input data already represents TOA '
'reflectance with the desired scale factor of %d.' 'reflectance with the desired scale factor of %d.'
% CFG.usecase.scale_factor_TOARef) % CFG.scale_factor_TOARef)
else: # arr_desc == 'Temp' else: # arr_desc == 'Temp'
raise NotImplementedError("Conversion Temp to %s is currently not supported." % conv) raise NotImplementedError("Conversion Temp to %s is currently not supported." % conv)
@@ -390,8 +390,8 @@ class L1A_object(GMS_object):
         self.update_spec_vals_according_to_dtype('int16')
         tiles_desc = '_'.join([desc for op_th, desc in zip(['optical', 'thermal'],
-                                                           [CFG.usecase.conversion_type_optical,
-                                                            CFG.usecase.conversion_type_thermal])
+                                                           [CFG.conversion_type_optical,
+                                                            CFG.conversion_type_thermal])
                               if desc in self.dict_LayerOptTherm.values()])
         self.arr = dataOut
@@ -452,12 +452,12 @@ class L1A_object(GMS_object):
                                      dst_CS_datum='WGS84', mode='GDAL', use_workspace=True,
                                      inFill=self.MetaObj.spec_vals['fill'])
-        if CFG.job.exec_mode == 'Python':
+        if CFG.exec_mode == 'Python':
             path_warped = os.path.join(self.ExtractedFolder, self.baseN + '__' + self.arr_desc)
             GEOP.ndarray2gdal(rasObj.tondarray(direction=3), path_warped, importFile=rasObj.desc, direction=3)
             self.MetaObj.Dataname = path_warped
             self.arr = path_warped
-        else:  # CFG.job.exec_mode=='Flink':
+        else:  # CFG.exec_mode=='Flink':
             self.arr = rasObj.tondarray(direction=3)
         self.shape_fullArr = [rasObj.rows, rasObj.cols, rasObj.bands]
@@ -473,7 +473,7 @@ class L1A_object(GMS_object):
         self.MetaObj.CornerTieP_UTM = rasObj.get_corner_coordinates('UTM')
         self.meta_odict = self.MetaObj.to_odict()  # important in order to keep geotransform/projection
-        if CFG.job.exec_mode == 'Flink':
+        if CFG.exec_mode == 'Flink':
             self.delete_tempFiles()  # these files are needed later in Python execution mode
             self.MetaObj.Dataname = previous_dataname  # /vsi.. pointing directly to raw data archive (which exists)
...
@@ -76,7 +76,7 @@ class Scene_finder(object):
                 SpIM = SpatialIndexMediator(timeout=timeout)
                 self.possib_ref_scenes = \
                     SpIM.getFullSceneDataForDataset(self.boundsLonLat, self.timeStart, self.timeEnd, self.min_cloudcov,
-                                                    self.max_cloudcov, CFG.usecase.datasetid_spatial_ref,
+                                                    self.max_cloudcov, CFG.datasetid_spatial_ref,
                                                     refDate=self.src_AcqDate, maxDaysDelta=self.plusminus_days)
                 break
             except socket.timeout:
@@ -116,7 +116,7 @@ class Scene_finder(object):
         # get processing level of reference scenes
         procL = GeoDataFrame(
-            DB_T.get_info_from_postgreSQLdb(CFG.job.conn_database, 'scenes_proc', ['sceneid', 'proc_level'],
+            DB_T.get_info_from_postgreSQLdb(CFG.conn_database, 'scenes_proc', ['sceneid', 'proc_level'],
                                             {'sceneid': list(GDF.sceneid)}), columns=['sceneid', 'proc_level'])
         GDF = GDF.merge(procL, on='sceneid', how='left')
         GDF = GDF.where(GDF.notnull(), None)  # replace NaN values with None
@@ -129,7 +129,7 @@ class Scene_finder(object):
         GDF['refDs_exists'] = list(GDF['path_ref'].map(lambda p: os.path.exists(p) if p else False))
         # check if a proper entity ID can be gathered from database
-        eID = GeoDataFrame(DB_T.get_info_from_postgreSQLdb(CFG.job.conn_database, 'scenes', ['id', 'entityid'],
+        eID = GeoDataFrame(DB_T.get_info_from_postgreSQLdb(CFG.conn_database, 'scenes', ['id', 'entityid'],
                                                            {'id': list(GDF.sceneid)}), columns=['sceneid', 'entityid'])
         GDF = GDF.merge(eID, on='sceneid', how='left')
         self.GDF_ref_scenes = GDF.where(GDF.notnull(), None)
@@ -284,7 +284,7 @@ class L1B_object(L1A_object):
         plusminus_days = 30
         AcqDate = self.im2shift_objDict['acquisition_date']
         date_minmax = [AcqDate - timedelta(days=plusminus_days), AcqDate + timedelta(days=plusminus_days)]
-        dataset_cond = 'datasetid=%s' % CFG.usecase.datasetid_spatial_ref
+        dataset_cond = 'datasetid=%s' % CFG.datasetid_spatial_ref
         cloudcov_cond = 'cloudcover < %s' % max_cloudcov
         # FIXME cloudcover not yet available for all scenes in proc_level METADATA
         dayrange_cond = "(EXTRACT(MONTH FROM scenes.acquisitiondate), EXTRACT(DAY FROM scenes.acquisitiondate)) " \
@@ -294,7 +294,7 @@ class L1B_object(L1A_object):
         def query_scenes(condlist):
             return DB_T.get_overlapping_scenes_from_postgreSQLdb(
-                CFG.job.conn_database,
+                CFG.conn_database,
                 table='scenes',
                 tgt_corners_lonlat=self.trueDataCornerLonLat,
                 conditions=condlist,
@@ -311,9 +311,9 @@ class L1B_object(L1A_object):
         # this is only the result from scenes_proc
         # -> a reference is only stored there if the scene was already in the dataset list at the start of CFG.job
         res = DB_T.get_overlapping_scenes_from_postgreSQLdb(
-            CFG.job.conn_database,
+            CFG.conn_database,
             tgt_corners_lonlat=self.trueDataCornerLonLat,
-            conditions=['datasetid=%s' % CFG.usecase.datasetid_spatial_ref],
+            conditions=['datasetid=%s' % CFG.datasetid_spatial_ref],
             add_cmds='ORDER BY scenes.cloudcover ASC',
             timeout=25000)
         filt_overlap_scenes = self._sceneIDList_to_filt_overlap_scenes([i[0] for i in res[:50]], 20.)
@@ -354,8 +354,8 @@ class L1B_object(L1A_object):
         # start download of scene data not available and start L1A processing
         def dl_cmd(scene_ID): print('%s %s %s' % (
-            CFG.job.java_commands['keyword'].strip(),  # FIXME CFG.job.java_commands is deprecated
-            CFG.job.java_commands["value_download"].strip(), scene_ID))
+            CFG.java_commands['keyword'].strip(),  # FIXME CFG.java_commands is deprecated
+            CFG.java_commands["value_download"].strip(), scene_ID))

         path = PG.path_generator(scene_ID=sc['scene_ID']).get_path_imagedata()
@@ -369,12 +369,12 @@ class L1B_object(L1A_object):
         # check if scene is downloading
         download_start_timeout = 5  # seconds
         # set timout for external processing
-        # -> DEPRECATED BECAUSE CREATION OF EXTERNAL CFG.job WITHIN CFG.job IS NOT ALLOWED
+        # -> DEPRECATED BECAUSE CREATION OF EXTERNAL CFG WITHIN CFG IS NOT ALLOWED
         processing_timeout = 5  # seconds  # FIXME increase timeout if processing is really started
         proc_level = None
         while True:
             proc_level_chk = DB_T.get_info_from_postgreSQLdb(
-                CFG.job.conn_database, 'scenes', ['proc_level'], {'id': sc['scene_ID']})[0][0]
+                CFG.conn_database, 'scenes', ['proc_level'], {'id': sc['scene_ID']})[0][0]
             if proc_level != proc_level_chk:
                 print('Reference scene %s, current processing level: %s' % (sc['scene_ID'], proc_level_chk))
             proc_level = proc_level_chk
@@ -391,7 +391,7 @@ class L1B_object(L1A_object):
                 warnings.warn('L1A processing of reference scene %s (entity ID %s) timed out. '
                               'Coregistration of this scene failed.' % (self.baseN, self.scene_ID))
                 break
-            # DB_T.set_info_in_postgreSQLdb(CFG.job.conn_database,'scenes',
+            # DB_T.set_info_in_postgreSQLdb(CFG.conn_database,'scenes',
             #                               {'proc_level':'METADATA'},{'id':sc['scene_ID']})
             time.sleep(5)
@@ -408,7 +408,7 @@ class L1B_object(L1A_object):
             self.overlap_percentage = sc['overlap percentage']
             self.overlap_area = sc['overlap area']
-            query_res = DB_T.get_info_from_postgreSQLdb(CFG.job.conn_database, 'scenes', ['entityid'],
+            query_res = DB_T.get_info_from_postgreSQLdb(CFG.conn_database, 'scenes', ['entityid'],
                                                         {'id': self.imref_scene_ID}, records2fetch=1)
             assert query_res != [], 'No entity-ID found for scene number %s' % self.imref_scene_ID
             self.imref_entity_ID = query_res[0][0]  # [('LC81510322013152LGN00',)]
@@ -504,7 +504,7 @@ class L1B_object(L1A_object):
         if spatIdxSrv_status == 'unavailable':
             self.logger.warning('Coregistration skipped due to unavailable Spatial Index Mediator Server!"')
-        elif CFG.job.skip_coreg:
+        elif CFG.skip_coreg:
             self.logger.warning('Coregistration skipped according to user configuration.')
         elif self.coreg_needed and self.spatRef_available:
@@ -604,7 +604,7 @@ class L1B_object(L1A_object):
         if self.coreg_info['success']:
             self.logger.info("Correcting spatial shifts for attribute '%s'..." % attrname)
         elif cliptoextent and is_coord_grid_equal(
-                geoArr.gt, CFG.usecase.spatial_ref_gridx, CFG.usecase.spatial_ref_gridy):
+                geoArr.gt, CFG.spatial_ref_gridx, CFG.spatial_ref_gridy):
             self.logger.info("Attribute '%s' has only been clipped to it's extent because no valid "
                              "shifts have been detected and the pixel grid equals the target grid."
                              % attrname)
@@ -615,12 +615,12 @@ class L1B_object(L1A_object):
             # correct shifts
             DS = DESHIFTER(geoArr, self.coreg_info,
-                           target_xyGrid=[CFG.usecase.spatial_ref_gridx, CFG.usecase.spatial_ref_gridy],
+                           target_xyGrid=[CFG.spatial_ref_gridx, CFG.spatial_ref_gridy],
                            cliptoextent=cliptoextent,
                            clipextent=mapBounds,
                            align_grids=True,
                            resamp_alg='nearest' if attrname == 'masks' else 'cubic',
-                           CPUs=None if CFG.job.allow_subMultiprocessing else 1,
+                           CPUs=None if CFG.allow_subMultiprocessing else 1,
                            progress=True if v else False,
                            q=True,
                            v=v)
...
@@ -148,8 +148,8 @@ class L1C_object(L1B_object):
                 meshwidth=10,
                 nodata_mask=None,  # dont overwrite areas outside the image with nodata
                 outFill=get_outFillZeroSaturated(np.float32)[0],
-                accurracy=CFG.usecase.SZA_SAA_calculation_accurracy,
-                lonlat_arr=self.lonlat_arr if CFG.usecase.SZA_SAA_calculation_accurracy == 'fine' else None)
+                accurracy=CFG.SZA_SAA_calculation_accurracy,
+                lonlat_arr=self.lonlat_arr if CFG.SZA_SAA_calculation_accurracy == 'fine' else None)
         return self._SZA_arr

     @SZA_arr.setter
@@ -259,7 +259,7 @@ class AtmCorr(object):
             path_logfile = inObj.pathGen.get_path_logfile()
             fileHandler = logging.FileHandler(path_logfile, mode='a')
             fileHandler.setFormatter(logger_atmCorr.formatter_fileH)
-            fileHandler.setLevel(CFG.job.log_level)
+            fileHandler.setLevel(CFG.log_level)
             logger_atmCorr.addHandler(fileHandler)
@@ -662,7 +662,7 @@ class AtmCorr(object):
         # compute cloud mask if not already provided
         if no_avail_CMs:
-            algorithm = CFG.usecase.cloud_masking_algorithm[self.inObjs[0].satellite]
+            algorithm = CFG.cloud_masking_algorithm[self.inObjs[0].satellite]
             if algorithm == 'SICOR':
                 return None
@@ -672,7 +672,7 @@ class AtmCorr(object):
             try:
                 from .cloud_masking import Cloud_Mask_Creator

-                CMC = Cloud_Mask_Creator(self.inObjs[0], algorithm=algorithm, tempdir_root=CFG.job.path_tempdir)
+                CMC = Cloud_Mask_Creator(self.inObjs[0], algorithm=algorithm, tempdir_root=CFG.path_tempdir)
                 CMC.calc_cloud_mask()
                 cm_geoarray = CMC.cloud_mask_geoarray
                 cm_array = CMC.cloud_mask_array
@@ -742,7 +742,7 @@ class AtmCorr(object):
         t0 = time()
         results = download_variables(date_from=self.inObjs[0].acq_datetime,
                                      date_to=self.inObjs[0].acq_datetime,
-                                     db_path=CFG.job.path_ECMWF_db,
+                                     db_path=CFG.path_ECMWF_db,
                                      max_step=120,  # default
                                      ecmwf_variables=default_products,
                                      processes=0,  # singleprocessing
@@ -792,7 +792,7 @@ class AtmCorr(object):
         script = False

         # check if ECMWF data are available - if not, start the download
-        if CFG.usecase.auto_download_ecmwf:
+        if CFG.auto_download_ecmwf:
             self._check_or_download_ECMWF_data()

         # validate SNR
@@ -887,7 +887,7 @@ class AtmCorr(object):
             # update metadata
             inObj.arr_desc = 'BOA_Ref'
             inObj.MetaObj.bands = len(self.results.data_ac)
-            inObj.MetaObj.PhysUnit = 'BOA_Reflectance in [0-%d]' % CFG.usecase.scale_factor_BOARef
+            inObj.MetaObj.PhysUnit = 'BOA_Reflectance in [0-%d]' % CFG.scale_factor_BOARef
             inObj.MetaObj.LayerBandsAssignment = out_LBA
             inObj.MetaObj.filter_layerdependent_metadata()
             inObj.meta_odict = inObj.MetaObj.to_odict()  # actually auto-updated by getter
@@ -896,7 +896,7 @@ class AtmCorr(object):
             # FIXME AC output nodata values = 0 -> new nodata areas but mask not updated
             oF_refl, oZ_refl, oS_refl = get_outFillZeroSaturated(inObj.arr.dtype)
             surf_refl = np.dstack((self.results.data_ac[bandN] for bandN in ac_bandNs))
-            surf_refl *= CFG.usecase.scale_factor_BOARef  # scale using scale factor (output is float16)
+            surf_refl *= CFG.scale_factor_BOARef  # scale using scale factor (output is float16)
             # FIXME really set AC nodata values to GMS outZero?
             surf_refl[nodata] = oZ_refl  # overwrite AC nodata values with GMS outZero
             # apply the original nodata mask (indicating background values)
@@ -927,8 +927,8 @@ class AtmCorr(object):
             ac_bandNs = [bandN for bandN in inObj.arr.bandnames if bandN in self.results.data_ac.keys()]
             ac_errors = np.dstack((self.results.data_errors[bandN] for bandN in ac_bandNs))
-            ac_errors *= CFG.usecase.scale_factor_errors_ac  # scale using scale factor (output is float16)
-            out_dtype = np.int8 if CFG.usecase.scale_factor_errors_ac <= 255 else np.int16
+            ac_errors *= CFG.scale_factor_errors_ac  # scale using scale factor (output is float16)
+            out_dtype = np.int8 if CFG.scale_factor_errors_ac <= 255 else np.int16
             ac_errors[nodata] = get_outFillZeroSaturated(out_dtype)[0]
             ac_errors = ac_errors.astype(out_dtype)
             inObj.ac_errors = ac_errors  # setter generates a GeoArray with the same bandnames like inObj.arr
@@ -989,8 +989,8 @@ class AtmCorr(object):
         if self.results.mask_clouds.mask_confidence_array is not None:
             cfd_arr = self.results.mask_clouds.mask_confidence_array  # float32 2D array, scaled [0-1, nodata 255]
             cfd_arr[cfd_arr == self.ac_input['options']['cld_mask']['nodata_value_mask']] = -1
-            cfd_arr = (cfd_arr * CFG.usecase.scale_factor_BOARef).astype(np.int16)
-            cfd_arr[cfd_arr == -CFG.usecase.scale_factor_BOARef] = get_outFillZeroSaturated(cfd_arr.dtype)[0]
+            cfd_arr = (cfd_arr * CFG.scale_factor_BOARef).astype(np.int16)
+            cfd_arr[cfd_arr == -CFG.scale_factor_BOARef] = get_outFillZeroSaturated(cfd_arr.dtype)[0]

             joined = False
             for inObj in self.inObjs:
...
@@ -41,7 +41,7 @@ class L2B_object(L2A_object):
     def spectral_homogenization(self, kind='linear'):
         src_cwls = self.meta_odict['wavelength']
         # FIXME exclude or include thermal bands; respect sorted CWLs in context of LayerBandsAssignment
-        tgt_cwls = CFG.usecase.target_CWL
+        tgt_cwls = CFG.target_CWL
         if src_cwls != tgt_cwls:
             assert kind in ['linear', ], "%s is not a supported kind of homogenization." % kind
             self.log_for_fullArr_or_firstTile(
...
@@ -71,7 +71,7 @@ class _FMASK_Runner(object):
     def is_GMSConfig_available(self):
         from ..config import GMS_config as CFG
         try:
-            if CFG.job is not None:
+            if CFG is not None:
                 return True
         except (EnvironmentError, OSError):
             return False
@@ -350,7 +350,7 @@ class FMASK_Runner_Sentinel2(_FMASK_Runner):