Commit db54bd81 authored by Daniel Scheffler's avatar Daniel Scheffler Committed by Mathias Peters
Browse files

Bugfix for not properly closed logfiles; bugfix for reference scene filtering by projection

L0B_P:
- L0B_object: bugfix for not properly closed logfiles
L1A_P:
- added assignment of L1A_object.GMS_identifier['logger']
- added verbose mode to get_MetaObj() and disabled it
- __getstate__(): bugfix
L1B_P:
- Scene_finder.filter_possib_ref_scenes(): bugfix for not filtering by projection
ENV:
- added dummy function check_paths()
HLP_F:
- close_logger(): bugfix for not checking if given logger has handlers
CFG:
- deleted two deprecated comments
PC:
- added some job IDs
- added 'job comment info' during startup
parent 77a105fc
......@@ -70,11 +70,16 @@ class L0B_object(object):
if self.path_archive_valid:
self.logger.info('Level 0B object for %s %s%s (data-ID %s) successfully created.' %(self.satellite,
self.sensor, (' '+self.subsystem) if self.subsystem not in [None,''] else '', self.entity_ID))
if hasattr(self,'logger'): del self.logger
if job.exec_mode=='Python' and self.ExtractedFolder and not os.path.isdir(self.ExtractedFolder):
os.makedirs(self.ExtractedFolder)
# close loggers
HLP_F.close_logger(self.GMS_identifier['logger'])
HLP_F.close_logger(self.logger)
del self.GMS_identifier['logger']
del self.logger
def _data_downloader(self,sensor, entity_ID):
self.logger.info('Level 0B Processing started')
success = False
......
......@@ -88,6 +88,8 @@ class L1A_object(object):
self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1) \
if L0B_object else None
self.GMS_identifier = L0B_object.GMS_identifier if L0B_object else None
if self.GMS_identifier:
self.GMS_identifier['logger'] = self.logger
self.dataset_ID = int(DB_T.get_info_from_postgreSQLdb(job.conn_database, 'scenes', ['datasetid'],
{'id': self.scene_ID})[0][0]) if self.scene_ID !=-9999 else -9999
self.outInterleave = 'bsq'
......@@ -150,7 +152,7 @@ class L1A_object(object):
else:
self.archive_to_rasObj(self.path_archive,self.path_InFilePreprocessor)
self.get_MetaObj()
self.get_MetaObj(v=False)
self.get_shape_fullArr()
self.GMS_identifier = self.MetaObj.get_GMS_identifier()
self.subsystem = self.MetaObj.Subsystem
......@@ -197,7 +199,7 @@ class L1A_object(object):
if hasattr(self,'logger'):
HLP_F.close_logger(self.logger)
self.logger = 'not set'
if self.GMS_identifier:
if self.GMS_identifier and 'logger' in self.GMS_identifier:
HLP_F.close_logger(self.GMS_identifier['logger'])
self.GMS_identifier['logger'] = 'not set'
if hasattr(self,'MetaObj') and self.MetaObj:
......@@ -205,7 +207,6 @@ class L1A_object(object):
self.MetaObj.logger = 'not set'
return self.__dict__
def __setstate__(self, ObjDict):
"""Defines how the attributes of GMS object are unpickled."""
......@@ -607,16 +608,20 @@ class L1A_object(object):
ds=None
def get_MetaObj(self, v=False):
    """Read meta information of the given file from the given ASCII metafile.

    Works for: RapidEye (metadata.xml), SPOT (metadata.dim), LANDSAT (mtl.txt),
    ASTER (downloaded coremetadata), ALOS (summary.txt & Leader file).

    Sets self.MetaObj as a META.METADATA instance.

    :param v:  verbose mode - if True, additionally log an overview of all
               metadata values that have been read
    :return:   None
    """
    self.logger.info('Reading %s %s %s metadata...' % (self.satellite, self.sensor, self.subsystem))
    self.MetaObj = META.METADATA(self.satellite, self.subsystem, self.scene_ID, self.path_InFilePreprocessor,
                                 self.path_MetaPreprocessor, self.logger, self.LayerBandsAssignment)
    if v:
        self.logger.info("The following metadata have been read:")
        # plain loop instead of a side-effect list comprehension
        for key, val in self.MetaObj.get_meta_overview().items():
            self.logger.info("%20s : %-4s" % (key, val))
def set_arr_desc_from_MetaObj(self):
......@@ -959,7 +964,7 @@ class L1A_object(object):
# # # data = self.CLD_obj(self)
# # print( time.time()-t1, os.path.basename(i), 'cld calc')
del self.GMS_identifier['logger']
del self.GMS_identifier['logger'] # FIXME does that properly close the logger?
if mask_clouds is not None:
if False in np.equal(mask_clouds,mask_clouds.astype(np.uint8)):
......
......@@ -258,6 +258,7 @@ class Scene_finder(object):
self._filter_by_proc_status()
self._filter_by_dataset_existance()
self._filter_by_entity_ID_availability()
self._filter_by_projection()
def choose_ref_scene(self):
......@@ -549,29 +550,29 @@ class L1B_object(L1A_object):
def get_spatial_reference_scene(self):
    """Query the scenes database for a spatial reference scene usable for co-registration.

    Runs a spatial query around the scene footprint, filters the candidate
    scenes and assigns the chosen one to self.spatRef_scene. Sets
    self.spatRef_available accordingly.

    :return: None
    """
    boundsLonLat = corner_coord_to_minmax(self.trueDataCornerLonLat)
    footprint_poly = HLP_F.CornerLonLat_to_shapelyPoly(self.trueDataCornerLonLat)
    # NOTE(review): the positional thresholds (20, 0, 20, 30, 10) are
    # Scene_finder tuning parameters - verify their meaning against Scene_finder.__init__
    RSF = Scene_finder(boundsLonLat, self.acquisition_date, self.meta['coordinate system string'],
                       footprint_poly, 20, 0, 20, 30, 10)

    # run spatial query
    self.logger.info('Querying database in order to find a suitable reference scene for co-registration.')
    RSF.spatial_query(timeout=5)
    if RSF.possib_ref_scenes:
        self.logger.info('Query result: %s reference scenes with matching metadata.' % len(RSF.possib_ref_scenes))
    else:
        self.logger.warning('Spatial query returned no matches. Coregistration impossible.')
        self.spatRef_available = False
        return None

    # filter results
    RSF.filter_possib_ref_scenes()
    if RSF.GDF_ref_scenes.empty:
        self.logger.warning('No scene fulfills all requirements to serve as spatial reference for scene %s '
                            '(entity ID %s). Coregistration impossible.' % (self.scene_ID, self.entity_ID))
        self.spatRef_available = False
        return None

    # assign spatial reference scene
    self.spatRef_scene = RSF.choose_ref_scene()
    self.spatRef_available = True
    self.logger.info('Found a suitable reference image for coregistration: scene ID %s (entity ID %s).'
                     % (self.spatRef_scene.scene_ID, self.spatRef_scene.entity_ID))
......
......@@ -97,13 +97,11 @@ class job:
path_procdata_scenes = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_procdata_scenes'))
path_procdata_MGRS = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_procdata_MGRS'))
path_archive = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_download'))
path_spatIdxSrv = absP(query_cfg(conn_db_meta, 'path_spatial_index_mediator_server'))
path_spatIdxSrv = query_cfg(conn_db_meta, 'path_spatial_index_mediator_server')
path_earthSunDist = absP(query_cfg(conn_db_meta, 'path_earthSunDist'))
#path_earthSunDist = '/home/gfz-fe/GeoMultiSens/database/earth_sun_distance/Earth_Sun_distances_per_day_edited.csv' # FIXME!!
path_SRFs = absP(query_cfg(conn_db_meta, 'path_SRFs'))
path_cloud_classif = absP(query_cfg(conn_db_meta, 'path_cloud_classif'))
path_solar_irr = absP(query_cfg(conn_db_meta, 'path_solar_irr'))
#path_solar_irr = '/home/gfz-fe/GeoMultiSens/database/solar_irradiance/SUNp1fontenla__350-2500nm_@0.1nm_converted.txt' # FIXME!!
path_testing = absP(query_cfg(conn_db_meta, 'path_testing'))
path_benchmarks = absP(query_cfg(conn_db_meta, 'path_benchmarks'))
path_job_logs = absP(query_cfg(conn_db_meta, 'path_job_logs'))
......
......@@ -33,6 +33,12 @@ def check_dependencies(logger=None):
if not SpatIdxSrv.is_running:
_log_or_print('Attempt to start Spatial Index Mediator Server failed.', logger.info)
def check_paths():
    """Dummy placeholder for future environment path validation.

    Intended checks (not yet implemented):
      - existence of database paths, etc.
      - existence of cloud classifier dill objects from
        PG.get_path_cloud_class_obj()
    """
def _log_or_print(msg, loggerLvl = None):
if loggerLvl:
loggerLvl(msg)
......
......@@ -124,14 +124,15 @@ def setup_logger(name_logfile, path_logfile,append=1):
def close_logger(logger):
    """Close and detach all handlers of the given logger.

    Safe to call with None or with placeholder objects that have no
    'handlers' attribute (e.g. the string 'not set' assigned during
    pickling); such inputs are silently ignored.

    :param logger: a logging.Logger instance, None, or a placeholder
    :return:       None
    """
    if logger and hasattr(logger, 'handlers'):
        # iterate over a copy: removeHandler() mutates logger.handlers,
        # which would otherwise skip every second handler during iteration
        for handler in list(logger.handlers):
            handler.close()
            logger.removeHandler(handler)
class GMS_logger(logging.Logger):
raise NotImplementedError
def __init__(self):
raise NotImplementedError
super.__init__(self)
@staticmethod
......
......@@ -221,7 +221,7 @@ def get_path_cloud_class_obj(GMS_identifier, get_all=False):
"By default the classifier object should be available at "
"<GMS root dir>/database/cloud_classifier/<satellite>/<sensor>/"
% (classif_objName, path_cloud_classifier_objects))
logger.warning('Cloud masking not possible for %s %s due to environement error.'
logger.warning('Cloud masking not possible for %s %s due to environment error.' # TODO move to environment
%(satellite, sensor))
classifier_path = None
else:
......
......@@ -50,7 +50,7 @@ if isdebugging: #override the existing settings in order to get write access eve
#builtins.GMS_process_ID = 26185258 # Beta-Job - 219 x L8, spatref L8
#builtins.GMS_process_ID = 26185259 # Beta-Job - 172 x L7, spatref L8
#builtins.GMS_process_ID = 26185260 # Beta-Job - 111 x S2, spatref L8
builtins.GMS_process_ID = 26185268 # 25x L7 SLC off, Zielsensor L8, spat.ref L8
#builtins.GMS_process_ID = 26185268 # 25x L7 SLC off, Zielsensor L8, spat.ref L8
#builtins.GMS_process_ID = 26185269 # 1x L7 SLC off, Bug SpatialIndexMediator
#builtins.GMS_process_ID = 26185270 # 5x L7 SLC off, Bug SpatialIndexMediator
#builtins.GMS_process_ID = 26185275 # 1x L8, spat. Ref. L8 Bug L1B_mask not found
......@@ -58,6 +58,19 @@ if isdebugging: #override the existing settings in order to get write access eve
#builtins.GMS_process_ID = 26185265 # 1x L8, Bug L2B_masks not found
#builtins.GMS_process_ID = 26185268 # "2x L8, Bug L2B_masks not found, incl. 1x bad archive"
#builtins.GMS_process_ID = 26185269 # "10x L8, Bug L2B_masks not found"
#builtins.GMS_process_ID = 26185272 # "1x S2A Sips"
#builtins.GMS_process_ID = 26185273 # "1x L7, target L8, spat.ref L8"
#builtins.GMS_process_ID = 26185275 # "1x L7, target L8, spat.ref L8 L1B Matching failed"
#builtins.GMS_process_ID = 26185276 # "1x L7, target L8, spat.ref L8 L1B Matching window became too small."
#builtins.GMS_process_ID = 26185279 # "GEOMS: 25x L7, target L8, spat.ref L8"
#builtins.GMS_process_ID = 26185280 # "GEOMS: 1x L7, target L8, spat.ref L8, debugging NoneType object is not subscriptable within mapinfo2geotransform"
#builtins.GMS_process_ID = 26185281 # "GEOMS: 4x L7, target L8, spat.ref L8, freeze of pool.map"
#builtins.GMS_process_ID = 26185283 # "GEOMS: 10x L7, target L8, spat.ref L8, freeze of pool.map"
#builtins.GMS_process_ID = 26185284 # "GEOMS: 11x L7, target L8, spat.ref L8, freeze of pool.map"
#builtins.GMS_process_ID = 26185321 # "GEOMS: 1x L7, target L8, spat.ref L8, debugging L1B_P"
#builtins.GMS_process_ID = 26185322 # "GEOMS: 1x L7, target L8, spat.ref L8, Bug calc_shifted_cross_power_spectrum: NoneType object not iterable"
#builtins.GMS_process_ID = 26185277 # "GMS41: 10x L7, target L8, spat.ref L8, Permission errors during logging"
builtins.GMS_process_ID = 26185278 # "Beta-Job - 172 x L7, spatref L8"
......@@ -108,7 +121,8 @@ from .misc import environment as ENV # environment
########################### core functions ####################################
job.logger = HLP_F.setup_logger('log__%s' %job.ID, os.path.join(job.path_job_logs,'%s.log' % job.ID), 0)
job.logger.info('Starting job with ID %s...' %job.ID)
job.logger.info('Starting job with ID %s (comment: %s)...'
%(job.ID, DB_T.GMS_JOB(job.conn_database).from_job_ID(job.ID).comment))
@HLP_F.log_uncaught_exceptions
def L0B_L1A_map(data_list_item): #map (scene-wise parallelization)
......@@ -278,7 +292,7 @@ def is_inMEM(GMS_objects, dataset):
########################################### MAIN/ARGUMENT PARSER #######################################################
def run_processController_in_multiprocessing(usecase_data_list):
# check environment
job.logger('Checking system environment...')
job.logger.info('Checking system environment...')
ENV.check_dependencies(job.logger)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment