Commit 9de0b815 authored by Daniel Scheffler, committed by Mathias Peters

Added code for more securely closing loggers.

Bugfix: handle '.fuse_hidden' files when deleting processing results.
Removed ASCII_writer (deprecated).
parent c93ff7bc
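For context: every change in this commit funnels handler cleanup through a close_logger() helper from ..misc.logging, whose implementation is not part of this diff. The following is therefore only a minimal sketch of what such a helper plausibly does. It also connects the two headline changes: on FUSE-mounted filesystems, deleting a file that is still held open (e.g. by an unclosed log FileHandler) leaves a '.fuse_hidden*' placeholder behind.

import logging

def close_logger(logger):
    # Hedged sketch (assumption) of the close_logger() helper imported
    # throughout this commit; the real implementation in ..misc.logging
    # is not shown in this diff.
    if logger and hasattr(logger, 'handlers'):
        for handler in logger.handlers[:]:  # iterate over a copy; removeHandler mutates the list
            try:
                handler.close()  # flushes and releases the underlying file handle
            except Exception:
                pass  # cleanup must never raise, e.g. when invoked from __del__
            logger.removeHandler(handler)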
@@ -33,7 +33,7 @@ from .L1A_P import L1A_object
from ..misc import database_tools as DB_T
from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG
from ..misc.logging import GMS_logger
from ..misc.logging import GMS_logger, close_logger
from ..misc.spatial_index_mediator import SpatialIndexMediator
from ..misc.definition_dicts import get_GMS_sensorcode, get_outFillZeroSaturated
@@ -87,6 +87,17 @@ class Scene_finder(object):
self.GDF_ref_scenes = GeoDataFrame() # set by self.spatial_query()
self.ref_scene = None
def __getstate__(self):
"""Defines how the attributes of Scene_finder instances are pickled."""
close_logger(self.logger)
self.logger = None
return self.__dict__
def __del__(self):
close_logger(self.logger)
self.logger = None
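Nulling out self.logger in __getstate__ keeps handler state (thread locks, open file handles) out of the pickle stream, so the logger has to be re-created after unpickling. This diff does not show that counterpart; a hedged sketch of what it could look like (the factory call is an assumption):

def __setstate__(self, state):
    # hypothetical counterpart to the __getstate__ above: restore the
    # attribute dict, then re-create the logger that was set to None
    self.__dict__ = state
    self.logger = GMS_logger('Scene_finder')  # assumed factory; actual args unknown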
def spatial_query(self, timeout=5):
"""Query the postgreSQL database to find possible reference scenes matching the specified criteria.
......
@@ -893,6 +893,10 @@ class AtmCorr(object):
return list(self.inObjs)
finally:
# rs_image.logger must be closed properly in any case
rs_image.logger.close()
# get processing infos
self.proc_info = self.ac_input['options']['processing']
......
@@ -34,6 +34,7 @@ from ..io.input_reader import SRF # noqa F401 # flake8 issue
from ..misc.logging import GMS_logger
from ..misc.definition_dicts import datasetid_to_sat_sen, sat_sen_to_datasetid
from ..misc.exceptions import ClassifierNotAvailableError
from ..misc.logging import close_logger
from ..model.metadata import get_LayerBandsAssignment
from .L2A_P import L2A_object
from ..model.gms_object import GMS_identifier
@@ -166,6 +167,17 @@ class SpectralHomogenizer(object):
self.classifier_rootDir = classifier_rootDir or CFG.path_spechomo_classif
self.logger = logger or logging.getLogger(self.__class__.__name__) # FIXME own logger logs nothing
def __getstate__(self):
"""Defines how the attributes of SpectralHomogenizer instances are pickled."""
close_logger(self.logger)
self.logger = None
return self.__dict__
def __del__(self):
close_logger(self.logger)
self.logger = None
def interpolate_cube(self, arrcube, source_CWLs, target_CWLs, kind='linear'):
# type: (Union[np.ndarray, GeoArray], list, list) -> np.ndarray
"""Spectrally interpolate the spectral bands of a remote sensing image to new band positions.
@@ -296,6 +308,17 @@ class SpectralResampler(object):
self.srf_tgt = srf_tgt
self.logger = logger or GMS_logger(__name__) # must be picklable
def __getstate__(self):
"""Defines how the attributes of SpectralResampler instances are pickled."""
close_logger(self.logger)
self.logger = None
return self.__dict__
def __del__(self):
close_logger(self.logger)
self.logger = None
@property
def wvl_1nm(self):
# spectral resampling of input image to 1 nm resolution
@@ -919,6 +942,17 @@ class ReferenceCube_Generator(object):
if dir_refcubes and not os.path.isdir(self.dir_refcubes):
raise ValueError("%s is not a directory." % self.dir_refcubes)
def __getstate__(self):
"""Defines how the attributes of ReferenceCube_Generator instances are pickled."""
close_logger(self.logger)
self.logger = None
return self.__dict__
def __del__(self):
close_logger(self.logger)
self.logger = None
@property
def refcubes(self):
"""Return a dictionary holding instances of RefCube for each target satellite / sensor of self.tgt_sat_sen_list.
......
@@ -42,6 +42,7 @@ from py_tools_ds.geo.coord_trafo import transform_utm_to_wgs84, transform_wgs84_
from ..options.config import GMS_config as CFG
from ..misc.definition_dicts import get_outFillZeroSaturated
from ..misc.logging import close_logger
__author__ = 'Daniel Scheffler', 'Robert Behling'
@@ -161,6 +162,17 @@ class GEOPROCESSING(object):
"""****OBJECT METHODS******************************************************"""
def __getstate__(self):
"""Defines how the attributes of GEOPROCESSING instances are pickled."""
close_logger(self.logger)
self.logger = None
return self.__dict__
def __del__(self):
close_logger(self.logger)
self.logger = None
def subset_kwargs_to_cols_rows_bands_colStart_rowStart_bandStart(self):
shape_fullArr = [self.inDs.RasterYSize, self.inDs.RasterXSize, self.inDs.RasterCount]
self.rows, self.cols, self.bands, self.rowStart, self.rowEnd, self.colStart, self.colEnd, self.bandStart, \
......
@@ -33,7 +33,7 @@ from ..options.config import GMS_config as CFG
from ..model import metadata as META
from ..misc import path_generator as PG
from ..misc import helper_functions as HLP_F
from ..misc.logging import GMS_logger
from ..misc.logging import GMS_logger, close_logger
from ..misc.database_tools import get_overlapping_scenes_from_postgreSQLdb
from ..misc.path_generator import path_generator
from ..misc.spatial_index_mediator import SpatialIndexMediator
@@ -469,6 +469,18 @@ class DEM_Creator(object):
self.dsID_dic = dict(ASTER=2, SRTM=225)
self.DEM = None
def __getstate__(self):
"""Defines how the attributes of DEM_Creator are pickled."""
if self.logger not in [None, 'not set']:
close_logger(self.logger)
self.logger = None
return self.__dict__
def __del__(self):
close_logger(self.logger)
self.logger = None
@staticmethod
def _get_corner_coords_lonlat(cornerCoords_tgt, prj):
# transform to Longitude/Latitude coordinates
......
@@ -140,63 +140,6 @@ def HDR_writer(meta_dic, outpath_hdr, logger=None):
reorder_ENVI_header(outpath_hdr, enviHdr_keyOrder)
def ASCII_writer(In, path_out_baseN):
warnings.warn(DeprecationWarning('Use <GMS_object>.to_GMS_file() instead.')) # FIXME
assert isinstance(In, dict), 'Input for ASCII writer is expected to be a dictionary. Got %s.' % type(In)
for k, v in list(In.items()):
# if isinstance(v,np.ndarray) or isinstance(v,dict) or hasattr(v,'__dict__'):
# like this if meta-dicts should not go into the GMS file; but that may not be a bad idea
# -> allows reading the metadata directly from the GMS file
if isinstance(v, datetime.datetime):
In[k] = v.strftime('%Y-%m-%d')
elif isinstance(v, logging.Logger):
if hasattr(v, 'handlers') and v.handlers[:]:
fileHandlers = [i for i in v.handlers[:] if isinstance(i, logging.FileHandler)]
for fH in fileHandlers:
warnings.warn('Not properly closed logger at GMS_obj.logger pointing to %s.' % fH.baseFilename)
In[k] = 'not set'
elif isinstance(v, collections.OrderedDict) or isinstance(v, dict):
if 'logger' in v:
if hasattr(In[k]['logger'], 'handlers') and In[k]['logger'].handlers[:]:
fileHandlers = [i for i in In[k]['logger'].handlers[:] if isinstance(i, logging.FileHandler)]
for fH in fileHandlers:
warnings.warn(
"Not properly closed logger at %s['logger'] pointing to %s." % (k, fH.baseFilename))
In[k]['logger'] = 'not set'
elif isinstance(v, np.ndarray):
# keep only small 2D arrays (sum of dims <= 200) as lists; delete all other arrays
if len(v.shape) == 2 and sum(v.shape) <= 200:
In[k] = v.tolist() # numpy arrays are not jsonable
else:
del In[k]
elif hasattr(v, '__dict__'):
# deletes instances of objects and arrays, but not OrderedDicts
if hasattr(v, 'logger'):
if hasattr(In[k].logger, 'handlers') and In[k].logger.handlers[:]:
fileHandlers = [i for i in In[k].logger.handlers[:] if isinstance(i, logging.FileHandler)]
for fH in fileHandlers:
warnings.warn('Not properly closed logger at %s.logger pointing to %s.' % (k, fH.baseFilename))
In[k].logger = 'not set'
del In[k]
# class customJSONEncoder(json.JSONEncoder):
# def default(self, obj):
# if isinstance(obj, np.ndarray):
# return '> numpy array <'
# if isinstance(obj, dict): # does not work
# return '> python dictionary <'
# if hasattr(obj,'__dict__'):
# return '> python object <'
# # Let the base class default method raise the TypeError
# return json.JSONEncoder.default(self, obj)
# json.dump(In, open(path_out_baseN,'w'), skipkeys=True,
# sort_keys=True,cls=customJSONEncoder, separators=(',', ': '),indent =4)
with open(path_out_baseN, 'w') as outF:
json.dump(In, outF, skipkeys=True, sort_keys=True, separators=(',', ': '), indent=4)
def Tiles_Writer(tileList_or_Array, out_path, out_shape, out_dtype, out_interleave, out_meta=None,
arr_pos=None, overwrite=True):
"""Write tiles to disk using numpy.memmap.
......
@@ -1165,7 +1165,7 @@ def delete_processing_results(scene_ID, proc_level='all', force=False):
try:
shutil.rmtree(path_procdata)
except OSError: # directory not deletable because it is not empty
if [F for F in glob.glob(path_procdata) if not os.path.basename(F).startswith('.fuse_hidden')]:
if [F for F in os.listdir(path_procdata) if not os.path.basename(F).startswith('.fuse_hidden')]:
raise # raise OSError if there are other files than .fuse_hidden... remaining
else:
files2delete = glob.glob(os.path.join(path_procdata, '*%s*' % proc_level))
......
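Why the one-line '.fuse_hidden' fix above works: glob.glob() applied to a bare directory path returns at most that path itself, never its contents, so the old check always saw a non-empty list and re-raised the OSError; os.listdir() returns the entries actually remaining in the directory. A small self-contained illustration (temporary directory and file name are hypothetical):

import glob
import os
import tempfile

d = tempfile.mkdtemp()
open(os.path.join(d, '.fuse_hidden0001'), 'w').close()

print(glob.glob(d))   # [d] -> only the directory path itself, never its contents
print(os.listdir(d))  # ['.fuse_hidden0001'] -> the files actually left inside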
@@ -92,6 +92,10 @@ class SharedResourceLock(MultiSlotLock):
super(SharedResourceLock, self).delete()
self.client.delete(self.grabbed_key_jobID)
def __exit__(self, exc_type, exc_val, exc_tb):
self.logger.close()
return super(SharedResourceLock, self).__exit__(exc_type, exc_val, exc_tb)
class IOLock(SharedResourceLock):
def __init__(self, allowed_slots=1, logger=None, **kwargs):
@@ -100,6 +104,10 @@ class IOLock(SharedResourceLock):
if not self.disabled:
super(IOLock, self).__init__(name='IOLock', allowed_slots=allowed_slots, logger=logger, **kwargs)
def __exit__(self, exc_type, exc_val, exc_tb):
self.logger.close()
return super(IOLock, self).__exit__(exc_type, exc_val, exc_tb)
class ProcessLock(SharedResourceLock):
def __init__(self, allowed_slots=1, logger=None, **kwargs):
@@ -108,6 +116,10 @@ class ProcessLock(SharedResourceLock):
if not self.disabled:
super(ProcessLock, self).__init__(name='ProcessLock', allowed_slots=allowed_slots, logger=logger, **kwargs)
def __exit__(self, exc_type, exc_val, exc_tb):
self.logger.close()
return super(ProcessLock, self).__exit__(exc_type, exc_val, exc_tb)
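With these __exit__ overrides, a logger handed to any of the locks is closed as soon as the with-block is left, even when an exception propagates. A hedged usage sketch (context-manager support of the base class is implied by the overrides; the worker function is made up):

with ProcessLock(allowed_slots=2, logger=GMS_logger('lock_demo')) as lock:
    run_processing_job()  # hypothetical worker; may raise
# logger.close() has run here via __exit__, so no log-file handle stays open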
class MemoryReserver(Semaphore):
def __init__(self, mem2lock_gb, max_usage=90, logger=None, **kwargs):
......
@@ -180,6 +180,9 @@ class GMS_logger(logging.Logger):
with open(self.path_logfile) as inF:
print(inF.read())
def __del__(self):
self.close()
def __enter__(self):
return self
......
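Together with the __enter__ added here, GMS_logger instances can be scoped with a with-statement and are additionally cleaned up on garbage collection via __del__. A usage sketch, assuming the matching __exit__ (not visible in this truncated hunk) calls self.close():

with GMS_logger('job_logger') as logger:
    logger.info('processing started')
# all handlers are closed at this point, even if the block raised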
@@ -8,17 +8,18 @@ import warnings
from datetime import datetime, timedelta
from shapely.geometry import Polygon
import pytz
from logging import Logger
from logging import getLogger
from typing import List # noqa F401 # flake8 issue
from ..misc.exceptions import GMSEnvironmentError
from ..misc.logging import close_logger
class SpatialIndexMediatorServer:
def __init__(self, rootDir, logger=Logger(__name__)):
def __init__(self, rootDir, logger=None):
self.rootDir = rootDir
self.path_idxMedSrv = os.path.join(rootDir, 'index-mediator-server.sh')
self.logger = logger
self.logger = logger or getLogger('SpatialIndexMediatorServer')
# validate
if not os.path.isfile(self.path_idxMedSrv):
@@ -28,6 +29,18 @@ class SpatialIndexMediatorServer:
raise GMSEnvironmentError('File path of index mediator server does not exist at %s.'
% self.path_idxMedSrv)
def __getstate__(self):
"""Defines how the attributes of SpatialIndexMediatorServer are pickled."""
if self.logger not in [None, 'not set']:
close_logger(self.logger)
self.logger = None
return self.__dict__
def __del__(self):
close_logger(self.logger)
self.logger = None
@property
def is_running(self):
return self.status['running']
......
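About the __init__ signature change above (logger=Logger(__name__) -> logger=None): Python evaluates default arguments once, at function definition time, so the old default created a single Logger at import time that was then shared by every SpatialIndexMediatorServer instance; it also instantiated Logger directly, bypassing the registry that logging.getLogger() maintains. A minimal sketch of the difference:

from logging import Logger, getLogger

def before(logger=Logger('shared')):  # default built ONCE, at import time
    return logger

def after(logger=None):
    # resolved per call; getLogger() also registers the logger with the
    # logging manager, which direct Logger(...) instantiation bypasses
    return logger or getLogger('SpatialIndexMediatorServer')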
@@ -40,6 +40,7 @@ from py_tools_ds.numeric.array import get_array_tilebounds
from sicor.options import get_options as get_ac_options
from ..misc.logging import GMS_logger as DatasetLogger
from ..misc.logging import close_logger
from ..misc import database_tools as DB_T
from ..misc import path_generator as PG
from ..model.mgrs_tile import MGRS_tile
@@ -287,8 +288,7 @@ class GMS_object(object):
@MetaObj.deleter
def MetaObj(self):
if hasattr(self, '_MetaObj') and self._MetaObj and hasattr(self._MetaObj, 'logger') and \
self._MetaObj.logger not in [None, 'not set']:
if self._MetaObj and self._MetaObj.logger not in [None, 'not set']:
self._MetaObj.logger.close()
self._MetaObj.logger = None
@@ -1729,10 +1729,12 @@ class GMS_object(object):
if isinstance(v, datetime.datetime):
dict2write[k] = v.strftime('%Y-%m-%d %H:%M:%S.%f%z')
elif isinstance(v, DatasetLogger):
elif isinstance(v, (DatasetLogger, logging.Logger)):
if hasattr(v, 'handlers') and v.handlers[:]:
warnings.warn('Not properly closed logger at GMS_obj.logger pointing to %s.' % v.path_logfile)
close_logger(dict2write[k])
dict2write[k] = 'not set'
elif isinstance(v, GMS_identifier):
@@ -1744,6 +1746,7 @@ class GMS_object(object):
if hasattr(dict2write[k]['logger'], 'handlers') and dict2write[k]['logger'].handlers[:]:
warnings.warn("Not properly closed logger at %s['logger'] pointing to %s."
% (k, dict2write[k]['logger'].path_logfile))
close_logger(dict2write[k]['logger'])
dict2write[k]['logger'] = 'not set'
elif isinstance(v, np.ndarray):
@@ -1759,6 +1762,7 @@ class GMS_object(object):
if hasattr(dict2write[k].logger, 'handlers') and dict2write[k].logger.handlers[:]:
warnings.warn("Not properly closed logger at %s.logger pointing to %s."
% (k, dict2write[k].logger.path_logfile))
close_logger(dict2write[k].logger)
dict2write[k].logger = 'not set'
del dict2write[k]
@@ -2126,11 +2130,14 @@ class GMS_object(object):
self.logger.close() # this runs misc.logging.GMS_logger.close()
self.logger = None # also adds current captured stream to self.log
if hasattr(self, 'MetaObj') and self.MetaObj and hasattr(self.MetaObj, 'logger') and \
self.MetaObj.logger not in [None, 'not set']:
if self.MetaObj and self.MetaObj.logger not in [None, 'not set']:
self.MetaObj.logger.close()
self.MetaObj.logger = None
if self.GMS_identifier and self.GMS_identifier.logger not in [None, 'not set']:
self.GMS_identifier.logger.close()
self.GMS_identifier.logger = None
def delete_previous_proc_level_results(self):
"""Deletes results of the previous processing level if the respective flag CFG.exec_L**P[2]) is set to True.
The function is skipped if the results of the current processing level have not yet been written.
@@ -2258,10 +2265,8 @@ class GMS_identifier(object):
def __getstate__(self):
"""Defines how the attributes of MetaObj instances are pickled."""
try:
self.logger.close()
except AttributeError:
pass
close_logger(self.logger)
self.logger = None
return self.__dict__
......