Commit 11e1bc1c authored by Daniel Scheffler, committed by Mathias Peters


Revised logging practices of the whole GMS package (fixes permission errors during logging); added a direct call of the external DeShifter module.
L0B_P:
- updated calls for logger closing
L1A_P.L1A_object:
- converted L1A_object.logger to a lazily created property 'logger' (see the sketch below this list)
- added close_GMS_loggers()
- updated __getstate__ and __setstate__
- log_for_fullArr_or_firstTile(): added 'close logger'-calls
- MetaObj2ODict(): bugfix for not deleting MetaObj
- added to_GMS_file(), based on OUT_W.ASCII_writer(), which is now deprecated
- delete_tempFiles(): added 'close logger'-calls
L1B_P:
- reordered imports
- updated logger getters and 'close logger'-statements
- L1B_object:
    - removed its own logger setup
    - correct_spatial_shifts(): added working version
L2A_P:
- DESHIFTER: updated logger getters and 'close logger'-statements
INP_R:
- pickle_SRF_DB(): updated logger getters and 'close logger'-statements
OUT_W:
- ASCII_writer(): now deprecated; added 'unclosed logfile' warnings
- Obj2ENVI: updated writer calls for GMS file
HLP_F:
- removed setup_logger()
- moved close_logger() to new module 'logging'
- moved GMS_logger() to new module 'logging'
- failed_GMS_object: updated logger getter
- find_nearest() is now imported from external package 'py_tools_ds'
- removed duplicate version of corner_coord_to_minmax()
logging:
- added new module logging, based on previous logging functions from HLP_F
MGRS_tile:
- added higher timeout for getting geometry from pgSQL database
SpatialIndexMediator:
- SpatialIndexMediatorServer:
    - status: revised return value
    - start(): revised warnings
    - stop(): revised return value
PC:
- job.logger is now an instance of GMS_logger
- added new version of L2A_map()
- added shutdown_loggers statement
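
A minimal, self-contained sketch of the lazy-logger pattern this commit introduces (GMS_logger is modeled here as a plain logging.Logger subclass; the real implementation lives in the new misc.logging module, so treat the details as illustrative assumptions, not the actual GMS code):

import logging

class GMS_logger(logging.Logger):
    """Illustrative stand-in for misc.logging.GMS_logger."""
    def __init__(self, name, path_logfile, append=1):
        super().__init__(name)
        self.path_logfile = path_logfile
        self.addHandler(logging.FileHandler(path_logfile, mode='a' if append else 'w'))

    def close(self):
        # release the log file handles -> avoids the permission errors
        # mentioned in the commit message
        for handler in self.handlers[:]:
            handler.close()
            self.removeHandler(handler)

class GMS_object:
    def __init__(self, baseN, path_logfile):
        self._logger = None  # protected attribute; logger is created lazily
        self.baseN = baseN
        self.path_logfile = path_logfile

    @property
    def logger(self):
        if self._logger is None:
            self._logger = GMS_logger('log__' + self.baseN, self.path_logfile, append=1)
        return self._logger

    @logger.setter
    def logger(self, logger):
        self._logger = logger

    def close_GMS_loggers(self):
        if self._logger is not None:
            self._logger.close()
            self._logger = None

    def __getstate__(self):
        self.close_GMS_loggers()  # never pickle an open logger
        return self.__dict__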
parent db54bd81
@@ -23,8 +23,8 @@ import re
import builtins
import collections
from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG
from ..misc import path_generator as PG
from ..misc.logging import GMS_logger
job = builtins.GMS_config.job # read from builtins (set by process_controller)
########################### core functions ####################################
@@ -48,7 +48,7 @@ class L0B_object(object):
self.path_procdata = PathGen.get_path_procdata()
self.ExtractedFolder = PathGen.get_path_tempdir()
self.path_logfile = PathGen.get_path_logfile()
self.logger = HLP_F.setup_logger('log__'+self.baseN, self.path_logfile, append=0)
self.logger = GMS_logger('log__'+self.baseN, self.path_logfile, append=0)
PathGen = PG.path_generator(self.__dict__) # passes a logger in addition to previous attributes
self.path_archive = PathGen.get_local_archive_path_baseN()
@@ -75,10 +75,11 @@ class L0B_object(object):
os.makedirs(self.ExtractedFolder)
# close loggers
HLP_F.close_logger(self.GMS_identifier['logger'])
HLP_F.close_logger(self.logger)
del self.GMS_identifier['logger']
self.logger.close()
del self.logger
self.GMS_identifier['logger'].close()
del self.GMS_identifier['logger']
def _data_downloader(self,sensor, entity_ID):
self.logger.info('Level 0B Processing started')
@@ -35,6 +35,9 @@ import builtins
import time
import warnings
import copy
import collections
import json
import matplotlib.pyplot as plt
from pyhdf import SD
from spectral.io import envi
@@ -49,6 +52,7 @@ from ..io import Output_writer as OUT_W
from ..misc import path_generator as PG
from ..misc import database_tools as DB_T
from ..misc.mgrs_tile import MGRS_tile
from ..misc.logging import GMS_logger
from py_tools_ds.ptds import GeoArray
from py_tools_ds.ptds.geo.coord_calc import calc_FullDataset_corner_positions
from py_tools_ds.ptds.geo.coord_trafo import pixelToLatLon, pixelToMapYX
@@ -66,6 +70,9 @@ class L1A_object(object):
def __init__(self, L0B_object):
""":param L0B_object: instance of L0B_P.L0B_object or None
"""
# protected attributes
self._logger = None
self.proc_level = 'L1A'
self.job_ID = L0B_object.job_ID if L0B_object else -9999
self.job_CPUs = L0B_object.job_CPUs if L0B_object else -9999
@@ -85,8 +92,6 @@ class L1A_object(object):
self.baseN = L0B_object.baseN if L0B_object else ''
self.path_logfile = L0B_object.path_logfile if L0B_object else ''
self.georef = L0B_object.georef if L0B_object else -9999
self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1) \
if L0B_object else None
self.GMS_identifier = L0B_object.GMS_identifier if L0B_object else None
if self.GMS_identifier:
self.GMS_identifier['logger'] = self.logger
@@ -115,10 +120,8 @@ class L1A_object(object):
self.masks_meta = {} # set by self.build_L1A_masks()
self.scenes_proc_ID = None # set by Output writer after creation/update of db record in table scenes_proc
self.mgrs_tiles_proc_ID = None # set by Output writer after creation/update of db record in table mgrs_tiles_proc
# self.CLD_obj = CLD_P.GmsCloudClassifier(classifier=self.path_cloud_class_obj)
if L0B_object:
assert os.path.exists(self.path_archive), 'Invalid path to RAW data. File %s does not exist at %s.'\
%(os.path.basename(self.path_archive), os.path.dirname(self.path_archive))
@@ -190,34 +193,33 @@ class L1A_object(object):
self.arr, self.meta = ATM_L1A, ATM_L1A_md
#if L0B_object: del self.GMS_identifier['logger'], self.MetaObj.logger
def __getstate__(self):
"""Defines how the attributes of GMS object are pickled."""
if hasattr(self,'logger'):
HLP_F.close_logger(self.logger)
self.logger = 'not set'
if self.GMS_identifier and 'logger' in self.GMS_identifier:
HLP_F.close_logger(self.GMS_identifier['logger'])
self.GMS_identifier['logger'] = 'not set'
if hasattr(self,'MetaObj') and self.MetaObj:
HLP_F.close_logger(self.MetaObj.logger)
self.MetaObj.logger = 'not set'
self.close_GMS_loggers()
return self.__dict__
def __setstate__(self, ObjDict):
"""Defines how the attributes of GMS object are unpickled."""
self.__dict__ = ObjDict
pathLog = PG.path_generator(ObjDict).get_path_logfile() # None if GMS object instanced with None
baseN = PG.path_generator(ObjDict).get_baseN()
self.logger = HLP_F.setup_logger('log__' + baseN, pathLog, append=1)
if self.GMS_identifier:
self.GMS_identifier['logger'] = self.logger
if hasattr(self,'MetaObj') and self.MetaObj:
self.MetaObj.logger = self.logger # TODO unpickle meta to MetaObj
# TODO unpickle meta to MetaObj
@property
def logger(self):
if self._logger:
return self._logger
else:
self._logger = GMS_logger('log__' + self.baseN, self.path_logfile, append=1)
return self._logger
@logger.setter
def logger(self, logger):
self._logger = logger
@property
@@ -235,7 +237,6 @@ class L1A_object(object):
path_GMS_file = tuple_GMS_subset[0]
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
self.__dict__ = GMSfileDict.copy() # copy all attributes from GMS file
self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
self.acquisition_date = datetime.datetime.strptime(self.acquisition_date,'%Y-%m-%d')
self.arr_shape, self.arr_pos = tuple_GMS_subset[1]
@@ -256,7 +257,6 @@ class L1A_object(object):
self.mask_clouds = path_maskClouds
self.masks = path_masks
self.GMS_identifier['logger'] = 'not set'
return copy.copy(self)
########################### core functions ####################################
@@ -658,7 +658,7 @@ class L1A_object(object):
:param log_msg: the log message to be logged
:param subset: subset argument as sent to e.g. DN2TOARadRefTemp that indicates which tile is to be processed.
Not needed if self.arr_pos is not None.
:param logger: a logging.logger object
:param logger: a GMS_logger object
"""
logger = logger if logger else self.logger
if subset is None and\
@@ -669,6 +669,10 @@ class L1A_object(object):
else:
pass
logger.close()
self.logger.close()
self.logger = None
@staticmethod
def rescale_array(inArray, outScaleFactor, inScaleFactor=1):
@@ -1097,6 +1101,7 @@ class L1A_object(object):
self.logger.info('Preparing extracted metadata to be written to disk...')
self.meta = self.MetaObj.Meta2ODict()
del self.MetaObj # FIXME MetaObj should have its own JSON encoder
def apply_nodata_mask_to_ObjAttr(self, attrname, out_nodata_val=None):
@@ -1401,6 +1406,76 @@ class L1A_object(object):
return GMS_obj_MGRS_tiles
def to_GMS_file(self, path_gms_file=None):
self.close_GMS_loggers()
dict2write = self.__dict__.copy()
dict2write['arr_shape'], dict2write['arr_pos'] = ['cube', None]
MGRS_info = getattr(self,'MGRS_info') if hasattr(self,'MGRS_info') else None
path_gms_file = path_gms_file if path_gms_file else \
PG.path_generator(dict2write, MGRS_info=MGRS_info).get_path_gmsfile()
for k, v in list(dict2write.items()):
# if isinstance(v,np.ndarray) or isinstance(v,dict) or hasattr(v,'__dict__'):
## like this if the meta-dicts should not go into the GMS file; but maybe that's not bad -> allows reading the metadata directly from the GMS file
if isinstance(v, datetime.datetime):
dict2write[k] = v.strftime('%Y-%m-%d')
elif isinstance(v, GMS_logger):
if hasattr(v, 'handlers') and v.handlers[:]:
warnings.warn('Not properly closed logger at GMS_obj.logger pointing to %s.' % v.path_logfile)
dict2write[k] = 'not set'
elif isinstance(v, collections.OrderedDict) or isinstance(v, dict):
dict2write[k] = dict2write[k].copy()
if 'logger' in v:
if hasattr(dict2write[k]['logger'], 'handlers') and dict2write[k]['logger'].handlers[:]:
warnings.warn("Not properly closed logger at %s['logger'] pointing to %s."
% (k, dict2write[k]['logger'].path_logfile))
dict2write[k]['logger'] = 'not set'
elif isinstance(v, np.ndarray):
# convert only small 2D arrays (sum of dimensions <= 200) to lists; delete 3D arrays and larger 2D arrays
if len(v.shape) == 2 and sum(v.shape) <= 200:
dict2write[k] = v.tolist() # numpy arrays are not jsonable
else:
del dict2write[k]
elif hasattr(v, '__dict__'):
# deletes instances of objects and arrays, but not OrderedDicts
if hasattr(v, 'logger'):
if hasattr(dict2write[k].logger, 'handlers') and dict2write[k].logger.handlers[:]:
warnings.warn("Not properly closed logger at %s.logger pointing to %s."
% (k, dict2write[k].logger.path_logfile))
dict2write[k].logger = 'not set'
del dict2write[k]
# class customJSONEncoder(json.JSONEncoder):
# def default(self, obj):
# if isinstance(obj, np.ndarray):
# return '> numpy array <'
# if isinstance(obj, dict): # does not work
# return '> python dictionary <'
# if hasattr(obj,'__dict__'):
# return '> python object <'
# # Let the base class default method raise the TypeError
# return json.JSONEncoder.default(self, obj)
# json.dump(In, open(path_out_baseN,'w'),skipkeys=True,sort_keys=True,cls=customJSONEncoder,separators=(',', ': '),indent =4)
with open(path_gms_file, 'w') as outF:
json.dump(dict2write, outF, skipkeys=True, sort_keys=True, separators=(',', ': '), indent=4)
def close_GMS_loggers(self):
if self._logger not in [None, 'not set']:
self.logger.close()
self.logger = None
if self.GMS_identifier and 'logger' in self.GMS_identifier and \
self.GMS_identifier['logger'] not in [None, 'not set']:
self.GMS_identifier['logger'].close()
self.GMS_identifier['logger'] = None
if hasattr(self,'MetaObj') and self.MetaObj and hasattr(self.MetaObj,'logger') and \
self.MetaObj.logger not in [None, 'not set']:
self.MetaObj.logger.close()
self.MetaObj.logger = None
def delete_previous_proc_level_results(self):
"""Deletes results of the previous processing level if the respective flag job.exec__L**P[2]) is set to True.
The function is skipped if the results of the current processing level have not yet been written.
@@ -1434,6 +1509,7 @@ class L1A_object(object):
"""Delete all temporary files that have been written during GMS object processing.
"""
self.logger.info('Deleting temporary data...')
if sys.platform.startswith('linux'):
if os.path.isdir(self.ExtractedFolder): shutil.rmtree(self.ExtractedFolder)
if os.path.isdir(self.ExtractedFolder):
@@ -1464,3 +1540,6 @@ class L1A_object(object):
# delete previous proc_level results on demand (according to job.exec__L**P[2])
self.delete_previous_proc_level_results()
self.logger.close()
self.logger = None
@@ -15,7 +15,6 @@
###############################################################################
########################### Library import ####################################
#from __future__ import (division, print_function, unicode_literals,absolute_import)
import builtins
import collections
import json
@@ -27,22 +26,7 @@ from datetime import datetime, timedelta
import numpy as np
from geopandas import GeoDataFrame
#if socket.gethostname() == 'geoms':
# sys.path.append('/usr/lib/otb/python/')
# os.environ['ITK_AUTOLOAD_PATH'] = "/usr/lib/otb/applications"
# sys.path.append('/usr/lib/python2.7/dist-packages') # cv2
# sys.path.append('/usr/local/lib/python2.7/site-packages')
# sys.path.append('/home/gfz-fe/scheffler/python')
#if socket.gethostname() == 'mefe18':
# sys.path.append('/usr/lib64/otb/python/')
# os.environ['ITK_AUTOLOAD_PATH'] = "/usr/lib64/otb/applications"
# sys.path.append('/misc/hy5/scheffler/python')
#try: import otbApplication
#except ImportError: print('otbApplication-lib missing..')
#except SyntaxError: print('The installed otbApplication-lib throws syntax errors.. Maybe too old?')
#try: import cv2
#except ImportError: print('cv2-lib missing..')
from shapely.geometry import box
job, usecase, GMS_call_type = builtins.GMS_config.job, builtins.GMS_config.usecase, builtins.GMS_config.GMS_call_type
@@ -51,6 +35,7 @@ from ..misc import path_generator as PG
from ..misc import database_tools as DB_T
from .L1A_P import L1A_object
from ..misc.SpatialIndexMediator import SpatialIndexMediator
from ..misc.logging import GMS_logger
#sys.path.append('/home/gfz-fe/')
from CoReg_Sat import COREG, DESHIFTER
@@ -58,9 +43,25 @@ from py_tools_ds.ptds import GeoArray
from py_tools_ds.ptds.geo.projection import prj_equal, EPSG2WKT
from py_tools_ds.ptds.geo.coord_calc import corner_coord_to_minmax
from py_tools_ds.ptds.geo.coord_trafo import reproject_shapelyPoly, transform_any_prj
from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform
from py_tools_ds.ptds.geo.vector.topology import get_overlap_polygon
#if socket.gethostname() == 'geoms':
# sys.path.append('/usr/lib/otb/python/')
# os.environ['ITK_AUTOLOAD_PATH'] = "/usr/lib/otb/applications"
# sys.path.append('/usr/lib/python2.7/dist-packages') # cv2
# sys.path.append('/usr/local/lib/python2.7/site-packages')
# sys.path.append('/home/gfz-fe/scheffler/python')
#if socket.gethostname() == 'mefe18':
# sys.path.append('/usr/lib64/otb/python/')
# os.environ['ITK_AUTOLOAD_PATH'] = "/usr/lib64/otb/applications"
# sys.path.append('/misc/hy5/scheffler/python')
#try: import otbApplication
#except ImportError: print('otbApplication-lib missing..')
#except SyntaxError: print('The installed otbApplication-lib throws syntax errors.. Maybe too old?')
#try: import cv2
#except ImportError: print('cv2-lib missing..')
# <editor-fold desc="deprecated/unused functions">
# def calculate_TiePoints(im_ref,im_rpc,distance = 200):
@@ -374,7 +375,7 @@ class Scene_finder(object):
conditions=condlist, add_cmds='ORDER BY scenes.cloudcover ASC', timeout=30000)
conds_descImportance = [dataset_cond, cloudcov_cond, dayrange_cond]
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.im2shift_objDict['path_logfile'],append=1)
temp_logger = GMS_logger('log__' + self.baseN, self.im2shift_objDict['path_logfile'], append=1)
temp_logger.info('Querying database in order to find a suitable reference scene for co-registration.')
count, filt_overlap_scenes = 0,[]
@@ -478,7 +479,7 @@ class Scene_finder(object):
assert query_res != [], 'No entity-ID found for scene number %s' %self.imref_scene_ID
self.imref_entity_ID = query_res[0][0] # [('LC81510322013152LGN00',)]
break
HLP_F.close_logger(temp_logger)
temp_logger.close()
def sceneIDList_to_filt_overlap_scenes(self,sceneIDList,min_overlap):
@@ -528,7 +529,6 @@ class L1B_object(L1A_object):
# populate attributes
[setattr(self, key, value) for key, value in L1A_obj.__dict__.items()]
self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
self.proc_level = 'L1B'
@@ -595,7 +595,7 @@ class L1B_object(L1A_object):
# exclude cirrus/oxygen band of Landsat-8/Sentinel-2
shift_bbl, ref_bbl = [False]*len(shift_cwl), [False]*len(ref_cwl) # bad band lists
for dic,s_r in zip([self.__dict__, ref_gmsDict], ['shift', 'ref']):
dic['GMS_identifier']['logger'] = None
dic['GMS_identifier']['logger'] = None # set a dummy value in order to avoid an exception
sensorcode = HLP_F.get_GMS_sensorcode(dic['GMS_identifier'])
if sensorcode in ['LDCM','S2A','S2B'] and '9' in dic['LayerBandsAssignment']:
locals()['%s_bbl' %s_r][dic['LayerBandsAssignment'].index('9')] = True
@@ -691,31 +691,47 @@ class L1B_object(L1A_object):
self.coreg_info.update({'success' : True if not self.coreg_needed else False}) # False means spatRef not available
# <editor-fold desc="deprecated (?) function perform_deshifting">
# def perform_deshifting(self, attrname, band2process=None):
# config = get_DESHIFTER_configs(self.__dict__.copy(), attrname, band2process=band2process)
# DESHIFT_obj = DESHIFTER(config[0], config[1], **config[2]).correct_shifts()
# if DESHIFT_obj.is_resampled and DESHIFT_obj.is_shifted:
# setattr(self,attrname,DESHIFT_obj.arr_shifted) # FIXME not compatible with band2process, because attrname is overwritten again with every call of perform_deshifting()
# self.coreg_info['is shifted'] = True
# self.coreg_info['is resampled'] = True
# self.meta['map info'] = DESHIFT_obj.updated_map_info
# self.meta['projection'] = DESHIFT_obj.updated_projection
# </editor-fold>
def correct_spatial_shifts(self, attrname, band2process=None):
raise NotImplementedError
# get target bounds
def correct_spatial_shifts(self):
"""Corrects the spatial shifts calculated by self.coregister_spatially()."""
## get target bounds # TODO implement boxObj call instead here
trueDataCornerUTM = [transform_any_prj(EPSG2WKT(4326), self.meta['coordinate system string'], x, y)
for x, y in self.trueDataCornerLonLat]
xmin, xmax, ymin, ymax = corner_coord_to_minmax(trueDataCornerUTM)
mapBounds = box(xmin, ymin, xmax, ymax).bounds
# correct shifts and clip to extent
DESHIFTER(getattr(self,attrname), self.coreg_info, )
print("DESHIFT: Only the map info of %s has been updated because 'align_grids' is turned off, the pixel "
"sizes keep the same and source and target projections are equal. " % self.entity_ID)
for attrname in ['arr', 'masks']:
self.logger.info("Correcting spatial shifts for attribute '%s'..." %attrname)
# correct shifts
meta = self.meta if attrname=='arr' else self.masks_meta
gt, prj = mapinfo2geotransform(meta['map info']), meta['coordinate system string']
geoArr = GeoArray(getattr(self,attrname), tuple(gt), prj)
DS = DESHIFTER(geoArr, self.coreg_info,
target_xyGrid=[usecase.spatial_ref_gridx, usecase.spatial_ref_gridy],
cliptoextent=True, clipextent=mapBounds, align_grids=True)
DS.correct_shifts()
setattr(self,attrname, DS.arr_shifted)
# update coreg_info
if attrname=='arr':
self.coreg_info['is shifted'] = DS.is_shifted
self.coreg_info['is resampled'] = DS.is_resampled
# update geoinformation and array-shape-related attributes
self.logger.info("Updating geoinformation of '%s' attribute..." %attrname)
if attrname=='arr':
self.meta['map info'] = DS.updated_map_info
self.meta['coordinate system string'] = DS.updated_projection
self.shape_fullArr = DS.arr_shifted.shape # TODO move this into a property
self.meta['lines'], self.meta['samples'] = DS.arr_shifted.shape[:2]
if DS.arr_shifted.ndim == 3:  # 'bands' only exists for 3D arrays
self.meta['bands'] = DS.arr_shifted.shape[2]
else:
self.masks_meta['map info'] = DS.updated_map_info
self.masks_meta['coordinate system string'] = DS.updated_projection
self.masks_meta['lines'], self.masks_meta['samples'] = DS.arr_shifted.shape[:2]
def join_deshift_results(self, list_deshiftRes_groupedBySceneID):
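
A condensed, hedged sketch of the new shift-correction call chain used in correct_spatial_shifts() above (the DESHIFTER and GeoArray signatures are taken from the diff; the wrapper function itself is illustrative only):

from CoReg_Sat import DESHIFTER
from py_tools_ds.ptds import GeoArray
from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform

def deshift_attribute(arr, meta, coreg_info, xgrid, ygrid, mapBounds):
    # wrap the raw array together with its geotransform and projection
    gt = mapinfo2geotransform(meta['map info'])
    geoArr = GeoArray(arr, tuple(gt), meta['coordinate system string'])
    # resample onto the spatial reference grid, clipped to the scene extent
    DS = DESHIFTER(geoArr, coreg_info,
                   target_xyGrid=[xgrid, ygrid],
                   cliptoextent=True, clipextent=mapBounds, align_grids=True)
    DS.correct_shifts()
    # callers update 'map info', the projection and the shape-related
    # attributes from DS afterwards (see the method above)
    return DS.arr_shifted, DS.updated_map_info, DS.updated_projection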
@@ -18,14 +18,16 @@ import numpy as np
import rasterio
from . import GEOPROCESSING as GEOP # FIXME import functions directly as soon as GEOPROCESSING is included in algorithms.__init__.__all__
from ..misc import path_generator as PG
from ..misc import helper_functions as HLP_F
from .L1C_P import L1C_object
from ..misc import path_generator as PG
from ..misc import helper_functions as HLP_F
from ..misc.logging import GMS_logger
from .L1C_P import L1C_object
from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform, geotransform2mapinfo
from py_tools_ds.ptds.geo.projection import get_proj4info
from py_tools_ds.ptds.geo.coord_calc import corner_coord_to_minmax, get_corner_coordinates
from py_tools_ds.ptds.geo.coord_trafo import pixelToMapYX
from py_tools_ds.ptds.geo.raster.reproject import warp_ndarray
from py_tools_ds.ptds.numeric.vector import find_nearest
@@ -256,7 +258,7 @@ class DESHIFTER(object):
# type: (L2A_P.DESHIFTER) -> collections.OrderedDict
temp_logger = HLP_F.setup_logger('log__' + self.shift_baseN, self.path_logfile, append=1)
temp_logger = GMS_logger('log__' + self.shift_baseN, self.path_logfile, append=1)
t0 = time.time()
equal_prj = get_proj4info(proj=self.ref_prj)==get_proj4info(proj=self.shift_prj) \
@@ -331,7 +333,7 @@ class DESHIFTER(object):
imref_ygrid = np.arange(r_ymin,r_ymax,self.ref_ygsd)
nearest_coord = lambda coordgrid,coord,rnd: \
HLP_F.find_nearest(coordgrid, coord, roundAlg=rnd, extrapolate=1)
find_nearest(coordgrid, coord, roundAlg=rnd, extrapolate=1)
xmin,xmax = nearest_coord(imref_xgrid,s_xmin,'off'),nearest_coord(imref_xgrid,s_xmax,'on')
ymin,ymax = nearest_coord(imref_ygrid,s_ymin,'off'),nearest_coord(imref_ygrid,s_ymax,'on')
out_gt = (xmin,xgsd,0.0,ymax,0.0,-ygsd) #(249885, 30, 0, 4578615, 0, -30)
@@ -428,7 +430,7 @@ class DESHIFTER(object):
self.set_deshift_results()
temp_logger.info("Calculated shift-corrected array for attribute '%s', band %s... %.2fs"
%(self.attrname2deshift, self.band2process,time.time()-t0))
HLP_F.close_logger(temp_logger)
temp_logger.close()
return self.deshift_results
@@ -23,9 +23,10 @@ import builtins
import warnings
import scipy.interpolate
from ..algorithms import METADATA as META
from ..misc import database_tools as DB_T
from ..misc import path_generator as PG
from ..algorithms import METADATA as META
from ..misc import database_tools as DB_T
from ..misc import path_generator as PG
from ..misc.logging import GMS_logger
job = builtins.GMS_config.job # read from builtins (set by process_controller)
# + misc.helper_functions.setup_logger (left out here in order to avoid circular dependencies)
@@ -191,10 +192,9 @@ def SRF_reader(GMS_identifier):
def pickle_SRF_DB(L1A_Instances):
from ..misc.helper_functions import setup_logger
list_GMS_identifiers = [i.GMS_identifier for i in L1A_Instances]
out_dict = collections.OrderedDict()
logger = setup_logger('log__SRF2PKL', os.path.join(job.path_testing,'out/log__SRF2PKL.log'),append=0)
logger = GMS_logger('log__SRF2PKL', os.path.join(job.path_testing,'out/log__SRF2PKL.log'),append=0)
for Id,Inst in zip(list_GMS_identifiers,L1A_Instances):
Id['logger'] = logger
out_dict[Inst.satellite+'_'+Inst.sensor+(('_'+Inst.subsystem) if Inst.subsystem not in ['',None] else '')] \
@@ -206,6 +206,7 @@ def pickle_SRF_DB(L1A_Instances):
# with open(outFilename, 'rb') as inFile:
# readFile = pickle.load(inFile)
# [print(i) for i in readFile.keys()]
logger.close()
def Solar_Irradiance_reader(resol_nm = None, wvl_min_nm = None, wvl_max_nm = None):
@@ -43,6 +43,7 @@ from ..misc import helper_functions as HLP_F
from ..misc import database_tools as DB_T
from ..misc import path_generator as PG
from . import Input_reader as INP_R
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
@@ -148,6 +149,7 @@ def HDR_writer(meta_dic,outpath_hdr,logger=None):
def ASCII_writer(In,path_out_baseN):
warnings.warn(DeprecationWarning())
assert isinstance(In,dict), 'Input for ASCII writer is expected to be a dictionary. Got %s.' %type(In)
for k,v in list(In.items()):
@@ -157,9 +159,17 @@ def ASCII_writer(In,path_out_baseN):
if isinstance(v,datetime.datetime):
In[k] = v.strftime('%Y-%m-%d')
elif isinstance(v,logging.Logger):
if hasattr(v, 'handlers') and v.handlers[:]:
fileHandlers = [i for i in v.handlers[:] if isinstance(i,logging.FileHandler)]
for fH in fileHandlers:
warnings.warn('Not properly closed logger at GMS_obj.logger pointing to %s.' %fH.baseFilename)
In[k] = 'not set'
elif isinstance(v,collections.OrderedDict) or isinstance(v,dict):
if 'logger' in v:
if hasattr(In[k]['logger'], 'handlers') and In[k]['logger'].handlers[:]:
fileHandlers = [i for i in In[k]['logger'].handlers[:] if isinstance(i, logging.FileHandler)]
for fH in fileHandlers:
warnings.warn("Not properly closed logger at %s['logger'] pointing to %s." %(k,fH.baseFilename))
In[k]['logger'] = 'not set'
elif isinstance(v, np.ndarray):
# convert only small 2D arrays (sum of dimensions <= 200) to lists; delete 3D arrays and larger 2D arrays
@@ -170,6 +180,10 @@ def ASCII_writer(In,path_out_baseN):
elif hasattr(v,'__dict__'):
# deletes instances of objects and arrays, but not OrderedDicts
if hasattr(v,'logger'):
if hasattr(In[k].logger, 'handlers') and In[k].logger.handlers[:]:
fileHandlers = [i for i in In[k].logger.handlers[:] if isinstance(i, logging.FileHandler)]
for fH in fileHandlers:
warnings.warn('Not properly closed logger at %s.logger pointing to %s.' % (k, fH.baseFilename))
In[k].logger = 'not set'
del In[k]
@@ -566,9 +580,8 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification=True, is_tempfile=False,
# IMPORTANT: DO NOT pass the complete object but only a copy of the dictionary in order to prevent ASCII_writer and
# data_DB_updater from modifying the attributes of the object!!
if InObj.arr_shape in ['cube','MGRS_tile'] or [InObj.arr_pos[0][0], InObj.arr_pos[1][0]] == [0,0]: # cube or 1st tile
dict2write = InObj.__dict__.copy()
dict2write['arr_shape'],dict2write['arr_pos'] = ['cube',None]
ASCII_writer(dict2write, PG.path_generator(InObj.__dict__.copy(),MGRS_info=MGRS_info).get_path_gmsfile())
# write GMS file
InObj.to_GMS_file()
# create/update database
if not is_tempfile:
@@ -17,11 +17,11 @@ class SpatialIndexMediatorServer:
@property
def is_running(self):
return self.status[0]
return self.status['running']
@property
def port(self):
return self.status[1]
return self.status['port']
@property
def status(self):
@@ -37,20 +37,24 @@ class SpatialIndexMediatorServer:
_port = re.search('with pid ([\d]*)', outputStr)
port = int(_port.group(1)) if _port else None
return running, port
return {'running':running, 'port':port}
def start(self):
def start(self): # FIXME can be executed twice without a message that server is already running
outputStr = self._communicate('start')
if outputStr=='success' and self.is_running:
return 'started'
else:
warnings.warn("\nStarting Spatial Index Mediator Server failed with message '%s'!"
%outputStr.replace('\n',''))
if outputStr != 'success':
warnings.warn("\nStarting Spatial Index Mediator Server failed with message '%s'!"
%outputStr.replace('\n',''))
else:
warnings.warn("\nCommunication to Spatial Index Mediator Server was successful but "
"the server is still not running."% outputStr.replace('\n', ''))
def stop(self):
outputStr = self._communicate('stop')
if re.search('index-mediator-server stopped', outputStr, re.I):
if outputStr=='success' or re.search('index-mediator-server stopped', outputStr, re.I):
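
For reference, a consolidated, hedged sketch of the revised SpatialIndexMediatorServer status API (status now returns a dict instead of a tuple, so the convenience properties read named keys; _communicate() and the 'is running' check are assumptions, since the full method bodies are not part of this excerpt):

import re

class SpatialIndexMediatorServer:
    def _communicate(self, command):
        # assumed wrapper around the actual server control script
        raise NotImplementedError

    @property
    def status(self):
        outputStr = self._communicate('status')
        running = 'is running' in outputStr                # illustrative check
        _pid = re.search(r'with pid ([\d]*)', outputStr)   # pattern from the diff
        port = int(_pid.group(1)) if _pid else None
        return {'running': running, 'port': port}

    @property
    def is_running(self):
        return self.status['running']

    @property
    def port(self):
        return self.status['port']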