Commit a6a6ecbb authored by Daniel Scheffler

Revised logging to job logfile (now contains full log output of the job).

parent 38e625ae
@@ -6,6 +6,8 @@ import warnings
 import logging
 import os
 
+from ..options.config import GMS_config as CFG
+
 # try:
 #     from StringIO import StringIO  # Python 2
 # except ImportError:
@@ -13,8 +15,9 @@ import os
 
 class GMS_logger(logging.Logger):
-    def __init__(self, name_logfile, fmt_suffix=None, path_logfile=None, log_level='INFO', append=True):
-        # type: (str, any, str, any, bool) -> None
+    def __init__(self, name_logfile, fmt_suffix=None, path_logfile=None, log_level='INFO', append=True,
+                 log_to_joblog=True):
+        # type: (str, any, str, any, bool, bool) -> None
         """Returns a logging.logger instance pointing to the given logfile path.
 
         :param name_logfile:
         :param fmt_suffix:          if given, it will be included into log formatter
@@ -23,6 +26,7 @@ class GMS_logger(logging.Logger):
                                     default: 'INFO')
         :param append:              <bool> whether to append the log message to an existing logfile (1)
                                     or to create a new logfile (0); default=1
+        :param log_to_joblog:       whether to additionally log all messages to the logfile of the GMS job (default=1)
         """
         # private attributes
......@@ -42,6 +46,7 @@ class GMS_logger(logging.Logger):
self.formatter_ConsoleH = logging.Formatter('%(asctime)s' + (' [%s]' % fmt_suffix if fmt_suffix else '') +
': %(message)s', datefmt='%Y/%m/%d %H:%M:%S')
# set fileHandler
if path_logfile:
# create output directory
while not os.path.isdir(os.path.dirname(path_logfile)):
@@ -60,6 +65,16 @@ class GMS_logger(logging.Logger):
         else:
             fileHandler = None
 
+        # set fileHandler for job logfile
+        if log_to_joblog:
+            # create FileHandler
+            joblog_fileHandler = logging.FileHandler(os.path.join(CFG.path_job_logs, '%s.log' % CFG.ID),
+                                                     mode='a' if append else 'w')
+            joblog_fileHandler.setFormatter(self.formatter_fileH)
+            joblog_fileHandler.setLevel(log_level)
+        else:
+            joblog_fileHandler = None
+
         # create StreamHandler  # TODO add a StringIO handler
         # self.streamObj = StringIO()
         # self.streamHandler = logging.StreamHandler(stream=self.streamObj)
@@ -84,6 +99,8 @@ class GMS_logger(logging.Logger):
         if not self.handlers:
             if fileHandler:
                 self.addHandler(fileHandler)
+            if joblog_fileHandler:
+                self.addHandler(joblog_fileHandler)
             # self.addHandler(self.streamHandler)
             self.addHandler(consoleHandler_out)
             self.addHandler(consoleHandler_err)
......
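Taken together, the hunks above mean that every `GMS_logger` instance now additionally attaches a `FileHandler` pointing at the central job logfile (`CFG.path_job_logs/<job ID>.log`), which is how that file ends up containing the full log output of the job. A minimal sketch of the same pattern in plain stdlib `logging` (the paths, names and helper below are illustrative assumptions, not the GMS API):

```python
import logging
import os

# Hypothetical stand-ins for CFG.path_job_logs and CFG.ID:
path_job_logs, job_id = '/tmp/job_logs', '12345'
os.makedirs(path_job_logs, exist_ok=True)

def get_logger(name, path_logfile=None, log_to_joblog=True, append=True):
    """Reduced analogue of GMS_logger: per-logger file plus shared job logfile."""
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    fmt = logging.Formatter('%(asctime)s [%(name)s]: %(message)s')
    if path_logfile:
        fh = logging.FileHandler(path_logfile, mode='a' if append else 'w')
        fh.setFormatter(fmt)
        logger.addHandler(fh)
    if log_to_joblog:
        # every logger created this way ALSO writes to one shared job logfile
        jh = logging.FileHandler(os.path.join(path_job_logs, '%s.log' % job_id),
                                 mode='a' if append else 'w')
        jh.setFormatter(fmt)
        logger.addHandler(jh)
    return logger

get_logger('scene_A').info('done')  # -> /tmp/job_logs/12345.log
get_logger('scene_B').info('done')  # -> same file: full job output in one place
```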
@@ -5,6 +5,7 @@ from typing import List, Tuple, Generator, Iterable, Union  # noqa F401  # flake
 from ..options.config import GMS_config as CFG
 from ..misc import exception_handler as EXC_H
 from ..misc.path_generator import path_generator
+from ..misc.logging import GMS_logger
 from ..misc.locks import ProcessLock
 from ..algorithms import L1A_P
 from ..algorithms import L1B_P
@@ -223,7 +224,10 @@ def run_complete_preprocessing(list_dataset_dicts_per_scene):  # map (scene-wise)
     :param list_dataset_dicts_per_scene:
     :return:
     """
-    with ProcessLock(processes=CFG.CPUs_all_jobs):
+    with ProcessLock(processes=CFG.CPUs_all_jobs,
+                     logger=GMS_logger('log__%s' % CFG.ID, fmt_suffix=list_dataset_dicts_per_scene[0]['scene_ID'],
+                                       log_level=CFG.log_level, append=True)):
+
         if len(list(set([ds['proc_level'] for ds in list_dataset_dicts_per_scene]))) != 1:
             raise ValueError('Lists of subsystem datasets with different processing levels are not supported here. '
                              'Received %s.' % list_dataset_dicts_per_scene)
......
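This hunk wires a dedicated `GMS_logger` into `ProcessLock`, so the lock's own acquire/release messages are routed into the job logfile as well. A reduced, hypothetical lock illustrating why injecting a logger into a context manager is useful here (the real `ProcessLock` additionally enforces the cross-job CPU limit via `processes=CFG.CPUs_all_jobs`):

```python
import logging

logging.basicConfig(level=logging.INFO)

class LoggedLock(object):
    """Hypothetical, reduced lock that reports via an injected logger."""
    def __init__(self, processes, logger=None):
        self.processes = processes
        self.logger = logger or logging.getLogger(__name__)

    def __enter__(self):
        self.logger.info('Waiting for one of %d process slots...' % self.processes)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.logger.info('Process slot released.')

with LoggedLock(processes=4, logger=logging.getLogger('job')):
    pass  # scene-wise preprocessing would run here
```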
@@ -80,6 +80,10 @@ class process_controller(object):
         # called_from_iPyNb = 1 if 'ipykernel/__main__.py' in sys.argv[0] else 0
 
+        path_job_logfile = os.path.join(self.config.path_job_logs, '%s.log' % self.config.ID)
+        if os.path.exists(path_job_logfile):
+            HLP_F.silentremove(path_job_logfile)
+
         self.logger.info('Process Controller initialized for job ID %s (comment: %s).'
                          % (self.config.ID, self.DB_job_record.comment))
@@ -92,9 +96,8 @@ class process_controller(object):
         if self._logger and self._logger.handlers[:]:
             return self._logger
         else:
-            self._logger = GMS_logger('log__%s' % self.config.ID,
-                                      path_logfile=os.path.join(self.config.path_job_logs, '%s.log' % self.config.ID),
-                                      log_level=self.config.log_level, append=False)
+            self._logger = GMS_logger('log__%s' % self.config.ID, fmt_suffix='ProcessController',
+                                      log_level=self.config.log_level, append=True, log_to_joblog=True)
             return self._logger
 
     @logger.setter
@@ -105,7 +108,7 @@ class process_controller(object):
     def logger(self):
         if self._logger not in [None, 'not set']:
             self.logger.close()
-            self.logger = None
+            self.logger = None  # FIXME _logger = None?
 
     @property
     def DB_job_record(self):
@@ -513,9 +516,6 @@ class process_controller(object):
     def shutdown(self):
         """Shutdown the process controller instance (loggers, remove temporary directories, ...)."""
-        del self.logger
-        shutdown_loggers()
-
         # release unclosed locks
         release_unclosed_locks(self.logger)
@@ -525,6 +525,9 @@ class process_controller(object):
             if os.path.exists(tempdir):
                 shutil.rmtree(tempdir)
 
+        del self.logger
+        shutdown_loggers()
+
     def benchmark(self):
         """
         Run a benchmark.
......
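Note the reordering inside `shutdown()`: `del self.logger` and `shutdown_loggers()` move from the top of the method to the very end, so lock release and tempdir cleanup can still write to the job logfile. A reduced, runnable sketch of that teardown order (all names below are illustrative stand-ins, not the GMS implementation):

```python
import logging

class MiniController(object):
    """Reduced stand-in for process_controller."""
    def __init__(self):
        self._logger = logging.getLogger('job')
        self._logger.setLevel(logging.INFO)
        self._logger.addHandler(logging.StreamHandler())

    @property
    def logger(self):
        return self._logger

    @logger.deleter
    def logger(self):
        for handler in list(self._logger.handlers):
            handler.close()
            self._logger.removeHandler(handler)
        self._logger = None

    def shutdown(self):
        # cleanup that still wants to log must run BEFORE the logger is torn down
        self.logger.info('Releasing unclosed locks, removing temp dirs...')
        del self.logger    # drop and close all handlers last
        logging.shutdown()

MiniController().shutdown()
```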
@@ -96,9 +96,6 @@ class BaseTestCases:
     def create_job(cls, jobID, config):
         cls.PC = process_controller(jobID, **config)
 
-        cls.PC.logger.info('Execution of entire GeoMultiSens pre-processing chain started for job ID %s...'
-                           % cls.PC.config.ID)
-
         # update attributes of DB_job_record and related DB entry
         cls.PC.config.DB_job_record.reset_job_progress()
@@ -199,9 +196,6 @@ class BaseTestCases:
     def create_job(cls, jobID, config):
         cls.PC = process_controller(jobID, **config)
 
-        cls.PC.logger.info('Execution of entire GeoMultiSens pre-processing chain started for job ID %s...'
-                           % cls.PC.config.ID)
-
         [cls.validate_db_entry(ds['filename']) for ds in cls.PC.config.data_list]
 
         cls.PC.config.CPUs_all_jobs = 3
......
@@ -54,5 +54,3 @@ class Test_ProcessLock(unittest.TestCase):
     def test_with_statement(self):
         with ProcessLock(processes=15) as lock:
             self.assertNotEquals(lock, None)
-
-