Commit 931fd47c authored by Daniel Scheffler's avatar Daniel Scheffler

Merge branch 'enhancement/revise_process_controller' into dev

Conflicts:
	gms_preprocessing/algorithms/L2B_P.py
Former-commit-id: c0ea752c
parents b6a69f65 1574242e
......@@ -34,7 +34,7 @@ class L1A_object(GMS_object):
"""Features input reader and raster-/metadata homogenization."""
def __init__(self, image_type='', satellite='', sensor='', subsystem='', sensormode='', acq_datetime=None,
entity_ID='', scene_ID=-9999, filename='', dataset_ID=-9999, **kwargs):
entity_ID='', scene_ID=-9999, filename='', dataset_ID=-9999, proc_status='', **kwargs):
""":param : instance of gms_object.GMS_object or None
"""
# TODO docstring
......@@ -65,6 +65,12 @@ class L1A_object(GMS_object):
% (self.satellite, self.sensor,
(' ' + self.subsystem) if self.subsystem not in [None, ''] else '', self.entity_ID))
# (re)set the processing status
if self.scene_ID in self.proc_status_all_GMSobjs:
del self.proc_status_all_GMSobjs[self.scene_ID]
self.proc_status = proc_status or 'initialized'  # proc_status='running' may be passed in by L1A_map
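For orientation: proc_status_all_GMSobjs is a class-level registry shared by all GMS objects; handle_failed() below iterates it per scene ID and subsystem. Judging from that usage, a snapshot might look like this (scene ID, subsystem names and statuses invented for illustration):

# Hypothetical snapshot of GMS_object.proc_status_all_GMSobjs:
{26186261: {'S2A10': {'L1A': 'finished', 'L1B': 'finished'},
            'S2A20': {'L1A': 'finished', 'L1B': 'running'}}}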
def import_rasterdata(self):
if re.search("ALOS", self.satellite, re.I):
'''First 22 lines are nodata - maybe due to an issue of the GDAL CEOS driver.
......@@ -98,6 +104,10 @@ class L1A_object(GMS_object):
% (os.path.basename(path_archive), self.scene_ID, self.entity_ID)
full_LayerBandsAssignment = META.get_LayerBandsAssignment(self.GMS_identifier, no_thermal=False, no_pan=False)
####################################################
# get list of raster files to be read from archive #
####################################################
image_files = []
is_ALOS_Landsat_S2 = \
re.search('ALOS', self.satellite) or re.search('Landsat', self.satellite) or \
......@@ -124,6 +134,9 @@ class L1A_object(GMS_object):
# create and fill raster object
if n_files2search > 1:
#####################################
# validate number of expected files #
#####################################
# Landsat-7 SLC failure on 2003-05-31: SLC-off archives contain twice as many image files
if re.search(r'ETM\+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31):
expected_files_count = 2 * len(full_LayerBandsAssignment)
else:
......@@ -133,10 +146,24 @@ class L1A_object(GMS_object):
% (len(full_LayerBandsAssignment), path_archive,
len(image_files))
filtered_files = [image_files[full_LayerBandsAssignment.index(i)] for i in self.LayerBandsAssignment]
###############################
# get paths of files to stack #
###############################
# NOTE: image_files is a SORTED list of image filenames; self.LayerBandsAssignment may be sorted by CWL
filtered_files = []
for bN in self.LayerBandsAssignment: # unsorted, e.g., ['1', '2', '3', '4', '5', '9', '6', '7']
for fN, b in zip(image_files, HLP_F.sorted_nicely(full_LayerBandsAssignment)): # both sorted nicely
if b == bN:
filtered_files.append(fN)
paths_files2stack = [os.path.join(gdal_path_archive, i) for i in filtered_files]
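The double loop above reorders the nicely sorted file list into the (possibly CWL-sorted) band order of self.LayerBandsAssignment. A self-contained sketch of the same matching, with invented file names and a simplified stand-in for HLP_F.sorted_nicely:

import re

def sorted_nicely(iterable):  # simplified stand-in for HLP_F.sorted_nicely
    key = lambda s: [int(t) if t.isdigit() else t.lower() for t in re.split('([0-9]+)', s)]
    return sorted(iterable, key=key)

full_LBA = ['1', '2', '3', '4', '5', '6', '7', '9']      # all bands of the sensor
image_files = ['B1.tif', 'B2.tif', 'B3.tif', 'B4.tif',
               'B5.tif', 'B6.tif', 'B7.tif', 'B9.tif']   # sorted nicely, like full_LBA
target_LBA = ['1', '2', '3', '4', '5', '9', '6', '7']    # e.g., sorted by CWL

filtered_files = [fN for bN in target_LBA
                  for fN, b in zip(image_files, sorted_nicely(full_LBA)) if b == bN]
# -> ['B1.tif', 'B2.tif', 'B3.tif', 'B4.tif', 'B5.tif', 'B9.tif', 'B6.tif', 'B7.tif']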
########################
# read the raster data #
########################
rasObj = GEOP.GEOPROCESSING(paths_files2stack[0], self.logger)
# in case a subset is to be read: prepare rasObj instance to read a subset
if subset:
full_dim = [0, rasObj.rowEnd, 0, rasObj.colEnd]
sub_dim = [subset[1][0][0], subset[1][0][1], subset[1][1][0], subset[1][1][1]]
......@@ -144,6 +171,7 @@ class L1A_object(GMS_object):
subset = ['block', [[sub_dim[0], sub_dim[1] + 1], [sub_dim[2], sub_dim[3] + 1]]]
rasObj = GEOP.GEOPROCESSING(paths_files2stack[0], self.logger, subset=subset)
# perform layer stack
if CFG.inmem_serialization and path_output is None: # numpy array output
self.arr = rasObj.Layerstacking(paths_files2stack)
self.path_InFilePreprocessor = paths_files2stack[0]
......
......@@ -242,6 +242,7 @@ class L1B_object(L1A_object):
[setattr(self, key, value) for key, value in L1A_obj.__dict__.items()]
self.proc_level = 'L1B'
self.proc_status = 'initialized'
@property
def spatRef_available(self):
......
......@@ -48,6 +48,7 @@ class L1C_object(L1B_object):
self._lonlat_arr = None
self.proc_level = 'L1C'
self.proc_status = 'initialized'
@property
def lonlat_arr(self):
......@@ -70,6 +71,10 @@ class L1C_object(L1B_object):
def lonlat_arr(self, lonlat_arr):
self._lonlat_arr = lonlat_arr
@lonlat_arr.deleter
def lonlat_arr(self):
self._lonlat_arr = None
@property
def VZA_arr(self):
"""Get viewing zenith angle.
......@@ -99,6 +104,10 @@ class L1C_object(L1B_object):
def VZA_arr(self, VZA_arr):
self._VZA_arr = VZA_arr
@VZA_arr.deleter
def VZA_arr(self):
self._VZA_arr = None
@property
def VAA_arr(self):
"""Get viewing azimuth angle.
......@@ -128,6 +137,10 @@ class L1C_object(L1B_object):
def VAA_arr(self, VAA_arr):
self._VAA_arr = VAA_arr
@VAA_arr.deleter
def VAA_arr(self):
self._VAA_arr = None
@property
def SZA_arr(self):
"""Get solar zenith angle.
......@@ -156,6 +169,10 @@ class L1C_object(L1B_object):
def SZA_arr(self, SZA_arr):
self._SZA_arr = SZA_arr
@SZA_arr.deleter
def SZA_arr(self):
self._SZA_arr = None
@property
def SAA_arr(self):
"""Get solar azimuth angle.
......@@ -171,6 +188,10 @@ class L1C_object(L1B_object):
def SAA_arr(self, SAA_arr):
self._SAA_arr = SAA_arr
@SAA_arr.deleter
def SAA_arr(self):
self._SAA_arr = None
@property
def RAA_arr(self):
"""Get relative azimuth angle.
......@@ -187,12 +208,18 @@ class L1C_object(L1B_object):
def RAA_arr(self, RAA_arr):
self._RAA_arr = RAA_arr
@RAA_arr.deleter
def RAA_arr(self):
self._RAA_arr = None
def delete_ac_input_arrays(self):
self.VZA_arr = None # not needed anymore
self.SZA_arr = None # not needed anymore
self.SAA_arr = None # not needed anymore
self.RAA_arr = None # not needed anymore
self.lonlat_arr = None # not needed anymore
"""Delete AC input arrays if they are not needed anymore."""
self.logger.info('Deleting input arrays for atmospheric correction...')
del self.VZA_arr
del self.SZA_arr
del self.SAA_arr
del self.RAA_arr
del self.lonlat_arr
# use self.dem deleter
# would have to be resampled when writing MGRS tiles
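del self.VZA_arr and friends work because each angle array is exposed as a property whose deleter (see the @..._arr.deleter blocks above) resets the private attribute, releasing the referenced array. A minimal standalone sketch of the pattern:

class AngleHolder:
    def __init__(self):
        self._VZA_arr = None

    @property
    def VZA_arr(self):
        return self._VZA_arr

    @VZA_arr.setter
    def VZA_arr(self, arr):
        self._VZA_arr = arr

    @VZA_arr.deleter
    def VZA_arr(self):
        self._VZA_arr = None  # drop the reference instead of removing the attribute

obj = AngleHolder()
obj.VZA_arr = [1, 2, 3]
del obj.VZA_arr             # invokes the deleter
assert obj.VZA_arr is None  # the attribute still exists; the array is released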
......@@ -789,6 +816,10 @@ class AtmCorr(object):
# returns an instance of S2Mask or None if cloud mask is not given by input GMS objects
) # NOTE: all keys of this dict are later converted to attributes of RSImage
# remove empty values from RSImage kwargs because SICOR treats any kind of RSImage attributes as given
# => 'None'-attributes may cause issues
rs_data = {k: v for k, v in rs_data.items() if v is not None}
script = False
# check if ECMWF data are available - if not, start the download
......
......@@ -15,3 +15,4 @@ class L2A_object(L1C_object):
[setattr(self, key, value) for key, value in L1C_obj.__dict__.items()]
self.proc_level = 'L2A'
self.proc_status = 'initialized'
......@@ -50,6 +50,7 @@ class L2B_object(L2A_object):
[setattr(self, key, value) for key, value in L2A_obj.__dict__.items()]
self.proc_level = 'L2B'
self.proc_status = 'initialized'
def spectral_homogenization(self):
"""Apply spectral homogenization, i.e., prediction of the spectral bands of the target sensor."""
......@@ -62,9 +63,10 @@ class L2B_object(L2A_object):
# FIXME exclude or include thermal bands; respect sorted CWLs in context of LayerBandsAssignment
tgt_cwls = CFG.target_CWL
tgt_sat, tgt_sen = datasetid_to_sat_sen(CFG.datasetid_spectral_ref)
# NOTE: get target LBA at L2A, because spectral characteristics of target sensor do not change after AC
tgt_LBA = get_LayerBandsAssignment(
dict(Satellite=tgt_sat, Sensor=tgt_sen, Subsystem=None,
image_type='RSD', proc_level='L2B', dataset_ID=src_dsID, logger=None))
image_type='RSD', proc_level='L2A', dataset_ID=src_dsID, logger=None))
####################################################
# special cases where homogenization is not needed #
......@@ -86,8 +88,8 @@ class L2B_object(L2A_object):
SpH = SpectralHomogenizer(classifier_rootDir=CFG.path_spechomo_classif, logger=self.logger)
if method == 'LI' or CFG.datasetid_spectral_ref is None:
# linear interpolation
# or a custom sensor has been specified -> no classifier for that case available -> linear interpolation
# linear interpolation (if requested by the user or in case of custom spectral characteristics of the target sensor)
# -> no classifier available for that case -> linear interpolation
im, errs = SpH.interpolate_cube(self.arr, src_cwls, tgt_cwls, kind='linear')
else:
......@@ -106,18 +108,16 @@ class L2B_object(L2A_object):
kwargs=dict(kind='linear')
))
self.arr = im # type: GeoArray
###################
# update metadata #
###################
# TODO better band names for homogenized product -> include in get_LayerBandsAssignment
self.LayerBandsAssignment = []
self.LayerBandsAssignment = tgt_LBA
self.meta_odict['wavelength'] = list(tgt_cwls)
self.meta_odict['bands'] = len(tgt_cwls)
if 'band names' in self.meta_odict: # FIXME bug workaround
del self.meta_odict['band names'] # TODO
self.arr = im # type: GeoArray
self.spec_homo_errors = errs # type: np.ndarray # int16
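Conceptually, SpH.interpolate_cube resamples each pixel spectrum from the source center wavelengths to the target ones. A minimal numpy sketch of that idea (shapes and wavelengths invented; the real implementation lives in SpectralHomogenizer):

import numpy as np

src_cwls = [490., 560., 665., 842.]   # source band center wavelengths [nm]
tgt_cwls = [483., 561., 655., 865.]   # target band center wavelengths [nm]
cube = np.random.randint(0, 10000, (2, 2, len(src_cwls))).astype(float)  # rows x cols x bands

# linear interpolation along the spectral axis, pixel by pixel
# NOTE: np.interp clamps outside the source wavelength range instead of extrapolating
flat = cube.reshape(-1, len(src_cwls))
out = np.stack([np.interp(tgt_cwls, src_cwls, spectrum) for spectrum in flat])
cube_homogenized = out.reshape(cube.shape[0], cube.shape[1], len(tgt_cwls))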
......@@ -175,15 +175,17 @@ class SpectralHomogenizer(object):
:param tgt_sensor: target sensor, e.g., 'OLI_TIRS'
:param tgt_LBA: target LayerBandsAssignment
:param nodataVal: no data value
:param fallback_argskwargs: arguments and keyword arguments for fallback algorithm ({'args:{}, 'kwargs': {}}
:param fallback_argskwargs: arguments and keyword arguments for the fallback algorithm ({'args': {}, 'kwargs': {}})
:return: predicted array (rows x columns x bands)
:rtype: Tuple[np.ndarray, np.ndarray]
"""
# TODO: add LBA validation to .predict()
PR = RSImage_Predictor(method=method, classifier_rootDir=self.classifier_rootDir)
######################
# get the classifier #
######################
cls = None
exc = Exception()
try:
......@@ -201,8 +203,10 @@ class SpectralHomogenizer(object):
self.logger.error(traceback.format_exc())
exc = e
##################
# run prediction #
##################
if cls:
self.logger.info('Performing spectral homogenization using %s. Target is %s %s %s.'
% (method, tgt_satellite, tgt_sensor, tgt_LBA))
......
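The surrounding hunks implement a get-classifier-or-fall-back pattern: if no classifier can be loaded, linear interpolation between center wavelengths is used instead. A hedged sketch of that control flow (load_classifier and predict are placeholders; interpolate_cube is the method shown above):

def homogenize(arr, src_cwls, tgt_cwls, load_classifier, interpolate_cube):
    cls = None
    try:
        cls = load_classifier()   # may raise, e.g., ClassifierNotAvailableError
    except Exception:
        pass                      # the real code logs the traceback and remembers the exception
    if cls:
        return cls.predict(arr)   # machine-learning based prediction
    # fallback: linear interpolation between source and target center wavelengths
    return interpolate_cube(arr, src_cwls, tgt_cwls, kind='linear')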
......@@ -18,6 +18,7 @@ class L2C_object(L2B_object):
[setattr(self, key, value) for key, value in L2B_obj.__dict__.items()]
self.proc_level = 'L2C'
self.proc_status = 'initialized'
def calc_geometric_accurracy(self):
pass
......
......@@ -273,7 +273,7 @@ def increment_decrement_arrayCol_in_postgreSQLdb(conn_params, tablename, col2upd
idx_val2increment=None, cond_dict=None, timeout=15000):
# type: (str, str, str, int, int, dict, int) -> Union[None, str]
"""Updates an array column of a specific postgreSQL table in the form that it increments or decrements the elements
at a given position. HINT: The column must have values like that: [0,1,0,3,1,0]
at a given position. HINT: The column must have values like that: [52,0,27,10,8,0,0,0,0]
:param conn_params: <str> connection parameters as provided by CFG.conn_params
:param tablename: <str> name of the table within the database to be updated
......
......@@ -17,8 +17,8 @@ dtype_lib_IDL_Python = {0: np.bool_, 1: np.uint8, 2: np.int16, 3: np.int32, 4: n
dtype_lib_GDAL_Python = {"uint8": 1, "int8": 1, "uint16": 2, "int16": 3, "uint32": 4, "int32": 5, "float32": 6,
"float64": 7, "complex64": 10, "complex128": 11}
proc_chain = ['L1A', 'L1B', 'L1C', 'L2A', 'L2B', 'L2C']
db_jobs_statistics_def = {'downloaded': 1, 'started': 2, None: 2, 'L1A': 3, 'L1B': 4, 'L1C': 5, 'L2A': 6, 'L2B': 7,
'L2C': 8, 'FAILED': 9}
db_jobs_statistics_def = {'pending': 1, 'started': 2, None: 2, 'L1A': 3, 'L1B': 4, 'L1C': 5, 'L2A': 6, 'L2B': 7,
'L2C': 8, 'FAILED': 9} # NOTE: OrderedDicts passed to L1A_map have proc_level=None
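db_jobs_statistics_def maps a processing status to a 1-based position in the statistics array column of the jobs table (PostgreSQL arrays are 1-based; cf. increment_decrement_arrayCol_in_postgreSQLdb above). Moving a scene from one status to the next therefore decrements one element and increments another. A plain-Python illustration (counts invented):

db_jobs_statistics_def = {'pending': 1, 'started': 2, None: 2, 'L1A': 3, 'L1B': 4, 'L1C': 5,
                          'L2A': 6, 'L2B': 7, 'L2C': 8, 'FAILED': 9}

statistics = [52, 0, 27, 10, 8, 0, 0, 0, 0]  # hypothetical per-status scene counts

def move_scene(stats, status_from, status_to):
    stats[db_jobs_statistics_def[status_from] - 1] -= 1  # 1-based DB position -> 0-based list index
    stats[db_jobs_statistics_def[status_to] - 1] += 1

move_scene(statistics, 'pending', 'started')  # a scene starts processing
move_scene(statistics, 'started', 'L1A')      # ...and reaches L1A
# statistics is now [51, 0, 28, 10, 8, 0, 0, 0, 0]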
def get_GMS_sensorcode(GMS_identifier):
......
......@@ -6,13 +6,14 @@ import shutil
import sys
import traceback
import warnings
from logging import Logger
from logging import getLogger
from typing import Union, List # noqa F401 # flake8 issue
from ..model.gms_object import GMS_object # noqa F401 # flake8 issue
from ..model.gms_object import failed_GMS_object
from ..options.config import GMS_config as CFG
from ..misc import database_tools as DB_T
from ..misc.helper_functions import is_proc_level_lower
from .definition_dicts import db_jobs_statistics_def, proc_chain
__author__ = 'Daniel Scheffler'
......@@ -36,7 +37,7 @@ def trace_unhandled_exceptions(func):
class ExceptionHandler(object):
def __init__(self, logger=None):
self.GMS_objs = None # type: Union[list, dict]
self.GMS_objs = None # type: Union[List[GMS_object], GMS_object, collections.OrderedDict, failed_GMS_object]
self.GMS_mapper_name = ''
self.GMS_mapper_failed = False
self._exc_details = None
......@@ -45,7 +46,8 @@ class ExceptionHandler(object):
@property
def logger(self):
if not self._logger:
self._logger = Logger('ExceptionHandler', level=CFG.log_level)
self._logger = getLogger('ExceptionHandler')
self._logger.setLevel(CFG.log_level)
return self._logger
@logger.setter
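Using logging.getLogger instead of instantiating logging.Logger directly returns the logger registered under that name, so repeated property accesses (and unpickled copies) share one configured instance:

import logging

a = logging.getLogger('ExceptionHandler')
b = logging.getLogger('ExceptionHandler')
assert a is b                           # same registered instance

c = logging.Logger('ExceptionHandler')  # bypasses the logging registry
assert c is not a                       # a fresh, unmanaged logger every time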
......@@ -65,6 +67,7 @@ class ExceptionHandler(object):
@functools.wraps(GMS_mapper) # needed to avoid pickling errors
def wrapped_GMS_mapper(GMS_objs, **kwargs):
# type: (Union[List[GMS_object], GMS_object, collections.OrderedDict, failed_GMS_object], dict) -> any
"""
:param GMS_objs: one OR multiple instances of GMS_object or one instance of failed_GMS_object
......@@ -75,17 +78,30 @@ class ExceptionHandler(object):
self.GMS_mapper_name = GMS_mapper.__name__
self.GMS_objs = GMS_objs
if not GMS_objs:
raise ValueError('Unexpected argument for %s. Received %s.' % (self.GMS_mapper_name, GMS_objs))
# noinspection PyBroadException
try:
self.handle_previously_failed()
self.update_progress_started()
# GMS_mapper inputs CONTAIN NO failed_GMS_objects -> run the mapper normally
if not self.is_failed(self.GMS_objs):
self.update_progress_started()
# run the mapper function and store its results
self.GMS_objs = GMS_mapper(GMS_objs, **kwargs)
# run the mapper function and store its results
self.GMS_objs = GMS_mapper(GMS_objs, **kwargs)
self.increment_progress()
self.increment_progress()
# GMS_mapper inputs CONTAIN failed_GMS_objects -> log and return mapper inputs as received
else:
GMS_obj = self.get_sample_GMS_obj(self.GMS_objs) # type: failed_GMS_object
# FIXME in case self.GMS_objs is a list and the failed object is not at first position
# FIXME GMS_obj.failedMapper will not work
print("Scene %s (entity ID %s) skipped %s due to an unexpected exception in %s."
% (GMS_obj.scene_ID, GMS_obj.entity_ID, self.GMS_mapper_name,
GMS_obj.failedMapper)) # TODO should be logged by PC.logger
return self.GMS_objs # type: Union[GMS_object, List[GMS_object]]
return self.GMS_objs # type: Union[GMS_object, List[GMS_object], failed_GMS_object]
except OSError:
_, exc_val, _ = self.exc_details
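Stripped of the database bookkeeping and error handling, the decorator above boils down to the following control flow (a sketch; is_failed, update_progress_started and increment_progress are ExceptionHandler methods in the real code):

import functools

def log_uncaught_exceptions(GMS_mapper):
    @functools.wraps(GMS_mapper)  # keep the mapper's identity, which also avoids pickling errors
    def wrapped_GMS_mapper(GMS_objs, **kwargs):
        if is_failed(GMS_objs):
            return GMS_objs                    # pass a previous failure through untouched
        update_progress_started(GMS_objs)      # DB bookkeeping: 'pending' -> 'started'
        GMS_objs = GMS_mapper(GMS_objs, **kwargs)
        increment_progress(GMS_objs)           # DB bookkeeping: previous level -> new level
        return GMS_objs
    return wrapped_GMS_mapper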
......@@ -127,19 +143,11 @@ class ExceptionHandler(object):
@staticmethod
def get_sample_GMS_obj(GMS_objs):
# type: (Union[list, tuple, collections.OrderedDict]) -> GMS_object
# type: (Union[list, tuple, collections.OrderedDict, failed_GMS_object]) -> Union[GMS_object, failed_GMS_object]
return \
GMS_objs if isinstance(GMS_objs, collections.OrderedDict) else \
GMS_objs[0] if isinstance(GMS_objs, (list, tuple)) else GMS_objs
def handle_previously_failed(self):
if self.is_failed(self.GMS_objs):
GMS_obj = self.get_sample_GMS_obj(self.GMS_objs) # type: failed_GMS_object
print("Scene %s (entity ID %s) skipped %s due to an unexpected exception in %s."
% (GMS_obj.scene_ID, GMS_obj.entity_ID, self.GMS_mapper_name,
GMS_obj.failedMapper)) # TODO should be logged by PC.logger
return self.GMS_objs
def update_progress_started(self):
"""in case of just initialized objects:
update statistics column in jobs table of postgreSQL database to 'started'"""
......@@ -151,14 +159,22 @@ class ExceptionHandler(object):
# update statistics column ONLY in case of full cube or first subsystem
DB_T.increment_decrement_arrayCol_in_postgreSQLdb(
CFG.conn_database, 'jobs', 'statistics', cond_dict={'id': CFG.ID},
idx_val2decrement=db_jobs_statistics_def['started'] - 1,
idx_val2decrement=db_jobs_statistics_def['pending'],
idx_val2increment=db_jobs_statistics_def['started'])
def increment_progress(self):
"""update statistics column in jobs table of postgreSQL database"""
"""Update statistics column in jobs table of postgreSQL database.
NOTE: This function ONLY receives those GMS_objects that have been sucessfully processed by the GMS_mapper.
"""
# get a GMS object from which we get the new proc_level
GMS_obj = self.get_sample_GMS_obj(self.GMS_objs)
# validate proc_level
if GMS_obj.proc_level is None:
raise ValueError('Received GMS_object for %s %s without processing level after being processed by %s.'
% (GMS_obj.entity_ID, GMS_obj.subsystem, self.GMS_mapper_name))
# NOTE: in case GMS_obj represents a subsystem and another one has already been marked as FAILED the
failed_sceneids column and the statistics column are NOT updated once more
# check if another subsystem of the same scene ID already failed - don't increment the stats anymore
......@@ -187,11 +203,16 @@ class ExceptionHandler(object):
idx_val2increment=db_jobs_statistics_def[GMS_obj.proc_level])
@staticmethod
def update_progress_failed(failed_Obj):
"""Update statistics column in jobs table of postgreSQL database."""
def update_progress_failed(failed_Obj, procL_failed=None):
"""Update statistics column in jobs table of postgreSQL database.
:param failed_Obj: instance of gms_object.failed_GMS_object
:param procL_failed: processing level to be decremented. If not given, the level of failed_Obj is used.
"""
DB_T.increment_decrement_arrayCol_in_postgreSQLdb(
CFG.conn_database, 'jobs', 'statistics', cond_dict={'id': CFG.ID},
idx_val2decrement=db_jobs_statistics_def[failed_Obj.proc_level],
idx_val2decrement=db_jobs_statistics_def[procL_failed or failed_Obj.proc_level],
idx_val2increment=db_jobs_statistics_def['FAILED'])
def handle_failed(self):
......@@ -210,16 +231,45 @@ class ExceptionHandler(object):
# add the scene ID to failed_sceneids column in jobs table of DB and update statistics column
# NOTE: in case failed_Obj represents a subsystem and another one has already been marked as FAILED the
# failed_sceneids column and the statistics column are NOT updated once more
res = DB_T.get_info_from_postgreSQLdb(CFG.conn_database, 'jobs', ['failed_sceneids'],
{'id': CFG.ID})
assert res, "Query delivered no result."
if res[0][0] is None or failed_Obj.scene_ID not in res[0][0]:
# if column is empty or scene ID is not in there
another_ss_failed = False
another_ss_succeeded = False
higher_procL = None
if failed_Obj.subsystem:
# check if another subsystem of the same scene ID has been marked as failed before
res = DB_T.get_info_from_postgreSQLdb(CFG.conn_database, 'jobs', ['failed_sceneids'], {'id': CFG.ID})
assert res, "Query delivered no result."
if res[0][0] is not None and failed_Obj.scene_ID in res[0][0]:
self.logger.debug("Found another failed subsystem of scene %s in the database." % failed_Obj.scene_ID)
another_ss_failed = True
# check if another subsystem already reached a higher processing level
# NOTE: this fixes issue #50
# NOTE: This works not only for GMS_object instances but also for L1A inputs (OrderedDicts) because
# failed_GMS_object inherits from GMS_object and GMS_object.proc_status_all_GMSobjs has already
# been updated by the first subsystem (that earlier reached L1A)
# FIXME proc_status_all_GMSobjs is not available if other subsystems are processed by another
# FIXME multiprocessing worker or on another machine (cluster node)
procstatus_other_ss = {k: v for k, v in GMS_object.proc_status_all_GMSobjs[failed_Obj.scene_ID].items()
if k != failed_Obj.subsystem}
for ss, statusentry in procstatus_other_ss.items():
for procL in statusentry.keys():
if is_proc_level_lower(failed_Obj.proc_level, procL) and statusentry[procL] == 'finished':
higher_procL = procL
self.logger.debug("Found another subsystem that already reached a higher processing level.")
another_ss_succeeded = True
break
if not another_ss_failed: # applies also to full cubes
DB_T.append_item_to_arrayCol_in_postgreSQLdb(CFG.conn_database, 'jobs',
{'failed_sceneids': failed_Obj.scene_ID}, {'id': CFG.ID})
self.update_progress_failed(failed_Obj)
if not another_ss_succeeded:
self.update_progress_failed(failed_Obj)
else:
self.update_progress_failed(failed_Obj, procL_failed=higher_procL)
return failed_Obj
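In summary, the flags computed above combine as follows (higher_procL is the highest level another subsystem of the same scene already finished):

another_ss_failed  another_ss_succeeded  action
False              False                 append scene ID to failed_sceneids; decrement failed_Obj.proc_level, increment FAILED
False              True                  append scene ID to failed_sceneids; decrement higher_procL instead (fixes issue #50)
True               any                   do nothing - the scene is already counted as FAILED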
......
......@@ -31,6 +31,14 @@ class FmaskError(RuntimeError):
"""An error within the Fmask wrapper of gms_preprocessing."""
######
# AC #
######
class ACNotSupportedError(RuntimeError):
"""An error raised if there is currently no AC supported for the current sensor."""
####################################
# SPECTRAL HOMOGENIZATION EXCEPTIONS
####################################
......@@ -49,5 +57,5 @@ class ClassifierNotAvailableError(RuntimeError):
def __str__(self):
return 'No %s classifier available for predicting %s %s %s from %s %s %s'\
% (self.spechomo_method, self.src_sat, self.src_sen, self.src_LBA,
self.tgt_sat, self.tgt_sen, self.tgt_LBA)
% (self.spechomo_method, self.tgt_sat, self.tgt_sen, self.tgt_LBA,
self.src_sat, self.src_sen, self.src_LBA,)
......@@ -115,11 +115,14 @@ def sorted_nicely(iterable):
return sorted(iterable, key=alphanum_key)
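Example of the natural sort this provides:

sorted(['B1', 'B10', 'B2'])         # -> ['B1', 'B10', 'B2'] (plain lexicographic)
sorted_nicely(['B1', 'B10', 'B2'])  # -> ['B1', 'B2', 'B10'] (numeric-aware)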
def proc_level_already_present(current_lvl, target_lvl):
if current_lvl is None or proc_chain.index(current_lvl) < proc_chain.index(target_lvl):
return False
else: # current_lvl >= target_lvl
return True
def is_proc_level_lower(current_lvl, target_lvl):
# type: (str, str) -> bool
"""Return True if current_lvl is lower than target_lvl.
:param current_lvl: current processing level (to be tested)
:param target_lvl: target processing level (reference)
"""
return current_lvl is None or proc_chain.index(current_lvl) < proc_chain.index(target_lvl)
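For example:

is_proc_level_lower(None, 'L1A')   # -> True (nothing processed yet)
is_proc_level_lower('L1B', 'L2A')  # -> True
is_proc_level_lower('L2C', 'L2A')  # -> False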
def convert_absPathArchive_to_GDALvsiPath(path_archive):
......
......@@ -76,17 +76,28 @@ class path_generator(object):
(CFG.path_procdata_scenes, self.satellite, self.sensor, self.entity_ID)
return os.path.join(*pOrd)
def get_baseN(self):
"""Returns the basename belonging to the given scene."""
items2include = (self.satellite, self.sensor, self.subsystem, self.entity_ID) if self.subsystem else \
(self.satellite, self.sensor, self.entity_ID)
def get_baseN(self, merged_subsystems=False):
"""Returns the basename belonging to the given scene.
:param merged_subsystems: if True, the subsystem is not included in the returned basename
(useful for merged subsystems in L2A+)
"""
if self.subsystem and not merged_subsystems:
items2include = (self.satellite, self.sensor, self.subsystem, self.entity_ID)
else:
items2include = (self.satellite, self.sensor, self.entity_ID)
if self.MGRS_info:
items2include += (self.MGRS_info['tile_ID'],)
return '__'.join(list(items2include))
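Assuming a path_generator instance pgen for a hypothetical Sentinel-2A scene (all values invented), the two modes differ like this:

pgen.get_baseN()                        # -> 'Sentinel-2A__MSI__S2A10__ENTITY123'
pgen.get_baseN(merged_subsystems=True)  # -> 'Sentinel-2A__MSI__ENTITY123'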
def get_path_logfile(self):
"""Returns the path of the logfile belonging to the given scene, e.g. '/path/to/file/file.log'."""
return os.path.join(self.get_path_procdata(), self.get_baseN() + '.log')
def get_path_logfile(self, merged_subsystems=False):
"""Returns the path of the logfile belonging to the given scene, e.g. '/path/to/file/file.log'.
:param merged_subsystems: if True, the subsystem is not included in the returned logfile path
(useful for merged subsystems in L2A+)
"""
return os.path.join(self.get_path_procdata(), self.get_baseN(merged_subsystems=merged_subsystems) + '.log')
def get_local_archive_path_baseN(self):
"""Returns the path of the downloaded raw data archive, e.g. '/path/to/file/file.tar.gz'."""
......
......@@ -361,6 +361,7 @@ class Dataset(object):
@property
def pathGen(self): # TODO keep that in the base class?
# type: () -> PG.path_generator
"""
Returns the path generator object for generating file paths belonging to the GMS object.
"""
......
......@@ -3,6 +3,7 @@
import collections
import copy
import datetime
import functools
import glob
import json
import os
......@@ -11,13 +12,16 @@ import shutil
import sys
import warnings
import logging
from collections import OrderedDict
from itertools import chain
from typing import Iterable, List, Union, TYPE_CHECKING # noqa F401 # flake8 issue
import numpy as np
import spectral
from spectral.io import envi