Commit e8dcce40 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

First approach to pass status of spatial index mediator through until L1B_P.

Former-commit-id: 67cf73f4
Former-commit-id: ca579aff
parent b9d8c7bb
......@@ -208,7 +208,7 @@ class L1B_object(L1A_object):
# set defaults
self._spatRef_available = None
self.spatRef_scene = None # set by self.get_spatial_reference_scene()
self.coreg_info = {}
self._coreg_info = None
self.deshift_results = collections.OrderedDict()
if L1A_obj:
......@@ -489,10 +489,34 @@ class L1B_object(L1A_object):
return ref_band4match, shift_band4match
@property
def coreg_info(self):
    """Dictionary holding all information about the spatial coregistration.

    Lazily initialized: the first access builds a default record with zero
    pixel/map shifts and no reference scene; coregistration code updates the
    cached dictionary in place afterwards.
    """
    if self._coreg_info:
        return self._coreg_info

    # Build the default record once and cache it on the instance.
    self._coreg_info = {
        'corrected_shifts_px': {'x': 0, 'y': 0},
        'corrected_shifts_map': {'x': 0, 'y': 0},
        'original map info': self.meta_odict['map info'],
        'updated map info': None,
        'reference scene ID': None,
        'reference entity ID': None,
        'reference geotransform': None,
        # reference projection must be the own projection in order to avoid overwriting with a wrong EPSG
        'reference projection': self.meta_odict['coordinate system string'],
        'reference extent': {'rows': None, 'cols': None},
        'reference grid': [list(CFG.usecase.spatial_ref_gridx),
                           list(CFG.usecase.spatial_ref_gridy)],
        'success': None
    }
    return self._coreg_info

@coreg_info.setter
def coreg_info(self, val):
    # Allow external code (e.g. restoring a processed object) to replace the record wholesale.
    self._coreg_info = val
def coregister_spatially(self):
if not CFG.job.skip_coreg:
if CFG.job.skip_coreg:
self.logger.warning('Coregistration skipped according to user configuration.')
elif self.coreg_needed and self.spatRef_available:
geoArr_ref = GeoArray(self.spatRef_scene.filePath)
geoArr_shift = GeoArray(self.arr)
......@@ -521,6 +545,7 @@ class L1B_object(L1A_object):
self.coreg_info.update({'reference entity ID': self.spatRef_scene.entity_ID})
if COREG_obj.success:
self.coreg_info['success'] = True
self.logger.info("Calculated map shifts (X,Y): %s / %s"
% (COREG_obj.x_shift_map,
COREG_obj.y_shift_map)) # FIXME direkt in calculate_spatial_shifts loggen
......@@ -537,21 +562,6 @@ class L1B_object(L1A_object):
self.logger.info('Coregistration of scene %s (entity ID %s) skipped because target dataset ID equals '
'reference dataset ID.' % (self.scene_ID, self.entity_ID))
self.coreg_info.update({'corrected_shifts_px': {'x': 0, 'y': 0}})
self.coreg_info.update({'corrected_shifts_map': {'x': 0, 'y': 0}})
self.coreg_info.update({'original map info': self.meta_odict['map info']})
self.coreg_info.update({'updated map info': None})
self.coreg_info.update({'reference scene ID': None})
self.coreg_info.update({'reference entity ID': None})
self.coreg_info.update({'reference geotransform': None})
# reference projection must be the own projection in order to avoid overwriting with a wrong EPSG
self.coreg_info.update({'reference projection': self.meta_odict['coordinate system string']})
self.coreg_info.update({'reference extent': {'rows': None, 'cols': None}})
self.coreg_info.update({'reference grid': [list(CFG.usecase.spatial_ref_gridx),
list(CFG.usecase.spatial_ref_gridy)]})
self.coreg_info.update(
{'success': True if not self.coreg_needed else False}) # False means spatRef not available
def correct_spatial_shifts(self, cliptoextent=True, clipextent=None, clipextent_prj=None, v=False):
# type: (bool, list, any, bool) -> None
"""Corrects the spatial shifts calculated by self.coregister_spatially().
......
......@@ -134,7 +134,7 @@ class Job(object):
self.skip_coreg = False
self.validate_exec_configs()
self.CPUs = CPUs if CPUs is not None else multiprocessing.cpu_count()
self.CPUs = 1#CPUs if CPUs is not None else multiprocessing.cpu_count()
self.allow_subMultiprocessing = allow_subMultiprocessing
self.disable_exception_handler = disable_exception_handler is False
self.log_level = log_level
......
......@@ -21,6 +21,7 @@ class GMSEnvironment(object):
def __init__(self, logger=Logger(__name__)):
self.logger = logger
self.logger.info('Checking system environment...')
self.spatIdxSrvRunning = None
def _check_spatial_index_mediator_server(self):
try:
......@@ -31,10 +32,12 @@ class GMSEnvironment(object):
if not SpatIdxSrv.is_running:
SpatIdxSrv.start()
self.spatIdxSrvRunning = True
except GMSEnvironmentError as e:
self.logger.error(e, exc_info=False)
self.logger.warning('Coregistration will be skipped!')
CFG.job.skip_coreg = True
self.spatIdxSrvRunning = False
def _check_nonpip_packages(self):
"""Check for not pip-installable packages."""
......@@ -56,7 +59,7 @@ class GMSEnvironment(object):
except ImportError:
if gdal.GetDriverByName('HDF4') is None:
msg = "The library 'pyhdf' is missing and the HDF4 driver of GDAL is not available. ASTER data cannot "\
"be proceessed! For Anaconda, run 'conda install --yes -c conda-forge pyhdf' to fix that!"
"be processed! For Anaconda, run 'conda install --yes -c conda-forge pyhdf' to fix that!"
self.logger.warning(MissingNonPipLibraryWarning(msg))
# 'sicor', # pip install git+https://gitext.gfz-potsdam.de/hollstei/sicor.git
......@@ -65,7 +68,7 @@ class GMSEnvironment(object):
import sicor # noqa F401 unused
except ImportError:
msg = "The library 'sicor' has not been installed automatically because installation requires login " \
"credentials. See installation instrucions here: https://gitext.gfz-potsdam.de/hollstei/sicor"
"credentials. See installation instrucions here: https://gitext.gfz-potsdam.de/EnMAP/sicor"
self.logger.warning(MissingNonPipLibraryWarning(msg))
def check_dependencies(self):
......
# -*- coding: utf-8 -*-
from typing import List, Tuple, Union # noqa F401 # flake8 issue
from ..config import GMS_config as CFG
from ..misc import exception_handler as EXC_H
from ..algorithms import L1A_P
......@@ -49,7 +51,7 @@ def L1A_map(dataset_dict): # map (scene-wise parallelization)
@EXC_H.log_uncaught_exceptions
def L1A_map_1(dataset_dict, block_size=None): # map (scene-wise parallelization)
# type: (dict) -> list
# type: (dict) -> List[L1A_P.L1A_object]
L1A_obj = L1A_P.L1A_object(image_type='RSD',
satellite=dataset_dict['satellite'],
......@@ -98,13 +100,16 @@ def L1A_map_3(L1A_obj): # map (scene-wise parallelization)
@EXC_H.log_uncaught_exceptions
def L1B_map(L1A_obj):
def L1B_map(L1A_obj, spatIdxSrvRunning):
# type: (L1A_P.L1A_object) -> L1B_P.L1B_object
"""L1A_obj enthält in Python- (im Gegensatz zur Flink-) Implementierung KEINE ARRAY-DATEN!,
nur die für die ganze Szene gültigen Metadaten"""
L1B_obj = L1B_P.L1B_object(L1A_obj)
L1B_obj.coregister_spatially()
if spatIdxSrvRunning:
L1B_obj.coregister_spatially()
else:
L1B_obj.logger.warning('Coregistration skipped due to unavailable Spatial Index Mediator Server!')
if CFG.job.exec_L1BP[1]:
L1B_obj.to_ENVI()
......@@ -114,7 +119,7 @@ def L1B_map(L1A_obj):
@EXC_H.log_uncaught_exceptions
def L1C_map(L1B_objs):
# type: (list) -> list
# type: (List[L1B_P.L1B_object]) -> List[L1C_P.L1C_object]
"""Atmospheric correction.
NOTE: all subsystems (containing all spatial samplings) belonging to the same scene ID are needed
......@@ -150,7 +155,7 @@ def L1C_map(L1B_objs):
@EXC_H.log_uncaught_exceptions
def L2A_map(L1C_objs, block_size=None):
# type: (list, tuple) -> list
# type: (Union[List[L1C_P.L1C_object], Tuple[L1C_P.L1C_object]]) -> List[L2A_P.L2A_object]
"""Geometric homogenization.
Performs correction of geometric displacements, resampling to target grid of the usecase and merges multiple
......
......@@ -87,8 +87,8 @@ class process_controller(object):
self.usecase = GMS_config.usecase
# check environment
GMSEnv = ENV.GMSEnvironment(self.logger)
GMSEnv.check_dependencies()
self.GMSEnv = ENV.GMSEnvironment(self.logger)
self.GMSEnv.check_dependencies()
# check if process_controller is executed by debugger
# isdebugging = 1 if True in [frame[1].endswith("pydevd.py") for frame in inspect.stack()] else False
......@@ -489,7 +489,7 @@ class process_controller(object):
L1A_DBObjects = self.get_DB_objects('L1B', self.L1A_newObjects, parallLev='scenes')
L1A_Instances = self.L1A_newObjects + L1A_DBObjects # combine newly and earlier processed L1A data
L1B_resObjects = MAP(L1B_map, L1A_Instances)
L1B_resObjects = MAP(L1B_map, [L1A_Instances, self.GMSEnv.spatIdxSrvRunning])
self.L1B_newObjects = [obj for obj in L1B_resObjects if isinstance(obj, L1B_P.L1B_object)]
self.failed_objects += [obj for obj in L1B_resObjects if isinstance(obj, failed_GMS_object) and
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment