Commit 3282d365 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

first version fully operable for Sentinel-2 (including atmospheric correction and cloud masks)

algorithms.gms_object.GMS_object:
- GMS_identifier: is only a getter now, not a singleton
- added property 'MetaObj' / 'meta_odict' -> self-synchronizing
- added LayerBandsAssignment.setter
- revised arr.setter
- revised mask_nodata.setter
- mask_clouds.getter: bugfix
- revised mask_clouds.setter
- added property 'mask_clouds_confidence'
- revised masks.setter
- revised dem.setter
- ac_options: number of CPUs are now passed (temporarily disabled)
- revised ac_errors.setter
- added LBA2bandnames
- attributes2dict: bugfix for not adding meta_odict
- revised from_tiles()
- added from_sensor_subsystems(): a function for merging multiple subsystems of the same sensor (needed for Sentinel-2 and ASTER)
- revised build_combined_masks_array()
- get_subset_obj(): multiple bugfixes
- to_GMS_file(): updated in the context of new property architecture
- delete_previous_proc_level_results: added functionality to delete subsystem products after subsystems have been merged
algorithms.gms_object.failed_GMS_object:
- removed logger creation
algorithms.L1A_P.L1A_object:
- import_metadata(): updated METADATA call
- calc_TOARadRefTemp: added warning when using Sentinel-2 Gains
- calc_corner_positions(): added UTC timezone to datetime object
algorithms.L1B_P.L1B_object:
- _get_reference_image_params_pgSQL(): replaced temp_logger by self.logger
- revised correct_spatial_shifts()
algorithms.L1C_P:
- L1C_object:
    - removed deprecated atm_corr()
- AtmCorr:
    - __init__: added warning
    - revised data property
    - _meta_get_aux_data(): changed lonlat_arr to float16
    - run_atmospheric_correction(): added 'dump' keyword
    - revised _join_results_to_inObjs()
algorithms.L2B_P.L2B_object:
- spectral_homogenization(): bugfix
algorithms.METADATA:
- METADATA:
    - __init__(): is now initialized by GMS_identifier; does not directly run metadata reader
    - added read_meta()
    - Read_Sentinel2A_xmls(): updated setting of Gains
    - refactored to_meta_odict() to to_odict() and revised the function
    - added from_odict()
    - revised filter_layerdependent_metadata()
    - added 'map_odictKeys_objAttrnames'
    - get_LayerBandsAssignment():
        - added functionality to return full LBA for Sentinel-2 and ASTER
        - now properly handles bands removed after L1C and L2B
io.Output_writer:
- mask_to_ENVI_Classification(): bugfix
misc.definition_dicts:
- get_GMS_sensorcode(): added codes for Sentinel-2 full and ASTER full
- get_outFillZeroSaturated(): added bool dtype
- is_dataset_provided_as_fullScene(): added ASTER full and Sentinel-2 full
misc.exception_handler:
- log_uncaught_exceptions(): bugfix for 'disable_exception_handler'
misc.helper_functions:
- cut_GMS_obj_into_blocks(): bugfix
misc.logging.GMS_logger:
- added scene ID to formatter
misc.mgrs_tile:
- replaced deprecated reference
misc.path_generator:
- get_baseN(): bugfix
- added get_path_ac_input_dump()
processing.multiproc:
- MAP: added functionality to disable multiprocessing
processing.pipeline:
- revised L2A_map(): new L2A calls
processing.process_controller:
- add_local_availability(): changed get_LayerBandsAssignment call
- get_DB_objects(): bugfix for copied memory link during GMS object init
- L1C_processing() raises NotImplementedError in tiled mode
- L2A_processing(): added grouping of subsystems
- updated __version__
parent 6fa974e0
...@@ -15,7 +15,7 @@ from . import config ...@@ -15,7 +15,7 @@ from . import config
from .processing.process_controller import process_controller from .processing.process_controller import process_controller
__version__ = '20170115.01' __version__ = '20170120.01'
__author__ = 'Daniel Scheffler' __author__ = 'Daniel Scheffler'
__all__ = ['algorithms', __all__ = ['algorithms',
'io', 'io',
......
...@@ -459,14 +459,13 @@ class L1A_object(GMS_object): ...@@ -459,14 +459,13 @@ class L1A_object(GMS_object):
""" """
self.logger.info('Reading %s %s %s metadata...' %(self.satellite,self.sensor,self.subsystem)) self.logger.info('Reading %s %s %s metadata...' %(self.satellite,self.sensor,self.subsystem))
print('h1') self.MetaObj = META.METADATA(self.GMS_identifier)
self.MetaObj = META.METADATA(self.satellite, self.subsystem, self.scene_ID, self.path_InFilePreprocessor, self.MetaObj.read_meta(self.scene_ID, self.path_InFilePreprocessor,
self.path_MetaPreprocessor, self.logger, self.LayerBandsAssignment) # TODO property (auto-conversion from meta_odict needed) self.path_MetaPreprocessor, self.LayerBandsAssignment)
print('h2')
if v: if v:
self.logger.info("The following metadata have been read:") self.logger.info("The following metadata have been read:")
[self.logger.info("%20s : %-4s" % (key, val)) for key, val in self.MetaObj.get_meta_overview().items()] [self.logger.info("%20s : %-4s" % (key, val)) for key, val in self.MetaObj.overview.items()]
# set some object attributes directly linked to metadata # set some object attributes directly linked to metadata
self.subsystem = self.MetaObj.Subsystem self.subsystem = self.MetaObj.Subsystem
...@@ -546,6 +545,15 @@ class L1A_object(GMS_object): ...@@ -546,6 +545,15 @@ class L1A_object(GMS_object):
elif arr_desc == 'TOA_Ref': elif arr_desc == 'TOA_Ref':
if conv=='Rad': if conv=='Rad':
"""http://s2tbx.telespazio-vega.de/sen2three/html/r2rusage.html?highlight=quantification182
rToa = (float)(DN_L1C_band / QUANTIFICATION_VALUE);
L = (rToa * e0__SOLAR_IRRADIANCE_For_band * cos(Z__Sun_Angles_Grid_Zenith_Values)) / (PI * U__earth_sun_distance_correction_factor);
L = (U__earth_sun_distance_correction_factor * rToa * e0__SOLAR_IRRADIANCE_For_band * cos(
Z__Sun_Angles_Grid_Zenith_Values)) / PI;"""
if re.search('Sentinel-2', self.satellite, re.I):
warnings.warn('Physical gain values unclear for Sentinel-2! This may cause errors when '
'calculating radiance from TOA Reflectance. ESA provides only 12 gain values for '
'13 bands and it not clear for which bands the gains are provided.')
raise NotImplementedError("Conversion TOA_Ref to %s is currently not supported." %conv) raise NotImplementedError("Conversion TOA_Ref to %s is currently not supported." %conv)
else: # conv=='TOA_Ref' else: # conv=='TOA_Ref'
if self.MetaObj.ScaleFactor != CFG.usecase.scale_factor_TOARef: if self.MetaObj.ScaleFactor != CFG.usecase.scale_factor_TOARef:
...@@ -663,7 +671,7 @@ class L1A_object(GMS_object): ...@@ -663,7 +671,7 @@ class L1A_object(GMS_object):
if rasObj.get_projection_type() == 'UTM': if rasObj.get_projection_type() == 'UTM':
self.MetaObj.CornerTieP_UTM = rasObj.get_corner_coordinates('UTM') self.MetaObj.CornerTieP_UTM = rasObj.get_corner_coordinates('UTM')
self.meta_odict = self.MetaObj.to_meta_odict() # important in order to keep geotransform/projection self.meta_odict = self.MetaObj.to_odict() # important in order to keep geotransform/projection
if CFG.job.exec_mode=='Flink': if CFG.job.exec_mode=='Flink':
self.delete_tempFiles() # these files are needed later in Python execution mode self.delete_tempFiles() # these files are needed later in Python execution mode
self.MetaObj.Dataname = previous_dataname # /vsi.. pointing directly to raw data archive (which exists) self.MetaObj.Dataname = previous_dataname # /vsi.. pointing directly to raw data archive (which exists)
...@@ -796,7 +804,7 @@ class L1A_object(GMS_object): ...@@ -796,7 +804,7 @@ class L1A_object(GMS_object):
return {'desc':'mask_clouds','row_start':rS,'row_end':rE,'col_start':cS,'col_end':cE,'data':mask_clouds} return {'desc':'mask_clouds','row_start':rS,'row_end':rE,'col_start':cS,'col_end':cE,'data':mask_clouds}
def calc_corner_positions(self): # TODO revise that function (must return fullSceneCornerLonLat def calc_corner_positions(self):
"""Get true corner positions in the form """Get true corner positions in the form
[UL, UR, LL, LR] as [(ULrow,ULcol),(URrow,URcol),...],[(ULLon,ULLat),(URLon,URLat),..]""" [UL, UR, LL, LR] as [(ULrow,ULcol),(URrow,URcol),...],[(ULLon,ULLat),(URLon,URLat),..]"""
...@@ -814,7 +822,8 @@ class L1A_object(GMS_object): ...@@ -814,7 +822,8 @@ class L1A_object(GMS_object):
assert hasattr(self,'mask_nodata') and self.mask_nodata is not None, "The L1A object needs to have a nodata mask." assert hasattr(self,'mask_nodata') and self.mask_nodata is not None, "The L1A object needs to have a nodata mask."
self.logger.info('Calculating true data corner positions (image and world coordinates)...') self.logger.info('Calculating true data corner positions (image and world coordinates)...')
if re.search('ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31): if re.search('ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31,
tzinfo=datetime.timezone.utc):
self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, algorithm='numpy', self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, algorithm='numpy',
assert_four_corners=False) assert_four_corners=False)
else: else:
......
...@@ -29,6 +29,7 @@ from shapely.geometry import box ...@@ -29,6 +29,7 @@ from shapely.geometry import box
from CoReg_Sat import COREG, DESHIFTER from CoReg_Sat import COREG, DESHIFTER
from py_tools_ds.ptds import GeoArray from py_tools_ds.ptds import GeoArray
from py_tools_ds.ptds.geo.coord_grid import is_coord_grid_equal
from py_tools_ds.ptds.geo.coord_calc import corner_coord_to_minmax from py_tools_ds.ptds.geo.coord_calc import corner_coord_to_minmax
from py_tools_ds.ptds.geo.coord_trafo import reproject_shapelyGeometry, transform_any_prj from py_tools_ds.ptds.geo.coord_trafo import reproject_shapelyGeometry, transform_any_prj
from py_tools_ds.ptds.geo.projection import prj_equal, EPSG2WKT, WKT2EPSG from py_tools_ds.ptds.geo.projection import prj_equal, EPSG2WKT, WKT2EPSG
...@@ -40,7 +41,6 @@ from ..misc import database_tools as DB_T ...@@ -40,7 +41,6 @@ from ..misc import database_tools as DB_T
from ..misc import helper_functions as HLP_F from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG from ..misc import path_generator as PG
from ..misc.SpatialIndexMediator import SpatialIndexMediator from ..misc.SpatialIndexMediator import SpatialIndexMediator
from ..misc.logging import GMS_logger
from ..misc.definition_dicts import get_GMS_sensorcode, get_outFillZeroSaturated from ..misc.definition_dicts import get_GMS_sensorcode, get_outFillZeroSaturated
...@@ -452,8 +452,7 @@ class L1B_object(L1A_object): ...@@ -452,8 +452,7 @@ class L1B_object(L1A_object):
timeout = 30000) timeout = 30000)
conds_descImportance = [dataset_cond, cloudcov_cond, dayrange_cond] conds_descImportance = [dataset_cond, cloudcov_cond, dayrange_cond]
temp_logger = GMS_logger('log__' + self.baseN, self.im2shift_objDict['path_logfile'], append=1) self.logger.info('Querying database in order to find a suitable reference scene for co-registration.')
temp_logger.info('Querying database in order to find a suitable reference scene for co-registration.')
count, filt_overlap_scenes = 0,[] count, filt_overlap_scenes = 0,[]
while not filt_overlap_scenes: while not filt_overlap_scenes:
...@@ -559,7 +558,7 @@ class L1B_object(L1A_object): ...@@ -559,7 +558,7 @@ class L1B_object(L1A_object):
assert query_res != [], 'No entity-ID found for scene number %s' %self.imref_scene_ID assert query_res != [], 'No entity-ID found for scene number %s' %self.imref_scene_ID
self.imref_entity_ID = query_res[0][0] # [('LC81510322013152LGN00',)] self.imref_entity_ID = query_res[0][0] # [('LC81510322013152LGN00',)]
break break
temp_logger.close() self.logger.close()
def _sceneIDList_to_filt_overlap_scenes(self, sceneIDList, min_overlap): def _sceneIDList_to_filt_overlap_scenes(self, sceneIDList, min_overlap):
...@@ -715,30 +714,39 @@ class L1B_object(L1A_object): ...@@ -715,30 +714,39 @@ class L1B_object(L1A_object):
if not hasattr(self, 'masks') or self.masks is None: if not hasattr(self, 'masks') or self.masks is None:
self.build_combined_masks_array() # creates self.masks and self.masks_meta self.build_combined_masks_array() # creates self.masks and self.masks_meta
for attrname in ['arr', 'masks']: # exclude self.mask_nodata, self.mask_clouds from warping
del self.mask_nodata, self.mask_clouds
attributes2deshift = [attrname for attrname in ['arr', 'masks', 'dem', 'ac_errors', 'mask_clouds_confidence']
if getattr(self,'_%s'%attrname) is not None]
for attrname in attributes2deshift:
geoArr = getattr(self, attrname)
# do some logging
if self.coreg_needed: if self.coreg_needed:
if self.coreg_info['success']: if self.coreg_info['success']:
self.logger.info("Correcting spatial shifts for attribute '%s'..." %attrname) self.logger.info("Correcting spatial shifts for attribute '%s'..." %attrname)
elif cliptoextent: elif cliptoextent and \
is_coord_grid_equal(geoArr.gt, CFG.usecase.spatial_ref_gridx, CFG.usecase.spatial_ref_gridy):
self.logger.info("Attribute '%s' has only been clipped to it's extent because no valid " self.logger.info("Attribute '%s' has only been clipped to it's extent because no valid "
"shifts have been detected and the pixel grid equals the target grid." % attrname) "shifts have been detected and the pixel grid equals the target grid." % attrname)
elif self.resamp_needed:
self.logger.info("Resampling attribute '%s' to target grid..." % attrname)
elif self.resamp_needed: elif self.resamp_needed:
self.logger.info("Resampling attribute '%s' to target grid..." % attrname) self.logger.info("Resampling attribute '%s' to target grid..." % attrname)
# correct shifts # correct shifts
geoArr = getattr(self, attrname)
DS = DESHIFTER(geoArr, self.coreg_info, DS = DESHIFTER(geoArr, self.coreg_info,
target_xyGrid = [CFG.usecase.spatial_ref_gridx, CFG.usecase.spatial_ref_gridy], target_xyGrid = [CFG.usecase.spatial_ref_gridx, CFG.usecase.spatial_ref_gridy],
cliptoextent = cliptoextent, cliptoextent = cliptoextent,
clipextent = mapBounds, clipextent = mapBounds,
align_grids = True, align_grids = True,
resamp_alg = 'cubic' if attrname=='arr' else 'nearest', resamp_alg = 'nearest' if attrname=='masks' else 'cubic',
CPUs = None if CFG.job.allow_subMultiprocessing else 1, CPUs = None if CFG.job.allow_subMultiprocessing else 1,
progress = True if v else False, progress = True if v else False,
q = True, q = True,
v = v) v = v)
DS.correct_shifts() DS.correct_shifts()
setattr(self,attrname, DS.arr_shifted)
# update coreg_info # update coreg_info
if attrname=='arr': if attrname=='arr':
...@@ -752,8 +760,6 @@ class L1B_object(L1A_object): ...@@ -752,8 +760,6 @@ class L1B_object(L1A_object):
self.meta_odict['coordinate system string'] = EPSG2WKT(WKT2EPSG(DS.updated_projection)) self.meta_odict['coordinate system string'] = EPSG2WKT(WKT2EPSG(DS.updated_projection))
self.shape_fullArr = DS.arr_shifted.shape self.shape_fullArr = DS.arr_shifted.shape
self.meta_odict['lines'], self.meta_odict['samples'] = DS.arr_shifted.shape[:2] self.meta_odict['lines'], self.meta_odict['samples'] = DS.arr_shifted.shape[:2]
if DS.arr_shifted.ndim == 2:
self.meta_odict['bands'] = DS.arr_shifted.shape[2]
else: else:
self.masks_meta['map info'] = DS.updated_map_info self.masks_meta['map info'] = DS.updated_map_info
self.masks_meta['coordinate system string'] = EPSG2WKT(WKT2EPSG(DS.updated_projection)) self.masks_meta['coordinate system string'] = EPSG2WKT(WKT2EPSG(DS.updated_projection))
...@@ -761,9 +767,17 @@ class L1B_object(L1A_object): ...@@ -761,9 +767,17 @@ class L1B_object(L1A_object):
# NOTE: mask_nodata and mask_clouds are updated later by L2A_map mapper function (module pipeline) # NOTE: mask_nodata and mask_clouds are updated later by L2A_map mapper function (module pipeline)
# update the GeoArray instance without loosing its inherent metadata (nodata, ...)
geoArr.arr, geoArr.gt, geoArr.prj = DS.GeoArray_shifted.arr, DS.GeoArray_shifted.gt, DS.GeoArray_shifted.prj
#setattr(self,attrname, DS.GeoArray_shifted) # NOTE: don't set array earlier because setter will also
# # update arr.gt/.prj/.nodata from meta_odict
self.resamp_needed = False self.resamp_needed = False
self.coreg_needed = False self.coreg_needed = False
# recreate self.masks_nodata and self.mask_clouds from self.masks
self.mask_nodata = self.mask_nodata
self.mask_clouds = self.mask_clouds # FIXME move functionality of self.masks only to output writer and remove self.masks completely
......
...@@ -24,10 +24,7 @@ ...@@ -24,10 +24,7 @@
import warnings import warnings
import re import re
import logging import logging
try: import dill
from StringIO import StringIO # Python 2
except ImportError:
from io import StringIO # Python 3
import numpy as np import numpy as np
try: try:
...@@ -35,11 +32,13 @@ try: ...@@ -35,11 +32,13 @@ try:
except ImportError: except ImportError:
import osr import osr
from py_tools_ds.ptds import GeoArray
from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform
from ..config import GMS_config as CFG from ..config import GMS_config as CFG
from . import GEOPROCESSING as GEOP from . import GEOPROCESSING as GEOP
from .L1B_P import L1B_object from .L1B_P import L1B_object
from .METADATA import get_LayerBandsAssignment
from ..misc.definition_dicts import get_outFillZeroSaturated from ..misc.definition_dicts import get_outFillZeroSaturated
from S2SCAPEM.s2_ac import AC_GMS from S2SCAPEM.s2_ac import AC_GMS
...@@ -213,18 +212,6 @@ class L1C_object(L1B_object): ...@@ -213,18 +212,6 @@ class L1C_object(L1B_object):
self._RAA_arr = RAA_arr self._RAA_arr = RAA_arr
def atm_corr(self, subset=None):
"""Performs an atmospheric correction and returns atmospherically corrected reflectance data."""
subset = subset if subset else [self.arr_shape, self.arr_pos]
assert subset[0] in ['cube', 'block'], "'%s' subset is not supported." % subset[0]
self.log_for_fullArr_or_firstTile('Dummy atmospheric correction started.',subset) # FIXME
#SRF_dict = INP_R.SRF_reader(SRF_fold,RSD_md_L1B)
self.arr = self.arr # FIXME implement atmospheric correction
# delete array data that is not needed anymore
self.delete_ac_input_arrays()
def delete_ac_input_arrays(self): def delete_ac_input_arrays(self):
self.VZA_arr = None # not needed anymore self.VZA_arr = None # not needed anymore
self.SZA_arr = None # not needed anymore self.SZA_arr = None # not needed anymore
...@@ -269,16 +256,16 @@ class AtmCorr(object): ...@@ -269,16 +256,16 @@ class AtmCorr(object):
# append AtmCorr object to input L1C objects # append AtmCorr object to input L1C objects
#[setattr(L1C_obj, 'AtmCorr', self) for L1C_obj in self.inObjs] # too big for serialization #[setattr(L1C_obj, 'AtmCorr', self) for L1C_obj in self.inObjs] # too big for serialization
if not re.search('Sentinel-2', self.inObjs[0].satellite, re.I):
warnings.warn('Calculation of acquisition geometry arrays is currently only validated for Sentinel-2!')
# validation possible by comparing S2 angles provided by ESA with own angles
@property @property
def logger(self): def logger(self):
if self._logger and self._logger.handlers[:]: if self._logger and self._logger.handlers[:]:
return self._logger return self._logger
else: else:
## create a streamlogger
#self._logger = GMS_logger('AtmCorr_logger__' + self.inObjs[0].baseN, path_logfile=None, append=1)
#return self._logger
if len(self.inObjs)==1: if len(self.inObjs)==1:
# just use the logger of the inObj # just use the logger of the inObj
logger_atmCorr = self.inObjs[0].logger logger_atmCorr = self.inObjs[0].logger
...@@ -354,16 +341,27 @@ class AtmCorr(object): ...@@ -354,16 +341,27 @@ class AtmCorr(object):
""" """
if not self._data: if not self._data:
data_dict = {}
for inObj in self.inObjs: for inObj in self.inObjs:
for bandN, bandIdx in inObj.arr.bandnames.items(): for bandN, bandIdx in inObj.arr.bandnames.items():
if bandN not in self._data: if bandN not in data_dict:
arr2pass = inObj.arr[:,:,bandIdx].astype(np.float32) # conversion to np.float16 will convert -9999 to -10000 arr2pass = inObj.arr[:,:,bandIdx].astype(np.float32) # conversion to np.float16 will convert -9999 to -10000
arr2pass[arr2pass==inObj.arr.nodata] = np.nan # set nodata values to np.nan arr2pass[arr2pass==inObj.arr.nodata] = np.nan # set nodata values to np.nan
self._data[bandN] = (arr2pass/inObj.meta_odict['ScaleFactor']).astype(np.float16) data_dict[bandN] = (arr2pass/inObj.meta_odict['ScaleFactor']).astype(np.float16)
else: else:
inObj.logger.warning("Band '%s' cannot be included into atmospheric correction because it " inObj.logger.warning("Band '%s' cannot be included into atmospheric correction because it "
"exists multiple times." %bandN) "exists multiple times." %bandN)
# validate: data must have all bands needed for AC
full_LBA = get_LayerBandsAssignment(self.inObjs[0].GMS_identifier, return_fullLBA=True)
all_bNs_AC = ['B%s'%i if len(i)==2 else 'B0%s'%i for i in full_LBA]
if not all([bN in list(data_dict.keys()) for bN in all_bNs_AC]):
raise RuntimeError('Atmospheric correction did not receive all the needed bands. \n\tExpected: %s;\n\t'
'Received: %s' %(str(all_bNs_AC), str(list(sorted(data_dict.keys())))))
self._data = data_dict
return self._data return self._data
...@@ -572,7 +570,7 @@ class AtmCorr(object): ...@@ -572,7 +570,7 @@ class AtmCorr(object):
viewing_zenith = {} viewing_zenith = {}
for inObj in self.inObjs: for inObj in self.inObjs:
for bandN, bandIdx in inObj.arr.bandnames.items(): # FIXME target keys = range(12) for bandN, bandIdx in inObj.arr.bandnames.items():
if bandN not in viewing_zenith: if bandN not in viewing_zenith:
arr2pass = inObj.VZA_arr[:, :, bandIdx] if inObj.VZA_arr.ndim == 3 else inObj.VZA_arr arr2pass = inObj.VZA_arr[:, :, bandIdx] if inObj.VZA_arr.ndim == 3 else inObj.VZA_arr
viewing_zenith[bandN] = arr2pass.astype(np.float16) viewing_zenith[bandN] = arr2pass.astype(np.float16)
...@@ -589,7 +587,7 @@ class AtmCorr(object): ...@@ -589,7 +587,7 @@ class AtmCorr(object):
viewing_azimuth = {} viewing_azimuth = {}
for inObj in self.inObjs: for inObj in self.inObjs:
for bandN, bandIdx in inObj.arr.bandnames.items(): # FIXME target keys = range(12) for bandN, bandIdx in inObj.arr.bandnames.items():
if bandN not in viewing_azimuth: if bandN not in viewing_azimuth:
arr2pass = inObj.VAA_arr[:, :, bandIdx] if inObj.VAA_arr.ndim==3 else inObj.VAA_arr arr2pass = inObj.VAA_arr[:, :, bandIdx] if inObj.VAA_arr.ndim==3 else inObj.VAA_arr
viewing_azimuth[bandN] = arr2pass.astype(np.float16) viewing_azimuth[bandN] = arr2pass.astype(np.float16)
...@@ -607,7 +605,7 @@ class AtmCorr(object): ...@@ -607,7 +605,7 @@ class AtmCorr(object):
relative_viewing_azimuth = {} relative_viewing_azimuth = {}
for inObj in self.inObjs: for inObj in self.inObjs:
for bandN, bandIdx in inObj.arr.bandnames.items(): # FIXME target keys = range(12) for bandN, bandIdx in inObj.arr.bandnames.items():
if bandN not in relative_viewing_azimuth: if bandN not in relative_viewing_azimuth:
arr2pass = inObj.RAA_arr[:, :, bandIdx] if inObj.RAA_arr.ndim == 3 else inObj.RAA_arr arr2pass = inObj.RAA_arr[:, :, bandIdx] if inObj.RAA_arr.ndim == 3 else inObj.RAA_arr
relative_viewing_azimuth[bandN] = arr2pass.astype(np.float16) relative_viewing_azimuth[bandN] = arr2pass.astype(np.float16)
...@@ -625,8 +623,9 @@ class AtmCorr(object): ...@@ -625,8 +623,9 @@ class AtmCorr(object):
aux_data = dict( aux_data = dict(
# set lons and lats (a 2D array for all bands is enough (different band resolutions dont matter)) # set lons and lats (a 2D array for all bands is enough (different band resolutions dont matter))
lons = self.inObjs[0].lonlat_arr[::10,::10,0].astype(np.float64), # 2D array of lon values: 0° - 360° lons = self.inObjs[0].lonlat_arr[::10,::10,0].astype(np.float16), # 2D array of lon values: 0° - 360°
lats = self.inObjs[0].lonlat_arr[::10,::10,1].astype(np.float64) # 2D array of lat values: -90° - 90° lats = self.inObjs[0].lonlat_arr[::10,::10,1].astype(np.float16) # 2D array of lat values: -90° - 90°
# FIXME correct to reduce resolution here by factor 10?
) )
return aux_data return aux_data
...@@ -642,7 +641,7 @@ class AtmCorr(object): ...@@ -642,7 +641,7 @@ class AtmCorr(object):
# in case of Sentinel-2 the 20m DEM must be passed # in case of Sentinel-2 the 20m DEM must be passed
inObj4dem = [obj for obj in self.inObjs if obj.arr.xgsd==20] inObj4dem = [obj for obj in self.inObjs if obj.arr.xgsd==20]
assert inObj4dem, self.logger.warning('Sentinel-2 20m subsystem could not be found. DEM passed to ' assert inObj4dem, self.logger.warning('Sentinel-2 20m subsystem could not be found. DEM passed to '
'atmopsheric correction might have wrong resolution.') 'atmospheric correction might have wrong resolution.')
inObj4dem = inObj4dem[0] inObj4dem = inObj4dem[0]
else: else:
inObj4dem = self.inObjs[0] inObj4dem = self.inObjs[0]
...@@ -656,12 +655,13 @@ class AtmCorr(object): ...@@ -656,12 +655,13 @@ class AtmCorr(object):
return None # TODO return None # TODO
def run_atmospheric_correction(self): def run_atmospheric_correction(self, dump_ac_input=False):
# type: () -> list # type: (bool) -> list
"""Collects all input data for atmospheric correction, runs the AC and returns the corrected L1C objects """Collects all input data for atmospheric correction, runs the AC and returns the corrected L1C objects
containing surface reflectance. containing surface reflectance.
:return: list of L1C_object instances containing atmospherically corrected data :param dump_ac_input: allows to dump the inputs of AC to the scene's processing folder in case AC fails
:return: list of L1C_object instances containing atmospherically corrected data
""" """
# collect input args/kwargs for AC # collect input args/kwargs for AC
...@@ -688,7 +688,18 @@ class AtmCorr(object): ...@@ -688,7 +688,18 @@ class AtmCorr(object):
# run AC # run AC
self.logger.info('Atmospheric correction started.') self.logger.info('Atmospheric correction started.')
self.results = AC_GMS(RSImage(**rs_data), options, logger=self.logger, script=False) try:
self.results = AC_GMS(RSImage(**rs_data), options, logger=self.logger, script=False)
except:
# serialialize AC inputs
if dump_ac_input:
path_dump = self.inObjs[0].pathGen.get_path_ac_input_dump()
with open(path_dump, 'wb') as outF:
dill.dump(self.ac_input, outF)
self.logger.error('An error occurred during atmospheric correction. Inputs have been dumped to %s.'
%path_dump)
raise
# get processing infos # get processing infos
self.proc_info = self.ac_input['options']['processing'] self.proc_info = self.ac_input['options']['processing']
...@@ -707,13 +718,22 @@ class AtmCorr(object): ...@@ -707,13 +718,22 @@ class AtmCorr(object):
if self.results.data_ac is not None: if self.results.data_ac is not None:
self.logger.info('Joining results of atmospheric correction to input GMS objects.') self.logger.info('Joining results of atmospheric correction to input GMS objects.')
for inObj in self.inObjs:
for inObj in self.inObjs:
#assert isinstance(inObj.arr, (GeoArray, np.ndarray)), print(type(inObj.arr)) #assert isinstance(inObj.arr, (GeoArray, np.ndarray)), print(type(inObj.arr))
nodata = self.results.nodata[inObj.arr.xgsd] # 2D mask with True outside of image coverage nodata = self.results.nodata[inObj.arr.xgsd] # 2D mask with True outside of image coverage
ac_bandNs = [bandN for bandN in self.ac_input['options']['AC']['bands'] if bandN in inObj.arr.bandnames] ac_bandNs = [bandN for bandN in self.ac_input['options']['AC']['bands'] if bandN in inObj.arr.bandnames]
out_LBA = [bN.split('B0')[1] if bN.startswith('B0') else bN.split('B')[1] for bN in ac_bandNs] out_LBA = [bN.split('B0')[1] if bN.startswith('B0') else bN.split('B')[1] for bN in ac_bandNs]
# update metadata
inObj.arr_desc = 'BOA_Ref'
inObj.MetaObj.bands = len(self.results.data_ac)
inObj.MetaObj.PhysUnit = 'BOA_Reflectance in [0-%d]' % CFG.usecase.scale_factor_BOARef
inObj.MetaObj.LayerBandsAssignment = out_LBA
inObj.MetaObj.filter_layerdependent_metadata()
inObj.meta_odict = inObj.MetaObj.to_odict()
# join SURFACE REFLECTANCE as 3D int16 array, scaled to scale factor from config # join SURFACE REFLECTANCE as 3D int16 array, scaled to scale factor from config
surf_refl = np.dstack((self.results.data_ac[bandN] for bandN in ac_bandNs)) surf_refl = np.dstack((self.results.data_ac[bandN] for bandN in ac_bandNs))
surf_refl *= CFG.usecase.scale_factor_BOARef # scale using scale factor (output is float16) surf_refl *= CFG.usecase.scale_factor_BOARef # scale using scale factor (output is float16)
...@@ -729,36 +749,40 @@ class AtmCorr(object): ...@@ -729,36 +749,40 @@ class AtmCorr(object):
out_dtype = np.int8 if CFG.usecase.scale_factor_errors_ac<=255 else np.int16 out_dtype = np.int8 if CFG.usecase.scale_factor_errors_ac<=255 else np.int16
ac_errors[nodata] = get_outFillZeroSaturated(out_dtype)[0] ac_errors[nodata] = get_outFillZeroSaturated(out_dtype)[0]
ac_errors = ac_errors.astype(out_dtype) ac_errors = ac_errors.astype(out_dtype)
inObj.ac_errors = ac_errors # setter generates a GeoArray inObj.ac_errors = ac_errors # setter generates a GeoArray with the same bandnames like inObj.arr
# TODO how to handle nans? # TODO how to handle nans?
# join CLOUD MASK as 2D uint8 array # join CLOUD MASK as 2D uint8 array
# NOTE: mask_clouds has also methods 'export_mask_rgb()', 'export_confidence_to_jpeg2000()', ... # NOTE: mask_clouds has also methods 'export_mask_rgb()', 'export_confidence_to_jpeg2000()', ...
mask_clouds_ac = self.results.mask_clouds.mask_array # uint8 2D array
cfd_arr = self.results.mask_clouds.mask_confidence_array # float32 2D array, scaled [0-1, nodata 255]
cfd_arr[cfd_arr == self.ac_input['options']['cld_mask']['nodata_value_mask']] = -1
cfd_arr = (cfd_arr * CFG.usecase.scale_factor_BOARef).astype(np.int16)
cfd_arr[cfd_arr == -CFG.usecase.scale_factor_BOARef] = get_outFillZeroSaturated(cfd_arr.dtype)[0]
joined = False joined = False
for inObj in self.inObjs: for inObj in self.inObjs:
mask_clouds_ac = self.results.mask_clouds.mask_array # uint8 2D array # delete all previous cloud masks
cfd_arr = self.results.mask_clouds.mask_confidence_array # float32 2D array, scaled [0-1, nodata 255] del inObj.mask_clouds
cfd_arr[cfd_arr == self.ac_input['options']['cld_mask']['nodata_value_mask']] = -1
cfd_arr = (cfd_arr*CFG.usecase.scale_factor_BOARef).astype(np.int16)
cfd_arr[cfd_arr == -CFG.usecase.scale_factor_BOARef] = get_outFillZeroSaturated(cfd_arr.dtype)[0]
# append mask_clouds only to the input GMS object with the same dimensions # append mask_clouds only to the input GMS object with the same dimensions
if inObj.arr.shape[:2] == mask_clouds_ac.shape: if inObj.arr.shape[:2] == mask_clouds_ac.shape:
inObj.mask_clouds = mask_clouds_ac inObj.mask_clouds = mask_clouds_ac
inObj.mask_clouds.legend = self.results.mask_clouds.mask_legend # dict(value=string, string=value)) inObj.mask_clouds.legend = self.results.mask_clouds.mask_legend # dict(value=string, string=value)) # # TODO compatible to own legend dict?
inObj.mask_confidence_array = cfd_arr inObj.mask_clouds.nodata = self.ac_input['options']['cld_mask']['nodata_value_mask']
# TODO handle nodata value
inObj.mask_clouds_confidence = GeoArray(cfd_arr, inObj.arr.gt, inObj.arr.prj,
nodata=get_outFillZeroSaturated(cfd_arr.dtype)[0])
joined=True joined=True
# update masks (always do that because masks can also only contain one layer)
inObj.build_combined_masks_array()
if not joined: if not joined: