Commit 3282d365 authored by Daniel Scheffler

first version fully operable for Sentinel-2 (including atmospheric correction and cloud masks)

algorithms.gms_object.GMS_object:
- GMS_identifier: is only a getter now, not a singleton
- added property 'MetaObj' / 'meta_odict' -> self-synchronizing
- added LayerBandsAssignment.setter
- revised arr.setter
- revised mask_nodata.setter
- mask_clouds.getter: bugfix
- revised mask_clouds.setter
- added property 'mask_clouds_confidence'
- revised masks.setter
- revised dem.setter
- ac_options: number of CPUs is now passed (temporarily disabled)
- revised ac_errors.setter
- added LBA2bandnames
- attributes2dict: bugfix for not adding meta_odict
- revised from_tiles()
- added from_sensor_subsystems(): a function for merging multiple subsystems of the same sensor (needed for Sentinel-2 and ASTER; see the usage sketch below this list)
- revised build_combined_masks_array()
- get_subset_obj(): multiple bugfixes
- to_GMS_file(): updated in the context of new property architecture
- delete_previous_proc_level_results: added functionality to delete subsystem products after subsystems have been merged
algorithms.gms_object.failed_GMS_object:
- removed logger creation
algorithms.L1A_P.L1A_object:
- import_metadata(): updated METADATA call
- calc_TOARadRefTemp: added warning when using Sentinel-2 Gains
- calc_corner_positions(): added UTC timezone to datetime object
algorithms.L1B_P.L1B_object:
- _get_reference_image_params_pgSQL(): replaced temp_logger by self.logger
- revised correct_spatial_shifts()
algorithms.L1C_P:
- L1C_object:
    - removed deprecated atm_corr()
- AtmCorr:
    - __init__: added warning
    - revised data property
    - _meta_get_aux_data(): changed lonlat_arr to float16
    - run_atmospheric_correction(): added 'dump' keyword
    - revised _join_results_to_inObjs()
algorithms.L2B_P.L2B_object:
- spectral_homogenization(): bugfix
algorithms.METADATA:
- METADATA:
    - __init__(): now initialized from a GMS_identifier; no longer runs the metadata reader directly
    - added read_meta()
    - Read_Sentinel2A_xmls(): updated setting of Gains
    - refactored to_meta_odict() to to_odict() and revised the function
    - added from_odict()
    - revised filter_layerdependent_metadata()
    - added 'map_odictKeys_objAttrnames'
    - get_LayerBandsAssignment():
        - added functionality to return full LBA for Sentinel-2 and ASTER
        - now properly handles bands removed after L1C and L2B
io.Output_writer:
- mask_to_ENVI_Classification(): bugfix
misc.definition_dicts:
- get_GMS_sensorcode(): added codes for Sentinel-2 full and ASTER full
- get_outFillZeroSaturated(): added bool dtype
- is_dataset_provided_as_fullScene(): added ASTER full and Sentinel-2 full
misc.exception_handler:
- log_uncaught_exceptions(): bugfix for 'disable_exception_handler'
misc.helper_functions:
- cut_GMS_obj_into_blocks(): bugfix
misc.logging.GMS_logger:
- added scene ID to formatter
misc.mgrs_tile:
- replaced deprecated reference
misc.path_generator:
- get_baseN(): bugfix
- added get_path_ac_input_dump()
processing.multiproc:
- MAP: added functionality to disable multiprocessing
processing.pipeline:
- revised L2A_map(): new L2A calls
processing.process_controller:
- add_local_availability(): changed get_LayerBandsAssignment call
- get_DB_objects(): bugfix for copied memory link during GMS object init
- L1C_processing(): now raises NotImplementedError in tiled mode
- L2A_processing(): added grouping of subsystems
- updated __version__
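
For orientation, a minimal usage sketch of the new subsystem-merging path described above (the import path and the call signature of from_sensor_subsystems() are not shown in this commit, so the code below is an assumption for illustration only, not the actual API):

    # hypothetical import path; from_sensor_subsystems() merges the per-subsystem GMS objects
    # of one scene (e.g. the Sentinel-2 10m/20m/60m subsystems) into a single full-sensor object
    from algorithms.gms_object import GMS_object

    def merge_scene_subsystems(subsystem_objs):
        # subsystem_objs: list of GMS_object instances belonging to the same scene
        return GMS_object().from_sensor_subsystems(subsystem_objs)
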
parent 6fa974e0
......@@ -15,7 +15,7 @@ from . import config
from .processing.process_controller import process_controller
__version__ = '20170115.01'
__version__ = '20170120.01'
__author__ = 'Daniel Scheffler'
__all__ = ['algorithms',
'io',
......
......@@ -459,14 +459,13 @@ class L1A_object(GMS_object):
"""
self.logger.info('Reading %s %s %s metadata...' %(self.satellite,self.sensor,self.subsystem))
print('h1')
self.MetaObj = META.METADATA(self.satellite, self.subsystem, self.scene_ID, self.path_InFilePreprocessor,
self.path_MetaPreprocessor, self.logger, self.LayerBandsAssignment) # TODO property (auto-conversion from meta_odict needed)
print('h2')
self.MetaObj = META.METADATA(self.GMS_identifier)
self.MetaObj.read_meta(self.scene_ID, self.path_InFilePreprocessor,
self.path_MetaPreprocessor, self.LayerBandsAssignment)
if v:
self.logger.info("The following metadata have been read:")
[self.logger.info("%20s : %-4s" % (key, val)) for key, val in self.MetaObj.get_meta_overview().items()]
[self.logger.info("%20s : %-4s" % (key, val)) for key, val in self.MetaObj.overview.items()]
# set some object attributes directly linked to metadata
self.subsystem = self.MetaObj.Subsystem
......@@ -546,6 +545,15 @@ class L1A_object(GMS_object):
elif arr_desc == 'TOA_Ref':
if conv=='Rad':
"""http://s2tbx.telespazio-vega.de/sen2three/html/r2rusage.html?highlight=quantification182
rToa = (float)(DN_L1C_band / QUANTIFICATION_VALUE);
L = (rToa * e0__SOLAR_IRRADIANCE_For_band * cos(Z__Sun_Angles_Grid_Zenith_Values)) / (PI * U__earth_sun_distance_correction_factor);
L = (U__earth_sun_distance_correction_factor * rToa * e0__SOLAR_IRRADIANCE_For_band * cos(
Z__Sun_Angles_Grid_Zenith_Values)) / PI;"""
if re.search('Sentinel-2', self.satellite, re.I):
warnings.warn('Physical gain values unclear for Sentinel-2! This may cause errors when '
'calculating radiance from TOA Reflectance. ESA provides only 12 gain values for '
'13 bands and it is not clear for which bands the gains are provided.')
raise NotImplementedError("Conversion TOA_Ref to %s is currently not supported." %conv)
else: # conv=='TOA_Ref'
if self.MetaObj.ScaleFactor != CFG.usecase.scale_factor_TOARef:
......@@ -663,7 +671,7 @@ class L1A_object(GMS_object):
if rasObj.get_projection_type() == 'UTM':
self.MetaObj.CornerTieP_UTM = rasObj.get_corner_coordinates('UTM')
self.meta_odict = self.MetaObj.to_meta_odict() # important in order to keep geotransform/projection
self.meta_odict = self.MetaObj.to_odict() # important in order to keep geotransform/projection
if CFG.job.exec_mode=='Flink':
self.delete_tempFiles() # these files are needed later in Python execution mode
self.MetaObj.Dataname = previous_dataname # /vsi.. pointing directly to raw data archive (which exists)
......@@ -796,7 +804,7 @@ class L1A_object(GMS_object):
return {'desc':'mask_clouds','row_start':rS,'row_end':rE,'col_start':cS,'col_end':cE,'data':mask_clouds}
def calc_corner_positions(self): # TODO revise that function (must return fullSceneCornerLonLat
def calc_corner_positions(self):
"""Get true corner positions in the form
[UL, UR, LL, LR] as [(ULrow,ULcol),(URrow,URcol),...],[(ULLon,ULLat),(URLon,URLat),..]"""
......@@ -814,7 +822,8 @@ class L1A_object(GMS_object):
assert hasattr(self,'mask_nodata') and self.mask_nodata is not None, "The L1A object needs to have a nodata mask."
self.logger.info('Calculating true data corner positions (image and world coordinates)...')
if re.search('ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31):
if re.search('ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31,
tzinfo=datetime.timezone.utc):
self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, algorithm='numpy',
assert_four_corners=False)
else:
......
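
The UTC tzinfo added to the datetime literal above is what keeps this comparison valid: calc_corner_positions() now works with a timezone-aware self.acq_datetime, and Python 3 refuses to compare aware and naive datetimes. A minimal standalone illustration (dates chosen arbitrarily):

    import datetime

    acq_datetime = datetime.datetime(2013, 6, 1, tzinfo=datetime.timezone.utc)   # aware acquisition date
    slc_off_date = datetime.datetime(2003, 5, 31, tzinfo=datetime.timezone.utc)  # ETM+ SLC-off reference date
    print(acq_datetime > slc_off_date)  # True; an aware vs. naive comparison would raise TypeError
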
......@@ -29,6 +29,7 @@ from shapely.geometry import box
from CoReg_Sat import COREG, DESHIFTER
from py_tools_ds.ptds import GeoArray
from py_tools_ds.ptds.geo.coord_grid import is_coord_grid_equal
from py_tools_ds.ptds.geo.coord_calc import corner_coord_to_minmax
from py_tools_ds.ptds.geo.coord_trafo import reproject_shapelyGeometry, transform_any_prj
from py_tools_ds.ptds.geo.projection import prj_equal, EPSG2WKT, WKT2EPSG
......@@ -40,7 +41,6 @@ from ..misc import database_tools as DB_T
from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG
from ..misc.SpatialIndexMediator import SpatialIndexMediator
from ..misc.logging import GMS_logger
from ..misc.definition_dicts import get_GMS_sensorcode, get_outFillZeroSaturated
......@@ -452,8 +452,7 @@ class L1B_object(L1A_object):
timeout = 30000)
conds_descImportance = [dataset_cond, cloudcov_cond, dayrange_cond]
temp_logger = GMS_logger('log__' + self.baseN, self.im2shift_objDict['path_logfile'], append=1)
temp_logger.info('Querying database in order to find a suitable reference scene for co-registration.')
self.logger.info('Querying database in order to find a suitable reference scene for co-registration.')
count, filt_overlap_scenes = 0,[]
while not filt_overlap_scenes:
......@@ -559,7 +558,7 @@ class L1B_object(L1A_object):
assert query_res != [], 'No entity-ID found for scene number %s' %self.imref_scene_ID
self.imref_entity_ID = query_res[0][0] # [('LC81510322013152LGN00',)]
break
temp_logger.close()
self.logger.close()
def _sceneIDList_to_filt_overlap_scenes(self, sceneIDList, min_overlap):
......@@ -715,30 +714,39 @@ class L1B_object(L1A_object):
if not hasattr(self, 'masks') or self.masks is None:
self.build_combined_masks_array() # creates self.masks and self.masks_meta
for attrname in ['arr', 'masks']:
# exclude self.mask_nodata, self.mask_clouds from warping
del self.mask_nodata, self.mask_clouds
attributes2deshift = [attrname for attrname in ['arr', 'masks', 'dem', 'ac_errors', 'mask_clouds_confidence']
if getattr(self,'_%s'%attrname) is not None]
for attrname in attributes2deshift:
geoArr = getattr(self, attrname)
# do some logging
if self.coreg_needed:
if self.coreg_info['success']:
self.logger.info("Correcting spatial shifts for attribute '%s'..." %attrname)
elif cliptoextent:
elif cliptoextent and \
is_coord_grid_equal(geoArr.gt, CFG.usecase.spatial_ref_gridx, CFG.usecase.spatial_ref_gridy):
self.logger.info("Attribute '%s' has only been clipped to it's extent because no valid "
"shifts have been detected and the pixel grid equals the target grid." % attrname)
elif self.resamp_needed:
self.logger.info("Resampling attribute '%s' to target grid..." % attrname)
elif self.resamp_needed:
self.logger.info("Resampling attribute '%s' to target grid..." % attrname)
# correct shifts
geoArr = getattr(self, attrname)
DS = DESHIFTER(geoArr, self.coreg_info,
target_xyGrid = [CFG.usecase.spatial_ref_gridx, CFG.usecase.spatial_ref_gridy],
cliptoextent = cliptoextent,
clipextent = mapBounds,
align_grids = True,
resamp_alg = 'cubic' if attrname=='arr' else 'nearest',
resamp_alg = 'nearest' if attrname=='masks' else 'cubic',
CPUs = None if CFG.job.allow_subMultiprocessing else 1,
progress = True if v else False,
q = True,
v = v)
DS.correct_shifts()
setattr(self,attrname, DS.arr_shifted)
# update coreg_info
if attrname=='arr':
......@@ -752,8 +760,6 @@ class L1B_object(L1A_object):
self.meta_odict['coordinate system string'] = EPSG2WKT(WKT2EPSG(DS.updated_projection))
self.shape_fullArr = DS.arr_shifted.shape
self.meta_odict['lines'], self.meta_odict['samples'] = DS.arr_shifted.shape[:2]
if DS.arr_shifted.ndim == 2:
self.meta_odict['bands'] = DS.arr_shifted.shape[2]
else:
self.masks_meta['map info'] = DS.updated_map_info
self.masks_meta['coordinate system string'] = EPSG2WKT(WKT2EPSG(DS.updated_projection))
......@@ -761,9 +767,17 @@ class L1B_object(L1A_object):
# NOTE: mask_nodata and mask_clouds are updated later by L2A_map mapper function (module pipeline)
# update the GeoArray instance without losing its inherent metadata (nodata, ...)
geoArr.arr, geoArr.gt, geoArr.prj = DS.GeoArray_shifted.arr, DS.GeoArray_shifted.gt, DS.GeoArray_shifted.prj
#setattr(self,attrname, DS.GeoArray_shifted) # NOTE: don't set array earlier because setter will also
# # update arr.gt/.prj/.nodata from meta_odict
self.resamp_needed = False
self.coreg_needed = False
# recreate self.mask_nodata and self.mask_clouds from self.masks
self.mask_nodata = self.mask_nodata
self.mask_clouds = self.mask_clouds # FIXME move functionality of self.masks only to output writer and remove self.masks completely
......
......@@ -24,10 +24,7 @@
import warnings
import re
import logging
try:
from StringIO import StringIO # Python 2
except ImportError:
from io import StringIO # Python 3
import dill
import numpy as np
try:
......@@ -35,11 +32,13 @@ try:
except ImportError:
import osr
from py_tools_ds.ptds import GeoArray
from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform
from ..config import GMS_config as CFG
from . import GEOPROCESSING as GEOP
from .L1B_P import L1B_object
from .METADATA import get_LayerBandsAssignment
from ..misc.definition_dicts import get_outFillZeroSaturated
from S2SCAPEM.s2_ac import AC_GMS
......@@ -213,18 +212,6 @@ class L1C_object(L1B_object):
self._RAA_arr = RAA_arr
def atm_corr(self, subset=None):
"""Performs an atmospheric correction and returns atmospherically corrected reflectance data."""
subset = subset if subset else [self.arr_shape, self.arr_pos]
assert subset[0] in ['cube', 'block'], "'%s' subset is not supported." % subset[0]
self.log_for_fullArr_or_firstTile('Dummy atmospheric correction started.',subset) # FIXME
#SRF_dict = INP_R.SRF_reader(SRF_fold,RSD_md_L1B)
self.arr = self.arr # FIXME implement atmospheric correction
# delete array data that is not needed anymore
self.delete_ac_input_arrays()
def delete_ac_input_arrays(self):
self.VZA_arr = None # not needed anymore
self.SZA_arr = None # not needed anymore
......@@ -269,16 +256,16 @@ class AtmCorr(object):
# append AtmCorr object to input L1C objects
#[setattr(L1C_obj, 'AtmCorr', self) for L1C_obj in self.inObjs] # too big for serialization
if not re.search('Sentinel-2', self.inObjs[0].satellite, re.I):
warnings.warn('Calculation of acquisition geometry arrays is currently only validated for Sentinel-2!')
# validation possible by comparing S2 angles provided by ESA with own angles
@property
def logger(self):
if self._logger and self._logger.handlers[:]:
return self._logger
else:
## create a streamlogger
#self._logger = GMS_logger('AtmCorr_logger__' + self.inObjs[0].baseN, path_logfile=None, append=1)
#return self._logger
if len(self.inObjs)==1:
# just use the logger of the inObj
logger_atmCorr = self.inObjs[0].logger
......@@ -354,16 +341,27 @@ class AtmCorr(object):
"""
if not self._data:
data_dict = {}
for inObj in self.inObjs:
for bandN, bandIdx in inObj.arr.bandnames.items():
if bandN not in self._data:
if bandN not in data_dict:
arr2pass = inObj.arr[:,:,bandIdx].astype(np.float32) # conversion to np.float16 will convert -9999 to -10000
arr2pass[arr2pass==inObj.arr.nodata] = np.nan # set nodata values to np.nan
self._data[bandN] = (arr2pass/inObj.meta_odict['ScaleFactor']).astype(np.float16)
data_dict[bandN] = (arr2pass/inObj.meta_odict['ScaleFactor']).astype(np.float16)
else:
inObj.logger.warning("Band '%s' cannot be included into atmospheric correction because it "
"exists multiple times." %bandN)
# validate: data must have all bands needed for AC
full_LBA = get_LayerBandsAssignment(self.inObjs[0].GMS_identifier, return_fullLBA=True)
all_bNs_AC = ['B%s'%i if len(i)==2 else 'B0%s'%i for i in full_LBA]
if not all([bN in list(data_dict.keys()) for bN in all_bNs_AC]):
raise RuntimeError('Atmospheric correction did not receive all the needed bands. \n\tExpected: %s;\n\t'
'Received: %s' %(str(all_bNs_AC), str(list(sorted(data_dict.keys())))))
self._data = data_dict
return self._data
......@@ -572,7 +570,7 @@ class AtmCorr(object):
viewing_zenith = {}
for inObj in self.inObjs:
for bandN, bandIdx in inObj.arr.bandnames.items(): # FIXME target keys = range(12)
for bandN, bandIdx in inObj.arr.bandnames.items():
if bandN not in viewing_zenith:
arr2pass = inObj.VZA_arr[:, :, bandIdx] if inObj.VZA_arr.ndim == 3 else inObj.VZA_arr
viewing_zenith[bandN] = arr2pass.astype(np.float16)
......@@ -589,7 +587,7 @@ class AtmCorr(object):
viewing_azimuth = {}
for inObj in self.inObjs:
for bandN, bandIdx in inObj.arr.bandnames.items(): # FIXME target keys = range(12)
for bandN, bandIdx in inObj.arr.bandnames.items():
if bandN not in viewing_azimuth:
arr2pass = inObj.VAA_arr[:, :, bandIdx] if inObj.VAA_arr.ndim==3 else inObj.VAA_arr
viewing_azimuth[bandN] = arr2pass.astype(np.float16)
......@@ -607,7 +605,7 @@ class AtmCorr(object):
relative_viewing_azimuth = {}
for inObj in self.inObjs:
for bandN, bandIdx in inObj.arr.bandnames.items(): # FIXME target keys = range(12)
for bandN, bandIdx in inObj.arr.bandnames.items():
if bandN not in relative_viewing_azimuth:
arr2pass = inObj.RAA_arr[:, :, bandIdx] if inObj.RAA_arr.ndim == 3 else inObj.RAA_arr
relative_viewing_azimuth[bandN] = arr2pass.astype(np.float16)
......@@ -625,8 +623,9 @@ class AtmCorr(object):
aux_data = dict(
# set lons and lats (a 2D array for all bands is enough (different band resolutions dont matter))
lons = self.inObjs[0].lonlat_arr[::10,::10,0].astype(np.float64), # 2D array of lon values: 0° - 360°
lats = self.inObjs[0].lonlat_arr[::10,::10,1].astype(np.float64) # 2D array of lat values: -90° - 90°
lons = self.inObjs[0].lonlat_arr[::10,::10,0].astype(np.float16), # 2D array of lon values: 0° - 360°
lats = self.inObjs[0].lonlat_arr[::10,::10,1].astype(np.float16) # 2D array of lat values: -90° - 90°
# FIXME correct to reduce resolution here by factor 10?
)
return aux_data
......@@ -642,7 +641,7 @@ class AtmCorr(object):
# in case of Sentinel-2 the 20m DEM must be passed
inObj4dem = [obj for obj in self.inObjs if obj.arr.xgsd==20]
assert inObj4dem, self.logger.warning('Sentinel-2 20m subsystem could not be found. DEM passed to '
'atmopsheric correction might have wrong resolution.')
'atmospheric correction might have wrong resolution.')
inObj4dem = inObj4dem[0]
else:
inObj4dem = self.inObjs[0]
......@@ -656,12 +655,13 @@ class AtmCorr(object):
return None # TODO
def run_atmospheric_correction(self):
# type: () -> list
def run_atmospheric_correction(self, dump_ac_input=False):
# type: (bool) -> list
"""Collects all input data for atmospheric correction, runs the AC and returns the corrected L1C objects
containing surface reflectance.
:return: list of L1C_object instances containing atmospherically corrected data
:param dump_ac_input: allows dumping the inputs of the AC to the scene's processing folder in case the AC fails
:return: list of L1C_object instances containing atmospherically corrected data
"""
# collect input args/kwargs for AC
......@@ -688,7 +688,18 @@ class AtmCorr(object):
# run AC
self.logger.info('Atmospheric correction started.')
self.results = AC_GMS(RSImage(**rs_data), options, logger=self.logger, script=False)
try:
self.results = AC_GMS(RSImage(**rs_data), options, logger=self.logger, script=False)
except:
# serialize AC inputs
if dump_ac_input:
path_dump = self.inObjs[0].pathGen.get_path_ac_input_dump()
with open(path_dump, 'wb') as outF:
dill.dump(self.ac_input, outF)
self.logger.error('An error occurred during atmospheric correction. Inputs have been dumped to %s.'
%path_dump)
raise
# get processing infos
self.proc_info = self.ac_input['options']['processing']
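
For debugging, a hedged sketch of how such a dump could be re-loaded afterwards (the file path below is a placeholder; the real one is produced by path_generator.get_path_ac_input_dump()):

    import dill

    # placeholder path standing in for the scene-specific dump file
    with open('/path/to/scene/ac_input_dump.dill', 'rb') as inF:
        ac_input = dill.load(inF)
    print(sorted(ac_input.keys()))  # the keyword arguments that were about to be passed to the AC
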
......@@ -707,13 +718,22 @@ class AtmCorr(object):
if self.results.data_ac is not None:
self.logger.info('Joining results of atmospheric correction to input GMS objects.')
for inObj in self.inObjs:
for inObj in self.inObjs:
#assert isinstance(inObj.arr, (GeoArray, np.ndarray)), print(type(inObj.arr))
nodata = self.results.nodata[inObj.arr.xgsd] # 2D mask with True outside of image coverage
ac_bandNs = [bandN for bandN in self.ac_input['options']['AC']['bands'] if bandN in inObj.arr.bandnames]
out_LBA = [bN.split('B0')[1] if bN.startswith('B0') else bN.split('B')[1] for bN in ac_bandNs]
# update metadata
inObj.arr_desc = 'BOA_Ref'
inObj.MetaObj.bands = len(self.results.data_ac)
inObj.MetaObj.PhysUnit = 'BOA_Reflectance in [0-%d]' % CFG.usecase.scale_factor_BOARef
inObj.MetaObj.LayerBandsAssignment = out_LBA
inObj.MetaObj.filter_layerdependent_metadata()
inObj.meta_odict = inObj.MetaObj.to_odict()
# join SURFACE REFLECTANCE as 3D int16 array, scaled to scale factor from config
surf_refl = np.dstack((self.results.data_ac[bandN] for bandN in ac_bandNs))
surf_refl *= CFG.usecase.scale_factor_BOARef # scale using scale factor (output is float16)
......@@ -729,36 +749,40 @@ class AtmCorr(object):
out_dtype = np.int8 if CFG.usecase.scale_factor_errors_ac<=255 else np.int16
ac_errors[nodata] = get_outFillZeroSaturated(out_dtype)[0]
ac_errors = ac_errors.astype(out_dtype)
inObj.ac_errors = ac_errors # setter generates a GeoArray
inObj.ac_errors = ac_errors # setter generates a GeoArray with the same bandnames as inObj.arr
# TODO how to handle nans?
# join CLOUD MASK as 2D uint8 array
# NOTE: mask_clouds has also methods 'export_mask_rgb()', 'export_confidence_to_jpeg2000()', ...
mask_clouds_ac = self.results.mask_clouds.mask_array # uint8 2D array
cfd_arr = self.results.mask_clouds.mask_confidence_array # float32 2D array, scaled [0-1, nodata 255]
cfd_arr[cfd_arr == self.ac_input['options']['cld_mask']['nodata_value_mask']] = -1
cfd_arr = (cfd_arr * CFG.usecase.scale_factor_BOARef).astype(np.int16)
cfd_arr[cfd_arr == -CFG.usecase.scale_factor_BOARef] = get_outFillZeroSaturated(cfd_arr.dtype)[0]
joined = False
for inObj in self.inObjs:
mask_clouds_ac = self.results.mask_clouds.mask_array # uint8 2D array
cfd_arr = self.results.mask_clouds.mask_confidence_array # float32 2D array, scaled [0-1, nodata 255]
cfd_arr[cfd_arr == self.ac_input['options']['cld_mask']['nodata_value_mask']] = -1
cfd_arr = (cfd_arr*CFG.usecase.scale_factor_BOARef).astype(np.int16)
cfd_arr[cfd_arr == -CFG.usecase.scale_factor_BOARef] = get_outFillZeroSaturated(cfd_arr.dtype)[0]
# delete all previous cloud masks
del inObj.mask_clouds
# append mask_clouds only to the input GMS object with the same dimensions
if inObj.arr.shape[:2] == mask_clouds_ac.shape:
inObj.mask_clouds = mask_clouds_ac
inObj.mask_clouds.legend = self.results.mask_clouds.mask_legend # dict(value=string, string=value))
inObj.mask_confidence_array = cfd_arr
inObj.mask_clouds.legend = self.results.mask_clouds.mask_legend # dict(value=string, string=value)) # # TODO compatible to own legend dict?
inObj.mask_clouds.nodata = self.ac_input['options']['cld_mask']['nodata_value_mask']
# TODO handle nodata value
inObj.mask_clouds_confidence = GeoArray(cfd_arr, inObj.arr.gt, inObj.arr.prj,
nodata=get_outFillZeroSaturated(cfd_arr.dtype)[0])
joined=True
# update masks (always do that because masks can also only contain one layer)
inObj.build_combined_masks_array()
if not joined:
self.logger.warning('Cloud mask has not been appended to one of the AC inputs because there was no '
'input GMS object with the same dimensions.')
# update metadata
for inObj in self.inObjs:
inObj.meta_odict['bands'] = inObj.arr.bands
inObj.meta_odict['PhysUnit'] = 'BOA_Reflectance in [0-%d]' %CFG.usecase.scale_factor_BOARef
# TODO filter band specific keys within meta_odict
self.outObjs = self.inObjs
......
......@@ -33,11 +33,11 @@ class L2B_object(L2A_object):
self.log_for_fullArr_or_firstTile('Performing spectral homogenization (%s) with target wavelength '
'positions at %s nm.' %(kind,', '.join(np.array(tgt_cwls[:-1]).astype(str))+' and %s' %tgt_cwls[-1]))
self.LayerBandsAssignment = [] # TODO better band names for homogenized product -> include in get_LayerBandsAssignment
self.arr = self.interpolate_cube_linear(self.arr,src_cwls,tgt_cwls) if kind=='linear' else self.arr
self.meta_odict['wavelength'] = list(tgt_cwls)
self.meta_odict['bands'] = len(tgt_cwls)
self.meta_odict['LayerBandsAssignment'] = [] # TODO
if 'band names' in self.meta_odict: # FIXME bug workaround
del self.meta_odict['band names'] # TODO
else:
......
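
For context, a minimal sketch of what the linear spectral homogenization amounts to, assuming interpolate_cube_linear() resamples a (rows, cols, bands) cube along the band axis from source to target central wavelengths; this is an illustration under that assumption, not the actual implementation:

    import numpy as np
    from scipy.interpolate import interp1d

    def interpolate_cube_linear(arr, src_cwls, tgt_cwls):
        # per-pixel linear interpolation over the band (last) axis
        f = interp1d(np.asarray(src_cwls, dtype=float), arr, axis=2,
                     kind='linear', bounds_error=False, fill_value='extrapolate')
        return f(np.asarray(tgt_cwls, dtype=float)).astype(arr.dtype)
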
......@@ -38,7 +38,8 @@ from py_tools_ds.ptds.geo.projection import WKT2EPSG
from ..config import GMS_config as CFG
from ..io import envifilehandling as ef
from ..io.Input_reader import open_specific_file_within_archive
from ..io.Input_reader import open_specific_file_within_archive, Solar_Irradiance_reader, SRF_reader
from ..io.Output_writer import enviHdr_keyOrder
from . import GEOPROCESSING as GEOP
from ..misc import helper_functions as HLP_F
from ..misc import database_tools as DB_T
......@@ -48,61 +49,60 @@ from ..misc.definition_dicts import get_GMS_sensorcode
from S2SCAPEM.options import get_options as get_ac_options
# + Input Reader (has to be left out here in order to avoid circular dependencies)
class METADATA(object):
def __init__(self, satellite, subsystem, scene_ID, stacked_image, data_folderOrArchive ,logger,
LayerBandsAssignment=None):
"""**** CREATE OBJECT ****************************************************"""
#****OBJECT ATTRIBUTES***************************************************
def __init__(self, GMS_identifier):
# private attributes
self._AcqDateTime = None
self.Dataname = stacked_image
self.FolderOrArchive = data_folderOrArchive
self.Metafile = "" # File containing image metadata (automatically found)
self.EntityID = "" # ID to identify the original scene
self.SceneID = scene_ID # postgreSQL-database identifier
self.Satellite = ""
self.Sensor = ""
self.Subsystem = subsystem if subsystem else ""
# unpack GMS_identifier
self.GMS_identifier = GMS_identifier
self.image_type = GMS_identifier['image_type']
self.Satellite = GMS_identifier['Satellite']
self.Sensor = GMS_identifier['Sensor']
self.Subsystem = GMS_identifier['Subsystem']
self.proc_level = GMS_identifier['proc_level']
self.logger = GMS_identifier['logger']
self.Dataname = ''
self.FolderOrArchive = ''
self.Metafile = "" # File containing image metadata (automatically found)
self.EntityID = "" # ID to identify the original scene
self.SceneID = '' # postgreSQL-database identifier
self.Sensormode = ""
self.gResolution = -99. # resolution [m]
self.AcqDate = "" # YYYY-MM-DD
self.AcqTime = "" # HH:MM:SS
self.Offsets = [] # List of offsets in order of the bands (radiance)
self.Gains = [] # List of gains in order of the bands (radiance)
self.OffsetsRef = [] # List of offsets in order of the bands for conversion to Reflectance (Landsat8)
self.GainsRef = [] # List of gains in order of the bands for conversion to Reflectance (Landsat8)
self.gResolution = -99. # resolution [m]
self.AcqDate = "" # YYYY-MM-DD
self.AcqTime = "" # HH:MM:SS
self.Offsets = [] # List of offsets in order of the bands (radiance)
self.Gains = [] # List of gains in order of the bands (radiance)
self.OffsetsRef = [] # List of offsets in order of the bands for conversion to Reflectance (Landsat8)
self.GainsRef = [] # List of gains in order of the bands for conversion to Reflectance (Landsat8)
self.CWL = []
self.FWHM = []
self.ProcLCode = "" # processing Level: Sensor specific Code
self.ProcLStr = "" # processing Level: Sensor independent String (raw,rad cal, rad+geom cal, ortho)
self.SunElevation = -99.0 # Sun elevation angle at center of product [Deg]
self.SunAzimuth = -99.0 # Sun azimuth angle at center of product, in degrees from North (clockwise) at the time of the first image line
self.ProcLCode = "" # processing Level: Sensor specific Code
self.ProcLStr = "" # processing Level: Sensor independent String (raw,rad cal, rad+geom cal, ortho)
self.SunElevation = -99.0 # Sun elevation angle at center of product [Deg]
self.SunAzimuth = -99.0 # Sun azimuth angle at center of product, in degrees from North (clockwise) at the time of the first image line
self.SolIrradiance = []
self.ThermalConstK1 = []
self.ThermalConstK2 = []
self.EarthSunDist = 1.0
self.ViewingAngle = -99.0 # viewing zenith angle of the Sensor (offNadir) [Deg] (usually) in Case of RapidEye "+" being East and “-” being West
self.ViewingAngle = -99.0 # viewing zenith angle of the Sensor (offNadir) [Deg] (usually) in Case of RapidEye "+" being East and “-” being West
self.ViewingAngle_arrProv = {}
self.IncidenceAngle = -9999.99 # viewing azimuth angle. The angle between the view direction of the satellite and a line perpendicular to the image or tile center.[Deg]
self.IncidenceAngle = -9999.99 # viewing azimuth angle. The angle between the view direction of the satellite and a line perpendicular to the image or tile center.[Deg]
self.IncidenceAngle_arrProv = {}
self.FOV = 9999.99 # field of view of the sensor [Deg]
self.Quality = [] # Sensor specific quality code: See ro2/behling/Satelliten/doc_allg/ReadOutMetadata/SatMetadata.xls
self.FOV = 9999.99 # field of view of the sensor [Deg]
self.Quality = [] # Sensor specific quality code: See ro2/behling/Satelliten/doc_allg/ReadOutMetadata/SatMetadata.xls
self.PhysUnit = "DN"
self.ScaleFactor = -99 # Factor to get reflectance values in [0-1]. Sentinel2A provides scaling factors for the Level1C TO-reflectance products
self.CS_EPSG = -99. # EPSG-Code of coordinate system
self.ScaleFactor = -99 # Factor to get reflectance values in [0-1]. Sentinel2A provides scaling factors for the Level1C TO-reflectance products
self.CS_EPSG = -99. # EPSG-Code of coordinate system
self.CS_TYPE = ""
self.CS_DATUM = ""
self.CS_UTM_ZONE = -99.
self.WRS_path = -99.
self.WRS_row = -99.
self.CornerTieP_LonLat = [] # List of Corner Coordinates in order of Lon/Lat/DATA_X/Data_Y for all given image corners
self.CornerTieP_LonLat = [] # List of Corner Coordinates in order of Lon/Lat/DATA_X/Data_Y for all given image corners
self.CornerTieP_UTM = []
self.LayerBandsAssignment = LayerBandsAssignment if LayerBandsAssignment else [] # List of spectral bands in the same order as they are stored in the rasterfile.
self.LayerBandsAssignment = [] # List of spectral bands in the same order as they are stored in the rasterfile.
self.additional = []
self.image_type = 'RSD'
self.orbitParams = {}
......@@ -116,16 +116,7 @@ class METADATA(object):
self.projection = ""
self.wvlUnit = ""
self.spec_vals = {}
self.logger = logger
#--Read metadata
if re.search("SPOT", satellite, re.I): self.Read_SPOT_dimap2()
elif re.search("Terra", satellite, re.I): self.Read_ASTER_hdffile(self.Subsystem)
elif re.search("Sentinel-2A", satellite, re.I): self.Read_Sentinel2A_xmls()
elif re.search("LANDSAT", satellite, re.I): self.Read_LANDSAT_mtltxt(self.LayerBandsAssignment)
elif re.search("RapidEye", satellite, re.I): self.Read_RE_metaxml()
elif re.search("ALOS", satellite, re.I): self.Read_ALOS_summary(); self.Read_ALOS_LEADER () # for gains and offsets
else: self.logger.critical("\tERROR: %s is not a supported sensor or sensorname is misspelled." %satellite)
"""****OBJECT METHODS******************************************************"""
def default_attr(self):
......@@ -171,12 +162,34 @@ class METADATA(object):
self.spec_vals = {}
self.additional = []
def get_GMS_identifier(self):
GMS_identifier = collections.OrderedDict()
for AttrName,AttrVal in zip(['image_type','Satellite','Sensor','Subsystem','logger'],
[self.image_type,self.Satellite,self.Sensor,self.Subsystem,self.logger]):
if hasattr(self,AttrName): GMS_identifier[AttrName] = AttrVal
return GMS_identifier
def read_meta(self, scene_ID, stacked_image, data_folderOrArchive, LayerBandsAssignment=None):
"""
Read metadata.
"""
self.SceneID = scene_ID
self.Dataname = stacked_image
self.FolderOrArchive = data_folderOrArchive
self.LayerBandsAssignment = LayerBandsAssignment if LayerBandsAssignment else []
if re.search("SPOT", self.Satellite, re.I):
self.Read_SPOT_dimap2()
elif re.search("Terra", self.Satellite, re.I):
self.Read_ASTER_hdffile(self.Subsystem)
elif re.search("Sentinel-2A", self.Satellite, re.I):
self.Read_Sentinel2A_xmls()
elif re.search("LANDSAT", self.Satellite, re.I):
self.Read_LANDSAT_mtltxt(self.LayerBandsAssignment)
elif re.search("RapidEye", self.Satellite, re.I):