Commit ee3fd0eb authored by Daniel Scheffler

Major update with further developments of map-reduce processing chain (working until L1C) and full Sentinel-2 support for all processing levels
GEOP:
    - worked around a bug in the rasterio library that caused GEOP.warp_ndarray() to return warped images with wrong dimensions (one row more than expected),
      which caused L1B_P.COREG() to fail
    - added zoom_2Darray_to_shapeFullArr() and adjust_acquisArrProv_to_shapeFullArr()
      for adjusting the dimensions of acquisition geometry arrays provided by data providers (e.g. Sentinel-2); see the sketch below
    - revised calc_FullDataset_corner_positions() - it no longer fails if only two image corners are visible in an image
      (granule with nodata on one side)
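    The following is a minimal, hypothetical sketch of the idea behind zoom_2Darray_to_shapeFullArr();
    the signature and the use of scipy.ndimage.zoom are assumptions for illustration, not the actual
    GEOP implementation:

        import numpy as np
        from scipy.ndimage import zoom

        def zoom_2Darray_to_shapeFullArr(arr2D, shape_fullArr):
            """Resample a coarse 2D geometry grid (e.g. Sentinel-2's 23x23
            viewing angle grid) to the full image dimensions."""
            rows, cols = shape_fullArr[:2]
            factors = (rows / arr2D.shape[0], cols / arr2D.shape[1])
            return zoom(arr2D, factors, order=1)  # bilinear keeps angles in range

        vza_coarse = np.random.rand(23, 23) * 12          # dummy zenith angles [deg]
        vza_full = zoom_2Darray_to_shapeFullArr(vza_coarse, (1098, 1098, 13))
        assert vza_full.shape == (1098, 1098)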
L0A_P:
    - fixed a bug in get_AllWrittenProcL_dueLog() that returned the same processing level multiple times
      if the same processing level had been written out more than once
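    A minimal sketch of the fix (the sorted_nicely() implementation shown here is a common
    natural-sort recipe and only assumed to match HLP_F):

        import re

        def sorted_nicely(iterable):
            """Sort strings in human order: 'L1A' < 'L1B' < 'L1C' < 'L2A'."""
            conv = lambda t: int(t) if t.isdigit() else t.lower()
            return sorted(iterable,
                          key=lambda k: [conv(c) for c in re.split('([0-9]+)', k)])

        AllWrittenProcL = ['L1A', 'L1B', 'L1A', 'L1C', 'L1B']  # repeated writes
        print(sorted_nicely(set(AllWrittenProcL)))             # ['L1A', 'L1B', 'L1C']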
L1A_P:
    - revised L1A_object.fill_from_disk() in order to make it usable by all GMS objects at all processing levels
    - calc_mean_VAA(): corrected assignment of VAA_mean in case of Sentinel-2 granules
      where VAA cannot be calculated using true corner coordinates
    - moved cut_L1A_obj_into_blocks(), merge_L1A_tiles_to_L1A_obj() and _numba_array_merger() to HLP_F
      in order to make them usable everywhere in the processing chain, not only in L1A processing
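    A self-contained sketch of the cut/merge round trip these helpers implement (simplified:
    plain arrays instead of GMS objects, inclusive tile indices as in the diff below):

        import numpy as np

        def get_image_tileborders(blocksize, shape_fullArr):
            """Return [(rowStart, rowEnd), (colStart, colEnd)] per block (inclusive)."""
            rows, cols = shape_fullArr[:2]
            br, bc = blocksize
            return [[(r, min(r + br, rows) - 1), (c, min(c + bc, cols) - 1)]
                    for r in range(0, rows, br) for c in range(0, cols, bc)]

        def merge_tiles(tiles, shape_fullArr, dtype):
            """Write each (position, data) tile back into a full-size array."""
            out = np.empty(shape_fullArr, dtype=dtype)
            for pos, data in tiles:
                (rS, rE), (cS, cE) = pos
                out[rS:rE + 1, cS:cE + 1] = data
            return out

        arr = np.arange(25).reshape(5, 5)
        tiles = [(tp, arr[tp[0][0]:tp[0][1] + 1, tp[1][0]:tp[1][1] + 1])
                 for tp in get_image_tileborders([2, 3], arr.shape)]
        assert np.array_equal(merge_tiles(tiles, arr.shape, arr.dtype), arr)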
L1B_P:
    - added logging of database queries
    - added L1B_object.set_coreg_info() and moved assignment of L1B_object.coreg_info there
L1C_P:
    - changed the logging pattern so that log messages are only emitted when the full array or the first tile is processed (see the sketch below)
    - added special case of calculating viewing zenith angle for Sentinel-2 by zooming the respective provider array to the right dimension
    - moved dummy_atm_corr() to L1C_object.atm_corr() and updated log messages
    - added L1C_object.delete_tempFiles()
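    A sketch of the logging pattern (argument layout assumed from the call sites in the diff below;
    subset is [shape, position] with position as [(rowStart, rowEnd), (colStart, colEnd)]):

        def log_for_fullArr_or_firstTile(logger, msg, subset=None):
            """Log msg only once per scene: for the full cube or for the tile
            whose upper-left corner is at (0, 0); other tiles stay silent."""
            if subset is None or subset[0] == 'cube':
                logger.info(msg)
            elif subset[1] and subset[1][0][0] == 0 and subset[1][1][0] == 0:
                logger.info(msg)  # first tile of a blockwise run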
META:
    - removed deprecated Sentinel-2 test mode in METADATA.Read_Sentinel2A_xmls()
    - added correct assignment of METADATA.gResolution for all supported datasets
    - added 'ViewingAngle_arrProv' and 'IncidenceAngle_arrProv' to Meta2ODict()
INP_R:
    - added docstring to read_ENVI_image_data_as_array()
    - revised read_ENVI_image_data_as_array(), read_mask_subset(), GMSfile2dict()
    - revised ()
OUT_W:
    - added L1C data to param_dic
    - added the possibility to include small numpy arrays as lists in *.gms files
      (needed for Sentinel-2's 'ViewingAngle_arrProv' and 'IncidenceAngle_arrProv'); see the sketch below
    - added docstrings to Tiles_Writer(), Obj2ENVI()
    - added add_ENVIclassificationMeta_to_meta() for modifying metadata dictionaries
      so that respective images are recognized as classification images by ENVI
    - Obj2ENVI(): added the possibility to write GMS objects that represent only a tile of a full object,
      which allows writing processed data directly from the Flink mappers and makes slow back-pickling of mapper results unnecessary
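    A hedged sketch of the numpy-as-list serialization (the encoder class and names here are
    illustrative; the actual commit converts the arrays via .tolist() inside Meta2ODict(),
    see the META hunk below):

        import json
        import numpy as np

        class GMSEncoder(json.JSONEncoder):
            """Fall back to plain lists for numpy arrays so the *.gms file
            remains a valid JSON document."""
            def default(self, obj):
                if isinstance(obj, np.ndarray):
                    return obj.tolist()
                return super().default(obj)

        meta = {'ViewingAngle_arrProv': {'1': np.random.rand(23, 23)}}
        gms_str = json.dumps(meta, cls=GMSEncoder)
        restored = {band: np.array(vals)                      # arrays again on read
                    for band, vals in json.loads(gms_str)['ViewingAngle_arrProv'].items()}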
DB_T:
    - added docstrings to get_info_from_SQLdb() and get_info_from_postgreSQLdb()
    - improved error handling in get_info_from_postgreSQLdb()
HLP_F:
    - added parentObjDict and initArgsDict,
      which are needed to make the GMS object cutter and tile joiner generally usable in the entire processing chain
    - added an exclude_val argument to find_nearest() (see the sketch below)
    - added docstring to get_image_tileborders()
    - added cut_GMS_obj_into_blocks(), merge_GMS_tiles_to_GMS_obj() and _numba_array_merger() based on similar methods from L1A_P
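    A minimal sketch of find_nearest() with the new argument (signature assumed from the
    changelog entry above, not copied from HLP_F):

        import numpy as np

        def find_nearest(array, value, exclude_val=None):
            """Return the element of 'array' closest to 'value', optionally
            ignoring a given value such as a nodata fill."""
            arr = np.asarray(array, dtype=float)
            if exclude_val is not None:
                arr = arr[arr != exclude_val]
            return arr[np.abs(arr - value).argmin()]

        print(find_nearest([10, 20, 30, -9999], -5000, exclude_val=-9999))  # 10.0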
CONFIG:
    - increased postgreSQL database statement timeout to 15 sec
    - added a validation of output writer configurations depending on job.exec_mode
PC:
    - revised L1B_map_1()
    - added L1C_map_1()
    - revised L2A_map_1() - not fully working yet
    - revised run_processController_in_multiprocessing()
        -> now it fully supports L1A-L1C processing and includes a first version of L2A processing (not fully working yet)
parent 5ff92e51
......@@ -159,7 +159,7 @@ def add_local_availability(dataset):
print ('%s: According to logfile no completely processed data exist at any processing level. ' \
'Dataset has to be reprocessed.' %dataset['entity_ID'])
else:
AllWrittenProcL_dueLog = HLP_F.sorted_nicely(AllWrittenProcL_dueLog)
AllWrittenProcL_dueLog = HLP_F.sorted_nicely(list(set(AllWrittenProcL_dueLog)))
return AllWrittenProcL_dueLog
if len(DB_match) == 1 or DB_match == [] or DB_match == 'database connection fault':
......
......@@ -24,11 +24,8 @@ import spectral.io.envi
import re
import sys
import glob
import subprocess
import shutil
import collections
import gdal
import gdalconst
import gdalnumeric
import zipfile
import tarfile
......@@ -37,7 +34,6 @@ import builtins
import time
import matplotlib.pyplot as plt
from pyhdf import SD
from numba import autojit
from spectral.io import envi
import algorithms.METADATA_BD as META
......@@ -186,28 +182,23 @@ class L1A_object(object):
"""Fills an already instanced L1A object with data from disk. Excludes array attributes in Python mode.
:param tuple_GMS_subset:
"""
path_GMS_file = tuple_GMS_subset[0]
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
path_GMS_file = tuple_GMS_subset[0]
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
[setattr(self, key, value) for key, value in GMSfileDict.items()] # copy all attributes from GMS file
self.acquisition_date = datetime.datetime.strptime(self.acquisition_date,'%Y-%m-%d')
self.arr_shape = tuple_GMS_subset[1][0]
self.arr_pos = tuple_GMS_subset[1][1]
self.logger = HLP_F.setup_logger('log__'+self.baseN, self.path_logfile, append=1)
get_hdr = lambda path_ENVIf: os.path.splitext(path_ENVIf)[0] + '.hdr'
self.acquisition_date = datetime.datetime.strptime(self.acquisition_date,'%Y-%m-%d')
self.arr_shape, self.arr_pos = tuple_GMS_subset[1]
self.logger = HLP_F.setup_logger('log__'+self.baseN, self.path_logfile, append=1)
get_hdr = lambda path_ENVIf: os.path.splitext(path_ENVIf)[0] + '.hdr'
if job.exec_mode=='Flink':
PG_obj = PG.path_generator(self.__dict__)
self.arr = INP_R.read_ENVIfile(get_hdr(PG_obj.get_path_imagedata()),self.arr_shape,self.arr_pos,self.logger)
self.mask_clouds = INP_R.read_mask_subset(PG_obj.get_path_maskdata(),'mask_clouds',self.logger,tuple_GMS_subset[1])
path_masks = PG_obj.get_path_maskdata()
self.mask_1bit = INP_R.read_mask_subset(path_masks,'mask_1bit', self.logger,tuple_GMS_subset[1])
self.mask_clouds = INP_R.read_mask_subset(path_masks,'mask_clouds',self.logger,tuple_GMS_subset[1])
if self.arr_pos: self.logger.info('Reading file: %s @ position %s' %(self.baseN, self.arr_pos))
else: self.logger.info('Reading file: %s' %self.baseN)
self.build_L1A_masks()
self.GMS_identifier = collections.OrderedDict(zip(
['image_type', 'Satellite', 'Sensor', 'Subsystem', 'logger'],
[self.image_type, self.satellite, self.sensor, self.subsystem, self.logger]))
self.meta = INP_R.read_ENVIhdr_to_dict(get_hdr(self.path_Outfile_L1A),self.logger)
self.meta = INP_R.unify_envi_header_keys(self.meta) # ensure key compatibility
#[print(k, v, type(v)) for k,v in self.meta.items()]
# sys.exit()
del self.logger; self.GMS_identifier['logger'] = 'not set'
return self
......@@ -696,8 +687,10 @@ class L1A_object(object):
assert dataOut is not None
self.update_spec_vals_according_to_dtype('int16')
tiles_desc = usecase.conversion_type_optical if usecase.skip_thermal else \
'%s_%s' %(usecase.conversion_type_optical, usecase.conversion_type_thermal)
tiles_desc = '_'.join([desc for op_th,desc in zip(['optical','thermal'], [usecase.conversion_type_optical,
usecase.conversion_type_thermal]) if desc in self.dict_LayerOptTherm.values()])
#tiles_desc = usecase.conversion_type_optical if usecase.skip_thermal else \
# '%s_%s' %(usecase.conversion_type_optical, usecase.conversion_type_thermal)
if job.exec_mode=='Flink' and subset is None:
self.arr = dataOut
......@@ -1007,7 +1000,7 @@ class L1A_object(object):
"""Generates L1A_obj.masks attribute from by concatenating all masks included in L1A_obj.
The corresponding metadata is assigned to L1A_obj.masks_meta. Empty mask attributes are skipped."""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
temp_logger.info('Preparing masks to be written to disk as ENVI file...')
temp_logger.info('Generating masks and corresponding metadata...')
bandslist = [i for i in ['mask_1bit','mask_clouds'] if hasattr(self,i) and getattr(self,i) is not None]
self.masks = getattr(self,bandslist[0]).astype(np.uint8)[:,:,None] if len(bandslist) == 1 else \
......@@ -1015,10 +1008,8 @@ class L1A_object(object):
# add some information to mask header
masks_L1A_md = {'description': 'L1A mask layer:\n0 = no data\n1 = data in good condition\n2 = bad image pixel'}
for i in ['samples', 'lines', 'map info', 'header offset', 'file type', 'data type', 'interleave',
'byte order', 'coordinate system string']:
if i in self.meta.keys():
masks_L1A_md[i] = self.meta[i]
masks_L1A_md.update({i:self.meta[i] for i in ['samples', 'lines', 'map info', 'header offset', 'file type',
'data type', 'interleave', 'byte order', 'coordinate system string'] if i in self.meta.keys() })
masks_L1A_md['bands'] = len(bandslist)
if self.masks.dtype == np.uint8: masks_L1A_md['data type'] = 1
if self.masks.dtype == np.int16: masks_L1A_md['data type'] = 2
......@@ -1030,7 +1021,14 @@ class L1A_object(object):
def calc_mean_VAA(self):
"""Calculates mean viewing azimuth angle using sensor flight line derived from corner coordinates."""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
self.VAA_mean = GEOP.calc_VAA_using_trueCornerLonLat(self.trueDataCornerLonLat)
if re.search('Sentinel-2',self.satellite,re.I):
temp_logger.warning('No precise calculation of mean viewing azimuth angle possible because orbit track '
'cannot be reconstructed from dataset since true data corner positions are unknown. '
'Mean VAA angle is filled with the mean value of the viewing azimuth array provided '
'in metadata.')
self.VAA_mean = self.MetaObj.IncidenceAngle
else:
self.VAA_mean = GEOP.calc_VAA_using_trueCornerLonLat(self.trueDataCornerLonLat)
temp_logger.info('Calculation of mean VAA...: %s' % round(self.VAA_mean, 2))
def combine_tiles_to_ObjAttr(self, tiles, target_attr):
......@@ -1088,13 +1086,13 @@ class L1A_object(object):
self.masks = outpath
elif tiles[0]['desc'] == 'lonlat_arr':
# outpath = os.path.join(os.path.abspath('./testing/out/'),'%s__%s.%s' %(self.baseN, tiles[0]['desc'], self.outInterleave))
self.lonlat_arr = outpath
self.lonlat_arr = outpath # FIXME
outpath = os.path.splitext(outpath)[0]+'.hdr' if not outpath.endswith('.hdr') else outpath
out_shape = self.shape_fullArr[:2]+([tiles[0]['data'].shape[2]] if len(tiles[0]['data'].shape)==3 else [1])
OUT_W.Tiles_Writer(tiles, outpath, out_shape, tiles[0]['data'].dtype, self.outInterleave, self.meta, overwrite=overwrite)
def delete_tempFiles(self):
"""Deleta all temporary files that have been written during L1A processing."""
"""Delete all temporary files that have been written during L1A processing."""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
temp_logger.info('Deleting temporary data...')
if sys.platform.startswith('linux'):
......@@ -1107,56 +1105,4 @@ class L1A_object(object):
if not glob.glob('%s/*' % pardir) and os.path.isdir(pardir):
os.rmdir(pardir)
else:
break
def cut_L1A_obj_into_blocks(tuple__In_obj__blocksize_RowsCols):
"""Cut a GMS object into tiles with respect to raster attributes as well as scene wide attributes.
:param tuple__In_obj__blocksize_RowsCols: a tuple with L1A_obj as first and [rows,cols] as second element"""
In_obj, blocksize_RowsCols = tuple__In_obj__blocksize_RowsCols
assert isinstance(blocksize_RowsCols,list) and len(blocksize_RowsCols) == 2, \
"The argument 'blocksize_RowsCols' must represent a list of size 2."
tilepos = HLP_F.get_image_tileborders('block', blocksize_RowsCols, shape_fullArr=In_obj.shape_fullArr)
L1A_tiles = []
for tp in tilepos:
tile = L1A_object(None)
[setattr(tile, i, getattr(In_obj,i)) for i in In_obj.__dict__ \
if not callable(getattr(In_obj,i)) and not isinstance(getattr(In_obj,i),np.ndarray)]
[setattr(tile, i, getattr(In_obj,i)[tp[0][0]:tp[0][1]+1,tp[1][0]:tp[1][1]+1]) \
for i in In_obj.__dict__ \
if not callable(getattr(In_obj,i)) and isinstance(getattr(In_obj,i),np.ndarray)]
tile.arr_shape = 'block'
tile.arr_pos = tp
L1A_tiles.append(tile)
return L1A_tiles
def merge_L1A_tiles_to_L1A_obj(list_L1A_tiles):
"""Merge separate GMS objects belonging to the same scene-ID to ONE GMS object
:param list_L1A_tiles: <list> of L1A objects that have been created by L1A_P.cut_L1A_obj_into_blocks()"""
if 'IMapUnorderedIterator' in str(type(list_L1A_tiles)): list_L1A_tiles = list(list_L1A_tiles)
L1A_obj = L1A_object(None)
[setattr(L1A_obj, i, getattr(list_L1A_tiles[0],i)) for i in list_L1A_tiles[0].__dict__ \
if not callable(getattr(list_L1A_tiles[0],i)) and not isinstance(getattr(list_L1A_tiles[0],i),np.ndarray)]
L1A_obj.arr_shape = 'cube'
L1A_obj.arr_pos = None
list_arraynames = [i for i in list_L1A_tiles[0].__dict__ if not callable(getattr(list_L1A_tiles[0],i)) and \
isinstance(getattr(list_L1A_tiles[0],i),np.ndarray)]
if list_arraynames:
L1A_obj = _numba_array_merger(L1A_obj,list_arraynames, list_L1A_tiles)
return L1A_obj
@autojit
def _numba_array_merger(L1A_obj, list_arraynames, list_L1A_tiles):
"""private function, e.g. called by L1A_P.merge_L1A_tiles_to_L1A_obj() in order to fasten array merging"""
for arrname in list_arraynames:
array = getattr(list_L1A_tiles[0],arrname)
is_3d = len(array.shape) == 3
bands = [array.shape[2]] if is_3d else [] # dynamic -> works for arr, cld_arr,...
target_shape = L1A_obj.shape_fullArr[:2]+bands
target_dtype = array.dtype
setattr(L1A_obj, arrname, np.empty(target_shape, dtype=target_dtype))
for idx,tile in enumerate(list_L1A_tiles):
rowStart,rowEnd = tile.arr_pos[0]
colStart,colEnd = tile.arr_pos[1]
getattr(L1A_obj, arrname)[rowStart:rowEnd+1, colStart:colEnd+1]=getattr(tile,arrname)
return L1A_obj
\ No newline at end of file
break
\ No newline at end of file
......@@ -308,6 +308,9 @@ class COREG(object):
conditions=condlist, add_cmds='ORDER BY scenes.cloudcover ASC')
conds_descImportance = [dataset_cond, cloudcov_cond, dayrange_cond]
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.im2shift_objDict['path_logfile'],append=1)
temp_logger.info('Querying database in order to find a suitable reference scene for co-registration.')
count, filt_overlap_scenes = 0,[]
while not filt_overlap_scenes:
if count==0:
......@@ -1067,7 +1070,7 @@ class DESHIFTER(object):
assert os.path.exists(self.im2shift), '%s: The image to be shifted can not be found at %s.'\
%(self.shift_entity_ID, self.im2shift)
ds = gdal.Open(self.im2shift)
assert ds is not None, 'The image %s can not be read by GDAL.' %self.shift_entity_ID
assert ds, 'The image %s can not be read by GDAL.' %self.shift_entity_ID
elif isinstance(self.im2shift,np.ndarray):
path_src = PG.get_tempfile(ext='.bsq') # FIXME writes .bsq into tempfile
arr_ds = GEOP.ndarray2gdal(self.im2shift, path_src, geotransform=self.shift_gt,
......@@ -1091,7 +1094,7 @@ class DESHIFTER(object):
rows, cols = self.im2shift.shape[:2]
bands = self.im2shift.shape[2] if len(self.im2shift.shape)==3 else 1
else:
raise TypeError('DESHIFTER.im2shift must contain a path or a numpy array. Got %s' &type(self.im2shift))
raise TypeError('DESHIFTER.im2shift must contain a path or a numpy array. Got %s' %type(self.im2shift))
return rows, cols, bands
def correct_shifts(self):
......@@ -1233,7 +1236,7 @@ class DESHIFTER(object):
if self.cliptoextent:
out_arr, out_gt, out_prj = GEOP.warp_ndarray(in_arr,self.shift_gt,self.shift_prj,self.ref_prj,
rsp_alg=2, in_nodata=in_nodata, out_nodata=in_nodata, out_res=(xgsd,xgsd),
out_extent=[xmin,ymin,xmax,ymax])
out_extent=[xmin,ymin,xmax,ymax]) # FIXME resamp_alg: "average" causes sinus-shaped patterns in fft images
else:
out_arr, out_gt, out_prj = GEOP.warp_ndarray(in_arr,self.shift_gt,self.shift_prj,self.ref_prj,
rsp_alg=2, in_nodata=in_nodata, out_nodata=in_nodata, out_res=(xgsd,xgsd), out_gt=out_gt)
......@@ -1272,11 +1275,17 @@ def corner_coord_to_minmax(corner_coords):
return xmin,xmax,ymin,ymax
class L1B_object(L1A_object):
def __init__(self, L1A_obj, COREG_obj):
super().__init__(None)
if L1A_obj: [setattr(self, key, value) for key,value in L1A_obj.__dict__.items()]
self.proc_level = 'L1B'
self.proc_level = 'L1B'
self.coreg_info = self.set_coreg_info(COREG_obj) if COREG_obj else None
def set_coreg_info(self, COREG_obj):
assert COREG_obj, 'Invalid COREG object. Got %s.' %COREG_obj
self.coreg_info = {
'corrected_shifts_px' : {'x':COREG_obj.x_shift_px, 'y':COREG_obj.y_shift_px },
'corrected_shifts_map' : {'x':COREG_obj.x_shift_map, 'y':COREG_obj.y_shift_map},
......@@ -1290,10 +1299,11 @@ class L1B_object(L1A_object):
'is shifted' : False,
'is resampled' : False } if COREG_obj.success else None
self.meta['original map info'] = COREG_obj.map_info_to_update if COREG_obj else None
if self.meta:
self.meta['original map info'] = COREG_obj.map_info_to_update
def perform_deshifting(self, attrname, band2process=None): # TODO: pass this method on to all subsequent processing levels via super(); maybe use @classmethod to pass the namespace of later objects to the method?
def perform_deshifting(self, attrname, band2process=None):
config = get_DESHIFTER_configs(self.__dict__.copy(),attrname,band2process=band2process)
DESHIFT_obj = DESHIFTER(config[0],config[1],**config[2]).correct_shifts()
if DESHIFT_obj.is_resampled and DESHIFT_obj.is_shifted:
......@@ -1303,7 +1313,8 @@ class L1B_object(L1A_object):
self.meta['map info'] = DESHIFT_obj.updated_map_info
self.meta['projection'] = DESHIFT_obj.updated_projection
def apply_deshift_results(self,results_grouped_by_attr): # TODO: pass this method on to all subsequent processing levels via super(); maybe use @classmethod to pass the namespace of later objects to the method?
def apply_deshift_results(self,results_grouped_by_attr):
"""Applies the deshifting results to the corresponding attributes of the GMS object overwriting existing arrays.
:param results_grouped_by_attr: a list of deshift_results returned by L1B_P.DESHIFTER.correct_shifts
in which every item belongs to the same attribute
......@@ -1328,6 +1339,7 @@ class L1B_object(L1A_object):
self.meta['projection'] = OrdDic0['updated projection']
def delete_tempFiles():
temp_dir = os.path.dirname(PG.get_tempfile())
files2delete = glob.glob(os.path.join(temp_dir,'GeoMultiSens_*'))
......
......@@ -52,7 +52,7 @@ class L1C_object(L1B_object):
def get_lonlat_coord_array(self, subset=None):
"""Calculates pixelwise 2D-array with longitude and latitude coordinates.Supports 3 modes for subsetting:
"""Calculates pixelwise 2D-array with longitude and latitude coordinates. Supports 3 modes for subsetting:
(1) called with given subset argument if self.mask_1bit is an array: passes array subset
(2) called with given subset argument if self.mask_1bit is NO array: reads mask subset from disk
(3) called without subset argument but L1A obj represents a block subset: self.arr_pos is passed for subsetting"""
......@@ -61,9 +61,7 @@ class L1C_object(L1B_object):
subset = subset if subset else [self.arr_shape, self.arr_pos]
assert subset[0] in ['cube', 'block'], "'%s' subset is not supported." % subset[0]
rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[3:7]
if subset is None and self.arr_shape == 'cube' or (subset and [subset[1][0][0], subset[1][1][0]] == [0, 0]) or \
(self.arr_pos and [self.arr_pos[0][0], self.arr_pos[1][0]] == [0, 0]): # cube or 1st tile
temp_logger.info('Calculating lonlat array.')
self.log_for_fullArr_or_firstTile(temp_logger,'Calculating lonlat array.',subset)
if hasattr(self, 'mask_1bit') and isinstance(self.mask_1bit, np.ndarray):
mask_1bit_temp = self.mask_1bit[rS:rE + 1, cS:cE + 1]
......@@ -73,10 +71,10 @@ class L1C_object(L1B_object):
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
lonlat_arr = GEOP.get_lonlat_coord_array(self.shape_fullArr, subset[1],
GEOP.mapinfo2geotransform(self.MetaObj.map_info),
self.MetaObj.projection, mask_1bit_temp, fillVal)[0]
GEOP.mapinfo2geotransform(self.meta['map info']),
self.meta['coordinate system string'], mask_1bit_temp, fillVal)[0]
if job.exec_mode == 'Flink' and subset is None:
if job.exec_mode == 'Flink' and subset[0]=='cube':
self.lonlat_arr = lonlat_arr
else:
return {'desc': 'lonlat_arr', 'row_start': rS, 'row_end': rE, 'col_start': cS, 'col_end': cE,
......@@ -94,10 +92,8 @@ class L1C_object(L1B_object):
subset = subset if subset else [self.arr_shape, self.arr_pos]
assert subset[0] in ['cube', 'block'], "'%s' subset is not supported." % subset[0]
rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[3:7]
print(subset, self.arr_pos)
if subset is None and self.arr_shape == 'cube' or (subset and [subset[1][0][0], subset[1][1][0]] == [0, 0]) or \
(self.arr_pos and [self.arr_pos[0][0], self.arr_pos[1][0]] == [0, 0]): # cube or 1st tile
temp_logger.info('Calculating acquisition and illumination geometry arrays.')
#print(subset, self.arr_pos)
self.log_for_fullArr_or_firstTile(temp_logger,'Calculating acquisition and illumination geometry arrays.',subset)
if hasattr(self, 'mask_1bit') and isinstance(self.mask_1bit, np.ndarray):
mask_1bit_temp = self.mask_1bit[rS:rE + 1, cS:cE + 1]
......@@ -107,9 +103,14 @@ class L1C_object(L1B_object):
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
assert self.meta, "Missing 'meta' attribute. Run self.Meta2ODict() first!"
VZA_arr = GEOP.calc_VZA_array(self.shape_fullArr, subset[1], self.trueDataCornerPos,
float(self.meta['ViewingAngle']), float(self.meta['FieldOfView']),
temp_logger, mask_1bit_temp, fillVal)
if 'ViewingAngle_arrProv' in self.meta:
VZA_arr = GEOP.adjust_acquisArrProv_to_shapeFullArr(self.meta['ViewingAngle_arrProv'],self.shape_fullArr,
subset,bandwise=0)
else:
VZA_arr = GEOP.calc_VZA_array(self.shape_fullArr, subset[1], self.trueDataCornerPos,
float(self.meta['ViewingAngle']), float(self.meta['FieldOfView']),
temp_logger, mask_1bit_temp, fillVal)
SZA_arr, SAA_arr = GEOP.calc_SZA_SAA_array(self.shape_fullArr, subset[1], self.meta['AcqDate'],
self.meta['AcqTime'], self.trueDataCornerPos,
self.trueDataCornerLonLat, self.meta['overpass duraction sec'],
......@@ -117,6 +118,8 @@ class L1C_object(L1B_object):
accurracy=job.SZA_SAA_calculation_accurracy,
lonlat_arr=self.lonlat_arr
if job.SZA_SAA_calculation_accurracy == 'fine' else None)
if 'IncidenceAngle_arrProv' in self.meta:
pass # FIXME Sentinel-2's viewing azimuth array is currently not used (instead a static VAA is used)
RAA_arr = GEOP.calc_RAA_array(self.trueDataCornerLonLat, SAA_arr, self.VAA_mean, mask_1bit_temp, fillVal)
if job.exec_mode == 'Flink' and subset is None:
......@@ -127,10 +130,18 @@ class L1C_object(L1B_object):
{'desc': 'SAA_arr', 'row_start': rS, 'row_end': rE, 'col_start': cS, 'col_end': cE, 'data': SAA_arr},
{'desc': 'RAA_arr', 'row_start': rS, 'row_end': rE, 'col_start': cS, 'col_end': cE, 'data': RAA_arr})
def atm_corr(self, subset=None):
"""Performs an atmospheric correction and returns atmospherically corrected reflectance data."""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
subset = subset if subset else [self.arr_shape, self.arr_pos]
assert subset[0] in ['cube', 'block'], "'%s' subset is not supported." % subset[0]
self.log_for_fullArr_or_firstTile(temp_logger,'Dummy atmospheric correction started.',subset) # FIXME
#SRF_dict = INP_R.SRF_reader(SRF_fold,RSD_md_L1B)
self.arr = self.arr # FIXME implement atmospheric correction
def delete_tempFiles(self):
self.VZA_arr = None # not needed anymore
self.SZA_arr = None # not needed anymore
self.SAA_arr = None # not needed anymore
self.RAA_arr = None # not needed anymore
def dummy_atm_corr(L1B_obj):
"""Performs an atmospheric correction and returns atmospherically corrected reflectance data."""
temp_logger = HLP_F.setup_logger('log__' + L1B_obj.baseN, L1B_obj.path_logfile, append=1)
temp_logger.info('Dummy Level 1C Processing started.')
#SRF_dict = INP_R.SRF_reader(SRF_fold,RSD_md_L1B)
L1B_obj.arr = L1B_obj.arr/2
......@@ -72,9 +72,9 @@ class METADATA(object):
self.ThermalConstK1 = []
self.ThermalConstK2 = []
self.EarthSunDist = 1.0
self.ViewingAngle = -99.0 # viewing angle of the Sensor (ofNadir) [Deg] (usually) in Case of RapidEye "+" being East and “-” being West
self.ViewingAngle = -99.0 # viewing zenith angle of the Sensor (offNadir) [Deg] (usually) in Case of RapidEye "+" being East and “-” being West
self.ViewingAngle_arrProv = {}
self.IncidenceAngle = -9999.99 # The angle between the view direction of the satellite and a line perpendicular to the image or tile center.[Deg]
self.IncidenceAngle = -9999.99 # viewing azimuth angle. The angle between the view direction of the satellite and a line perpendicular to the image or tile center.[Deg]
self.IncidenceAngle_arrProv = {}
self.FOV = 9999.99 # field of view of the sensor [Deg]
self.Quality = [] # Sensor specific quality code: See ro2/behling/Satelliten/doc_allg/ReadOutMetadata/SatMetadata.xls
......@@ -645,7 +645,7 @@ class METADATA(object):
except AttributeError: # Level1-B data has no geodaticDatum
pass
# Corrections applied + Quality
# Corrections applied + Quality
h7 = re.search("<re:radiometricCorrectionApplied>([\w]*)</re:radiometricCorrectionApplied>[\s]*<re:radiometricCalibrationVersion>([\S]*)</re:radiometricCalibrationVersion>[\s]*<re:geoCorrectionLevel>([\S\s^<]*)</re:geoCorrectionLevel>[\s]*<re:elevationCorrectionApplied>([\S]*)</re:elevationCorrectionApplied>[\s]*<re:atmosphericCorrectionApplied>([\w]*)</re:atmosphericCorrectionApplied>[\s]*<re:productAccuracy>([\S\s^<]*)</re:productAccuracy>",mxml_, re.I)
# for IRIS's data
if h7 is None:
......@@ -660,7 +660,7 @@ class METADATA(object):
["atmosphericCorrectionApplied", h7.group(5)]])
self.Quality.append(["geomProductAccuracy[m]:", str(round(float(h7.group(6)),1))]) #Estimated product horizontal CE90 uncertainty [m]
# additional. missing lines, suspectlines, binning, shifting, masking
# additional. missing lines, suspectlines, binning, shifting, masking
h81 = re.findall("<re:percentMissingLines>([^<]*)</re:percentMissingLines>", mxml_, re.I)
h82 = re.findall("<re:percentSuspectLines>([^<]*)</re:percentSuspectLines>", mxml_, re.I)
h83 = re.findall("<re:binning>([^<]*)</re:binning>", mxml_, re.I)
......@@ -1088,17 +1088,13 @@ class METADATA(object):
def Read_Sentinel2A_xmls(self):
"""Read metadata from Sentinel-2A generic xml and granule xml"""
# self.default_attr()
#TEST convert sceneID to granule Id
if self.SceneID and self.SceneID!=-9999:
res = DB_T.get_info_from_postgreSQLdb(job.conn_db_meta,'scenes','entityid',{'id':self.SceneID}) # FIXME
assert len(res) != 0, "Invalid ScenebID given - no corresponding scene with the ID=%s found in database.\n" % self.SceneID
assert len(res) == 1, "Error in database. The sceneid %s exists more than once. \n" % self.SceneID
S2AgranuleID = res[0][0]
else:
warnings.warn("Read_Sentinel2A_xmls(): Missing scene ID. "
"Using S2A_OPER_MSI_L1C_TL_MTI__20150813T201603_A000734_T32TQT_N01.03' instead.")
S2AgranuleID = 'S2A_OPER_MSI_L1C_TL_MTI__20150813T201603_A000734_T32TQT_N01.03'
assert self.SceneID is not None and self.SceneID!=-9999, "Read_Sentinel2A_xmls(): Missing scene ID. "
res = DB_T.get_info_from_postgreSQLdb(job.conn_db_meta,'scenes','entityid',{'id':self.SceneID})
assert len(res) != 0, \
"Invalid SceneID given - no corresponding scene with the ID=%s found in database.\n" % self.SceneID
assert len(res) == 1, "Error in database. The sceneid %s exists more than once. \n" % self.SceneID
S2AgranuleID = res[0][0]
if os.path.isdir(self.FolderOrArchive):
glob_res = glob.glob(os.path.join(self.FolderOrArchive,'S2A*.xml'))
......@@ -1107,7 +1103,7 @@ class METADATA(object):
xml_Scene_root = ET.parse(glob_res[0]).getroot() # xml_parser from file
glob_res = glob.glob(os.path.join(self.FolderOrArchive,'GRANULE/'+S2AgranuleID+'/S2A*.xml'))
assert len(glob_res)>0, 'No /GRANULE/<S2AgranuleID>/S2A*.xml file can be found in %s/!' %self.FolderOrArchive
self.Metafile = self.Metafile + ", " + glob_res[0] # FIXME Daniel nutzt du self.Metafile irgendwo???
self.Metafile = self.Metafile + ", " + glob_res[0]
xml_GR_root = ET.parse(glob_res[0]).getroot() # xml_parser from file
else: # archive
......@@ -1116,7 +1112,7 @@ class METADATA(object):
xml_str_, Metafile_ = HLP_F.open_specific_file_within_archive(self.FolderOrArchive,'*.SAFE/GRANULE/'+
S2AgranuleID+'/S2A*.xml')
xml_GR_root = ET.fromstring(xml_str_)
self.Metafile = self.Metafile + ", " + Metafile_ # FIXME Daniel nutzt du self.Metafile irgendwo???
self.Metafile = self.Metafile + ", " + Metafile_
# define Sentinel 2A metadata (hard coded)
......@@ -1180,10 +1176,8 @@ class METADATA(object):
#geometricResolution
self.gResolution = subsytem_Res_dic[self.Subsystem]
# determine metadata from extracted metadata values
self.EarthSunDist = self.get_EarthSunDistance(self.AcqDate)
# Solar irradiance, central wavelengths , full width half maximum per band
self.wvlUnit = 'Nanometers'
......@@ -1225,7 +1219,7 @@ class METADATA(object):
self.ViewingAngle_arrProv = meta_temp['viewing_zenith']
self.IncidenceAngle_arrProv = meta_temp['viewing_azimuth']
LBA2Id_dic ={'1':0,'2':1,'3':2,'4':3,'5':4,'6':5,'7':6,'8':7,'8A':8,'9':9,'10':10,'11':11,'12':12}
LBA2Id_dic = {'1':0,'2':1,'3':2,'4':3,'5':4,'6':5,'7':6,'8':7,'8A':8,'9':9,'10':10,'11':11,'12':12}
filter_dic = lambda AngleArr: {LBAn:AngleArr[LBA2Id_dic[LBAn]] for LBAn in self.LayerBandsAssignment}
self.ViewingAngle_arrProv = filter_dic(self.ViewingAngle_arrProv)
self.IncidenceAngle_arrProv = filter_dic(self.IncidenceAngle_arrProv)
......@@ -1233,7 +1227,7 @@ class METADATA(object):
# Quality flags # FIXME does not work (at least with old data)
Quality_temp = (xml_Scene_root.find(".//Technical_Quality_Assessment"))
Quality_temp.find("./DEGRADED_ANC_DATA_PERCENTAGE").text
Quality_temp.find ("./DEGRADED_ANC_DATA_PERCENTAGE").text
self.Quality.append(["DEGRADED_ANC_DATA_PERCENTAGE", Quality_temp.find("./DEGRADED_ANC_DATA_PERCENTAGE").text])
self.Quality.append(["DEGRADED_MSI_DATA_PERCENTAGE", Quality_temp.find("./DEGRADED_MSI_DATA_PERCENTAGE").text])
Quality_temp2 = xml_Scene_root.find(".//Quality_Inspections")
......@@ -1314,8 +1308,9 @@ class METADATA(object):
self.cols = rasObj.cols
self.bands = rasObj.bands
if rasObj.projection!='':
self.map_info = GEOP.geotransform2mapinfo(rasObj.geotransform,rasObj.projection)
self.projection = rasObj.projection
self.map_info = GEOP.geotransform2mapinfo(rasObj.geotransform,rasObj.projection)
self.projection = rasObj.projection
self.gResolution = abs(rasObj.geotransform[1])
dict_conv_physUnit = {'Rad':"W * m-2 * sr-1 * micrometer-1",
'Ref':'TOA_Reflectance in [0-10000]',
'Temp':'Degrees Celsius with scale factor = 100'}
......@@ -1382,7 +1377,11 @@ class METADATA(object):
Meta['ThermalConstK2'] = self.ThermalConstK2
Meta['EarthSunDist'] = self.EarthSunDist
Meta['ViewingAngle'] = self.ViewingAngle
if self.ViewingAngle_arrProv is not None:
Meta['ViewingAngle_arrProv'] = {k:v.tolist() for k,v in self.ViewingAngle_arrProv.items()}
Meta['IncidenceAngle'] = self.IncidenceAngle
if self.IncidenceAngle_arrProv is not None:
Meta['IncidenceAngle_arrProv'] = {k:v.tolist() for k,v in self.IncidenceAngle_arrProv.items()}
Meta['FieldOfView'] = self.FOV
Meta['PhysUnit'] = self.PhysUnit
Meta['Quality'] = self.Quality
......
......@@ -21,6 +21,7 @@ import multiprocessing
import socket
import psycopg2
import collections
import warnings
import osr
os.environ['DISPLAY'] = '127.0.0.0:0.0'
......@@ -87,17 +88,17 @@ class job:
path_archive = joinP(path_fileserver, 'database/sampledata/')
elif GMS_call_type == 'webapp':
conn_database = "dbname='usgscache' user='gmsdb' password='gmsdb' host='geoms' connect_timeout=3 options=" \
"'-c statement_timeout=10000'" # 10sec
"'-c statement_timeout=15000'" # 15sec
conn_db_meta = conn_database
path_fileserver = query_cfg(conn_db_meta, 'path_data_root')
path_procdata = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_procdata'))
path_archive = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_download'))
path_earthSunDist = absP(query_cfg(conn_db_meta, 'path_earthSunDist'))
path_earthSunDist = '/home/gfz-fe/GeoMultiSens/database/earth_sun_distance/Earth_Sun_distances_per_day_edited.csv' # FIXME!!
#path_earthSunDist = '/home/gfz-fe/GeoMultiSens/database/earth_sun_distance/Earth_Sun_distances_per_day_edited.csv' # FIXME!!
path_SRFs = absP(query_cfg(conn_db_meta, 'path_SRFs'))
path_cloud_classif = absP(query_cfg(conn_db_meta, 'path_cloud_classif'))
path_solar_irr = absP(query_cfg(conn_db_meta, 'path_solar_irr'))
path_solar_irr = '/home/gfz-fe/GeoMultiSens/database/solar_irradiance/SUNp1fontenla__350-2500nm_@0.1nm_converted.txt' # FIXME!!
#path_solar_irr = '/home/gfz-fe/GeoMultiSens/database/solar_irradiance/SUNp1fontenla__350-2500nm_@0.1nm_converted.txt' # FIXME!!
path_testing = absP(query_cfg(conn_db_meta, 'path_testing'))
path_benchmarks = absP(query_cfg(conn_db_meta, 'path_benchmarks'))
path_job_logs = absP(query_cfg(conn_db_meta, 'path_job_logs'))
......@@ -107,13 +108,19 @@ class job:
# processor configuration: [run processor, write output]
exec__L1AP = [1, 1]
exec__L1BP = [0, 1]
exec__L1CP = [0, 1]
exec__L1BP = [1, 1]
exec__L1CP = [1, 1]
exec__L1DP = [1, 1]
exec__L2AP = [0, 0]
exec__L2BP = [0, 0]
exec__L2CP = [0, 0]
exec__L2DP = [0, 0]
exec__L2AP = [1, 1]
exec__L2BP = [0, 1]
exec__L2CP = [0, 1]
exec__L2DP = [0, 1]
if exec_mode=='Python':
for i in ['L1AP','L1BP','L1CP','L2AP','L2BP','L2CP','L2DP']:
if locals()['exec__%s' %i][1]==0:
warnings.warn("If job.exec_mode is set to 'Python' the output writer for %s has to be enabled because "
"any operations on GMS_obj.arr read the intermediate results from disk. Turning it on.." %i)
locals()['exec__%s' % i][1] = 1
assert os.path.isdir(job.path_archive), "Given archive folder '%s' does not exist. Execution stopped" % job.path_archive
if not os.path.isdir(job.path_job_logs): os.makedirs(job.path_job_logs)
......@@ -150,7 +157,7 @@ class usecase:
datasetid_spatial_ref = query_job(job.conn_db_meta, 'datasetid_spatial_ref')
#conversion_type_optical = 'Rad' # 'Rad' / 'Ref' # FIXME
#conversion_type_thermal = 'Temp' # 'Rad' / 'Temp' # FIXME
scale_factor_TOARef = 10000 # TODO -> Datenbank
scale_factor_TOARef = int(query_cfg(job.conn_db_meta, 'scale_factor_TOARef'))
align_coord_grids = 0 # FIXME: may become obsolete later, since joint evaluation of multisensor data incl.
......
......@@ -14,7 +14,6 @@
from spectral.io import envi as envi
import numpy as np
import os
import sys
import json
import spectral
import collections
......@@ -23,7 +22,7 @@ import dill
import builtins
import warnings
import scipy.interpolate
import gms_io.Output_writer as OUT_W
import algorithms.METADATA_BD as META
import misc.database_tools as DB_T
import misc.path_generator as PG
......@@ -31,94 +30,57 @@ job = builtins.GMS_config.job # read from builtins (set by process_controller)
# + misc.helper_functions.setup_logger (left out here in order to avoid circular dependencies)
########################### core functions ####################################