Commit 9142f468 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

First prototype of algorithms for geometrical and spectral resolution working...

First prototype of algorithms for geometrical and spectral resolution working in map-reduce context (L2A, L2B).
GEOP:
    - renamed GEOPROCESSING_BD.py to GEOPROCESSING.py
    - moved get_prjLonLat(), get_proj4info(), corner_coord_to_minmax() to GEOP
    - added docstrings to DN2Rad(), DN2TOARef(), TOARad2Kelvin_fastforward(), DN2DegreesCelsius_fastforward()
L1A_P:
    - L1A_object.fill_arr_from_disk(): further silencing of console outputs
L1B_P:
    - moved get_DESHIFTER_configs() and class DESHIFTER() to L2A_P
    - adjusted initial values for COREG attributes related to reference image (not None anymore in order to make L2A_P work if shift calculation failed)
    - increased database statement timeouts for queries within get_reference_image_params() to 25 seconds
    - L1B_object():
        - added attribute "deshift_results"
        - removed deprecated code
        - added join_deshift_results()
        - revised apply_deshift_results()
L2A_P:
    -  added get_DESHIFTER_configs() and class DESHIFTER() from L1B_P
    - fixed two bugs in DESHIFTER.correct_shifts() where DESHIFTER.band2process was not respected and whole image cube was read instead of only one band
    - added class L2A_object()
L2B_P:
    - added class L2B_object()
    - L2B_object():
        - added interpolate_cube_linear()
        - added spectral_homogenization()
META:
    - renamed METADATA_BD.py to METADATA.py
INP_R:
    - added quiet mode to read_ENVIfile()
OUT_W:
    - added enviHdr_keyOrder using list from reorder_envi_header()
    - fixed a bug in reorder_ENVI_header() that caused repetitions of header keys
    - adjusted print_dict within mask_to_ENVI_Classification() in order to also support L2A and L2B
HLP_F:
    - added parent objects for L2A and L2B in parentObjDict
    - added type hints to cut_GMS_obj_into_blocks() and merge_GMS_tiles_to_GMS_obj()
GITIGNORE:
    - updated .gitignore file
CFG:
    - added virtual_sensor_id, datasetid_spectral_ref, target_CWL, target_FWHM to usecase class by querying the database
PC:
    - added type hints to mapper functions
    - revised L2A_map_2()
    - added L2B_map_1()
    - revised/added L2A algorithm calls (only Flink mode is supported so far)
    - added L2B algorithm calls (only Flink mode is supported so far)
pgDB:
    - added Sentinel-2A virtual sensors to virtual_sensors table (different spatial resolutions)
    - added wavelengths positions and band widths to virtual_sensors table
parent 8abcf2ed
...@@ -2,19 +2,8 @@ ...@@ -2,19 +2,8 @@
.idea/ .idea/
BAK/ BAK/
OLD/ OLD/
database/old/
database/cloud_classifier/ database/cloud_classifier/
database/sampledata/ database/sampledata/
database/metadata/ database/metadata/
database/processed_data/ database/processed_data/
testing/out/
algorithms/OLD/
*.pyc
gms_io/robert/
gms_io/robert UNIX-Format konvertiert/
gms_io/robert ALT/
gms_io/landsat_downloader/
algorithms/METADATA.py
...@@ -210,7 +210,7 @@ class GEOPROCESSING(object): ...@@ -210,7 +210,7 @@ class GEOPROCESSING(object):
self.originY = self.geotransform[3] self.originY = self.geotransform[3]
self.pixelWidth = self.geotransform[1] self.pixelWidth = self.geotransform[1]
self.pixelHeight = self.geotransform[5] self.pixelHeight = self.geotransform[5]
self.rot1 = self.geotransform[2] self.rot1 = self.geotransform[2] # FIXME check
self.rot2 = self.geotransform[4] self.rot2 = self.geotransform[4]
self.extent = [self.originX, self.originY, self.originX + (self.cols * self.pixelWidth), self.extent = [self.originX, self.originY, self.originX + (self.cols * self.pixelWidth),
self.originY + (self.rows * self.pixelHeight)] # [ulx, uly, lrx, lry] self.originY + (self.rows * self.pixelHeight)] # [ulx, uly, lrx, lry]
...@@ -318,8 +318,8 @@ class GEOPROCESSING(object): ...@@ -318,8 +318,8 @@ class GEOPROCESSING(object):
self.update_dataset_related_attributes() self.update_dataset_related_attributes()
def DN2TOARefOLI(self, offsetsRef, gainsRef, zenith, outPath=None, fill=None, zero=None, saturated=None, cutNeg=True, def DN2TOARefOLI(self, offsetsRef, gainsRef, zenith, outPath=None, fill=None, zero=None, saturated=None,
optshift=None, v=0): cutNeg=True, optshift=None, v=0):
"""----METHOD_3a---------------------------------------------------------- """----METHOD_3a----------------------------------------------------------
converts OLI DN data to TOA Reflectance. http://landsat.usgs.gov/Landsat8_Using_Product.php converts OLI DN data to TOA Reflectance. http://landsat.usgs.gov/Landsat8_Using_Product.php
...@@ -3143,6 +3143,38 @@ def get_corner_coordinates(gdal_ds=None, gt=None, cols=None, rows=None): ...@@ -3143,6 +3143,38 @@ def get_corner_coordinates(gdal_ds=None, gt=None, cols=None, rows=None):
gdal_ds_GT = None gdal_ds_GT = None
return ext return ext
def get_prjLonLat(fmt='wkt'):
    # type: (str) -> Any
    """Return the standard geographic projection (EPSG 4326) as a WKT or PROJ4 string.
    :param fmt:     <str> target format - 'WKT' or 'PROJ4' (case-insensitive)
    """
    wants_wkt = bool(re.search('wkt', fmt, re.I))
    wants_proj4 = bool(re.search('Proj4', fmt, re.I))
    assert wants_wkt or wants_proj4, 'unsupported output format'
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)
    if wants_wkt:
        return srs.ExportToWkt()
    return srs.ExportToProj4()
def get_proj4info(ds=None, proj=None):
    # type: (gdal.Dataset,str) -> str
    """Return PROJ4 formatted projection info for the given gdal.Dataset or projection string,
    e.g. '+proj=utm +zone=43 +datum=WGS84 +units=m +no_defs '
    :param ds:      <gdal.Dataset> the gdal dataset to get PROJ4 info for
    :param proj:    <str> the projection (WKT) to get PROJ4 formatted info for
    """
    assert ds or proj, "Specify at least one of the arguments 'ds' or 'proj'"
    # the dataset's own projection takes precedence over the explicit string
    wkt = ds.GetProjection() if ds else proj
    srs = osr.SpatialReference()
    srs.ImportFromWkt(wkt)
    return srs.ExportToProj4()
def corner_coord_to_minmax(corner_coords):
    # type: (list) -> (float,float,float,float)
    """Converts a list of corner coordinates like [[x1,y1],[x2,y2],[]...] to (xmin,xmax,ymin,ymax).
    NOTE: coordinates are no longer truncated to int - the previous int() cast silently
    dropped sub-pixel (and, for lon/lat input, sub-degree) precision.
    :param corner_coords:   list of coordinates like [[x1,y1],[x2,y2],[]...]
    :return:                tuple (xmin, xmax, ymin, ymax)
    """
    x_vals = [i[0] for i in corner_coords]
    y_vals = [i[1] for i in corner_coords]
    xmin, xmax, ymin, ymax = min(x_vals), max(x_vals), min(y_vals), max(y_vals)
    return xmin, xmax, ymin, ymax
def get_footprint_polygon(CornerLonLat): def get_footprint_polygon(CornerLonLat):
""" Converts a list of coordinates into a shapely polygon object. """ Converts a list of coordinates into a shapely polygon object.
:param CornerLonLat: a list of coordinate tuples like [[lon,lat], [lon. lat], ..] :param CornerLonLat: a list of coordinate tuples like [[lon,lat], [lon. lat], ..]
...@@ -3169,6 +3201,21 @@ def get_overlap_polygon(poly1, poly2): ...@@ -3169,6 +3201,21 @@ def get_overlap_polygon(poly1, poly2):
return {'overlap poly':None, 'overlap percentage':0, 'overlap area':0} return {'overlap poly':None, 'overlap percentage':0, 'overlap area':0}
def DN2Rad(ndarray, offsets, gains, inFill=None, inZero=None, inSaturated=None, cutNeg=True): def DN2Rad(ndarray, offsets, gains, inFill=None, inZero=None, inSaturated=None, cutNeg=True):
#type: (np.ndarray,list,list,int,int,int,bool) -> np.ndarray
"""Convert DN to Radiance [W * m-2 * sr-1 * micrometer-1]
!!! InputGains and Offsets should be in [W * m-2 * sr-1 * micrometer-1]
:param ndarray: <np.ndarray> array of DNs to be converted into radiance
:param offsets: [W * m-2 * sr-1 * micrometer-1]:
list that includes the offsets of the individual rasterbands [offset_band1, offset_band2,
... ,offset_bandn] or optional input as number if Dataset has only 1 Band
:param gains: [W * m-2 * sr-1 * micrometer-1]:
list that includes the gains of the individual rasterbands [gain_band1, gain_band2, ... ,
gain_bandn] or optional input as number if Dataset has only 1 Band
:param inFill: pixelvalues allocated to background/dummy/fill pixels
:param inZero: pixelvalues allocated to zero radiance
:param inSaturated: pixelvalues allocated to saturated pixels
:param cutNeg: cutNegvalues -> all negative values set to 0
"""
assert isinstance(offsets,list) and isinstance(gains,list), \ assert isinstance(offsets,list) and isinstance(gains,list), \
"Offset and Gain parameters have to be provided as two lists containing gains and offsets for \ "Offset and Gain parameters have to be provided as two lists containing gains and offsets for \
each band in ascending order. Got offsets as type '%s' and gains as type '%s'." %(type(offsets),type(gains)) each band in ascending order. Got offsets as type '%s' and gains as type '%s'." %(type(offsets),type(gains))
...@@ -3200,6 +3247,25 @@ def DN2Rad(ndarray, offsets, gains, inFill=None, inZero=None, inSaturated=None, ...@@ -3200,6 +3247,25 @@ def DN2Rad(ndarray, offsets, gains, inFill=None, inZero=None, inSaturated=None,
def DN2TOARef(ndarray, offsets, gains, irradiances, zenith, earthSunDist, def DN2TOARef(ndarray, offsets, gains, irradiances, zenith, earthSunDist,
inFill=None, inZero=None, inSaturated=None, cutNeg=True): inFill=None, inZero=None, inSaturated=None, cutNeg=True):
#type: (np.ndarray,list,list,list,float,float,int,int,int,bool) -> np.ndarray
"""Converts DN data to TOA Reflectance.
:param ndarray: <np.ndarray> array of DNs to be converted into TOA reflectance
:param offsets: list: of offsets of each rasterband [W * m-2 * sr-1 * micrometer-1]
[offset_band1, offset_band2, ... ,offset_bandn] or optional as number if Dataset has
only 1 Band
:param gains: list: of gains of each rasterband [W * m-2 * sr-1 * micrometer-1]
[gain_band1, gain_band2, ... ,gain_bandn] or optional as number if Dataset has
only 1 Band
:param irradiances: list: of irradiance of each band [W * m-2 * micrometer-1]
[irradiance_band1, irradiance_band2, ... ,irradiance_bandn]
:param zenith: number: sun zenith angle
:param earthSunDist: earth-sun- distance for a certain day
:param inFill: number: pixelvalues allocated to background/dummy/fill pixels
:param inZero: number: pixelvalues allocated to zero radiance
:param inSaturated: number: pixelvalues allocated to saturated pixles
:param cutNeg: bool: if true. all negative values turned to zero. default: True
:return: Int16 TOA_Reflectance in [0-10000]
"""
assert isinstance(offsets,list) and isinstance(gains,list) and isinstance(irradiances, list), \ assert isinstance(offsets,list) and isinstance(gains,list) and isinstance(irradiances, list), \
"Offset, Gain, Irradiance parameters have to be provided as three lists containing gains, offsets and " \ "Offset, Gain, Irradiance parameters have to be provided as three lists containing gains, offsets and " \
"irradiance for each band in ascending order. Got offsets as type '%s', gains as type '%s' and irradiance as " \ "irradiance for each band in ascending order. Got offsets as type '%s', gains as type '%s' and irradiance as " \
...@@ -3231,7 +3297,16 @@ def DN2TOARef(ndarray, offsets, gains, irradiances, zenith, earthSunDist, ...@@ -3231,7 +3297,16 @@ def DN2TOARef(ndarray, offsets, gains, irradiances, zenith, earthSunDist,
def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZero=None, inSaturated=None): def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZero=None, inSaturated=None):
# type: (np.ndarray,list,list,float,int,int,int) -> np.ndarray # type: (np.ndarray,list,list,float,int,int,int) -> np.ndarray
"""Convert top-of-atmosphere radiances of thermal bands to temperatures in Kelvin
by applying the inverse of the Planck function.
:param ndarray: <np.ndarray> array of TOA radiance values to be converted into Kelvin
:param K1:
:param K2:
:param emissivity:
:param inFill:
:param inZero:
:param inSaturated:
"""
bands = 1 if len(ndarray.shape)==2 else ndarray.shape[2] bands = 1 if len(ndarray.shape)==2 else ndarray.shape[2]
for arg,argname in zip([K1,K2],['K1', 'K2']): for arg,argname in zip([K1,K2],['K1', 'K2']):
assert isinstance(arg[0],float) or isinstance(arg[0],int), "TOARad2Kelvin_fastforward: Expected float or " \ assert isinstance(arg[0],float) or isinstance(arg[0],int), "TOARad2Kelvin_fastforward: Expected float or " \
...@@ -3261,7 +3336,19 @@ def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZ ...@@ -3261,7 +3336,19 @@ def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZ
def DN2DegreesCelsius_fastforward(ndarray,offsets, gains, K1, K2, emissivity=0.95, def DN2DegreesCelsius_fastforward(ndarray,offsets, gains, K1, K2, emissivity=0.95,
inFill=None, inZero=None, inSaturated=None): inFill=None, inZero=None, inSaturated=None):
"""Convert thermal DNs to temperatures in degrees Celsius
by calculating TOARadiance and applying the inverse of the Planck function.
:param ndarray: <np.ndarray> array of DNs to be converted into Degrees Celsius
:param offsets:
:param gains:
:param K1:
:param K2:
:param emissivity:
:param inFill:
:param inZero:
:param inSaturated:
"""
bands = 1 if len(ndarray.shape)==2 else ndarray.shape[2] bands = 1 if len(ndarray.shape)==2 else ndarray.shape[2]
for arg,argname in zip([offsets,gains,K1,K2],['Offset', 'Gain','K1','K2']): for arg,argname in zip([offsets,gains,K1,K2],['Offset', 'Gain','K1','K2']):
assert isinstance(offsets,list) and isinstance(gains,list), \ assert isinstance(offsets,list) and isinstance(gains,list), \
...@@ -3721,17 +3808,4 @@ def get_subsetProps_from_subsetArg(shape_fullArr,subset): ...@@ -3721,17 +3808,4 @@ def get_subsetProps_from_subsetArg(shape_fullArr,subset):
return collections.OrderedDict(zip( return collections.OrderedDict(zip(
['rows','cols','bands','rowStart','rowEnd','colStart','colEnd','bandStart','bandEnd','bandsList'], ['rows','cols','bands','rowStart','rowEnd','colStart','colEnd','bandStart','bandEnd','bandsList'],
[ rows , cols , bands , rowStart , rowEnd , colStart , colEnd , bandStart , bandEnd , bandsList ])) [ rows , cols , bands , rowStart , rowEnd , colStart , colEnd , bandStart , bandEnd , bandsList ]))
\ No newline at end of file
...@@ -24,7 +24,7 @@ from gms_io import Input_reader as INP_R ...@@ -24,7 +24,7 @@ from gms_io import Input_reader as INP_R
from misc import helper_functions as HLP_F from misc import helper_functions as HLP_F
from misc import database_tools as DB_T from misc import database_tools as DB_T
from misc import path_generator as PG from misc import path_generator as PG
from algorithms.METADATA_BD import get_LayerBandsAssignment from algorithms.METADATA import get_LayerBandsAssignment
########################### core functions #################################### ########################### core functions ####################################
def get_entity_IDs_within_AOI(): # called in console mode def get_entity_IDs_within_AOI(): # called in console mode
......
...@@ -36,8 +36,8 @@ import matplotlib.pyplot as plt ...@@ -36,8 +36,8 @@ import matplotlib.pyplot as plt
from pyhdf import SD from pyhdf import SD
from spectral.io import envi from spectral.io import envi
from algorithms import METADATA_BD as META from algorithms import METADATA as META
from algorithms import GEOPROCESSING_BD as GEOP from algorithms import GEOPROCESSING as GEOP
from algorithms import gms_cloud_classifier as CLD_P # Cloud Processor from algorithms import gms_cloud_classifier as CLD_P # Cloud Processor
from algorithms import py_tools_ah from algorithms import py_tools_ah
from gms_io import envifilehandling_BD as ef from gms_io import envifilehandling_BD as ef
...@@ -194,11 +194,11 @@ class L1A_object(object): ...@@ -194,11 +194,11 @@ class L1A_object(object):
path_masks = PG_obj.get_path_maskdata() path_masks = PG_obj.get_path_maskdata()
path_maskClouds = PG_obj.get_path_cloudmaskdata() path_maskClouds = PG_obj.get_path_cloudmaskdata()
if job.exec_mode=='Flink': if job.exec_mode=='Flink':
self.arr = INP_R.read_ENVIfile(path_arr,self.arr_shape,self.arr_pos,self.logger) self.arr = INP_R.read_ENVIfile(path_arr,self.arr_shape,self.arr_pos,self.logger,q=1)
self.mask_1bit = INP_R.read_mask_subset(path_masks,'mask_1bit', self.logger,tuple_GMS_subset[1]) self.mask_1bit = INP_R.read_mask_subset(path_masks,'mask_1bit', self.logger,tuple_GMS_subset[1])
self.mask_clouds = INP_R.read_mask_subset(path_masks,'mask_clouds',self.logger,tuple_GMS_subset[1]) self.mask_clouds = INP_R.read_mask_subset(path_masks,'mask_clouds',self.logger,tuple_GMS_subset[1])
if self.arr_pos: self.logger.info('Reading file: %s @ position %s' %(self.baseN, self.arr_pos)) self.log_for_fullArr_or_firstTile(self.logger,'Reading file %s as tiles...' %self.baseN \
else: self.logger.info('Reading file: %s' %self.baseN) if self.arr_pos else 'Reading file %s...' %self.baseN)
#self.masks is only needed by Output writer to masks combined -> generated there and on demand #self.masks is only needed by Output writer to masks combined -> generated there and on demand
else: # job.exec_mode=='Python' else: # job.exec_mode=='Python'
self.arr = path_arr self.arr = path_arr
......
This diff is collapsed.
...@@ -29,7 +29,7 @@ except ImportError: ...@@ -29,7 +29,7 @@ except ImportError:
import osr import osr
from misc import helper_functions as HLP_F from misc import helper_functions as HLP_F
from algorithms import GEOPROCESSING_BD as GEOP from algorithms import GEOPROCESSING as GEOP
from gms_io import Input_reader as INP_R from gms_io import Input_reader as INP_R
from misc import path_generator as PG from misc import path_generator as PG
from algorithms.L1B_P import L1B_object from algorithms.L1B_P import L1B_object
......
This diff is collapsed.
...@@ -4,4 +4,39 @@ Created on Fri Jan 16 11:57:31 2015 ...@@ -4,4 +4,39 @@ Created on Fri Jan 16 11:57:31 2015
@author: danschef @author: danschef
""" """
import builtins
import numpy as np
from scipy.interpolate import interp1d
from misc import helper_functions as HLP_F
from algorithms.L2A_P import L2A_object
usecase = builtins.GMS_config.usecase
class L2B_object(L2A_object):
    """L2B processing object: spectrally homogenizes an L2A object to the target
    central wavelengths of the configured virtual sensor (usecase.target_CWL)."""
    def __init__(self, L2A_obj):
        # type: (L2A_object) -> None
        super().__init__(None)
        # copy every attribute of the given L2A object onto this instance
        if L2A_obj: [setattr(self, key, value) for key,value in L2A_obj.__dict__.items()]
        self.proc_level = 'L2B'
        # no logger without a source object (e.g. when instantiated as an empty parent object)
        self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1) if L2A_obj else None
    def spectral_homogenization(self, subset=None, kind='linear'):
        # type: (object, str) -> None
        """Interpolate self.arr to the virtual sensor's target wavelength positions and
        update the corresponding metadata ('wavelength', 'bands', ...).
        :param subset:  currently unused - TODO confirm intended use
        :param kind:    interpolation kind; only 'linear' is supported so far
        """
        tgt_cwls = usecase.target_CWL # FIXME exclude or include thermal bands; respect sorted CWLs in context of LayerBandsAssignment
        assert kind in ['linear',], "%s is not a supported kind of homogenization." %kind
        self.log_for_fullArr_or_firstTile(self.logger,'Performing spectral homogenization (%s) with target wavelength '
            'positions at %s nm.' %(kind,', '.join(np.array(tgt_cwls[:-1]).astype(str))+' and %s' %tgt_cwls[-1]))
        # only in-memory arrays are supported (Flink mode); disk-backed paths are not handled yet
        if isinstance(self.arr,np.ndarray):
            if kind=='linear': self.arr = self.interpolate_cube_linear(tgt_cwls)
            self.meta['wavelength'] = list(tgt_cwls)
            self.meta['bands'] = len(tgt_cwls)
            self.meta['LayerBandsAssignment'] = [] # TODO
            del self.meta['band names'] # TODO
        else:
            raise NotImplementedError('got %s' %type(self.arr))
    def interpolate_cube_linear(self, target_CWLs):
        # type: (list) -> np.ndarray
        """Linearly interpolate the spectral axis (axis 2) of self.arr from the wavelengths
        in self.meta['wavelength'] to the given target wavelengths.
        :param target_CWLs: <list> target central wavelength positions
        :return:            <np.ndarray> interpolated image cube, cast to int16
        """
        orig_CWLs, target_CWLs = np.array(self.meta['wavelength']), np.array(target_CWLs)
        # fill_value='extrapolate' extends linearly beyond the original wavelength range
        outarr = interp1d(np.array(orig_CWLs), self.arr, axis=2, kind='linear', fill_value='extrapolate')(target_CWLs)
        outarr = outarr.astype(np.int16)
        return outarr
\ No newline at end of file
...@@ -33,7 +33,7 @@ except ImportError: import osr ...@@ -33,7 +33,7 @@ except ImportError: import osr
from gms_io import envifilehandling_BD as ef from gms_io import envifilehandling_BD as ef
from algorithms import GEOPROCESSING_BD as GEOP from algorithms import GEOPROCESSING as GEOP
from misc import helper_functions as HLP_F from misc import helper_functions as HLP_F
from misc import database_tools as DB_T from misc import database_tools as DB_T
......
...@@ -43,13 +43,14 @@ def get_info_from_postgreSQLdb(conn_params,tablename,vals2return,cond_dict,recor ...@@ -43,13 +43,14 @@ def get_info_from_postgreSQLdb(conn_params,tablename,vals2return,cond_dict,recor
query_cfg = lambda conn_param, key: get_info_from_postgreSQLdb(conn_param,'config','value',{'key': "'%s'" % key})[0][0] query_cfg = lambda conn_param, key: get_info_from_postgreSQLdb(conn_param,'config','value',{'key': "'%s'" % key})[0][0]
query_job = lambda conn_param, col: get_info_from_postgreSQLdb(conn_param,'scenes_jobs',col,{'id':GMS_process_ID})[0][0] query_job = lambda conn_param, col: get_info_from_postgreSQLdb(conn_param,'scenes_jobs',col,{'id':GMS_process_ID})[0][0]
query_vir = lambda conn_param, col, VSID: get_info_from_postgreSQLdb(conn_param, 'virtual_sensors', col, {'id':VSID})[0][0]
absP, joinP = lambda x: os.path.abspath(x), lambda *x: os.path.join(*x) absP, joinP = lambda x: os.path.abspath(x), lambda *x: os.path.join(*x)
class job: class job:
ID = GMS_process_ID ID = GMS_process_ID
call_type = GMS_call_type call_type = GMS_call_type
exec_mode = 'Python' exec_mode = 'Flink'
#exec_mode = 'Python' #exec_mode = 'Python'
assert exec_mode in ['Flink','Python'] assert exec_mode in ['Flink','Python']
start_time = str(datetime.datetime.now()) start_time = str(datetime.datetime.now())
...@@ -109,8 +110,8 @@ class job: ...@@ -109,8 +110,8 @@ class job:
exec__L1AP = [1, 1] exec__L1AP = [1, 1]
exec__L1BP = [1, 1] exec__L1BP = [1, 1]
exec__L1CP = [1, 1] exec__L1CP = [1, 1]
exec__L2AP = [0, 1] exec__L2AP = [1, 1]
exec__L2BP = [0, 1] exec__L2BP = [1, 1]
exec__L2CP = [0, 1] exec__L2CP = [0, 1]
exec__L2DP = [0, 1] exec__L2DP = [0, 1]
if exec_mode=='Python': if exec_mode=='Python':
...@@ -145,6 +146,10 @@ class usecase: ...@@ -145,6 +146,10 @@ class usecase:
conversion_type_optical = 'Ref' # 'Rad' / 'Ref' conversion_type_optical = 'Ref' # 'Rad' / 'Ref'
conversion_type_thermal = 'Rad' # 'Rad' / 'Temp' conversion_type_thermal = 'Rad' # 'Rad' / 'Temp'
scale_factor_TOARef = 10000 scale_factor_TOARef = 10000
virtual_sensor_id = 10 # Sentinel-2A 10m
datasetid_spectral_ref = 249 # Sentinel-2A
target_CWL = []
target_FWHM = []
elif GMS_call_type == 'webapp': elif GMS_call_type == 'webapp':
#skip_thermal = int(query_cfg(job.conn_db_meta, 'skip_thermal')) #skip_thermal = int(query_cfg(job.conn_db_meta, 'skip_thermal'))
skip_thermal = True skip_thermal = True
...@@ -153,14 +158,20 @@ class usecase: ...@@ -153,14 +158,20 @@ class usecase:
conversion_type_optical = query_cfg(job.conn_db_meta, 'conversion_type_optical') conversion_type_optical = query_cfg(job.conn_db_meta, 'conversion_type_optical')
conversion_type_thermal = query_cfg(job.conn_db_meta, 'conversion_type_thermal') conversion_type_thermal = query_cfg(job.conn_db_meta, 'conversion_type_thermal')
datasetid_spatial_ref = query_job(job.conn_db_meta, 'datasetid_spatial_ref') datasetid_spatial_ref = query_job(job.conn_db_meta, 'datasetid_spatial_ref')
virtual_sensor_id = query_job(job.conn_db_meta, 'virtualsensorid')
virtual_sensor_id = virtual_sensor_id if virtual_sensor_id!=-1 else 10 # Sentinel-2A 10m
datasetid_spectral_ref = query_vir(job.conn_db_meta, 'spectral_characteristics_datasetid', virtual_sensor_id)
target_CWL = query_vir(job.conn_db_meta, 'wavelengths_pos', virtual_sensor_id)
target_FWHM = query_vir(job.conn_db_meta, 'band_width', virtual_sensor_id)
#conversion_type_optical = 'Rad' # 'Rad' / 'Ref' # FIXME #conversion_type_optical = 'Rad' # 'Rad' / 'Ref' # FIXME
#conversion_type_thermal = 'Temp' # 'Rad' / 'Temp' # FIXME #conversion_type_thermal = 'Temp' # 'Rad' / 'Temp' # FIXME
scale_factor_TOARef = int(query_cfg(job.conn_db_meta, 'scale_factor_TOARef')) scale_factor_TOARef = int(query_cfg(job.conn_db_meta, 'scale_factor_TOARef'))
align_coord_grids = 0 # FIXME: könnte später sinnlos werden, da gemeinsame Auswertung von Multisensordaten inkl. align_coord_grids = 1 # FIXME: könnte später sinnlos werden, da gemeinsame Auswertung von Multisensordaten inkl.
# FIXME: Zeitreihen ein grid aligning voraussetzt # FIXME: Zeitreihen ein grid aligning voraussetzt
userInp_target_gsd = 30 # [meters], overridden if match_gsd==True userInp_target_gsd = 20 # [meters], overridden if match_gsd==True
match_gsd = True match_gsd = True
userdef_EPSG = None # FIXME nutzerdefinierte Einstellung: int(query(job.conn_db_meta,'userdef_EPSG')) userdef_EPSG = None # FIXME nutzerdefinierte Einstellung: int(query(job.conn_db_meta,'userdef_EPSG'))
......
...@@ -23,16 +23,16 @@ import builtins ...@@ -23,16 +23,16 @@ import builtins
import warnings import warnings
import scipy.interpolate import scipy.interpolate
import algorithms.METADATA_BD as META from algorithms import METADATA as META
import misc.database_tools as DB_T from misc import database_tools as DB_T
import misc.path_generator as PG from misc import path_generator as PG
job = builtins.GMS_config.job # read from builtins (set by process_controller) job = builtins.GMS_config.job # read from builtins (set by process_controller)
# + misc.helper_functions.setup_logger (left out here in order to avoid circular dependencies) # + misc.helper_functions.setup_logger (left out here in order to avoid circular dependencies)
########################### core functions #################################### ########################### core functions ####################################
def read_ENVIfile(path,arr_shape,arr_pos,logger=None, return_meta=False): def read_ENVIfile(path,arr_shape,arr_pos,logger=None, return_meta=False, q=0):
hdr_path = os.path.splitext(path)[0] + '.hdr' if not os.path.splitext(path)[1] == '.hdr' else path hdr_path = os.path.splitext(path)[0] + '.hdr' if not os.path.splitext(path)[1] == '.hdr' else path
return read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=logger,return_meta=return_meta) return read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=logger,return_meta=return_meta,q=q)
def read_ENVIhdr_to_dict(hdr_path, logger=None): def read_ENVIhdr_to_dict(hdr_path, logger=None):
# type: (str,logging.logger) -> dict # type: (str,logging.logger) -> dict
...@@ -107,17 +107,16 @@ def read_mask_subset(path_masks,bandname,logger,subset=None): ...@@ -107,17 +107,16 @@ def read_mask_subset(path_masks,bandname,logger,subset=None):
def GMSfile2dict(path_GMSfile): def GMSfile2dict(path_GMSfile):
""" Converts a JSON file (like the GMS file) to a Python dictionary with keys and values. """ Converts a JSON file (like the GMS file) to a Python dictionary with keys and values.
:param path_GMSfile: :param path_GMSfile: absolute path on disk
absolute path on disk :return: the corresponding Python dictionary
:return:
the corresponding Python dictionary
""" """
return json.load(open(path_GMSfile)) return json.load(open(path_GMSfile))
def unify_envi_header_keys(header_dict): def unify_envi_header_keys(header_dict):
"""Ensures the compatibility of ENVI header keys written by Spectral-Python the code internal attribute names. """Ensures the compatibility of ENVI header keys written by Spectral-Python the code internal attribute names.
(ENVI header keys are always lowercase in contrast to the attribute names used in code). (ENVI header keys are always lowercase in contrast to the attribute names used in code).
:param header_dict: """ :param header_dict:
"""
refkeys = ['AcqDate', 'AcqTime', 'Additional', 'FieldOfView', 'IncidenceAngle', 'Metafile', 'PhysUnit', refkeys = ['AcqDate', 'AcqTime', 'Additional', 'FieldOfView', 'IncidenceAngle', 'Metafile', 'PhysUnit',
'ProcLCode', 'Quality', 'Satellite', 'Sensor', 'SunAzimuth', 'SunElevation', 'ViewingAngle'] 'ProcLCode', 'Quality', 'Satellite', 'Sensor', 'SunAzimuth', 'SunElevation', 'ViewingAngle']
unified_header_dict = header_dict unified_header_dict = header_dict
......
...@@ -24,7 +24,6 @@ import datetime ...@@ -24,7 +24,6 @@ import datetime
import ogr import ogr
import osr import osr
import builtins import builtins
import time
from itertools import chain from itertools import chain
from misc import helper_functions as HLP_F from misc import helper_functions as HLP_F
...@@ -33,12 +32,21 @@ from misc import path_generator as PG ...@@ -33,12 +32,21 @@ from misc import path_generator as PG
from gms_io import Input_reader as INP_R from gms_io import Input_reader as INP_R
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller) job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
# target order of keys in written ENVI header files, consumed by reorder_ENVI_header();
# keys listed here but missing in a given header are skipped there
enviHdr_keyOrder = \
    ['ENVI', 'description', 'samples', 'lines', 'bands', 'header offset', 'file type', 'data type', 'interleave',
     'sensor type', 'byte order', 'reflectance scale factor', 'map info', 'coordinate system string',
     'corner coordinates lonlat', 'image_type', 'Satellite', 'Sensor', 'Subsystem', 'EntityID', 'arr_pos', 'arr_shape',
     'Metafile', 'gResolution', 'AcqDate', 'AcqTime', 'wavelength', 'bandwidths', 'band names', 'LayerBandsAssignment',
     'data gain values', 'data offset values', 'reflectance gain values', 'reflectance offset values', 'ThermalConstK1',
     'ThermalConstK2', 'ProcLCode', 'PhysUnit', 'wavelength units', 'SunElevation', 'SunAzimuth', 'SolIrradiance',
     'EarthSunDist','ViewingAngle', 'IncidenceAngle', 'FieldOfView', 'scene length', 'overpass duraction sec',
     'Quality', 'Additional']
def HDR_writer(meta_dic,outpath_hdr,logger=None): def HDR_writer(meta_dic,outpath_hdr,logger=None):
if logger is not None: if logger is not None:
logger.info('Writing %s header ...' %os.path.basename(outpath_hdr)) logger.info('Writing %s header ...' %os.path.basename(outpath_hdr))
envi.write_envi_header(outpath_hdr,meta_dic) envi.write_envi_header(outpath_hdr,meta_dic)
reorder_ENVI_header(outpath_hdr) reorder_ENVI_header(outpath_hdr,enviHdr_keyOrder)
def ASCII_writer(In,path_out_baseN): def ASCII_writer(In,path_out_baseN):
assert isinstance(In,dict), 'Input for ASCII writer is expected to be a dictionary. Got %s.' %type(In) assert isinstance(In,dict), 'Input for ASCII writer is expected to be a dictionary. Got %s.' %type(In)
...@@ -104,28 +112,29 @@ def Tiles_Writer(tileList_or_Array, out_path, out_shape, out_dtype, out_interlea ...@@ -104,28 +112,29 @@ def Tiles_Writer(tileList_or_Array, out_path, out_shape, out_dtype, out_interlea
data = tileList_or_Array if is_3D else tileList_or_Array[:,:,None] data = tileList_or_Array if is_3D else tileList_or_Array[:,:,None]
memmap[rS:rE+1,cS:cE+1,:] = data memmap[rS:rE+1,cS:cE+1,:] = data
def reorder_ENVI_header(path_hdr): def reorder_ENVI_header(path_hdr,tgt_keyOrder):
# type: (str,list)
"""Reorders the keys of an ENVI header file according to the implemented order. Keys given in the target order list
    but missing in the ENVI file are skipped.
This function is a workaround for envi.write_envi_header of Spectral Python Library that always writes the given
    metadata dictionary as an unordered dict.
:param path_hdr: <str> path of the target ENVI file
:param tgt_keyOrder: <list> list of target keys in the correct order
"""
with open(path_hdr, 'r') as inF: with open(path_hdr, 'r') as inF:
items = inF.read().split('\n') items = inF.read().split('\n')
HLP_F.silentremove(path_hdr) HLP_F.silentremove(path_hdr)
order = ['ENVI', 'description', 'samples', 'lines', 'bands', 'header offset', 'file type', 'data type',
'interleave', 'sensor type', 'byte order', 'reflectance scale factor', 'map info',
'coordinate system string', 'corner coordinates lonlat', 'image_type', 'Satellite', 'Sensor', 'Subsystem',
'EntityID', 'arr_pos', 'arr_shape', 'Metafile', 'gResolution', 'AcqDate', 'AcqTime', 'wavelength',
'bandwidths', 'band names', 'LayerBandsAssignment', 'data gain values', 'data offset values',
'reflectance gain values', 'reflectance offset values', 'ThermalConstK1', 'ThermalConstK2', 'ProcLCode',
'PhysUnit', 'wavelength units', 'SunElevation', 'SunAzimuth', 'SolIrradiance', 'EarthSunDist',
'ViewingAngle', 'IncidenceAngle', 'FieldOfView', 'scene length', 'overpass duraction sec', 'Quality',
'Additional']
with open(path_hdr, 'w') as outFile: with open(path_hdr, 'w') as outFile:
for paramName in order: for paramName in tgt_keyOrder:
for item in items: for item in items:
if item.startswith(paramName) or item.startswith(paramName.lower()): if item.startswith(paramName) or item.startswith(paramName.lower()):
outFile.write(item + '\n') outFile.write(item + '\n')
items.remove(item) items.remove(item)