Commit 5ff92e51 authored by Daniel Scheffler

Sentinel-2 compatibility update - L1A/L1B processing now fully compatible with Sentinel-2 data

L0A_P: added subset splitting for Sentinel-2 and ASTER
L0B_P: added filename attribute that now gives a clear hint about the filename on disk
L1A_P:
    - archive_to_rasObj(): added support for Sentinel-2
    - added set_arr_desc_from_MetaObj()
    - added log_for_fullArr_or_firstTile()
    - added rescale_array()
    - DN2TOARadRefTemp() renamed to calc_TOARadRefTemp()
    - calc_TOARadRefTemp(): revised and simplified
        - now supports both DNs and reflectances as input
        - supports radiometric rescaling of arrays
    - calc_cloud_mask(): some bugfixes and code improvements
    - apply_nodata_mask_to_ObjAttr(): can now call apply_nodata_mask_to_saved_ENVIfile()
    - build_L1A_masks(): bugfix
L1B_P:
    - fixed a bug that prevented coreg_info from being added to the L1B_object if no reference scene was available within COREG
    - added inheritance from L1A_object
L1C_P:
    - added inheritance from L1B_object
METADATA:
    - added Read_Sentinel2A_xmls(): metadata parser for Sentinel-2 data
    - some bugfixes and code improvements
    - added a ScaleFactor attribute
    - revised SPOT GMS_sensorcodes
    - revised self.PhysUnit
    - removed deprecated self.Satellite_GMS()
    - added Sentinel-2 metadata to isPAN(), isTHERMAL(), get_orbit_params(), get_special_values(), get_LayerBandsAssignment()
OUT_W: bugfix
INP_R: bugfix
HLP_F:
    - revised SPOT GMS_sensorcodes
    - improved group_objects_by_attributes()
    - added find_in_xml_root(), find_in_xml(), get_values_from_xml(), stack_detectors()
PG:
    - added Sentinel-2 compatibility
    - changed path derivation to use the filename instead of the entity ID
CFG: added usecase.scale_factor_TOARef (see the rescaling sketch below this message)
- added Sentinel-2 SRFs to database
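
For context, TOA reflectance is conventionally stored as scaled integers. The actual rescale_array()/calc_TOARadRefTemp() implementations are not visible in this excerpt, so the following is only a minimal sketch of the scaling idea behind usecase.scale_factor_TOARef = 10000; the function and parameter names (dn, gain, offset, esd, cos_sza, e_sun) are hypothetical:

import numpy as np

def rescale_to_scaled_toa_ref(dn, gain, offset, esd, cos_sza, e_sun, scale_factor=10000):
    # hypothetical sketch: DN -> TOA radiance -> TOA reflectance, stored as
    # integers after multiplication with the scale factor (10000)
    rad = gain * dn.astype(np.float32) + offset              # TOA radiance
    ref = (np.pi * rad * esd ** 2) / (e_sun * cos_sza)       # TOA reflectance (0..1)
    return (ref * scale_factor).round().astype(np.int16)     # scaled integer reflectance

# e.g. rescale_to_scaled_toa_ref(dn_arr, 0.01, 0.0, 1.0, 0.9, 1850.0) -> int16 array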
parent 588c814d
......@@ -85,7 +85,6 @@ def get_data_list_of_current_jobID(): # called in webapp mode
data_list =[]
for sceneid in sceneids:
#for sceneid in [6004823, 5843537]: #FIXME!!!
# add from postgreSQL-DB: proc_level, scene_ID, datasetid, image_type, satellite, sensor, subsystem,
# acquisition_date, entity_ID, filename
ds = DB_T.get_scene_and_dataset_infos_from_postgreSQLdb(sceneid)
......@@ -96,7 +95,18 @@ def get_data_list_of_current_jobID(): # called in webapp mode
ds['sensormode'] = get_sensormode(ds)
if usecase.skip_pan and ds['sensormode']=='P': continue # removes e.g. SPOT PAN in case of skip_pan
data_list.append(ds)
if re.search("Sentinel-2A",ds['satellite'],re.I):
for subsystem in ['S2A10','S2A20','S2A60']:
sub_ds = ds.copy()
sub_ds['subsystem'] = subsystem
data_list.append(sub_ds)
elif re.search("Terra", ds['satellite'], re.I):
for subsystem in ['VNIR1', 'VNIR2', 'SWIR','TIR']:
sub_ds = ds.copy()
sub_ds['subsystem'] = subsystem
data_list.append(sub_ds)
else:
data_list.append(ds)
'''OrderedDict([('datasetid', 104), ('image_type', 'RSD'), ('satellite', 'Landsat-8'), ('sensor', 'OLI_TIRS'),
('subsystem', ''), ('acquisition_date', datetime.datetime(2013, 7, 3, 5, 48, 32)),
('entityid', 'LC81510322013184LGN00'), ('filename', 'LC81510322013184LGN00.tar.gz'), ('sensormode', 'M')])'''
......@@ -105,8 +115,8 @@ def get_data_list_of_current_jobID(): # called in webapp mode
def LandsatID2dataset(ID_list):
dataset_list =[]
for ID in ID_list:
dataset = {'image_type': 'RSD', 'satellite': None, 'sensor': None, 'subsystem': None, 'acquisition_date': None,
'entity_ID': ID}
dataset = dict(image_type='RSD', satellite=None, sensor=None, subsystem=None, acquisition_date=None,
entity_ID=ID)
dataset['satellite'] = 'Landsat-5' if ID[:3]=='LT5' else 'Landsat-7' if ID[:3]=='LE7' else 'Landsat-8' \
if ID[:3]=='LC8' else dataset['satellite']
dataset['sensor'] = 'TM' if ID[:3]=='LT5' else 'ETM+' if ID[:3]=='LE7' else 'OLI_TIRS' \
......
......@@ -42,6 +42,7 @@ class L0B_object(object):
self.acquisition_date = data_list_posX['acquisition_date']
self.entity_ID = data_list_posX['entity_ID']
self.scene_ID = data_list_posX['scene_ID']
self.filename = data_list_posX['filename']
PathGen = PG.path_generator(self.__dict__)
self.baseN = PathGen.get_baseN()
self.path_procdata = PathGen.get_path_procdata()
......
This diff is collapsed.
......@@ -55,145 +55,148 @@ try: import cv2
except ImportError: print('cv2-lib missing..')
job, usecase, GMS_call_type = builtins.GMS_config.job, builtins.GMS_config.usecase, builtins.GMS_config.GMS_call_type
from gms_io import Output_writer as OUT_W
from misc import helper_functions as HLP_F
from algorithms import GEOPROCESSING_BD as GEOP
from misc import path_generator as PG
from misc import database_tools as DB_T
from gms_io import Output_writer as OUT_W
from misc import helper_functions as HLP_F
from algorithms import GEOPROCESSING_BD as GEOP
from misc import path_generator as PG
from misc import database_tools as DB_T
from algorithms.L1A_P import L1A_object
########################### core functions ####################################
def calculate_TiePoints(im_ref,im_rpc,distance = 200):
detector = cv2.FeatureDetector_create ("SIFT")
descriptor = cv2.DescriptorExtractor_create("SIFT")
TieP_ref = detector.detect(im_ref) # returns a list of keypoints -> type(skp[0]): cv2.KeyPoint
TieP_ref, Detector_ref = descriptor.compute(im_ref, TieP_ref) # Detector = numpy array, shape: number of keypoints x 128
print('%s temporary master tie points found.' %len(TieP_ref))
TieP_rpc = detector.detect(im_rpc)
TieP_rpc, Detector_rpc = descriptor.compute(im_rpc, TieP_rpc)
print('%s temporary slave tie points found.' %len(TieP_rpc))
flann_params = dict(algorithm=1, trees=4)
flann = cv2.flann_Index(Detector_ref, flann_params)
idx, dist = flann.knnSearch(Detector_rpc, 1, params={})
del flann
dist = dist[:,0]/2500.0
dist = dist.reshape(-1,).tolist()
idx = idx.reshape(-1).tolist()
indices = list(range(len(dist))) # list() needed under Python 3, where range objects have no sort()
indices.sort(key=lambda i: dist[i])
dist = [dist[i] for i in indices]
idx = [idx[i] for i in indices]
TieP_ref_final = []
for i, dis in zip(idx, dist): # zip works under Python 2 and 3; itertools.izip is Python-2-only
if dis < distance:
TieP_ref_final.append(TieP_ref[i])
flann = cv2.flann_Index(Detector_rpc, flann_params)
idx, dist = flann.knnSearch(Detector_ref, 1, params={})
del flann
dist = dist[:,0]/2500.0
dist = dist.reshape(-1,).tolist()
idx = idx.reshape(-1).tolist()
indices = list(range(len(dist))) # list() needed under Python 3, where range objects have no sort()
indices.sort(key=lambda i: dist[i])
dist = [dist[i] for i in indices]
idx = [idx[i] for i in indices]
TieP_rpc_final = []
for i, dis in zip(idx, dist): # zip works under Python 2 and 3; itertools.izip is Python-2-only
if dis < distance:
TieP_rpc_final.append(TieP_rpc[i])
return TieP_ref_final,TieP_rpc_final
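
A hedged usage sketch for calculate_TiePoints(): the cv2.FeatureDetector_create/DescriptorExtractor_create calls above belong to the OpenCV 2.4.x API with the nonfree SIFT module, so this only runs in such an environment; the image paths are placeholders:

import cv2

# load master and slave scenes as 8-bit grayscale (paths are hypothetical)
im_ref = cv2.imread('/data/master_scene_8bit.tif', cv2.IMREAD_GRAYSCALE)
im_rpc = cv2.imread('/data/slave_scene_8bit.tif', cv2.IMREAD_GRAYSCALE)
TieP_ref, TieP_rpc = calculate_TiePoints(im_ref, im_rpc, distance=200)
print('%s / %s tie points kept after distance filtering.' % (len(TieP_ref), len(TieP_rpc)))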
def Orfeo_homologous_points_extraction(im_ref,im_rpc):
HomologousPointsExtraction = otbApplication.Registry.CreateApplication("HomologousPointsExtraction")
# The following lines set all the application parameters:
# HomologousPointsExtraction.SetParameterString("in1", "sensor_stereo_left.tif")
# HomologousPointsExtraction.SetParameterString("in2", "sensor_stereo_right.tif")
HomologousPointsExtraction.SetParameterString("in1", im_ref)
HomologousPointsExtraction.SetParameterString("in2", im_rpc)
HomologousPointsExtraction.SetParameterString("mode","full")
HomologousPointsExtraction.SetParameterString("out", "homologous.txt")
# The following line execute the application
HomologousPointsExtraction.ExecuteAndWriteOutput()
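
Orfeo_homologous_points_extraction() requires the Orfeo Toolbox Python bindings (otbApplication) and, as a comment further below notes, only works with files on disk; a hedged call sketch with placeholder paths:

# writes the matched point pairs to 'homologous.txt' in the working directory
Orfeo_homologous_points_extraction('/data/master_scene.tif', '/data/slave_scene.tif')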
def generate_RPCs(RSD_L1A, DGM_L1A, masks_L1A,path_out_baseN =''):
''' Generates RPC model and returns RPC points as list. '''
print('\n##################### Level 1B Processing #####################')
logging.info('Level 1B Processing started.')
if isinstance(RSD_L1A,np.ndarray):
# The following line creates an instance of the GenerateRPCSensorModel application
GenerateRPCSensorModel = otbApplication.Registry.CreateApplication("GenerateRPCSensorModel")
# The following lines set all the application parameters:
GenerateRPCSensorModel.SetParameterString("outgeom", "output.geom")
GenerateRPCSensorModel.SetParameterString("inpoints", "points.txt")
GenerateRPCSensorModel.SetParameterString("map","epsg")
GenerateRPCSensorModel.SetParameterInt ("map.epsg.code", 32631)
# The following line execute the application
GenerateRPCSensorModel.ExecuteAndWriteOutput()
else:
logging.info('L1B-Processor accepts only numpy arrays as first input argument. Execution stopped.')
raise ValueError('L1B-Processor accepts only numpy arrays as first input argument.')
print('Generating dummy RPCs...')
logging.info('Generating dummy RPCs...')
list_RPCs = [0]*93
return list_RPCs
def update_metadata(list_RPCs, L1A_meta2update, path_out_baseN =''):
''' Adds RPC points to metadata of RS data, masks, atmospheric layers and DGM. '''
if isinstance(L1A_meta2update,dict):
# metadata dictionary updater
L1A_meta2update['rpc coeffs'] = list_RPCs
L1B_meta = L1A_meta2update
# L1B_meta = L1A_meta2update.update({'rpc coeffs':list_RPCs}) # result = None
return L1B_meta
elif isinstance(L1A_meta2update,str) and os.path.splitext(L1A_meta2update)[1] in ['.BSQ', '.bsq'] and os.path.isfile(os.path.splitext(L1A_meta2update)[0] + '.hdr'):
# header file updater
hdr_path = os.path.splitext(L1A_meta2update)[0] + '.hdr'
L1A_meta2update = envi.read_envi_header(hdr_path)
L1A_meta2update['rpc coeffs'] = list_RPCs
L1B_meta = L1A_meta2update
return L1B_meta
else:
logging.info('L1B-Processor accepts only L1A metadata dictionaries or path-strings to L1A header files as second input argument. Execution stopped.')
raise ValueError('L1B-Processor accepts only L1A metadata dictionaries or path-strings to L1A header files as second input argument.')
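
A small usage sketch for the metadata updater above, using the dummy RPC list this module currently produces (generate_RPCs() itself additionally requires OTB, so the coefficients are created directly here):

list_RPCs = [0] * 93                                    # dummy coefficients, as returned by generate_RPCs()
L1B_meta = update_metadata(list_RPCs, {'description': 'L1A scene'})
print(L1B_meta['rpc coeffs'][:3])                       # -> [0, 0, 0]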
def convert_to_8bit(Inimage,In_dtype):
image_min,image_max = (np.min(Inimage), np.max(Inimage))
# lut = np.arange(np.iinfo(In_dtype).max, dtype=In_dtype)
lut = np.arange(2**16, dtype='uint16')
Outimage = np.array(lut, copy=True)
Outimage.clip(image_min, image_max, out=Outimage)
Outimage -= image_min
Outimage //= (image_max - image_min + 1) / 256.
lut = Outimage.astype(np.uint8)
return np.take(lut, Inimage)
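
convert_to_8bit() maps pixels through a 16-bit lookup table; a self-contained sketch of the same LUT-based linear stretch, written so it also runs on current numpy versions (the in-place //= with a float divisor above can raise a casting error there):

import numpy as np

arr = np.array([[100, 5000], [20000, 65000]], dtype=np.uint16)
lo, hi = int(arr.min()), int(arr.max())
lut = np.clip(np.arange(2**16), lo, hi)                        # clip LUT to the image's value range
lut = ((lut - lo) * 255 // max(hi - lo, 1)).astype(np.uint8)   # linear stretch to 0..255
arr8 = np.take(lut, arr)                                       # map every pixel through the LUT
print(arr8.dtype, arr8.min(), arr8.max())                      # uint8 0 255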
def L1B_P__main(L1A_Instances):
for i in L1A_Instances:
if i.image_type == 'RSD':
if i.georef == 'Master':
if i.arr_shape == 'cube':
im_ref = convert_to_8bit(i.arr[:,:,0],i.arr.dtype)
else:
im_ref = convert_to_8bit(i.arr,i.arr.dtype)
print('Calling tie point calculation with master image %s' %i.entity_ID)
else:
if i.arr_shape == 'cube':
im_rpc = convert_to_8bit(i.arr[:,:,0],i.arr.dtype)
else:
im_rpc = convert_to_8bit(i.arr,i.arr.dtype)
print('Calling tie point calculation with slave image %s' %i.entity_ID)
print(im_ref.shape,im_rpc.shape)
# Orfeo_homologous_points_extraction(im_ref,im_rpc) # only works from disk
TieP_ref,TieP_rpc = calculate_TiePoints(im_ref,im_rpc)
print('%s master tie points calculated.' %len(TieP_ref))
# <editor-fold desc="deprecated/unused functions">
# def calculate_TiePoints(im_ref,im_rpc,distance = 200):
# detector = cv2.FeatureDetector_create ("SIFT")
# descriptor = cv2.DescriptorExtractor_create("SIFT")
#
# TieP_ref = detector.detect(im_ref) # returns a list of keypoints -> type(skp[0]): cv2.KeyPoint
# TieP_ref, Detector_ref = descriptor.compute(im_ref, TieP_ref) # Detector = numpy array, shape: number of keypoints x 128
# print('%s temporary master tie points found.' %len(TieP_ref))
# TieP_rpc = detector.detect(im_rpc)
# TieP_rpc, Detector_rpc = descriptor.compute(im_rpc, TieP_rpc)
# print('%s temporary slave tie points found.' %len(TieP_rpc))
#
# flann_params = dict(algorithm=1, trees=4)
# flann = cv2.flann_Index(Detector_ref, flann_params)
# idx, dist = flann.knnSearch(Detector_rpc, 1, params={})
# del flann
# dist = dist[:,0]/2500.0
# dist = dist.reshape(-1,).tolist()
# idx = idx.reshape(-1).tolist()
# indices = range(len(dist))
# indices.sort(key=lambda i: dist[i])
# dist = [dist[i] for i in indices]
# idx = [idx[i] for i in indices]
# TieP_ref_final = []
# for i, dis in itertools.izip(idx, dist):
# if dis < distance:
# TieP_ref_final.append(TieP_ref[i])
#
# flann = cv2.flann_Index(Detector_rpc, flann_params)
# idx, dist = flann.knnSearch(Detector_ref, 1, params={})
# del flann
# dist = dist[:,0]/2500.0
# dist = dist.reshape(-1,).tolist()
# idx = idx.reshape(-1).tolist()
# indices = range(len(dist))
# indices.sort(key=lambda i: dist[i])
# dist = [dist[i] for i in indices]
# idx = [idx[i] for i in indices]
# TieP_rpc_final = []
# for i, dis in itertools.izip(idx, dist):
# if dis < distance:
# TieP_rpc_final.append(TieP_rpc[i])
#
# return TieP_ref_final,TieP_rpc_final
#
# def Orfeo_homologous_points_extraction(im_ref,im_rpc):
# HomologousPointsExtraction = otbApplication.Registry.CreateApplication("HomologousPointsExtraction")
# # The following lines set all the application parameters:
# # HomologousPointsExtraction.SetParameterString("in1", "sensor_stereo_left.tif")
# # HomologousPointsExtraction.SetParameterString("in2", "sensor_stereo_right.tif")
# HomologousPointsExtraction.SetParameterString("in1", im_ref)
# HomologousPointsExtraction.SetParameterString("in2", im_rpc)
# HomologousPointsExtraction.SetParameterString("mode","full")
# HomologousPointsExtraction.SetParameterString("out", "homologous.txt")
# # The following line execute the application
# HomologousPointsExtraction.ExecuteAndWriteOutput()
#
# def generate_RPCs(RSD_L1A, DGM_L1A, masks_L1A,path_out_baseN =''):
# ''' Generates RPC model and returns RPC points as list. '''
# print('\n##################### Level 1B Processing #####################')
# logging.info('Level 1B Processing started.')
# if isinstance(RSD_L1A,np.ndarray):
# # The following line creates an instance of the GenerateRPCSensorModel application
# GenerateRPCSensorModel = otbApplication.Registry.CreateApplication("GenerateRPCSensorModel")
#
# # The following lines set all the application parameters:
# GenerateRPCSensorModel.SetParameterString("outgeom", "output.geom")
# GenerateRPCSensorModel.SetParameterString("inpoints", "points.txt")
# GenerateRPCSensorModel.SetParameterString("map","epsg")
# GenerateRPCSensorModel.SetParameterInt ("map.epsg.code", 32631)
#
# # The following line execute the application
# GenerateRPCSensorModel.ExecuteAndWriteOutput()
# else:
# logging.info('L1B-Processor accepts only numpy arrays as first input argument. Execution stopped.')
# raise ValueError('L1B-Processor accepts only numpy arrays as first input argument.')
#
# print('Generating dummy RPCs...')
# logging.info('Generating dummy RPCs...')
# list_RPCs = [0]*93
# return list_RPCs
#
# def update_metadata(list_RPCs, L1A_meta2update, path_out_baseN =''):
# ''' Adds RPC points to metadata of RS data, masks, atmospheric layers and DGM. '''
# if isinstance(L1A_meta2update,dict):
# # metadata dictionary updater
# L1A_meta2update['rpc coeffs'] = list_RPCs
# L1B_meta = L1A_meta2update
# # L1B_meta = L1A_meta2update.update({'rpc coeffs':list_RPCs}) # result = None
# return L1B_meta
#
# elif isinstance(L1A_meta2update,str) and os.path.splitext(L1A_meta2update)[1] in ['.BSQ', '.bsq'] and os.path.isfile(os.path.splitext(L1A_meta2update)[0] + '.hdr'):
# # header file updater
# hdr_path = os.path.splitext(L1A_meta2update)[0] + '.hdr'
# L1A_meta2update = envi.read_envi_header(hdr_path)
# L1A_meta2update['rpc coeffs'] = list_RPCs
# L1B_meta = L1A_meta2update
# return L1B_meta
#
# else:
# logging.info('L1B-Processor accepts only L1A metadata dictionaries or path-strings to L1A header files as second input argument. Execution stopped.')
# raise ValueError('L1B-Processor accepts only L1A metadata dictionaries or path-strings to L1A header files as second input argument.')
#
# def convert_to_8bit(Inimage,In_dtype):
# image_min,image_max = (np.min(Inimage), np.max(Inimage))
# # lut = np.arange(np.iinfo(In_dtype).max, dtype=In_dtype)
# lut = np.arange(2**16, dtype='uint16')
# Outimage = np.array(lut, copy=True)
# Outimage.clip(image_min, image_max, out=Outimage)
# Outimage -= image_min
# Outimage //= (image_max - image_min + 1) / 256.
# lut = Outimage.astype(np.uint8)
# return np.take(lut, Inimage)
#
# def L1B_P__main(L1A_Instances):
# for i in L1A_Instances:
# if i.image_type == 'RSD':
# if i.georef == 'Master':
# if i.arr_shape == 'cube':
# im_ref = convert_to_8bit(i.arr[:,:,0],i.arr.dtype)
# else:
# im_ref = convert_to_8bit(i.arr,i.arr.dtype)
# print('Calling tie point calculation with master image %s' %i.entity_ID)
# else:
# if i.arr_shape == 'cube':
# im_rpc = convert_to_8bit(i.arr[:,:,0],i.arr.dtype)
# else:
# im_rpc = convert_to_8bit(i.arr,i.arr.dtype)
# print('Calling tie point calculation with slave image %s' %i.entity_ID)
# print(im_ref.shape,im_rpc.shape)
# # Orfeo_homologous_points_extraction(im_ref,im_rpc) # only works from disk
# TieP_ref,TieP_rpc = calculate_TiePoints(im_ref,im_rpc)
# print('%s master tie points calculated.' %len(TieP_ref))
# </editor-fold>
class COREG(object):
......@@ -1269,10 +1272,10 @@ def corner_coord_to_minmax(corner_coords):
return xmin,xmax,ymin,ymax
class L1B_object(object):
class L1B_object(L1A_object):
def __init__(self, L1A_obj, COREG_obj):
[setattr(self, key, value) for key,value in L1A_obj.__dict__.items()]
super().__init__(None)
if L1A_obj: [setattr(self, key, value) for key,value in L1A_obj.__dict__.items()]
self.proc_level = 'L1B'
self.coreg_info = {
'corrected_shifts_px' : {'x':COREG_obj.x_shift_px, 'y':COREG_obj.y_shift_px },
......@@ -1285,9 +1288,9 @@ class L1B_object(object):
'reference geotransform': COREG_obj.ref_gt,
'reference extent' : {'cols':COREG_obj.ds_imref.RasterXSize, 'rows':COREG_obj.ds_imref.RasterYSize},
'is shifted' : False,
'is resampled' : False }
'is resampled' : False } if COREG_obj.success else None
self.meta['original map info'] = COREG_obj.map_info_to_update
self.meta['original map info'] = COREG_obj.map_info_to_update if COREG_obj else None
def perform_deshifting(self, attrname, band2process=None): # TODO: propagate this method to all subsequent processing levels via super(); maybe use @classmethod to pass the namespace of later objects to the method?
......
......@@ -32,20 +32,23 @@ import misc.helper_functions as HLP_F
import algorithms.GEOPROCESSING_BD as GEOP
import gms_io.Input_reader as INP_R
import misc.path_generator as PG
from algorithms.L1B_P import L1B_object
job = builtins.GMS_config.job
########################### core functions ####################################
class L1C_object(object):
class L1C_object(L1B_object):
#"""@DynamicAttrs"""
def __init__(self, L1B_obj):
[setattr(self, key, value) for key,value in L1B_obj.__dict__.items()]
self.proc_level = 'L1C'
self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
super().__init__(None,None)
if L1B_obj: [setattr(self, key, value) for key,value in L1B_obj.__dict__.items()]
self.proc_level = 'L1C'
self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1) if L1B_obj else None
self.lonlat_arr = None # set by self.get_lonlat_coord_array()
self.VZA_arr = None # set by self.calc_VZA_array()
self.SZA_arr = None # set by self.calc_SZA_SAA_array()
self.SAA_arr = None # set by self.calc_SZA_SAA_array()
self.RAA_arr = None # set by self.calc_RAA_array()
self.RAA_arr = None # set by self.calc_RAA_array()
def get_lonlat_coord_array(self, subset=None):
......
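
The new inheritance chain (L1A_object -> L1B_object -> L1C_object) uses the same construction pattern in both subclasses: initialize the parent with empty arguments, then copy all attributes of the optional source object. A minimal standalone sketch of that pattern (class names here are generic stand-ins):

class Level1A:
    def __init__(self, data_list_pos=None):
        self.proc_level = 'L1A'                 # parent sets up the full attribute set

class Level1B(Level1A):
    def __init__(self, src_obj=None):
        super().__init__(None)                  # establish parent attributes first
        if src_obj:                             # then take over everything from the source object
            for key, value in src_obj.__dict__.items():
                setattr(self, key, value)
        self.proc_level = 'L1B'                 # finally override the processing level

l1b = Level1B(Level1A())
print(l1b.proc_level)                           # -> L1B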
This diff is collapsed.
......@@ -93,9 +93,11 @@ class job:
path_procdata = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_procdata'))
path_archive = joinP(path_fileserver, query_cfg(conn_db_meta, 'foldername_download'))
path_earthSunDist = absP(query_cfg(conn_db_meta, 'path_earthSunDist'))
path_earthSunDist = '/home/gfz-fe/GeoMultiSens/database/earth_sun_distance/Earth_Sun_distances_per_day_edited.csv' # FIXME!!
path_SRFs = absP(query_cfg(conn_db_meta, 'path_SRFs'))
path_cloud_classif = absP(query_cfg(conn_db_meta, 'path_cloud_classif'))
path_solar_irr = absP(query_cfg(conn_db_meta, 'path_solar_irr'))
path_solar_irr = '/home/gfz-fe/GeoMultiSens/database/solar_irradiance/SUNp1fontenla__350-2500nm_@0.1nm_converted.txt' # FIXME!!
path_testing = absP(query_cfg(conn_db_meta, 'path_testing'))
path_benchmarks = absP(query_cfg(conn_db_meta, 'path_benchmarks'))
path_job_logs = absP(query_cfg(conn_db_meta, 'path_job_logs'))
......@@ -105,7 +107,7 @@ class job:
# processor configuration: [run processor, write output]
exec__L1AP = [1, 1]
exec__L1BP = [1, 1]
exec__L1BP = [0, 1]
exec__L1CP = [0, 1]
exec__L1DP = [1, 1]
exec__L2AP = [0, 0]
......@@ -137,9 +139,10 @@ class usecase:
sort_bands_by_cwl = True
conversion_type_optical = 'Ref' # 'Rad' / 'Ref'
conversion_type_thermal = 'Rad' # 'Rad' / 'Temp'
scale_factor_TOARef = 10000
elif GMS_call_type == 'webapp':
skip_thermal = int(query_cfg(job.conn_db_meta, 'skip_thermal'))
#skip_thermal = False # Fixme
#skip_thermal = int(query_cfg(job.conn_db_meta, 'skip_thermal'))
skip_thermal = False # Fixme
skip_pan = int(query_cfg(job.conn_db_meta, 'skip_pan'))
sort_bands_by_cwl = int(query_cfg(job.conn_db_meta, 'sort_bands_by_cwl'))
conversion_type_optical = query_cfg(job.conn_db_meta, 'conversion_type_optical')
......@@ -147,6 +150,8 @@ class usecase:
datasetid_spatial_ref = query_job(job.conn_db_meta, 'datasetid_spatial_ref')
#conversion_type_optical = 'Rad' # 'Rad' / 'Ref' # FIXME
#conversion_type_thermal = 'Temp' # 'Rad' / 'Temp' # FIXME
scale_factor_TOARef = 10000 # TODO: move to database
align_coord_grids = 0 # FIXME: may become pointless later, since joint evaluation of multi-sensor data incl.
# FIXME: time series requires grid aligning
......@@ -166,8 +171,6 @@ def get_usecase_coord_grid():
return geotransform , EPSG, GSD_meters
# def init_gms_globals():
# global CLD_obj
# CLD_obj = 'not_set'
......
......@@ -21,6 +21,7 @@ import collections
import re
import dill
import builtins
import warnings
import scipy.interpolate
import gms_io.Output_writer as OUT_W
import algorithms.METADATA_BD as META
......@@ -79,6 +80,7 @@ def read_ENVIhdr_to_dict(hdr_path, logger=None):
return SpyFileheader.metadata
def read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=None, return_meta=False,q=0):
hdr_path = os.path.splitext(hdr_path)[0]+'.hdr' if os.path.splitext(hdr_path)[1]=='.bsq' else hdr_path
if not os.path.isfile(hdr_path):
if not q:
if logger: logger.critical('read_ENVIfile: Input data not found at %s.'%hdr_path)
......@@ -203,6 +205,7 @@ def SRF_reader(GMS_identifier):
SRF_dict = collections.OrderedDict((bandname,None) for bandname in ['band_%s' %LayerBandsAssignment[i]
for i in range(len(LayerBandsAssignment))])
SRF_path = os.path.join(job.path_SRFs,satellite,sensor)
if os.path.isdir(SRF_path):
for key in SRF_dict.keys():
try:
......@@ -217,6 +220,7 @@ def SRF_reader(GMS_identifier):
# logger.info('SRFs for the following %s bands read:' %[i for i in SRF_dict.keys()])
else:
warnings.warn('SRF database directory not available (Got %s).' %SRF_path)
SRF_dict = {}
logger.warning("No spectral response functions available for '%s %s'. Preconfigured values are used for solar "\
"irradiance and central wavelength instead." %(satellite,sensor))
......
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
......@@ -93,8 +93,9 @@ def ASCII_writer(In,path_out_baseN):
# json.dump(In, open(path_out_baseN,'w'),skipkeys=True,sort_keys=True,cls=customJSONEncoder,separators=(',', ': '),indent =4)
json.dump(In, open(path_out_baseN,'w'),skipkeys=True,sort_keys=True,separators=(',', ': '),indent =4)
def Tiles_Writer(tile_list, out_path, out_shape, out_dtype, out_interleave, out_meta = {}, overwrite=True):
def Tiles_Writer(tile_list, out_path, out_shape, out_dtype, out_interleave, out_meta = None, overwrite=True):
# print(tile_list, out_path, out_shape, out_dtype, out_interleave, out_meta, overwrite)
out_meta = out_meta if out_meta else {} # avoid the mutable default argument; fresh dict per call
if os.path.exists(out_path) and not overwrite:
FileObj = spectral.open_image(out_path)
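
The signature change above (out_meta={} -> out_meta=None) avoids Python's mutable-default-argument pitfall: a dict default is created once at function definition and shared across all calls. A short demonstration:

def bad(meta={}):                    # default dict is created once, at definition time
    meta['n'] = meta.get('n', 0) + 1
    return meta

def good(meta=None):                 # fresh dict per call unless the caller provides one
    meta = meta if meta else {}
    meta['n'] = meta.get('n', 0) + 1
    return meta

print(bad())    # {'n': 1}
print(bad())    # {'n': 2}  <- state leaked from the first call
print(good())   # {'n': 1}
print(good())   # {'n': 1}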
......@@ -186,7 +187,7 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification = True):
for arrayname in attributes2write:
image_type_dict = {'arr':InObj.image_type, 'masks':'MAS', 'mask_clouds':'MAC'}
descriptor = '%s_%s' %(image_type_dict[arrayname],InObj.proc_level)
if hasattr(InObj,arrayname):
if hasattr(InObj,arrayname) and getattr(InObj,arrayname) is not None:
outpath_hdr = get_outpath_hdr(InObj,image_type_dict[arrayname])
out_dtype = param_dic[descriptor][2]
meta_attr = 'meta' if arrayname=='arr' else 'masks_meta' if arrayname=='masks' else 'unknown'
......@@ -211,7 +212,7 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification = True):
if os.path.splitext(path_to_array)[1] =='':
outpath_arr = '%s.%s' %(os.path.splitext(outpath_hdr)[0],InObj.outInterleave)
try: shutil.copy(path_to_array,outpath_arr) # copies file + permissions
except: # prevents permission error if outputfile already exists and is owned by another user
except PermissionError: # prevents permission error if outputfile already exists and is owned by another user
os.remove(outpath_arr)
print('copying %s' %path_to_array)
shutil.copy(path_to_array,outpath_arr)
......
......@@ -62,7 +62,8 @@ def get_info_from_postgreSQLdb(conn_params,tablename,vals2return,cond_dict,recor
if connection is None: return 'database connection fault'
cursor = connection.cursor()
condition = "WHERE " + " AND ".join(["%s=%s" %(k,v) for k,v in cond_dict.items()])
cursor.execute("SELECT " +','.join(vals2return)+ " FROM " +tablename+ " " + condition)
cursor.execute("SELECT " + ','.join(vals2return) + " FROM " + tablename + " " + condition)
records2return = cursor.fetchall() if records2fetch == 0 else [cursor.fetchone()] if records2fetch == 1 else \
cursor.fetchmany(size = records2fetch) # e.g. [('LE71950282003121EDC00',), ('LE71950282003105ASN00',)]
cursor.close()
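
The helper above interpolates the condition values directly into the SQL string; a hedged sketch of a parameterized variant (assuming psycopg2, and that table/column names come from trusted configuration, since identifiers cannot be bound as parameters):

import psycopg2

def get_info_parameterized(conn_params, tablename, vals2return, cond_dict, records2fetch=0):
    condition = " AND ".join("%s=%%s" % k for k in cond_dict)           # value placeholders only
    sql = "SELECT %s FROM %s WHERE %s" % (",".join(vals2return), tablename, condition)
    connection = psycopg2.connect(conn_params)
    cursor = connection.cursor()
    cursor.execute(sql, tuple(cond_dict.values()))                      # driver binds the values safely
    records = cursor.fetchall() if records2fetch == 0 else cursor.fetchmany(size=records2fetch)
    cursor.close()
    connection.close()
    return records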
......
......@@ -25,6 +25,7 @@ from multiprocessing import sharedctypes
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import pyplot as plt
from subprocess import Popen, PIPE
from xml.etree.ElementTree import QName
import algorithms.gms_cloud_classifier as CLD_P # Cloud Processor
import misc.database_tools as DB_T
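
The QName import above supports the new XML helpers mentioned in the commit message (find_in_xml_root(), find_in_xml(), get_values_from_xml()); their real implementations are not visible in this excerpt, so the following is only a sketch of how a namespace-aware find_in_xml() could look:

from xml.etree import ElementTree
from xml.etree.ElementTree import QName

def find_in_xml(element, *branches):
    # hypothetical sketch: descend through nested, namespaced tags
    ns = element.tag.split('}')[0].lstrip('{') if '}' in element.tag else ''
    for branch in branches:
        element = element.find(str(QName(ns, branch)) if ns else branch)
        if element is None:
            return None
    return element

root = ElementTree.fromstring('<root xmlns="http://example"><a><b>42</b></a></root>')
print(find_in_xml(root, 'a', 'b').text)     # -> 42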
......@@ -137,22 +138,33 @@ def get_GMS_sensorcode(GMS_identifier):
'Landsat-7_ETM+_SAM' : 'TM7',
'Landsat-8_OLI_TIRS' : 'LDCM',
'Landsat-8_LDCM' : 'LDCM',
'SPOT-1_HRV1' : 'S1a', #MS
'SPOT-1_HRV2' : 'S1b',
'SPOT-2_HRV1' : 'S2a',
'SPOT-2_HRV2' : 'S2b',
'SPOT-3_HRV1' : 'S3a',
'SPOT-3_HRV2' : 'S3b',
'SPOT-4_HRVIR1' : 'S4a',
'SPOT-4_HRVIR2' : 'S4b',
'SPOT-5_HRG1' : 'S5a', #PAN HRG2A
'SPOT-5_HRG2' : 'S5b', #MS HRG2J
'SPOT-1_HRV1' : 'SPOT1a', #MS
'SPOT-1_HRV2' : 'SPOT1b',
'SPOT-2_HRV1' : 'SPOT2a',
'SPOT-2_HRV2' : 'SPOT2b',
'SPOT-3_HRV1' : 'SPOT3a',
'SPOT-3_HRV2' : 'SPOT3b',
'SPOT-4_HRVIR1' : 'SPOT4a',
'SPOT-4_HRVIR2' : 'SPOT4b',
'SPOT-5_HRG1' : 'SPOT5a', #PAN HRG2A
'SPOT-5_HRG2' : 'SPOT5b', #MS HRG2J
'RapidEye-1_MSI' : 'RE1',
'RapidEye-2_MSI' : 'RE2',
'RapidEye-3_MSI' : 'RE3',
'RapidEye-4_MSI' : 'RE4',
'RapidEye-5_MSI' : 'RE5',
'SRTM_SRTM2' : 'SRTM2' ,
'Terra_ASTER_VNIR1' : 'AST_V1',
'Terra_ASTER_VNIR2' : 'AST_V2',
'Terra_ASTER_SWIR' : 'AST_S',
'Terra_ASTER_TIR' : 'AST_T' }
'Terra_ASTER_TIR' : 'AST_T',
'Sentinel-2A_MSI_S2A10' : 'S2A10',
'Sentinel-2A_MSI_S2A20' : 'S2A20',
'Sentinel-2A_MSI_S2A60' : 'S2A60',
'Sentinel-2B_MSI_S2B10' : 'S2B10',
'Sentinel-2B_MSI_S2B20' : 'S2B20',
'Sentinel-2B_MSI_S2B60' : 'S2B60'
}
try:
return sensorcode_dic[meta_sensorcode]
except KeyError:
......@@ -170,12 +182,13 @@ def convert_absPathArchive_to_GDALvsiPath(path_archive):
file_suffix = '.tar.gz' if path_archive.endswith('.tar.gz') else file_suffix
return os.path.join(gdal_prefix_dict[file_suffix],os.path.basename(path_archive))
def open_specific_file_within_archive(path_archive,matching_expression, logger=None, read_mode='r'):
def open_specific_file_within_archive(path_archive,matching_expression, read_mode='r'):
file_suffix = os.path.splitext(path_archive)[1][1:]
file_suffix = 'tar.gz' if path_archive.endswith('tar.gz') else file_suffix
assert file_suffix in ['zip','tar','gz','tgz', 'tar.gz'], '*.%s files are not supported.' %file_suffix
if file_suffix == 'zip':
archive = zipfile.ZipFile(path_archive,'r')
#[print(i) for i in archive.namelist()]
matching_files = fnmatch.filter(archive.namelist(), matching_expression)
count_matching_files = len(matching_files)
content_file = archive.read(matching_files[0])
......@@ -302,8 +315,8 @@ def get_mask_colormap(maskname):
'Unknown Class' : [255,0,0]}
else: return None
def group_objects_by_attribute(object_list, attr):
get_attr = operator.attrgetter(attr)
def group_objects_by_attributes(object_list, *attributes):
get_attr = operator.attrgetter(*attributes)
return [list(g) for k, g in itertools.groupby(sorted(object_list, key=get_attr), get_attr)]
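
The generalization from group_objects_by_attribute() to group_objects_by_attributes() lets callers group by several attributes at once, since operator.attrgetter with multiple names yields sortable tuples. A small usage sketch with a stand-in class:

class Scene:                                           # stand-in for a GMS object
    def __init__(self, satellite, subsystem):
        self.satellite, self.subsystem = satellite, subsystem

scenes = [Scene('Sentinel-2A', 'S2A10'), Scene('Sentinel-2A', 'S2A10'), Scene('Terra', 'SWIR')]
groups = group_objects_by_attributes(scenes, 'satellite', 'subsystem')
print([len(g) for g in groups])                        # -> [2, 1], one group per (satellite, subsystem) pair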
def group_tuples_by_keys_of_tupleElements(tuple_list, tupleElement_index, key):
......@@ -387,6 +400,53 @@ def corner_coord_to_minmax(corner_coords):