Commit 891590f2 authored by Daniel Scheffler

Some major bugfixes within the preparation of L1B-P.

- moved generation of absolute paths from L0B-P to an external path_generator class within HLP_F (see the interface sketch below)
- renamed GMS_proc_level to proc_level
- converted L1A_obj.__call__() to L1A_obj.fill_from_disk()
- added multiple docstrings
- Bugfix: L1A_obj attribute names read from ENVI headers are now compatible with further processing
- Bugfix: fillvals are now passed correctly to calc_VZA_array, calc_SZA_SAA_array, calc_RAA_array
- Bugfix: added missing argument to numba_array_merger
- added a dummy version of L1B_P.get_opt_winpos_winsize()
- GEOP: added functions for getting the overlap between two footprint polygons
- INP_R: get_list_GMSfiles() now compatible with webapp GMS_call_type
- Bugfix: OUT_W: fixed a bug that caused L1A_obj attributes to be overwritten by OUT_W
- HLP_F: added functions for conversion between postgreSQL geometry, postgreSQL polygon and internal CornerLonLat lists
- added detection of a debugger running the process controller
- commented multiple sections of the process controller
- some style adjustments
- revised settings of environment variables in run.sh
parent 68cefc6f
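Editorial note: the new HLP_F.path_generator class itself is not shown in this diff. A hedged sketch of the interface implied by the call sites below (get_baseN, get_path_procdata, get_path_logfile, get_path_gmsfile, get_local_archive_path_baseN) and by the removed L0B-P helpers; 'job' is assumed to come from the GMS_config builtins, as elsewhere in the code:

# Hypothetical sketch of HLP_F.path_generator; not the committed implementation.
import os

class path_generator(object):
    def __init__(self, dataset_dict):
        # expects the attribute dictionary of a GMS object or a dataset record
        self.satellite  = dataset_dict['satellite']
        self.sensor     = dataset_dict['sensor']
        self.subsystem  = dataset_dict['subsystem']
        self.AcqDate    = dataset_dict['acquisition_date']
        self.entity_ID  = dataset_dict['entity_ID']
        self.proc_level = dataset_dict.get('proc_level', 'L0A')
    def get_baseN(self):
        parts = [self.sensor, self.subsystem, self.entity_ID]
        return '__'.join([p for p in parts if p not in ['', None]])
    def get_path_procdata(self):
        return os.path.join(job.path_procdata, self.satellite, self.sensor,
                            self.AcqDate.strftime('%Y-%m-%d'), self.entity_ID)
    def get_path_logfile(self):
        return os.path.join(self.get_path_procdata(), self.get_baseN() + '.log')
    def get_path_gmsfile(self):
        return os.path.join(self.get_path_procdata(),
                            '%s_%s.gms' % (self.get_baseN(), self.proc_level))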
......@@ -44,6 +44,8 @@ import pyproj
from pyorbital import astronomy
import ephem
import spectral.io.envi as envi
from shapely.geometry import Polygon
from shapely.geometry import shape
import gms_io.envifilehandling_BD as ef
import misc.helper_functions as HLP_F
......@@ -1853,15 +1855,14 @@ class GEOPROCESSING(object):
[1, ur[1], ur[0], ur[1] * ur[0]],
[1, ll[1], ll[0], ll[1] * ll[0]],
[1, lr[1], lr[0], lr[1] * lr[0]]])
const_matrix = np.array([viewing_angle - FOV / 2., # VZA@UL
viewing_angle + FOV / 2., # VZA@UR
viewing_angle - FOV / 2., # VZA@LL
const_matrix = np.array([viewing_angle - FOV / 2., # VZA@UL
viewing_angle + FOV / 2., # VZA@UR
viewing_angle - FOV / 2., # VZA@LL
viewing_angle + FOV / 2.]) # VZA@LR
factors = np.linalg.solve(coeff_matrix, const_matrix)
VZA_array = (
factors[0] + factors[1] * cols_arr + factors[2] * rows_arr + factors[3] * cols_arr * rows_arr).astype(
np.float32)
VZA_array = (factors[0] + factors[1] * cols_arr + factors[2] * rows_arr + factors[3] * \
cols_arr * rows_arr).astype(np.float32)
if assignNoData is not None:
if isinstance(assignNoData, list) and isinstance(assignNoData[0], np.ndarray):
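For context: the block above fits a bilinear surface f(col, row) = f0 + f1*col + f2*row + f3*col*row through the four corner VZAs and evaluates it per pixel. A minimal standalone sketch of the same technique (names and values illustrative, not from the diff):

import numpy as np

def interpolate_corners(corner_vals, corner_colrow, cols_arr, rows_arr):
    # solve for f0..f3 so that f(col, row) hits all four corner values
    coeff_matrix = np.array([[1., col, row, col * row] for col, row in corner_colrow])
    f0, f1, f2, f3 = np.linalg.solve(coeff_matrix, np.asarray(corner_vals, dtype=float))
    return (f0 + f1 * cols_arr + f2 * rows_arr + f3 * cols_arr * rows_arr).astype(np.float32)

cols_arr, rows_arr = np.meshgrid(np.arange(100), np.arange(200))
vza = interpolate_corners([10., 25., 10., 25.],                    # VZA at UL, UR, LL, LR
                          [(0, 0), (99, 0), (0, 199), (99, 199)],  # (col, row) of corners
                          cols_arr, rows_arr)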
......@@ -1965,8 +1966,8 @@ class GEOPROCESSING(object):
rows_arr[None, :] = range(rowStart, rowStart + rows) # rows (int)
rows_arr = rows_arr.T
alt_UL_rad, az_UL_rad = astronomy.get_alt_az(time_end, trueDataCornerLonLat[0][0], trueDataCornerLonLat[0][1])
alt_UR_rad, az_UR_rad = astronomy.get_alt_az(time_end, trueDataCornerLonLat[1][0], trueDataCornerLonLat[1][1])
alt_UL_rad, az_UL_rad = astronomy.get_alt_az(time_end, trueDataCornerLonLat[0][0], trueDataCornerLonLat[0][1])
alt_UR_rad, az_UR_rad = astronomy.get_alt_az(time_end, trueDataCornerLonLat[1][0], trueDataCornerLonLat[1][1])
alt_LL_rad, az_LL_rad = astronomy.get_alt_az(time_start, trueDataCornerLonLat[2][0], trueDataCornerLonLat[2][1])
alt_LR_rad, az_LR_rad = astronomy.get_alt_az(time_start, trueDataCornerLonLat[3][0], trueDataCornerLonLat[3][1])
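For reference, pyorbital's astronomy.get_alt_az(utc_time, lon, lat) returns solar altitude and azimuth in radians; the corner SZA/SAA values fed into the interpolation below follow by converting to degrees. A hedged sketch (time and coordinates are made up):

import math
import datetime
from pyorbital import astronomy

time_end = datetime.datetime(2003, 5, 1, 9, 45)   # illustrative UTC acquisition time
lon_UL, lat_UL = 13.4, 52.5                       # illustrative UL corner
alt_UL_rad, az_UL_rad = astronomy.get_alt_az(time_end, lon_UL, lat_UL)
SZA_UL = 90. - math.degrees(alt_UL_rad)           # solar zenith = 90 deg - altitude
SAA_UL = math.degrees(az_UL_rad)                  # solar azimuth in degrees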
......@@ -1979,8 +1980,8 @@ class GEOPROCESSING(object):
[1, ll[1], ll[0], ll[1] * ll[0]],
[1, lr[1], lr[0], lr[1] * lr[0]]])
SZA_const_matrix = np.array([SZA_UL, SZA_UR, SZA_LL, SZA_LR])
SZA_factors = np.linalg.solve(SZA_SAA_coeff_matrix, SZA_const_matrix)
SZA_array = (SZA_factors[0] + SZA_factors[1] * cols_arr + SZA_factors[2] * rows_arr + SZA_factors[
SZA_factors = np.linalg.solve(SZA_SAA_coeff_matrix, SZA_const_matrix)
SZA_array = (SZA_factors[0] + SZA_factors[1] * cols_arr + SZA_factors[2] * rows_arr + SZA_factors[
3] * cols_arr * rows_arr).astype(np.float32)
SAA_const_matrix = np.array([SAA_UL, SAA_UR, SAA_LL, SAA_LR])
......@@ -2459,7 +2460,6 @@ class GEOPROCESSING(object):
with open (os.path.splitext(path_output)[0]+'.hdr','r') as inF: lines = inF.readlines()
outContent = ''.join([i for i in lines if not re.search('map info', i, re.I)])
with open (os.path.splitext(path_output)[0]+'.hdr','w') as outF: outF.write(outContent)
assert os.path.exists(path_output) and os.path.exists(os.path.splitext(path_output)[0]+'.hdr'), \
"Layerstacking failed because output cannot be found."
......@@ -3652,6 +3652,14 @@ def EPSG2Proj4(EPSG_code):
srs.ImportFromEPSG(EPSG_code)
return srs.ExportToProj4()
def get_footprint_polygon(CornerLonLat): ## NOT tested!
return Polygon(CornerLonLat)
def get_overlap_polygon(poly1, poly2):
overlap_poly = poly1.intersection(poly2)
overlap_percentage = 100 * shape(overlap_poly).area / shape(poly2).area
return overlap_poly, overlap_percentage
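Note that shapely's Polygon expects corners in ring order, so the [UL, UR, LL, LR] convention used elsewhere would need reordering before calling get_footprint_polygon (the function is flagged as untested above). A usage sketch with made-up, ring-ordered coordinates:

poly1 = get_footprint_polygon([(12., 52.), (14., 52.), (14., 50.), (12., 50.)])  # UL,UR,LR,LL
poly2 = get_footprint_polygon([(13., 51.), (15., 51.), (15., 49.), (13., 49.)])
overlap_poly, overlap_percentage = get_overlap_polygon(poly1, poly2)
print(overlap_percentage)  # 25.0 -> share of poly2 covered by poly1, in percent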
# def get_lonlat_coord_array(rasObj_or_ENVI_file): ### back into the object after all? -> the object has to be instantiated anyway in order to calculate the acquisition geometries
# '''Returns numpy array containing longitude pixel coordinates (band 0) and latitude pixel coordinates (band 1).'''
# if isinstance(rasObj_or_ENVI_file,basestring):
......
......@@ -13,7 +13,13 @@
###############################################################################
########################### Library import ####################################
import os,re,datetime,glob,sys, psycopg2, collections
import os
import re
import datetime
import glob
import sys
import psycopg2
import collections
job, usecase = GMS_config.job, GMS_config.usecase # read from builtins (set by process_controller)
from gms_io import Input_reader as INP_R
......@@ -60,11 +66,12 @@ def get_entity_IDs_within_AOI(): # called in console mode
data_list.append({'image_type':'ATM','satellite':'ATM-data', 'sensor':'unknown', 'subsystem':None, 'acquisition_date':'unknown', 'entity_ID':'dummy_ID'})
for ds in data_list:
ds['proc_level'] = 'L0A'
ds['acquisition_date'] = datetime.datetime.strptime(ds['acquisition_date'], '%Y-%m-%d')
ds['subsystem'] = '' if ds['subsystem']==None else ds['subsystem']
#ds['scene_ID'] = '_'.join([ds['satellite'],ds['sensor'],ds['subsystem'],ds['entity_ID']])
ds['scene_ID'] = ds['entity_ID']
ds['sensormode'] = get_sensormode(ds)
ds['subsystem'] = '' if ds['subsystem']==None else ds['subsystem']
#ds['scene_ID'] = '_'.join([ds['satellite'],ds['sensor'],ds['subsystem'],ds['entity_ID']])
ds['scene_ID'] = ds['entity_ID']
ds['sensormode'] = get_sensormode(ds)
if usecase.skip_thermal:
data_list = [ds for ds in data_list if not ds['subsystem'] == 'TIR'] # removes ASTER TIR in case of skip_thermal
if usecase.skip_pan:
......@@ -88,6 +95,7 @@ def get_data_list_of_current_jobID(): # called in webapp mode
scenedata = resultset[0]
ds = collections.OrderedDict()
ds.update({'proc_level' :'L0A'})
ds.update({'scene_ID' :sceneid})
ds.update({'datasetid' :scenedata[0]})
ds.update({'image_type' :query('datasets' ,'image_type',{'id':scenedata[0]})[0][0]})
......@@ -125,8 +133,7 @@ def LandsatID2dataset(ID_list):
def get_sensormode(dataset):
if re.search('SPOT',dataset['satellite']):
path_archive = L0B_P._get_local_archive_path_baseN(dataset['image_type'],dataset['satellite'],\
dataset['sensor'], dataset['entity_ID'],None)
path_archive = HLP_F.path_generator(dataset).get_local_archive_path_baseN()
dim_ = HLP_F.open_specific_file_within_archive(path_archive,'*/scene01/metadata.dim')[0]
SPOT_mode = re.search("<SENSOR_CODE>([a-zA-Z0-9]*)</SENSOR_CODE>",dim_, re.I).group(1)
assert SPOT_mode in ['J','X','XS','A','P','M'], 'Unknown SPOT sensor mode: %s' %SPOT_mode
......@@ -142,8 +149,7 @@ def add_local_availability(dataset):
DB_match = INP_R.get_info_from_SQLdb(job.path_database,'processed_data',['proc_level','LayerBandsAssignment'],\
{'image_type':dataset['image_type'],'satellite':dataset['satellite'], 'sensor':dataset['sensor'],\
'subsystem':dataset['subsystem'], 'sensormode':dataset['sensormode'], 'entity_ID':dataset['entity_ID']})
path_logfile = L0B_P.get_path_logfile(job.path_procdata,dataset['satellite'],dataset['sensor'],\
dataset['subsystem'], dataset['acquisition_date'],dataset['entity_ID'])
path_logfile = HLP_F.path_generator(dataset).get_path_logfile()
def get_HighestProcL_dueLog(path_logfile):
if os.path.exists(path_logfile):
......@@ -178,26 +184,26 @@ def add_local_availability(dataset):
OUT_W.data_DB_updater(GMS_file_dict)
if job.call_type == 'console':
OUT_W.data_DB_to_csv()
dataset['GMS_proc_level'] = HighestProcL_dueLog
dataset['proc_level'] = HighestProcL_dueLog
elif len(DB_match) == 1:
print('Found a matching %s dataset for %s. Processing skipped until %s.' \
%(HighestProcL_dueLog,dataset['entity_ID'],HighestProcL_dueLog))
if DB_match[0][0] == HighestProcL_dueLog:
dataset['GMS_proc_level'] = DB_match[0][0]
dataset['proc_level'] = DB_match[0][0]
else:
dataset['GMS_proc_level'] = HighestProcL_dueLog
dataset['proc_level'] = HighestProcL_dueLog
else:
print('Found a matching dataset for %s but with a different LayerBandsAssignment. ' \
'Dataset has to be reprocessed.' %dataset['entity_ID'])
dataset['GMS_proc_level'] = None
dataset['proc_level'] = None
else:
dataset['GMS_proc_level'] = None
dataset['proc_level'] = None
else:
dataset['GMS_proc_level'] = None
dataset['proc_level'] = None
elif len(DB_match) > 1:
print ('According to database there are multiple matches for the dataset %s. Dataset has to be reprocessed.' \
%dataset['entity_ID'])
dataset['GMS_proc_level'] = None
dataset['proc_level'] = None
else:
dataset['GMS_proc_level'] = None
dataset['proc_level'] = None
return dataset
\ No newline at end of file
......@@ -38,13 +38,13 @@ class L0B_object(object):
self.acquisition_date = data_list_posX['acquisition_date']
self.entity_ID = data_list_posX['entity_ID']
self.scene_ID = data_list_posX['scene_ID']
self.baseN = get_baseN(self.sensor,self.subsystem, self.entity_ID)
self.path_procdata = get_path_procdata(job.path_procdata,self.satellite,self.sensor,\
self.acquisition_date,self.entity_ID)
self.path_logfile = os.path.join(self.path_procdata, self.baseN+'.log')
PG = HLP_F.path_generator(self.__dict__)
self.baseN = PG.get_baseN()
self.path_procdata = PG.get_path_procdata()
self.path_logfile = PG.get_path_logfile()
self.logger = HLP_F.setup_logger('log__'+self.baseN, self.path_logfile,self.job_CPUs, append=0)
self.path_archive = _get_local_archive_path_baseN(self.image_type, self.satellite, self.sensor, \
self.entity_ID, self.logger)
PG = HLP_F.path_generator(self.__dict__) # passes a logger in addition to previous attributes
self.path_archive = PG.get_local_archive_path_baseN()
if not os.path.isfile(self.path_archive) and not os.path.isdir(self.path_archive):
self.logger.info("The %s dataset '%s' has not been processed earlier and no corresponding raw data archive"
......@@ -66,34 +66,4 @@ class L0B_object(object):
" > download source code for Landsat here < "
if success == False:
self.logger.critical("Download for %s dataset '%s' failed. No further processing possible." %(sensor,entity_ID))
return success
def get_path_procdata(basepath_procdata, satellite, sensor, AcqDate, entity_ID):
return os.path.join(basepath_procdata, satellite, sensor, AcqDate.strftime('%Y-%m-%d'), entity_ID)
def get_baseN(sensor,subsystem,entity_ID):
return sensor+'__'+entity_ID if subsystem in ['',None] else ('__').join([sensor,subsystem,entity_ID])
def get_path_logfile(basepath_procdata, satellite, sensor, subsystem, AcqDate, entity_ID):
return os.path.join(get_path_procdata(basepath_procdata, satellite, sensor, AcqDate, entity_ID),
get_baseN(sensor,subsystem,entity_ID)+'.log')
def _get_local_archive_path_baseN(image_type, satellite, sensor, entity_ID, logger): # must be callable from L0A-P
if image_type == 'RSD' and satellite is not None:
folder_rawdata = os.path.join(job.path_archive,satellite,sensor)
extensions_found = [ext for ext in ['.tar.gz','.zip','.hdf'] if os.path.exists(os.path.join(folder_rawdata, entity_ID+ext))]
assert len(extensions_found) > 0, 'The dataset %s.* cannot be found at %s' %(entity_ID,folder_rawdata)
assert len(extensions_found) == 1, '''The folder %s contains multiple files identified as raw data to be processed.
Choosing first one..''' %folder_rawdata
return os.path.join(folder_rawdata, entity_ID+extensions_found[0])
if image_type == 'DGM':
if satellite != None and re.search(satellite,'SRTM',re.I) != None:
return os.path.join(job.path_archive,'srtm2/', entity_ID+'_sub.bsq')
if image_type == 'ATM':
return os.path.join(job.path_archive,'atm_data/', entity_ID + '.bsq')
try: logger.critical('Given dataset specification is not yet supported. Specified parameters: image_type: %s;"'
' satellite: %s; sensor: %s' %(image_type,satellite,sensor))
except AttributeError: print('Given dataset specification is not yet supported. Specified parameters: '
'image_type: %s; satellite: %s; sensor: %s' %(image_type,satellite,sensor))
\ No newline at end of file
return success
\ No newline at end of file
......@@ -119,6 +119,7 @@ class L1A_object(object):
self.subsystem = self.MetaObj.Subsystem
self.arr_desc = 'DN'
self.validate_GeoTransProj_GeoAlign()
# self.meta is assigned by self.MetaObj2SpyfileheaderMeta(), called by the process controller
if L0B_object.image_type == 'DGM':
''' Reads DGM raw data and returns numpy array and metadata dictionary. '''
......@@ -157,10 +158,9 @@ class L1A_object(object):
del self.logger, self.GMS_identifier['logger'], self.MetaObj.logger
def __call__(self,tuple_GMS_subset):
# def fill(self,tuple_GMS_subset):
def fill_from_disk(self,tuple_GMS_subset):
path_GMS_file = tuple_GMS_subset[0]
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
GMSfileDict = INP_R.GMSfile2dict(path_GMS_file)
for key,value in zip(GMSfileDict.keys(), GMSfileDict.values()):
setattr(self, key, value)
self.arr_shape = tuple_GMS_subset[1][0]
......@@ -178,6 +178,7 @@ class L1A_object(object):
# self.arr, self.meta = INP_R.read_ENVIfile(os.path.splitext(self.path_Outfile_L1A)[0]+'.hdr', \
# self.arr_shape, self.arr_pos,self.logger)
self.meta = INP_R.read_ENVIhdr_to_dict(path_img_hdr,self.logger)
self.meta = INP_R.unify_envi_header_keys(self.meta) # ensure key compatibility
# for k,v in self.meta.items():
# print (k,v,type(v))
# sys.exit()
......@@ -776,6 +777,7 @@ class L1A_object(object):
self.meta = self.MetaObj.Meta2SpyfileheaderMeta()
self.path_Outfile_L1A = self.MetaObj.Dataname
def apply_nodata_mask_to_ObjAttr(self, attrname, custom_nodata_val=None):
if hasattr(self,attrname):
nodata_val = HLP_F.get_outFillZeroSaturated(getattr(self,attrname).dtype)[0] \
......@@ -845,8 +847,8 @@ class L1A_object(object):
mask_1bit_temp = self.mask_1bit[rasObj.rowStart:rasObj.rowEnd+1, rasObj.colStart:rasObj.colEnd+1]
else:
mask_1bit_temp = rasObj.calc_mask_data_nodata(custom_nodataVal=-9999)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)
data = rasObj.get_lonlat_coord_array(assignNoData=[mask_1bit_temp,fillVal])[0]
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
data = rasObj.get_lonlat_coord_array(assignNoData=[mask_1bit_temp,fillVal])[0]
return {'desc': 'lonlat_arr', 'row_start': rasObj.rowStart, 'row_end': rasObj.rowEnd,
'col_start': rasObj.colStart, 'col_end': rasObj.colEnd, 'data': data}
......@@ -859,7 +861,9 @@ class L1A_object(object):
mask_1bit_temp = self.mask_1bit[rasObj.rowStart:rasObj.rowEnd+1, rasObj.colStart:rasObj.colEnd+1]
else:
mask_1bit_temp = rasObj.calc_mask_data_nodata(custom_nodataVal=-9999)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
#print(self.meta)
#sys.exit()
data = rasObj.calc_VZA_array(float(self.meta['ViewingAngle']), float(self.meta['FieldOfView']),\
self.trueDataCornerPos,self.shape_fullArr, [mask_1bit_temp,fillVal])
return {'desc': 'VZA_arr', 'row_start': rasObj.rowStart, 'row_end': rasObj.rowEnd,
......@@ -874,7 +878,7 @@ class L1A_object(object):
mask_1bit_temp = self.mask_1bit[rasObj.rowStart:rasObj.rowEnd+1, rasObj.colStart:rasObj.colEnd+1]
else:
mask_1bit_temp = rasObj.calc_mask_data_nodata(custom_nodataVal=-9999)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
SZA_data, SAA_data = rasObj.calc_SZA_SAA_array(self.meta['AcqDate'], self.meta['AcqTime'],self.trueDataCornerPos, \
self.trueDataCornerLonLat,self.shape_fullArr, self.meta['overpass duraction sec'], \
assignNoData=[mask_1bit_temp,fillVal],accurracy=job.SZA_SAA_calculation_accurracy, \
......@@ -899,7 +903,7 @@ class L1A_object(object):
mask_1bit_temp = self.mask_1bit[rasObj.rowStart:rasObj.rowEnd+1, rasObj.colStart:rasObj.colEnd+1]
else:
mask_1bit_temp = rasObj.calc_mask_data_nodata(custom_nodataVal=-9999)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
data = rasObj.calc_RAA_array(self.trueDataCornerLonLat, self.SAA_arr, self.VAA_mean, assignNoData=[mask_1bit_temp,fillVal])
return {'desc': 'RAA_arr', 'row_start': rasObj.rowStart, 'row_end': rasObj.rowEnd,
'col_start': rasObj.colStart, 'col_end': rasObj.colEnd, 'data': data}
......@@ -1031,7 +1035,7 @@ def merge_L1A_tiles_to_L1A_obj(list_L1A_tiles):
return L1A_obj
@autojit
def numba_array_merger(L1A_obj, list_L1A_tiles):
def numba_array_merger(L1A_obj, list_arraynames, list_L1A_tiles):
for ndarray in list_arraynames:
target_shape = L1A_obj.shape_fullArr[:2]+[getattr(list_L1A_tiles[0],ndarray).shape[2]] \
if len(getattr(list_L1A_tiles[0],ndarray).shape) == 3 else L1A_obj.shape_fullArr[:2]
......
......@@ -39,6 +39,11 @@ try:
import cv2
except: print('cv2-lib missing..')
job, usecase = GMS_config.job, GMS_config.usecase
from gms_io import Input_reader as INP_R
from misc import helper_functions as HLP_F
from algorithms import GEOPROCESSING_BD as GEOP
########################### core functions ####################################
def dummy_calculate_spatial_shifts(L1A_obj,L1A_obj_Ref):
L1A_obj.x_shift = np.zeros_like(L1A_obj.arr,dtype=np.float)
......@@ -180,14 +185,12 @@ def L1B_P__main(L1A_Instances):
class L1B_object(object):
def __init__(self, L1A_Instance):
self.imref = None
self.imref = None
self.x_shift_px = None
self.y_shift_px = None
def get_reference_image(self):
"""database query?"""
pass
def calculate_spatial_shifts(self):
v=1
......@@ -237,14 +240,25 @@ class L1B_object(object):
#shift_image_by_updating_map_info(x_totalshift, y_totalshift, path_im1, path_imout)
shift_image_by_updating_map_info(x_totalshift, y_totalshift, path_im1, path_imout)
def get_opt_winpos_winsize(self):
"""according to DGM, cloud_mask, trueCornerLonLat"""
def get_opt_winpos_winsize(trueDataCornerLonLat):
"""according to DGM, cloud_mask, trueCornerLonLat
Input:
- trueDataCornerLonLat: [UL_LonLat, UR_LonLat, LL_LonLat, LR_LonLat]"""
#pgSQL_geom = INP_R.get_info_from_postgreSQLdb(job.conn_database,'scenes_proc','bounds', {'sceneid':scene_ID})[0][0]
#trueCornerLonLat = HLP_F.postgreSQL_poly_to_cornerLonLat(HLP_F.postgreSQL_geometry_to_postgreSQL_poly(pgSQL_geom))
#footprint_poly = GEOP.get_footprint_polygon(trueDataCornerLonLat)
center_coord = (np.mean([trueDataCornerLonLat[0][0],trueDataCornerLonLat[3][0]]), \
np.mean([trueDataCornerLonLat[0][1],trueDataCornerLonLat[3][1]]))
win_pos = center_coord
win_sz = 512
return win_pos, win_sz
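Usage of this dummy version: the window position is simply the mean of the UL and LR corners and the window size is fixed (corner coordinates made up):

corners = [(12., 52.), (14., 52.), (12., 50.), (14., 50.)]   # UL, UR, LL, LR
win_pos, win_sz = get_opt_winpos_winsize(corners)
print(win_pos, win_sz)   # (13.0, 51.0) 512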
def get_reference_image(self):
"""database query?"""
pass
def get_image_windows_to_match(self):
def get_image_windows_to_match():
ds_imref,ds_im2shift = gdal.Open(path_imref), gdal.Open(path_im2shift)
imref_poly, im2shift_poly = get_footprint_polygon(path_im0), get_footprint_polygon(path_im1)
overlap_poly, overlap_percentage = get_overlap_polygon(imref_poly, im2shift_poly)
......
......@@ -761,7 +761,11 @@ class METADATA(object):
# additional GainMode
h3 = re.search("GROUP[\s]*=[\s]*GAININFORMATION[\s\S]*END_GROUP[\s]*=[\s]*GAININFORMATION",genericmeta_, re.I)
h31= re.findall('VALUE[\s]*=[\s]*[\S]?\"[0-9A-Z]*\", \"([A-Z]*)\"',h3.group(0), re.I)
gains = {'HGH':[170.8,179.0,106.8,27.5,8.8,7.9,7.55,5.27,4.02, 'N/A', 'N/A', 'N/A', 'N/A', 'N/A'], 'NOR':[427.0, 358.0, 218.0, 55.0, 17.6, 15.8, 15.1, 10.55, 8.04, 28.17, 27.75, 26.97, 23.30, 21.38], 'LOW':[569.0, 477.0, 290.0, 73.3, 23.4, 21.0, 20.1, 14.06, 10.72, 'N/A', 'N/A', 'N/A', 'N/A', 'N/A'], 'LO1':[569.0, 477.0, 290.0, 73.3, 23.4, 21.0, 20.1, 14.06, 10.72, 'N/A', 'N/A', 'N/A', 'N/A', 'N/A'],'LO2':['N/A','N/A','N/A',73.3, 103.5, 98.7, 83.8, 62.0, 67.0, 'N/A', 'N/A', 'N/A', 'N/A', 'N/A'],'OFF':"OFF"}
gains = {'HGH':[170.8,179.0,106.8,27.5,8.8,7.9,7.55,5.27,4.02, 'N/A', 'N/A', 'N/A', 'N/A', 'N/A'], \
'NOR':[427.0, 358.0, 218.0, 55.0, 17.6, 15.8, 15.1, 10.55, 8.04, 28.17, 27.75, 26.97, 23.30, 21.38], \
'LOW':[569.0, 477.0, 290.0, 73.3, 23.4, 21.0, 20.1, 14.06, 10.72, 'N/A', 'N/A', 'N/A', 'N/A', 'N/A'], \
'LO1':[569.0, 477.0, 290.0, 73.3, 23.4, 21.0, 20.1, 14.06, 10.72, 'N/A', 'N/A', 'N/A', 'N/A', 'N/A'], \
'LO2':['N/A','N/A','N/A',73.3, 103.5, 98.7, 83.8, 62.0, 67.0, 'N/A', 'N/A', 'N/A', 'N/A', 'N/A'],'OFF':"OFF"}
self.additional.append([["GainMode:"],["Max_radiance:"]])
for x, i in enumerate(h31[:15]):
self.additional[-1][-2].append(i)
......
......@@ -6,6 +6,10 @@
# GFZ Potsdam, Section 1.4
#
###############################################################################
# vars from builtins:
# GMS_call_type <- process_controller
# GMS_process_ID <- process_controller
assert GMS_call_type in ['console','webapp'], \
"builtins.GMS_call_type '%s' is not a valid call_type. Use 'console' or 'webapp' instead!" %GMS_call_type
import datetime
......
......@@ -124,10 +124,19 @@ def read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=None, return
return image_data
def GMSfile2dict(path_GMSfile):
""" Converts a JSON file (like the GMS file) to a Python dictionary with keys and values.
:param path_GMSfile:
absolute path on disk
:return:
the corresponding Python dictionary
"""
return json.load(open(path_GMSfile))
def _unify_envi_header_keys(header_dict):
refkeys = ['AcqDate', 'AcqTime', 'Additional', 'IncidenceAngle', 'Metafile', 'PhysUnit', 'ProcLCode', 'Quality', 'Satellite', 'Sensor', 'SunAzimuth', 'SunElevation', 'ViewingAngle']
def unify_envi_header_keys(header_dict):
"""Ensures the compatibility of ENVI header keys written by Spectral-Python the code internal attribute names.
(ENVI header keys are always lowercase in contrast to the attribute names used in code)."""
refkeys = ['AcqDate', 'AcqTime', 'Additional', 'FieldOfView', 'IncidenceAngle', 'Metafile', 'PhysUnit', \
'ProcLCode', 'Quality', 'Satellite', 'Sensor', 'SunAzimuth', 'SunElevation', 'ViewingAngle']
unified_header_dict = header_dict
for key in header_dict.keys():
for refkey in refkeys:
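The hunk ends before the matching loop completes; a sketch of the case-insensitive mapping it implies (not the exact committed body):

def unify_envi_header_keys_sketch(header_dict):
    # map the lowercase keys returned by Spectral Python onto the mixed-case
    # reference keys used internally; unmatched keys are kept as-is
    refkeys = ['AcqDate', 'AcqTime', 'Additional', 'FieldOfView', 'IncidenceAngle',
               'Metafile', 'PhysUnit', 'ProcLCode', 'Quality', 'Satellite',
               'Sensor', 'SunAzimuth', 'SunElevation', 'ViewingAngle']
    unified_header_dict = dict(header_dict)
    for key in list(header_dict.keys()):
        for refkey in refkeys:
            if key.lower() == refkey.lower() and key != refkey:
                unified_header_dict[refkey] = unified_header_dict.pop(key)
    return unified_header_dict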
......@@ -158,7 +167,7 @@ def get_info_from_postgreSQLdb(conn_params,tablename,vals2return,cond_dict,recor
connection = psycopg2.connect(conn_params)
if connection is None: return 'database connection fault'
cursor = connection.cursor()
condition = "WHERE " + " AND ".join(["%s=%s" %(k,v) for k,v in cond_dict.items()])
condition = "WHERE " + " AND ".join(["%s=%s" %(k,v) for k,v in cond_dict.items()])
cursor.execute("SELECT " +','.join(vals2return)+ " FROM " +tablename+ " " + condition)
records2return = cursor.fetchall() if records2fetch == 0 else [cursor.fetchone()] if records2fetch == 1 else \
cursor.fetchmany(size = records2fetch) # e.g. [('LE71950282003121EDC00',), ('LE71950282003105ASN00',)]
......@@ -167,14 +176,28 @@ def get_info_from_postgreSQLdb(conn_params,tablename,vals2return,cond_dict,recor
return records2return
def get_list_GMSfiles(dataset_list__target__tuple):
dataset_list,target = dataset_list__target__tuple[0] if not isinstance(dataset_list__target__tuple[0],dict) else [dataset_list__target__tuple[0]],dataset_list__target__tuple[1]
returned_tuples=[]
for dataset in dataset_list:
# returned_tuples = get_info_from_SQLdb(job.path_database,'processed_data',['path_procdata','baseN'],{'job_ID':job_ID,'image_type':'RSD','georef':'Slave','proc_level':'L1A'})
returned_tuples = returned_tuples + get_info_from_SQLdb(job.path_database,'processed_data',['path_procdata','baseN'],{'image_type':dataset['image_type'],'entity_ID':dataset['entity_ID'],'subsystem':dataset['subsystem'],'proc_level':target})
# returned_tuples = returned_tuples + get_info_from_SQLdb(job.path_database,'processed_data',['path_procdata','baseN'],{'image_type':dataset['image_type'],'entity_ID':'LE71510322000093SGS00','subsystem':dataset['subsystem'],'proc_level':target})
# returned_tuples = returned_tuples + get_info_from_SQLdb(job.path_database,'processed_data',['path_procdata','baseN'],{'image_type':dataset['image_type'],'entity_ID':'LC81510322013184LGN00','subsystem':dataset['subsystem'],'proc_level':target})
GMS_list = [os.path.join(returned_tuples[i][0],returned_tuples[i][1]+'_%s.gms' %target) for i in range(len(returned_tuples)) if os.path.isfile(os.path.join(returned_tuples[i][0],returned_tuples[i][1]+'_%s.gms' %target))]
"""Returns a list of absolute paths linking to gms-files of truely written datasets that fullfill certain criteria.
Input:
- dataset_list__target__tuple: tuple([dataset1_dictionary, dataset2_dictionary], target GMS processing level)
Output:
- [/path/to/gms_file1.gms, /path/to/gms_file2.gms]
"""
dataset_list,target = dataset_list__target__tuple[0] if not isinstance(dataset_list__target__tuple[0],dict) \
else [dataset_list__target__tuple[0]],dataset_list__target__tuple[1]
if GMS_call_type == 'webapp':
GMS_list = []
for dataset in dataset_list:
path_gms_file = HLP_F.path_generator(dataset).get_path_gmsfile()
if os.path.exists(path_gms_file):
GMS_list.append(path_gms_file)
else: # GMS_call_type == 'console'
returned_tuples=[]
for dataset in dataset_list:
# returned_tuples = get_info_from_SQLdb(job.path_database,'processed_data',['path_procdata','baseN'],{'job_ID':job_ID,'image_type':'RSD','georef':'Slave','proc_level':'L1A'})
returned_tuples = returned_tuples + get_info_from_SQLdb(job.path_database,'processed_data',['path_procdata','baseN'],{'image_type':dataset['image_type'],'entity_ID':dataset['entity_ID'],'subsystem':dataset['subsystem'],'proc_level':target})
# returned_tuples = returned_tuples + get_info_from_SQLdb(job.path_database,'processed_data',['path_procdata','baseN'],{'image_type':dataset['image_type'],'entity_ID':'LE71510322000093SGS00','subsystem':dataset['subsystem'],'proc_level':target})
# returned_tuples = returned_tuples + get_info_from_SQLdb(job.path_database,'processed_data',['path_procdata','baseN'],{'image_type':dataset['image_type'],'entity_ID':'LC81510322013184LGN00','subsystem':dataset['subsystem'],'proc_level':target})
GMS_list = [os.path.join(returned_tuples[i][0],returned_tuples[i][1]+'_%s.gms' %target) for i in range(len(returned_tuples)) if os.path.isfile(os.path.join(returned_tuples[i][0],returned_tuples[i][1]+'_%s.gms' %target))]
return GMS_list
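Call pattern for the revised function; the input tuple bundles the dataset dictionaries with the target processing level. Entity IDs are taken from the commented examples above, other keys shortened; in webapp mode the dictionaries additionally need the keys consumed by path_generator:

datasets = [{'image_type': 'RSD', 'entity_ID': 'LE71950282003121EDC00', 'subsystem': ''},
            {'image_type': 'RSD', 'entity_ID': 'LE71950282003105ASN00', 'subsystem': ''}]
GMS_list = get_list_GMSfiles((datasets, 'L1A'))   # -> paths of existing *_L1A.gms files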
def SRF_reader(GMS_identifier):
......@@ -226,6 +249,7 @@ def Solar_Irradiance_reader(resol_nm = None, wvl_min_nm = None, wvl_max_nm = Non
return sol_irr
def get_path_cloud_class_obj(GMS_identifier, get_all=False):
"""Returns the absolute path of the the training data used by cloud classifier."""
GMS_sensorcode = HLP_F.get_GMS_sensorcode(GMS_identifier)
satellite,sensor,logger = (GMS_identifier['Satellite'],GMS_identifier['Sensor'],GMS_identifier['logger'])
path_cloud_classifier_objects = os.path.join(job.path_cloud_classif,satellite,sensor)
......
......@@ -13,8 +13,22 @@
########################### Library import ####################################
#from __future__ import (division, print_function, absolute_import)
# unicode literals cause writing errors
from spectral.io import envi as envi
import pip,numpy as np,os,sys,sqlite3,csv,json,collections,shutil,spectral,inspect,errno,dill,datetime,psycopg2
from spectral.io import envi
import pip
import numpy as np
import os
import sys
import sqlite3
import csv
import json
import collections
import shutil
import spectral
import inspect
import errno
import dill
import datetime
import psycopg2
import misc.helper_functions as HLP_F
job, usecase = GMS_config.job, GMS_config.usecase # read from builtins (set by process_controller)
......@@ -214,8 +228,10 @@ def Obj2ENVI(InObj, write_masks_as_ENVI_classification = True):
InObj.logger.warning("%s can not be written, because there is no corresponding attribute." % param_dic[descriptor][0])
# write GMS-file
ASCII_writer(InObj.__dict__.copy(), os.path.join(InObj.path_procdata,'%s_%s.gms' %(InObj.baseN, InObj.proc_level)),InObj.logger)
data_DB_updater(InObj.__dict__)
# IMPORTANT: DO NOT pass the complete object but only a copy of the dictionary in order to prevent ASCII_writer and
# data_DB_updater from modifying the attributes of the object!!
ASCII_writer(InObj.__dict__.copy(), HLP_F.path_generator(InObj.__dict__.copy()).get_path_gmsfile(),InObj.logger)
data_DB_updater(InObj.__dict__.copy())
InObj.logger.info('%s data successfully saved.' %InObj.proc_level)
del InObj.logger
......@@ -267,10 +283,7 @@ def data_DB_updater(obj_dict):
print('Database connection could not be established. Database entry could not be created or updated.')
else:
# generate geometry
UL_LonLat, UR_LonLat, LL_LonLat, LR_LonLat = obj_dict['trueDataCornerLonLat']
obj_dict['trueDataCornerLonLat'] = \
'POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' %(LR_LonLat[0],LR_LonLat[1], LL_LonLat[0],LL_LonLat[1],\
UL_LonLat[0],UL_LonLat[1], UR_LonLat[0],UR_LonLat[1], LR_LonLat[0],LR_LonLat[1])
obj_dict['trueDataCornerLonLat'] = HLP_F.cornerLonLat_to_postgreSQL_poly(obj_dict['trueDataCornerLonLat'])
cursor = connection.cursor()
fullColumnList = ['sceneid','georef','proc_level','layer_bands_assignment','bounds']
dict_dbkey_objkey = {'sceneid':'scene_ID','georef':'georef','proc_level':'proc_level', \
......@@ -281,17 +294,17 @@ def data_DB_updater(obj_dict):
new_record = [list2str(val) if isinstance(val,list) else val for val in new_record] # e.g. LayerBandsA.
cursor.execute("INSERT INTO scenes_proc(sceneid,georef,proc_level,layer_bands_assignment,bounds) VALUES\
('%s','%s','%s','%s',ST_GeomFromText('%s'));" %tuple(new_record))
else: # udate existing entry
else: # update existing entry
values2update = [obj_dict[dict_dbkey_objkey[dbkey]] for dbkey in \
['georef','proc_level','layer_bands_assignment','bounds']]
values2update = [list2str(val) if isinstance(val,list) else val for val in values2update] # e.g. LayerB.
cursor.execute("UPDATE scenes_proc set georef='%s',proc_level='%s',layer_bands_assignment='%s', \
bounds=ST_GeomFromText('%s');" %tuple(values2update))
bounds=ST_GeomFromText('%s') WHERE sceneid='%s';" \
%(tuple(values2update + [obj_dict['scene_ID']])))
if 'connection' in locals(): connection.commit()
if 'connection' in locals(): connection.close()
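A side note on the fixed UPDATE above: the string-interpolated SQL works but is injection-prone; a parameterized variant with the same semantics (assuming psycopg2, which quotes the values itself) would be:

cursor.execute("UPDATE scenes_proc SET georef=%s, proc_level=%s, layer_bands_assignment=%s, "
               "bounds=ST_GeomFromText(%s) WHERE sceneid=%s;",
               tuple(values2update) + (obj_dict['scene_ID'],))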
def data_DB_to_csv():
if not os.path.exists(job.path_database) or not os.path.getsize(job.path_database)> 0:
print('No database conversion to CSV performed, because DB does not exist or DB is empty.')
......
......@@ -22,6 +22,7 @@ import tarfile
import zipfile
import fnmatch
import inspect
import psycopg2
from multiprocessing import sharedctypes
import algorithms.gms_cloud_classifier as CLD_P # Cloud Processor
config = GMS_config # read from builtins (set by process_controller)
......@@ -238,6 +239,10 @@ def get_mask_classdefinition(maskname):
else: return None
def get_outFillZeroSaturated(dtype):
"""Returns the values for 'fill-', 'zero-' and 'saturated' pixels of an image
to be written with regard to the target data type.
Input:
- dtype: data type of the image to be written"""
dtype = str(np.dtype(dtype))
assert dtype in ['int8', 'uint8', 'int16', 'uint16','float32'], \
"get_outFillZeroSaturated: Unknown dType: '%s'." %dtype
......@@ -370,3 +375,81 @@ def update_metaDB_if_needed(satellite,sensor,subsystem,dates2check):
# pd_dataframe[pd_dataframe['acquisitionDate'] > '2003-05-28']
# pd_dataframe['acquisitionDate'].max()
# combined.to_sql("cps_raw.cps_basic_tabulation", engine, if_exists='append')
def cornerLonLat_to_postgreSQL_poly(CornerLonLat):
"""Converts a coordinate list [UL_LonLat, UR_LonLat, LL_LonLat, LR_LonLat] to a postgreSQL polygon."""
UL_LonLat, UR_LonLat, LL_LonLat, LR_LonLat = CornerLonLat
pGSQL_poly = 'POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' %(LR_LonLat[0],LR_LonLat[1], LL_LonLat[0],LL_LonLat[1],\
UL_LonLat[0],UL_LonLat[1], UR_LonLat[0],UR_LonLat[1], LR_LonLat[0],LR_LonLat[1])
return pGSQL_poly
def postgreSQL_poly_to_cornerLonLat(pGSQL_poly):
"""Converts a postgreSQL polygon to a coordinate list [UL_LonLat, UR_LonLat, LL_LonLat, LR_LonLat]."""
fl = [float(i) for i in re.findall(r"[-+]?\d*\.\d+|\d+",pGSQL_poly)]
CornerLonLat = [(fl[4],fl[5]), (fl[6],fl[7]), (fl[2],fl[3]), (fl[0],fl[1])] # UL,UR,LL,LR
return CornerLonLat
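A round-trip sketch for the two converters (made-up corner coordinates):

corners = [(12.0, 52.0), (14.0, 52.0), (12.0, 50.0), (14.0, 50.0)]  # UL, UR, LL, LR
poly = cornerLonLat_to_postgreSQL_poly(corners)
# 'POLYGON((14.0 50.0, 12.0 50.0, 12.0 52.0, 14.0 52.0, 14.0 50.0))'
assert postgreSQL_poly_to_cornerLonLat(poly) == corners  # round trip restores UL,UR,LL,LR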
def postgreSQL_geometry_to_postgreSQL_poly(geom):
connection = psycopg2.connect(config.job.conn_database)