Commit b2d71396 authored by Daniel Scheffler

major update that fixes a lot of IDE warnings and errors

- some bugfixes in L1B_P
parent 6c280204
@@ -17,15 +17,11 @@ import os
import re
import datetime
import glob
import sys
import psycopg2
import collections
import builtins
job, usecase = GMS_config.job, GMS_config.usecase # read from builtins (set by process_controller)
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
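# Background sketch (assumption derived from the comment above, not part of this commit):
# the process controller is expected to publish the config via the builtins module, e.g.
#   import builtins
#   builtins.GMS_config = config # hypothetical object name
# before importing this module, which makes GMS_config resolvable here.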
from gms_io import Input_reader as INP_R
from gms_io import Output_writer as OUT_W
from misc import helper_functions as HLP_F
from algorithms import L0B_P as L0B_P
from misc import database_tools as DB_T
from misc import path_generator as PG
from algorithms.METADATA_BD import get_LayerBandsAssignment
@@ -92,9 +88,9 @@ def get_data_list_of_current_jobID(): # called in webapp mode
for sceneid in sceneids:
ds = DB_T.get_scene_and_dataset_infos_from_postgreSQLdb(sceneid)
ds['sensor'] = 'ETM+' if re.search('ETM+', ds['sensor']) else ds['sensor']
if usecase.skip_thermal and ds['subsystem']=='TIR': continue # removes ASTER TIR in case of skip_thermal
ds['subsystem'] = '' if ds['subsystem']==None else ds['subsystem']
ds['subsystem'] = '' if ds['subsystem'] is None else ds['subsystem']
ds['sensormode'] = get_sensormode(ds)
if usecase.skip_pan and ds['sensormode']=='P': continue # removes e.g. SPOT PAN in case of skip_pan
@@ -131,26 +127,26 @@ def add_local_availability(dataset):
DB_match = DB_T.get_info_from_postgreSQLdb \
(job.conn_database,'scenes_proc',['proc_level','layer_bands_assignment'], {'sceneid':dataset['scene_ID']})
else: ## job.call_type == 'console'
DB_match = DB_T.get_info_from_SQLdb(job.path_database,'processed_data',['proc_level','LayerBandsAssignment'],\
{'image_type':dataset['image_type'],'satellite':dataset['satellite'], 'sensor':dataset['sensor'],\
DB_match = DB_T.get_info_from_SQLdb(job.path_database,'processed_data',['proc_level','LayerBandsAssignment'],
{'image_type':dataset['image_type'],'satellite':dataset['satellite'], 'sensor':dataset['sensor'],
'subsystem':dataset['subsystem'], 'sensormode':dataset['sensormode'], 'entity_ID':dataset['entity_ID']})
path_logfile = PG.path_generator(dataset).get_path_logfile()
def get_HighestProcL_dueLog(path_logfile):
if os.path.exists(path_logfile):
logfile = open(path_logfile,'r').read()
def get_HighestProcL_dueLog(path_log):
if os.path.exists(path_log):
logfile = open(path_log, 'r').read()
AllWrittenProcL_dueLog = re.findall(r":*(\S*\s*) data successfully saved.", logfile, re.I)
if AllWrittenProcL_dueLog != []:
HighestProcL_dueLog = HLP_F.sorted_nicely(AllWrittenProcL_dueLog)[-1]
return HighestProcL_dueLog
ProcL = HLP_F.sorted_nicely(AllWrittenProcL_dueLog)[-1]
else:
print ('%s: According to logfile no completely processed data exist at any processing level. ' \
'Dataset has to be reprocessed.' %dataset['entity_ID'])
return None
ProcL = None
else:
print ("No logfile named '%s' found for %s at %s. Dataset has to be reprocessed." \
%(os.path.basename(path_logfile), dataset['entity_ID'], os.path.dirname(path_logfile)))
return None
% (os.path.basename(path_log), dataset['entity_ID'], os.path.dirname(path_log)))
ProcL = None
return ProcL
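# Illustrative note (assumed log format): the regex above matches lines such as
# 'L1A data successfully saved.', so the captured group and thus ProcL would be 'L1A'.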
if len(DB_match) == 1 or DB_match == [] or DB_match == 'database connection fault':
HighestProcL_dueLog = get_HighestProcL_dueLog(path_logfile)
@@ -158,8 +154,8 @@ def add_local_availability(dataset):
assumed_path_GMS_file = '%s_%s.gms' %(os.path.splitext(path_logfile)[0],HighestProcL_dueLog)
if os.path.isfile(assumed_path_GMS_file):
GMS_file_dict = INP_R.GMSfile2dict(assumed_path_GMS_file)
target_LayerBandsAssignment = get_LayerBandsAssignment({'image_type': dataset['image_type'],\
'Satellite': dataset['satellite'], 'Sensor': dataset['sensor'],'Subsystem': dataset['subsystem'], \
target_LayerBandsAssignment = get_LayerBandsAssignment({'image_type': dataset['image_type'],
'Satellite': dataset['satellite'], 'Sensor': dataset['sensor'],'Subsystem': dataset['subsystem'],
'logger': None}, 1 if dataset['sensormode'] == 'P' else None)
if target_LayerBandsAssignment == GMS_file_dict['LayerBandsAssignment']:
if DB_match == [] or DB_match == 'database connection fault':
@@ -18,11 +18,13 @@
########################### Library import ####################################
#from __future__ import (division, print_function, unicode_literals,absolute_import)
import os, re,sys
import os
import re
import builtins
import misc.helper_functions as HLP_F
import misc.path_generator as PG
job = GMS_config.job # read from builtins (set by process_controller)
job = builtins.GMS_config.job # read from builtins (set by process_controller)
########################### core functions ####################################
@@ -57,7 +59,7 @@ class L0B_object(object):
self.georef = 1 if self.image_type == 'RSD' and re.search('OLI', self.sensor, re.I) else 0
if self.path_archive_valid:
self.logger.info('Level 0B object for %s %s%s (data-ID %s) successfully created.' %(self.satellite, \
self.logger.info('Level 0B object for %s %s%s (data-ID %s) successfully created.' %(self.satellite,
self.sensor, (' '+self.subsystem) if self.subsystem not in [None,''] else '', self.entity_ID))
if hasattr(self,'logger'): del self.logger
@@ -65,6 +67,6 @@ class L0B_object(object):
self.logger.info('Level 0B Processing started')
success = False
" > download source code for Landsat here < "
if success == False:
if not success:
self.logger.critical("Download for %s dataset '%s' failed. No further processing possible." %(sensor,entity_ID))
return success
\ No newline at end of file
@@ -16,19 +16,22 @@
########################### Library import ####################################
#from __future__ import (division, print_function, unicode_literals,absolute_import)
import numpy as np
import os
import spectral.io.envi
import itertools
import logging
import sys
import os
import re
import socket
import itertools
import pyfftw
import gdal
import osr
import subprocess
import re
import sys
import time
import builtins
from spectral.io import envi
import gdal
import numpy as np
import osr
import pyfftw
if socket.gethostname() == 'geoms':
sys.path.append('/usr/lib/otb/python/')
os.environ['ITK_AUTOLOAD_PATH'] = "/usr/lib/otb/applications"
@@ -44,8 +47,7 @@ try:
import cv2
except ImportError: print('cv2-lib missing..')
job, usecase = GMS_config.job, GMS_config.usecase
from gms_io import Input_reader as INP_R
job, usecase, GMS_call_type = builtins.GMS_config.job, builtins.GMS_config.usecase, builtins.GMS_config.GMS_call_type
from gms_io import Output_writer as OUT_W
from misc import helper_functions as HLP_F
from algorithms import GEOPROCESSING_BD as GEOP
@@ -193,11 +195,14 @@ def L1B_P__main(L1A_Instances):
class COREG(object):
def __init__(self, dict_L1A_Instance,v):
assert dict_L1A_Instance['proc_level'] == 'L1A', 'L1B_P only processes L1A data, not %s.' %dict_L1A_Instance['proc_level']
self.max_shift = 5 # TODO: what happens if the detected shift is too large? Change win_pos/imref/win_size and recompute?
self.max_iter = 5
self.v = v
#[print(k,v) for k,v in L1A_Instance.items()]
self.im2shift = dict_L1A_Instance['arr'] # can be numpy array or path to disk
self.scene_ID = dict_L1A_Instance['scene_ID']
self.baseN = dict_L1A_Instance['baseN']
self.trueDataCornerLonLat = dict_L1A_Instance['trueDataCornerLonLat']
self.path_im2shift = PG.path_generator(dict_L1A_Instance).get_path_imagedata()
self.footprint_poly = HLP_F.CornerLonLat_to_shapelyPoly(dict_L1A_Instance['trueDataCornerLonLat'])
@@ -241,7 +246,7 @@ class COREG(object):
"""optionally write shapes (always LonLat because GMS shapely polygons are always LonLat"""
if self.v:
get_baseN = lambda path: os.path.splitext(os.path.basename(path))[0]
self.verbose_out = os.path.join(PG.path_generator(dict_L1A_Instance).get_path_procdata(), \
self.verbose_out = os.path.join(PG.path_generator(dict_L1A_Instance).get_path_procdata(),
'CoReg_verboseOut__%s__shifted_to__%s' %(get_baseN(self.path_im2shift), get_baseN(self.path_imref)))
if not os.path.isdir(self.verbose_out): os.makedirs(self.verbose_out)
OUT_W.write_shp(self.imref_footprint_poly, os.path.join(self.verbose_out,'poly_imref.shp'), get_prjLonLat())
@@ -254,7 +259,7 @@ class COREG(object):
self.get_opt_bands4matching()
"""get no data values"""
self.nodata = self.get_nodata_vals() # [ref,shift] # FIXME these are only needed for the true corners!
self.get_nodata_vals() # [ref,shift] # FIXME these are only needed for the true corners!
"""2. get optimal window position and size to be used for shift calculation (within overlap)"""
self.win_pos = None # set by self.get_opt_winpos_winsize()
@@ -274,9 +279,11 @@ class COREG(object):
#scene_ID = 14536400 # LE71510322000093SGS00 im2shift
"""postgreSQL query: get IDs of overlapping scenes"""
#query_res = DB_T.get_overlapping_scenes_from_postgreSQLdb(job.conn_database, scene_ID=self.scene_ID)
query_res = DB_T.get_overlapping_scenes_from_postgreSQLdb( \
query_res = DB_T.get_overlapping_scenes_from_postgreSQLdb(
job.conn_database, trueDataCornerLonLat=self.trueDataCornerLonLat)
sceneIDs_overlap = [i[0] for i in query_res] # [5524037,5545073] # LC81510322013152LGN00 imref for 14536400 # LE71510322000093SGS00
assert sceneIDs_overlap != [], 'No reference scene found for %s (scene ID %s).' %(self.baseN, self.scene_ID)
# TODO: later, start a download for the required scene?
"""find reference scenes that cover at least 20% of the scene with the given ID"""
dict_sceneID_poly = [{'scene_ID':id,'scene poly':HLP_F.scene_ID_to_shapelyPolygon(id)} for id in sceneIDs_overlap] # always returns LonLat polygons
@@ -352,21 +359,21 @@ class COREG(object):
"""L1A_obj shift in GDAL-Array konvertieren""" # TODO
return ds
def get_cloudmask():
def get_cloudmask(self):
pass # TODO
def get_DGM():
def get_DGM(self):
pass # TODO
def get_opt_bands4matching(self):
self.imref_band4match = 1 # TODO
self.im2shift_band4match = 1 # TODO
ref_bands, shift_bands = self.ds_imref.RasterCount, self.ds_im2shift.RasterCount
assert self.imref_band4match <= ref_bands and self.imref_band4match >= 1, 'The reference '\
assert ref_bands >= self.imref_band4match >= 1, 'The reference '\
"image has %s %s. So '-rb' must be %s%s." \
%(ref_bands,'bands' if ref_bands>1 else 'band', 'between 1 and ' if ref_bands>1 else '', ref_bands)
assert self.im2shift_band4match <= shift_bands and self.im2shift_band4match >= 1, 'The image to be shifted '\
"has %s %s. So '-sb' must be %s%s." %(shift_bands,'bands' if shift_bands>1 else 'band', \
% (ref_bands,'bands' if ref_bands>1 else 'band', 'between 1 and ' if ref_bands>1 else '', ref_bands)
assert shift_bands >= self.im2shift_band4match >= 1, 'The image to be shifted '\
"has %s %s. So '-sb' must be %s%s." % (shift_bands,'bands' if shift_bands>1 else 'band',
'between 1 and ' if shift_bands>1 else '', shift_bands)
def get_nodata_vals(self):
@@ -375,7 +382,7 @@ class COREG(object):
self.nodata = [get_nodata(self.ds_imref),get_nodata(self.ds_im2shift)]
def calculate_spatial_shifts(self):
im0, im1, imfft_gsd_mapvalues, gsd_factor = get_image_windows_to_match(self.ds_imref,self.ds_im2shift, \
im0, im1, imfft_gsd_mapvalues, gsd_factor = get_image_windows_to_match(self.ds_imref,self.ds_im2shift,
self.win_pos, self.win_size, self.imref_band4match, self.im2shift_band4match, v=self.v)
if self.v: print('gsd_factor', gsd_factor)
if self.v: print('imfft_gsd_mapvalues',imfft_gsd_mapvalues)
@@ -417,19 +424,9 @@ class COREG(object):
print('Calculated total shifts in reference image units (X/Y): %s/%s' %(x_totalshift,y_totalshift))
if max([x_totalshift,y_totalshift]) > self.max_shift:
raise RuntimeError("The calculated shift is recognized as too large to be valid. "
"If you know that it is valid, just set the '-max_shift' parameter to an appropriate value. Otherwise "\
"try to use a different window size for matching via the '-ws' parameter or define the spectral bands "\
"to be used for matching manually ('-br' and '-bs'.)")
def correct_shifts(self):
equal_prj = get_proj4info(proj=self.ref_prj)==get_proj4info(proj=self.shift_prj)
if equal_prj and not self.align_grids and not self.match_gsd and \
self.out_gsd in [None,[self.shift_xgsd,self.shift_ygsd]]:
self.shift_image_by_updating_map_info()
elif equal_prj and self.align_grids: # match_gsd and out_gsd are respected
self.align_coordinate_grids()
else: # match_gsd and out_gsd are respected ### TODO: implement out_proj
self.resample_without_grid_aligning()
"If you know that it is valid, just set the '-max_shift' parameter to an appropriate value. Otherwise "
"try to use a different window size for matching via the '-ws' parameter or define the spectral bands "
"to be used for matching manually ('-br' and '-bs'.)")
def get_updated_map_info(self):
print('Original map info:', self.map_info_to_update)
@@ -445,16 +442,16 @@ class COREG(object):
self.updated_map_info[4] = str(float(self.map_info_to_update[4]) + self.y_shift_map)
print('Updated map info:',self.updated_map_info)
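# Background (standard ENVI 'map info' convention): the list reads
# [proj_name, ref_pixel_x, ref_pixel_y, easting, northing, x_gsd, y_gsd, ...],
# so indices 3 and 4 updated above are the tie point map coordinates to which the
# calculated x/y map shifts are added.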
def correct_shifts(self):
equal_prj = get_proj4info(proj=self.ref_prj)==get_proj4info(proj=self.shift_prj)
if equal_prj and not self.align_grids and not self.match_gsd and \
self.out_gsd in [None,[self.shift_xgsd,self.shift_ygsd]]:
self.shift_image_by_updating_map_info()
elif equal_prj and self.align_grids: # match_gsd and out_gsd are respected
self.align_coordinate_grids()
else: # match_gsd and out_gsd are respected ### TODO: implement out_proj
self.resample_without_grid_aligning()
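# Reading aid (summarizing the branches above): identical projections with untouched
# grid and GSD only require rewriting the map info; identical projections with
# align_grids additionally resample onto the reference grid; everything else falls
# back to plain resampling (a target projection is not implemented yet, see TODO).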
class L1B_object(object):
def __init__(self, L1A_obj, COREG_obj):
[setattr(self, key, value) for key,value in L1A_obj.__dict__.items()]
self.proc_level = 'L1B'
self.corrected_shifts_px = {'x':COREG_obj.x_shift_px, 'y':COREG_obj.y_shift_px}
self.corrected_shifts_map = {'x':COREG_obj.x_shift_map, 'y':COREG_obj.y_shift_map}
self.meta['original map info'] = COREG_obj.map_info_to_update
self.meta['map info'] = COREG_obj.updated_map_info
# + scene_ID and entity_ID of imref
def get_prjLonLat(fmt='wkt'):
assert re.search('wkt',fmt,re.I) or re.search('Proj4',fmt,re.I), 'unsupported output format'
@@ -526,12 +523,12 @@ def get_image_windows_to_match(ds_imref,ds_im2shift, win_pos, win_sz, rb, sb, te
# gdalwarp for im2shift with pixel aggregate
rsp_algor = 'average'
if gdal.VersionInfo().startswith('1'):
print("WARNING: The GDAL version on this server does not yet support the resampling algorithm "\
"'average'. This can affect the correct detection of subpixel shifts. To avoid this "\
print("WARNING: The GDAL version on this server does not yet support the resampling algorithm "
"'average'. This can affect the correct detection of subpixel shifts. To avoid this "
"please update GDAL to a version above 2.0.0!")
rsp_algor = 'cubic'
cmd = "gdalwarp -r %s -tr %s %s -t_srs '%s' -of %s -te %s %s %s %s %s %s -overwrite%s" \
%(rsp_algor,imfft_gsd_mapvalues, imfft_gsd_mapvalues, imref_proj, outFmt, min(map_xvals),\
%(rsp_algor,imfft_gsd_mapvalues, imfft_gsd_mapvalues, imref_proj, outFmt, min(map_xvals),
min(map_yvals),max(map_xvals),max(map_yvals),path_im2shift,path_im2shift_clip,' -q' if not v else '')
# te_srs is not necessary because -t_srs = imref_proj and output extent is derived from imref
output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
@@ -540,7 +537,7 @@ def get_image_windows_to_match(ds_imref,ds_im2shift, win_pos, win_sz, rb, sb, te
# read im1 data into an array
ds_im2shift_clip = gdal.OpenShared(path_im2shift_clip) if not tempAsENVI else gdal.Open(path_im2shift_clip)
im2shift_clip_data= ds_im2shift_clip.GetRasterBand(sb).ReadAsArray()
[os.remove(p) for p in [path_im2shift_clip, os.path.splitext(path_im2shift_clip)[0]+'.hdr', \
[os.remove(p) for p in [path_im2shift_clip, os.path.splitext(path_im2shift_clip)[0]+'.hdr',
path_im2shift_clip +'.aux.xml'] if tempAsENVI and os.path.exists(p)]
else:
@@ -558,7 +555,7 @@ def get_image_windows_to_match(ds_imref,ds_im2shift, win_pos, win_sz, rb, sb, te
imref_max_poss_sz = min([ds_imref.RasterXSize, ds_imref.RasterYSize])
clip_sz = clip_sz if not clip_sz > imref_max_poss_sz else win_sz * im2shift_gsd/imref_gsd
if clip_sz > imref_max_poss_sz:
print('WARNING: The given target window size equals %s px in the image to be shifted which exceeds '\
print('WARNING: The given target window size equals %s px in the image to be shifted which exceeds '
'its extent. Thus it has been reduced to the maximum possible size.' %clip_sz)
clip_sz = imref_max_poss_sz
@@ -574,8 +571,8 @@ def get_image_windows_to_match(ds_imref,ds_im2shift, win_pos, win_sz, rb, sb, te
# clip imref with imref_box and downsample it to the resolution of im2shift via pixel aggregate
rsp_algor = 'average'
if gdal.VersionInfo().startswith('1'):
print("WARNING: The GDAL version on this server does not yet support the resampling algorithm "\
"'average'. This can affect the correct detection of subpixel shifts. To avoid this "\
print("WARNING: The GDAL version on this server does not yet support the resampling algorithm "
"'average'. This can affect the correct detection of subpixel shifts. To avoid this "
"please update GDAL to a version above 2.0.0!")
rsp_algor = 'cubic'
cmd = "gdalwarp -r %s -tr %s %s -t_srs '%s' -of %s -te %s %s %s %s %s %s -overwrite%s" \
@@ -590,13 +587,13 @@ def get_image_windows_to_match(ds_imref,ds_im2shift, win_pos, win_sz, rb, sb, te
imref_clip_data= ds_im2ref_clip.GetRasterBand(sb).ReadAsArray()
ds_im2ref_clip.FlushCache()
ds_im2ref_clip = None
[os.remove(p) for p in [path_imref_clip, os.path.splitext(path_imref_clip)[0]+'.hdr', \
[os.remove(p) for p in [path_imref_clip, os.path.splitext(path_imref_clip)[0]+'.hdr',
path_imref_clip +'.aux.xml'] if tempAsENVI and os.path.exists(p)]
# clip im2shift with imref_box_map (cubic, because pixel borders have to be shifted while the resolution
# stays the same)
cmd = "gdalwarp -r cubic -tr %s %s -t_srs '%s' -of %s -te %s %s %s %s %s %s -overwrite%s" \
%(imfft_gsd_mapvalues, imfft_gsd_mapvalues, imref_proj, outFmt, min(map_xvals),min(map_yvals), \
%(imfft_gsd_mapvalues, imfft_gsd_mapvalues, imref_proj, outFmt, min(map_xvals),min(map_yvals),
max(map_xvals),max(map_yvals),path_im2shift,path_im2shift_clip,' -q' if not v else '')
# te_srs is not necessary because -t_srs = imref_proj and output extent is derived from imref
output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
@@ -605,7 +602,7 @@ def get_image_windows_to_match(ds_imref,ds_im2shift, win_pos, win_sz, rb, sb, te
# read im2shift_clip
ds_im2shift_clip = gdal.OpenShared(path_im2shift_clip) if not tempAsENVI else gdal.Open(path_im2shift_clip)
im2shift_clip_data= ds_im2shift_clip.GetRasterBand(sb).ReadAsArray()
[os.remove(p) for p in [path_im2shift_clip, os.path.splitext(path_im2shift_clip)[0]+'.hdr', \
[os.remove(p) for p in [path_im2shift_clip, os.path.splitext(path_im2shift_clip)[0]+'.hdr',
path_im2shift_clip +'.aux.xml'] if tempAsENVI and os.path.exists(p)]
@@ -625,13 +622,12 @@ def get_opt_fftw_winsize(im_shape, target_size=None,v=0):
def calc_shifted_cross_power_spectrum(im0,im1,window_size=1024,precision=np.complex64, v=0):
"""Calculates shifted cross power spectrum for quantifying x/y-shifts.
Inputs:
- im0: reference image
- im1: subject image to shift
- window_size: size of image area to be processed
- precision: to be quantified as a datatype
Output:
- 2D-numpy-array of the shifted cross power spectrum
:param im0: reference image
:param im1: subject image to shift
:param window_size: size of image area to be processed
:param precision: to be quantified as a datatype
:param v: verbose
:return: 2D-numpy-array of the shifted cross power spectrum
"""
window_size = get_opt_fftw_winsize(im0.shape, target_size=window_size,v=v)
@@ -646,10 +642,10 @@ def calc_shifted_cross_power_spectrum(im0,im1,window_size=1024,precision=np.comp
fft_arr1 = np.fft.fft2(in_arr1)
if v: print('forward FFTW: %.2fs' %(time.time() -t0))
eps = abs(fft_arr1).max() * 1e-15
eps = np.abs(fft_arr1).max() * 1e-15
# cps == cross-power spectrum of im0 and im1
temp = (fft_arr0 * fft_arr1.conjugate()) / (abs(fft_arr0) * abs(fft_arr1) + eps)
temp = (fft_arr0 * fft_arr1.conjugate()) / (np.abs(fft_arr0) * np.abs(fft_arr1) + eps)
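# eps is a tiny regularizer derived from the spectrum's maximum magnitude; it prevents
# division by zero where both amplitude spectra are (close to) zero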
if v: t0 = time.time()
if 'pyfft' in globals():
@@ -658,7 +654,7 @@ def calc_shifted_cross_power_spectrum(im0,im1,window_size=1024,precision=np.comp
ifft_arr = np.fft.ifft2(temp)
if v: print('backward FFTW: %.2fs' %(time.time() -t0))
cps = abs(ifft_arr)
cps = np.abs(ifft_arr)
# scps = shifted cps
scps = np.fft.fftshift(cps)
if v and GMS_call_type != 'webapp':
@@ -749,3 +745,13 @@ def get_total_shifts(x_intshift,y_intshift,x_subshift,y_subshift):
return x_intshift+x_subshift, y_intshift+y_subshift
#=====================================================================================================================
class L1B_object(object):
def __init__(self, L1A_obj, COREG_obj):
[setattr(self, key, value) for key,value in L1A_obj.__dict__.items()]
self.proc_level = 'L1B'
self.corrected_shifts_px = {'x':COREG_obj.x_shift_px, 'y':COREG_obj.y_shift_px}
self.corrected_shifts_map = {'x':COREG_obj.x_shift_map, 'y':COREG_obj.y_shift_map}
self.meta['original map info'] = COREG_obj.map_info_to_update
self.meta['map info'] = COREG_obj.updated_map_info
# + scene_ID and entity_ID of imref
@@ -10,15 +10,15 @@
# GMS_call_type <- process_controller
# GMS_process_ID <- process_controller
import builtins
GMS_call_type, GMS_process_ID = builtins.GMS_call_type, builtins.GMS_process_ID
assert GMS_call_type in ['console','webapp'], \
"builtins.GMS_call_type '%s' is not a valid call_type. Use 'console' or 'webapp' instead!" %GMS_call_type
import datetime
import os
import sys
import multiprocessing
import socket
import psycopg2
import algorithms.gms_cloud_classifier as CLD_P # Cloud Processor
def get_info_from_postgreSQLdb(conn_params,tablename,vals2return,cond_dict,records2fetch=0):
if not isinstance(vals2return,list): vals2return = [vals2return]
@@ -91,6 +91,17 @@ class job:
path_benchmarks = absP(query(conn_db_meta,'path_benchmarks'))
path_job_logs = absP(query(conn_db_meta,'path_job_logs'))
# processor configuration: [run processor, write output]
exec__L0BP = [1]
exec__L1AP = [1, 1]
exec__L1BP = [1, 0]
exec__L1CP = [1, 1]
exec__L1DP = [1, 1]
exec__L2AP = [0, 0]
exec__L2BP = [0, 0]
exec__L2CP = [0, 0]
exec__L2DP = [0, 0]
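# Reading aid: each exec__L*P entry is [run processor, write output]; e.g. exec__L1BP = [1, 0]
# runs the L1B processor without writing its results to disk, while exec__L0BP apparently
# carries only the run flag.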
assert os.path.isdir(job.path_archive), "Given archive folder '%s' does not exist. Execution stopped" % job.path_archive
if not os.path.isdir(job.path_job_logs): os.makedirs(job.path_job_logs)
@@ -122,16 +133,6 @@ class usecase:
conversion_type_optical = query(job.conn_db_meta,'conversion_type_optical')
conversion_type_thermal = query(job.conn_db_meta,'conversion_type_thermal')
# processor configuration: [run processor, write output]
exec__L0BP = [1]
exec__L1AP = [1, 1]
exec__L1BP = [1, 0]
exec__L1CP = [1, 1]
exec__L1DP = [1, 1]
exec__L2AP = [0, 0]
exec__L2BP = [0, 0]
exec__L2CP = [0, 0]
exec__L2DP = [0, 0]
os.environ['DISPLAY'] = '127.0.0.0:0.0'
# def init_gms_globals():
@@ -12,76 +12,67 @@
########################### Library import ####################################
from spectral.io import envi as envi
import glob
import pip
import numpy as np
import os
import sys
import sqlite3
import csv
import json
import spectral
import collections
import re
import pickle
import imp
import dill
import psycopg2
from io import StringIO
import builtins
import scipy.interpolate
#import misc.helper_functions as HLP_F
import gms_io.Output_writer as OUT_W
import algorithms.GEOPROCESSING_BD as GEOP
import algorithms.METADATA_BD as META
import misc.database_tools as DB_T
import misc.path_generator as PG
job = GMS_config.job # read from builtins (set by process_controller)
job = builtins.GMS_config.job # read from builtins (set by process_controller)
# + misc.helper_functions.setup_logger (left out here in order to avoid circular dependencies)
########################### core functions ####################################
class out_object(object):
def __init__(self, tuple_GMS_subset):
from algorithms.L1A_P import L1A_object
from misc.helper_functions import setup_logger
path_GMS_file = tuple_GMS_subset[0]
self.arr_shape = tuple_GMS_subset[1][0]
self.arr_pos = tuple_GMS_subset[1][1]
GMS_file = json.load(open(path_GMS_file))
for key,value in zip(GMS_file.keys(), GMS_file.values()):
setattr(self, key, value)
if self.arr_pos is None:
print('Reading file: %s' % (self.baseN))
else:
print('Reading file: %s @ position %s' %(self.baseN, self.arr_pos))
from misc.helper_functions import setup_logger
[setattr(self, key, value) for key,value in GMS_file.items()]
if self.arr_pos is None: print('Reading file: %s' %self.baseN)
else: print('Reading file: %s @ position %s' %(self.baseN, self.arr_pos))
self.logger = setup_logger('log__'+self.baseN, self.path_logfile, self.job_CPUs,append=1)
self.GMS_identifier = collections.OrderedDict({'image_type':self.image_type, 'Satellite':self.satellite,'Sensor':self.sensor,'Subsystem': self.subsystem, 'logger':self.logger})
self.GMS_identifier = collections.OrderedDict({'image_type':self.image_type, 'Satellite':self.satellite,
'Sensor':self.sensor,'Subsystem': self.subsystem, 'logger':self.logger})
self.meta = read_ENVIhdr_to_dict(os.path.splitext(self.path_Outfile_L1A)[0]+'.hdr', self.logger)
# Methods
if self.proc_level == 'L1A':
from algorithms.L1A_P import L1A_object
self.get_lonlat_coord_array = getattr(L1A_object,'get_lonlat_coord_array')
# for method in dir(L1A_object):
# if callable(getattr(L1A_object, method)):
# print(method)
# setattr(self,method,getattr(L1A_object,method))
# [setattr(self,method,getattr(L1A_object,method)) for method in dir(L1A_object) if callable(getattr(L1A_object, method)) and not method.startswith('__')]
# [setattr(self,method,getattr(L1A_object,method)) for method in dir(L1A_object)
# if callable(getattr(L1A_object, method)) and not method.startswith('__')]
# del self.DN2RadRef_mp
# del self.DN2RadRef
# print(dir(self))
if self.image_type == 'RSD':
self.mask, self.mask_meta = read_ENVIfile(os.path.join(self.path_procdata,self.baseN+OUT_W.param_dic['MAS_'+self.proc_level][1]), self.arr_shape, self.arr_pos)
self.mask, self.mask_meta = read_ENVIfile(os.path.join(self.path_procdata,self.baseN+
OUT_W.param_dic['MAS_'+self.proc_level][1]), self.arr_shape, self.arr_pos)
del self.logger; self.GMS_identifier['logger'] = 'not set'
def ENVI2object(tuple_GMS_arrshp_arrpos):
return out_object(tuple_GMS_arrshp_arrpos)
def read_ENVIfile(hdr_path,arr_shape,arr_pos,logger=None):
return read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=None,return_meta=True)
return read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=logger,return_meta=True)
def read_ENVIhdr_to_dict(hdr_path, logger=None):
if not os.path.isfile(hdr_path):
if logger!=None: logger.critical('read_ENVIfile: Input data not found at %s.'%hdr_path)
if logger is not None: logger.critical('read_ENVIfile: Input data not found at %s.'%hdr_path)
else: print ('read_ENVIfile: Input data not found at %s.'%hdr_path)
else:
SpyFileheader = envi.open(hdr_path)
@@ -89,10 +80,10 @@ def read_ENVIhdr_to_dict(hdr_path, logger=None):
def read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=None, return_meta=False):
if not os.path.isfile(hdr_path):
if logger!=None: logger.critical('read_ENVIfile: Input data not found at %s.'%hdr_path)
if logger is not None: logger.critical('read_ENVIfile: Input data not found at %s.'%hdr_path)
else: print ('read_ENVIfile: Input data not found at %s.'%hdr_path)
else:
if logger!=None: logger.info('Reading %s ...' %(os.path.basename(hdr_path)))
if logger is not None: logger.info('Reading %s ...' %(os.path.basename(hdr_path)))
File_obj = spectral.open_image(hdr_path)
SpyFileheader = envi.open(hdr_path)
if arr_shape == 'cube':
@@ -118,14 +109,14 @@ def read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=None, return
"row_bounds (2-tuple of ints): (a, b) -> Rows a through b-1 will be read."
"col_bounds (2-tuple of ints): (a, b) -> Columns a through b-1 will be read."
"bands (list of ints): Optional list of bands to read. If not specified, all bands are read."
image_data = File_obj.read_subregion ((arr_pos[0][0],arr_pos[0][1]+1),(arr_pos[1][0],arr_pos[1][1]+1),arr_pos[2])
image_data = \
File_obj.read_subregion ((arr_pos[0][0],arr_pos[0][1]+1),(arr_pos[1][0],arr_pos[1][1]+1),arr_pos[2])
else:
if logger is not None: logger.critical("Array shape '%s' is not known. Known array shapes are cube, row, col, band, block, pixel, custom." %arr_shape); sys.exit()
else: print("Array shape '%s' is not known. Known array shapes are cube, row, col, band, block, pixel, custom." %arr_shape); sys.exit()
if return_meta:
return image_data, SpyFileheader.metadata
else:
return image_data
if logger is not None: logger.critical("Array shape '%s' is not known. Known array shapes are cube, row, "\
"col, band, block, pixel, custom." %arr_shape); sys.exit()
else: print("Array shape '%s' is not known. Known array shapes are cube, row, col, band, block, pixel, "\
"custom." %arr_shape); sys.exit()
return (image_data, SpyFileheader.metadata) if return_meta else image_data
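# Usage sketch (hypothetical path and band position, since the 'band' branch is not shown
# here): reading one band including its metadata could look like
#   data, meta = read_ENVI_image_data_as_array('/path/to/image.hdr', 'band', 3, return_meta=True)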
def GMSfile2dict(path_GMSfile):
""" Converts a JSON file (like the GMS file) to a Python dictionary with keys and values.
@@ -138,8 +129,9 @@ def GMSfile2dict(path_GMSfile):
def unify_envi_header_keys(header_dict):
"""Ensures the compatibility of ENVI header keys written by Spectral-Python the code internal attribute names.
(ENVI header keys are always lowercase in contrast to the attribute names used in code)."""
refkeys = ['AcqDate', 'AcqTime', 'Additional', 'FieldOfView', 'IncidenceAngle', 'Metafile', 'PhysUnit', \