Commit cfadc485 authored by Daniel Scheffler, committed by Mathias Peters

Merge remote-tracking branch 'gms-hu-inf/master' into hu_inf

parent c9816574
@@ -57,7 +57,8 @@ from py_tools_ds.ptds.geo.coord_trafo import transform_utm_to_wgs84, transf
from py_tools_ds.ptds.geo.projection import get_UTMzone, EPSG2WKT, isProjectedOrGeographic
from py_tools_ds.ptds.geo.raster.reproject import warp_ndarray
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
from .. import unified_config as CFG
class GEOPROCESSING(object):
"""****CREATE OBJECT ****************************************************"""
@@ -1024,7 +1025,7 @@ class GEOPROCESSING(object):
# %(dst_EPSG_code, in_nodataVal,out_nodataVal, translatedFile, warpedFile))
os.system('gdalwarp -of ENVI --config GDAL_CACHEMAX 2048 -wm 2048 -t_srs EPSG:%s -tps -r \
cubic -srcnodata %s -dstnodata %s -multi -overwrite -wo NUM_THREADS=%s -q %s %s' \
%(dst_EPSG_code,inFill,out_nodataVal,job.CPUs,translatedFile,warpedFile))
%(dst_EPSG_code,inFill,out_nodataVal,CFG.get_job().CPUs,translatedFile,warpedFile))
# import shutil
# shutil.copy(translatedFile, '//misc/hy5/scheffler/Skripte_Models/python/GeoMultiSens/testing/out/') ## only for bugfixing
# shutil.copy(translatedFile+'.hdr','//misc/hy5/scheffler/Skripte_Models/python/GeoMultiSens/testing/out/') ## only for bugfixing
@@ -1045,7 +1046,7 @@ class GEOPROCESSING(object):
# %(dst_EPSG_code,in_nodataVal,out_nodataVal,translatedFile,warpedFile))
os.system('gdalwarp -of VRT --config GDAL_CACHEMAX 2048 -wm 2048 -ot Int16 -t_srs EPSG:%s -tps -r \
cubic -srcnodata %s -dstnodata %s -overwrite -multi -wo NUM_THREADS=%s -q %s %s' \
% (dst_EPSG_code, inFill, out_nodataVal, job.CPUs, translatedFile, warpedFile))
% (dst_EPSG_code, inFill, out_nodataVal, CFG.get_job().CPUs, translatedFile, warpedFile))
# print('warped')
print('GDAL warping time',time.time()-t0)
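
Both warp calls shell out via `os.system` with `%`-interpolation. A hedged sketch of the same invocation through `subprocess.run` with an argument list, which avoids shell-quoting issues for paths and nodata values (an alternative pattern, not what this commit does):

    import subprocess

    def run_gdalwarp(dst_epsg, src_nodata, dst_nodata, cpus, src, dst):
        # Same flags as the ENVI-output call above, passed as an argv list.
        cmd = ['gdalwarp', '-of', 'ENVI', '--config', 'GDAL_CACHEMAX', '2048',
               '-wm', '2048', '-t_srs', 'EPSG:%s' % dst_epsg, '-tps',
               '-r', 'cubic', '-srcnodata', str(src_nodata),
               '-dstnodata', str(dst_nodata), '-multi', '-overwrite',
               '-wo', 'NUM_THREADS=%s' % cpus, '-q', src, dst]
        subprocess.run(cmd, check=True)
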
@@ -18,23 +18,27 @@ import re
import datetime
import glob
import builtins
import psycopg2
import psycopg2.extras
from collections import OrderedDict
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
from ..io import Input_reader as INP_R
from ..misc import helper_functions as HLP_F
from ..misc import database_tools as DB_T
from ..misc import path_generator as PG
from .METADATA import get_LayerBandsAssignment
from .. import unified_config as CFG
########################### core functions ####################################
def get_entity_IDs_within_AOI(): # called in console mode
data_list =[]
if re.search('ALOS', ','.join(usecase.filt_datasets)): # sensorname has to be in HLP_F.get_GMS_sensorcode
if re.search('ALOS', ','.join(CFG.get_usecase().filt_datasets)): # sensorname has to be in HLP_F.get_GMS_sensorcode
data_list.append({'image_type':'RSD','satellite':'ALOS', 'sensor':'AVNIR-2', 'subsystem':None, 'acquisition_date':'2009-07-02', 'entity_ID':'A1002553-001-P6100002-AODS-201007300008'}) # TAR-ID 1B1
data_list.append({'image_type':'RSD','satellite':'ALOS', 'sensor':'AVNIR-2', 'subsystem':None, 'acquisition_date':'2007-09-27', 'entity_ID':'20070927_L1B2_ALAV2A089152780'}) # extracted Folder 1B2
data_list.append({'image_type':'RSD','satellite':'ALOS', 'sensor':'AVNIR-2', 'subsystem':None, 'acquisition_date':'2009-07-19', 'entity_ID':'20090719_L1B2_ALAV2A185572780'}) # extracted Folder 1B2
data_list.append({'image_type':'RSD','satellite':'ALOS', 'sensor':'AVNIR-2', 'subsystem':None, 'acquisition_date':'2010-04-21', 'entity_ID':'20100421_L1B2_ALAV2A225832780'}) # extracted Folder 1B2
if re.search('Terra', ','.join(usecase.filt_datasets)):
if re.search('Terra', ','.join(CFG.get_usecase().filt_datasets)):
data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'VNIR1', 'acquisition_date':'2007-11-08', 'entity_ID':'AST_L1B_00308192007061017_20071108171717_32444'}) # HDF-ID
data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'VNIR2', 'acquisition_date':'2007-11-08', 'entity_ID':'AST_L1B_00308192007061017_20071108171717_32444'}) # HDF-ID
data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'SWIR', 'acquisition_date':'2007-11-08', 'entity_ID':'AST_L1B_00308192007061017_20071108171717_32444'}) # HDF-ID
@@ -43,23 +47,23 @@ def get_entity_IDs_within_AOI(): # called in console mode
data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'VNIR2', 'acquisition_date':'2002-06-08', 'entity_ID':'AST_L1A_003_05262002060543_06082002144959'}) # HDF-ID
data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'SWIR', 'acquisition_date':'2002-06-08', 'entity_ID':'AST_L1A_003_05262002060543_06082002144959'}) # HDF-ID
data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'TIR', 'acquisition_date':'2002-06-08', 'entity_ID':'AST_L1A_003_05262002060543_06082002144959'}) # HDF-ID
if re.search('Landsat', ','.join(usecase.filt_datasets)): # sensorname has to be in HLP_F.get_GMS_sensorcode
if re.search('Landsat', ','.join(CFG.get_usecase().filt_datasets)): # sensorname has to be in HLP_F.get_GMS_sensorcode
data_list.append({'image_type':'RSD','satellite':'Landsat-5', 'sensor':'TM', 'subsystem':None, 'acquisition_date':'1996-10-24', 'entity_ID':'LT51510321996298XXX01'}) # TAR-ID
# data_list.append({'image_type':'RSD','satellite':'Landsat-7', 'sensor':'ETM+', 'subsystem':None, 'acquisition_date':'2002-08-15', 'entity_ID':'LE70050152002227EDC00'}) # TAR-ID
# data_list.append({'image_type':'RSD','satellite':'Landsat-7', 'sensor':'ETM+', 'subsystem':None, 'acquisition_date':'2000-04-02', 'entity_ID':'LE71510322000093SGS00'}) # TAR-ID
data_list = data_list + LandsatID2dataset([os.path.basename(i).split('.tar.gz')[0] for i in glob.glob(os.path.join(job.path_archive,'Landsat-7/ETM+/*.tar.gz'))]) # TAR-ID
data_list = data_list + LandsatID2dataset([os.path.basename(i).split('.tar.gz')[0] for i in glob.glob(os.path.join(job.path_archive,'Landsat-8/OLI_TIRS/*.tar.gz'))]) # TAR-ID
data_list = data_list + LandsatID2dataset([os.path.basename(i).split('.tar.gz')[0] for i in glob.glob(os.path.join(CFG.get_job().path_archive,'Landsat-7/ETM+/*.tar.gz'))]) # TAR-ID
data_list = data_list + LandsatID2dataset([os.path.basename(i).split('.tar.gz')[0] for i in glob.glob(os.path.join(CFG.get_job().path_archive,'Landsat-8/OLI_TIRS/*.tar.gz'))]) # TAR-ID
# data_list.append({'image_type':'RSD','satellite':'Landsat-8', 'sensor':'OLI_TIRS','subsystem':None, 'acquisition_date':'2013-07-03', 'entity_ID':'LC81510322013184LGN00'}) # TAR-ID
# data_list.append({'image_type':'RSD','satellite':'Landsat-8', 'sensor':'OLI_TIRS','subsystem':None, 'acquisition_date':'2013-06-01', 'entity_ID':'LC81510322013152LGN00'}) # TAR-ID ~6% Cloud cover
if re.search('SPOT', ','.join(usecase.filt_datasets)):
if re.search('SPOT', ','.join(CFG.get_usecase().filt_datasets)):
data_list.append({'image_type':'RSD','satellite':'SPOT-1', 'sensor':'HRV1', 'subsystem':None, 'acquisition_date':'1986-07-17', 'entity_ID':'00197112001'})
data_list.append({'image_type':'RSD','satellite':'SPOT-5', 'sensor':'HRG2', 'subsystem':None, 'acquisition_date':'2010-04-21', 'entity_ID':'00197112009'})
data_list.append({'image_type':'RSD','satellite':'SPOT-5', 'sensor':'HRG2', 'subsystem':None, 'acquisition_date':'2010-04-21', 'entity_ID':'00197112010'})
if re.search('RapidEye', ','.join(usecase.filt_datasets)):
if re.search('RapidEye', ','.join(CFG.get_usecase().filt_datasets)):
data_list.append({'image_type':'RSD','satellite':'RapidEye-5','sensor':'MSI', 'subsystem':None, 'acquisition_date':'2014-04-23', 'entity_ID':'4357606_2014-04-23_RE5_3A_180259'})
if re.search('SRTM', ','.join(usecase.filt_datasets)):
if re.search('SRTM', ','.join(CFG.get_usecase().filt_datasets)):
data_list.append({'image_type':'DGM','satellite':'SRTM', 'sensor':'SRTM2', 'subsystem':None, 'acquisition_date':'unknown', 'entity_ID':'srtm-1arcsec-version2jan2015-39-42n-70-85'})
if re.search('ATM', ','.join(usecase.filt_datasets)):
if re.search('ATM', ','.join(CFG.get_usecase().filt_datasets)):
data_list.append({'image_type':'ATM','satellite':'ATM-data', 'sensor':'unknown', 'subsystem':None, 'acquisition_date':'unknown', 'entity_ID':'dummy_ID'})
for ds in data_list:
@@ -69,47 +73,74 @@ def get_entity_IDs_within_AOI(): # called in console mode
#ds['scene_ID'] = '_'.join([ds['satellite'],ds['sensor'],ds['subsystem'],ds['entity_ID']])
ds['scene_ID'] = ds['entity_ID']
ds['sensormode'] = get_sensormode(ds)
if usecase.skip_thermal:
if CFG.get_usecase().skip_thermal:
data_list = [ds for ds in data_list if not ds['subsystem'] == 'TIR'] # removes ASTER TIR in case of skip_thermal
if usecase.skip_pan:
if CFG.get_usecase().skip_pan:
data_list = [ds for ds in data_list if not ds['sensormode'] == 'P'] # removes e.g. SPOT PAN in case of skip_pan
return data_list
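
Each `re.search('<name>', ','.join(CFG.get_usecase().filt_datasets))` above is effectively a substring test over the joined dataset names. For plain names without regex metacharacters it is equivalent to this regex-free sketch (not repo code):

    def dataset_requested(name, filt_datasets):
        # Equivalent to re.search(name, ','.join(filt_datasets)) as long as
        # 'name' contains no commas or regex metacharacters.
        return any(name in ds for ds in filt_datasets)
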
def get_data_list_of_current_jobID(): # called in webapp mode
query = lambda tablename,vals2return,cond_dict,records2fetch=0:\
DB_T.get_info_from_postgreSQLdb(job.conn_database,tablename,vals2return,cond_dict,records2fetch)
resultset = query('jobs','sceneids',{'id':job.ID},3)
assert len(resultset) != 0, "Invalid jobID given - no corresponding job with the ID=%s found in database.\n" %job.ID
assert len(resultset) == 1, "Error in database. The jobID %s exists more than once. \n" %job.ID
sceneids = resultset[0][0]
job = CFG.get_job()
usecase = CFG.get_usecase()
data_list = []
with psycopg2.connect(job.conn_database) as conn:
with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
cur.execute("""
WITH jobs_unnested AS (
SELECT id, unnest(sceneids) AS sceneid FROM jobs
)
SELECT jobs_unnested.sceneid,
scenes.datasetid,
scenes.acquisitiondate,
scenes.entityid,
scenes.filename,
COALESCE(scenes_proc.proc_level::text, 'L0A') AS proc_level,
datasets.image_type,
satellites.name AS satellite,
sensors.name AS sensor,
subsystems.name AS subsystem
FROM jobs_unnested
LEFT OUTER JOIN scenes ON scenes.id = jobs_unnested.sceneid
LEFT OUTER JOIN scenes_proc ON scenes_proc.sceneid = jobs_unnested.sceneid
LEFT OUTER JOIN datasets ON datasets.id = datasetid
LEFT OUTER JOIN satellites ON satellites.id = satelliteid
LEFT OUTER JOIN sensors ON sensors.id = sensorid
LEFT OUTER JOIN subsystems ON subsystems.id = subsystemid
WHERE jobs_unnested.id = %s
""",
(job.ID,))
data_list =[] # TODO from here on: second function
for sceneid in sceneids:
# add from postgreSQL-DB: proc_level, scene_ID, datasetid, image_type, satellite, sensor, subsystem,
# acquisition_date, entity_ID, filename
ds = DB_T.get_scene_and_dataset_infos_from_postgreSQLdb(sceneid)
for row in cur.fetchall():
ds = OrderedDict()
ds["proc_level"] = row["proc_level"]
ds["scene_ID"] = row["sceneid"]
ds["datasetid"] = row["datasetid"]
ds["image_type"] = row["image_type"]
ds["satellite"] = row["satellite"]
ds["sensor"] = row["sensor"]
ds["subsystem"] = row["subsystem"]
ds["acquisition_date"] = row["acquisitiondate"]
ds["entity_ID"] = row["entityid"]
ds["filename"] = row["filename"]
ds['sensor'] = 'ETM+' if re.search('ETM+', ds['sensor']) else ds['sensor']
if usecase.skip_thermal and ds['subsystem']=='TIR': continue # removes ASTER TIR in case of skip_thermal
ds['subsystem'] = '' if ds['subsystem'] is None else ds['subsystem']
ds['sensormode'] = get_sensormode(ds)
if usecase.skip_pan and ds['sensormode']=='P': continue # removes e.g. SPOT PAN in case of skip_pan
ds['sensor'] = 'ETM+' if re.search(r'ETM\+', ds['sensor']) else ds['sensor'] # escape '+' so it is matched literally
if CFG.get_usecase().skip_thermal and ds['subsystem']=='TIR': continue # removes ASTER TIR in case of skip_thermal
ds['subsystem'] = '' if ds['subsystem'] is None else ds['subsystem']
ds['sensormode'] = get_sensormode(ds)
if CFG.get_usecase().skip_pan and ds['sensormode']=='P': continue # removes e.g. SPOT PAN in case of skip_pan
if re.search("Sentinel-2A",ds['satellite'],re.I):
for subsystem in ['S2A10','S2A20','S2A60']:
sub_ds = ds.copy()
sub_ds['subsystem'] = subsystem
data_list.append(sub_ds)
elif re.search("Terra", ds['satellite'], re.I):
for subsystem in ['VNIR1', 'VNIR2', 'SWIR','TIR']:
sub_ds = ds.copy()
sub_ds['subsystem'] = subsystem
data_list.append(sub_ds)
else:
data_list.append(ds)
'''OrderedDict([('datasetid', 104), ('image_type', 'RSD'), ('satellite', 'Landsat-8'), ('sensor', 'OLI_TIRS'),
('subsystem', ''), ('acquisition_date', datetime.datetime(2013, 7, 3, 5, 48, 32)),
('entityid', 'LC81510322013184LGN00'), ('filename', 'LC81510322013184LGN00.tar.gz'), ('sensormode', 'M')])'''
if re.search("Sentinel-2A",ds['satellite'],re.I):
for subsystem in ['S2A10','S2A20','S2A60']:
sub_ds = ds.copy()
sub_ds['subsystem'] = subsystem
data_list.append(sub_ds)
elif re.search("Terra", ds['satellite'], re.I):
for subsystem in ['VNIR1', 'VNIR2', 'SWIR','TIR']:
sub_ds = ds.copy()
sub_ds['subsystem'] = subsystem
data_list.append(sub_ds)
else:
data_list.append(ds)
return data_list
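
The rewritten webapp branch replaces per-scene helper queries with a single round trip: `jobs.sceneids` (an integer array column, judging by the `unnest`) is expanded to one row per scene in a CTE, metadata tables are attached via LEFT OUTER JOINs, and `COALESCE(..., 'L0A')` supplies a default processing level for scenes without a `scenes_proc` entry. The array-expansion core of that query in isolation, as a sketch using only names visible above:

    import psycopg2
    import psycopg2.extras

    def sceneids_of_job(conn_dsn, job_id):
        # One row per element of the jobs.sceneids array for a single job.
        with psycopg2.connect(conn_dsn) as conn:
            with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
                cur.execute("""
                    WITH jobs_unnested AS (
                        SELECT id, unnest(sceneids) AS sceneid FROM jobs
                    )
                    SELECT sceneid FROM jobs_unnested WHERE id = %s
                """, (job_id,))
                return [row['sceneid'] for row in cur.fetchall()]
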
def LandsatID2dataset(ID_list):
@@ -138,11 +169,11 @@ def get_sensormode(dataset):
return 'M'
def add_local_availability(dataset):
if job.call_type == 'webapp':
if CFG.get_job().call_type == 'webapp':
DB_match = DB_T.get_info_from_postgreSQLdb \
(job.conn_database,'scenes_proc',['proc_level','layer_bands_assignment'], {'sceneid':dataset['scene_ID']})
else: ## job.call_type == 'console'
DB_match = DB_T.get_info_from_SQLdb(job.path_database,'processed_data',['proc_level','LayerBandsAssignment'],
(CFG.get_job().conn_database,'scenes_proc',['proc_level','layer_bands_assignment'], {'sceneid':dataset['scene_ID']})
else: ## CFG.get_job().call_type == 'console'
DB_match = DB_T.get_info_from_SQLdb(CFG.get_job().path_database,'processed_data',['proc_level','LayerBandsAssignment'],
{'image_type':dataset['image_type'],'satellite':dataset['satellite'], 'sensor':dataset['sensor'],
'subsystem':dataset['subsystem'], 'sensormode':dataset['sensormode'], 'entity_ID':dataset['entity_ID']})
path_logfile = PG.path_generator(dataset).get_path_logfile()
@@ -179,7 +210,7 @@ def add_local_availability(dataset):
'logfile %s has been written successfully. Recreating missing database entry.' \
%(dataset['entity_ID'],ProcL))
DB_T.data_DB_updater(GMS_file_dict)
if job.call_type == 'console':
if CFG.get_job().call_type == 'console':
DB_T.SQL_DB_to_csv()
dataset['proc_level'] = ProcL
elif len(DB_match) == 1:
@@ -206,4 +237,4 @@ def add_local_availability(dataset):
dataset['proc_level'] = None
else:
dataset['proc_level'] = None
return dataset
\ No newline at end of file
return dataset
@@ -25,15 +25,16 @@ import collections
from ..misc import path_generator as PG
from ..misc.logging import GMS_logger
job = builtins.GMS_config.job # read from builtins (set by process_controller)
from .. import unified_config as CFG
########################### core functions ####################################
class L0B_object(object):
def __init__(self, data_list_posX):
self.proc_level = 'L0B'
self.job_ID = job.ID
self.job_CPUs = job.CPUs
self.job_ID = CFG.get_job().ID
self.job_CPUs = CFG.get_job().CPUs
self.image_type = data_list_posX['image_type']
self.satellite = data_list_posX['satellite']
self.sensor = data_list_posX['sensor']
@@ -56,7 +57,7 @@ class L0B_object(object):
['image_type' ,'Satellite' ,'Sensor' ,'Subsystem' ,'logger' ],
[self.image_type,self.satellite,self.sensor,self.subsystem,self.logger]) )
self.path_cloud_class_obj = PG.get_path_cloud_class_obj(self.GMS_identifier,
get_all=True if job.bench_CLD_class else False)
get_all=True if CFG.get_job().bench_CLD_class else False)
if not os.path.isfile(self.path_archive) and not os.path.isdir(self.path_archive):
self.logger.info("The %s dataset '%s' has not been processed earlier and no corresponding raw data archive"
@@ -71,7 +72,7 @@ class L0B_object(object):
self.logger.info('Level 0B object for %s %s%s (data-ID %s) successfully created.' %(self.satellite,
self.sensor, (' '+self.subsystem) if self.subsystem not in [None,''] else '', self.entity_ID))
if job.exec_mode=='Python' and self.ExtractedFolder and not os.path.isdir(self.ExtractedFolder):
if CFG.get_job().exec_mode=='Python' and self.ExtractedFolder and not os.path.isdir(self.ExtractedFolder):
os.makedirs(self.ExtractedFolder)
# close loggers
@@ -87,4 +88,4 @@ class L0B_object(object):
" > download source code for Landsat here < "
if not success:
self.logger.critical("Download for %s dataset '%s' failed. No further processing possible." %(sensor,entity_ID))
return success
\ No newline at end of file
return success
@@ -59,9 +59,7 @@ from py_tools_ds.ptds.geo.coord_trafo import pixelToLatLon, pixelToMapYX
from py_tools_ds.ptds.geo.map_info import geotransform2mapinfo, mapinfo2geotransform
from py_tools_ds.ptds.geo.projection import WKT2EPSG
from py_tools_ds.ptds.geo.coord_grid import is_coord_grid_equal
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
from .. import unified_config as CFG
############################# L1A object ######################################
class L1A_object(object):
@@ -95,7 +93,7 @@ class L1A_object(object):
self.GMS_identifier = L0B_object.GMS_identifier if L0B_object else None
if self.GMS_identifier:
self.GMS_identifier['logger'] = self.logger
self.dataset_ID = int(DB_T.get_info_from_postgreSQLdb(job.conn_database, 'scenes', ['datasetid'],
self.dataset_ID = int(DB_T.get_info_from_postgreSQLdb(CFG.get_job().conn_database, 'scenes', ['datasetid'],
{'id': self.scene_ID})[0][0]) if self.scene_ID !=-9999 else -9999
self.outInterleave = 'bsq'
self.LayerBandsAssignment = ''
@@ -127,7 +125,7 @@ class L1A_object(object):
%(os.path.basename(self.path_archive), os.path.dirname(self.path_archive))
assert isinstance(self.path_archive, str), 'Invalid path to RAW data. Got %s instead of string or unicode.'\
%type(self.path_archive)
if job.exec_mode=='Python' and self.ExtractedFolder: assert os.path.exists(self.path_archive), \
if CFG.get_job().exec_mode=='Python' and self.ExtractedFolder: assert os.path.exists(self.path_archive), \
'Invalid path for temporary files. Directory %s does not exist.' %self.ExtractedFolder
self.logger.info('\n\n########### Level 1A Processing started ############\n')
@@ -137,7 +135,7 @@ class L1A_object(object):
if self.sensormode != 'P' else META.get_LayerBandsAssignment(self.GMS_identifier, nBands = 1)
self.dict_LayerOptTherm = META.get_dict_LayerOptTherm(self.GMS_identifier,self.LayerBandsAssignment)
if job.exec_mode=='Python':
if CFG.get_job().exec_mode=='Python':
self.path_InFilePreprocessor = os.path.join(self.ExtractedFolder, '%s%s_DN.bsq' \
%(self.entity_ID,('_'+self.subsystem if re.search("Terra", self.satellite, re.I) else '')))
else: # Flink
@@ -225,8 +223,8 @@ class L1A_object(object):
@property
def coreg_needed(self):
gt = mapinfo2geotransform(self.meta['map info'])
return (is_coord_grid_equal(gt, usecase.spatial_ref_gridx, usecase.spatial_ref_gridy) and
self.dataset_ID == usecase.datasetid_spatial_ref) is False
return (is_coord_grid_equal(gt, CFG.get_usecase().spatial_ref_gridx, CFG.get_usecase().spatial_ref_gridy) and
self.dataset_ID == CFG.get_usecase().datasetid_spatial_ref) is False
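
The `(... and ...) is False` comparison reads more directly as a negation. An equivalent formulation of the new property, as a sketch reusing the helper names imported at the top of this module (equivalent for plain bool results of is_coord_grid_equal):

    @property
    def coreg_needed(self):
        # Co-registration is skipped only if the scene already sits on the
        # reference grid AND belongs to the spatial reference dataset.
        gt = mapinfo2geotransform(self.meta['map info'])
        on_ref_grid = is_coord_grid_equal(gt,
                                          CFG.get_usecase().spatial_ref_gridx,
                                          CFG.get_usecase().spatial_ref_gridy)
        is_ref_dataset = self.dataset_ID == CFG.get_usecase().datasetid_spatial_ref
        return not (on_ref_grid and is_ref_dataset)
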
def fill_from_disk(self,tuple_GMS_subset):
@@ -244,14 +242,14 @@ class L1A_object(object):
path_arr = PG_obj.get_path_imagedata()
path_masks = PG_obj.get_path_maskdata()
path_maskClouds = PG_obj.get_path_cloudmaskdata()
if job.exec_mode=='Flink':
if CFG.get_job().exec_mode=='Flink':
self.arr = INP_R.read_ENVIfile(path_arr,self.arr_shape,self.arr_pos,self.logger,q=1)
self.mask_1bit = INP_R.read_mask_subset(path_masks,'mask_1bit', self.logger,tuple_GMS_subset[1])
self.mask_clouds = INP_R.read_mask_subset(path_masks,'mask_clouds',self.logger,tuple_GMS_subset[1])
self.log_for_fullArr_or_firstTile('Reading file %s as tiles...' %self.baseN \
if self.arr_pos else 'Reading file %s...' %self.baseN)
#self.masks is only needed by Output writer to masks combined -> generated there and on demand
else: # job.exec_mode=='Python'
else: # CFG.get_job().exec_mode=='Python'
self.arr = path_arr
self.mask_1bit = path_masks
self.mask_clouds = path_maskClouds
@@ -447,7 +445,7 @@ class L1A_object(object):
# bands = band_nd
# band_names.append(band)
# self.logger.info(band)
# elif usecase.skip_thermal and META.isTHERMAL(self.GMS_identifier, LayerNr):
# elif CFG.get_usecase().skip_thermal and META.isTHERMAL(self.GMS_identifier, LayerNr):
# self.logger.info(
# 'Band %s skipped because Layerstacking has been called with skipthermal = True.' % band)
# elif META.isPAN(self.GMS_identifier, LayerNr):
@@ -535,7 +533,7 @@ class L1A_object(object):
sub_dim = [sub_dim[i] if sub_dim[i] else full_dim[i] for i in range(len(sub_dim))]
subset = ['block',[[sub_dim[0],sub_dim[1]+1],[sub_dim[2],sub_dim[3]+1]]]
rasObj = GEOP.GEOPROCESSING(paths_files2stack[0], self.logger,subset=subset)
if job.exec_mode == 'Flink' and path_output is None: # numpy array output
if CFG.get_job().exec_mode == 'Flink' and path_output is None: # numpy array output
self.arr = rasObj.Layerstacking(paths_files2stack)
self.path_InFilePreprocessor = paths_files2stack[0]
else: # 'MEMORY' or physical output
@@ -553,7 +551,7 @@ class L1A_object(object):
sub_dim = [sub_dim[i] if sub_dim[i] else full_dim[i] for i in range(len(sub_dim))]
subset = ['block',[[sub_dim[0],sub_dim[1]+1],[sub_dim[2],sub_dim[3]+1]]]
rasObj = GEOP.GEOPROCESSING(path_file2load, self.logger, subset=subset)
if job.exec_mode=='Flink' and path_output is None: # numpy array output
if CFG.get_job().exec_mode=='Flink' and path_output is None: # numpy array output
self.arr = gdalnumeric.LoadFile(path_file2load) if subset is None else \
gdalnumeric.LoadFile(path_file2load, rasObj.colStart,rasObj.rowStart,rasObj.cols,rasObj.rows)
self.path_InFilePreprocessor = path_file2load
@@ -577,7 +575,7 @@ class L1A_object(object):
data = gdalnumeric.LoadFile(sds_name)
if bidx == 0: data_arr = np.empty(data.shape+(len(self.LayerBandsAssignment),),data.dtype)
data_arr[:,:,bidx] = data
if job.exec_mode == 'Flink' and path_output is None: # numpy array output
if CFG.get_job().exec_mode == 'Flink' and path_output is None: # numpy array output
self.arr = data_arr
else:
GEOP.ndarray2gdal(data_arr,path_output,geotransform=ds.GetGeoTransform(),
@@ -600,7 +598,7 @@ class L1A_object(object):
data = hdfFile.select(dsIdx)[:]
if i == 0: data_arr = np.empty(data.shape+(len(self.LayerBandsAssignment),),data.dtype)
data_arr[:,:,i] = data
if job.exec_mode == 'Flink' and path_output is None: # numpy array output
if CFG.get_job().exec_mode == 'Flink' and path_output is None: # numpy array output
self.arr = data_arr
else:
GEOP.ndarray2gdal(data_arr,path_output,direction=3)
@@ -684,7 +682,7 @@ class L1A_object(object):
def calc_TOARadRefTemp(self, subset=None):
"""Convert DN or Ref data to TOA Reflectance, to Radiance or to Surface Temperature
(depending on usecase.conversion_type_optical and conversion_type_thermal).
(depending on CFG.get_usecase().conversion_type_optical and conversion_type_thermal).
The function can be executed by a L1A_object representing a full scene or a tile. To process a file from disk
in tiles, provide an item of self.tile_pos as the 'subset' argument."""
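
A hedged usage sketch of the tile-wise mode the docstring describes, assuming the items of `self.tile_pos` are subset specifications of the `['block', [[row_start, row_end], [col_start, col_end]]]` form used elsewhere in this module:

    # Sketch only; 'L1A_obj' stands for an initialized L1A_object instance.
    for tile_subset in L1A_obj.tile_pos:
        L1A_obj.calc_TOARadRefTemp(subset=tile_subset)
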
@@ -713,7 +711,7 @@ class L1A_object(object):
data_optical, data_thermal, optical_bandsList, thermal_bandsList = None, None, [], []
for optical_thermal in ['optical', 'thermal']:
if optical_thermal not in self.dict_LayerOptTherm.values(): continue
conv = getattr(usecase, 'conversion_type_%s' % optical_thermal)
conv = getattr(CFG.get_usecase(), 'conversion_type_%s' % optical_thermal)
assert conv in ['Rad', 'Ref', 'Temp'], 'Unsupported conversion type: %s' %conv
arr_desc = self.arr_desc.split('/')[0] if optical_thermal == 'optical' else self.arr_desc.split('/')[-1]
assert arr_desc in ['DN','Rad', 'Ref', 'Temp'], 'Unsupported array description: %s' %arr_desc
@@ -731,7 +729,7 @@ class L1A_object(object):
res = GEOP.DN2Rad(inArray, OFF, GAI, inFill, inZero, inSaturated) if conv == 'Rad' else \
GEOP.DN2TOARef(inArray,OFF,GAI,IRR,zen,esd,inFill,inZero,inSaturated) if conv == 'Ref' else \
GEOP.DN2DegreesCelsius_fastforward(inArray,OFF,GAI,K1,K2,0.95,inFill,inZero,inSaturated)
if conv=='Ref': self.MetaObj.ScaleFactor = usecase.scale_factor_TOARef
if conv=='Ref': self.MetaObj.ScaleFactor = CFG.get_usecase().scale_factor_TOARef
elif arr_desc == 'Rad':
raise NotImplementedError("Conversion Rad to %s is currently not supported." %conv)
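
For orientation, the textbook conversions that `GEOP.DN2Rad` and `GEOP.DN2TOARef` presumably implement, given the offset/gain, irradiance, sun zenith and earth-sun distance arguments passed above (a numpy sketch of the standard formulas, not the repository's code):

    import numpy as np

    def dn2rad(dn, gain, offset):
        # At-sensor spectral radiance: L = gain * DN + offset
        return gain * dn + offset

    def rad2toaref(rad, esun, sun_zenith_deg, earth_sun_dist, scale=10000):
        # TOA reflectance: rho = pi * L * d^2 / (ESUN * cos(theta_z)),
        # scaled by the configured factor (cf. scale_factor_TOARef).
        theta = np.deg2rad(sun_zenith_deg)
        return np.pi * rad * earth_sun_dist ** 2 / (esun * np.cos(theta)) * scale
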
@@ -740,15 +738,15 @@ class L1A_object(object):
if conv=='Rad':
raise NotImplementedError("Conversion Ref to Rad is currently not supported." % conv)
else: # conv=='Ref'
if self.MetaObj.ScaleFactor != usecase.scale_factor_TOARef:
res = self.rescale_array(inArray, usecase.scale_factor_TOARef, self.MetaObj.ScaleFactor)
self.MetaObj.ScaleFactor = usecase.scale_factor_TOARef
if self.MetaObj.ScaleFactor != CFG.get_usecase().scale_factor_TOARef:
res = self.rescale_array(inArray, CFG.get_usecase().scale_factor_TOARef, self.MetaObj.ScaleFactor)
self.MetaObj.ScaleFactor = CFG.get_usecase().scale_factor_TOARef
self.log_for_fullArr_or_firstTile(
'Rescaling Ref data to scaling factor %d.' %usecase.scale_factor_TOARef)
'Rescaling Ref data to scaling factor %d.' %CFG.get_usecase().scale_factor_TOARef)
else:
res = inArray
self.log_for_fullArr_or_firstTile('The input data already represents TOA '
'reflectance with the aimed scale factor of %d.' %usecase.scale_factor_TOARef)
'reflectance with the aimed scale factor of %d.' %CFG.get_usecase().scale_factor_TOARef)
else: # arr_desc == 'Temp'
raise NotImplementedError("Conversion Temp to %s is currently not supported." %conv)
@@ -772,8 +770,8 @@ class L1A_object(object):
assert dataOut is not None
#
self.update_spec_vals_according_to_dtype('int16')
tiles_desc = '_'.join([desc for op_th,desc in zip(['optical','thermal'], [usecase.conversion_type_optical,
usecase.conversion_type_thermal]) if desc in self.dict_LayerOptTherm.values()])
tiles_desc = '_'.join([desc for op_th,desc in zip(['optical','thermal'], [CFG.get_usecase().conversion_type_optical,
CFG.get_usecase().conversion_type_thermal]) if desc in self.dict_LayerOptTherm.values()])
self.arr = dataOut
self.arr_desc = tiles_desc
@@ -826,12 +824,12 @@ class L1A_object(object):
rasObj.georeference_by_TieP_or_inherent_GCPs(TieP=self.MetaObj.CornerTieP_LonLat, dst_CS=out_CS,
dst_CS_datum='WGS84', mode='GDAL', use_workspace=True, inFill=self.MetaObj.spec_vals['fill'])
if job.exec_mode=='Python':
if CFG.get_job().exec_mode=='Python':
path_warped = os.path.join(self.ExtractedFolder, self.baseN+'__' + self.arr_desc)
GEOP.ndarray2gdal(rasObj.tondarray(direction=3), path_warped, importFile=rasObj.desc, direction=3)
self.MetaObj.Dataname = path_warped
self.arr = path_warped
else: # job.exec_mode=='Flink':
else: # CFG.get_job().exec_mode=='Flink':
self.arr = rasObj.tondarray(direction=3)
self.shape_fullArr = [rasObj.rows,rasObj.cols,rasObj.bands]
@@ -848,7 +846,7 @@ class L1A_object(object):
self.MetaObj.CornerTieP_UTM = rasObj.get_corner_coordinates('UTM')
self.meta = self.MetaObj.Meta2ODict() # important in order to keep geotransform/projection
if job.exec_mode=='Flink':
if CFG.get_job().exec_mode=='Flink':
self.delete_tempFiles() # these files are needed later in Python execution mode
self.MetaObj.Dataname = previous_dataname # /vsi.. pointing directly to raw data archive (which exists)
@@ -897,14 +895,14 @@ class L1A_object(object):
mask_clouds = None
else:
self.log_for_fullArr_or_firstTile('Calculating cloud mask...', subset)
#if usecase.skip_thermal: # FIXME
#if CFG.get_usecase().skip_thermal: # FIXME
if not in_mem:
subset = subset if subset else ['cube',None]
rasObj = GEOP.GEOPROCESSING(inPath, self.logger, subset=subset)
self.arr = rasObj.tondarray(direction=3)
self.GMS_identifier['logger'] = self.logger
if not job.bench_CLD_class:
if not CFG.get_job().bench_CLD_class:
self.path_cloud_class_obj = PG.get_path_cloud_class_obj(self.GMS_identifier)
CLD_obj = CLD_P.GmsCloudClassifier(classifier=self.path_cloud_class_obj)
assert CLD_obj, 'Error loading cloud classifier.'
@@ -944,7 +942,7 @@ class L1A_object(object):
# vals = dpoints[dpoints[:,0] == cond][:,2].astype(np.float)
# pos = [j - (1 - space) / 2. + i * width for j in range(1,len(categories)+1)]
# ax.bar(pos, vals, width=width)
print(os.path.join(job.path_testing,'out/%s_benchmark_cloud_classifiers.png' %self.baseN))
print(os.path.join(CFG.get_job().path_testing,'out/%s_benchmark_cloud_classifiers.png' %self.baseN))
fig.savefig(os.path.abspath('./testing/out/%s_benchmark_cloud_classifiers.png' %self.baseN),
format='png',dpi=300,bbox_inches='tight')
mask_clouds = None
@@ -1173,7 +1171,7 @@ class L1A_object(object):
def combine_tiles_to_ObjAttr(self, tiles, target_attr):
# type: (list,str)
"""Combines tiles, e.g. produced by L1A_P.L1A_object.DN2TOARadRefTemp() to a single attribute.
If usecase.job.exec_mode == 'Python' the produced attribute is additionally written to disk.
If CFG.get_job().exec_mode == 'Python' the produced attribute is additionally written to disk.
:param tiles: <list> a list of dictionaries with the keys 'desc', 'data', 'row_start','row_end',
'col_start' and 'col_end'
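
A hedged example of one such tile dictionary with the keys the docstring names (all values illustrative, not taken from the repository):

    import numpy as np

    tile = {'desc': 'Ref',                              # conversion type label
            'data': np.zeros((512, 512, 6), np.int16),  # tile pixel block
            'row_start': 0, 'row_end': 511,
            'col_start': 0, 'col_end': 511}
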
@@ -1195,7 +1193,7 @@ class L1A_object(object):
self.arr_desc = tiles[0]['desc']
self.arr_shape = 'cube' if len(self.arr.shape)==3 else 'band' if len(self.arr.shape)==2 else 'unknown'
if job.exec_mode=='Python': # and not 'Flink'
if CFG.get_job().exec_mode=='Python': # and not 'Flink'
path_radref_file = os.path.join(self.ExtractedFolder, self.baseN+'__' + self.arr_desc)
# path_radref_file = os.path.abspath('./testing/out/%s_TOA_Ref' % self.baseN)
while not os.path.isdir(os.path.dirname(path_radref_file)):
@@ -1219,7 +1217,7 @@ class L1A_object(object):
self.logger.info("Writing tiles '%s' temporarily to disk..." % tiles[0]['desc'])
outpath = os.path.join(self.ExtractedFolder, '%s__%s.%s' %(self.baseN, tiles[0]['desc'], self.outInterleave))
if usecase.conversion_type_optical in tiles[0]['desc'] or usecase.conversion_type_thermal in tiles[0]['desc']:
if CFG.get_usecase().conversion_type_optical in tiles[0]['desc'] or CFG.get_usecase().conversion_type_thermal in tiles[0]['desc']:
self.meta = self.MetaObj.Meta2ODict() # important in order to keep geotransform/projection
self.arr_desc = tiles[0]['desc']
self.arr = outpath
@@ -1360,7 +1358,7 @@ class L1A_object(object):
'Cutting scene %s (entity ID %s) into MGRS tiles...' % (self.scene_ID, self.entity_ID))
# get GeoDataFrame containing all overlapping MGRS tiles (MGRS geometries completely within nodata area are excluded)
GDF_MGRS_tiles = DB_T.get_overlapping_MGRS_tiles(job.conn_db_meta,
GDF_MGRS_tiles = DB_T.get_overlapping_MGRS_tiles(CFG.get_job().conn_db_meta,
trueDataCornerLonLat=self.trueDataCornerLonLat)
# calculate image coordinate bounds of the full GMS object for each MGRS tile within the GeoDataFrame
@@ -1478,12 +1476,12 @@ class L1A_object(object):
def delete_previous_proc_level_results(self):
"""Deletes results of the previous processing level if the respective flag job.exec__L**P[2]) is set to True.
"""Deletes results of the previous processing level if the respective flag CFG.get_job().exec__L**P[2]) is set to True.
The function is skipped if the results of the current processing level have not yet been written.
"""
tgt_proc_level = HLP_F.proc_chain[HLP_F.proc_chain.index(self.proc_level)-1]
if not tgt_proc_level.startswith('L0') and getattr(job, 'exec__%sP' %tgt_proc_level)[2]:
if not tgt_proc_level.startswith('L0') and getattr(CFG.get_job(), 'exec__%sP' %tgt_proc_level)[2]:
PGobj = PG.path_generator(self.__dict__.copy(), proc_level=tgt_proc_level)
files2delete = [PGobj.get_path_imagedata(),
@@ -1538,7 +1536,7 @@ class L1A_object(object):
for f in files2delete:
HLP_F.silentremove(f)
# delete previous proc_level results on demand (according to job.exec__L**P[2])
# delete previous proc_level results on demand (according to CFG.get_job().exec__L**P[2])
self.delete_previous_proc_level_results()
self.logger.close()
@@ -28,8 +28,6 @@ import numpy as np
from geopandas import GeoDataFrame
from shapely.geometry import box
job, usecase, GMS_call_type = builtins.GMS_config.job, builtins.GMS_config.usecase, builtins.GMS_config.GMS_call_type
from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG
from ..misc import database_tools as DB_T
@@ -37,6 +35,8 @@ from .L1A_P import L1A_object
from ..misc.SpatialIndexMediator import SpatialIndexMediator
from ..misc.logging import GMS_logger
from .. import unified_config as CFG
#sys.path.append('/home/gfz-fe/')
from CoReg_Sat import COREG, DESHIFTER
from py_tools_ds.ptds import GeoArray
@@ -231,7 +231,7 @@ class Scene_finder(object):
SpIM = SpatialIndexMediator(timeout=timeout)
self.possib_ref_scenes = \
SpIM.getFullSceneDataForDataset(self.boundsLonLat , self.timeStart, self.timeEnd, self.min_cloudcov,
self.max_cloudcov, usecase.datasetid_spatial_ref,
self.max_cloudcov, CFG.get_usecase().datasetid_spatial_ref,
refDate=self.src_AcqDate, maxDaysDelta=self.plusminus_days)
break
except socket.timeout:
@@ -297,7 +297,7 @@ class Scene_finder(object):
if not GDF.empty:
# get processing level of reference scenes
query_procL = lambda sceneID: \
DB_T.get_info_from_postgreSQLdb(job.conn_database, 'scenes_proc', ['proc_level'], {'sceneid': sceneID})
DB_T.get_info_from_postgreSQLdb(CFG.get_job().conn_database, 'scenes_proc', ['proc_level'], {'sceneid': sceneID})
GDF['temp_queryRes'] = [*GDF['sceneid'] .map(query_procL)]
GDF['proc_level'] = [*GDF['temp_queryRes'].map(lambda queryRes: queryRes[0][0] if queryRes else None)]
GDF.drop('temp_queryRes',axis=1,inplace=True)
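
The processing level per reference scene is attached by mapping a query lambda over the `sceneid` column and then unpacking the first field of the first result row. The same map-then-unpack pattern in isolation, as a sketch with a stand-in lookup instead of the database call (plain DataFrame instead of the repo's GeoDataFrame):

    import pandas as pd

    lookup = {1: [('L1A',)], 2: []}          # sceneid -> query result rows
    gdf = pd.DataFrame({'sceneid': [1, 2]})
    gdf['temp_queryRes'] = [*gdf['sceneid'].map(lambda sid: lookup.get(sid, []))]
    gdf['proc_level'] = [*gdf['temp_queryRes'].map(lambda r: r[0][0] if r else None)]
    gdf.drop('temp_queryRes', axis=1, inplace=True)
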
@@ -325,7 +325,7 @@ class Scene_finder(object):
GDF = self.GDF_ref_scenes
if not GDF.empty: