Commit 91a15708 authored by Daniel Scheffler, committed by Mathias Peters

Merge remote-tracking branch 'gms-hu-inf/master' into hu_inf

parent cb049975
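The central change in this merge: module-level config objects that were previously read from builtins (job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase) are replaced by accessor calls on a new unified_config module (CFG.get_job() / CFG.get_usecase()). The unified_config module itself is not part of the diff shown here; a minimal sketch of the accessor pattern it implies, with hypothetical internals and names, might look like this:

# unified_config.py -- hypothetical sketch; only the get_job()/get_usecase()
# accessors are confirmed by the diff below, the internals are assumptions.
_job = None
_usecase = None

def set_config(job, usecase):
    # called once by the process controller instead of writing to builtins
    global _job, _usecase
    _job, _usecase = job, usecase

def get_job():
    # fail loudly if the process controller has not set the config yet
    if _job is None:
        raise RuntimeError('GMS job config has not been set by the process controller.')
    return _job

def get_usecase():
    if _usecase is None:
        raise RuntimeError('GMS usecase config has not been set by the process controller.')
    return _usecase

Compared with the old builtins approach, the accessor indirection lets the config be set, swapped, or mocked after module import time, which is what makes the per-call CFG.get_job() lookups in the diffs below work.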
@@ -57,7 +57,8 @@ from py_tools_ds.ptds.geo.coord_trafo import transform_utm_to_wgs84, transf
 from py_tools_ds.ptds.geo.projection import get_UTMzone, EPSG2WKT, isProjectedOrGeographic
 from py_tools_ds.ptds.geo.raster.reproject import warp_ndarray
-job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase  # read from builtins (set by process_controller)
+from .. import unified_config as CFG

 class GEOPROCESSING(object):
     """****CREATE OBJECT ****************************************************"""
@@ -1024,7 +1025,7 @@ class GEOPROCESSING(object):
            # %(dst_EPSG_code, in_nodataVal,out_nodataVal, translatedFile, warpedFile))
            os.system('gdalwarp -of ENVI --config GDAL_CACHEMAX 2048 -wm 2048 -t_srs EPSG:%s -tps -r \
                cubic -srcnodata %s -dstnodata %s -multi -overwrite -wo NUM_THREADS=%s -q %s %s' \
-               %(dst_EPSG_code,inFill,out_nodataVal,job.CPUs,translatedFile,warpedFile))
+               %(dst_EPSG_code,inFill,out_nodataVal,CFG.get_job().CPUs,translatedFile,warpedFile))
            # import shutil
            # shutil.copy(translatedFile, '//misc/hy5/scheffler/Skripte_Models/python/GeoMultiSens/testing/out/')  ## only for bugfixing
            # shutil.copy(translatedFile+'.hdr','//misc/hy5/scheffler/Skripte_Models/python/GeoMultiSens/testing/out/')  ## only for bugfixing
@@ -1045,7 +1046,7 @@ class GEOPROCESSING(object):
            # %(dst_EPSG_code,in_nodataVal,out_nodataVal,translatedFile,warpedFile))
            os.system('gdalwarp -of VRT --config GDAL_CACHEMAX 2048 -wm 2048 -ot Int16 -t_srs EPSG:%s -tps -r \
                cubic -srcnodata %s -dstnodata %s -overwrite -multi -wo NUM_THREADS=%s -q %s %s' \
-               % (dst_EPSG_code, inFill, out_nodataVal, job.CPUs, translatedFile, warpedFile))
+               % (dst_EPSG_code, inFill, out_nodataVal, CFG.get_job().CPUs, translatedFile, warpedFile))
            # print('warped')
            print('GDAL warping time',time.time()-t0)
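Both warp calls above shell out via os.system with %-interpolated strings, now sourcing the thread count from CFG.get_job().CPUs. As an illustration only (not part of the commit), the same gdalwarp invocation expressed with subprocess and an argument list avoids shell-quoting problems with paths; variable names are assumed from the surrounding code:

import subprocess

def run_gdalwarp(translatedFile, warpedFile, dst_EPSG_code, inFill, out_nodataVal):
    # equivalent of the ENVI-output call above, with NUM_THREADS from the job config
    subprocess.check_call([
        'gdalwarp', '-of', 'ENVI', '--config', 'GDAL_CACHEMAX', '2048', '-wm', '2048',
        '-t_srs', 'EPSG:%s' % dst_EPSG_code, '-tps', '-r', 'cubic',
        '-srcnodata', str(inFill), '-dstnodata', str(out_nodataVal),
        '-multi', '-overwrite', '-wo', 'NUM_THREADS=%s' % CFG.get_job().CPUs,
        '-q', translatedFile, warpedFile])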
@@ -18,23 +18,27 @@ import re
 import datetime
 import glob
 import builtins
+import psycopg2
+import psycopg2.extras
+from collections import OrderedDict
-job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase  # read from builtins (set by process_controller)
 from ..io import Input_reader as INP_R
 from ..misc import helper_functions as HLP_F
 from ..misc import database_tools as DB_T
 from ..misc import path_generator as PG
 from .METADATA import get_LayerBandsAssignment
+from .. import unified_config as CFG

 ########################### core functions ####################################
 def get_entity_IDs_within_AOI():  # called in console mode
     data_list = []
-    if re.search('ALOS', ','.join(usecase.filt_datasets)):  # sensorname has to be in HLP_F.get_GMS_sensorcode
+    if re.search('ALOS', ','.join(CFG.get_usecase().filt_datasets)):  # sensorname has to be in HLP_F.get_GMS_sensorcode
         data_list.append({'image_type':'RSD','satellite':'ALOS', 'sensor':'AVNIR-2', 'subsystem':None, 'acquisition_date':'2009-07-02', 'entity_ID':'A1002553-001-P6100002-AODS-201007300008'}) # TAR-ID 1B1
         data_list.append({'image_type':'RSD','satellite':'ALOS', 'sensor':'AVNIR-2', 'subsystem':None, 'acquisition_date':'2007-09-27', 'entity_ID':'20070927_L1B2_ALAV2A089152780'}) # extracted Folder 1B2
         data_list.append({'image_type':'RSD','satellite':'ALOS', 'sensor':'AVNIR-2', 'subsystem':None, 'acquisition_date':'2009-07-19', 'entity_ID':'20090719_L1B2_ALAV2A185572780'}) # extracted Folder 1B2
         data_list.append({'image_type':'RSD','satellite':'ALOS', 'sensor':'AVNIR-2', 'subsystem':None, 'acquisition_date':'2010-04-21', 'entity_ID':'20100421_L1B2_ALAV2A225832780'}) # extracted Folder 1B2
-    if re.search('Terra', ','.join(usecase.filt_datasets)):
+    if re.search('Terra', ','.join(CFG.get_usecase().filt_datasets)):
         data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'VNIR1', 'acquisition_date':'2007-11-08', 'entity_ID':'AST_L1B_00308192007061017_20071108171717_32444'}) # HDF-ID
         data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'VNIR2', 'acquisition_date':'2007-11-08', 'entity_ID':'AST_L1B_00308192007061017_20071108171717_32444'}) # HDF-ID
         data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'SWIR', 'acquisition_date':'2007-11-08', 'entity_ID':'AST_L1B_00308192007061017_20071108171717_32444'}) # HDF-ID
@@ -43,23 +47,23 @@ def get_entity_IDs_within_AOI():  # called in console mode
         data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'VNIR2', 'acquisition_date':'2002-06-08', 'entity_ID':'AST_L1A_003_05262002060543_06082002144959'}) # HDF-ID
         data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'SWIR', 'acquisition_date':'2002-06-08', 'entity_ID':'AST_L1A_003_05262002060543_06082002144959'}) # HDF-ID
         data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'TIR', 'acquisition_date':'2002-06-08', 'entity_ID':'AST_L1A_003_05262002060543_06082002144959'}) # HDF-ID
-    if re.search('Landsat', ','.join(usecase.filt_datasets)):  # sensorname has to be in HLP_F.get_GMS_sensorcode
+    if re.search('Landsat', ','.join(CFG.get_usecase().filt_datasets)):  # sensorname has to be in HLP_F.get_GMS_sensorcode
         data_list.append({'image_type':'RSD','satellite':'Landsat-5', 'sensor':'TM', 'subsystem':None, 'acquisition_date':'1996-10-24', 'entity_ID':'LT51510321996298XXX01'}) # TAR-ID
         # data_list.append({'image_type':'RSD','satellite':'Landsat-7', 'sensor':'ETM+', 'subsystem':None, 'acquisition_date':'2002-08-15', 'entity_ID':'LE70050152002227EDC00'}) # TAR-ID
         # data_list.append({'image_type':'RSD','satellite':'Landsat-7', 'sensor':'ETM+', 'subsystem':None, 'acquisition_date':'2000-04-02', 'entity_ID':'LE71510322000093SGS00'}) # TAR-ID
-        data_list = data_list + LandsatID2dataset([os.path.basename(i).split('.tar.gz')[0] for i in glob.glob(os.path.join(job.path_archive,'Landsat-7/ETM+/*.tar.gz'))]) # TAR-ID
-        data_list = data_list + LandsatID2dataset([os.path.basename(i).split('.tar.gz')[0] for i in glob.glob(os.path.join(job.path_archive,'Landsat-8/OLI_TIRS/*.tar.gz'))]) # TAR-ID
+        data_list = data_list + LandsatID2dataset([os.path.basename(i).split('.tar.gz')[0] for i in glob.glob(os.path.join(CFG.get_job().path_archive,'Landsat-7/ETM+/*.tar.gz'))]) # TAR-ID
+        data_list = data_list + LandsatID2dataset([os.path.basename(i).split('.tar.gz')[0] for i in glob.glob(os.path.join(CFG.get_job().path_archive,'Landsat-8/OLI_TIRS/*.tar.gz'))]) # TAR-ID
         # data_list.append({'image_type':'RSD','satellite':'Landsat-8', 'sensor':'OLI_TIRS','subsystem':None, 'acquisition_date':'2013-07-03', 'entity_ID':'LC81510322013184LGN00'}) # TAR-ID
         # data_list.append({'image_type':'RSD','satellite':'Landsat-8', 'sensor':'OLI_TIRS','subsystem':None, 'acquisition_date':'2013-06-01', 'entity_ID':'LC81510322013152LGN00'}) # TAR-ID ~6% Cloud cover
-    if re.search('SPOT', ','.join(usecase.filt_datasets)):
+    if re.search('SPOT', ','.join(CFG.get_usecase().filt_datasets)):
         data_list.append({'image_type':'RSD','satellite':'SPOT-1', 'sensor':'HRV1', 'subsystem':None, 'acquisition_date':'1986-07-17', 'entity_ID':'00197112001'})
         data_list.append({'image_type':'RSD','satellite':'SPOT-5', 'sensor':'HRG2', 'subsystem':None, 'acquisition_date':'2010-04-21', 'entity_ID':'00197112009'})
         data_list.append({'image_type':'RSD','satellite':'SPOT-5', 'sensor':'HRG2', 'subsystem':None, 'acquisition_date':'2010-04-21', 'entity_ID':'00197112010'})
-    if re.search('RapidEye', ','.join(usecase.filt_datasets)):
+    if re.search('RapidEye', ','.join(CFG.get_usecase().filt_datasets)):
         data_list.append({'image_type':'RSD','satellite':'RapidEye-5','sensor':'MSI', 'subsystem':None, 'acquisition_date':'2014-04-23', 'entity_ID':'4357606_2014-04-23_RE5_3A_180259'})
-    if re.search('SRTM', ','.join(usecase.filt_datasets)):
+    if re.search('SRTM', ','.join(CFG.get_usecase().filt_datasets)):
         data_list.append({'image_type':'DGM','satellite':'SRTM', 'sensor':'SRTM2', 'subsystem':None, 'acquisition_date':'unknown', 'entity_ID':'srtm-1arcsec-version2jan2015-39-42n-70-85'})
-    if re.search('ATM', ','.join(usecase.filt_datasets)):
+    if re.search('ATM', ','.join(CFG.get_usecase().filt_datasets)):
         data_list.append({'image_type':'ATM','satellite':'ATM-data', 'sensor':'unknown', 'subsystem':None, 'acquisition_date':'unknown', 'entity_ID':'dummy_ID'})
     for ds in data_list:
@@ -69,47 +73,74 @@ def get_entity_IDs_within_AOI():  # called in console mode
         #ds['scene_ID'] = '_'.join([ds['satellite'],ds['sensor'],ds['subsystem'],ds['entity_ID']])
         ds['scene_ID'] = ds['entity_ID']
         ds['sensormode'] = get_sensormode(ds)
-    if usecase.skip_thermal:
+    if CFG.get_usecase().skip_thermal:
         data_list = [ds for ds in data_list if not ds['subsystem'] == 'TIR']  # removes ASTER TIR in case of skip_thermal
-    if usecase.skip_pan:
+    if CFG.get_usecase().skip_pan:
         data_list = [ds for ds in data_list if not ds['sensormode'] == 'P']  # removes e.g. SPOT PAN in case of skip_pan
     return data_list

 def get_data_list_of_current_jobID():  # called in webapp mode
-    query = lambda tablename,vals2return,cond_dict,records2fetch=0:\
-        DB_T.get_info_from_postgreSQLdb(job.conn_database,tablename,vals2return,cond_dict,records2fetch)
-    resultset = query('jobs','sceneids',{'id':job.ID},3)
-    assert len(resultset) != 0, "Invalid jobID given - no corresponding job with the ID=%s found in database.\n" %job.ID
-    assert len(resultset) == 1, "Error in database. The jobID %s exists more than once. \n" %job.ID
-    sceneids = resultset[0][0]
-    data_list =[]  # TODO from here on: a second function
-    for sceneid in sceneids:
-        # add from postgreSQL-DB: proc_level, scene_ID, datasetid, image_type, satellite, sensor, subsystem,
-        # acquisition_date, entity_ID, filename
-        ds = DB_T.get_scene_and_dataset_infos_from_postgreSQLdb(sceneid)
+    job = CFG.get_job()
+    usecase = CFG.get_usecase()
+    data_list = []
+    with psycopg2.connect(job.conn_database) as conn:
+        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
+            cur.execute("""
+                WITH jobs_unnested AS (
+                    SELECT id, unnest(sceneids) AS sceneid FROM jobs
+                )
+                SELECT jobs_unnested.sceneid,
+                       scenes.datasetid,
+                       scenes.acquisitiondate,
+                       scenes.entityid,
+                       scenes.filename,
+                       COALESCE(scenes_proc.proc_level::text, 'L0A') AS proc_level,
+                       datasets.image_type,
+                       satellites.name AS satellite,
+                       sensors.name AS sensor,
+                       subsystems.name AS subsystem
+                FROM jobs_unnested
+                LEFT OUTER JOIN scenes ON scenes.id = jobs_unnested.sceneid
+                LEFT OUTER JOIN scenes_proc ON scenes_proc.sceneid = jobs_unnested.sceneid
+                LEFT OUTER JOIN datasets ON datasets.id = datasetid
+                LEFT OUTER JOIN satellites ON satellites.id = satelliteid
+                LEFT OUTER JOIN sensors ON sensors.id = sensorid
+                LEFT OUTER JOIN subsystems ON subsystems.id = subsystemid
+                WHERE jobs_unnested.id = %s
+                """,
+                (job.ID,))
+            for row in cur.fetchall():
+                ds = OrderedDict()
+                ds["proc_level"] = row["proc_level"]
+                ds["scene_ID"] = row["sceneid"]
+                ds["datasetid"] = row["datasetid"]
+                ds["image_type"] = row["image_type"]
+                ds["satellite"] = row["satellite"]
+                ds["sensor"] = row["sensor"]
+                ds["subsystem"] = row["subsystem"]
+                ds["acquisition_date"] = row["acquisitiondate"]
+                ds["entity_ID"] = row["entityid"]
+                ds["filename"] = row["filename"]
                 ds['sensor'] = 'ETM+' if re.search('ETM+', ds['sensor']) else ds['sensor']
-        if usecase.skip_thermal and ds['subsystem']=='TIR': continue  # removes ASTER TIR in case of skip_thermal
+                if CFG.get_usecase().skip_thermal and ds['subsystem']=='TIR': continue  # removes ASTER TIR in case of skip_thermal
                 ds['subsystem'] = '' if ds['subsystem'] is None else ds['subsystem']
                 ds['sensormode'] = get_sensormode(ds)
-        if usecase.skip_pan and ds['sensormode']=='P': continue  # removes e.g. SPOT PAN in case of skip_pan
+                if CFG.get_usecase().skip_pan and ds['sensormode']=='P': continue  # removes e.g. SPOT PAN in case of skip_pan
                 if re.search("Sentinel-2A",ds['satellite'],re.I):
                     for subsystem in ['S2A10','S2A20','S2A60']:
                         sub_ds = ds.copy()
                         sub_ds['subsystem'] = subsystem
                         data_list.append(sub_ds)
                 elif re.search("Terra", ds['satellite'], re.I):
                     for subsystem in ['VNIR1', 'VNIR2', 'SWIR','TIR']:
                         sub_ds = ds.copy()
                         sub_ds['subsystem'] = subsystem
                         data_list.append(sub_ds)
                 else:
                     data_list.append(ds)
+    '''OrderedDict([('datasetid', 104), ('image_type', 'RSD'), ('satellite', 'Landsat-8'), ('sensor', 'OLI_TIRS'),
+                    ('subsystem', ''), ('acquisition_date', datetime.datetime(2013, 7, 3, 5, 48, 32)),
+                    ('entityid', 'LC81510322013184LGN00'), ('filename', 'LC81510322013184LGN00.tar.gz'), ('sensormode', 'M')])'''
     return data_list

 def LandsatID2dataset(ID_list):
@@ -138,11 +169,11 @@ def get_sensormode(dataset):
         return 'M'

 def add_local_availability(dataset):
-    if job.call_type == 'webapp':
+    if CFG.get_job().call_type == 'webapp':
         DB_match = DB_T.get_info_from_postgreSQLdb \
-            (job.conn_database,'scenes_proc',['proc_level','layer_bands_assignment'], {'sceneid':dataset['scene_ID']})
-    else:  ## job.call_type == 'console'
-        DB_match = DB_T.get_info_from_SQLdb(job.path_database,'processed_data',['proc_level','LayerBandsAssignment'],
+            (CFG.get_job().conn_database,'scenes_proc',['proc_level','layer_bands_assignment'], {'sceneid':dataset['scene_ID']})
+    else:  ## CFG.get_job().call_type == 'console'
+        DB_match = DB_T.get_info_from_SQLdb(CFG.get_job().path_database,'processed_data',['proc_level','LayerBandsAssignment'],
            {'image_type':dataset['image_type'],'satellite':dataset['satellite'], 'sensor':dataset['sensor'],
            'subsystem':dataset['subsystem'], 'sensormode':dataset['sensormode'], 'entity_ID':dataset['entity_ID']})
     path_logfile = PG.path_generator(dataset).get_path_logfile()
@@ -179,7 +210,7 @@ def add_local_availability(dataset):
                    'logfile %s has been written successfully. Recreating missing database entry.' \
                    %(dataset['entity_ID'],ProcL))
                DB_T.data_DB_updater(GMS_file_dict)
-               if job.call_type == 'console':
+               if CFG.get_job().call_type == 'console':
                    DB_T.SQL_DB_to_csv()
                dataset['proc_level'] = ProcL
            elif len(DB_match) == 1:
@@ -206,4 +237,4 @@ def add_local_availability(dataset):
            dataset['proc_level'] = None
    else:
        dataset['proc_level'] = None
-    return dataset
\ No newline at end of file
+    return dataset
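The rewritten get_data_list_of_current_jobID() replaces one database lookup per scene ID with a single round trip: unnest(sceneids) expands the integer array stored in jobs.sceneids into one row per scene, the LEFT OUTER JOINs attach the scene, dataset, satellite, sensor, and subsystem metadata, and COALESCE(scenes_proc.proc_level::text, 'L0A') defaults scenes without a processing record to 'L0A'. The DictCursor makes rows addressable by column name instead of position. A stripped-down sketch of the same pattern (connection string and function name are placeholders, not from the commit):

import psycopg2
import psycopg2.extras

def scene_ids_of_job(conn_database, job_id):
    # conn_database: a libpq DSN such as "dbname=... host=... user=..." (placeholder)
    with psycopg2.connect(conn_database) as conn:
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            cur.execute("""
                WITH jobs_unnested AS (
                    SELECT id, unnest(sceneids) AS sceneid FROM jobs
                )
                SELECT sceneid FROM jobs_unnested WHERE id = %s
                """, (job_id,))
            return [row['sceneid'] for row in cur.fetchall()]  # DictCursor: access by column name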
@@ -25,15 +25,16 @@ import collections
 from ..misc import path_generator as PG
 from ..misc.logging import GMS_logger
-job = builtins.GMS_config.job  # read from builtins (set by process_controller)
+from .. import unified_config as CFG

 ########################### core functions ####################################
 class L0B_object(object):
     def __init__(self, data_list_posX):
         self.proc_level = 'L0B'
-        self.job_ID = job.ID
-        self.job_CPUs = job.CPUs
+        self.job_ID = CFG.get_job().ID
+        self.job_CPUs = CFG.get_job().CPUs
         self.image_type = data_list_posX['image_type']
         self.satellite = data_list_posX['satellite']
         self.sensor = data_list_posX['sensor']
@@ -56,7 +57,7 @@ class L0B_object(object):
            ['image_type' ,'Satellite' ,'Sensor' ,'Subsystem' ,'logger' ],
            [self.image_type,self.satellite,self.sensor,self.subsystem,self.logger]) )
        self.path_cloud_class_obj = PG.get_path_cloud_class_obj(self.GMS_identifier,
-           get_all=True if job.bench_CLD_class else False)
+           get_all=True if CFG.get_job().bench_CLD_class else False)
        if not os.path.isfile(self.path_archive) and not os.path.isdir(self.path_archive):
            self.logger.info("The %s dataset '%s' has not been processed earlier and no corresponding raw data archive"
@@ -71,7 +72,7 @@ class L0B_object(object):
        self.logger.info('Level 0B object for %s %s%s (data-ID %s) successfully created.' %(self.satellite,
            self.sensor, (' '+self.subsystem) if self.subsystem not in [None,''] else '', self.entity_ID))
-       if job.exec_mode=='Python' and self.ExtractedFolder and not os.path.isdir(self.ExtractedFolder):
+       if CFG.get_job().exec_mode=='Python' and self.ExtractedFolder and not os.path.isdir(self.ExtractedFolder):
            os.makedirs(self.ExtractedFolder)
        # close loggers
@@ -87,4 +88,4 @@ class L0B_object(object):
            " > download source code for Landsat here < "
        if not success:
            self.logger.critical("Download for %s dataset '%s' failed. No further processing possible." %(sensor,entity_ID))
-       return success
\ No newline at end of file
+       return success
@@ -59,9 +59,7 @@ from py_tools_ds.ptds.geo.coord_trafo import pixelToLatLon, pixelToMapYX
 from py_tools_ds.ptds.geo.map_info import geotransform2mapinfo, mapinfo2geotransform
 from py_tools_ds.ptds.geo.projection import WKT2EPSG
 from py_tools_ds.ptds.geo.coord_grid import is_coord_grid_equal
+from .. import unified_config as CFG
-job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase  # read from builtins (set by process_controller)

 ############################# L1A object ######################################
 class L1A_object(object):
@@ -95,7 +93,7 @@ class L1A_object(object):
        self.GMS_identifier = L0B_object.GMS_identifier if L0B_object else None
        if self.GMS_identifier:
            self.GMS_identifier['logger'] = self.logger
-       self.dataset_ID = int(DB_T.get_info_from_postgreSQLdb(job.conn_database, 'scenes', ['datasetid'],
+       self.dataset_ID = int(DB_T.get_info_from_postgreSQLdb(CFG.get_job().conn_database, 'scenes', ['datasetid'],
            {'id': self.scene_ID})[0][0]) if self.scene_ID !=-9999 else -9999
        self.outInterleave = 'bsq'
        self.LayerBandsAssignment = ''
@@ -127,7 +125,7 @@ class L1A_object(object):
            %(os.path.basename(self.path_archive), os.path.dirname(self.path_archive))
        assert isinstance(self.path_archive, str), 'Invalid path to RAW data. Got %s instead of string or unicode.'\
            %type(self.path_archive)
-       if job.exec_mode=='Python' and self.ExtractedFolder: assert os.path.exists(self.path_archive), \
+       if CFG.get_job().exec_mode=='Python' and self.ExtractedFolder: assert os.path.exists(self.path_archive), \
            'Invalid path for temporary files. Directory %s does not exist.' %self.ExtractedFolder
        self.logger.info('\n\n########### Level 1A Processing started ############\n')
@@ -137,7 +135,7 @@ class L1A_object(object):
            if self.sensormode != 'P' else META.get_LayerBandsAssignment(self.GMS_identifier, nBands = 1)
        self.dict_LayerOptTherm = META.get_dict_LayerOptTherm(self.GMS_identifier,self.LayerBandsAssignment)
-       if job.exec_mode=='Python':
+       if CFG.get_job().exec_mode=='Python':
            self.path_InFilePreprocessor = os.path.join(self.ExtractedFolder, '%s%s_DN.bsq' \
                %(self.entity_ID,('_'+self.subsystem if re.search("Terra", self.satellite, re.I) else '')))
        else:  # Flink
@@ -225,8 +223,8 @@ class L1A_object(object):
    @property
    def coreg_needed(self):
        gt = mapinfo2geotransform(self.meta['map info'])
-       return (is_coord_grid_equal(gt, usecase.spatial_ref_gridx, usecase.spatial_ref_gridy) and
-               self.dataset_ID == usecase.datasetid_spatial_ref) is False
+       return (is_coord_grid_equal(gt, CFG.get_usecase().spatial_ref_gridx, CFG.get_usecase().spatial_ref_gridy) and
+               self.dataset_ID == CFG.get_usecase().datasetid_spatial_ref) is False

    def fill_from_disk(self,tuple_GMS_subset):
@@ -244,14 +242,14 @@ class L1A_object(object):
        path_arr = PG_obj.get_path_imagedata()
        path_masks = PG_obj.get_path_maskdata()
        path_maskClouds = PG_obj.get_path_cloudmaskdata()
-       if job.exec_mode=='Flink':
+       if CFG.get_job().exec_mode=='Flink':
            self.arr = INP_R.read_ENVIfile(path_arr,self.arr_shape,self.arr_pos,self.logger,q=1)
            self.mask_1bit = INP_R.read_mask_subset(path_masks,'mask_1bit', self.logger,tuple_GMS_subset[1])
            self.mask_clouds = INP_R.read_mask_subset(path_masks,'mask_clouds',self.logger,tuple_GMS_subset[1])
            self.log_for_fullArr_or_firstTile('Reading file %s as tiles...' %self.baseN \
                if self.arr_pos else 'Reading file %s...' %self.baseN)
            #self.masks is only needed by Output writer to masks combined -> generated there and on demand
-       else:  # job.exec_mode=='Python'
+       else:  # CFG.get_job().exec_mode=='Python'
            self.arr = path_arr
            self.mask_1bit = path_masks
            self.mask_clouds = path_maskClouds
@@ -447,7 +445,7 @@ class L1A_object(object):
        #         bands = band_nd
        #         band_names.append(band)
        #         self.logger.info(band)
-       #     elif usecase.skip_thermal and META.isTHERMAL(self.GMS_identifier, LayerNr):
+       #     elif CFG.get_usecase().skip_thermal and META.isTHERMAL(self.GMS_identifier, LayerNr):
        #         self.logger.info(
        #             'Band %s skipped because Layerstacking has been called with skipthermal = True.' % band)
        #     elif META.isPAN(self.GMS_identifier, LayerNr):
@@ -535,7 +533,7 @@ class L1A_object(object):
        sub_dim = [sub_dim[i] if sub_dim[i] else full_dim[i] for i in range(len(sub_dim))]
        subset = ['block',[[sub_dim[0],sub_dim[1]+1],[sub_dim[2],sub_dim[3]+1]]]
        rasObj = GEOP.GEOPROCESSING(paths_files2stack[0], self.logger,subset=subset)
-       if job.exec_mode == 'Flink' and path_output is None:  # numpy array output
+       if CFG.get_job().exec_mode == 'Flink' and path_output is None:  # numpy array output
            self.arr = rasObj.Layerstacking(paths_files2stack)
            self.path_InFilePreprocessor = paths_files2stack[0]
        else:  # 'MEMORY' or physical output
@@ -553,7 +551,7 @@ class L1A_object(object):
        sub_dim = [sub_dim[i] if sub_dim[i] else full_dim[i] for i in range(len(sub_dim))]
        subset = ['block',[[sub_dim[0],sub_dim[1]+1],[sub_dim[2],sub_dim[3]+1]]]
        rasObj = GEOP.GEOPROCESSING(path_file2load, self.logger, subset=subset)
-       if job.exec_mode=='Flink' and path_output is None:  # numpy array output
+       if CFG.get_job().exec_mode=='Flink' and path_output is None:  # numpy array output
            self.arr = gdalnumeric.LoadFile(path_file2load) if subset is None else \
                gdalnumeric.LoadFile(path_file2load, rasObj.colStart,rasObj.rowStart,rasObj.cols,rasObj.rows)
            self.path_InFilePreprocessor = path_file2load
@@ -577,7 +575,7 @@ class L1A_object(object):
            data = gdalnumeric.LoadFile(sds_name)
            if bidx == 0: data_arr = np.empty(data.shape+(len(self.LayerBandsAssignment),),data.dtype)
            data_arr[:,:,bidx] = data
-       if job.exec_mode == 'Flink' and path_output is None:  # numpy array output
+       if CFG.get_job().exec_mode == 'Flink' and path_output is None:  # numpy array output
            self.arr = data_arr
        else:
            GEOP.ndarray2gdal(data_arr,path_output,geotransform=ds.GetGeoTransform(),
@@ -600,7 +598,7 @@ class L1A_object(object):
            data = hdfFile.select(dsIdx)[:]
            if i == 0: data_arr = np.empty(data.shape+(len(self.LayerBandsAssignment),),data.dtype)
            data_arr[:,:,i] = data
-       if job.exec_mode == 'Flink' and path_output is None:  # numpy array output
+       if CFG.get_job().exec_mode == 'Flink' and path_output is None:  # numpy array output
            self.arr = data_arr
        else:
            GEOP.ndarray2gdal(data_arr,path_output,direction=3)
@@ -684,7 +682,7 @@ class L1A_object(object):
    def calc_TOARadRefTemp(self, subset=None):
        """Convert DN or Ref data to TOA Reflectance, to Radiance or to Surface Temperature
-       (depending on usecase.conversion_type_optical and conversion_type_thermal).
+       (depending on CFG.get_usecase().conversion_type_optical and conversion_type_thermal).
        The function can be executed by a L1A_object representing a full scene or a tile. To process a file from disk
        in tiles, provide an item of self.tile_pos as the 'subset' argument."""
@@ -713,7 +711,7 @@ class L1A_object(object):
        data_optical, data_thermal, optical_bandsList, thermal_bandsList = None, None, [], []
        for optical_thermal in ['optical', 'thermal']:
            if optical_thermal not in self.dict_LayerOptTherm.values(): continue
-           conv = getattr(usecase, 'conversion_type_%s' % optical_thermal)
+           conv = getattr(CFG.get_usecase(), 'conversion_type_%s' % optical_thermal)
            assert conv in ['Rad', 'Ref', 'Temp'], 'Unsupported conversion type: %s' %conv
            arr_desc = self.arr_desc.split('/')[0] if optical_thermal == 'optical' else self.arr_desc.split('/')[-1]
            assert arr_desc in ['DN','Rad', 'Ref', 'Temp'], 'Unsupported array description: %s' %arr_desc
@@ -731,7 +729,7 @@ class L1A_object(object):
            res = GEOP.DN2Rad(inArray, OFF, GAI, inFill, inZero, inSaturated) if conv == 'Rad' else \
                GEOP.DN2TOARef(inArray,OFF,GAI,IRR,zen,esd,inFill,inZero,inSaturated) if conv == 'Ref' else \
                GEOP.DN2DegreesCelsius_fastforward(inArray,OFF,GAI,K1,K2,0.95,inFill,inZero,inSaturated)
-           if conv=='Ref': self.MetaObj.ScaleFactor = usecase.scale_factor_TOARef
+           if conv=='Ref': self.MetaObj.ScaleFactor = CFG.get_usecase().scale_factor_TOARef
            elif arr_desc == 'Rad':
                raise NotImplementedError("Conversion Rad to %s is currently not supported." %conv)
@@ -740,15 +738,15 @@ class L1A_object(object):
            if conv=='Rad':
                raise NotImplementedError("Conversion Ref to Rad is currently not supported." % conv)
            else:  # conv=='Ref'
-               if self.MetaObj.ScaleFactor != usecase.scale_factor_TOARef: