Commit 007965bf authored by Daniel Scheffler's avatar Daniel Scheffler

implemented "GMS_object" as parent object for all GMS objects; dissolved L0A_P to config, and META; deleted L0B_P
algorithms:
- added new module 'gms_object', based on parts of L1A_object
    - GMS_object now provides the complete basic functionality of all GMS objects in later processing levels
- L0A_P has been dissolved/moved to config and METADATA
- L0B_P removed (deprecated)
algorithms.L1A_P.L1A_object:
- revised __init__() (L1A_object now inherits from GMS_object)
- added import_rasterdata()
- moved functions providing basic GMS functionality to GMS_object
- refactored fill_from_disk() to from_disk()
algorithms.L1B_P - L2C_P:
- updated __init__() functions due to new object structure
io.Output_writer:
- Obj2ENVI: data_DB_updater now receives a dictionary containing the current state of all object properties
misc.database_tools.GMS_JOB:
- added __repr__()
- bugfix for using CFG.job.conn_database instead of self.conn
- revised from_job_ID(): self.dataframe is now properly filled
misc.helper_functions:
- failed_GMS_object now inherits from GMS_object
- get_GMS_sensorcode() no longer requires a logger
processing.pipeline:
- refactored L0A_L1A_map to L1A_map and L0B_L1A_map_1 to L1A_map_1
- revised L1A_map and L1A_map_1 according to new structure of L1A_object
processing.process_controller:
- added attribute '_DB_job_record'
- added property 'DB_job_record'
- removed get_data_list() (deprecated)
- stop(): added docstring
- L1A_processing(): edited docstring and added logger output; changed reduce call (!)
- L1B_processing(): edited docstring and added logger output
- L1C_processing(): edited docstring and added logger output; changed reduce call (!)
- L2A_processing(): edited docstring and added logger output
- L2B_processing(): edited docstring and added logger output; changed reduce call (!)
- L2C_processing(): edited docstring and added logger output
config:
- Job: added attribute 'DB_job_record'
- Usecase:
    - added attributes '_job' and 'data_list'
    - moved get_usecase_coord_grid() from top level of module here
    - added get_entity_IDs_within_AOI() from L0A_P
    - added get_data_list_of_current_jobID() from L0A_P
- updated __version__
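
The central change is the new inheritance chain: GMS_object carries the shared functionality, L1A_object inherits from it, and each later processing level inherits from its predecessor, copying the attributes of the previous-level object when one is passed in. A minimal sketch (class and method names are taken from this commit; the bodies are illustrative assumptions, not the actual implementation):

class GMS_object(object):
    """Parent class providing the complete basic functionality of all GMS objects."""
    def __init__(self):
        self.proc_level = None

    def from_disk(self, tuple_GMS_subset):
        """Restore a previously written GMS object (was fill_from_disk() in L1A_object)."""
        ...

class L1A_object(GMS_object):
    def __init__(self):
        super().__init__()
        self.proc_level = 'L1A'

    def import_rasterdata(self):
        """New in this commit: read the raster data belonging to the scene."""
        ...

class L1B_object(L1A_object):
    def __init__(self, L1A_obj=None):
        super().__init__()
        if L1A_obj:
            # populate attributes from the previous processing level
            for key, value in L1A_obj.__dict__.items():
                setattr(self, key, value)
        self.proc_level = 'L1B'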
parent c588839b
@@ -15,7 +15,7 @@ from . import config
 from .processing.process_controller import process_controller
-__version__ = '20161202.03'
+__version__ = '20161207.01'
 __author__ = 'Daniel Scheffler'
 __all__ = ['algorithms',
            'io',
......
# -*- coding: utf-8 -*-
###############################################################################
#
# Level 0A Processor:
#
# Performed operations:
# Searches for remote sensing data within a given area of interest and returns
# a list of the datasets found, containing image type, satellite, sensor,
# acquisition date and data ID.
#
# Written by Daniel Scheffler
#
###############################################################################
########################### Library import ####################################
import datetime
import glob
import os
import re
from collections import OrderedDict
import psycopg2
import psycopg2.extras
from ..config import GMS_config as CFG
from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG
########################### core functions ####################################
def get_entity_IDs_within_AOI():  # called in console mode
    data_list = []
    if re.search('ALOS', ','.join(CFG.usecase.filt_datasets)):  # sensor name has to be in HLP_F.get_GMS_sensorcode
        data_list.append({'image_type':'RSD','satellite':'ALOS', 'sensor':'AVNIR-2', 'subsystem':None, 'acquisition_date':'2009-07-02', 'entity_ID':'A1002553-001-P6100002-AODS-201007300008'})  # TAR-ID 1B1
        data_list.append({'image_type':'RSD','satellite':'ALOS', 'sensor':'AVNIR-2', 'subsystem':None, 'acquisition_date':'2007-09-27', 'entity_ID':'20070927_L1B2_ALAV2A089152780'})  # extracted folder 1B2
        data_list.append({'image_type':'RSD','satellite':'ALOS', 'sensor':'AVNIR-2', 'subsystem':None, 'acquisition_date':'2009-07-19', 'entity_ID':'20090719_L1B2_ALAV2A185572780'})  # extracted folder 1B2
        data_list.append({'image_type':'RSD','satellite':'ALOS', 'sensor':'AVNIR-2', 'subsystem':None, 'acquisition_date':'2010-04-21', 'entity_ID':'20100421_L1B2_ALAV2A225832780'})  # extracted folder 1B2
    if re.search('Terra', ','.join(CFG.usecase.filt_datasets)):
        data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'VNIR1', 'acquisition_date':'2007-11-08', 'entity_ID':'AST_L1B_00308192007061017_20071108171717_32444'})  # HDF-ID
        data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'VNIR2', 'acquisition_date':'2007-11-08', 'entity_ID':'AST_L1B_00308192007061017_20071108171717_32444'})  # HDF-ID
        data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'SWIR', 'acquisition_date':'2007-11-08', 'entity_ID':'AST_L1B_00308192007061017_20071108171717_32444'})  # HDF-ID
        data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'TIR', 'acquisition_date':'2007-11-08', 'entity_ID':'AST_L1B_00308192007061017_20071108171717_32444'})  # HDF-ID
        data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'VNIR1', 'acquisition_date':'2002-06-08', 'entity_ID':'AST_L1A_003_05262002060543_06082002144959'})  # HDF-ID
        data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'VNIR2', 'acquisition_date':'2002-06-08', 'entity_ID':'AST_L1A_003_05262002060543_06082002144959'})  # HDF-ID
        data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'SWIR', 'acquisition_date':'2002-06-08', 'entity_ID':'AST_L1A_003_05262002060543_06082002144959'})  # HDF-ID
        data_list.append({'image_type':'RSD','satellite':'Terra', 'sensor':'ASTER', 'subsystem':'TIR', 'acquisition_date':'2002-06-08', 'entity_ID':'AST_L1A_003_05262002060543_06082002144959'})  # HDF-ID
    if re.search('Landsat', ','.join(CFG.usecase.filt_datasets)):  # sensor name has to be in HLP_F.get_GMS_sensorcode
        data_list.append({'image_type':'RSD','satellite':'Landsat-5', 'sensor':'TM', 'subsystem':None, 'acquisition_date':'1996-10-24', 'entity_ID':'LT51510321996298XXX01'})  # TAR-ID
        # data_list.append({'image_type':'RSD','satellite':'Landsat-7', 'sensor':'ETM+', 'subsystem':None, 'acquisition_date':'2002-08-15', 'entity_ID':'LE70050152002227EDC00'})  # TAR-ID
        # data_list.append({'image_type':'RSD','satellite':'Landsat-7', 'sensor':'ETM+', 'subsystem':None, 'acquisition_date':'2000-04-02', 'entity_ID':'LE71510322000093SGS00'})  # TAR-ID
        data_list = data_list + LandsatID2dataset([os.path.basename(i).split('.tar.gz')[0] for i in
                                                   glob.glob(os.path.join(CFG.job.path_archive, 'Landsat-7/ETM+/*.tar.gz'))])  # TAR-IDs
        data_list = data_list + LandsatID2dataset([os.path.basename(i).split('.tar.gz')[0] for i in
                                                   glob.glob(os.path.join(CFG.job.path_archive, 'Landsat-8/OLI_TIRS/*.tar.gz'))])  # TAR-IDs
        # data_list.append({'image_type':'RSD','satellite':'Landsat-8', 'sensor':'OLI_TIRS','subsystem':None, 'acquisition_date':'2013-07-03', 'entity_ID':'LC81510322013184LGN00'})  # TAR-ID
        # data_list.append({'image_type':'RSD','satellite':'Landsat-8', 'sensor':'OLI_TIRS','subsystem':None, 'acquisition_date':'2013-06-01', 'entity_ID':'LC81510322013152LGN00'})  # TAR-ID, ~6% cloud cover
    if re.search('SPOT', ','.join(CFG.usecase.filt_datasets)):
        data_list.append({'image_type':'RSD','satellite':'SPOT-1', 'sensor':'HRV1', 'subsystem':None, 'acquisition_date':'1986-07-17', 'entity_ID':'00197112001'})
        data_list.append({'image_type':'RSD','satellite':'SPOT-5', 'sensor':'HRG2', 'subsystem':None, 'acquisition_date':'2010-04-21', 'entity_ID':'00197112009'})
        data_list.append({'image_type':'RSD','satellite':'SPOT-5', 'sensor':'HRG2', 'subsystem':None, 'acquisition_date':'2010-04-21', 'entity_ID':'00197112010'})
    if re.search('RapidEye', ','.join(CFG.usecase.filt_datasets)):
        data_list.append({'image_type':'RSD','satellite':'RapidEye-5','sensor':'MSI', 'subsystem':None, 'acquisition_date':'2014-04-23', 'entity_ID':'4357606_2014-04-23_RE5_3A_180259'})
    if re.search('SRTM', ','.join(CFG.usecase.filt_datasets)):
        data_list.append({'image_type':'DGM','satellite':'SRTM', 'sensor':'SRTM2', 'subsystem':None, 'acquisition_date':'unknown', 'entity_ID':'srtm-1arcsec-version2jan2015-39-42n-70-85'})
    if re.search('ATM', ','.join(CFG.usecase.filt_datasets)):
        data_list.append({'image_type':'ATM','satellite':'ATM-data', 'sensor':'unknown', 'subsystem':None, 'acquisition_date':'unknown', 'entity_ID':'dummy_ID'})

    for ds in data_list:
        ds['proc_level'] = 'L0A'
        ds['acquisition_date'] = datetime.datetime.strptime(ds['acquisition_date'], '%Y-%m-%d')
        ds['subsystem'] = '' if ds['subsystem'] is None else ds['subsystem']
        # ds['scene_ID'] = '_'.join([ds['satellite'], ds['sensor'], ds['subsystem'], ds['entity_ID']])
        ds['scene_ID'] = ds['entity_ID']
        ds['sensormode'] = get_sensormode(ds)
    if CFG.usecase.skip_thermal:
        data_list = [ds for ds in data_list if not ds['subsystem'] == 'TIR']  # removes ASTER TIR in case of skip_thermal
    if CFG.usecase.skip_pan:
        data_list = [ds for ds in data_list if not ds['sensormode'] == 'P']  # removes e.g. SPOT PAN in case of skip_pan
    return data_list
def get_data_list_of_current_jobID():  # called in webapp mode
    job = CFG.job
    data_list = []
    with psycopg2.connect(job.conn_database) as conn:
        with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
            cur.execute("""
                WITH jobs_unnested AS (
                    SELECT id, unnest(sceneids) AS sceneid FROM jobs
                )
                SELECT jobs_unnested.sceneid,
                       scenes.datasetid,
                       scenes.acquisitiondate,
                       scenes.entityid,
                       scenes.filename,
                       COALESCE(scenes_proc.proc_level::text, 'L0A') AS proc_level,
                       datasets.image_type,
                       satellites.name AS satellite,
                       sensors.name    AS sensor,
                       subsystems.name AS subsystem
                FROM jobs_unnested
                LEFT OUTER JOIN scenes      ON scenes.id = jobs_unnested.sceneid
                LEFT OUTER JOIN scenes_proc ON scenes_proc.sceneid = jobs_unnested.sceneid
                LEFT OUTER JOIN datasets    ON datasets.id = datasetid
                LEFT OUTER JOIN satellites  ON satellites.id = satelliteid
                LEFT OUTER JOIN sensors     ON sensors.id = sensorid
                LEFT OUTER JOIN subsystems  ON subsystems.id = subsystemid
                WHERE jobs_unnested.id = %s
                """, (job.ID,))

            for row in cur.fetchall():
                ds = OrderedDict()
                ds["proc_level"] = row["proc_level"]
                ds["scene_ID"] = row["sceneid"]
                ds["datasetid"] = row["datasetid"]
                ds["image_type"] = row["image_type"]
                ds["satellite"] = row["satellite"]
                ds["sensor"] = row["sensor"]
                ds["subsystem"] = row["subsystem"]
                ds["acquisition_date"] = row["acquisitiondate"]
                ds["entity_ID"] = row["entityid"]
                ds["filename"] = row["filename"]

                ds['sensor'] = 'ETM+' if re.search(r'ETM\+', ds['sensor']) else ds['sensor']
                if CFG.usecase.skip_thermal and ds['subsystem'] == 'TIR':
                    continue  # removes ASTER TIR in case of skip_thermal
                ds['subsystem'] = '' if ds['subsystem'] is None else ds['subsystem']
                ds['sensormode'] = get_sensormode(ds)
                if CFG.usecase.skip_pan and ds['sensormode'] == 'P':
                    continue  # removes e.g. SPOT PAN in case of skip_pan

                if re.search("Sentinel-2A", ds['satellite'], re.I):
                    for subsystem in ['S2A10', 'S2A20', 'S2A60']:
                        sub_ds = ds.copy()
                        sub_ds['subsystem'] = subsystem
                        data_list.append(sub_ds)
                elif re.search("Terra", ds['satellite'], re.I):
                    for subsystem in ['VNIR1', 'VNIR2', 'SWIR', 'TIR']:
                        sub_ds = ds.copy()
                        sub_ds['subsystem'] = subsystem
                        data_list.append(sub_ds)
                else:
                    data_list.append(ds)
    return data_list
def LandsatID2dataset(ID_list):
    dataset_list = []
    for ID in ID_list:
        dataset = dict(image_type='RSD', satellite=None, sensor=None, subsystem=None, acquisition_date=None,
                       entity_ID=ID)
        dataset['satellite'] = 'Landsat-5' if ID[:3] == 'LT5' else 'Landsat-7' if ID[:3] == 'LE7' else 'Landsat-8' \
            if ID[:3] == 'LC8' else dataset['satellite']
        dataset['sensor'] = 'TM' if ID[:3] == 'LT5' else 'ETM+' if ID[:3] == 'LE7' else 'OLI_TIRS' \
            if ID[:3] == 'LC8' else dataset['sensor']
        dataset['subsystem'] = None
        dataset['acquisition_date'] = (datetime.datetime(int(ID[9:13]), 1, 1) +
                                       datetime.timedelta(int(ID[13:16]) - 1)).strftime('%Y-%m-%d')
        dataset_list.append(dataset)
    return dataset_list
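
The date arithmetic above relies on the pre-collection Landsat scene ID layout: ID[9:13] holds the acquisition year and ID[13:16] the day of year. A quick usage check with one of the scene IDs appearing earlier in this module:

# 'LC81510322013184LGN00': ID[9:13] == '2013' (year), ID[13:16] == '184' (day of year)
ds = LandsatID2dataset(['LC81510322013184LGN00'])[0]
print(ds['satellite'], ds['sensor'], ds['acquisition_date'])
# prints: Landsat-8 OLI_TIRS 2013-07-03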
def get_sensormode(dataset):
    if re.search('SPOT', dataset['satellite']):
        path_archive = PG.path_generator(dataset).get_local_archive_path_baseN()
        dim_ = HLP_F.open_specific_file_within_archive(path_archive, '*/scene01/metadata.dim')[0]
        SPOT_mode = re.search("<SENSOR_CODE>([a-zA-Z0-9]*)</SENSOR_CODE>", dim_, re.I).group(1)
        assert SPOT_mode in ['J', 'X', 'XS', 'A', 'P', 'M'], 'Unknown SPOT sensor mode: %s' % SPOT_mode
        return 'M' if SPOT_mode in ['J', 'X', 'XS'] else 'P'
    else:
        return 'M'
# -*- coding: utf-8 -*-
###############################################################################
#
# BigData 0B Processor:
#
# Performed operations:
# Downloads data from remote servers and creates a Python object holding the
# path of the downloaded data and some information about the dataset.
# Inputs:
# - list of datasets to be downloaded
# Output:
# - Python object with the file path of the downloaded archive, information
#   about the data, and the basename
#
# Written by Daniel Scheffler
# Section 1.4 Remote Sensing, GFZ Potsdam
#
###############################################################################
########################### Library import ####################################
#from __future__ import (division, print_function, unicode_literals,absolute_import)
import collections
import os
import re
from ..config import GMS_config as CFG
from ..misc import path_generator as PG
from ..misc.logging import GMS_logger
########################### core functions ####################################
class L0B_object(object):
    def __init__(self, data_list_posX):
        self.proc_level = 'L0B'
        self.job_ID = CFG.job.ID
        self.job_CPUs = CFG.job.CPUs
        self.image_type = data_list_posX['image_type']
        self.satellite = data_list_posX['satellite']
        self.sensor = data_list_posX['sensor']
        self.subsystem = data_list_posX['subsystem']
        self.sensormode = data_list_posX['sensormode']
        self.acquisition_date = data_list_posX['acquisition_date']
        self.entity_ID = data_list_posX['entity_ID']
        self.scene_ID = data_list_posX['scene_ID']
        self.filename = data_list_posX['filename']

        PathGen = PG.path_generator(self.__dict__)
        self.baseN = PathGen.get_baseN()
        self.path_procdata = PathGen.get_path_procdata()
        self.ExtractedFolder = PathGen.get_path_tempdir()
        self.path_logfile = PathGen.get_path_logfile()
        self.logger = GMS_logger('log__' + self.baseN, self.path_logfile, append=0)

        PathGen = PG.path_generator(self.__dict__)  # passes a logger in addition to previous attributes
        self.path_archive = PathGen.get_local_archive_path_baseN()
        self.GMS_identifier = collections.OrderedDict(zip(
            ['image_type', 'Satellite', 'Sensor', 'Subsystem', 'logger'],
            [self.image_type, self.satellite, self.sensor, self.subsystem, self.logger]))
        self.path_cloud_class_obj = PG.get_path_cloud_class_obj(self.GMS_identifier,
                                                                get_all=True if CFG.job.bench_CLD_class else False)

        if not os.path.isfile(self.path_archive) and not os.path.isdir(self.path_archive):
            self.logger.info("The %s dataset '%s' has not been processed earlier and no corresponding raw data "
                             "archive has been found at %s." % (self.sensor, self.entity_ID, self.path_archive))
            self.logger.info('Trying to download the dataset...')
            self.path_archive_valid = self._data_downloader(self.sensor, self.entity_ID)
        else:
            self.path_archive_valid = True

        self.georef = 1 if self.image_type == 'RSD' and re.search('OLI', self.sensor, re.I) else 0

        if self.path_archive_valid:
            self.logger.info('Level 0B object for %s %s%s (data-ID %s) successfully created.'
                             % (self.satellite, self.sensor,
                                (' ' + self.subsystem) if self.subsystem not in [None, ''] else '', self.entity_ID))
        if CFG.job.exec_mode == 'Python' and self.ExtractedFolder and not os.path.isdir(self.ExtractedFolder):
            os.makedirs(self.ExtractedFolder)

        # close loggers
        self.logger.close()
        del self.logger
        self.GMS_identifier['logger'].close()
        del self.GMS_identifier['logger']

    def _data_downloader(self, sensor, entity_ID):
        self.logger.info('Level 0B Processing started')
        success = False
        " > download source code for Landsat here < "
        if not success:
            self.logger.critical("Download for %s dataset '%s' failed. No further processing possible."
                                 % (sensor, entity_ID))
        return success
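
With this commit the entire L0B_P module above is deleted. Per the commit message, its bookkeeping is absorbed by GMS_object and L1A_object. A purely hypothetical sketch of the replacement call path (the method names come from the commit message; the from_disk() argument and the exact sequence are assumptions, not taken from the source):

# Hypothetical sketch only; the argument name is assumed.
obj = L1A_object()                      # a separate L0B_object is no longer needed
obj = obj.from_disk(tuple_GMS_subset)   # was fill_from_disk(); restores a previously written object
obj.import_rasterdata()                 # new in this commit: reads the raster data of the scene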
@@ -515,15 +515,17 @@ class ref_Scene:
 class L1B_object(L1A_object):
-    def __init__(self, L1A_obj):
-        super().__init__(None)
+    def __init__(self, L1A_obj=None):
+        super().__init__()
+
+        # set defaults
+        self._spatRef_available = None
-        if L1A_obj:
-            # set defaults
-            self.spatRef_scene = None  # set by self.get_spatial_reference_scene()
-            self.coreg_info = {}
-            self.deshift_results = collections.OrderedDict()
+        self.spatRef_scene = None  # set by self.get_spatial_reference_scene()
+        self.coreg_info = {}
+        self.deshift_results = collections.OrderedDict()
+
+        if L1A_obj:
+            # populate attributes
+            [setattr(self, key, value) for key, value in L1A_obj.__dict__.items()]
@@ -532,7 +534,7 @@ class L1B_object(L1A_object):
     @property
     def spatRef_available(self):
-        if self._spatRef_available:
+        if self._spatRef_available is None:
             return self._spatRef_available
         else:
             self.get_spatial_reference_scene()
......
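
For context: spatRef_available, together with the _spatRef_available = None default set in __init__ above, follows the lazily evaluated property pattern, where the private value starts as None and is computed on first access. A generic sketch of that pattern with a hypothetical class (the real lookup logic in L1B_object differs):

class Example(object):
    def __init__(self):
        self._spatRef_available = None  # default, as in L1B_object.__init__

    @property
    def spatRef_available(self):
        # compute once on first access, then reuse the cached value
        if self._spatRef_available is None:
            self.get_spatial_reference_scene()  # expected to set self._spatRef_available
        return self._spatRef_available

    def get_spatial_reference_scene(self):
        self._spatRef_available = True  # placeholder for the real reference-scene lookup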
@@ -39,9 +39,13 @@ from .L1B_P import L1B_object
 ########################### core functions ####################################
 class L1C_object(L1B_object):
-    def __init__(self, L1B_obj):
-        super().__init__(None)
-        if L1B_obj: [setattr(self, key, value) for key, value in L1B_obj.__dict__.items()]
+    def __init__(self, L1B_obj=None):
+        super().__init__()
+
+        if L1B_obj:
+            # populate attributes
+            [setattr(self, key, value) for key, value in L1B_obj.__dict__.items()]
+
         self.proc_level = 'L1C'
         self.lonlat_arr = None  # set by self.get_lonlat_coord_array()
         self.VZA_arr = None     # set by self.calc_VZA_array()
......
@@ -446,7 +446,11 @@ class DESHIFTER(object):
 class L2A_object(L1C_object):
-    def __init__(self, L1C_obj):
-        super().__init__(None)
-        if L1C_obj: [setattr(self, key, value) for key, value in L1C_obj.__dict__.items()]
+    def __init__(self, L1C_obj=None):
+        super().__init__()
+
+        if L1C_obj:
+            # populate attributes
+            [setattr(self, key, value) for key, value in L1C_obj.__dict__.items()]
+
         self.proc_level = 'L2A'
@@ -16,9 +16,14 @@ from ..io import Input_reader as INP_R
 class L2B_object(L2A_object):
-    def __init__(self, L2A_obj):
-        super().__init__(None)
-        if L2A_obj: [setattr(self, key, value) for key, value in L2A_obj.__dict__.items()]
+    def __init__(self, L2A_obj=None):
+        super().__init__()
+
+        if L2A_obj:
+            # populate attributes
+            [setattr(self, key, value) for key, value in L2A_obj.__dict__.items()]
+
         self.proc_level = 'L2B'
 
     def spectral_homogenization(self, subset=None, kind='linear'):
......
@@ -13,9 +13,14 @@ res = {}
 from .L2B_P import L2B_object
 class L2C_object(L2B_object):
-    def __init__(self, L2B_obj):
-        super().__init__(None)
-        if L2B_obj: [setattr(self, key, value) for key, value in L2B_obj.__dict__.items()]
+    def __init__(self, L2B_obj=None):
+        super().__init__()
+
+        if L2B_obj:
+            # populate attributes
+            [setattr(self, key, value) for key, value in L2B_obj.__dict__.items()]
+
         self.proc_level = 'L2C'
 
     def calc_geometric_accurracy(self):
......
@@ -40,6 +40,7 @@ from ..io import envifilehandling as ef
 from . import GEOPROCESSING as GEOP
 from ..misc import helper_functions as HLP_F
 from ..misc import database_tools as DB_T
+from ..misc.path_generator import path_generator
 # + Input Reader (has to be left out here in order to avoid circular dependencies)
@@ -1577,7 +1578,7 @@ def get_LayerBandsAssignment(GMS_identifier, nBands=None, ignore_usecase=False):
     else:
         LayerBandsAssignment = ['1']
     return LayerBandsAssignment
 #
 def get_dict_LayerOptTherm(GMS_identifier, LayerBandsAssignment):
     dict_out = collections.OrderedDict()
     [dict_out.update({lr: 'thermal' if isTHERMAL(GMS_identifier, lr) else 'optical'}) for lr in LayerBandsAssignment]
@@ -1684,3 +1685,30 @@ def metaDict_to_metaODict(metaDict, logger=None):
     meta_vals = [metaDict[k] for k in expected_keys] + [metaDict[k] for k in unexpected_keys]
     return collections.OrderedDict(zip(expected_keys + unexpected_keys, meta_vals))
+def LandsatID2dataset(ID_list):
+    dataset_list = []
+    for ID in ID_list:
+        dataset = dict(image_type='RSD', satellite=None, sensor=None, subsystem=None, acquisition_date=None,
+                       entity_ID=ID)
+        dataset['satellite'] = 'Landsat-5' if ID[:3] == 'LT5' else 'Landsat-7' if ID[:3] == 'LE7' else 'Landsat-8' \
+            if ID[:3] == 'LC8' else dataset['satellite']
+        dataset['sensor'] = 'TM' if ID[:3] == 'LT5' else 'ETM+' if ID[:3] == 'LE7' else 'OLI_TIRS' \
+            if ID[:3] == 'LC8' else dataset['sensor']
+        dataset['subsystem'] = None
+        dataset['acquisition_date'] = (datetime.datetime(int(ID[9:13]), 1, 1) +
+                                       datetime.timedelta(int(ID[13:16]) - 1)).strftime('%Y-%m-%d')
+        dataset_list.append(dataset)
+    return dataset_list
+
+def get_sensormode(dataset):
+    if re.search('SPOT', dataset['satellite']):
+        path_archive = path_generator(dataset).get_local_archive_path_baseN()
+        dim_ = HLP_F.open_specific_file_within_archive(path_archive, '*/scene01/metadata.dim')[0]
+        SPOT_mode = re.search("<SENSOR_CODE>([a-zA-Z0-9]*)</SENSOR_CODE>", dim_, re.I).group(1)
+        assert SPOT_mode in ['J', 'X', 'XS', 'A', 'P', 'M'], 'Unknown SPOT sensor mode: %s' % SPOT_mode
+        return 'M' if SPOT_mode in ['J', 'X', 'XS'] else 'P'
+    else:
+        return 'M'
\ No newline at end of file
@@ -9,6 +9,12 @@ import multiprocessing
 import collections
 import numpy as np
 import builtins
+import glob
+import re
+import sys
+import psycopg2
+import psycopg2.extras
+from collections import OrderedDict
def set_config(call_type, ID, exec_mode='Python', reset=False):
@@ -84,11 +90,11 @@ class Job:
         self.path_job_logs = absP('./logs/job_logs/')
         # processor configuration: [run processor, write output, delete output if not needed anymore]
-        self.exec__L1AP = [1, 0, 0]
-        self.exec__L1BP = [1, 0, 0]
-        self.exec__L1CP = [1, 0, 0]
-        self.exec__L2AP = [1, 0, 0]
-        self.exec__L2BP = [1, 0, 0]
+        self.exec__L1AP = [1, 1, 1]
+        self.exec__L1BP = [1, 1, 1]
+        self.exec__L1CP = [1, 1, 1]
+        self.exec__L2AP = [1, 1, 1]
+        self.exec__L2BP = [1, 1, 0]
         self.exec__L2CP = [1, 1, 0]
         if call_type == 'console':
@@ -111,6 +117,14 @@ class Job:
             self.path_archive = joinP(self.path_fileserver, 'database/sampledata/')
         elif call_type == 'webapp':
             self.conn_database = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='localhost' connect_timeout=3"  # FIXME: localhost could be a problem on other nodes
+
+            # check if the job ID exists in the database
+            from .misc.database_tools import GMS_JOB
+            try:
+                self.DB_job_record = GMS_JOB(self.conn_database).from_job_ID(ID)
+            except ValueError:
+                raise
+
             self.conn_db_meta = self.conn_database
             from .misc.database_tools import get_info_from_postgreSQLdb
             query_cfg = lambda key: \
@@ -119,7 +133,7 @@ class Job:
             self.path_tempdir = query_cfg('path_tempdir')
             self.path_procdata_scenes = joinP(self.path_fileserver, query_cfg('foldername_procdata_scenes'))
             self.path_procdata_MGRS = joinP(self.path_fileserver, query_cfg('foldername_procdata_MGRS'))
             self.path_archive = joinP(self.path_fileserver, query_cfg('foldername_download'))
             self.path_spatIdxSrv = query_cfg('path_spatial_index_mediator_server')
             self.path_earthSunDist = absP(query_cfg('path_earthSunDist'))
             self.path_SRFs = absP(query_cfg('path_SRFs'))
@@ -147,6 +161,8 @@ class Job:
 class Usecase:
     def __init__(self, _job):
+        self._job = _job
+
         from .misc.database_tools import get_info_from_postgreSQLdb
         query_cfg = lambda key: \
             get_info_from_postgreSQLdb(_job.conn_db_meta, 'config', ['value'], {'key': "%s" % key})[0][0]
@@ -174,10 +190,12 @@ class Usecase:
             self.conversion_type_optical = 'Ref'  # 'Rad' / 'Ref'
             self.conversion_type_thermal = 'Rad'  # 'Rad' / 'Temp'
             self.scale_factor_TOARef = 10000
             self.virtual_sensor_id = 10  # Sentinel-2A 10m
             self.datasetid_spectral_ref = 249  # Sentinel-2A
             self.target_CWL = []
             self.target_FWHM = []
+            self.data_list = self.get_entity_IDs_within_AOI()
         elif _job.call_type == 'webapp':
             query_job = lambda col: get_info_from_postgreSQLdb(_job.conn_db_meta, 'jobs', col, {'id': _job.ID})[0][0]
             # skip_thermal = int(query_cfg(_job.conn_db_meta, 'skip_thermal'))
@@ -208,15 +226,190 @@ class Usecase:
             # conversion_type_optical = 'Rad'   # 'Rad' / 'Ref'   # FIXME
             # conversion_type_thermal = 'Temp'  # 'Rad' / 'Temp'  # FIXME
             self.scale_factor_TOARef = int(query_cfg('scale_factor_TOARef'))
+            self.data_list = self.get_data_list_of_current_jobID()
         self.align_coord_grids = 1  # ONLY TO FORCE DEACTIVATION OF IMAGE RESAMPLING
         self.match_gsd = True
         assert isinstance(self.target_gsd, list) and len(self.target_gsd) == 2
-def get_usecase_coord_grid():
-    """consider projections of images with status georef = master"""
-    geotransform = (0, 1, 0, 0, 0, -1)  # FIXME
-    EPSG = 'EPSG:4326'  # FIXME
-    GSD_meters = 30  # default
-    return geotransform, EPSG, GSD_meters
\ No newline at end of file
+    @staticmethod
+    def get_usecase_coord_grid():
+        """consider projections of images with status georef = master"""
+        geotransform = (0, 1, 0, 0, 0, -1)  # FIXME
+        EPSG = 'EPSG:4326'  # FIXME
+        GSD_meters = 30  # default
+        return geotransform, EPSG, GSD_meters
+    def get_entity_IDs_within_AOI(self):  # called in console mode
+        from .algorithms.METADATA import LandsatID2dataset, get_sensormode
+
+        # parse cli arguments
+        sys.stderr.write("No scene ids from CLI received. Using old data_list.\n")
+
+        data_list = []
+        if re.search('ALOS', ','.join(self.filt_datasets)):  # sensor name has to be in HLP_F.get_GMS_sensorcode
+            data_list.append({'image_type': 'RSD', 'satellite': 'ALOS', 'sensor': 'AVNIR-2', 'subsystem': None,
+                              'acquisition_date': '2009-07-02',
+                              'entity_ID': 'A1002553-001-P6100002-AODS-201007300008'})  # TAR-ID 1B1
+            data_list.append({'image_type': 'RSD', 'satellite': 'ALOS', 'sensor': 'AVNIR-2', 'subsystem': None,
+                              'acquisition_date': '2007-09-27',
+                              'entity_ID': '20070927_L1B2_ALAV2A089152780'})  # extracted folder 1B2
+            data_list.append({'image_type': 'RSD', 'satellite': 'ALOS', 'sensor': 'AVNIR-2', 'subsystem': None,
+                              'acquisition_date': '2009-07-19',
+                              'entity_ID': '20090719_L1B2_ALAV2A185572780'})  # extracted folder 1B2
+            data_list.append({'image_type': 'RSD', 'satellite': 'ALOS', 'sensor': 'AVNIR-2', 'subsystem': None,
+                              'acquisition_date': '2010-04-21',
+                              'entity_ID': '20100421_L1B2_ALAV2A225832780'})  # extracted folder 1B2
+        if re.search('Terra', ','.join(self.filt_datasets)):
+            data_list.append({'image_type': 'RSD', 'satellite': 'Terra', 'sensor': 'ASTER', 'subsystem': 'VNIR1',
+                              'acquisition_date': '2007-11-08',
+                              'entity_ID': 'AST_L1B_00308192007061017_20071108171717_32444'})  # HDF-ID
+            data_list.append({'image_type': 'RSD', 'satellite': 'Terra', 'sensor': 'ASTER', 'subsystem': 'VNIR2',
+                              'acquisition_date': '2007-11-08',
+                              'entity_ID': 'AST_L1B_00308192007061017_20071108171717_32444'})  # HDF-ID
+            data_list.append({'image_type': 'RSD', 'satellite': 'Terra', 'sensor': 'ASTER', 'subsystem': 'SWIR',
+                              'acquisition_date': '2007-11-08',
+                              'entity_ID': 'AST_L1B_00308192007061017_20071108171717_32444'})  # HDF-ID
+            data_list.append({'image_type': 'RSD', 'satellite': 'Terra', 'sensor': 'ASTER', 'subsystem': 'TIR',
+                              'acquisition_date': '2007-11-08',
+                              'entity_ID': 'AST_L1B_00308192007061017_20071108171717_32444'})  # HDF-ID
+            data_list.append({'image_type': 'RSD', 'satellite': 'Terra', 'sensor': 'ASTER', 'subsystem': 'VNIR1',
+                              'acquisition_date': '2002-06-08',
+                              'entity_ID': 'AST_L1A_003_05262002060543_06082002144959'})  # HDF-ID
+            data_list.append({'image_type': 'RSD', 'satellite': 'Terra', 'sensor': 'ASTER', 'subsystem': 'VNIR2',
+                              'acquisition_date': '2002-06-08',
+                              'entity_ID': 'AST_L1A_003_05262002060543_06082002144959'})  # HDF-ID
+            data_list.append({'image_type': 'RSD', 'satellite': 'Terra', 'sensor': 'ASTER', 'subsystem': 'SWIR',
+                              'acquisition_date': '2002-06-08',
+                              'entity_ID': 'AST_L1A_003_05262002060543_06082002144959'})  # HDF-ID
+            data_list.append({'image_type': 'RSD', 'satellite': 'Terra', 'sensor': 'ASTER', 'subsystem': 'TIR',
+                              'acquisition_date': '2002-06-08',
+                              'entity_ID': 'AST_L1A_003_05262002060543_06082002144959'})  # HDF-ID
+        if re.search('Landsat', ','.join(self.filt_datasets)):  # sensor name has to be in HLP_F.get_GMS_sensorcode
+            data_list.append({'image_type': 'RSD', 'satellite': 'Landsat-5', 'sensor': 'TM', 'subsystem': None,
+                              'acquisition_date': '1996-10-24', 'entity_ID': 'LT51510321996298XXX01'})  # TAR-ID
+            # data_list.append({'image_type':'RSD','satellite':'Landsat-7', 'sensor':'ETM+', 'subsystem':None, 'acquisition_date':'2002-08-15', 'entity_ID':'LE70050152002227EDC00'})  # TAR-ID
+            # data_list.append({'image_type':'RSD','satellite':'Landsat-7', 'sensor':'ETM+', 'subsystem':None, 'acquisition_date':'2000-04-02', 'entity_ID':'LE71510322000093SGS00'})  # TAR-ID
+            data_list = data_list + LandsatID2dataset([os.path.basename(i).split('.tar.gz')[0] for i in glob.glob(
+                os.path.join(self._job.path_archive, 'Landsat-7/ETM+/*.tar.gz'))])  # TAR-IDs
+            data_list = data_list + LandsatID2dataset([os.path.basename(i).split('.tar.gz')[0] for i in glob.glob(
+                os.path.join(self._job.path_archive, 'Landsat-8/OLI_TIRS/*.tar.gz'))])  # TAR-IDs
+            # data_list.append({'image_type':'RSD','satellite':'Landsat-8', 'sensor':'OLI_TIRS','subsystem':None, 'acquisition_date':'2013-07-03', 'entity_ID':'LC81510322013184LGN00'})  # TAR-ID
+            # data_list.append({'image_type':'RSD','satellite':'Landsat-8', 'sensor':'OLI_TIRS','subsystem':None, 'acquisition_date':'2013-06-01', 'entity_ID':'LC81510322013152LGN00'})  # TAR-ID, ~6% cloud cover
+        if re.search('SPOT', ','.join(self.filt_datasets)):
+            data_list.append({'image_type': 'RSD', 'satellite': 'SPOT-1', 'sensor': 'HRV1', 'subsystem': None,
+                              'acquisition_date': '1986-07-17', 'entity_ID': '00197112001'})
+            data_list.append({'image_type': 'RSD', 'satellite': 'SPOT-5', 'sensor': 'HRG2', 'subsystem': None,
+                              'acquisition_date': '2010-04-21', 'entity_ID': '00197112009'})