Commit a16e19c1 authored by Daniel Scheffler

Config revision - intermediate state: JSON file completely parsed.


Former-commit-id: cc4f6a67
parent 068e26cb
......@@ -207,7 +207,6 @@ def get_gms_argparser():
'-blocksize': dict(),
'-profiling': dict(),
'-bench_all': dict(),
'-bench_cloudMask': dict(),
}
......
......@@ -148,8 +148,8 @@ class L1C_object(L1B_object):
meshwidth=10,
nodata_mask=None, # don't overwrite areas outside the image with nodata
outFill=get_outFillZeroSaturated(np.float32)[0],
accurracy=CFG.job.SZA_SAA_calculation_accurracy,
lonlat_arr=self.lonlat_arr if CFG.job.SZA_SAA_calculation_accurracy == 'fine' else None)
accurracy=CFG.usecase.SZA_SAA_calculation_accurracy,
lonlat_arr=self.lonlat_arr if CFG.usecase.SZA_SAA_calculation_accurracy == 'fine' else None)
return self._SZA_arr
@SZA_arr.setter
......@@ -662,7 +662,7 @@ class AtmCorr(object):
# compute cloud mask if not already provided
if no_avail_CMs:
algorithm = CFG.job.cloud_masking_algorithm[self.inObjs[0].satellite]
algorithm = CFG.usecase.cloud_masking_algorithm[self.inObjs[0].satellite]
if algorithm == 'SICOR':
return None
......@@ -792,7 +792,7 @@ class AtmCorr(object):
script = False
# check if ECMWF data are available - if not, start the download
if CFG.job.auto_download_ecmwf:
if CFG.usecase.auto_download_ecmwf:
self._check_or_download_ECMWF_data()
# validate SNR
......
......@@ -15,11 +15,17 @@ from inspect import getargvalues, stack, getfullargspec, signature, _empty
import json
from jsmin import jsmin
from cerberus import Validator
import pkgutil
from typing import TYPE_CHECKING, Dict
if TYPE_CHECKING:
from .misc.database_tools import GMS_JOB
__author__ = 'Daniel Scheffler'
def set_config(job_ID, exec_mode='Python', db_host='localhost', reset=False, job_kwargs=None):
def set_configOLD(job_ID, exec_mode='Python', db_host='localhost', reset=False, job_kwargs=None):
# type: (int, str, str, bool, dict) -> None
"""Set up a configuration for a new gms_preprocessing job!
......@@ -69,12 +75,513 @@ class GMS_configuration(object):
GMS_config = GMS_configuration()
def set_config(job_ID, exec_mode='Python', db_host='localhost', reset=False, exec_L1AP=None, exec_L1BP=None,
exec_L1CP=None, exec_L2AP=None, exec_L2BP=None, exec_L2CP=None, CPUs=None,
allow_subMultiprocessing=True, disable_exception_handler=True, log_level='INFO',
tiling_block_size_XY=(2048, 2048), is_test=False, profiling=False, benchmark_global=False,
path_procdata_scenes=None, path_procdata_MGRS=None, path_archive=None):
"""Set up a configuration for a new gms_preprocessing job!
:param job_ID: job ID of the job to be executed, e.g. 123456 (must be present in database)
:param exec_mode: 'Python': writes intermediate results to disk in order to save memory
'Flink': keeps intermediate results in memory in order to save IO time
:param db_host: host name of the server that runs the postgreSQL database
:param reset: whether to reset the job status or not (default=False)
:param exec_L1AP: list of 3 elements: [run processor, write output, delete output if not needed anymore]
:param exec_L1BP: list of 3 elements: [run processor, write output, delete output if not needed anymore]
:param exec_L1CP: list of 3 elements: [run processor, write output, delete output if not needed anymore]
:param exec_L2AP: list of 3 elements: [run processor, write output, delete output if not needed anymore]
:param exec_L2BP: list of 3 elements: [run processor, write output, delete output if not needed anymore]
:param exec_L2CP: list of 3 elements: [run processor, write output, delete output if not needed anymore]
:param CPUs: number of CPU cores to be used for processing (default: None -> use all available)
:param allow_subMultiprocessing:
allow multiprocessing within workers
:param disable_exception_handler:
whether to disable the automatic handling of unexpected exceptions (default: True -> handler disabled)
:param log_level: the logging level to be used (choices: 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL';
default: 'INFO')
:param tiling_block_size_XY:
X/Y block size to be used for any tiling process (default: (2048, 2048))
:param is_test: whether the current job represents a software test job (run by a test runner) or not
(default=False)
:param profiling: enable/disable code profiling (default: False)
:param benchmark_global:
enable/disable benchmark of the whole processing pipeline
:param path_procdata_scenes:
output path to store processed scenes
:param path_procdata_MGRS:
output path to store processed MGRS tiles
:param path_archive: input path where downloaded data are stored
"""
if not hasattr(builtins, 'GMS_JobConfig') or reset:
kwargs = dict([x for x in locals().items() if x[0] != "self" and not x[0].startswith('__')])
builtins.GMS_JobConfig = JobConfig(job_ID, **kwargs)
return getattr(builtins, 'GMS_JobConfig')
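# A hypothetical usage sketch (job ID, host and CPU count are placeholder values; the
# import path is an assumption about where this module lives within the package):
#   from gms_preprocessing.config import set_config
#   cfg = set_config(123456, db_host='localhost', CPUs=8)
#   cfg is builtins.GMS_JobConfig  # -> True; the config is cached process-wide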
# user_options = \
# {
# "exec_mode": exec_mode,
# "db_host": db_host,
# "CPUs": CPUs,
# "allow_subMultiprocessing": allow_subMultiprocessing,
# "disable_exception_handler": disable_exception_handler,
# "log_level": log_level,
# "tiling_block_size_XY": tiling_block_size_XY,
# "is_test": is_test,
# "profiling": profiling,
# "benchmark_global": benchmark_global,
# "paths": {
# # "path_fileserver": path_fileserver,
# "path_archive": path_archive,
# "path_procdata_scenes": path_procdata_scenes,
# "path_procdata_MGRS": path_procdata_MGRS,
# # "path_tempdir": path_tempdir,
# # "path_benchmarks": path_benchmarks,
# # "path_job_logs": path_job_logs,
# # "path_spatIdxSrv": path_spatIdxSrv,
# # "path_ac_tables": path_ac_tables,
# # "path_SNR_models": path_SNR_models,
# # "path_SRFs": path_SRFs,
# # "path_dem_proc_srtm_90m": path_dem_proc_srtm_90m,
# # "path_earthSunDist": path_earthSunDist,
# # "path_solar_irr": path_solar_irr,
# # "path_cloud_classif": path_cloud_classif,
# # "path_ECMWF_db": path_ECMWF_db
# },
#
# "processors": {
# "general_cfg": {
# "skip_thermal": skip_thermal,
# "skip_pan": skip_pan,
# "sort_bands_by_cwl": sort_bands_by_cwl,
# "conversion_type_optical": conversion_type_optical,
# "conversion_type_thermal": conversion_type_thermal,
# "scale_factor_TOARef": scale_factor_TOARef,
# "scale_factor_BOARef": scale_factor_BOARef,
# },
# "L1A_P": {
# "run_processor": exec_L1AP[0],
# "write_output": exec_L1AP[1],
# "delete_output": exec_L1AP[2],
# "SZA_SAA_calculation_accurracy": SZA_SAA_calculation_accurracy,
# "export_VZA_SZA_SAA_RAA_stats": export_VZA_SZA_SAA_RAA_stats
#
# },
# "L1B_P": {
# "run_processor": exec_L1BP[0],
# "write_output": exec_L1BP[1],
# "delete_output": exec_L1BP[2],
# "skip_coreg": skip_coreg,
# },
# "L1C_P": {
# "run_processor": exec_L1CP[0],
# "write_output": exec_L1CP[1],
# "delete_output": exec_L1CP[2],
# "cloud_masking_algorithm": {
# "Landsat-4": "FMASK",
# "Landsat-5": "FMASK",
# "Landsat-7": "FMASK",
# "Landsat-8": "FMASK",
# "Sentinel-2A": "SICOR",
# "Sentinel-2B": "SICOR"
# },
# "export_L1C_obj_dumps": export_L1C_obj_dumps,
# "scale_factor_errors_ac": 255,
# "auto_download_ecmwf": auto_download_ecmwf
# },
# "L2A_P": {
# "run_processor": exec_L2AP[0],
# "write_output": exec_L2AP[1],
# "delete_output": exec_L2AP[2],
# },
# "L2B_P": {
# "run_processor": exec_L2BP[0],
# "write_output": exec_L2BP[1],
# "delete_output": exec_L2BP[2],
# },
# "L2C_P": {
# "run_processor": exec_L2CP[0],
# "write_output": exec_L2CP[1],
# "delete_output": exec_L2CP[2],
# }
# },
# "usecase": {
# "virtual_sensor_id": virtual_sensor_id,
# "virtual_sensor_name": virtual_sensor_name,
# "datasetid_spatial_ref": datasetid_spatial_ref,
# "datasetid_spectral_ref": datasetid_spectral_ref,
# "target_CWL": [
#
# ], /*list of central wavelength positions of target sensor. Empty list means: use WebApp input.*/
# "target_FWHM": [
#
# ], /*list of band widths of target sensor. Empty list means: use WebApp input.*/
# "target_gsd": [
#
# ], /*X/Y pixel size of target sensor as list with two float/integer values*/
# "target_epsg_code": "None", /*target projection as EPSG code. "None": use projection of input data.*/
# "spatial_ref_gridx": [
#
# ], /*target sensor x-coordinate-grid. e.g. [15, 45]*/
# "spatial_ref_gridy": [
#
# ], /*target sensor y-coordinate-grid. e.g. [15, 45]*/
# "align_coord_grids": true, /*allows to force deactivation of image resampling*/
# "match_gsd": true
# }
# }
#
# }
#
class JobConfig(object):
def __init__(self, ID, db_host='localhost', **user_opts):
"""Create a job configuration
Workflow:
# 0. Environment
# 1. The JSON can come from two places: from a console command or from the database
# - for a console command: GMS_JOB.from_... must write the default options to the DB
# => so first create the JobConfig based on the JSON
# 2. then override it with user-defined parameters (either init parameters or DB settings set via the webapp)
:param ID: job ID of the job to be executed, e.g. 123456 (must be present in database)
:param db_host: host name of the server that runs the postgreSQL database
"""
# privates
self._DB_job_record = None # type: GMS_JOB
self._DB_config_table = None # type: dict
self._user_opts_defaults = None
# fixed attributes
# possible values: 'pending', 'running', 'canceled', 'failed', 'finished_with_warnings',
# 'finished_with_errors', 'finished'
self.status = 'pending'
self.start_time = datetime.datetime.now()
self.end_time = None
self.computation_time = None
self.hostname = socket.gethostname()
#######################
# POPULATE PARAMETERS #
#######################
# args
self.ID = ID
self.db_host = db_host
self.user_opts = user_opts
# database connection
self.conn_database = "dbname='geomultisens' user='gmsdb' password='gmsdb' host='%s' connect_timeout=3" \
% self.db_host
# get validated options dict from JSON-options
json_opts = self.get_json_opts_from_db(validate=True)
gp = self.get_parameter
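# NOTE: each parameter below is resolved by get_parameter() with the precedence
# user options (set_config arguments) > WebUI/database job record > JSON options > fallback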
###################
# general options #
###################
self.exec_mode = \
gp('exec_mode', json_opts['exec_mode'])
self.CPUs = \
gp('CPUs', json_opts['CPUs'], fallback=multiprocessing.cpu_count())
self.allow_subMultiprocessing = \
gp('allow_subMultiprocessing', json_opts['allow_subMultiprocessing'])
self.disable_exception_handler = \
gp('disable_exception_handler', json_opts['disable_exception_handler'])
self.log_level = \
gp('log_level', json_opts['log_level'])
self.tiling_block_size_XY = \
gp('tiling_block_size_XY', json_opts['tiling_block_size_XY'])
self.is_test = \
gp('is_test', json_opts['is_test'])
self.profiling = \
gp('profiling', json_opts['profiling'])
self.benchmark_global = \
gp('benchmark_global', json_opts['benchmark_global'])
#########
# paths #
#########
json_paths = json_opts['paths'] # type: dict
self.path_spatIdxSrv = self.DB_config_table['path_spatial_index_mediator_server']
self.path_tempdir = self.DB_config_table['path_tempdir']
self.path_ac_tables = self.DB_config_table['path_ac_tables']
self.path_SNR_models = self.DB_config_table['path_SNR_models']
self.path_dem_proc_srtm_90m = self.DB_config_table['path_dem_proc_srtm_90m']
if not self.is_test:
# normal mode
self.path_fileserver = self.DB_config_table['path_data_root']
self.path_archive = \
gp('path_archive', json_paths['path_archive'],
fallback=self.joinP(self.path_fileserver, self.DB_config_table['foldername_download']))
self.path_procdata_scenes = \
gp('path_procdata_scenes', json_paths['path_procdata_scenes'],
fallback=self.joinP(self.path_fileserver, self.DB_config_table['foldername_procdata_scenes']))
self.path_procdata_MGRS = \
gp('path_procdata_MGRS', json_paths['path_procdata_MGRS'],
fallback=self.joinP(self.path_fileserver, self.DB_config_table['foldername_procdata_MGRS']))
self.path_earthSunDist = self.DB_config_table['path_earthSunDist']
self.path_SRFs = self.DB_config_table['path_SRFs']
self.path_cloud_classif = self.DB_config_table['path_cloud_classif']
self.path_solar_irr = self.DB_config_table['path_solar_irr']
self.path_ECMWF_db = self.DB_config_table['path_ECMWF_db']
self.path_benchmarks = \
gp('path_benchmarks', json_paths['path_benchmarks'],
fallback=self.DB_config_table['path_benchmarks'])
self.path_job_logs = \
gp('path_job_logs', json_paths['path_job_logs'], fallback=self.DB_config_table['path_job_logs'])
else:
# software test mode, the repository should be self-contained -> use only relative paths
self.path_fileserver = self.absP('../tests/data/')
self.path_archive = self.absP('../tests/data/archive_data/')
self.path_procdata_scenes = self.absP('../tests/data/output_scenes/')
self.path_procdata_MGRS = self.absP('../tests/data/output_mgrs_tiles/')
self.path_earthSunDist = self.absP('./database/earth_sun_distance/Earth_Sun_distances_per_day_edited.csv')
self.path_SRFs = self.absP('./database/srf/')
self.path_cloud_classif = self.absP('./database/cloud_classifier/')
self.path_solar_irr = self.absP(
'./database/solar_irradiance/SUNp1fontenla__350-2500nm_@0.1nm_converted.txt')
self.path_ECMWF_db = self.absP('../tests/data/processed_ECMWF/')
self.path_benchmarks = self.absP('./benchmarks/')
self.path_job_logs = self.absP('./logs/job_logs/')
###########################
# processor configuration #
###########################
json_processors = json_opts['processors'] # type: dict
# general_opts
self.skip_thermal = \
gp('skip_thermal', json_processors['general_opts']['skip_thermal'])
self.skip_pan = \
gp('skip_pan', json_processors['general_opts']['skip_pan'])
self.sort_bands_by_cwl = \
gp('sort_bands_by_cwl', json_processors['general_opts']['sort_bands_by_cwl'])
self.conversion_type_optical = \
gp('conversion_type_optical', json_processors['general_opts']['conversion_type_optical'])
self.conversion_type_thermal = \
gp('conversion_type_thermal', json_processors['general_opts']['conversion_type_thermal'])
self.scale_factor_TOARef = \
gp('scale_factor_TOARef', json_processors['general_opts']['scale_factor_TOARef'])
self.scale_factor_BOARef = \
gp('scale_factor_BOARef', json_processors['general_opts']['scale_factor_BOARef'])
# processor specific opts
# L1A
self.exec_L1AP = gp('exec_L1AP', [
json_processors['L1A_P']['run_processor'],
json_processors['L1A_P']['write_output'],
json_processors['L1A_P']['delete_output']])
self.SZA_SAA_calculation_accurracy = \
gp('SZA_SAA_calculation_accurracy', json_processors['L1A_P']['SZA_SAA_calculation_accurracy'])
self.export_VZA_SZA_SAA_RAA_stats = \
gp('export_VZA_SZA_SAA_RAA_stats', json_processors['L1A_P']['export_VZA_SZA_SAA_RAA_stats'])
# L1B
self.exec_L1BP = gp('exec_L1BP', [
json_processors['L1B_P']['run_processor'],
json_processors['L1B_P']['write_output'],
json_processors['L1B_P']['delete_output']])
self.skip_coreg = gp('skip_coreg', json_processors['L1B_P']['skip_coreg'])
# L1C
self.exec_L1CP = gp('exec_L1CP', [
json_processors['L1C_P']['run_processor'],
json_processors['L1C_P']['write_output'],
json_processors['L1C_P']['delete_output']])
self.cloud_masking_algorithm = \
gp('cloud_masking_algorithm', json_processors['L1C_P']['cloud_masking_algorithm'])
self.export_L1C_obj_dumps = \
gp('export_L1C_obj_dumps', json_processors['L1C_P']['export_L1C_obj_dumps'])
self.scale_factor_errors_ac = \
gp('scale_factor_errors_ac', json_processors['L1C_P']['scale_factor_errors_ac'])
self.auto_download_ecmwf = \
gp('auto_download_ecmwf', json_processors['L1C_P']['auto_download_ecmwf'])
# L2A
self.exec_L2AP = gp('exec_L2AP', [
json_processors['L2A_P']['run_processor'],
json_processors['L2A_P']['write_output'],
json_processors['L2A_P']['delete_output']])
self.align_coord_grids = gp('align_coord_grids', json_processors['L2A_P']['align_coord_grids'])
self.match_gsd = gp('match_gsd', json_processors['L2A_P']['match_gsd'])
# L2B
self.exec_L2BP = gp('exec_L2BP', [
json_processors['L2B_P']['run_processor'],
json_processors['L2B_P']['write_output'],
json_processors['L2B_P']['delete_output']])
# L2C
self.exec_L2CP = gp('exec_L2CP', [
json_processors['L2C_P']['run_processor'],
json_processors['L2C_P']['write_output'],
json_processors['L2C_P']['delete_output']])
################################
# target sensor specifications #
################################
self.virtual_sensor_id = gp('virtual_sensor_id', attr_db_job_record='virtualsensorid')
# FIXME Why is datasetid_spatial_ref missing in virtual_sensors table
self.datasetid_spatial_ref = gp('datasetid_spatial_ref', attr_db_job_record='datasetid_spatial_ref')
VSSpecs = self.get_virtual_sensor_specs()
self.virtual_sensor_name = VSSpecs['name']
# spectral specifications
self.datasetid_spectral_ref = VSSpecs['spectral_characteristics_datasetid']
self.target_CWL = VSSpecs['wavelengths_pos']
self.target_FWHM = VSSpecs['band_width']
# spatial specifications
target_gsd_tmp = VSSpecs['spatial_resolution'] # table features only 1 value for X/Y-dims FIXME user inputs?
self.target_gsd = xgsd, ygsd = \
[target_gsd_tmp]*2 if isinstance(target_gsd_tmp, (int, float)) else target_gsd_tmp
self.EPSG = VSSpecs['projection_epsg']
# FIXME values in case user defines only Landsat?
self.spatial_ref_gridx = np.arange(xgsd / 2., xgsd / 2. + 2 * xgsd, xgsd) # e.g. [15, 45]
self.spatial_ref_gridy = np.arange(ygsd / 2., ygsd / 2. + 2 * ygsd, ygsd)
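# worked example, assuming xgsd = 30 m:
# np.arange(30 / 2., 30 / 2. + 2 * 30, 30) -> array([15., 45.]), i.e. the first two
# pixel-center coordinates of the target grid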
@property
def user_opts_defaults(self):
if not self._user_opts_defaults:
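# getfullargspec() pairs the trailing len(defaults) argument names of set_config
# with their default values, yielding a {parameter_name: default_value} mapping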
a = getfullargspec(set_config)
self._user_opts_defaults = dict(zip(a.args[-len(a.defaults):], a.defaults))
return self._user_opts_defaults
def get_parameter(self, key_user_opts, val_json=None, attr_db_job_record='', fallback=None):
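"""Resolve a single configuration parameter with the following precedence (highest first):
explicitly passed user options, the WebUI/database job record, the JSON options,
and finally the given fallback (or the corresponding set_config default).
"""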
# 1. JobConfig parameters: parameters that are directly passed to JobConfig
if key_user_opts in self.user_opts and self.user_opts[key_user_opts] != self.user_opts_defaults[key_user_opts]:
return self.user_opts[key_user_opts]
# 2. WebUI parameters: parameters that have been defined via WebUI
if attr_db_job_record:
return getattr(self.DB_job_record, attr_db_job_record)
# 3. JSON parameters: parameters that have been defined via JSON Input (command line or advanced UI params)
if val_json:
return val_json
# fallback: if nothing has been returned until here
if not fallback and key_user_opts in self.user_opts_defaults:
fallback = self.user_opts_defaults[key_user_opts]
return fallback
@property
def DB_job_record(self):
# type: () -> GMS_JOB
if not self._DB_job_record:
# check if job ID exists in database
from .misc.database_tools import GMS_JOB
self._DB_job_record = GMS_JOB(self.conn_database).from_job_ID(self.ID)
return self._DB_job_record
@property
def DB_config_table(self):
# type: () -> dict
"""Returns the content of the config table of the postgreSQL database as dictionary."""
if not self._DB_config_table:
from .misc.database_tools import get_info_from_postgreSQLdb
db_cfg = dict(get_info_from_postgreSQLdb(self.conn_database, 'config', ['key', 'value']))
# convert relative to absolute paths
self._DB_config_table = {k: self.absP(v) if k.startswith('path_') and v.startswith('./') else v
for k, v in db_cfg.items()}
return self._DB_config_table
def get_virtual_sensor_specs(self):
# type: () -> dict
"""Returns the content of the virtual_sensors table of the postgreSQL database as dictionary."""
from .misc.database_tools import get_info_from_postgreSQLdb
# column spectral_characteristics_datasetid is not used later because it's given by jobs.datasetid_spatial_ref
cols2read = ['name', 'projection_epsg', 'spatial_resolution', 'spectral_characteristics_datasetid',
'wavelengths_pos', 'band_width']
res = get_info_from_postgreSQLdb(self.conn_database, 'virtual_sensors',
cols2read, {'id': self.virtual_sensor_id})[0]
VSSpecs = dict()
for i, col in enumerate(cols2read):
val = res[i]
if col == 'spectral_characteristics_datasetid' and val == -1: # nodata value
val = None
VSSpecs[col] = val
return VSSpecs
def get_json_opts_from_db(self, validate=True):
"""Get a dictionary of GMS config parameters according to the jobs table of the database.
NOTE: Reads the default options from options_default.json and updates the values with those from database.
"""
# read options_default.json
default_options = get_options(os.path.join(os.path.dirname(pkgutil.get_loader("gms_preprocessing").path),
'options_default.json'), validation=validate)
# update default options with those from DB
db_options = json_to_python(json.loads(jsmin(self.DB_job_record.analysis_parameter))) # type: dict
default_options.update(db_options)
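# NOTE: dict.update() only merges the top level - a nested block such as 'processors'
# coming from the database replaces the corresponding default block as a whole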
if validate:
GMSValidator().validate(default_options)
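# GMSValidator is not defined in this snippet; presumably a cerberus.Validator subclass
# (Validator is imported above) that encapsulates the GMS options schema. Note that
# cerberus' validate() returns a bool instead of raising, so the result may need checking.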
json_options = default_options
return json_options
@staticmethod
def absP(relP):
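"""Return the absolute path corresponding to a path given relative to this module's directory."""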
return os.path.abspath(os.path.join(os.path.dirname(__file__), relP))
@staticmethod
def joinP(*items):
return os.path.join(*items)
def get_init_argskwargs(self, ignore=("logger",)):
"""
Return a dictionary that splits the calling function's arguments into its positional
arguments ('args') and its keyword arguments ('kwargs').
"""
posname, kwname, argskwargs = getargvalues(stack()[1][0])[-3:]
argskwargs.update(argskwargs.pop(kwname, {}))
argskwargs = {k: v for k, v in argskwargs.items() if k not in ignore and k != 'self' and not k.startswith('__')}
sig = signature(self.__init__)
argsnames = [k for k in sig.parameters if sig.parameters[k].default == _empty]
return {'args': {k: v for k, v in argskwargs.items() if k in argsnames},
'kwargs': {k: v for k, v in argskwargs.items() if k not in argsnames}}
class Job(object):
def __init__(self, ID, exec_mode='Python', db_host='localhost', exec_L1AP=None, exec_L1BP=None,
exec_L1CP=None, exec_L2AP=None, exec_L2BP=None, exec_L2CP=None, CPUs=None,
allow_subMultiprocessing=True, disable_exception_handler=True, log_level='INFO',
tiling_block_size_XY=(2048, 2048), is_test=False, profiling=False, benchmark_global=False,
bench_cloudMask=False, path_procdata_scenes=None, path_procdata_MGRS=None, path_archive=None):
path_procdata_scenes=None, path_procdata_MGRS=None, path_archive=None):
"""Create a job configuration
......@@ -102,7 +609,6 @@ class Job(object):
:param profiling: enable/disable code profiling (default: False)
:param benchmark_global:
enable/disable benchmark of the whole processing pipeline
:param bench_cloudMask: enable/disable benchmark of the cloud mask generator module
:param path_procdata_scenes:
output path to store processed scenes
:param path_procdata_MGRS:
......@@ -140,18 +646,6 @@ class Job(object):
self.is_test = is_test
self.profiling = profiling
self.benchmark_global = benchmark_global
self.bench_cloudMask = bench_cloudMask
self.cloud_masking_algorithm = {'Landsat-4': 'FMASK',
'Landsat-5': 'FMASK',
'Landsat-7': 'FMASK',
'Landsat-8': 'FMASK',
'Sentinel-2A': 'SICOR',
'Sentinel-2B': 'SICOR',
} # 'FMASK', 'Classical Bayesian', 'SICOR'
self.SZA_SAA_calculation_accurracy = 'coarse' # hardcoded
self.export_VZA_SZA_SAA_RAA_stats = True # hardcoded
self.export_L1C_obj_dumps = False # hardcoded
self.auto_download_ecmwf = False
# fixed attributes
# possible values: 'pending', 'running', 'canceled', 'failed', 'finished_with_warnings',
......@@ -180,7 +674,6 @@ class Job(object):
self.path_cloud_classif = self.DB_config['path_cloud_classif']
self.path_solar_irr = self.DB_config['path_solar_irr']
self.path_ECMWF_db = self.DB_config['path_ECMWF_db']
self.path_testing = self.DB_config['path_testing']
self.path_benchmarks = self.DB_config['path_benchmarks']
self.path_job_logs = self.DB_config['path_job_logs']
else:
......@@ -195,7 +688,6 @@ class Job(object):
self.path_solar_irr = self.absP(