Commit b15bca5a authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

Added test Test_ProcessContinuing_CompletePipeline.

Former-commit-id: 949deeab
Former-commit-id: 2b3c5df9
parent a3a08ef3
......@@ -177,9 +177,6 @@ class JobConfig(object):
self.ID = ID
self.kwargs = user_opts
# input validation
self.validate_exec_configs()
# database connection
self.db_host = user_opts['db_host']
self.conn_database = get_conn_database(hostname=self.db_host)
......@@ -415,6 +412,8 @@ class JobConfig(object):
############
# validate #
############
self.validate_exec_configs()
GMSValidator(allow_unknown=True, schema=gms_schema_config_output).validate(self.to_dict())
@property
......
......@@ -443,8 +443,9 @@ class process_controller(object):
# group dataset dicts by sceneid
dataset_groups = HLP_F.group_dicts_by_key(self.config.data_list, key='scene_ID')
# RUN PREPROCESSING
from .pipeline import run_complete_preprocessing
GMS_objs = imap_unordered(run_complete_preprocessing, dataset_groups)
GMS_objs = imap_unordered(run_complete_preprocessing, dataset_groups, flatten_output=True)
# separate results into successful and failed objects
def assign_attr(tgt_procL):
......
......@@ -198,6 +198,10 @@ class BaseTestCases:
def test_run_all_processors(self):
    """Run the complete preprocessing chain and validate the produced outputs."""
    pc = self.PC
    pc.run_all_processors()

    # the pipeline must yield a list of newly created L2C objects ...
    self.assertIsInstance(pc.L2C_newObjects, list)

    # ... plus non-empty detailed and quick summary DataFrames
    for summary in (pc.summary_detailed, pc.summary_quick):
        self.assertIsInstance(summary, pandas.DataFrame)
        self.assertFalse(summary.empty)
###################################################################################
# Test cases 1-9: Test_<Satellite-Dataset>_<PreCollection or Collection>Data
......@@ -344,6 +348,58 @@ class Test_MultipleDatasetsInOneJob_CompletePipeline(BaseTestCases.TestCompleteP
# PC = cls.PC
class Test_ProcessContinuing_CompletePipeline(unittest.TestCase):
    """Test that a stopped preprocessing job can be continued by a later job.

    First runs a job on a single Landsat-5 (pre-collection) scene with all
    processing levels beyond L1A disabled, then starts a second job on the
    same scene that continues processing from the existing L1A output.
    """

    PC = None  # process_controller instance, set by create_job()

    @classmethod
    def tearDownClass(cls):
        # remove all processing results written by this job from the database
        cls.PC.config.DB_job_record.delete_procdata_of_entire_job(force=True)

    @classmethod
    def validate_db_entry(cls, filename):
        """Assert that the given scene filename has a valid integer ID in the 'scenes' table."""
        sceneID_res = get_info_from_postgreSQLdb(cls.PC.config.conn_database, 'scenes', ['id'],
                                                 {'filename': filename})
        assert sceneID_res and isinstance(sceneID_res[0][0], int), 'Invalid database entry.'

    @classmethod
    def create_job(cls, jobID, config):
        """Instantiate the process controller for the given job and validate its datasets."""
        cls.PC = process_controller(jobID, **config)
        cls.PC.logger.info('Execution of entire GeoMultiSens pre-processing chain started for job ID %s...'
                           % cls.PC.config.ID)

        # use a plain loop instead of a side-effect list comprehension
        for ds in cls.PC.config.data_list:
            cls.validate_db_entry(ds['filename'])

    def setUp(self):
        # copy the shared module-level kwargs so update() below does not
        # mutate the configuration used by the other test classes
        self.cfg_kw = dict(job_config_kwargs)
        self.cfg_kw.update(dict(
            reset_status=True,
            exec_L1BP=[False, False, False],
            exec_L1CP=[False, False, False],
            exec_L2AP=[False, False, False],
            exec_L2BP=[False, False, False],
            exec_L2CP=[False, False, False]
        ))

        # produce L1A data and stop processing there
        self.create_job(26186263, self.cfg_kw)  # 1x L5 pre-collection
        self.PC.run_all_processors()

    def test_continue_from_L1A(self):
        # create a new job and try to continue from L1A
        # (copy so the instance-level config from setUp stays untouched)
        cfg_kw = dict(self.cfg_kw)
        cfg_kw.update(dict(
            exec_L1BP=[True, True, False],
            delete_old_output=False
        ))
        self.create_job(26186263, cfg_kw)  # 1x L5 pre-collection
        self.PC.run_all_processors()
###################################################################################
# Summarizing the information regarding the test datasets.
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment