Commit e3b5ecd4 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

Added first TestCase, updated run-file, moved METADATA, gms_object and dataset...

Added first TestCase, updated run-file, moved METADATA, gms_object and dataset to new submodule 'model'
bin.run_gms:
- added run_from_filenames() dummy
geomultisens.model.METADATA:
- get_EarthSunDistance(): fixed unclosed File warning
tests.data:
- added LC80010702013141LGN01.tar.gz
- test_geomultisens:
    - added class BaseTestCases
    - added class Test_Landsat8_PreCollectionData
updated __version__
updated __versionalias__
Former-commit-id: 2b978035
Former-commit-id: e41b2f4a
parent 3246b0c9
......@@ -66,6 +66,24 @@ def run_from_entityids(args):
# set up process controller instance
PC = process_controller(args.ID, parallelization_level='scenes')
# PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
# PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
# run the job
PC.run_all_processors()
def run_from_filenames(args):
# find out sceneIDs belonging to filenames
# create a new job from entity IDs
# TODO
# set up process controller instance
PC = process_controller(args.ID, parallelization_level='scenes')
# PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
......
......@@ -2,8 +2,8 @@
__author__ = """Daniel Scheffler"""
__email__ = 'daniel.scheffler@gfz-potsdam.de'
__version__ = '0.1.1'
__versionalias__ = '20170524.01'
__version__ = '0.2.0'
__versionalias__ = '20170530.01'
from . import algorithms
......
......@@ -18,20 +18,20 @@ except ImportError:
SD = None
from geoarray import GeoArray
from py_tools_ds.ptds.geo.coord_calc import calc_FullDataset_corner_positions
from py_tools_ds.ptds.geo.coord_trafo import pixelToLatLon
from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform
from py_tools_ds.ptds.geo.projection import EPSG2WKT
from ..config import GMS_config as CFG
from . import GEOPROCESSING as GEOP
from . import METADATA as META
from . import gms_cloud_classifier as CLD_P # Cloud Processor
from ..io import Output_writer as OUT_W
from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG
from py_tools_ds.ptds.geo.coord_calc import calc_FullDataset_corner_positions
from py_tools_ds.ptds.geo.coord_trafo import pixelToLatLon
from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform
from py_tools_ds.ptds.geo.projection import EPSG2WKT
from ..config import GMS_config as CFG
from . import GEOPROCESSING as GEOP
from . import gms_cloud_classifier as CLD_P # Cloud Processor
from ..io import Output_writer as OUT_W
from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG
from ..misc.definition_dicts import get_outFillZeroSaturated, is_dataset_provided_as_fullScene
from .gms_object import GMS_object
from ..model.gms_object import GMS_object
from ..model import METADATA as META
############################# L1A object ######################################
......
......@@ -2,7 +2,7 @@
###############################################################################
#
# Level 1C Processor:
#
#
# Performed operations:
# Atmospheric correction of TOA-reflectance data:
#
......@@ -28,7 +28,7 @@ from py_tools_ds.ptds.geo.map_info import mapinfo2geotransform
from ..config import GMS_config as CFG
from . import GEOPROCESSING as GEOP
from .L1B_P import L1B_object
from .METADATA import get_LayerBandsAssignment
from ..model.METADATA import get_LayerBandsAssignment
from ..misc.definition_dicts import get_outFillZeroSaturated, proc_chain
from ..io.Input_reader import SRF
......@@ -766,7 +766,7 @@ class AtmCorr(object):
'(ENTITY ID %s) HAS NOT BEEN ATMOSPHERICALLY CORRECTED! Error message was: \n%s\n'
%(self.inObjs[0].scene_ID, self.inObjs[0].entity_ID, repr(e)) )
# TODO include that in the job summary!
#raise
return list(self.inObjs)
......@@ -938,4 +938,4 @@ class AtmCorr(object):
else:
self.logger.warning("Atmospheric correction did not provide a 'mask_confidence_array' array for "
"attribute 'mask_clouds. GMS_object.mask_clouds_confidence kept None.")
\ No newline at end of file
"attribute 'mask_clouds. GMS_object.mask_clouds_confidence kept None.")
###############################################################################
#
# algorithms/__init__.py - This file is part of the GeoMultiSens package.
#
#
# Written by Daniel Scheffler
# GFZ Potsdam, Section 1.4
#
###############################################################################
from . import GEOPROCESSING
from . import METADATA
from . import gms_cloud_classifier
from . import gms_object
from . import L1A_P
from . import L1B_P
from . import L1C_P
......@@ -19,9 +17,7 @@ from . import L2B_P
from . import L2C_P
__all__=['GEOPROCESSING',
'METADATA',
'gms_cloud_classifier',
'gms_object',
'L1A_P',
'L1B_P',
'L1C_P',
......
......@@ -316,7 +316,7 @@ class Usecase:
def get_entity_IDs_within_AOI(self): # called in console mode
from .algorithms.METADATA import LandsatID2dataset, get_sensormode
from .model.METADATA import LandsatID2dataset, get_sensormode
# parse cli arguments
sys.stderr.write("No scene ids from CLI received. Using old data_list.\n")
......@@ -417,7 +417,7 @@ class Usecase:
('entity_ID', 'LC81930242015036LGN00'), ('filename', 'LC81930242015036LGN00.tar.gz'),
('sensormode', 'M'), ('logger', None)]), ...]
"""
from .algorithms.METADATA import get_sensormode
from .model.METADATA import get_sensormode
data_list = []
with psycopg2.connect(self._job.conn_database) as conn:
with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
......
......@@ -28,11 +28,11 @@ import spectral
from spectral.io import envi as envi
from pandas import DataFrame, Series
from ..config import GMS_config as CFG
from ..algorithms import METADATA as META
from ..misc import database_tools as DB_T
from ..misc import path_generator as PG
from ..misc import helper_functions as HLP_F
from ..config import GMS_config as CFG
from ..model import METADATA as META
from ..misc import database_tools as DB_T
from ..misc import path_generator as PG
from ..misc import helper_functions as HLP_F
from ..misc.logging import GMS_logger
from ..misc.database_tools import get_overlapping_scenes_from_postgreSQLdb
......
......@@ -8,7 +8,7 @@ import sys
import traceback
import warnings
from ..algorithms.gms_object import failed_GMS_object
from ..model.gms_object import failed_GMS_object
from ..config import GMS_config as CFG
from ..misc import database_tools as DB_T
from .definition_dicts import db_jobs_statistics_def
......
......@@ -36,15 +36,15 @@ except ImportError: import osr
from py_tools_ds.ptds.geo.map_info import geotransform2mapinfo
from py_tools_ds.ptds.geo.projection import WKT2EPSG
from ..config import GMS_config as CFG
from ..io import envifilehandling as ef
from ..io.Input_reader import open_specific_file_within_archive, Solar_Irradiance_reader, SRF_reader
from ..io.Output_writer import enviHdr_keyOrder
from . import GEOPROCESSING as GEOP
from ..misc import helper_functions as HLP_F
from ..misc import database_tools as DB_T
from ..misc.path_generator import path_generator, get_path_ac_options
from ..misc.definition_dicts import get_GMS_sensorcode
from geomultisens.config import GMS_config as CFG
from geomultisens.io import envifilehandling as ef
from geomultisens.io.Input_reader import open_specific_file_within_archive, Solar_Irradiance_reader, SRF_reader
from geomultisens.io.Output_writer import enviHdr_keyOrder
from geomultisens.algorithms import GEOPROCESSING as GEOP
from geomultisens.misc import helper_functions as HLP_F
from geomultisens.misc import database_tools as DB_T
from geomultisens.misc.path_generator import path_generator, get_path_ac_options
from geomultisens.misc.definition_dicts import get_GMS_sensorcode
from S2SCAPEM.options import get_options as get_ac_options
......@@ -1215,11 +1215,11 @@ class METADATA(object):
xml_Scene_root = ET.fromstring(xml_SC_str_)
xml_GR_root = ET.fromstring(xml_GR_str_)
self.Metafile = self.Metafile + ", " + Metafile_
# define Sentinel 2A metadata (hard coded)
self.Sensor = "MSI"
# extract metadata from xml_Scene_root
namespace = "https://psd-13.sentinel2.eo.esa.int/PSD/User_Product_Level-1C.xsd"
self.EntityID = xml_Scene_root.find(".//Datatake").attrib['datatakeIdentifier'] #FIXME tileID (Granule) or scene ID???
......@@ -1237,24 +1237,24 @@ class METADATA(object):
self.Gains = [float(ele.text) for ele in xml_Scene_root.findall(".//PHYSICAL_GAINS")] #ATTENTION Gains are only provided for 12 bands! I don't know why.
self.Gains = self.Gains if len(self.Gains)==13 else [1]+self.Gains
# FIXME assuming that the first band at 443nm has been left out here IS POSSIBLY WRONG (could also be band 8A or band 9 (water vapour))
# Flight direction
Fdir={'ASCENDING':"Ascending", 'DESCENDING':"Descending"}
self.additional.append(["Flight Direction", Fdir[xml_Scene_root.find(".//SENSING_ORBIT_DIRECTION").text]])
self.ProcLCode = xml_Scene_root.find(".//PROCESSING_LEVEL").text
# extract metadata from xml_GR_root
namespace="https://psd-12.sentinel2.eo.esa.int/PSD/S2_PDI_Level-1C_Tile_Metadata.xsd"
# set self.AcqDateTime as well as self.AcqDate and self.AcqTime
self.AcqDateTime = iso8601.parse_date(xml_GR_root.find(".//SENSING_TIME").text)
#SunAngles
self.SunElevation = 90-float(xml_GR_root.find(".//Mean_Sun_Angle/ZENITH_ANGLE") .text) # mean angle of granule
self.SunAzimuth = float(xml_GR_root.find(".//Mean_Sun_Angle/AZIMUTH_ANGLE").text) # mean angle of granule
# coordinate system
geo_codings = HLP_F.find_in_xml_root(namespace, xml_GR_root, 'Geometric_Info', "Tile_Geocoding")
self.CS_EPSG = int(geo_codings.find(".//HORIZONTAL_CS_CODE").text.split(":")[1])
......@@ -1278,10 +1278,10 @@ class METADATA(object):
LRX = ss_sub['ULX']+ss_sub['NCOLS']*ss_sub['XDIM']
LRY = ss_sub['ULY']+ss_sub['NROWS']*ss_sub['YDIM']
self.CornerTieP_UTM = [(ss_sub['ULX'],ss_sub['ULY']),(LRX,ss_sub['ULY']),(ss_sub['ULX'],LRY),(LRX,LRY)] #(x,y) for UL,UR,LL,LR
#geometricResolution
self.gResolution = subsytem_Res_dic[self.Subsystem]
# determine metadata from extracted metadata values
self.EarthSunDist = self.get_EarthSunDistance(self.AcqDate)
......@@ -1557,11 +1557,12 @@ class METADATA(object):
"acquisition date could not be read from metadata.")
return 1.0
EA_dist_f = open(CFG.job.path_earthSunDist, "r")
EA_dist_dict = {}
for line in EA_dist_f:
date, EA = [item.strip() for item in line.split(",")]
EA_dist_dict[date] = EA
with open(CFG.job.path_earthSunDist, "r") as EA_dist_f:
EA_dist_dict = {}
for line in EA_dist_f:
date, EA = [item.strip() for item in line.split(",")]
EA_dist_dict[date] = EA
return float(EA_dist_dict[acqDate])
......@@ -1938,4 +1939,4 @@ def get_sensormode(dataset):
assert SPOT_mode in ['J','X','XS','A','P','M'], 'Unknown SPOT sensor mode: %s' %SPOT_mode
return 'M' if SPOT_mode in ['J','X','XS'] else 'P'
else:
return 'M'
\ No newline at end of file
return 'M'
from . import gms_object
from . import METADATA
from . import dataset
__all__=['gms_object',
'METADATA',
'dataset']
__author__='Daniel Scheffler'
......@@ -20,7 +20,7 @@ from py_tools_ds.ptds.geo.coord_trafo import pixelToLatLon, pixelToMapYX, imXY2m
from py_tools_ds.ptds.geo.map_info import geotransform2mapinfo, mapinfo2geotransform
from ..misc.logging import GMS_logger as DatasetLogger
from ..algorithms.METADATA import METADATA, get_LayerBandsAssignment
from ..model.METADATA import METADATA, get_LayerBandsAssignment
from ..misc import path_generator as PG
from ..algorithms import GEOPROCESSING as GEOP
from ..io import Input_reader as INP_R
......@@ -641,4 +641,4 @@ class Dataset(object):
if hasattr(self, 'MetaObj') and self.MetaObj and hasattr(self.MetaObj, 'logger') and \
self.MetaObj.logger not in [None, 'not set']:
self.MetaObj.logger.close()
self.MetaObj.logger = None
\ No newline at end of file
self.MetaObj.logger = None
......@@ -24,31 +24,25 @@ try:
except ImportError:
import gdalnumeric
from geoarray import GeoArray
from py_tools_ds.ptds.geo.coord_grid import is_coord_grid_equal
from py_tools_ds.ptds.geo.map_info import geotransform2mapinfo, mapinfo2geotransform
from py_tools_ds.ptds.geo.coord_calc import calc_FullDataset_corner_positions
from py_tools_ds.ptds.geo.coord_trafo import pixelToLatLon, pixelToMapYX, imXY2mapXY
from ..misc.logging import GMS_logger
from ..misc.mgrs_tile import MGRS_tile
from .METADATA import METADATA, get_LayerBandsAssignment, get_dict_LayerOptTherm, metaDict_to_metaODict
from ..misc import path_generator as PG
from ..misc import database_tools as DB_T
from ..config import GMS_config as CFG
from . import GEOPROCESSING as GEOP
from ..io import Input_reader as INP_R
from ..io import Output_writer as OUT_W
from ..misc import helper_functions as HLP_F
from ..misc import definition_dicts as DEF_D
from ..model.dataset import Dataset
from ..misc.logging import GMS_logger as DatasetLogger
from py_tools_ds.ptds.geo.coord_calc import calc_FullDataset_corner_positions
from py_tools_ds.ptds.geo.coord_trafo import pixelToLatLon, pixelToMapYX
from S2SCAPEM.options import get_options as get_ac_options
from ..misc.logging import GMS_logger as DatasetLogger
from ..misc.mgrs_tile import MGRS_tile
from ..model.METADATA import METADATA, get_dict_LayerOptTherm, metaDict_to_metaODict
from ..model.dataset import Dataset
from ..misc import path_generator as PG
from ..misc import database_tools as DB_T
from ..config import GMS_config as CFG
from ..algorithms import GEOPROCESSING as GEOP
from ..io import Input_reader as INP_R
from ..io import Output_writer as OUT_W
from ..misc import helper_functions as HLP_F
from ..misc import definition_dicts as DEF_D
class GMS_object(Dataset):
......@@ -305,6 +299,7 @@ class GMS_object(Dataset):
opt_dict['RTFO'][key]['atm_tables_fn'] = PG.get_path_ac_table(key)
opt_dict['S2Image']['S2_MSI_granule_path'] = None # only a placeholder -> will always be None for GMS usage
opt_dict['cld_mask']['persistence_file'] = PG.get_path_cloud_class_obj(self.GMS_identifier)
opt_dict['cld_mask']['novelty_detector'] = None # FIXME update this after switching to SICOR
opt_dict['output'] = [] # outputs are not needed for GMS -> so
opt_dict['report']['report_path'] = os.path.join(self.pathGen.get_path_procdata(), '[TYPE]')
if 'uncertainties' in opt_dict:
......@@ -942,8 +937,8 @@ class GMS_object(Dataset):
if k=='MetaObj':
continue # make sure MetaObj getter is not called -> would delete meta_odict
elif isinstance(v, datetime.datetime):
dict2write[k] = v.strftime('%Y-%m-%d %H:%M:%S.%f%z')
elif isinstance(v, GMS_logger):
dict2write[k] = v.strftime('%Y-%m-%d %H:%M:%S.%f%z') # FIXME
elif isinstance(v, DatasetLogger):
if hasattr(v, 'handlers') and v.handlers[:]:
warnings.warn('Not properly closed logger at GMS_obj.logger pointing to %s.' % v.path_logfile)
dict2write[k] = 'not set'
......
......@@ -14,20 +14,20 @@ import signal
import re
import collections
from ..io import Output_writer as OUT_W
from ..io import Input_reader as INP_R
from ..misc import database_tools as DB_T
from ..misc import helper_functions as HLP_F
from ..misc import environment as ENV
from ..misc.path_generator import path_generator
from ..misc.logging import GMS_logger, shutdown_loggers
from ..algorithms.METADATA import get_LayerBandsAssignment
from ..algorithms import L1A_P, L1B_P, L1C_P, L2A_P, L2B_P, L2C_P
from ..algorithms.gms_object import failed_GMS_object
from .pipeline import (L1A_map, L1A_map_1, L1A_map_2, L1A_map_3, L1B_map, L1C_map,
from ..io import Output_writer as OUT_W
from ..io import Input_reader as INP_R
from ..misc import database_tools as DB_T
from ..misc import helper_functions as HLP_F
from ..misc import environment as ENV
from ..misc.path_generator import path_generator
from ..misc.logging import GMS_logger, shutdown_loggers
from ..algorithms import L1A_P, L1B_P, L1C_P, L2A_P, L2B_P, L2C_P
from ..model.METADATA import get_LayerBandsAssignment
from ..model.gms_object import failed_GMS_object
from .pipeline import (L1A_map, L1A_map_1, L1A_map_2, L1A_map_3, L1B_map, L1C_map,
L2A_map, L2B_map, L2C_map)
from ..config import set_config, GMS_config
from .multiproc import MAP
from ..config import set_config, GMS_config
from .multiproc import MAP
from ..misc.definition_dicts import proc_chain, db_jobs_statistics_def
......
......@@ -20,7 +20,7 @@ test_requirements = [
setup(
name='geomultisens',
version='0.1.1',
version='0.2.0',
description="GeoMultiSens - Scalable Multi-Sensor Analysis of Remote Sensing Data",
long_description=readme + '\n\n' + history,
author="Daniel Scheffler",
......
......@@ -11,18 +11,80 @@ Tests for `geomultisens` module.
import sys
import unittest
import socket
from geomultisens import geomultisens
from geomultisens import process_controller
from geomultisens.algorithms.L1A_P import L1A_object
from geomultisens.algorithms.L1B_P import L1B_object
from geomultisens.algorithms.L1C_P import L1C_object
from geomultisens.algorithms.L2A_P import L2A_object
from geomultisens.algorithms.L2B_P import L2B_object
from geomultisens.algorithms.L2C_P import L2C_object
from geomultisens.misc.database_tools import get_info_from_postgreSQLdb
class BaseTestCases:
class TestAll(unittest.TestCase):
PC = None # default
class TestGeomultisens(unittest.TestCase):
@classmethod
def tearDownClass(cls):
    # Delete all processing results the test job produced, so repeated test
    # runs start from a clean database/filesystem state (force=True skips
    # any confirmation). Assumes cls.PC was set by a subclass' setUpClass.
    cls.PC.DB_job_record.delete_procdata_of_entire_job(force=True)
def setUp(self):
    # No per-test setup needed; the expensive initialization is done once
    # per class in the subclass' setUpClass.
    pass
@classmethod
def validate_db_entry(cls, filename):
    # Assert that the scene with the given archive filename has a valid
    # database record, i.e. the 'scenes' table returns an integer scene ID.
    sceneID_res = get_info_from_postgreSQLdb(cls.PC.job.conn_database, 'scenes', ['id'], {'filename': filename})
    assert sceneID_res and isinstance(sceneID_res[0][0], int), 'Invalid database entry.'
def tearDown(self):
    # No per-test teardown needed; cleanup happens once in tearDownClass.
    pass
def test_000_something(self):
    # Placeholder test (runs first due to unittest's alphabetical ordering).
    # TODO: implement a real smoke test or remove.
    pass
def test_L1A_processing(self):
    # Run the L1A stage via the process controller (set up by the subclass'
    # setUpClass) and verify it returns a non-empty list of L1A_object.
    self.L1A_newObjects = self.PC.L1A_processing()
    self.assertIsInstance(self.L1A_newObjects, list)
    self.assertIsInstance(self.L1A_newObjects[0], L1A_object)
def test_L1B_processing(self):
    # Run the L1B stage and verify it returns a non-empty list of L1B_object.
    # NOTE(review): relies on test method ordering — presumably expects the
    # L1A stage to have run first; confirm inter-test dependency is intended.
    self.L1B_newObjects = self.PC.L1B_processing()
    self.assertIsInstance(self.L1B_newObjects, list)
    self.assertIsInstance(self.L1B_newObjects[0], L1B_object)
def test_L1C_processing(self):
    # Run the L1C stage and verify it returns a non-empty list of L1C_object.
    self.L1C_newObjects = self.PC.L1C_processing()
    self.assertIsInstance(self.L1C_newObjects, list)
    self.assertIsInstance(self.L1C_newObjects[0], L1C_object)
def test_L2A_processing(self):
    # Run the L2A stage and verify it returns a non-empty list of L2A_object.
    self.L2A_newObjects = self.PC.L2A_processing()
    self.assertIsInstance(self.L2A_newObjects, list)
    self.assertIsInstance(self.L2A_newObjects[0], L2A_object)
def test_L2B_processing(self):
    # Run the L2B stage and verify it returns a non-empty list of L2B_object.
    self.L2B_newObjects = self.PC.L2B_processing()
    self.assertIsInstance(self.L2B_newObjects, list)
    self.assertIsInstance(self.L2B_newObjects[0], L2B_object)
def test_L2C_processing(self):
    # Run the final L2C stage and verify it returns a non-empty list of
    # L2C_object.
    self.L2C_newObjects = self.PC.L2C_processing()
    self.assertIsInstance(self.L2C_newObjects, list)
    self.assertIsInstance(self.L2C_newObjects[0], L2C_object)
class Test_Landsat8_PreCollectionData(BaseTestCases.TestAll):
    """Integration tests running the processing chain on a Landsat-8 pre-collection scene.

    Only runnable on the host 'geoms', where the test database and the
    predefined test job (ID 26186196) exist.
    """

    @classmethod
    def setUpClass(cls):
        if socket.gethostname() == 'geoms':
            # set up the process controller instance for the predefined test job
            cls.PC = process_controller(26186196, parallelization_level='scenes')
            # check which datasets of the job are already locally available
            for ds in cls.PC.usecase.data_list:
                cls.PC.add_local_availability(ds)
        else:
            # BUGFIX: the exception was previously only instantiated
            # (`NotImplementedError()`), never raised, so the test silently
            # continued with cls.PC = None and failed later with an
            # unrelated AttributeError.
            raise NotImplementedError("This test job is only available on the host 'geoms'.")
        # verify that every dataset of the job has a valid database entry
        for ds in cls.PC.usecase.data_list:
            cls.validate_db_entry(ds['filename'])
if __name__ == '__main__':
unittest.main(argv=['first-arg-is-ignored'],exit=False, verbosity=2)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment