Commit 4aa141dd authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

Merge branch 'master' into bugfix/fix_progress_stats

Former-commit-id: 1c4efa90
Former-commit-id: 05931db6
parents 744f4195 63dbecfc
......@@ -15,13 +15,6 @@ export LD_LIBRARY_PATH=${LD_PATH_PYTHON_GFZ}:${LD_LIBRARY_PATH}
export PYTHONPATH=${PFX}/opt/lib/python3.6/site-packages:${PFX}/opt/lib/python2.7/site-packages # Python version must be updated here!
export GDAL_DATA=${PFX}/opt/share/gdal
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python_deployed/gms_preprocessing # needed to find gms_preprocessing
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python_deployed/geoarray # needed to find geoarray
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python_deployed/py_tools_ds # needed to find py_tools_ds
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/scheffler/python_deployed/arosics # needed to find e.g. AROSICS
export PYTHONPATH=${PYTHONPATH}:/home/gfz-fe/hollstein/python/sicor # needed to find sicor
# execute python script
#ipython --colors='NoColor' run_gms.py jobid "$@" # NoColor must be active for ipython because gwt-log cannot interpret ANSI console colors
python run_gms.py jobid "$@"
......
......@@ -19,7 +19,7 @@ def run_from_jobid(args):
# TODO download: run only the downloader
# set up process controller instance
PC = process_controller(args.jobid, parallelization_level='scenes')
PC = process_controller(args.jobid, parallelization_level='scenes', db_host='geoms') # FIXME hardcoded host
# PC.job.path_procdata_scenes = '/geoms/data/processed_scenes_dev'
# PC.job.path_procdata_MGRS = '/geoms/data/processed_mgrs_tiles_dev'
......
#!/usr/bin/env bash
# Thin wrapper that starts a GMS preprocessing job, forwarding all CLI arguments to run_gms.py.
# execute python script
# NOTE: This asserts that the gms_preprocessing has been installed via 'pip install...' and that the PATH environment
# variable points to the correct Python interpreter.
#ipython --colors='NoColor' run_gms.py jobid "$@" # NoColor must be active for ipython because gwt-log cannot interpret ANSI console colors
run_gms.py jobid "$@"
......@@ -13,8 +13,8 @@ from .processing.process_controller import process_controller # noqa: E402
__author__ = """Daniel Scheffler"""
__email__ = 'daniel.scheffler@gfz-potsdam.de'
__version__ = '0.8.6'
__versionalias__ = '20171018.01'
__version__ = '0.8.12'
__versionalias__ = '20171023.02'
__all__ = ['algorithms',
'io',
'misc',
......
......@@ -203,6 +203,7 @@ class L1A_object(GMS_object):
i, list_matching_dsIdx = 0, []
while True: # Get dataset indices within HDF file
# noinspection PyBroadException
try:
ds = hdfFile.select(i)
if subsystem_identifier in str(ds.dimensions()) and 'ImagePixel' in str(ds.dimensions()):
......@@ -496,10 +497,11 @@ class L1A_object(GMS_object):
'mask_nodata') and self.mask_nodata is not None, "The L1A object needs to have a nodata mask."
self.logger.info('Calculating true data corner positions (image and world coordinates)...')
if re.search('ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31,
tzinfo=datetime.timezone.utc):
# if re.search('ETM+', self.sensor) and self.acq_datetime > datetime.datetime(year=2003, month=5, day=31,
# tzinfo=datetime.timezone.utc):
if is_dataset_provided_as_fullScene(self.GMS_identifier):
self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, algorithm='numpy',
assert_four_corners=False)
assert_four_corners=True)
else:
self.trueDataCornerPos = calc_FullDataset_corner_positions(self.mask_nodata, assert_four_corners=False)
......@@ -546,9 +548,10 @@ class L1A_object(GMS_object):
def calc_orbit_overpassParams(self):
    """Calculate orbit overpass parameters (overpass duration and scene length).

    Delegates to MetaObj.get_overpassDuration_SceneLength() and stores the results on
    self.MetaObj. Only executed for datasets provided as full scenes — for tiles/granules
    the orbit track cannot be reconstructed from the corner coordinates, so the
    attributes are left unchanged.
    """
    # NOTE(review): the unguarded duplicate call preceding this guard was stale diff
    # residue (the pre-commit version) and has been removed.
    if is_dataset_provided_as_fullScene(self.GMS_identifier):
        self.MetaObj.overpassDurationSec, self.MetaObj.scene_length = \
            self.MetaObj.get_overpassDuration_SceneLength(
                self.fullSceneCornerLonLat, self.fullSceneCornerPos, self.shape_fullArr, self.logger)
def add_rasterInfo_to_MetaObj(self, custom_rasObj=None):
"""
......@@ -602,13 +605,15 @@ class L1A_object(GMS_object):
def calc_mean_VAA(self):
    """Calculates mean viewing azimuth angle using sensor flight line derived from full scene corner coordinates.

    If the dataset is not provided as a full scene (e.g., Sentinel-2 or RapidEye granules),
    the flight line cannot be reconstructed; the mean incidence angle from the metadata is
    used as a fallback and a warning is logged.
    """
    # NOTE(review): the old 'Sentinel-2'-specific branch and its duplicated else-clause
    # were stale diff residue (pre-commit version) and have been removed; the
    # is_dataset_provided_as_fullScene() guard is the post-commit behavior.
    if is_dataset_provided_as_fullScene(self.GMS_identifier):
        self.VAA_mean = \
            GEOP.calc_VAA_using_fullSceneCornerLonLat(self.fullSceneCornerLonLat, self.MetaObj.orbitParams)
    else:
        # e.g. Sentinel-2 / RapidEye
        self.logger.warning('No precise calculation of mean viewing azimuth angle possible because orbit track '
                            'cannot be reconstructed from dataset since full scene corner positions are unknown. '
                            'Mean VAA angle is filled with the mean value of the viewing azimuth array provided '
                            'in metadata.')
        self.VAA_mean = self.MetaObj.IncidenceAngle

    self.logger.info('Calculation of mean VAA...: %s' % round(self.VAA_mean, 2))
......@@ -1163,9 +1163,12 @@ def calc_VAA_using_fullSceneCornerLonLat(fullSceneCornerLonLat, orbit_params):
"""Calculates the Viewing azimuth angle (defined as 90 degrees from the flight line),
e.g. if flight line is 8 degrees from North -> VAA will be 98 degrees.
:param fullSceneCornerLonLat:
:param fullSceneCornerLonLat: UL, UR, LL, LR
:param orbit_params: list of [altitude, inclination, period] => inclination is used as fallback
"""
assert len(fullSceneCornerLonLat) == 4, \
'VAA can only be calculated with fullSceneCornerLonLat representing 4 coordinates (UL, UR, LL, LR).'
UL_LonLat, UR_LonLat, LL_LonLat, LR_LonLat = fullSceneCornerLonLat
forward_az_left = pyproj.Geod(ellps='WGS84').inv(*LL_LonLat, *UL_LonLat)[0]
forward_az_right = pyproj.Geod(ellps='WGS84').inv(*LR_LonLat, *UR_LonLat)[0]
......@@ -1175,7 +1178,7 @@ def calc_VAA_using_fullSceneCornerLonLat(fullSceneCornerLonLat, orbit_params):
if abs(VAA_mean - 90) < 1:
# fullSceneCornerLonLat obviously don't belong to a full scene but a granule
assert orbit_params
warnings.warn('Derivation of mean VAA angle from flight line delivered a non reasonable value (%s degress).'
warnings.warn('Derivation of mean VAA angle from flight line delivered a non reasonable value (%s degrees).'
'Using sensor inclination (%s degrees) as fallback.' % (VAA_mean, orbit_params[1]))
VAA_mean = float(orbit_params[1]) # inclination # FIXME is this correct?
......@@ -1304,7 +1307,7 @@ def calc_SZA_SAA_array(shape_fullArr, arr_pos, AcqDate, CenterAcqTime, fullScene
:param AcqDate:
:param CenterAcqTime:
:param fullSceneCornerPos:
:param fullSceneCornerLonLat:
:param fullSceneCornerLonLat: UL, UR, LL, LR
:param overpassDurationSec:
:param logger:
:param meshwidth: <int> defines the density of the mesh used for generating the output
......
......@@ -187,7 +187,7 @@ class Job(object):
self.path_job_logs = self.DB_config['path_job_logs']
else:
# in test mode, the repository should be self-contained -> use only relative paths
self.path_archive = self.absP('../tests/data/')
self.path_fileserver = self.absP('../tests/data/')
self.path_archive = self.absP('../tests/data/archive_data/')
self.path_procdata_scenes = self.absP('../tests/data/output_scenes/')
self.path_procdata_MGRS = self.absP('../tests/data/output_mgrs_tiles/')
......
......@@ -1136,8 +1136,8 @@ def delete_processing_results(scene_ID, proc_level='all', force=False):
try:
shutil.rmtree(path_procdata)
except OSError: # directory not deletable because it is not empty
if [F for F in glob.glob(path_procdata) if not F.startswith('.fuse_hidden')]:
raise # raise OSError if there are other files that .fuse_hidden... remaining
if [F for F in glob.glob(path_procdata) if not os.path.basename(F).startswith('.fuse_hidden')]:
raise # raise OSError if there are other files than .fuse_hidden... remaining
else:
files2delete = glob.glob(os.path.join(path_procdata, '*%s*' % proc_level))
errors = False # default
......
......@@ -153,7 +153,7 @@ def is_dataset_provided_as_fullScene(GMS_identifier):
sensorcode = get_GMS_sensorcode(GMS_identifier)
dict_fullScene_or_tiles = {
'AVNIR-2': True,
'AST_full': False,
'AST_full': True,
'AST_V1': True,
'AST_V2': True,
'AST_S': True,
......@@ -173,7 +173,7 @@ def is_dataset_provided_as_fullScene(GMS_identifier):
'SPOT4b': True,
'SPOT5b': True,
'RE5': False,
'S2A_full': False, # FIXME this changed for S2 in 08/2016
'S2A_full': False,
'S2A10': False,
'S2A20': False,
'S2A60': False,
......
......@@ -473,13 +473,20 @@ def scene_ID_to_shapelyPolygon(scene_ID):
"""
Returns a LonLat shapely.Polygon() object corresponding to the given scene_ID.
"""
return Polygon(reorder_CornerLonLat(sceneID_to_trueDataCornerLonLat(scene_ID)))
poly = Polygon(reorder_CornerLonLat(sceneID_to_trueDataCornerLonLat(scene_ID)))
if not poly.is_valid:
poly = poly.buffer(0)
assert poly.is_valid
return poly
def CornerLonLat_to_shapelyPoly(CornerLonLat):
    """Return a shapely.Polygon() object based on the given coordinate list.

    :param CornerLonLat:    list of corner (lon, lat) coordinates
    :return:                a validated shapely Polygon; invalid geometries
                            (e.g. self-intersecting rings) are repaired first
    """
    # NOTE(review): the stale unconditional 'return Polygon(...)' line (pre-commit diff
    # residue) made the validation below unreachable; it has been removed.
    poly = Polygon(reorder_CornerLonLat(CornerLonLat))
    if not poly.is_valid:
        # buffer(0) is the standard shapely trick to repair many invalid geometries
        poly = poly.buffer(0)
    assert poly.is_valid
    return poly
def find_in_xml_root(namespace, xml_root, branch, *branches, findall=None):
......
......@@ -5,6 +5,7 @@ import re
import tempfile
import warnings
import uuid
from logging import Logger
from ..config import GMS_config as CFG
from .definition_dicts import get_GMS_sensorcode
......@@ -336,7 +337,8 @@ def get_path_ac_options(GMS_identifier):
assert os.path.exists(path_ac)
except AssertionError:
# FIXME this is a temporary workaround for issue #6 of sicor
GMS_identifier['logger'].warning(
logger = GMS_identifier['logger'] or Logger(__name__)
logger.warning(
'Could not locate options file for atmospheric correction within SICOR installation folder. '
'Using the one provided with gms_preprocessing (maybe outdated).')
from gms_preprocessing import __file__
......
......@@ -324,4 +324,10 @@ class Scene:
self.bounds = bounds
tempList = list(bounds) + [None] * 2
self.coordsLonLat = [tempList[n:n + 2] for n in range(0, len(bounds), 2)]
self.polyLonLat = Polygon(self.coordsLonLat)
# set validated (!) polygon
poly = Polygon(self.coordsLonLat)
if not poly.is_valid:
poly = poly.buffer(0)
assert poly.is_valid
self.polyLonLat = poly
......@@ -187,6 +187,7 @@ class Dataset(object):
@property
def arr(self):
# type: () -> GeoArray
# TODO this must return a subset if self.subset is not None
return self._arr
......
......@@ -29,7 +29,7 @@ from gms_preprocessing.algorithms import geoprocessing as GEOP
from gms_preprocessing.misc import helper_functions as HLP_F
from gms_preprocessing.misc import database_tools as DB_T
from gms_preprocessing.misc.path_generator import path_generator, get_path_ac_options
from gms_preprocessing.misc.definition_dicts import get_GMS_sensorcode
from gms_preprocessing.misc.definition_dicts import get_GMS_sensorcode, is_dataset_provided_as_fullScene
from sicor.options import get_options as get_ac_options
......@@ -254,6 +254,7 @@ class METADATA(object):
# Solar irradiance, central wavelengths , full width half maximum per band
self.wvlUnit = 'Nanometers'
# derive number of bands from number of given gains/offsets in header file or from raster data
# noinspection PyBroadException
try:
self.nBands = (np.mean([len(self.Gains), len(self.Offsets)])
if np.std([len(self.Gains), len(self.Offsets)]) == 0 and len(self.Gains) != 0 else
......@@ -743,6 +744,7 @@ class METADATA(object):
# Solar irradiance, central wavelengths , full width half maximum per band
self.wvlUnit = 'Nanometers'
# derive number of bands from number of given gains/offsets in header file or from raster data
# noinspection PyBroadException
try:
self.nBands = (np.mean([len(self.Gains), len(self.Offsets)])
if np.std([len(self.Gains), len(self.Offsets)]) == 0 and len(self.Gains) != 0 else
......@@ -1109,6 +1111,7 @@ class METADATA(object):
# Solar irradiance, central wavelengths, full width half maximum per band
self.wvlUnit = 'Nanometers'
# derive number of bands from number of given gains/offsets in header file or from raster data
# noinspection PyBroadException
try:
self.nBands = (np.mean([len(self.Gains), len(self.Offsets)])
if np.std([len(self.Gains), len(self.Offsets)]) == 0 and len(self.Gains) != 0 else
......@@ -1597,6 +1600,8 @@ class METADATA(object):
:param fullSceneCornerLonLat:
:param logger:
"""
assert is_dataset_provided_as_fullScene(self.GMS_identifier) and len(fullSceneCornerLonLat) == 4, \
'Center acquisition time can only be computed for datasets provided as full scenes, not for tiles.'
ul, lr = fullSceneCornerLonLat[0], fullSceneCornerLonLat[3]
center_coord = [np.mean([ul[0], lr[0]]), np.mean([ul[1], lr[1]])]
......@@ -1629,22 +1634,30 @@ class METADATA(object):
:param shape_fullArr:
:param logger:
"""
if fullSceneCornerPos != list(([0, 0], [0, shape_fullArr[1] - 1],
[shape_fullArr[0] - 1, 0], [shape_fullArr[0] - 1, shape_fullArr[1] - 1])):
orbitAltitudeKm, orbitPeriodMin = self.orbitParams[0], self.orbitParams[2]
UL, UR, LL, LR = fullSceneCornerLonLat
geod = pyproj.Geod(ellps='WGS84')
scene_length = np.mean([geod.inv(UL[0], UL[1], LL[0], LL[1])[2],
geod.inv(UR[0], UR[1], LR[0], LR[1])[2]]) / 1000 # along-track distance [km]
logger.info('Calculation of scene length...: %s km' % round(scene_length, 2))
orbitPeriodLength = 2 * math.pi * (6371 + orbitAltitudeKm)
overpassDurationSec = (scene_length / orbitPeriodLength) * orbitPeriodMin * 60.
logger.info('Calculation of overpass duration...: %s sec' % round(overpassDurationSec, 2))
return overpassDurationSec, scene_length
else:
logger.warning('Overpass duration and scene length cannot be calculated because the given data represents'
'a subset of the original scene.')
assert is_dataset_provided_as_fullScene(self.GMS_identifier) and len(fullSceneCornerLonLat) == 4, \
'Overpass duration and scene length can only be computed for datasets provided as full scenes, not for ' \
'tiles.'
# check if current scene is a subset
assert fullSceneCornerPos != list(([0, 0], [0, shape_fullArr[1] - 1],
[shape_fullArr[0] - 1, 0], [shape_fullArr[0] - 1, shape_fullArr[1] - 1])),\
'Overpass duration and scene length cannot be calculated because the given data represents a subset of ' \
'the original scene.'
# compute scene length
orbitAltitudeKm, orbitPeriodMin = self.orbitParams[0], self.orbitParams[2]
UL, UR, LL, LR = fullSceneCornerLonLat
geod = pyproj.Geod(ellps='WGS84')
scene_length = np.mean([geod.inv(UL[0], UL[1], LL[0], LL[1])[2],
geod.inv(UR[0], UR[1], LR[0], LR[1])[2]]) / 1000 # along-track distance [km]
logger.info('Calculation of scene length...: %s km' % round(float(scene_length), 2))
# compute overpass duration
orbitPeriodLength = 2 * math.pi * (6371 + orbitAltitudeKm)
overpassDurationSec = (scene_length / orbitPeriodLength) * orbitPeriodMin * 60.
logger.info('Calculation of overpass duration...: %s sec' % round(overpassDurationSec, 2))
return overpassDurationSec, scene_length
def filter_layerdependent_metadata(self):
FULL_LayerBandsAssignment = \
......
......@@ -25,7 +25,7 @@ test_requirements = requirements + ['coverage', 'nose', 'nose-htmloutput', 'redn
setup(
name='gms_preprocessing',
version='0.8.6',
version='0.8.12',
description="GeoMultiSens - Scalable Multi-Sensor Analysis of Remote Sensing Data",
long_description=readme + '\n\n' + history,
author="Daniel Scheffler",
......@@ -36,7 +36,7 @@ setup(
package_data={"database": ["gms_preprocessing/database/*"]},
include_package_data=True,
install_requires=requirements,
scripts=["bin/run_gms.py"],
scripts=["bin/run_gms.py", "bin/run_gms.sh"],
license="GNU General Public License v3",
zip_safe=False,
keywords='gms_preprocessing',
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment