Commit 5107ea13 authored by Daniel Scheffler

Fixed a bug regarding the matplotlib backend (default to the non-interactive 'Agg' backend when MPLBACKEND is unset). PEP-8 editing.

parent ba047c71
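The matplotlib fix works by exporting MPLBACKEND before matplotlib is imported anywhere in the process, so headless workers fall back to the non-interactive Agg backend instead of trying to open a display. A minimal standalone sketch of the pattern (the output path is illustrative):

import os

# must run before the first `import matplotlib` in the process,
# otherwise the backend has already been chosen
if 'MPLBACKEND' not in os.environ:
    os.environ['MPLBACKEND'] = 'Agg'

import matplotlib.pyplot as plt  # safe on machines without a display

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
fig.savefig('/tmp/backend_check.png')  # written without opening a GUI window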
# -*- coding: utf-8 -*-
__author__ = """Daniel Scheffler"""
__email__ = 'daniel.scheffler@gfz-potsdam.de'
__version__ = '0.6.2'
__versionalias__ = '20170906.01'
import os
if 'MPLBACKEND' not in os.environ:
os.environ['MPLBACKEND'] = 'Agg'
from . import algorithms
from . import io
@@ -13,10 +11,14 @@ from . import processing
from . import config
from .processing.process_controller import process_controller
__all__ = ['algorithms',
'io',
'misc',
'processing',
'config',
'process_controller',
]
__author__ = """Daniel Scheffler"""
__email__ = 'daniel.scheffler@gfz-potsdam.de'
__version__ = '0.6.3'
__versionalias__ = '20170915.01'
__all__ = ['algorithms',
'io',
'misc',
'processing',
'config',
'process_controller',
]
@@ -39,7 +39,7 @@ from sicor.Mask import S2Mask
class L1C_object(L1B_object):
def __init__(self, L1B_obj=None):
super().__init__()
super(L1C_object, self).__init__()
if L1B_obj:
# populate attributes
@@ -326,7 +326,7 @@ class AtmCorr(object):
for inObj in self.inObjs:
for bandN, bandIdx in inObj.arr.bandnames.items():
if bandN not in data_dict:
arr2pass = inObj.arr[:,:,bandIdx].astype(np.float32) # conversion to np.float16 will convert -9999 to -10000
arr2pass = inObj.arr[:, :, bandIdx].astype(np.float32) # conversion to np.float16 will convert -9999 to -10000
arr2pass[arr2pass==inObj.arr.nodata] = np.nan # set nodata values to np.nan
data_dict[bandN] = (arr2pass/inObj.meta_odict['ScaleFactor']).astype(np.float16)
else:
# -*- coding: utf-8 -*-
###############################################################################
#
# Level 2A Processor:
# Spatial homogenization
#
###############################################################################
__author__='Daniel Scheffler'
"""Level 2A Processor: Spatial homogenization"""
import collections
import os
import warnings
import numpy as np
from geoarray import GeoArray
@@ -19,6 +12,8 @@ from py_tools_ds.geo.map_info import mapinfo2geotransform
from ..config import GMS_config as CFG
from .L1C_P import L1C_object
__author__ = 'Daniel Scheffler'
def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False, paramsFromUsecase=True, **kwargs):
"""
@@ -48,41 +43,42 @@ def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False,
"""
# FIXME this method must define the target grid, even if no reference is available!
illegal_kw = [i for i in kwargs if i not in ['align_grids','out_gsd','match_gsd','no_resamp','cliptoextent']]
assert illegal_kw == [], "'%s' is not a legal keyword argument for L1B_P.get_DESHIFTER_configs()" %illegal_kw[0]
illegal_kw = [i for i in kwargs if i not in ['align_grids', 'out_gsd', 'match_gsd', 'no_resamp', 'cliptoextent']]
assert illegal_kw == [], "'%s' is not a legal keyword argument for L1B_P.get_DESHIFTER_configs()" % illegal_kw[0]
dicts_GMS_obj = [dicts_GMS_obj] if not isinstance(dicts_GMS_obj,list) else dicts_GMS_obj
attrnames2deshift = [attrnames2deshift] if not isinstance(attrnames2deshift,list) else attrnames2deshift
dicts_GMS_obj = [dicts_GMS_obj] if not isinstance(dicts_GMS_obj, list) else dicts_GMS_obj
attrnames2deshift = [attrnames2deshift] if not isinstance(attrnames2deshift, list) else attrnames2deshift
# get general kwargs
gen_kwargs = collections.OrderedDict()
if paramsFromUsecase:
gen_kwargs.update({'align_grids':CFG.usecase.align_coord_grids})
gen_kwargs.update({'out_gsd' :CFG.usecase.target_gsd})
gen_kwargs.update({'match_gsd' :CFG.usecase.match_gsd})
gen_kwargs.update({'align_grids': CFG.usecase.align_coord_grids})
gen_kwargs.update({'out_gsd': CFG.usecase.target_gsd})
gen_kwargs.update({'match_gsd': CFG.usecase.match_gsd})
else:
[gen_kwargs.update({kw:kwargs.get(kw)}) for kw in ['align_grids','out_gsd','match_gsd'] if kw in kwargs]
[gen_kwargs.update({kw:kwargs.get(kw)}) for kw in ['no_resamp','cliptoextent'] if kw in kwargs]
[gen_kwargs.update({kw: kwargs.get(kw)}) for kw in ['align_grids', 'out_gsd', 'match_gsd'] if kw in kwargs]
[gen_kwargs.update({kw: kwargs.get(kw)}) for kw in ['no_resamp', 'cliptoextent'] if kw in kwargs]
config_dicts = []
for obj in dicts_GMS_obj:
# FIXME workaround for the missing reference geotransform -> actually the target grid should be computed here, not the gt
assert isinstance(obj,dict)
# FIXME workaround for the missing reference geotransform
# FIXME -> actually the target grid should be computed here, not the gt
assert isinstance(obj, dict)
if not obj['coreg_info']['reference geotransform']:
obj['coreg_info']['reference geotransform'] = mapinfo2geotransform(
obj['coreg_info']['original map info'])
obj['coreg_info']['reference geotransform'] = mapinfo2geotransform(
obj['coreg_info']['original map info'])
obj['coreg_info']['reference geotransform'][1] = CFG.usecase.target_gsd[0]
obj['coreg_info']['reference geotransform'][5] = -abs(CFG.usecase.target_gsd[1])
item2add = [obj]
for attrname in attrnames2deshift:
attrVal = obj[attrname]
attritem2add = item2add+[attrname]
attrVal = obj[attrname]
attritem2add = item2add + [attrname]
if isinstance(attrVal,np.ndarray) or isinstance(attrVal,GeoArray) and attrVal.is_inmem:
bands = attrVal.shape[2] if attrVal.ndim==3 else None
elif isinstance(attrVal,GeoArray) and not attrVal.is_inmem:
if isinstance(attrVal, np.ndarray) or isinstance(attrVal, GeoArray) and attrVal.is_inmem:
bands = attrVal.shape[2] if attrVal.ndim == 3 else None
elif isinstance(attrVal, GeoArray) and not attrVal.is_inmem:
if os.path.exists(attrVal):
bands = attrVal.bands
else:
@@ -91,18 +87,18 @@ def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False,
elif attrVal is None:
continue
else:
raise Exception('Unexpected attribute type %s in attribute %s.' %(type(attrVal),attrname))
raise Exception('Unexpected attribute type %s in attribute %s.' % (type(attrVal), attrname))
if proc_bandwise and bands is not None and 'band2process' not in kwargs:
for bI in range(bands):
kwargs2add = collections.OrderedDict()
kwargs2add.update({'band2process':bI+1})
kwargs2add.update({'band2process': bI + 1})
kwargs2add.update(gen_kwargs)
banditem2add = attritem2add+[kwargs2add]
banditem2add = attritem2add + [kwargs2add]
config_dicts.append(banditem2add)
elif 'band2process' in kwargs and kwargs.get('band2process') is not None:
assert isinstance(kwargs.get('band2process'),int), "'band2process' must contain an integer."
kwargs2add = collections.OrderedDict({'band2process':kwargs.get('band2process')})
assert isinstance(kwargs.get('band2process'), int), "'band2process' must contain an integer."
kwargs2add = collections.OrderedDict({'band2process': kwargs.get('band2process')})
kwargs2add.update(gen_kwargs)
attritem2add.append(kwargs2add)
config_dicts.append(attritem2add)
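For orientation, every entry appended to config_dicts above is a flat list of the form [GMS_object_dict, attribute_name, kwargs_OrderedDict]. A hedged illustration of one such entry for a band-wise run; the object dict contents and the kwargs values are made-up placeholders (the real values come from CFG.usecase or **kwargs):

from collections import OrderedDict

gms_obj_dict = {'arr': None, 'coreg_info': {}}       # stand-in for a real GMS object dict
example_entry = [gms_obj_dict,
                 'arr',                               # attribute to be deshifted
                 OrderedDict([('band2process', 1),    # 1-based band index
                              ('align_grids', True),  # illustrative gen_kwargs values
                              ('out_gsd', [30, 30]),
                              ('match_gsd', False)])]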
@@ -115,10 +111,10 @@ def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False,
class L2A_object(L1C_object):
def __init__(self, L1C_obj=None):
super().__init__()
super(L2A_object, self).__init__()
if L1C_obj:
# populate attributes
[setattr(self, key, value) for key,value in L1C_obj.__dict__.items()]
[setattr(self, key, value) for key, value in L1C_obj.__dict__.items()]
self.proc_level = 'L2A'
self.proc_level = 'L2A'
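The recurring change from super().__init__() to super(ClassName, self).__init__() in this commit replaces the zero-argument form, which only exists in Python 3, with the explicit two-argument form that also runs on Python 2. A minimal sketch, assuming cross-version compatibility is the motivation:

class Base(object):
    def __init__(self):
        self.initialized = True

class Child(Base):
    def __init__(self):
        # Python 3 only:  super().__init__()
        # Python 2 and 3: pass class and instance explicitly
        super(Child, self).__init__()
        self.proc_level = 'L2A'  # mirrors the pattern used by the GMS object classes

assert Child().initialized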
# -*- coding: utf-8 -*-
###############################################################################
#
# Level 2C Processor:
# Accurracy layers
#
###############################################################################
__author__='Daniel Scheffler'
"""Level 2C Processor: Quality layers"""
from .L2B_P import L2B_object
__author__ = 'Daniel Scheffler'
shared = {}
res = {}
res = {}
from .L2B_P import L2B_object
class L2C_object(L2B_object):
def __init__(self, L2B_obj=None):
super().__init__()
super(L2C_object, self).__init__()
if L2B_obj:
# populate attributes
@@ -27,4 +23,4 @@ class L2C_object(L2B_object):
pass
def calc_spectral_accurracy(self):
pass
\ No newline at end of file
pass
@@ -16,13 +16,13 @@ from . import L2A_P
from . import L2B_P
from . import L2C_P
__all__=['GEOPROCESSING',
'gms_cloud_classifier',
'L1A_P',
'L1B_P',
'L1C_P',
'L2A_P',
'L2B_P',
'L2C_P']
__all__ = ['GEOPROCESSING',
'gms_cloud_classifier',
'L1A_P',
'L1B_P',
'L1C_P',
'L2A_P',
'L2B_P',
'L2C_P']
__author__='Daniel Scheffler'
__author__ = 'Daniel Scheffler'
# -*- coding: utf-8 -*-
__author__ = 'Daniel Scheffler'
import os
import fnmatch
@@ -26,6 +24,8 @@ from ..misc.database_tools import get_info_from_postgreSQLdb
from ..misc.exceptions import FmaskError, FmaskWarning
from geoarray import GeoArray
__author__ = 'Daniel Scheffler'
class _FMASK_Runner(object):
"""The FMASK runner base class (not to be called directly)."""
@@ -56,18 +56,17 @@ class _FMASK_Runner(object):
tempdir_rootPath = '/dev/shm/GeoMultiSens'
if not os.path.isdir(tempdir_rootPath):
os.makedirs(tempdir_rootPath)
self.tempdir = tempfile.mkdtemp(dir=tempdir_rootPath, prefix='FMASK__%s__' %os.path.basename(self.path_archive))
self.tempdir = tempfile.mkdtemp(dir=tempdir_rootPath,
prefix='FMASK__%s__' % os.path.basename(self.path_archive))
# create subdirectory for FMASK internal intermediate files
os.makedirs(os.path.join(self.tempdir, 'FMASK_intermediates'))
def validate_inputs(self):
if not os.path.exists(self.path_archive):
raise FileNotFoundError(self.path_archive)
if not self.satellite in ['Landsat-4', 'Landsat-5', 'Landsat-7', 'Landsat-8', 'Sentinel-2A', 'Sentinel-2B']:
raise ValueError('%s is not a supported satellite for cloud mask calculation via FMASK.' %self.satellite)
if self.satellite not in ['Landsat-4', 'Landsat-5', 'Landsat-7', 'Landsat-8', 'Sentinel-2A', 'Sentinel-2B']:
raise ValueError('%s is not a supported satellite for cloud mask calculation via FMASK.' % self.satellite)
@property
def is_GMSConfig_available(self):
@@ -78,14 +77,12 @@ class _FMASK_Runner(object):
except (EnvironmentError, OSError):
return False
@property
def gdal_path_archive(self):
if not self._gdal_path_archive:
self._gdal_path_archive = convert_absPathArchive_to_GDALvsiPath(self.path_archive)
return self._gdal_path_archive
@property
def files_in_archive(self):
if not self._files_in_archive:
@@ -93,7 +90,6 @@ class _FMASK_Runner(object):
self._files_in_archive = gdal.ReadDirRecursive(self.gdal_path_archive)
return self._files_in_archive
@staticmethod
def run_cmd(cmd):
output, exitcode, err = subcall_with_output(cmd)
@@ -102,21 +98,18 @@ class _FMASK_Runner(object):
if output:
return output.decode('UTF-8')
def extract_tar_archive(self):
with tarfile.open(self.path_archive) as tarF:
tarF.extractall(self.tempdir)
self.is_extracted = True
def extract_zip_archive(self):
with zipfile.ZipFile(self.path_archive, "r") as z:
z.extractall(self.tempdir)
self.is_extracted = True
def to_saved_rasterFile(self, value, attrname):
pathFile = os.path.join(self.tempdir, "%s.bsq" %attrname)
pathFile = os.path.join(self.tempdir, "%s.bsq" % attrname)
if isinstance(value, str) and os.path.exists(value):
pathFile = value
elif isinstance(value, GeoArray):
@@ -124,13 +117,12 @@ class _FMASK_Runner(object):
value.save(pathFile)
else:
raise TypeError("The attribute '%s' can only be set by an existing path or an instance of GeoArray. "
"Received %s" %(attrname, type(value)))
"Received %s" % (attrname, type(value)))
assert isinstance(pathFile, str) and os.path.exists(pathFile)
return pathFile
def calc_cloudMask(self, path_out=None, fmt=None):
if path_out:
gdal.Translate(path_out, gdal.Open(self.cloud_mask), format=fmt)
@@ -144,14 +136,13 @@ class _FMASK_Runner(object):
if self.is_GMSConfig_available:
self.cloud_mask.legend = \
get_mask_classdefinition('mask_clouds', self.satellite)
else: # use default FMASK legend
else: # use default FMASK legend
warnings.warn('GMS configuration not available. Using default cloud mask legend.', FmaskWarning)
self.cloud_mask.legend = \
{'No Data': 0, 'Clear': 1, 'Cloud': 2, 'Shadow': 3, 'Snow': 4, 'Water': 5}
return self.cloud_mask
def clean(self):
shutil.rmtree(self.tempdir)
self.is_extracted = False
@@ -161,11 +152,7 @@ class _FMASK_Runner(object):
self._TOARef = None
class FMASK_Runner_Landsat(_FMASK_Runner):
def __init__(self, path_providerArchive, satellite, TOARef=None, opticalDNs=None, thermalDNs=None):
"""FMASK wrapper class for Landsat 4-8.
@@ -189,7 +176,7 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
'Landsat-8': dict(optical='L*_B[1-7,9].TIF', thermal='L*_B1[0,1].TIF', meta='*_MTL.txt')
}[satellite]
super(FMASK_Runner_Landsat,self).__init__(path_providerArchive, satellite, extract_archive=False)
super(FMASK_Runner_Landsat, self).__init__(path_providerArchive, satellite, extract_archive=False)
# populate optional attributes
if TOARef is not None:
@@ -199,7 +186,6 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
if thermalDNs is not None:
self.thermal_stack = thermalDNs
@property
def optical_stack(self):
if self._optical_stack is None:
@@ -214,12 +200,10 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
return self._optical_stack
@optical_stack.setter
def optical_stack(self, value):
self._optical_stack = super(FMASK_Runner_Landsat, self).to_saved_rasterFile(value, 'optical_stack')
@property
def thermal_stack(self):
if self._thermal_stack is None:
@@ -234,43 +218,38 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
return self._thermal_stack
@thermal_stack.setter
def thermal_stack(self, value):
self._thermal_stack = super(FMASK_Runner_Landsat, self).to_saved_rasterFile(value, 'thermal_stack')
@property
def metaFile(self):
if not self._metaFile:
if not self.is_extracted:
self.extract_tar_archive()
self._metaFile = os.path.join(self.tempdir,fnmatch.filter(os.listdir(self.tempdir), '*_MTL.txt')[0])
self._metaFile = os.path.join(self.tempdir, fnmatch.filter(os.listdir(self.tempdir), '*_MTL.txt')[0])
return self._metaFile
@property
def angles_stack(self):
if self._angles_stack is None:
self._angles_stack = os.path.join(self.tempdir, 'angles.vrt')
self.run_cmd('fmask_usgsLandsatMakeAnglesImage.py -m %s -t %s -o %s'
% (self.metaFile, self.optical_stack, self._angles_stack))
% (self.metaFile, self.optical_stack, self._angles_stack))
return self._angles_stack
@property
def saturationmask(self):
if self._saturationmask is None:
self._saturationmask = os.path.join(self.tempdir, 'saturationmask.vrt')
self.run_cmd('fmask_usgsLandsatSaturationMask.py -m %s -i %s -o %s'
% (self.metaFile, self.optical_stack, self._saturationmask))
self.saturationmask_legend = {'blue':0, 'green':1, 'red':2}
self.saturationmask_legend = {'blue': 0, 'green': 1, 'red': 2}
return self._saturationmask
@property
def TOARef(self):
if self._TOARef is None:
@@ -280,12 +259,10 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
return self._TOARef
@TOARef.setter
def TOARef(self, value):
self._TOARef = super(FMASK_Runner_Landsat, self).to_saved_rasterFile(value, 'TOARef')
def calc_cloudMask(self, path_out=None, fmt=None):
# type: (str, str) -> any
@@ -299,20 +276,19 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
try:
self.cloud_mask = os.path.join(self.tempdir, 'fmask_cloudmask.vrt')
self.run_cmd('fmask_usgsLandsatStacked.py %s'
%' '.join(['-m %s' %self.metaFile,
'-a %s' %self.TOARef,
'-t %s' %self.thermal_stack,
'-z %s' %self.angles_stack,
'-s %s' %self.saturationmask,
'-o %s' %self.cloud_mask,
'-e %s' %os.path.join(self.tempdir, 'FMASK_intermediates')
]) )
% ' '.join(['-m %s' % self.metaFile,
'-a %s' % self.TOARef,
'-t %s' % self.thermal_stack,
'-z %s' % self.angles_stack,
'-s %s' % self.saturationmask,
'-o %s' % self.cloud_mask,
'-e %s' % os.path.join(self.tempdir, 'FMASK_intermediates')
]))
return super(FMASK_Runner_Landsat, self).calc_cloudMask(path_out=path_out, fmt=fmt)
finally:
self.clean()
def clean(self):
self._thermal_stack = None
self._optical_stack = None
@@ -322,11 +298,7 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
assert not os.path.isdir(self.tempdir), 'Error deleting temporary FMASK directory.'
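A hedged usage sketch for the Landsat runner, based only on the constructor and calc_cloudMask signatures shown above; the archive path and the output format are illustrative placeholders:

# assumes an existing provider *.tar.gz archive (validate_inputs() checks the path)
runner = FMASK_Runner_Landsat('/data/LC81930242015276LGN00.tar.gz', 'Landsat-8')
cloud_mask = runner.calc_cloudMask(path_out='/tmp/fmask_clouds.bsq', fmt='ENVI')
# cloud_mask is returned as a GeoArray; its legend maps 0..5 to
# No Data / Clear / Cloud / Shadow / Snow / Water (see the base class above)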
class FMASK_Runner_Sentinel2(_FMASK_Runner):
def __init__(self, path_providerArchive, satellite, scene_ID=None, granule_ID='', target_res=20, TOARef=None,
extract_archive=False):
"""FMASK wrapper class for Sentinel-2.
@@ -347,17 +319,17 @@ class FMASK_Runner_Sentinel2(_FMASK_Runner):
self.scene_ID = scene_ID
self.tgt_res = target_res
oldStPref = '*GRANULE/'+self.granule_ID + '*/'
oldStPref = '*GRANULE/' + self.granule_ID + '*/'
self.FileMatchExp = {
'Sentinel-2A': dict(opticalOLDStyle='%s*_B0[1-8].jp2 %s*_B8A.jp2 %s*_B09.jp2 %s*_B1[0-2].jp2'
%(oldStPref, oldStPref, oldStPref, oldStPref),
% (oldStPref, oldStPref, oldStPref, oldStPref),
opticalNEWStyle='*_B0[1-8].jp2 *_B8A.jp2 *_B09.jp2 *_B1[0-2].jp2',
metaOLDStyle='%sS2A*.xml' %oldStPref,
metaOLDStyle='%sS2A*.xml' % oldStPref,
metaNEWStyle='*MTD_TL.xml'),
'Sentinel-2B': dict(opticalOLDStyle='%s*_B0[1-8].jp2 %s*_B8A.jp2 %s*_B09.jp2 %s*_B1[0-2].jp2'
%(oldStPref, oldStPref, oldStPref, oldStPref),
% (oldStPref, oldStPref, oldStPref, oldStPref),
opticalNEWStyle='*_B0[1-8].jp2 *_B8A.jp2 *_B09.jp2 *_B1[0-2].jp2',
metaOLDStyle='%sS2A*.xml' %oldStPref,
metaOLDStyle='%sS2A*.xml' % oldStPref,
metaNEWStyle='*MTD_TL.xml'),
}[satellite]
@@ -367,34 +339,32 @@ class FMASK_Runner_Sentinel2(_FMASK_Runner):
if TOARef is not None:
self.TOARef = TOARef
@property
def granule_ID(self):
"""Gets the Sentinel-2 granule ID from the database using the scene ID in case the granule ID has not been
given."""
if not self._granule_ID and self.scene_ID and self.scene_ID!=-9999 and self.is_GMSConfig_available:
if not self._granule_ID and self.scene_ID and self.scene_ID != -9999 and self.is_GMSConfig_available:
from ..config import GMS_config as CFG
res = get_info_from_postgreSQLdb(CFG.job.conn_database, 'scenes', ['entityid'], {'id': self.scene_ID})
assert len(res) != 0, \
"Invalid SceneID given - no corresponding scene with the ID=%s found in database.\n" % self.scene_ID
"Invalid SceneID given - no corresponding scene with the ID=%s found in database.\n" % self.scene_ID
assert len(res) == 1, "Error in database. The sceneid %s exists more than once. \n" % self.scene_ID
self._granule_ID = res[0][0]
return self._granule_ID
@property
def metaFile(self):
if not self._metaFile:
fNs_meta = fnmatch.filter(self.files_in_archive, self.FileMatchExp['metaNEWStyle'])
if not fNs_meta:
fNs_meta = fnmatch.filter(self.files_in_archive, self.FileMatchExp['metaOLDStyle'])
if len(fNs_meta)>1:
if len(fNs_meta) > 1:
raise RuntimeError('Found multiple metadata files for the given %s dataset. Please provide the '
'granule ID where you want to use the metadata from.' %self.satellite)
'granule ID where you want to use the metadata from.' % self.satellite)
elif not fNs_meta:
raise RuntimeError('Could not find a metadata file for the given %s dataset.' %self.satellite)
raise RuntimeError('Could not find a metadata file for the given %s dataset.' % self.satellite)
fN_meta = fNs_meta[0]
# only extract the metadata file
@@ -405,7 +375,6 @@ class FMASK_Runner_Sentinel2(_FMASK_Runner):
return self._metaFile
@property
def angles_stack(self):
if self._angles_stack is None:
@@ -414,7 +383,6 @@ class FMASK_Runner_Sentinel2(_FMASK_Runner):
return self._angles_stack
@property
def TOARef(self):
if self._TOARef is None:
@@ -426,7 +394,7 @@ class FMASK_Runner_Sentinel2(_FMASK_Runner):
fileList = glob(self.tempdir + '/**', recursive=True)
matchExps = self.FileMatchExp['opticalOLDStyle'].split()
opt_fNames = list(itertools.chain.from_iterable(
opt_fNames = list(itertools.chain.from_iterable(
[list(sorted(fnmatch.filter(fileList, mE))) for mE in matchExps]))
if not opt_fNames:
matchExps = self.FileMatchExp['opticalNEWStyle'].split()
@@ -442,12 +410,10 @@ class FMASK_Runner_Sentinel2(_FMASK_Runner):
return self._TOARef
@TOARef.setter
def TOARef(self, value):
self._TOARef = super(FMASK_Runner_Sentinel2, self).to_saved_rasterFile(value, 'TOARef')
def calc_cloudMask(self, path_out=None, fmt=None):