Commit 5107ea13 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

Fixed bug regarding matplotlib backend. PEP-8 editing.

parent ba047c71
# -*- coding: utf-8 -*-
__author__ = """Daniel Scheffler"""
__email__ = 'daniel.scheffler@gfz-potsdam.de'
__version__ = '0.6.2'
__versionalias__ = '20170906.01'
import os
if 'MPLBACKEND' not in os.environ:
os.environ['MPLBACKEND'] = 'Agg'
from . import algorithms
from . import io
......@@ -13,6 +11,10 @@ from . import processing
from . import config
from .processing.process_controller import process_controller
__author__ = """Daniel Scheffler"""
__email__ = 'daniel.scheffler@gfz-potsdam.de'
__version__ = '0.6.3'
__versionalias__ = '20170915.01'
__all__ = ['algorithms',
'io',
'misc',
......
......@@ -70,9 +70,6 @@ class L1A_object(GMS_object):
% (self.satellite, self.sensor,
(' ' + self.subsystem) if self.subsystem not in [None, ''] else '', self.entity_ID))
########################### core functions ####################################
# <editor-fold desc="decompress(...) - not used anymore" >
# def decompress(self, compressed_file, outputpath=None):
# """Decompresses ZIP, TAR, TAR.GZ and TGZ archives to a given output path.
......@@ -85,7 +82,8 @@ class L1A_object(GMS_object):
# if not os.path.exists(outputpath): os.makedirs(outputpath)
#
# if compressed_file.endswith(".zip"):
# assert zipfile.is_zipfile(compressed_file), self.logger.critical(compressed_file+" is not a valid zipfile!")
# assert zipfile.is_zipfile(compressed_file),
# self.logger.critical(compressed_file+" is not a valid zipfile!")
# zf = zipfile.ZipFile(compressed_file)
# names = zf.namelist()
# count_extracted = 0
......@@ -118,7 +116,8 @@ class L1A_object(GMS_object):
# names, members = tf.getnames(), tf.getmembers()
# count_extracted = 0
# for n, m in zip(names, members):
# if os.path.exists(os.path.join(outputpath, n)) and m.size==os.stat(os.path.join(outputpath, n)).st_size:
# if os.path.exists(os.path.join(outputpath, n)) and
# m.size==os.stat(os.path.join(outputpath, n)).st_size:
# self.logger.warning("file '%s' from '%s' already exists in the directory: '%s'" \
# %(n,filename,outputpath))
# else:
......@@ -164,7 +163,8 @@ class L1A_object(GMS_object):
# os.remove(outFile[:-4] + '.hdr')
# self.Layerstacking(InFolder, outFile, sensor, ullr)
# else:
# LayerBandsAssignment_existingFile = [i.strip() for i in re_LayerBandsAssignment_str.group(1).split(',')]
# LayerBandsAssignment_existingFile =
# [i.strip() for i in re_LayerBandsAssignment_str.group(1).split(',')]
# if self.LayerBandsAssignment != LayerBandsAssignment_existingFile:
# self.logger.warning("""\n\t\tWARNING: There is already a layerstacking result at %s but
# Layer-Bands Assignment does not match. Layerstacking has been restarted.\n""" % outFile)
......@@ -172,7 +172,8 @@ class L1A_object(GMS_object):
# os.remove(outFile[:-4] + '.hdr')
# self.Layerstacking(InFolder, outFile, sensor, ullr)
# else:
# self.logger.warning("\n\t\tWARNING: File %s already exists. File has not been overwritten.\n" % outFile)
# self.logger.warning("\n\t\tWARNING:
# File %s already exists. File has not been overwritten.\n" % outFile)
# else:
# try:
# if re.search('Terra', self.satellite):
......@@ -204,11 +205,13 @@ class L1A_object(GMS_object):
# elif var2[0] in str(ds.GetSubDatasets()):
# subds_names = var2
# else:
# self.logger.error('Subdatasets seem to have unknown names in HDF file. Layerstacking failed.')
# self.logger.error(
# 'Subdatasets seem to have unknown names in HDF file. Layerstacking failed.')
# sys.exit()
#
# # calculate a DN subsystem_stack from the hdf data
# layers2merge = ["%s__b%s.bsq" %(os.path.splitext(outFile)[0], i) for i in self.LayerBandsAssignment]
# layers2merge = \
# ["%s__b%s.bsq" %(os.path.splitext(outFile)[0], i) for i in self.LayerBandsAssignment]
# for subds_name, layerPath in zip(subds_names, layers2merge):
# # convert hdf subdatasets in ENVI bsq files
# subprocess.Popen(["gdal_translate", "-q", "-of", "ENVI",subds_name,layerPath]).wait()
......@@ -223,8 +226,10 @@ class L1A_object(GMS_object):
# Files = os.listdir(InFolder)
# filtered_files = []
# for File in HLP_F.sorted_nicely(Files):
# if re.search('ALOS', self.satellite) and re.search("IMG-0[0-9]-[\s\S]*", File) or \
# re.search('Landsat', self.satellite) and re.search("[\S]*_B[1-9][0-9]?[\S]*.TIF", File):
# if re.search('ALOS', self.satellite) and \
# re.search("IMG-0[0-9]-[\s\S]*", File) or \
# re.search('Landsat', self.satellite) and \
# re.search("[\S]*_B[1-9][0-9]?[\S]*.TIF", File):
# filtered_files.append(File)
#
# assert len(self.LayerBandsAssignment) == len(filtered_files), \
......@@ -260,8 +265,8 @@ class L1A_object(GMS_object):
# band_names.append(band)
# self.logger.info(band)
# elif CFG.usecase.skip_thermal and META.isTHERMAL(self.GMS_identifier, LayerNr):
# self.logger.info(
# 'Band %s skipped because Layerstacking has been called with skipthermal = True.' % band)
# self.logger.info('Band %s skipped because Layerstacking has been called with '
# 'skipthermal = True.' % band)
# elif META.isPAN(self.GMS_identifier, LayerNr):
# self.logger.info('Band %s skipped because it is a panchromatic band.' % band)
# else:
......@@ -279,8 +284,8 @@ class L1A_object(GMS_object):
# the first 21 columns are deleted.'''
# bands = bands[:, :, 21:] # [bands,lines,columns]
# if ullr:
# GEOP.ndarray2gdal(bands, outPath=outFile, geotransform=[float(ulx), pixWidth, rot1, float(uly),
# rot2, pixHeight], projection=projection_orig, v=1)
# GEOP.ndarray2gdal(bands, outPath=outFile, geotransform=[float(ulx), pixWidth, rot1,
# float(uly), rot2, pixHeight], projection=projection_orig, v=1)
# else:
# GEOP.ndarray2gdal(bands, outPath=outFile, importFile=os.path.join(InFolder, band_names[0]))
# hdr_o = ef.ReadEnviHeader(outFile[:-4] + ".hdr")
......@@ -514,11 +519,13 @@ class L1A_object(GMS_object):
(rS, rE), (cS, cE) = self.arr_pos if self.arr_pos else ((0, self.shape_fullArr[0]), (0, self.shape_fullArr[1]))
# in_mem = hasattr(self, 'arr') and isinstance(self.arr, np.ndarray)
# if in_mem:
# (rS, rE), (cS, cE) = self.arr_pos if self.arr_pos else ((0,self.shape_fullArr[0]),(0,self.shape_fullArr[1]))
# (rS, rE), (cS, cE) =
# self.arr_pos if self.arr_pos else ((0,self.shape_fullArr[0]),(0,self.shape_fullArr[1]))
# bands = true_bands = self.arr.shape[2] if len(self.arr.shape) == 3 else 1
# else:
# subset = subset if subset else ['block', self.arr_pos] if self.arr_pos else ['cube', None]
# bands, rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[2:7]
# bands, rS, rE, cS, cE =
# list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[2:7]
# ds = gdal.Open(self.MetaObj.Dataname); true_bands = ds.RasterCount; ds = None
assert len(self.LayerBandsAssignment) == self.arr.bands, \
"DN2RadRef-Input data have %s bands although %s bands are specified in self.LayerBandsAssignment." \
......
This diff is collapsed.
......@@ -39,7 +39,7 @@ from sicor.Mask import S2Mask
class L1C_object(L1B_object):
def __init__(self, L1B_obj=None):
super().__init__()
super(L1C_object, self).__init__()
if L1B_obj:
# populate attributes
......@@ -326,7 +326,7 @@ class AtmCorr(object):
for inObj in self.inObjs:
for bandN, bandIdx in inObj.arr.bandnames.items():
if bandN not in data_dict:
arr2pass = inObj.arr[:,:,bandIdx].astype(np.float32) # conversion to np.float16 will convert -9999 to -10000
arr2pass = inObj.arr[:, :, bandIdx].astype(np.float32) # conversion to np.float16 will convert -9999 to -10000
arr2pass[arr2pass==inObj.arr.nodata] = np.nan # set nodata values to np.nan
data_dict[bandN] = (arr2pass/inObj.meta_odict['ScaleFactor']).astype(np.float16)
else:
......
# -*- coding: utf-8 -*-
###############################################################################
#
# Level 2A Processor:
# Spatial homogenization
#
###############################################################################
__author__='Daniel Scheffler'
"""Level 2A Processor: Spatial homogenization"""
import collections
import os
import warnings
import numpy as np
from geoarray import GeoArray
......@@ -19,6 +12,8 @@ from py_tools_ds.geo.map_info import mapinfo2geotransform
from ..config import GMS_config as CFG
from .L1C_P import L1C_object
__author__ = 'Daniel Scheffler'
def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False, paramsFromUsecase=True, **kwargs):
"""
......@@ -48,27 +43,28 @@ def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False,
"""
# FIXME diese Methode muss target grid festlegen, auch wenn keine Referenz verfügbar ist!
illegal_kw = [i for i in kwargs if i not in ['align_grids','out_gsd','match_gsd','no_resamp','cliptoextent']]
assert illegal_kw == [], "'%s' is not a legal keyword argument for L1B_P.get_DESHIFTER_configs()" %illegal_kw[0]
illegal_kw = [i for i in kwargs if i not in ['align_grids', 'out_gsd', 'match_gsd', 'no_resamp', 'cliptoextent']]
assert illegal_kw == [], "'%s' is not a legal keyword argument for L1B_P.get_DESHIFTER_configs()" % illegal_kw[0]
dicts_GMS_obj = [dicts_GMS_obj] if not isinstance(dicts_GMS_obj,list) else dicts_GMS_obj
attrnames2deshift = [attrnames2deshift] if not isinstance(attrnames2deshift,list) else attrnames2deshift
dicts_GMS_obj = [dicts_GMS_obj] if not isinstance(dicts_GMS_obj, list) else dicts_GMS_obj
attrnames2deshift = [attrnames2deshift] if not isinstance(attrnames2deshift, list) else attrnames2deshift
# get general kwargs
gen_kwargs = collections.OrderedDict()
if paramsFromUsecase:
gen_kwargs.update({'align_grids':CFG.usecase.align_coord_grids})
gen_kwargs.update({'out_gsd' :CFG.usecase.target_gsd})
gen_kwargs.update({'match_gsd' :CFG.usecase.match_gsd})
gen_kwargs.update({'align_grids': CFG.usecase.align_coord_grids})
gen_kwargs.update({'out_gsd': CFG.usecase.target_gsd})
gen_kwargs.update({'match_gsd': CFG.usecase.match_gsd})
else:
[gen_kwargs.update({kw:kwargs.get(kw)}) for kw in ['align_grids','out_gsd','match_gsd'] if kw in kwargs]
[gen_kwargs.update({kw:kwargs.get(kw)}) for kw in ['no_resamp','cliptoextent'] if kw in kwargs]
[gen_kwargs.update({kw: kwargs.get(kw)}) for kw in ['align_grids', 'out_gsd', 'match_gsd'] if kw in kwargs]
[gen_kwargs.update({kw: kwargs.get(kw)}) for kw in ['no_resamp', 'cliptoextent'] if kw in kwargs]
config_dicts = []
for obj in dicts_GMS_obj:
# FIXME workaround für fehlende refererence geotransform -> eigentlich müsste nicht gt, sondern target grid berechnet werden
assert isinstance(obj,dict)
# FIXME workaround für fehlende refererence geotransform
# FIXME -> eigentlich müsste nicht gt, sondern target grid berechnet werden
assert isinstance(obj, dict)
if not obj['coreg_info']['reference geotransform']:
obj['coreg_info']['reference geotransform'] = mapinfo2geotransform(
obj['coreg_info']['original map info'])
......@@ -78,11 +74,11 @@ def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False,
item2add = [obj]
for attrname in attrnames2deshift:
attrVal = obj[attrname]
attritem2add = item2add+[attrname]
attritem2add = item2add + [attrname]
if isinstance(attrVal,np.ndarray) or isinstance(attrVal,GeoArray) and attrVal.is_inmem:
bands = attrVal.shape[2] if attrVal.ndim==3 else None
elif isinstance(attrVal,GeoArray) and not attrVal.is_inmem:
if isinstance(attrVal, np.ndarray) or isinstance(attrVal, GeoArray) and attrVal.is_inmem:
bands = attrVal.shape[2] if attrVal.ndim == 3 else None
elif isinstance(attrVal, GeoArray) and not attrVal.is_inmem:
if os.path.exists(attrVal):
bands = attrVal.bands
else:
......@@ -91,18 +87,18 @@ def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False,
elif attrVal is None:
continue
else:
raise Exception('Unexpected attribute type %s in attribute %s.' %(type(attrVal),attrname))
raise Exception('Unexpected attribute type %s in attribute %s.' % (type(attrVal), attrname))
if proc_bandwise and bands is not None and 'band2process' not in kwargs:
for bI in range(bands):
kwargs2add = collections.OrderedDict()
kwargs2add.update({'band2process':bI+1})
kwargs2add.update({'band2process': bI + 1})
kwargs2add.update(gen_kwargs)
banditem2add = attritem2add+[kwargs2add]
banditem2add = attritem2add + [kwargs2add]
config_dicts.append(banditem2add)
elif 'band2process' in kwargs and kwargs.get('band2process') is not None:
assert isinstance(kwargs.get('band2process'),int), "'band2process' must contain an integer."
kwargs2add = collections.OrderedDict({'band2process':kwargs.get('band2process')})
assert isinstance(kwargs.get('band2process'), int), "'band2process' must contain an integer."
kwargs2add = collections.OrderedDict({'band2process': kwargs.get('band2process')})
kwargs2add.update(gen_kwargs)
attritem2add.append(kwargs2add)
config_dicts.append(attritem2add)
......@@ -115,10 +111,10 @@ def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False,
class L2A_object(L1C_object):
    """Level 2A object: result container of the spatial homogenization step."""

    def __init__(self, L1C_obj=None):
        """Create an L2A object, optionally populated from an existing L1C object.

        :param L1C_obj:  the L1C_object instance to copy all attributes from
        """
        super(L2A_object, self).__init__()

        if L1C_obj:
            # populate attributes from the given L1C object
            # NOTE: plain loop instead of a throwaway list comprehension (side effects only)
            for key, value in L1C_obj.__dict__.items():
                setattr(self, key, value)

        self.proc_level = 'L2A'
# -*- coding: utf-8 -*-
###############################################################################
#
# Level 2C Processor:
# Accurracy layers
#
###############################################################################
__author__='Daniel Scheffler'
"""Level 2C Processor: Quality layers"""
from .L2B_P import L2B_object
__author__ = 'Daniel Scheffler'
shared = {}
res = {}
from .L2B_P import L2B_object
class L2C_object(L2B_object):
def __init__(self, L2B_obj=None):
super().__init__()
super(L2C_object, self).__init__()
if L2B_obj:
# populate attributes
......
......@@ -16,7 +16,7 @@ from . import L2A_P
from . import L2B_P
from . import L2C_P
__all__=['GEOPROCESSING',
__all__ = ['GEOPROCESSING',
'gms_cloud_classifier',
'L1A_P',
'L1B_P',
......@@ -25,4 +25,4 @@ __all__=['GEOPROCESSING',
'L2B_P',
'L2C_P']
__author__='Daniel Scheffler'
__author__ = 'Daniel Scheffler'
# -*- coding: utf-8 -*-
__author__ = 'Daniel Scheffler'
import os
import fnmatch
......@@ -26,6 +24,8 @@ from ..misc.database_tools import get_info_from_postgreSQLdb
from ..misc.exceptions import FmaskError, FmaskWarning
from geoarray import GeoArray
__author__ = 'Daniel Scheffler'
class _FMASK_Runner(object):
"""The FMASK runner base class (not to be called directly)."""
......@@ -56,18 +56,17 @@ class _FMASK_Runner(object):
tempdir_rootPath = '/dev/shm/GeoMultiSens'
if not os.path.isdir(tempdir_rootPath):
os.makedirs(tempdir_rootPath)
self.tempdir = tempfile.mkdtemp(dir=tempdir_rootPath, prefix='FMASK__%s__' %os.path.basename(self.path_archive))
self.tempdir = tempfile.mkdtemp(dir=tempdir_rootPath,
prefix='FMASK__%s__' % os.path.basename(self.path_archive))
# create subdirectory for FMASK internal intermediate files
os.makedirs(os.path.join(self.tempdir, 'FMASK_intermediates'))
def validate_inputs(self):
    """Validate that the provider archive exists and the satellite is supported by FMASK.

    :raises FileNotFoundError:  if the provider archive does not exist on disk
    :raises ValueError:         if the satellite is not supported for FMASK cloud masking
    """
    if not os.path.exists(self.path_archive):
        raise FileNotFoundError(self.path_archive)

    # PEP-8: 'x not in y' instead of 'not x in y'
    if self.satellite not in ['Landsat-4', 'Landsat-5', 'Landsat-7', 'Landsat-8', 'Sentinel-2A', 'Sentinel-2B']:
        raise ValueError('%s is not a supported satellite for cloud mask calculation via FMASK.' % self.satellite)
@property
def is_GMSConfig_available(self):
......@@ -78,14 +77,12 @@ class _FMASK_Runner(object):
except (EnvironmentError, OSError):
return False
@property
def gdal_path_archive(self):
    """Return the GDAL virtual-file-system path of the provider archive.

    The conversion via convert_absPathArchive_to_GDALvsiPath() is performed once
    and then cached in self._gdal_path_archive.
    """
    if not self._gdal_path_archive:
        self._gdal_path_archive = convert_absPathArchive_to_GDALvsiPath(self.path_archive)
    return self._gdal_path_archive
@property
def files_in_archive(self):
if not self._files_in_archive:
......@@ -93,7 +90,6 @@ class _FMASK_Runner(object):
self._files_in_archive = gdal.ReadDirRecursive(self.gdal_path_archive)
return self._files_in_archive
@staticmethod
def run_cmd(cmd):
output, exitcode, err = subcall_with_output(cmd)
......@@ -102,21 +98,18 @@ class _FMASK_Runner(object):
if output:
return output.decode('UTF-8')
def extract_tar_archive(self):
    """Unpack the provider TAR archive into the temporary working directory.

    Sets self.is_extracted to True afterwards.
    NOTE(review): extractall() on an untrusted archive is vulnerable to path
    traversal ('tar slip') — consider the 'filter' argument of newer Pythons.
    """
    with tarfile.open(self.path_archive) as archive:
        archive.extractall(self.tempdir)
    self.is_extracted = True
def extract_zip_archive(self):
    """Unpack the provider ZIP archive into the temporary working directory.

    Sets self.is_extracted to True afterwards.
    NOTE(review): extractall() on an untrusted archive may write outside the
    target directory for crafted member names — verify input sources.
    """
    with zipfile.ZipFile(self.path_archive, "r") as archive:
        archive.extractall(self.tempdir)
    self.is_extracted = True
def to_saved_rasterFile(self, value, attrname):
pathFile = os.path.join(self.tempdir, "%s.bsq" %attrname)
pathFile = os.path.join(self.tempdir, "%s.bsq" % attrname)
if isinstance(value, str) and os.path.exists(value):
pathFile = value
elif isinstance(value, GeoArray):
......@@ -124,13 +117,12 @@ class _FMASK_Runner(object):
value.save(pathFile)
else:
raise TypeError("The attribute '%s' can only be set by an existing path or an instance of GeoArray. "
"Received %s" %(attrname, type(value)))
"Received %s" % (attrname, type(value)))
assert isinstance(pathFile, str) and os.path.exists(pathFile)
return pathFile
def calc_cloudMask(self, path_out=None, fmt=None):
if path_out:
gdal.Translate(path_out, gdal.Open(self.cloud_mask), format=fmt)
......@@ -151,7 +143,6 @@ class _FMASK_Runner(object):
return self.cloud_mask
def clean(self):
shutil.rmtree(self.tempdir)
self.is_extracted = False
......@@ -161,11 +152,7 @@ class _FMASK_Runner(object):
self._TOARef = None
class FMASK_Runner_Landsat(_FMASK_Runner):
def __init__(self, path_providerArchive, satellite, TOARef=None, opticalDNs=None, thermalDNs=None):
"""FMASK wrapper class for Landsat 4-8.
......@@ -189,7 +176,7 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
'Landsat-8': dict(optical='L*_B[1-7,9].TIF', thermal='L*_B1[0,1].TIF', meta='*_MTL.txt')
}[satellite]
super(FMASK_Runner_Landsat,self).__init__(path_providerArchive, satellite, extract_archive=False)
super(FMASK_Runner_Landsat, self).__init__(path_providerArchive, satellite, extract_archive=False)
# populate optional attributes
if TOARef is not None:
......@@ -199,7 +186,6 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
if thermalDNs is not None:
self.thermal_stack = thermalDNs
@property
def optical_stack(self):
if self._optical_stack is None:
......@@ -214,12 +200,10 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
return self._optical_stack
@optical_stack.setter
def optical_stack(self, value):
self._optical_stack = super(FMASK_Runner_Landsat, self).to_saved_rasterFile(value, 'optical_stack')
@property
def thermal_stack(self):
if self._thermal_stack is None:
......@@ -234,22 +218,19 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
return self._thermal_stack
@thermal_stack.setter
def thermal_stack(self, value):
self._thermal_stack = super(FMASK_Runner_Landsat, self).to_saved_rasterFile(value, 'thermal_stack')
@property
def metaFile(self):
    """Return the path of the Landsat '*_MTL.txt' metadata file.

    Extracts the TAR archive on first access (if not already extracted) and
    caches the resulting path in self._metaFile.
    """
    if not self._metaFile:
        if not self.is_extracted:
            self.extract_tar_archive()
        # pick the (first) MTL metadata file from the extracted archive
        self._metaFile = os.path.join(self.tempdir, fnmatch.filter(os.listdir(self.tempdir), '*_MTL.txt')[0])
    return self._metaFile
@property
def angles_stack(self):
if self._angles_stack is None:
......@@ -259,18 +240,16 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
return self._angles_stack
@property
def saturationmask(self):
    """Compute (once) and return the file path of the FMASK saturation mask.

    Runs 'fmask_usgsLandsatSaturationMask.py' via self.run_cmd() on first
    access and caches the output path in self._saturationmask.
    """
    if self._saturationmask is None:
        self._saturationmask = os.path.join(self.tempdir, 'saturationmask.vrt')
        self.run_cmd('fmask_usgsLandsatSaturationMask.py -m %s -i %s -o %s'
                     % (self.metaFile, self.optical_stack, self._saturationmask))
        # band index of each color band within the saturation mask
        self.saturationmask_legend = {'blue': 0, 'green': 1, 'red': 2}
    return self._saturationmask
@property
def TOARef(self):
if self._TOARef is None:
......@@ -280,12 +259,10 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
return self._TOARef
@TOARef.setter
def TOARef(self, value):
self._TOARef = super(FMASK_Runner_Landsat, self).to_saved_rasterFile(value, 'TOARef')
def calc_cloudMask(self, path_out=None, fmt=None):
# type: (str, str) -> any
......@@ -299,20 +276,19 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
try:
self.cloud_mask = os.path.join(self.tempdir, 'fmask_cloudmask.vrt')
self.run_cmd('fmask_usgsLandsatStacked.py %s'
%' '.join(['-m %s' %self.metaFile,
'-a %s' %self.TOARef,
'-t %s' %self.thermal_stack,
'-z %s' %self.angles_stack,
'-s %s' %self.saturationmask,
'-o %s' %self.cloud_mask,
'-e %s' %os.path.join(self.tempdir, 'FMASK_intermediates')
]) )
% ' '.join(['-m %s' % self.metaFile,
'-a %s' % self.TOARef,
'-t %s' % self.thermal_stack,
'-z %s' % self.angles_stack,
'-s %s' % self.saturationmask,
'-o %s' % self.cloud_mask,
'-e %s' % os.path.join(self.tempdir, 'FMASK_intermediates')
]))
return super(FMASK_Runner_Landsat, self).calc_cloudMask(path_out=path_out, fmt=fmt)
finally:
self.clean()
def clean(self):
self._thermal_stack = None
self._optical_stack = None
......@@ -322,11 +298,7 @@ class FMASK_Runner_Landsat(_FMASK_Runner):
assert not os.path.isdir(self.tempdir), 'Error deleting temporary FMASK directory.'
class FMASK_Runner_Sentinel2(_FMASK_Runner):
def __init__(self, path_providerArchive, satellite, scene_ID=None, granule_ID='', target_res=20, TOARef=None,
extract_archive=False):
"""FMASK wrapper class for Sentinel-2.
......@@ -347,17 +319,17 @@ class FMASK_Runner_Sentinel2(_FMASK_Runner):
self.scene_ID = scene_ID
self.tgt_res = target_res
oldStPref = '*GRANULE/'+self.granule_ID + '*/'
oldStPref = '*GRANULE/' + self.granule_ID + '*/'
self.FileMatchExp = {
'Sentinel-2A': dict(opticalOLDStyle='%s*_B0[1-8].jp2 %s*_B8A.jp2 %s*_B09.jp2 %s*_B1[0-2].jp2'
%(oldStPref, oldStPref, oldStPref, oldStPref),
% (oldStPref, oldStPref, oldStPref, oldStPref),
opticalNEWStyle='*_B0[1-8].jp2 *_B8A.jp2 *_B09.jp2 *_B1[0-2].jp2',
metaOLDStyle='%sS2A*.xml' %oldStPref,
metaOLDStyle='%sS2A*.xml' % oldStPref,
metaNEWStyle='*MTD_TL.xml'),
'Sentinel-2B': dict(opticalOLDStyle='%s*_B0[1-8].jp2 %s*_B8A.jp2 %s*_B09.jp2 %s*_B1[0-2].jp2'
%(oldStPref, oldStPref, oldStPref, oldStPref),
% (oldStPref, oldStPref, oldStPref, oldStPref),
opticalNEWStyle='*_B0[1-8].jp2 *_B8A.jp2 *_B09.jp2 *_B1[0-2].jp2',
metaOLDStyle='%sS2A*.xml' %oldStPref,
metaOLDStyle='%sS2A*.xml' % oldStPref,
metaNEWStyle='*MTD_TL.xml'),
}[satellite]
......@@ -367,13 +339,12 @@ class FMASK_Runner_Sentinel2(_FMASK_Runner):
if TOARef is not None:
self.TOARef = TOARef
@property
def granule_ID(self):
"""Gets the Sentinel-2 granule ID from the database using the scene ID in case the granule ID has not been
given."""
if not self._granule_ID and self.scene_ID and self.scene_ID!=-9999 and self.is_GMSConfig_available:
if not self._granule_ID and self.scene_ID and self.scene_ID != -9999 and self.is_GMSConfig_available:
from ..config import GMS_config as CFG
res = get_info_from_postgreSQLdb(CFG.job.conn_database, 'scenes', ['entityid'], {'id': self.scene_ID})
assert len(res) != 0, \
......@@ -383,18 +354,17 @@ class FMASK_Runner_Sentinel2(_FMASK_Runner):
return self._granule_ID