Commit dd92ef21 authored by Daniel Scheffler

FMASK is now called from the L1A processor for Landsat.


Former-commit-id: 0b31a9c6
parent 74936563
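The diff below routes cloud masking in L1A_object.calc_cloud_mask through a new algorithm switch and, for Landsat, through the FMask runner. For orientation, a minimal self-contained sketch of that dispatch, assembled from the added lines; calc_cloud_mask_sketch and the two callables are placeholders for illustration, not project code:

# Simplified stand-in for the dispatch added to L1A_object.calc_cloud_mask (illustrative only).
import re

def calc_cloud_mask_sketch(satellite, algorithm, run_fmask_landsat, classical_bayesian_mask):
    """Return a cloud mask (or None), mirroring the branching introduced by this commit."""
    if algorithm not in ('FMASK', 'Classical Bayesian'):  # validation as in the diff (TODO: move to config)
        raise ValueError(algorithm)
    if algorithm == 'FMASK':
        if re.search('Landsat', satellite, re.I):         # FMask is only wired up for Landsat so far
            return run_fmask_landsat(satellite)
        return None                                       # Sentinel-2 and others: not implemented yet
    return classical_bayesian_mask(satellite)             # previous classifier-based path

# usage with dummy callables:
mask = calc_cloud_mask_sketch('Landsat-8', 'FMASK',
                              run_fmask_landsat=lambda sat: 'fmask mask',
                              classical_bayesian_mask=lambda sat: 'bayesian mask')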
@@ -29,7 +29,7 @@ from . import gms_cloud_classifier as CLD_P # Cloud Processor
from ..io import Output_writer as OUT_W
from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG
from ..misc.definition_dicts import get_outFillZeroSaturated, is_dataset_provided_as_fullScene
from ..misc.definition_dicts import get_outFillZeroSaturated, is_dataset_provided_as_fullScene, get_mask_classdefinition
from ..model.gms_object import GMS_object
from ..model import METADATA as META
@@ -685,119 +685,147 @@ class L1A_object(GMS_object):
self.MetaObj.Dataname = previous_dataname # /vsi.. pointing directly to raw data archive (which exists)
def calc_cloud_mask(self,subset=None):
# FIXME Landsat cloud mask pixel values are currently not compatible to definition_dicts.get_mask_classdefinition
# append /<GeoMultiSensRepo>/algorithms to PATH in order to properly import py_tools_ah when unpickling cloud classifiers
sys.path.append(os.path.join(os.path.dirname(__file__))) # FIXME handle py_tools_ah as normal external dependency
def calc_cloud_mask(self, subset=None):
algorithm = CFG.job.cloud_masking_algorithm
if algorithm not in ['FMASK', 'Classical Bayesian']: # TODO move validation to config
raise ValueError(algorithm)
(rS, rE), (cS, cE) = self.arr_pos if self.arr_pos else ((0, self.shape_fullArr[0]), (0, self.shape_fullArr[1]))
#in_mem = hasattr(self, 'arr') and isinstance(self.arr, np.ndarray)
#if in_mem:
# (rS, rE), (cS, cE) = self.arr_pos if self.arr_pos else ((0,self.shape_fullArr[0]),(0,self.shape_fullArr[1]))
# bands = self.arr.shape[2] if len(self.arr.shape) == 3 else 1
#else:
# subset = subset if subset else ['block', self.arr_pos] if self.arr_pos else ['cube', None]
# bands, rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[2:7]
# arr_isPath = isinstance(self.arr, str) and os.path.isfile(self.arr) # FIXME
# inPath = self.arr if arr_isPath else self.MetaObj.Dataname if \
# (hasattr(self,'MetaObj') and self.MetaObj) else self.meta_odict['Dataname'] # FIXME ersetzen durch path generator?
if not self.path_cloud_class_obj or self.satellite=='Sentinel-2A': # FIXME dont exclude S2 here
self.log_for_fullArr_or_firstTile('Cloud masking is not yet implemented for %s %s...'
%(self.satellite,self.sensor),subset)
mask_clouds = None
self.logger.info("Calculating cloud mask based on '%s' algorithm..." %algorithm)
if algorithm == 'FMASK':
from .fmask_runner import FMask_Runner_Landsat
if re.search('Landsat', self.satellite, re.I):
FMR = FMask_Runner_Landsat(self.path_archive, self.satellite)
cm_gA = FMR.calc_cloudMask()
mask_clouds = cm_gA[:]
elif re.search('Sentinel-2', self.satellite, re.I):
self.logger.error(
'Cloud masking is not yet implemented for %s %s...' %(self.satellite,self.sensor),subset)
mask_clouds = None
else:
self.logger.error(
'Cloud masking is not yet implemented for %s %s...' % (self.satellite, self.sensor), subset)
mask_clouds = None
else:
self.log_for_fullArr_or_firstTile('Calculating cloud mask...', subset)
#if CFG.usecase.skip_thermal: # FIXME
#if not in_mem:
# subset = subset if subset else ['cube',None]
# rasObj = GEOP.GEOPROCESSING(inPath, self.logger, subset=subset)
# self.arr = rasObj.tondarray(direction=3)
# TODO Sentinel-2 mask:
#if not hasattr(s2img, "mask_clouds"):
# logger.info("Cloud mask missing -> derive own cloud mask.")
# CldMsk = CloudMask(logger=logger, persistence_file=options["cld_mask"]["persistence_file"],
# processing_tiles=options["cld_mask"]["processing_tiles"])
# s2img.mask_clouds = CldMsk(S2_img=s2img, target_resolution=options["cld_mask"]["target_resolution"],
# majority_filter_options=options["cld_mask"]["majority_mask_filter"],
# nodata_value=options["cld_mask"]['nodata_value_mask'])
# del CldMsk
self.GMS_identifier['logger'] = self.logger
if not CFG.job.bench_CLD_class:
self.path_cloud_class_obj = PG.get_path_cloud_class_obj(self.GMS_identifier)
CLD_obj = CLD_P.GmsCloudClassifier(classifier=self.path_cloud_class_obj)
assert CLD_obj, 'Error loading cloud classifier.'
if self.arr.bands == CLD_obj.classifier.n_channels: #FIXME
try:
mask_clouds = CLD_obj(self) # CLD_obj uses self.arr for cloud mask generation
except Exception:
# FIXME Landsat cloud mask pixel values are currently not compatible to definition_dicts.get_mask_classdefinition
# append /<GeoMultiSensRepo>/algorithms to PATH in order to properly import py_tools_ah when unpickling cloud classifiers
sys.path.append(os.path.join(os.path.dirname(__file__))) # FIXME handle py_tools_ah as normal external dependency
#in_mem = hasattr(self, 'arr') and isinstance(self.arr, np.ndarray)
#if in_mem:
# (rS, rE), (cS, cE) = self.arr_pos if self.arr_pos else ((0,self.shape_fullArr[0]),(0,self.shape_fullArr[1]))
# bands = self.arr.shape[2] if len(self.arr.shape) == 3 else 1
#else:
# subset = subset if subset else ['block', self.arr_pos] if self.arr_pos else ['cube', None]
# bands, rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[2:7]
# arr_isPath = isinstance(self.arr, str) and os.path.isfile(self.arr) # FIXME
# inPath = self.arr if arr_isPath else self.MetaObj.Dataname if \
# (hasattr(self,'MetaObj') and self.MetaObj) else self.meta_odict['Dataname'] # FIXME ersetzen durch path generator?
if not self.path_cloud_class_obj or self.satellite=='Sentinel-2A': # FIXME dont exclude S2 here
self.log_for_fullArr_or_firstTile('Cloud masking is not yet implemented for %s %s...'
%(self.satellite,self.sensor),subset)
mask_clouds = None
else:
self.log_for_fullArr_or_firstTile('Calculating cloud mask...', subset)
#if CFG.usecase.skip_thermal: # FIXME
#if not in_mem:
# subset = subset if subset else ['cube',None]
# rasObj = GEOP.GEOPROCESSING(inPath, self.logger, subset=subset)
# self.arr = rasObj.tondarray(direction=3)
# TODO Sentinel-2 mask:
#if not hasattr(s2img, "mask_clouds"):
# logger.info("Cloud mask missing -> derive own cloud mask.")
# CldMsk = CloudMask(logger=logger, persistence_file=options["cld_mask"]["persistence_file"],
# processing_tiles=options["cld_mask"]["processing_tiles"])
# s2img.mask_clouds = CldMsk(S2_img=s2img, target_resolution=options["cld_mask"]["target_resolution"],
# majority_filter_options=options["cld_mask"]["majority_mask_filter"],
# nodata_value=options["cld_mask"]['nodata_value_mask'])
# del CldMsk
self.GMS_identifier['logger'] = self.logger
if not CFG.job.bench_CLD_class:
self.path_cloud_class_obj = PG.get_path_cloud_class_obj(self.GMS_identifier)
CLD_obj = CLD_P.GmsCloudClassifier(classifier=self.path_cloud_class_obj)
assert CLD_obj, 'Error loading cloud classifier.'
if self.arr.bands == CLD_obj.classifier.n_channels: #FIXME
try:
mask_clouds = CLD_obj(self) # CLD_obj uses self.arr for cloud mask generation
except Exception:
mask_clouds = None
self.logger.error('Error during calculation of cloud mask:\n', exc_info=True)
else:
sensorcode = ('%s %s %s' %(self.satellite,self.sensor, self.subsystem)) \
if self.subsystem else '%s %s' %(self.satellite,self.sensor)
nbands = self.shape_fullArr[2] if len(self.shape_fullArr)==3 else 1
self.log_for_fullArr_or_firstTile("'The chosen cloud classifier for %s is not suitable "
" for the given %s bands. %s bands expected. Cloud masking failed."
%(sensorcode,nbands,CLD_obj.classifier.n_channels), subset)
mask_clouds = None
self.logger.error('Error during calculation of cloud mask:\n', exc_info=True)
else:
sensorcode = ('%s %s %s' %(self.satellite,self.sensor, self.subsystem)) \
if self.subsystem else '%s %s' %(self.satellite,self.sensor)
nbands = self.shape_fullArr[2] if len(self.shape_fullArr)==3 else 1
self.log_for_fullArr_or_firstTile("'The chosen cloud classifier for %s is not suitable "
" for the given %s bands. %s bands expected. Cloud masking failed."
%(sensorcode,nbands,CLD_obj.classifier.n_channels), subset)
pathlist_cloud_class_obj = PG.get_path_cloud_class_obj(self.GMS_identifier, get_all=True)
# load_time,proc_time,classifier_name = [],[],[]
categories_timinggroup_timing = np.empty((2*len(pathlist_cloud_class_obj),3),dtype='<U7')
for i,class_path in zip(range(0,2*len(pathlist_cloud_class_obj),2),pathlist_cloud_class_obj):
categories_timinggroup_timing[i:i+1,0] = os.path.splitext(os.path.basename(class_path))[0]
t1 = time.time()
CLD_obj = CLD_P.GmsCloudClassifier(classifier=class_path)
categories_timinggroup_timing[i,1] = "import time"
categories_timinggroup_timing[i,2] = time.time()-t1
t2 = time.time()
mask_clouds = CLD_obj(self)
categories_timinggroup_timing[i+1,1] = "processing time"
categories_timinggroup_timing[i+1,2] = time.time()-t2
classifiers = np.unique(categories_timinggroup_timing[:,0])
categories = np.unique(categories_timinggroup_timing[:,1])
plt.ioff()
fig = plt.figure()
ax = fig.add_subplot(111)
space = 0.3
n = len(classifiers)
width = (1 - space) / (len(classifiers))
#for i,classif in enumerate(classifiers): # FIXME
# vals = dpoints[dpoints[:,0] == cond][:,2].astype(np.float)
# pos = [j - (1 - space) / 2. + i * width for j in range(1,len(categories)+1)]
# ax.bar(pos, vals, width=width)
print(os.path.join(CFG.job.path_testing,'out/%s_benchmark_cloud_classifiers.png' %self.baseN))
fig.savefig(os.path.abspath('./testing/out/%s_benchmark_cloud_classifiers.png' %self.baseN),
format='png',dpi=300,bbox_inches='tight')
mask_clouds = None
else:
pathlist_cloud_class_obj = PG.get_path_cloud_class_obj(self.GMS_identifier, get_all=True)
# load_time,proc_time,classifier_name = [],[],[]
categories_timinggroup_timing = np.empty((2*len(pathlist_cloud_class_obj),3),dtype='<U7')
for i,class_path in zip(range(0,2*len(pathlist_cloud_class_obj),2),pathlist_cloud_class_obj):
categories_timinggroup_timing[i:i+1,0] = os.path.splitext(os.path.basename(class_path))[0]
t1 = time.time()
CLD_obj = CLD_P.GmsCloudClassifier(classifier=class_path)
categories_timinggroup_timing[i,1] = "import time"
categories_timinggroup_timing[i,2] = time.time()-t1
t2 = time.time()
mask_clouds = CLD_obj(self)
categories_timinggroup_timing[i+1,1] = "processing time"
categories_timinggroup_timing[i+1,2] = time.time()-t2
classifiers = np.unique(categories_timinggroup_timing[:,0])
categories = np.unique(categories_timinggroup_timing[:,1])
plt.ioff()
fig = plt.figure()
ax = fig.add_subplot(111)
space = 0.3
n = len(classifiers)
width = (1 - space) / (len(classifiers))
#for i,classif in enumerate(classifiers): # FIXME
# vals = dpoints[dpoints[:,0] == cond][:,2].astype(np.float)
# pos = [j - (1 - space) / 2. + i * width for j in range(1,len(categories)+1)]
# ax.bar(pos, vals, width=width)
print(os.path.join(CFG.job.path_testing,'out/%s_benchmark_cloud_classifiers.png' %self.baseN))
fig.savefig(os.path.abspath('./testing/out/%s_benchmark_cloud_classifiers.png' %self.baseN),
format='png',dpi=300,bbox_inches='tight')
mask_clouds = None
# import time
# t1 = time.time()
# import builtins
# data = builtins.CLD_obj(self) # CLD_obj is part of builtins
# print( time.time()-t1, os.path.basename(i), 'cld calc')
# del self.GMS_identifier['logger']
# # print(subset)
# # for i in pathlist_cloud_class_obj:
# # # print(os.path.basename(i))
# # t1 = time.time()
# # # self.path_cloud_class_obj = INP_R.get_path_cloud_class_obj(self.GMS_identifier)
# # # CLD_obj = CLD_P.GmsCloudClassifier(classifier=self.path_cloud_class_obj)
# # # CLD_obj = CLD_P.GmsCloudClassifier(classifier=i)
# # print( time.time()-t1, os.path.basename(i), 'cld class load')
# # t1 = time.time()
# # data = CLD_obj(self)
# # # data = self.CLD_obj(self)
# # print( time.time()-t1, os.path.basename(i), 'cld calc')
# import time
# t1 = time.time()
# import builtins
# data = builtins.CLD_obj(self) # CLD_obj is part of builtins
# print( time.time()-t1, os.path.basename(i), 'cld calc')
# del self.GMS_identifier['logger']
# # print(subset)
# # for i in pathlist_cloud_class_obj:
# # # print(os.path.basename(i))
# # t1 = time.time()
# # # self.path_cloud_class_obj = INP_R.get_path_cloud_class_obj(self.GMS_identifier)
# # # CLD_obj = CLD_P.GmsCloudClassifier(classifier=self.path_cloud_class_obj)
# # # CLD_obj = CLD_P.GmsCloudClassifier(classifier=i)
# # print( time.time()-t1, os.path.basename(i), 'cld class load')
# # t1 = time.time()
# # data = CLD_obj(self)
# # # data = self.CLD_obj(self)
# # print( time.time()-t1, os.path.basename(i), 'cld calc')
if mask_clouds is not None:
if False in np.equal(mask_clouds,mask_clouds.astype(np.uint8)):
@@ -808,7 +836,7 @@ class L1A_object(GMS_object):
# update fill values (fill values written by CLD_obj(self) are not equal to GMS fill values)
outFill = get_outFillZeroSaturated(self.mask_clouds.dtype)[0]
if outFill in mask_clouds:
if outFill in mask_clouds and get_mask_classdefinition('mask_clouds')['No Data'] != 0:
warnings.warn('Value %s is assigned to mask_clouds although it already contains this value. '
'Please make sure that the produced cloud mask does not use potential fill values. '
'Otherwise cloud mask results can be affected.' %outFill)
......
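The last hunk above tightens the fill-value guard: the warning is now only raised when the cloud mask legend does not already map 'No Data' to 0. A hedged reconstruction of that check, with get_outFillZeroSaturated() and get_mask_classdefinition() replaced by plain arguments:

import warnings
import numpy as np

def warn_on_fill_value_collision(mask_clouds, out_fill, nodata_class_value):
    """Warn if the GMS fill value already occurs in the mask while 'No Data' is not mapped to 0."""
    if out_fill in mask_clouds and nodata_class_value != 0:
        warnings.warn('Value %s is assigned to mask_clouds although it already contains this value. '
                      'Please make sure that the produced cloud mask does not use potential fill values.'
                      % out_fill)

warn_on_fill_value_collision(np.array([0, 1, 2], dtype=np.uint8), out_fill=0, nodata_class_value=0)  # no warning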
@@ -30,7 +30,7 @@ from ..config import GMS_config as CFG
from . import GEOPROCESSING as GEOP
from .L1B_P import L1B_object
from ..model.METADATA import get_LayerBandsAssignment
from ..misc.definition_dicts import get_outFillZeroSaturated, proc_chain
from ..misc.definition_dicts import get_outFillZeroSaturated, proc_chain, get_mask_classdefinition
from ..io.Input_reader import SRF
from sicor.sicor_ac import ac_gms
@@ -692,7 +692,7 @@ class AtmCorr(object):
if list(set(avail_cloud_masks.values())) == [None]:
return None
# get cloud mask target resolution
# check cloud mask target resolution
tgt_res = self.inObjs[0].ac_options['cld_mask']['target_resolution']
inObjs2use = [obj for obj in self.inObjs if obj.mask_clouds is not None and obj.mask_clouds.xgsd==tgt_res]
if not inObjs2use:
@@ -704,10 +704,11 @@ class AtmCorr(object):
cm_array = inObj2use.masks[:,:,1] # FIXME hardcoded
# get legend
cm_legend = {'Clear': 10, 'Thick Clouds': 20, 'Thin Clouds': 30, 'Snow': 40} # FIXME hardcoded
cm_legend = get_mask_classdefinition('mask_clouds')
#{'Clear': 10, 'Thick Clouds': 20, 'Thin Clouds': 30, 'Snow': 40} # FIXME hardcoded
# get geocoding
# validate GSD
# validate that xGSD equals yGSD
if inObj2use.mask_clouds.xgsd != inObj2use.mask_clouds.ygsd:
warnings.warn("Cloud mask X/Y GSD is not equal for entity ID %s" % inObj2use.entity_ID +
(' (%s)' % inObj2use.subsystem if inObj2use.subsystem else '') +
......
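The AtmCorr hunks above replace the hardcoded cloud mask legend with get_mask_classdefinition('mask_clouds') and validate that the cloud mask has the same ground sampling distance in X and Y. A small self-contained sketch of that validation; the mask object is mocked with a namedtuple rather than the real GeoArray-based mask:

import warnings
from collections import namedtuple

MaskCloudsMock = namedtuple('MaskCloudsMock', ['xgsd', 'ygsd'])

def validate_cloud_mask_gsd(mask_clouds, entity_id):
    """Warn if the cloud mask pixel size differs between X and Y, as the hunk above does."""
    if mask_clouds.xgsd != mask_clouds.ygsd:
        warnings.warn('Cloud mask X/Y GSD is not equal for entity ID %s' % entity_id)

validate_cloud_mask_gsd(MaskCloudsMock(xgsd=30, ygsd=30), 'DUMMY_ENTITY_ID')  # equal GSD -> no warning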
@@ -17,6 +17,7 @@ except ImportError:
import fmask
from ..misc.helper_functions import convert_absPathArchive_to_GDALvsiPath, subcall_with_output
from ..misc.definition_dicts import get_mask_classdefinition
from ..io.Input_reader import open_specific_file_within_archive
from geoarray import GeoArray
@@ -53,6 +54,7 @@ class FMask_Runner_Landsat(object):
self.cloud_mask = None
self.cloud_mask = None
self.FileMatchExp = landsat_filematchexp[satellite]
self.saturationmask_legend = {}
# create temporary directory
tempdir_rootPath = '/dev/shm/GeoMultiSens'
@@ -213,6 +215,7 @@ class FMask_Runner_Landsat(object):
self._saturationmask = os.path.join(self.tempdir, 'saturationmask.vrt')
self.run_cmd('fmask_usgsLandsatSaturationMask.py -m %s -i %s -o %s'
% (self.metaFile, self.optical_stack, self._saturationmask))
self.saturationmask_legend = {'blue':0, 'green':1, 'red':2}
return self._saturationmask
@@ -242,10 +245,14 @@ class FMask_Runner_Landsat(object):
if path_out:
gdal.Translate(path_out, gdal.Open(self.cloud_mask), format=fmt)
self.cloud_mask = path_out
self.cloud_mask = GeoArray(path_out)
else:
self.cloud_mask = GeoArray(self.cloud_mask).to_mem()
self.cloud_mask.legend = \
get_mask_classdefinition('mask_clouds')
return self.cloud_mask
finally:
......
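According to the runner hunks above, FMask_Runner_Landsat.calc_cloudMask() now returns a GeoArray whose legend is taken from get_mask_classdefinition('mask_clouds'), and the L1A hunk reads it via cm_gA[:]. A minimal analogue of that result pattern (not the real geoarray API):

import numpy as np

FMASK_LEGEND = {'No Data': 0, 'Clear': 1, 'Cloud': 2, 'Shadow': 3, 'Snow': 4, 'Water': 5}  # values from the diff

class CloudMaskResultSketch:
    """Array-like object that also carries a class legend, mimicking the runner's return value."""
    def __init__(self, array, legend):
        self.array = array
        self.legend = legend       # set from get_mask_classdefinition('mask_clouds') in the real runner

    def __getitem__(self, idx):
        return self.array[idx]     # cm_gA[:] reads the full mask into memory

cm_gA = CloudMaskResultSketch(np.zeros((10, 10), dtype=np.uint8), FMASK_LEGEND)
mask_clouds = cm_gA[:]             # mirrors `mask_clouds = cm_gA[:]` in the L1A hunk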
@@ -134,6 +134,7 @@ class Job:
self.profiling = profiling
self.benchmark_global = bench_all
self.bench_CLD_class = bench_cloudMask
self.cloud_masking_algorithm = 'FMASK' # 'FMASK', 'Classical Bayesian', 'SICOR'
self.SZA_SAA_calculation_accurracy = 'coarse' # hardcoded
self.export_VZA_SZA_SAA_RAA_stats = True # hardcoded
self.export_L1C_obj_dumps = False # hardcoded
......
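The config hunk above adds Job.cloud_masking_algorithm with 'FMASK' hardcoded as the default. A sketch of how that setting could be validated at configuration time, as the TODO in the L1A hunk suggests; JobSketch and its validation are hypothetical, only the attribute name and allowed values come from the diff:

class JobSketch:
    VALID_CLOUD_MASKING_ALGORITHMS = ('FMASK', 'Classical Bayesian', 'SICOR')

    def __init__(self, cloud_masking_algorithm='FMASK'):  # 'FMASK' is the hardcoded default in the diff
        if cloud_masking_algorithm not in self.VALID_CLOUD_MASKING_ALGORITHMS:
            raise ValueError('Unsupported cloud masking algorithm: %r' % cloud_masking_algorithm)
        self.cloud_masking_algorithm = cloud_masking_algorithm

job = JobSketch()
print(job.cloud_masking_algorithm)  # -> 'FMASK'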
@@ -6,6 +6,8 @@ import re
import numpy as np
from ..config import GMS_config as CFG
########################### General dicts #####################################
dtype_lib_Python_IDL = {'bool_':0, 'uint8':1, 'int8':1, 'int_':1, 'int16':2, 'uint16':12, 'int32':3, 'uint32':13,
'int64':14, 'uint64':15, 'float32':4, 'float64':5, 'complex_':6, 'complex64':9}
@@ -78,18 +80,31 @@ def get_mask_classdefinition(maskname):
return {'No data': 0,
'Data' : 1 }
elif maskname == 'mask_clouds':
#return {'Clear' : 10,
# 'Thick Clouds' : 20,
# 'Thin Clouds' : 30,
# 'Snow' : 40 }
return {'Clear' : 10,
'Water' : 20,
'Shadow' : 30,
'Cirrus' : 40,
'Cloud' : 50,
'Snow' : 60}
legends = {
'FMASK':{
'No Data': 0,
'Clear' : 1,
'Cloud' : 2,
'Shadow' : 3,
'Snow' : 4,
'Water' : 5}, #{'Clear Land': 0, 'Clear Water': 1, 'Cloud Shadow': 2, 'Snow': 3, 'Cloud': 4, 'No data': 255} # seems to be outdated
'Classical Bayesian':{
'Clear' : 10,
'Thick Clouds' : 20,
'Thin Clouds' : 30,
'Snow' : 40 }, # Classical Bayesian py_tools_ah
'SICOR':{
'Clear' : 10,
'Water' : 20,
'Shadow': 30,
'Cirrus': 40,
'Cloud' : 50,
'Snow' : 60} # SICOR
}
return legends[CFG.job.cloud_masking_algorithm]
else:
raise ValueError('%s is not a supported mask name.')
raise ValueError("'%s' is not a supported mask name." %maskname)
def get_mask_colormap(maskname):
......
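The last hunk reworks get_mask_classdefinition() so that 'mask_clouds' returns one legend per algorithm, selected by the configured algorithm name. A self-contained sketch of that lookup with the legend values copied from the diff; the CFG.job access is replaced by a plain argument:

def get_mask_classdefinition_sketch(maskname, cloud_masking_algorithm):
    """Return the class-value legend for the given mask name (values as in the diff)."""
    if maskname == 'mask_nodata':
        return {'No data': 0, 'Data': 1}
    elif maskname == 'mask_clouds':
        legends = {
            'FMASK':              {'No Data': 0, 'Clear': 1, 'Cloud': 2, 'Shadow': 3, 'Snow': 4, 'Water': 5},
            'Classical Bayesian': {'Clear': 10, 'Thick Clouds': 20, 'Thin Clouds': 30, 'Snow': 40},
            'SICOR':              {'Clear': 10, 'Water': 20, 'Shadow': 30, 'Cirrus': 40, 'Cloud': 50, 'Snow': 60},
        }
        return legends[cloud_masking_algorithm]
    else:
        raise ValueError("'%s' is not a supported mask name." % maskname)

print(get_mask_classdefinition_sketch('mask_clouds', 'FMASK'))  # {'No Data': 0, 'Clear': 1, ...}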