Commit 588c814d authored by Daniel Scheffler

Flink compatibility update 2

GEOP:
    - moved get_lonlat_coord_array(), calc_VZA_array(), calc_AcqTime_array(), calc_SZA_SAA(), calc_SZA_SAA_array() and calc_RAA_array() outside of the GEOPROCESSING object, which makes them callable without disk access (see the usage sketch below, just before the diff)
    - added GEOP.get_subsetProps_from_shapeFullArr_arrPos()
    - added GEOP.get_subsetProps_from_subsetArg()
L1A_P.L1A_object / L1C_P.L1C_object:
    - combined calc_VZA_array(), calc_SZA_SAA_array() and calc_RAA_array() into L1C_P.L1C_object.calc_acquisition_illumination_geometry()
    - moved get_lonlat_coord_array() to L1C_P.L1C_object
    - L1A_P: some bugfixes
META: renamed Meta2SpyfileheaderMeta() to Meta2ODict()
INP_R:
    - added read_mask_subset()
    - added quiet mode to read_ENVI_image_data_as_array()
PG:
    - added docstrings
    - added get_path_maskdata()
parent 1cfc573a
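
A minimal usage sketch (not part of the commit) of how two of the relocated GEOP functions can now be driven from in-memory object attributes instead of a file on disk. The module aliases and call signatures follow the L1C_P diff below; the wrapper function and its arguments are illustrative assumptions.

import numpy as np
import algorithms.GEOPROCESSING_BD as GEOP
import misc.helper_functions as HLP_F

def lonlat_and_vza_in_memory(obj, subset_pos, mask_1bit_sub, logger):
    # obj: an L1A/L1C object with MetaObj, meta, shape_fullArr and trueDataCornerPos set;
    # subset_pos: ((rowStart, rowEnd), (colStart, colEnd)); mask_1bit_sub: nodata mask for that window
    fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
    gt = GEOP.mapinfo2geotransform(obj.MetaObj.map_info)           # geotransform derived from ENVI map info
    lonlat_arr = GEOP.get_lonlat_coord_array(obj.shape_fullArr, subset_pos, gt,
                                             obj.MetaObj.projection, mask_1bit_sub, fillVal)[0]
    VZA_arr = GEOP.calc_VZA_array(obj.shape_fullArr, subset_pos, obj.trueDataCornerPos,
                                  float(obj.meta['ViewingAngle']), float(obj.meta['FieldOfView']),
                                  logger, mask_1bit_sub, fillVal)
    return lonlat_arr, VZA_arr
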
......@@ -84,7 +84,7 @@ class L1A_object(object):
self.outInterleave = 'bsq'
self.LayerBandsAssignment = ''
self.dict_LayerOptTherm = None
self.lonlat_arr = None
self.VAA_mean = None # set by self.calc_mean_VAA()
self.corner_lonlat = None
self.trueDataCornerPos = [] # set by self.calc_corner_positions()
self.trueDataCornerLonLat = [] # set by self.calc_corner_positions()
......@@ -97,7 +97,7 @@ class L1A_object(object):
self.path_Outfile_L1A = None
self.path_Outfile_L1A_masks = None
self.MetaObj = None # set by self.get_MetaObj()
self.meta = None # set by self.MetaObj2SpyfileheaderMeta()
self.meta = None # set by self.MetaObj2ODict()
self.GeoTransProj_ok = True # set by self.validate_GeoTransProj_GeoAlign()
self.GeoAlign_ok = True # set by self.validate_GeoTransProj_GeoAlign()
self.mask_1bit = None # set by self.calc_mask_nodata()
......@@ -201,7 +201,7 @@ class L1A_object(object):
if job.exec_mode=='Flink':
PG_obj = PG.path_generator(self.__dict__)
self.arr = INP_R.read_ENVIfile(get_hdr(PG_obj.get_path_imagedata()),self.arr_shape,self.arr_pos,self.logger)
self.mask_clouds = INP_R.read_ENVI_image_data_as_array(get_hdr(self.path_Outfile_L1A_masks),'band',1,self.logger)
self.mask_clouds = INP_R.read_mask_subset(PG_obj.get_path_maskdata(),'mask_clouds',self.logger,tuple_GMS_subset[1])
if self.arr_pos: self.logger.info('Reading file: %s @ position %s' %(self.baseN, self.arr_pos))
else: self.logger.info('Reading file: %s' %self.baseN)
......@@ -570,8 +570,8 @@ class L1A_object(object):
proc_opt_therm = sorted(list(set(self.dict_LayerOptTherm.values())))
assert proc_opt_therm in [['optical','thermal'],['optical'],['thermal']]
if subset is None and (self.arr_shape=='cube' or [self.arr_pos[0][0], self.arr_pos[1][0]] == [0,0]) or \
subset==['cube',None] or (subset and [subset[1][0][0], subset[1][1][0]]==[0,0]): # cube or 1st tile
if subset is None and self.arr_shape=='cube' or (subset and [subset[1][0][0], subset[1][1][0]]==[0,0]) or \
(self.arr_pos and [self.arr_pos[0][0], self.arr_pos[1][0]] == [0, 0]): # cube or 1st tile
if len(proc_opt_therm)==1:
temp_logger.info('Calculating %s...' %getattr(usecase,'conversion_type_%s' %proc_opt_therm[0]))
else:
......@@ -682,7 +682,8 @@ class L1A_object(object):
'%s_%s' %(usecase.conversion_type_optical, usecase.conversion_type_thermal)
if job.exec_mode=='Flink' and subset is None:
self.arr=dataOut
self.arr = dataOut
self.arr_desc = tiles_desc
else:
return {'desc': tiles_desc, 'row_start': rS, 'row_end': rE, 'col_start': cS, 'col_end': cE, 'data': dataOut}
......@@ -752,7 +753,7 @@ class L1A_object(object):
if rasObj.get_projection_type() == 'UTM':
self.MetaObj.CornerTieP_UTM = rasObj.get_corner_coordinates('UTM')
self.meta = self.MetaObj.Meta2SpyfileheaderMeta() # important in order to keep geotransform/projection
self.meta = self.MetaObj.Meta2ODict() # important in order to keep geotransform/projection
if job.exec_mode=='Flink':
self.delete_tempFiles() # these files are needed later in Python execution mode
self.MetaObj.Dataname = previous_dataname # /vsi.. pointing directly to raw data archive (which exists)
......@@ -762,12 +763,15 @@ class L1A_object(object):
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
temp_logger.info('Calculating nodata mask...')
nodataVal = custom_nodataVal if custom_nodataVal else \
self.MetaObj.spec_vals['fill'] if hasattr(self,'MetaObj') else HLP_F.get_outFillZeroSaturated(np.int16)[0] # -9999
self.MetaObj.spec_vals['fill'] if hasattr(self,'MetaObj') and self.MetaObj else \
HLP_F.get_outFillZeroSaturated(np.int16)[0] # -9999
get_mask = lambda arr,nodata: np.all(np.where(arr==nodata,0,1),axis=2)
if hasattr(self,'arr') and isinstance(self.arr,np.ndarray):
self.mask_1bit = get_mask(self.arr,nodataVal)
else:
in_arr = np.swapaxes(np.swapaxes(gdalnumeric.LoadFile(self.MetaObj.Dataname),0,2),0,1)
path_arr = PG.path_generator(self.__dict__.copy()).get_path_imagedata()
in_arr = np.swapaxes(np.swapaxes(gdalnumeric.LoadFile(path_arr),0,2),0,1)
self.mask_1bit = get_mask(in_arr,nodataVal)
def calc_mask_nodataOLD(self, subset):
......@@ -930,10 +934,10 @@ class L1A_object(object):
self.MetaObj.spec_vals['fill'],self.MetaObj.spec_vals['zero'],self.MetaObj.spec_vals['saturated'] = \
HLP_F.get_outFillZeroSaturated(dtype)
def MetaObj2SpyfileheaderMeta(self):
def MetaObj2ODict(self):
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
temp_logger.info('Preparing extracted metadata to be written to disk...')
self.meta = self.MetaObj.Meta2SpyfileheaderMeta()
self.meta = self.MetaObj.Meta2ODict()
self.path_Outfile_L1A = self.MetaObj.Dataname
def apply_nodata_mask_to_ObjAttr(self, attrname, custom_nodata_val=None):
......@@ -990,76 +994,12 @@ class L1A_object(object):
return {'desc': 'masks', 'row_start': 0, 'row_end': self.shape_fullArr[0],
'col_start': 0, 'col_end': self.shape_fullArr[1], 'data': self.masks}
def get_lonlat_coord_array(self,subset):
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
rasObj = GEOP.GEOPROCESSING(self.path_Outfile_L1A, temp_logger, subset=subset)
if hasattr(self,'mask_1bit') and self.mask_1bit.shape == [rasObj.rows,rasObj.cols]:
mask_1bit_temp = self.mask_1bit
elif hasattr(self,'mask_1bit') and self.mask_1bit.shape == self.shape_fullArr[:2]:
mask_1bit_temp = self.mask_1bit[rasObj.rowStart:rasObj.rowEnd+1, rasObj.colStart:rasObj.colEnd+1]
else:
mask_1bit_temp = rasObj.calc_mask_data_nodata(custom_nodataVal=-9999) # FIXME rechnet auf FullArray
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
data = rasObj.get_lonlat_coord_array(assignNoData=[mask_1bit_temp,fillVal])[0]
return {'desc': 'lonlat_arr', 'row_start': rasObj.rowStart, 'row_end': rasObj.rowEnd,
'col_start': rasObj.colStart, 'col_end': rasObj.colEnd, 'data': data}
def calc_VZA_array(self,subset):
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
rasObj = GEOP.GEOPROCESSING(self.path_Outfile_L1A, temp_logger, subset=subset)
if hasattr(self,'mask_1bit') and self.mask_1bit.shape == [rasObj.rows,rasObj.cols]:
mask_1bit_temp = self.mask_1bit
elif hasattr(self,'mask_1bit') and self.mask_1bit.shape == self.shape_fullArr[:2]:
mask_1bit_temp = self.mask_1bit[rasObj.rowStart:rasObj.rowEnd+1, rasObj.colStart:rasObj.colEnd+1]
else:
mask_1bit_temp = rasObj.calc_mask_data_nodata(custom_nodataVal=-9999)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
#print(self.meta)
#sys.exit()
data = rasObj.calc_VZA_array(float(self.meta['ViewingAngle']), float(self.meta['FieldOfView']),
self.trueDataCornerPos,self.shape_fullArr, [mask_1bit_temp,fillVal])
return {'desc': 'VZA_arr', 'row_start': rasObj.rowStart, 'row_end': rasObj.rowEnd,
'col_start': rasObj.colStart, 'col_end': rasObj.colEnd, 'data': data}
def calc_SZA_SAA_array(self,subset):
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
rasObj = GEOP.GEOPROCESSING(self.path_Outfile_L1A, temp_logger, subset=subset)
if hasattr(self,'mask_1bit') and self.mask_1bit.shape == [rasObj.rows,rasObj.cols]:
mask_1bit_temp = self.mask_1bit
elif hasattr(self,'mask_1bit') and self.mask_1bit.shape == self.shape_fullArr[:2]:
mask_1bit_temp = self.mask_1bit[rasObj.rowStart:rasObj.rowEnd+1, rasObj.colStart:rasObj.colEnd+1]
else:
mask_1bit_temp = rasObj.calc_mask_data_nodata(custom_nodataVal=-9999)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
SZA_data, SAA_data = rasObj.calc_SZA_SAA_array(self.meta['AcqDate'], self.meta['AcqTime'],self.trueDataCornerPos,
self.trueDataCornerLonLat,self.shape_fullArr, self.meta['overpass duraction sec'],
assignNoData=[mask_1bit_temp,fillVal],accurracy=job.SZA_SAA_calculation_accurracy,
lonlat_arr=self.lonlat_arr if job.SZA_SAA_calculation_accurracy=='fine' else None)
return ({'desc': 'SZA_arr', 'row_start': rasObj.rowStart, 'row_end': rasObj.rowEnd,
'col_start': rasObj.colStart, 'col_end': rasObj.colEnd, 'data': SZA_data},
{'desc': 'SAA_arr', 'row_start': rasObj.rowStart, 'row_end': rasObj.rowEnd,
'col_start': rasObj.colStart, 'col_end': rasObj.colEnd, 'data': SAA_data})
def calc_mean_VAA(self):
"""Calculates mean viewing azimuth angle using sensor flight line derived from corner coordinates."""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
self.VAA_mean = GEOP.calc_VAA_using_trueCornerLonLat(self.trueDataCornerLonLat)
temp_logger.info('Calculation of mean VAA...: %s' % round(self.VAA_mean, 2))
def calc_RAA_array(self,subset):
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
rasObj = GEOP.GEOPROCESSING(self.path_Outfile_L1A, temp_logger, subset=subset)
if hasattr(self,'mask_1bit') and self.mask_1bit.shape == [rasObj.rows,rasObj.cols]:
mask_1bit_temp = self.mask_1bit
elif hasattr(self,'mask_1bit') and self.mask_1bit.shape == self.shape_fullArr[:2]:
mask_1bit_temp = self.mask_1bit[rasObj.rowStart:rasObj.rowEnd+1, rasObj.colStart:rasObj.colEnd+1]
else:
mask_1bit_temp = rasObj.calc_mask_data_nodata(custom_nodataVal=-9999)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
data = rasObj.calc_RAA_array(self.trueDataCornerLonLat, self.SAA_arr, self.VAA_mean,
assignNoData=[mask_1bit_temp,fillVal])
return {'desc': 'RAA_arr', 'row_start': rasObj.rowStart, 'row_end': rasObj.rowEnd,
'col_start': rasObj.colStart, 'col_end': rasObj.colEnd, 'data': data}
def combine_tiles_to_ObjAttr(self, tiles, target_attr):
"""Combines tiles, e.g. produced by L1A_P.L1A_object.DN2RadRefTemp() to a single attribute.
If usecase.job.exec_mode == 'Python' the produced attribute is additionally written to disk.
......@@ -1105,7 +1045,7 @@ class L1A_object(object):
temp_logger.info("Writing tiles '%s' temporarily to disk..." % tiles[0]['desc'])
outpath = os.path.join(self.ExtractedFolder, '%s__%s.%s' %(self.baseN, tiles[0]['desc'], self.outInterleave))
if usecase.conversion_type_optical in tiles[0]['desc'] or usecase.conversion_type_thermal in tiles[0]['desc']:
self.meta = self.MetaObj.Meta2SpyfileheaderMeta() # important in order to keep geotransform/projection
self.meta = self.MetaObj.Meta2ODict() # important in order to keep geotransform/projection
self.arr_desc = tiles[0]['desc']
self.arr = outpath
# self.arr = os.path.abspath('./testing/out/%s_TOA_Ref.bsq' % self.baseN)
......
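
As a side note on the reworked calc_mask_nodata() above: a self-contained sketch (not part of the commit) of the masking rule it applies, runnable with NumPy alone; the dummy array is purely illustrative.

import numpy as np

nodata = -9999                                   # same fill value as HLP_F.get_outFillZeroSaturated(np.int16)[0]
arr = np.full((3, 3, 2), 100, dtype=np.int16)    # rows x cols x bands
arr[0, 0, :] = nodata                            # pixel with nodata in every band
arr[1, 1, 0] = nodata                            # pixel with nodata in one band only

# a pixel counts as data (1) only if no band equals the nodata value
mask_1bit = np.all(np.where(arr == nodata, 0, 1), axis=2)
print(mask_1bit.astype(int))
# [[0 1 1]
#  [1 0 1]
#  [1 1 1]]
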
......@@ -21,17 +21,113 @@
###############################################################################
########################### Library import ####################################
import glob
import logging
import numpy as np
import os
import builtins
try:
from osgeo import osr
except ImportError:
import osr
import misc.helper_functions as HLP_F
import algorithms.GEOPROCESSING_BD as GEOP
import gms_io.Input_reader as INP_R
import misc.path_generator as PG
job = builtins.GMS_config.job
########################### core functions ####################################
class L1C_object(object):
#"""@DynamicAttrs"""
def __init__(self, L1B_obj):
[setattr(self, key, value) for key,value in L1B_obj.__dict__.items()]
self.proc_level = 'L1C'
self.logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
self.lonlat_arr = None # set by self.get_lonlat_coord_array()
self.VZA_arr = None # set by self.calc_VZA_array()
self.SZA_arr = None # set by self.calc_SZA_SAA_array()
self.SAA_arr = None # set by self.calc_SZA_SAA_array()
self.RAA_arr = None # set by self.calc_RAA_array()
def get_lonlat_coord_array(self, subset=None):
"""Calculates pixelwise 2D-array with longitude and latitude coordinates.Supports 3 modes for subsetting:
(1) called with given subset argument if self.mask_1bit is an array: passes array subset
(2) called with given subset argument if self.mask_1bit is NO array: reads mask subset from disk
(3) called without subset argument but L1A obj represents a block subset: self.arr_pos is passed for subsetting"""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
subset = subset if subset else [self.arr_shape, self.arr_pos]
assert subset[0] in ['cube', 'block'], "'%s' subset is not supported." % subset[0]
rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[3:7]
if subset is None and self.arr_shape == 'cube' or (subset and [subset[1][0][0], subset[1][1][0]] == [0, 0]) or \
(self.arr_pos and [self.arr_pos[0][0], self.arr_pos[1][0]] == [0, 0]): # cube or 1st tile
temp_logger.info('Calculating lonlat array.')
if hasattr(self, 'mask_1bit') and isinstance(self.mask_1bit, np.ndarray):
mask_1bit_temp = self.mask_1bit[rS:rE + 1, cS:cE + 1]
else:
path_masks = PG.path_generator(self.__dict__.copy()).get_path_maskdata()
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_1bit', temp_logger, subset)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
lonlat_arr = GEOP.get_lonlat_coord_array(self.shape_fullArr, subset[1],
GEOP.mapinfo2geotransform(self.MetaObj.map_info),
self.MetaObj.projection, mask_1bit_temp, fillVal)[0]
if job.exec_mode == 'Flink' and subset is None:
self.lonlat_arr = lonlat_arr
else:
return {'desc': 'lonlat_arr', 'row_start': rS, 'row_end': rE, 'col_start': cS, 'col_end': cE,
'data': lonlat_arr}
def calc_acquisition_illumination_geometry(self, subset=None):
"""Calculates pixelwise arrays for viewing zenith angle (VZA), sun zenith angle (SZA),
sun azimuth angle (SAA) and relative azimuth angle (RAA).
Supports 3 modes for subsetting:
(1) called with given subset argument if self.mask_1bit is an array: passes array subset
(2) called with given subset argument if self.mask_1bit is NO array: reads mask subset from disk
(3) called without subset argument but L1A obj represents a block subset: self.arr_pos is passed for subsetting"""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
subset = subset if subset else [self.arr_shape, self.arr_pos]
assert subset[0] in ['cube', 'block'], "'%s' subset is not supported." % subset[0]
rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[3:7]
print(subset, self.arr_pos)
if subset is None and self.arr_shape == 'cube' or (subset and [subset[1][0][0], subset[1][1][0]] == [0, 0]) or \
(self.arr_pos and [self.arr_pos[0][0], self.arr_pos[1][0]] == [0, 0]): # cube or 1st tile
temp_logger.info('Calculating acquisition and illumination geometry arrays.')
if hasattr(self, 'mask_1bit') and isinstance(self.mask_1bit, np.ndarray):
mask_1bit_temp = self.mask_1bit[rS:rE + 1, cS:cE + 1]
else:
path_masks = PG.path_generator(self.__dict__.copy()).get_path_maskdata()
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_1bit', temp_logger, subset)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
assert self.meta, "Missing 'meta' attribute. Run self.Meta2ODict() first!"
VZA_arr = GEOP.calc_VZA_array(self.shape_fullArr, subset[1], self.trueDataCornerPos,
float(self.meta['ViewingAngle']), float(self.meta['FieldOfView']),
temp_logger, mask_1bit_temp, fillVal)
SZA_arr, SAA_arr = GEOP.calc_SZA_SAA_array(self.shape_fullArr, subset[1], self.meta['AcqDate'],
self.meta['AcqTime'], self.trueDataCornerPos,
self.trueDataCornerLonLat, self.meta['overpass duraction sec'],
temp_logger, mask_1bit_temp, fillVal,
accurracy=job.SZA_SAA_calculation_accurracy,
lonlat_arr=self.lonlat_arr
if job.SZA_SAA_calculation_accurracy == 'fine' else None)
RAA_arr = GEOP.calc_RAA_array(self.trueDataCornerLonLat, SAA_arr, self.VAA_mean, mask_1bit_temp, fillVal)
if job.exec_mode == 'Flink' and subset is None:
self.VZA_arr, self.SZA_arr, self.SAA_arr, self.RAA_arr = VZA_arr, SZA_arr, SAA_arr, RAA_arr
else:
return ({'desc': 'VZA_arr', 'row_start': rS, 'row_end': rE, 'col_start': cS, 'col_end': cE, 'data': VZA_arr},
{'desc': 'SZA_arr', 'row_start': rS, 'row_end': rE, 'col_start': cS, 'col_end': cE, 'data': SZA_arr},
{'desc': 'SAA_arr', 'row_start': rS, 'row_end': rE, 'col_start': cS, 'col_end': cE, 'data': SAA_arr},
{'desc': 'RAA_arr', 'row_start': rS, 'row_end': rE, 'col_start': cS, 'col_end': cE, 'data': RAA_arr})
def dummy_atm_corr(L1B_obj):
''' Performs an atmospheric correction and returns atmospherically corrected reflectance data.'''
temp_logger = HLP_F.setup_logger('log__' + L1B_obj.baseN, L1B_obj.path_logfile, L1B_obj.job_CPUs, append=1)
"""Performs an atmospheric correction and returns atmospherically corrected reflectance data."""
temp_logger = HLP_F.setup_logger('log__' + L1B_obj.baseN, L1B_obj.path_logfile, append=1)
temp_logger.info('Dummy Level 1C Processing started.')
SRF_dict = _SRF_reader(SRF_fold,RSD_md_L1B)
#SRF_dict = INP_R.SRF_reader(SRF_fold,RSD_md_L1B)
L1B_obj.arr = L1B_obj.arr/2
......@@ -40,7 +40,8 @@ job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from
# + Input Reader (has to be left out here in order to avoid circular dependencies)
class METADATA(object):
def __init__(self, satellite, subsystem, scene_ID, stacked_image, data_folderOrArchive ,logger, LayerBandsAssignment=[]):
def __init__(self, satellite, subsystem, scene_ID, stacked_image, data_folderOrArchive ,logger,
LayerBandsAssignment=None):
"""**** CREATE OBJECT ****************************************************"""
#****OBJECT ATTRIBUTES***************************************************
self.Dataname = stacked_image
......@@ -49,7 +50,7 @@ class METADATA(object):
self.EntityID = "" # ID to identify the original scene
self.SceneID = scene_ID # postgreSQL-database identifier
self.Satellite = ""
self.Satellite_GMS = ""
self.Satellite_GMS = "" # FIXME deprecated
self.Sensor = ""
self.Subsystem = "" if subsystem is None else subsystem
self.Sensormode = ""
......@@ -1144,7 +1145,7 @@ class METADATA(object):
logger.error("Physical unit could not be determined due to unexpected 'dict_LayerOptTherm'. Got %s." \
%dict_LayerOptTherm)
def Meta2SpyfileheaderMeta(self):
def Meta2ODict(self):
Meta = collections.OrderedDict()
# descr_dic = { ### FillZeroSaturated von HLP_F ausgeben lassen
# 'ALOS_Rad' :"(1) GEOCODED Level1B2 Product; '"+self.Dataname+"'\n (2) Int16 RadianceData in [W * m-2 * sr-1 * micrometer-1]*10; radiance scale factor: 10 (fillPixels: -99, zeroPixels:0, saturatedPixels: 32767 (Max of Int16))'",
......
......@@ -46,7 +46,7 @@ class out_object(object):
self.meta = read_ENVIhdr_to_dict(os.path.splitext(self.path_Outfile_L1A)[0]+'.hdr', self.logger)
# Methods
if self.proc_level == 'L1A':
if self.proc_level == 'L1C':
from algorithms.L1A_P import L1A_object
self.get_lonlat_coord_array = getattr(L1A_object,'get_lonlat_coord_array')
# for method in dir(L1A_object):
......@@ -78,12 +78,13 @@ def read_ENVIhdr_to_dict(hdr_path, logger=None):
SpyFileheader = envi.open(hdr_path)
return SpyFileheader.metadata
def read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=None, return_meta=False):
def read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=None, return_meta=False,q=0):
if not os.path.isfile(hdr_path):
if logger is not None: logger.critical('read_ENVIfile: Input data not found at %s.'%hdr_path)
else: print ('read_ENVIfile: Input data not found at %s.'%hdr_path)
if not q:
if logger: logger.critical('read_ENVIfile: Input data not found at %s.'%hdr_path)
else: print ('read_ENVIfile: Input data not found at %s.'%hdr_path)
else:
if logger is not None: logger.info('Reading %s ...' %(os.path.basename(hdr_path)))
if logger and not q: logger.info('Reading %s ...' %(os.path.basename(hdr_path)))
File_obj = spectral.open_image(hdr_path)
SpyFileheader = envi.open(hdr_path)
if arr_shape == 'cube':
......@@ -112,12 +113,30 @@ def read_ENVI_image_data_as_array(hdr_path,arr_shape,arr_pos,logger=None, return
image_data = \
File_obj.read_subregion ((arr_pos[0][0],arr_pos[0][1]+1),(arr_pos[1][0],arr_pos[1][1]+1),arr_pos[2])
else:
if logger is not None: logger.critical("Array shape '%s' is not known. Known array shapes are cube, row, "\
"col, band, block, pixel, custom." %arr_shape); sys.exit()
if logger: logger.critical("Array shape '%s' is not known. Known array shapes are cube, row, "\
"col, band, block, pixel, custom." %arr_shape); sys.exit()
else: print("Array shape '%s' is not known. Known array shapes are cube, row, col, band, block, pixel, "\
"custom." %arr_shape); sys.exit()
return (image_data, SpyFileheader.metadata) if return_meta else image_data
def read_mask_subset(path_masks,bandname,logger,subset=None):
subset = subset if subset else ['cube',None]
assert subset[0] in ['cube','block'], "INP_R.read_mask_subset(): '%s' subset is not supported." % subset[0]
path_masks_hdr = os.path.splitext(path_masks)[0] + '.hdr'
hdrDict = read_ENVIhdr_to_dict(path_masks_hdr, logger)
(rS,rE),(cS, cE)= ((0,hdrDict['lines']),(0,hdrDict['samples'])) if subset[0]=='cube' else subset[1]
band_idx_nodata = hdrDict['band names'].index(bandname) if bandname in hdrDict['band names'] else None
if band_idx_nodata is None:
logger.warning("No band called '%s' in %s. Using first band." %(bandname,path_masks))
band_idx_nodata = 0
if subset is None or subset[0] == 'cube':
mask_sub = read_ENVI_image_data_as_array(path_masks_hdr, 'band', band_idx_nodata, logger=logger, q=1)
else:
mask_sub = read_ENVI_image_data_as_array(
path_masks_hdr, 'custom', ((rS, rE), (cS, cE), [band_idx_nodata]), logger=logger, q=1)
mask_sub = mask_sub[:,:,0] if len(mask_sub.shape)==3 and mask_sub.shape[2]==1 else mask_sub
return mask_sub
def GMSfile2dict(path_GMSfile):
""" Converts a JSON file (like the GMS file) to a Python dictionary with keys and values.
:param path_GMSfile:
......
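
A hedged usage sketch (not part of the commit) for the new INP_R.read_mask_subset(), which reads one named mask band either as a full cube or as a block window and internally passes the new q=1 quiet flag to read_ENVI_image_data_as_array(). Only the call signature and the band names come from the diff above; the wrapper function, logger and window are illustrative.

import logging
import gms_io.Input_reader as INP_R
import misc.path_generator as PG

def load_mask_subsets(GMS_obj):
    # GMS_obj: an L1A/L1C object whose attributes feed the path generator
    logger = logging.getLogger('read_mask_subset_demo')
    path_masks = PG.path_generator(GMS_obj.__dict__.copy()).get_path_maskdata()
    # full mask band (subset defaults to ['cube', None])
    mask_clouds = INP_R.read_mask_subset(path_masks, 'mask_clouds', logger)
    # block window: rows 0-499, columns 0-499
    mask_1bit = INP_R.read_mask_subset(path_masks, 'mask_1bit', logger,
                                       ['block', ((0, 499), (0, 499))])
    return mask_clouds, mask_1bit
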
......@@ -36,17 +36,21 @@ class path_generator(object):
else args[7] if not isdict and len(args)==8 else None
def get_path_procdata(self):
"""Returns the target folder of all processed data for the current scene."""
return os.path.join(job.path_procdata, self.satellite, self.sensor, self.AcqDate.strftime('%Y-%m-%d'),
self.entity_ID)
def get_baseN(self):
"""Returns the basename belonging to the given scene."""
return self.sensor+'__'+self.entity_ID if self.subsystem in ['',None] else '__'.join(
[self.sensor,self.subsystem,self.entity_ID])
def get_path_logfile(self):
"""Returns the path of the logfile belonging to the given scene, e.g. '/path/to/file/file.log'."""
return os.path.join(self.get_path_procdata(), self.get_baseN()+'.log')
def get_local_archive_path_baseN(self): # must be callable from L0A-P
"""Returns the path of the downloaded raw data archive, e.g. '/path/to/file/file.tar.gz'."""
if self.image_type == 'RSD' and self.satellite:
folder_rawdata = os.path.join(job.path_archive,self.satellite,self.sensor)
extensions_found = [ext for ext in ['.tar.gz','.zip','.hdf'] \
......@@ -68,11 +72,18 @@ class path_generator(object):
'image_type: %s; satellite: %s; sensor: %s' %(self.image_type,self.satellite,self.sensor))
def get_path_gmsfile(self):
"""Returns the path of the .gms file belonging to the given processing level, e.g. '/path/to/file/file.gms'."""
return os.path.join(self.get_path_procdata(),'%s_%s.gms' %(self.get_baseN(), self.proc_level))
def get_path_imagedata(self):
"""Returns the path of the .bsq file belonging to the given processing level, e.g. '/path/to/file/file.bsq'."""
return os.path.join(self.get_path_procdata(),'%s_%s.bsq' %(self.get_baseN(), self.proc_level))
def get_path_maskdata(self):
"""Returns the path of the *_masks_*.bsq file belonging to the given processing level,
e.g. '/path/to/file/file_masks_L1A.bsq'."""
return os.path.join(self.get_path_procdata(), '%s_masks_%s.bsq' % (self.get_baseN(), self.proc_level))
def get_path_tempdir(self):
path_archive = self.get_local_archive_path_baseN()
RootName = os.path.splitext(os.path.basename(path_archive))[0]
......
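
For orientation, a short sketch (not part of the commit) of the sibling output paths produced by path_generator, including the new get_path_maskdata(); the object handed in is a placeholder and the commented paths only illustrate the naming pattern documented in the docstrings above.

import misc.path_generator as PG

PG_obj = PG.path_generator(GMS_obj.__dict__)   # GMS_obj: any GMS object (placeholder)
PG_obj.get_path_imagedata()                    # e.g. .../<baseN>_L1A.bsq
PG_obj.get_path_maskdata()                     # e.g. .../<baseN>_masks_L1A.bsq
PG_obj.get_path_gmsfile()                      # e.g. .../<baseN>_L1A.gms
PG_obj.get_path_logfile()                      # e.g. .../<baseN>.log
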
......@@ -79,7 +79,7 @@ def L0B_L1A_map(data_list_item): #map (scene-wise parallelization)
L1A_obj.calc_center_AcqTime() # (if neccessary); requires corner positions
L1A_obj.calc_mean_VAA()
L1A_obj.calc_orbit_overpassParams() # requires corner positions
L1A_obj.MetaObj2SpyfileheaderMeta()
L1A_obj.MetaObj2ODict()
L1A_obj.apply_nodata_mask_to_ObjAttr('mask_clouds',0)
L1A_obj.build_L1A_masks()
if job.exec__L1AP[1]:
......@@ -109,7 +109,7 @@ def L1A_map_3(L1A_obj): #map (scene-wise parallelization)
L1A_obj.calc_center_AcqTime() # (if neccessary); requires corner positions
L1A_obj.calc_mean_VAA()
L1A_obj.calc_orbit_overpassParams() # requires corner positions
L1A_obj.MetaObj2SpyfileheaderMeta()
L1A_obj.MetaObj2ODict()
L1A_obj.apply_nodata_mask_to_ObjAttr('mask_clouds',0)
L1A_obj.build_L1A_masks()
if job.exec__L1AP[1]:
......
......@@ -98,7 +98,7 @@ def run_processController_in_singleprocessing(usecase_data_list):
nodata_tiles.append(i.calc_mask_nodata((['block', tb])))
i.combine_tiles_to_ObjAttr(nodata_tiles,'mask_1bit')
i.calc_corner_positions()
i.MetaObj2SpyfileheaderMeta()
i.MetaObj2ODict()
i.build_L1A_masks()
i.delete_tempFiles()
if job.exec__L1AP[1]:
......@@ -230,7 +230,7 @@ def run_processController_in_multiprocessing(usecase_data_list):
# i.calc_center_AcqTime() # (if neccessary); requires corner positions
# i.calc_mean_VAA()
# i.calc_orbit_overpassParams() # requires corner positions
# i.MetaObj2SpyfileheaderMeta()
# i.MetaObj2ODict()
# i.apply_nodata_mask_to_saved_ENVIfile(i.arr)
# i.apply_nodata_mask_to_ObjAttr('mask_clouds',0)
# masks = i.build_L1A_masks()
......@@ -266,7 +266,7 @@ def run_processController_in_multiprocessing(usecase_data_list):
i.calc_center_AcqTime() # (if neccessary); requires corner positions
i.calc_mean_VAA()
i.calc_orbit_overpassParams() # requires corner positions
i.MetaObj2SpyfileheaderMeta()
i.MetaObj2ODict()
i.apply_nodata_mask_to_saved_ENVIfile(i.arr)
i.apply_nodata_mask_to_ObjAttr('mask_clouds',0)
masks = i.build_L1A_masks()
......@@ -526,7 +526,7 @@ def run_processController_in_multiprocessing(usecase_data_list):
# i.combine_tiles_to_ObjAttr(nodata_tiles,'mask_1bit')
# # i.combine_tiles_to_ObjAttr(TOA_Ref_tiles,'mask_cloud')
# i.calc_corner_positions()
# i.MetaObj2SpyfileheaderMeta()
# i.MetaObj2ODict()
# i.arr = arr
# i.apply_nodata_mask_to_ObjAttr_to_arr()
# i.delete_tempFiles()
......
......@@ -114,7 +114,7 @@ def run_processController_in_multiprocessing(usecase_data_list):
obj.calc_center_AcqTime() # (if neccessary); requires corner positions
obj.calc_mean_VAA()
obj.calc_orbit_overpassParams() # requires corner positions
obj.MetaObj2SpyfileheaderMeta()
obj.MetaObj2ODict()
obj.apply_nodata_mask_to_saved_ENVIfile(obj.arr)
obj.apply_nodata_mask_to_ObjAttr('mask_clouds',0)
masks = obj.build_L1A_masks()
......