Commit 8abcf2ed authored by Daniel Scheffler

Python execution mode now fully operational || implemented functions for adding externally downloaded scenes to GMS database
GEOP:
    - fixed imports
    - fixed a deprecated keyword in warp_ndarray()
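      Note: the deprecated keyword is densify_pts, which newer rasterio versions only accept in
      transform_bounds(), no longer in calculate_default_transform() (see the TODO in the diff below).
      A minimal sketch of the updated call pattern, assuming rasterio >= 1.0 (CRS and bounds values
      are made up for illustration):

          from rasterio.crs import CRS
          from rasterio.warp import calculate_default_transform, transform_bounds

          src_crs, dst_crs = CRS.from_epsg(4326), CRS.from_epsg(32633)
          left, bottom, right, top = 11.0, 51.0, 12.0, 52.0
          cols, rows = 1000, 1000

          # densify_pts is no longer accepted here
          dst_transform, out_cols, out_rows = calculate_default_transform(
              src_crs, dst_crs, cols, rows, left, bottom, right, top)

          # densify_pts moved to transform_bounds()
          dense_bounds = transform_bounds(src_crs, dst_crs, left, bottom, right, top,
                                          densify_pts=21)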
L1A_P:
    - fixed imports
    - added 'Python' exec_mode to L1A_object.fill_from_disk()
    - fixed return values of calc_cloud_mask()
    - added some docstrings and type hints
    - fixed a bug where MetaObj.SpecVals were not updated after applying the no-data mask to an object attribute
    - revised build_L1A_masks() -> new version called build_combined_masks_array() (see the sketch after this list)
        -> no longer called by the process controller but only by the output writer
    - improved delete_tempfiles()
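      A rough numpy sketch of what build_combined_masks_array() produces (simplified illustration,
      not the actual implementation; mask names taken from the diff below):

          import numpy as np

          mask_1bit   = np.ones((4, 4))   # 0 = no data, 1 = data in good condition
          mask_clouds = np.zeros((4, 4))  # cloud classification

          # stack all available masks along a new band axis; uint8 can be written by spectral python
          masks = np.concatenate([m[:, :, None] for m in (mask_1bit, mask_clouds)],
                                 axis=2).astype(np.uint8)
          assert masks.shape == (4, 4, 2)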
L1B_P:
    - revised COREG.__init__() to a much clearer version -> all attributes now always exist (see the sketch below)
    - revised L1B_object.__init__()
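      The "all attributes always exist" pattern is simply to pre-declare every attribute up front
      (sketch only; attribute names taken from the diff below, the real __init__ sets many more):

          class COREG(object):
              def __init__(self):
                  # declare attributes first so later code never hits an AttributeError
                  self.win_pos = None     # set by self.get_opt_winpos_winsize()
                  self.win_size = None    # set by self.get_opt_winpos_winsize()
                  self.x_shift_px = None  # set by self.calculate_spatial_shifts()
                  self.success = False    # default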
L1C_P:
    - fixed a bug where the no-data mask was read from the wrong processing level
L1D_P:
    - L1D_P was deprecated -> removed
INP_R:
    - added some docstrings and type hints
    - read_ENVIfile(), read_ENVI_image_data_as_array() and read_mask_subset() now also support .bsq files as input paths
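      One possible way to accept .bsq input paths (hypothetical helper for illustration; the
      actual handling lives inside the reader functions):

          import os

          def to_envi_hdr(path):
              """Return the ENVI header path for a .hdr or a binary image path such as .bsq."""
              root, ext = os.path.splitext(path)
              return path if ext.lower() == '.hdr' else root + '.hdr'

          assert to_envi_hdr('/data/scene.bsq') == '/data/scene.hdr'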
OUT_W:  major revision!
    - deleted param_dic
    - Tiles_writer(): fixed a bug that prevented writing of 2D arrays
    - revision of reorder_ENVI_header() -> more stable, more efficient
    - revision of mask_to_ENVI_Classification()
    - revision of Obj2ENVI():
        - now also supports a tempfile mode that allows writing tile arrays to disk directly from within the mapper function (see the sketch after this list)
        - added image_type_dict, metaAttr_dict, out_dtype_dict
        - now directly creates InObj.masks on demand
        - revised writing of array attributes
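      A minimal numpy sketch of the tempfile idea (illustration only, not the actual Obj2ENVI()
      code): each mapper writes its tile into a shared memmap on disk instead of returning the
      full array, so big objects never have to be pickled:

          import numpy as np

          rows, cols, bands = 512, 512, 4
          out = np.memmap('scene__TEMPFILE', dtype=np.int16, mode='w+',
                          shape=(rows, cols, bands))

          tile = {'row_start': 0, 'row_end': 255, 'col_start': 0, 'col_end': 255,
                  'data': np.zeros((256, 256, bands), dtype=np.int16)}
          out[tile['row_start']:tile['row_end'] + 1,
              tile['col_start']:tile['col_end'] + 1, :] = tile['data']
          out.flush()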
DB_T:  multiple new functions for adding externally downloaded satellite scenes to GMS database
    - added many docstrings and type hints
    - added get_postgreSQL_matchingExp()
    - get_info_from_postgreSQLdb()
        - added a custom timeout keyword in order to allow non-static query timeouts (see the sketch after this list)
        - added the possibility to pass a list of values within the conditions dictionary
    - added set_info_in_postgreSQLdb()
    - added get_dict_satellite_name_id()
    - added get_dict_sensor_name_id()
    - added get_entityIDs_from_filename()
    - added get_filename_by_entityID()
    - added get_notDownloadedsceneIDs()
    - added add_externally_downloaded_data_to_GMSDB()
    - added archive_exists_on_fileserver()
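      A minimal sketch of how the timeout keyword and list-valued conditions could look, assuming
      psycopg2 and a libpq connection string (illustration only, not the actual code; a real
      implementation should use parameterized queries instead of string formatting):

          import psycopg2

          def get_info_from_postgreSQLdb(conn_str, tablename, vals2return, cond_dict,
                                         timeout=15000):
              """Fetch columns from a postgreSQL table; 'timeout' is per query, in milliseconds.
              Values in cond_dict may be scalars or lists (lists expand to IN (...))."""
              connection = psycopg2.connect(
                  conn_str + " options='-c statement_timeout=%s'" % timeout)
              conditions = ['%s IN (%s)' % (k, ','.join(str(i) for i in v))
                            if isinstance(v, list) else '%s = %s' % (k, v)
                            for k, v in cond_dict.items()]
              sql = 'SELECT %s FROM %s WHERE %s' % (
                  ','.join(vals2return), tablename, ' AND '.join(conditions))
              with connection, connection.cursor() as cursor:
                  cursor.execute(sql)
                  return cursor.fetchall()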
HLP_F:
    - added silentremove() (see the sketch after this list)
    - fixed a missing colormap in get_mask_colormap()
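      silentremove() is presumably the usual "delete a file and ignore a missing one" helper; a
      sketch:

          import errno
          import os

          def silentremove(filename):
              """Remove a file, silently ignoring the case that it does not exist."""
              try:
                  os.remove(filename)
              except OSError as e:
                  if e.errno != errno.ENOENT:  # re-raise anything but 'no such file'
                      raise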
PG:
    - revised path_generator.__init__()
    - added get_path_cloudmaskdata()
    - added get_outPath_hdr()
CFG:
    - removed the static database statement timeout -> now directly adjustable by the called functions
PC:
    - L1A_map_2: array attributes are now directly written to disk in exec_mode 'Python' @ parallelization_level 'tiles'
      in order to avoid the high cost of pickling big objects
    - L1A_obj.build_L1A_masks() is no longer called from PC
    - fixed a bug that prevented appending L1B_newTiles to L1B database objects
parent ee3fd0eb
@@ -30,13 +30,9 @@ import time
import rasterio
import warnings
import collections
from rasterio.warp import reproject as rio_reproject
from rasterio.warp import calculate_default_transform as rio_calc_transform
from rasterio.warp import RESAMPLING
from scipy.interpolate import RegularGridInterpolator
from numba import jit, autojit
# custom
try:
from osgeo import ogr
from osgeo import osr
@@ -49,7 +45,9 @@ except ImportError:
import gdal
import gdalnumeric
from gdalconst import *
# custom
from rasterio.warp import reproject as rio_reproject
from rasterio.warp import calculate_default_transform as rio_calc_transform
from rasterio.warp import RESAMPLING
import pyproj
from pyorbital import astronomy
import ephem
@@ -57,9 +55,10 @@ from spectral.io import envi
from shapely.geometry import Polygon
from shapely.geometry import shape
from shapely.geometry import MultiPoint
from numba import jit, autojit
import gms_io.envifilehandling_BD as ef
import misc.helper_functions as HLP_F
from gms_io import envifilehandling_BD as ef
from misc import helper_functions as HLP_F
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
class GEOPROCESSING(object):
@@ -943,10 +942,10 @@ class GEOPROCESSING(object):
"""
if use_inherent_GCPs and TieP is None:
self.logger.info('Georeferencing dataset using inherent GCP list...')
if use_inherent_GCPs and TieP is not None:
if use_inherent_GCPs and TieP:
self.logger.info("\n\nWARNING: User defined tie points are provided though 'use_inherent_GCPs' is true. "
"Georeferencing dataset using inherent GCP list...")
if not use_inherent_GCPs and TieP is not None:
if not use_inherent_GCPs and TieP:
self.logger.info('Georeferencing dataset by given tiepoints...')
gcp_ul = [1, 1, TieP[0][0], TieP[0][1]] # col/row/map_x/map_y
gcp_ur = [self.cols, 1, TieP[1][0], TieP[1][1]]
@@ -1551,7 +1550,8 @@ class GEOPROCESSING(object):
pass # bands, rows,cols
return im
def layerstacking_Robert(self, layerlist, outPath, optshift=None):
@staticmethod
def layerstacking_Robert(layerlist, outPath, optshift=None):
"""----METHOD_3---------------------------------------------------------
# Still to do: 1. stacking of different ndarrays to stack. 2. stacking of different multispectral datasets to
one individual stack stacking of different layers (greyscale-files) to one file. layers must have the same
@@ -1843,7 +1843,7 @@ def defBandNo(hdrPath, searchW, v=0):
# originally: re.search("wavelength[\s]*=[\s]*{([0-9.,\s]*)}", hdr_, re.I)
if wL == None:
if wL is None:
print("No wavelength specified")
else:
wList = wL.group(1).strip().split(",")
@@ -2841,7 +2841,7 @@ def warp_ndarray(ndarray,in_gt, in_prj, out_prj, out_gt=None, outRowsCols=None,
print('CHECK OUT_RES AGAIN: ', out_res)
dst_transform,out_cols,out_rows = rio_calc_transform(
src_crs, dst_crs, cols, rows, left, bottom, right, top, resolution=out_res, densify_pts=21)
src_crs, dst_crs, cols, rows, left, bottom, right, top, resolution=out_res) # TODO keyword densify_pts=21 does not exist anymore (moved to transform_bounds()) -> could that be a problem? check warp outputs
rows_expected, cols_expected = abs(top-bottom) / out_res[1], abs(right-left) / out_res[0]
if rows==cols and rows_expected==cols_expected and inEPSG==outEPSG and out_rows!=out_cols:
out_rows,out_cols = rows_expected,cols_expected
@@ -3230,11 +3230,12 @@ def DN2TOARef(ndarray, offsets, gains, irradiances, zenith, earthSunDist,
return TOA_ref
def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZero=None, inSaturated=None):
# type: (np.ndarray,list,list,float,int,int,int) -> np.ndarray
bands = 1 if len(ndarray.shape)==2 else ndarray.shape[2]
for arg,argname in zip([K1,K2],['K1', 'K2']):
assert isinstance(arg,float) or isinstance(arg,int), "TOARad2Kelvin_fastforward: Expected float or integer \
values from argument '%s'. Got type %s" %(argname,type(arg))
assert isinstance(arg[0],float) or isinstance(arg[0],int), "TOARad2Kelvin_fastforward: Expected float or " \
"integer values from argument '%s'. Got type %s" %(argname,type(arg))
assert len(arg) == bands, """Argument '%s' does not match the number of bands. Expected a list of %s %s values
for each band in ascending order. Got %s.""" %(argname, bands, argname, len(arg))
for i in arg:
@@ -3258,7 +3259,7 @@ def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZ
return Kelvin
def DN2DegreesCelsius_fastforward(ndarray, offsets, gains, K1, K2, emissivity=0.95,
def DN2DegreesCelsius_fastforward(ndarray,offsets, gains, K1, K2, emissivity=0.95,
inFill=None, inZero=None, inSaturated=None):
bands = 1 if len(ndarray.shape)==2 else ndarray.shape[2]
@@ -180,7 +180,7 @@ def add_local_availability(dataset):
%(dataset['entity_ID'],ProcL))
DB_T.data_DB_updater(GMS_file_dict)
if job.call_type == 'console':
DB_T.data_DB_to_csv()
DB_T.SQL_DB_to_csv()
dataset['proc_level'] = ProcL
elif len(DB_match) == 1:
proc_chain = ['L0A','L0B','L1A','L1B','L1C','L2A','L2B','L2C','L2D']
@@ -36,15 +36,15 @@ import matplotlib.pyplot as plt
from pyhdf import SD
from spectral.io import envi
import algorithms.METADATA_BD as META
import algorithms.GEOPROCESSING_BD as GEOP
import algorithms.gms_cloud_classifier as CLD_P # Cloud Processor
import algorithms.py_tools_ah
import gms_io.envifilehandling_BD as ef
import misc.helper_functions as HLP_F
import gms_io.Input_reader as INP_R
import gms_io.Output_writer as OUT_W
import misc.path_generator as PG
from algorithms import METADATA_BD as META
from algorithms import GEOPROCESSING_BD as GEOP
from algorithms import gms_cloud_classifier as CLD_P # Cloud Processor
from algorithms import py_tools_ah
from gms_io import envifilehandling_BD as ef
from misc import helper_functions as HLP_F
from gms_io import Input_reader as INP_R
from gms_io import Output_writer as OUT_W
from misc import path_generator as PG
job, usecase = builtins.GMS_config.job, builtins.GMS_config.usecase # read from builtins (set by process_controller)
sys.path.append('./algorithms')
@@ -188,16 +188,23 @@ class L1A_object(object):
self.acquisition_date = datetime.datetime.strptime(self.acquisition_date,'%Y-%m-%d')
self.arr_shape, self.arr_pos = tuple_GMS_subset[1]
self.logger = HLP_F.setup_logger('log__'+self.baseN, self.path_logfile, append=1)
get_hdr = lambda path_ENVIf: os.path.splitext(path_ENVIf)[0] + '.hdr'
PG_obj = PG.path_generator(self.__dict__)
path_arr = PG_obj.get_path_imagedata()
path_masks = PG_obj.get_path_maskdata()
path_maskClouds = PG_obj.get_path_cloudmaskdata()
if job.exec_mode=='Flink':
PG_obj = PG.path_generator(self.__dict__)
self.arr = INP_R.read_ENVIfile(get_hdr(PG_obj.get_path_imagedata()),self.arr_shape,self.arr_pos,self.logger)
path_masks = PG_obj.get_path_maskdata()
self.arr = INP_R.read_ENVIfile(path_arr,self.arr_shape,self.arr_pos,self.logger)
self.mask_1bit = INP_R.read_mask_subset(path_masks,'mask_1bit', self.logger,tuple_GMS_subset[1])
self.mask_clouds = INP_R.read_mask_subset(path_masks,'mask_clouds',self.logger,tuple_GMS_subset[1])
if self.arr_pos: self.logger.info('Reading file: %s @ position %s' %(self.baseN, self.arr_pos))
else: self.logger.info('Reading file: %s' %self.baseN)
self.build_L1A_masks()
#self.masks is only needed by the output writer to write combined masks -> generated there on demand
else: # job.exec_mode=='Python'
self.arr = path_arr
self.mask_1bit = path_masks
self.mask_clouds = path_maskClouds
self.masks = path_masks
del self.logger; self.GMS_identifier['logger'] = 'not set'
return self
@@ -580,10 +587,10 @@ class L1A_object(object):
def log_for_fullArr_or_firstTile(self, logger, log_msg, subset=None):
"""Send a message to the logger only if full array or the first tile is currently processed.
This function can be called when processing any tile but log message will only be sent from first tile.
:param logger: a logging.logger object
:param log_msg: the log message to be logged
:param subset: subset argument as sent to e.g. DN2TOARadRefTemp
that indicates which tile is to be processed"""
:param logger: a logging.logger object
:param log_msg: the log message to be logged
:param subset: subset argument as sent to e.g. DN2TOARadRefTemp that indicates which tile is to be processed.
Not needed if self.arr_pos is not None."""
if subset is None and (self.arr_shape=='cube' or [self.arr_pos[0][0], self.arr_pos[1][0]] == [0,0]) or \
subset==['cube',None] or (subset and [subset[1][0][0], subset[1][1][0]]==[0,0]): # cube or 1st tile
logger.info(log_msg)
@@ -689,14 +696,11 @@ class L1A_object(object):
self.update_spec_vals_according_to_dtype('int16')
tiles_desc = '_'.join([desc for op_th,desc in zip(['optical','thermal'], [usecase.conversion_type_optical,
usecase.conversion_type_thermal]) if desc in self.dict_LayerOptTherm.values()])
#tiles_desc = usecase.conversion_type_optical if usecase.skip_thermal else \
# '%s_%s' %(usecase.conversion_type_optical, usecase.conversion_type_thermal)
if job.exec_mode=='Flink' and subset is None:
self.arr = dataOut
self.arr_desc = tiles_desc
else:
return {'desc': tiles_desc, 'row_start': rS, 'row_end': rE, 'col_start': cS, 'col_end': cE, 'data': dataOut}
self.arr = dataOut
self.arr_desc = tiles_desc
return {'desc': tiles_desc, 'row_start': rS, 'row_end': rE, 'col_start': cS, 'col_end': cE, 'data': dataOut}
def validate_GeoTransProj_GeoAlign(self):
project_dir = os.path.abspath(os.curdir)
@@ -881,17 +885,10 @@ class L1A_object(object):
del self.GMS_identifier['logger']
self.arr = self.arr if job.exec_mode=='Flink' else inPath
self.mask_clouds = mask_clouds if job.exec_mode=='Flink' else None
if mask_clouds is not None:
self.mask_clouds = mask_clouds
if job.exec_mode == 'Flink':
if mask_clouds is not None:
self.mask_clouds = mask_clouds
else:
self.arr = inPath
return {'desc':'mask_clouds','row_start':rS,'row_end':rE,'col_start':cS,'col_end':cE,'data':mask_clouds}
# envi.save_image(os.path.abspath('./testing/out/%s_mask_clouds.hdr' %self.baseN), self.mask_clouds,
# dtype= 'float32', interleave='bsq', ext='.bsq', force=True)
return {'desc':'mask_clouds','row_start':rS,'row_end':rE,'col_start':cS,'col_end':cE,'data':mask_clouds}
def calc_corner_positions(self):
"""Get true corner positions in the form
@@ -954,12 +951,14 @@ class L1A_object(object):
HLP_F.get_outFillZeroSaturated(dtype)
def MetaObj2ODict(self):
"""Convert self.MetaObj to an OrderedDict."""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
temp_logger.info('Preparing extracted metadata to be written to disk...')
self.meta = self.MetaObj.Meta2ODict()
self.path_Outfile_L1A = self.MetaObj.Dataname
def apply_nodata_mask_to_ObjAttr(self, attrname, custom_nodata_val=None):
# type: (str,int)
"""Applies self.mask_1bit to the specified array attribute by setting all values where mask_1bit is 0 to the
given nodata value.
:param attrname: The attribute to apply the nodata mask to. Must be an array attribute or
@@ -969,18 +968,27 @@ class L1A_object(object):
assert hasattr(self,attrname)
if getattr(self,attrname) is not None:
if isinstance(getattr(self,attrname),str):
self.apply_nodata_mask_to_saved_ENVIfile(getattr(self,attrname), custom_nodata_val)
update_spec_vals = True if attrname=='arr' else False
self.apply_nodata_mask_to_saved_ENVIfile(getattr(self,attrname), custom_nodata_val, update_spec_vals)
else:
assert isinstance(getattr(self,attrname),np.ndarray), \
'L1A_obj.%s must be a numpy array. Got type %s.' %(attrname,type(getattr(self,attrname)))
assert hasattr(self,'mask_1bit') and self.mask_1bit is not None
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
temp_logger.info('Applying nodata mask to L1A_object.%s...' %attrname)
nodata_val = custom_nodata_val if custom_nodata_val is not None else \
HLP_F.get_outFillZeroSaturated(getattr(self,attrname).dtype)[0]
nodata_val = custom_nodata_val if custom_nodata_val else \
HLP_F.get_outFillZeroSaturated(getattr(self, attrname).dtype)[0]
getattr(self,attrname)[self.mask_1bit==0] = nodata_val
def apply_nodata_mask_to_saved_ENVIfile(self, path_saved_ENVIhdr, custom_nodata_val=None):
if attrname=='arr': self.MetaObj.spec_vals['fill']=nodata_val
def apply_nodata_mask_to_saved_ENVIfile(self, path_saved_ENVIhdr, custom_nodata_val=None, update_spec_vals=False):
# type: (str,int,bool)
"""Applies self.mask_1bit to a saved ENVI file with the same X/Y dimensions like self.mask_1bit by setting all
values where mask_1bit is 0 to the given nodata value.
:param path_saved_ENVIhdr: <str> The path of the ENVI file to apply the nodata mask to.
:param custom_nodata_val: <int> set the values of the given attribute to this value.
:param update_spec_vals: <bool> whether to update self.MetaObj.spec_vals['fill']
"""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
temp_logger.info('Applying nodata mask to saved ENVI file...')
assert os.path.isfile(path_saved_ENVIhdr)
@@ -988,35 +996,36 @@ class L1A_object(object):
if not path_saved_ENVIhdr.endswith('.hdr') and os.path.isfile(os.path.splitext(path_saved_ENVIhdr)[0]+'.hdr'):
path_saved_ENVIhdr = os.path.splitext(path_saved_ENVIhdr)[0]+'.hdr'
if custom_nodata_val is None:
dtype_IDL = int(INP_R.read_ENVIhdr_to_dict(path_saved_ENVIhdr)['data type'])
dtype_IDL = int(INP_R.read_ENVIhdr_to_dict(path_saved_ENVIhdr)['data type'])
nodata_val = HLP_F.get_outFillZeroSaturated(HLP_F.dtype_lib_IDL_Python[dtype_IDL])[0]
else:
nodata_val = custom_nodata_val
FileObj = spectral.open_image(path_saved_ENVIhdr)
File_memmap = FileObj.open_memmap(writable=True)
File_memmap[self.mask_1bit==0] = nodata_val
if update_spec_vals: self.MetaObj.spec_vals['fill'] = nodata_val
def build_L1A_masks(self):
"""Generates L1A_obj.masks attribute from by concatenating all masks included in L1A_obj.
def build_combined_masks_array(self):
# type: () -> dict
"""Generates self.masks attribute from by concatenating all masks included in GMS obj.
The corresponding metadata is assigned to L1A_obj.masks_meta. Empty mask attributes are skipped."""
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
temp_logger.info('Generating masks and corresponding metadata......')
bandslist = [i for i in ['mask_1bit','mask_clouds'] if hasattr(self,i) and getattr(self,i) is not None]
self.masks = getattr(self,bandslist[0]).astype(np.uint8)[:,:,None] if len(bandslist) == 1 else \
np.concatenate([getattr(self,i)[:,:,None] for i in bandslist], axis=2).astype(np.uint8) # uint8 allows to be written by spectral python
# add some information to mask header
masks_L1A_md = {'description': 'L1A mask layer:\n0 = no data\n1 = data in good condition\n2 = bad image pixel'}
masks_L1A_md.update({i:self.meta[i] for i in ['samples', 'lines', 'map info', 'header offset', 'file type',
'data type', 'interleave', 'byte order', 'coordinate system string'] if i in self.meta.keys() })
masks_L1A_md['bands'] = len(bandslist)
if self.masks.dtype == np.uint8: masks_L1A_md['data type'] = 1
if self.masks.dtype == np.int16: masks_L1A_md['data type'] = 2
masks_L1A_md['band names'] = bandslist
self.masks_meta = masks_L1A_md
return {'desc': 'masks', 'row_start': 0, 'row_end': self.shape_fullArr[0],
'col_start': 0, 'col_end': self.shape_fullArr[1], 'data': self.masks}
arrays2combine = [aN for aN in ['mask_1bit', 'mask_clouds'] \
if hasattr(self, aN) and isinstance(getattr(self, aN), np.ndarray)]
if arrays2combine:
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.path_logfile, append=1)
self.log_for_fullArr_or_firstTile(temp_logger,'Combining masks for output writer...')
dtype = np.uint8 if float(spectral.__version__)>=0.16 else np.int8
get_data = lambda arrName: getattr(self,arrName).astype(dtype)[:,:,None]
self.masks = get_data(arrays2combine[0]) if len(arrays2combine)==1 else \
np.concatenate([get_data(aN) for aN in arrays2combine], axis=2)
mapI, CSS = (self.meta['map info'], self.meta['coordinate system string']) \
if hasattr(self, 'meta') and self.meta else (self.MetaObj.map_info, self.MetaObj.projection)
self.masks_meta = {'map info': mapI, 'coordinate system string': CSS,
'bands':len(arrays2combine),'band names':arrays2combine}
return {'desc': 'masks', 'row_start': 0, 'row_end': self.shape_fullArr[0],
'col_start': 0, 'col_end': self.shape_fullArr[1], 'data': self.masks} # usually not needed
def calc_mean_VAA(self):
"""Calculates mean viewing azimuth angle using sensor flight line derived from corner coordinates."""
@@ -1032,6 +1041,7 @@ class L1A_object(object):
temp_logger.info('Calculation of mean VAA...: %s' % round(self.VAA_mean, 2))
def combine_tiles_to_ObjAttr(self, tiles, target_attr):
# type: (list,str)
"""Combines tiles, e.g. produced by L1A_P.L1A_object.DN2TOARadRefTemp() to a single attribute.
If usecase.job.exec_mode == 'Python' the produced attribute is additionally written to disk.
:param tiles: <list> a list of dictionaries with the keys 'desc', 'data', 'row_start','row_end',
@@ -1067,6 +1077,7 @@ class L1A_object(object):
self.MetaObj.Dataname = path_radref_file
def write_tiles_to_ENVIfile(self, tiles, overwrite=True):
# type: (list,bool)
"""Writes tiles, e.g. produced by L1A_P.L1A_object.DN2TOARadRefTemp() to a single output ENVI file.
:param tiles: <list> a list of dictionaries with the keys 'desc', 'data', 'row_start','row_end',
'col_start' and 'col_end'
@@ -1105,4 +1116,9 @@ class L1A_object(object):
if not glob.glob('%s/*' % pardir) and os.path.isdir(pardir):
os.rmdir(pardir)
else:
break
\ No newline at end of file
break
path_procdata = PG.path_generator(self.__dict__.copy()).get_path_procdata()
tempfiles = glob.glob(os.path.join(path_procdata,'*__TEMPFILE*'))
[HLP_F.silentremove(tf) for tf in tempfiles]
else:
raise NotImplementedError
\ No newline at end of file
@@ -201,6 +201,8 @@ from algorithms.L1A_P import L1A_object
class COREG(object):
def __init__(self, dict_L1A_Instance,v=0,q=0):
# type (dict) -> COREG
assert dict_L1A_Instance['proc_level'] == 'L1A',\
'L1B_P only processes L1A data, not %s.' %dict_L1A_Instance['proc_level']
self.max_shift = 5 # TODO: what happens if the detected shift is too large? Change win_pos/imref/win_size and recompute?
@@ -229,29 +231,54 @@ class COREG(object):
self.overlap_percentage = None # set by self.get_reference_image_params()
self.overlap_area = None # set by self.get_reference_image_params()
self.imref_entity_ID = None # set by self.get_reference_image_params()
self.ds_imref = None # set below
self.ds_im2shift = None # set below
self.ds_cldmask = None # missing # TODO
self.ds_dgm = None # missing # TODO
self.ref_prj = None # set below
self.ref_gt = None # set below
self.ref_xgsd = None # set below
self.ref_ygsd = None # set below
self.shift_xgsd = None # set below
self.shift_ygsd = None # set below
self.verbose_out = None # set below
self.imref_band4match = None # set by self.get_opt_bands4matching(), index starts with 1
self.im2shift_band4match = None # set by self.get_opt_bands4matching(), index starts with 1
self.win_pos = None # set by self.get_opt_winpos_winsize()
self.win_size = None # set by self.get_opt_winpos_winsize()
self.x_shift_px = None # set by self.calculate_spatial_shifts()
self.y_shift_px = None # set by self.calculate_spatial_shifts()
self.x_shift_map = None # set by self.get_updated_map_info()
self.y_shift_map = None # set by self.get_updated_map_info()
self.updated_map_info = None # set by self.get_updated_map_info()
self.success = False # default
# POPULATE ATTRIBUTES
self.get_reference_image_params()
if not self.imref_scene_ID:
self.success = 0
else:
if self.imref_scene_ID:
"""read input data (from disk or memory)"""
if v: print('reference image:', self.path_imref)
if v: print('shift image: ', self.path_im2shift, '\n')
self.ds_imref = self.get_ds_imref() # from disk or from L1A object
self.ds_im2shift = self.get_ds_im2shift() # from disk or from L1A object
self.ds_cldmask = None
self.ds_dgm = None
self.ds_imref = self.get_ds_imref() # from disk or from L1A object
self.ds_im2shift = self.get_ds_im2shift() # from disk or from L1A object
# TODO add ds_cldmask, ds_dgm
"""get georeference infos"""
self.ref_prj, self.ref_gt = self.ds_imref.GetProjection(), self.ds_imref.GetGeoTransform()
self.ref_xgsd,self.shift_xgsd = abs(self.ref_gt[1]), abs(self.shift_gt[1])
self.ref_ygsd,self.shift_ygsd = abs(self.ref_gt[5]), abs(self.shift_gt[5])
assert self.ref_prj is not '', 'Reference image has no projection.'
assert re.search('LOCAL_CS',self.ref_prj) is None, 'Reference image is not georeferenced.'
assert self.ref_gt is not None, 'Reference image has no map information.'
assert self.shift_prj is not '', 'The image to be shifted has no projection.'
assert re.search('LOCAL_CS',self.shift_prj) is None, 'The image to be shifted is not georeferenced.'
assert self.shift_gt is not None, 'The image to be shifted has no map information.'
assert self.ref_prj , 'Reference image has no projection.'
assert not re.search('LOCAL_CS',self.ref_prj) , 'Reference image is not georeferenced.'
assert self.ref_gt , 'Reference image has no map information.'
assert self.shift_prj , 'The image to be shifted has no projection.'
assert not re.search('LOCAL_CS',self.shift_prj), 'The image to be shifted is not georeferenced.'
assert self.shift_gt , 'The image to be shifted has no map information.'
assert get_proj4info(self.ds_imref) == get_proj4info(self.ds_im2shift),\
'Input projections are not equal. Different projections are currently not supported.'
@@ -266,22 +293,13 @@ class COREG(object):
OUT_W.write_shp(self.overlap_poly, os.path.join(self.verbose_out,'overlap_poly.shp'), get_prjLonLat())
"""get bands to use for matching"""
self.imref_band4match = None # set by self.get_opt_bands4matching(), index starts with 1
self.im2shift_band4match = None # set by self.get_opt_bands4matching(), index starts with 1
self.get_opt_bands4matching(target_cwlPos_nm=550)
"""2. get optimal window position and size to be used for shift calculation (within overlap)"""
self.win_pos = None # set by self.get_opt_winpos_winsize()
self.win_size = None # set by self.get_opt_winpos_winsize()
self.get_opt_winpos_winsize()
self.x_shift_px = None # set by self.calculate_spatial_shifts()
self.y_shift_px = None # set by self.calculate_spatial_shifts()
self.x_shift_map = None # set by self.get_updated_map_info()
self.y_shift_map = None # set by self.get_updated_map_info()
self.updated_map_info = None # set by self.get_updated_map_info()
self.success = True
self.success = 1
def get_reference_image_params(self):
"""postgreSQL query: get IDs of overlapping scenes and select most suitable scene_ID
@@ -473,11 +491,11 @@ class COREG(object):
if self.imref is None:
assert os.path.exists(self.path_imref), 'The reference image can not be found at %s.' %self.path_imref
ds = gdal.Open(self.path_imref)
assert ds is not None, 'The image %s can not be read by GDAL.' %self.imref_entity_ID
assert ds, 'The image %s can not be read by GDAL.' %self.imref_entity_ID
elif isinstance(self.imref,str):
assert os.path.exists(self.imref), 'The reference image can not be found at %s.' %self.imref
ds = gdal.Open(self.imref)
assert ds is not None, 'The image %s can not be read by GDAL.' %self.imref_entity_ID
assert ds, 'The image %s can not be read by GDAL.' %self.imref_entity_ID
elif isinstance(self.imref,np.ndarray):
"""L1A_obj refernce in GDAL-Array konvertieren"""
path_src = PG.get_tempfile(ext='.bsq') # FIXME writes .bsq to tempfile
@@ -495,7 +513,7 @@ class COREG(object):
if isinstance(self.im2shift,str):
assert os.path.exists(self.im2shift), 'The image to be shifted can not be found at %s.' %self.im2shift
ds = gdal.Open(self.path_im2shift)
assert ds is not None, 'The image %s can not be read by GDAL.' # FIXME entity ID missing
assert ds, 'The image %s can not be read by GDAL.' # FIXME entity ID missing
elif isinstance(self.im2shift,np.ndarray):
path_src = PG.get_tempfile(ext='.bsq') # FIXME writes .bsq to tempfile
arr_ds = GEOP.ndarray2gdal(self.im2shift, path_src, geotransform=self.shift_gt,
@@ -963,9 +981,13 @@ def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False,
attritem2add = item2add+[attrname]
if isinstance(obj[attrname],np.ndarray):
bands = obj[attrname].shape[2] if len(obj[attrname].shape)==3 else None
elif isinstance(obj[attrname],str) and os.path.exists(obj[attrname]):
bands = gdal.Open(obj[attrname]).RasterCount
bands = bands if bands > 1 else None
elif isinstance(obj[attrname],str):
if os.path.exists(obj[attrname]):
bands = gdal.Open(obj[attrname]).RasterCount
bands = bands if bands > 1 else None
else:
warnings.warn('get_DESHIFTER_configs: Missing file %s. File skipped.' % obj[attrname])
continue
else:
raise Exception('Unsupported attribute type %s in attribute %s.' %(type(obj[attrname]),attrname))
@@ -1281,7 +1303,11 @@ class L1B_object(L1A_object):
super().__init__(None)
if L1A_obj: [setattr(self, key, value) for key,value in L1A_obj.__dict__.items()]
self.proc_level = 'L1B'
self.coreg_info = self.set_coreg_info(COREG_obj) if COREG_obj else None
self.coreg_info = None
#self.coreg_info is set by self.set_coreg_info(COREG_obj)
if COREG_obj:
self.set_coreg_info(COREG_obj)
def set_coreg_info(self, COREG_obj):
@@ -1295,9 +1321,11 @@ class L1B_object(L1A_object):
'reference entity ID' : COREG_obj.imref_entity_ID,
'reference projection' : COREG_obj.ref_prj,
'reference geotransform': COREG_obj.ref_gt,
'reference extent' : {'cols':COREG_obj.ds_imref.RasterXSize, 'rows':COREG_obj.ds_imref.RasterYSize},
'reference extent' : {'cols':COREG_obj.ds_imref.RasterXSize, 'rows':COREG_obj.ds_imref.RasterYSize} \
if COREG_obj.ds_imref else {'cols':None,'rows':None},
'is shifted' : False,
'is resampled' : False } if COREG_obj.success else None
'is resampled' : False,
'success' : COREG_obj.success}
if self.meta:
self.meta['original map info'] = COREG_obj.map_info_to_update
@@ -28,17 +28,16 @@ try:
except ImportError:
import osr
import misc.helper_functions as HLP_F
import algorithms.GEOPROCESSING_BD as GEOP
import gms_io.Input_reader as INP_R
import misc.path_generator as PG
from misc import helper_functions as HLP_F
from algorithms import GEOPROCESSING_BD as GEOP
from gms_io import Input_reader as INP_R
from misc import path_generator as PG
from algorithms.L1B_P import L1B_object
job = builtins.GMS_config.job
########################### core functions ####################################
class L1C_object(L1B_object):
#"""@DynamicAttrs"""
def __init__(self, L1B_obj):
super().__init__(None,None)
if L1B_obj: [setattr(self, key, value) for key,value in L1B_obj.__dict__.items()]
@@ -66,7 +65,7 @@ class L1C_object(L1B_object):
if hasattr(self, 'mask_1bit') and isinstance(self.mask_1bit, np.ndarray):
mask_1bit_temp = self.mask_1bit[rS:rE + 1, cS:cE + 1]
else:
path_masks = PG.path_generator(self.__dict__.copy()).get_path_maskdata()
path_masks = PG.path_generator(self.__dict__.copy(),proc_level='L1B').get_path_maskdata()
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_1bit', temp_logger, subset)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
@@ -92,19 +91,18 @@ class L1C_object(L1B_object):
subset = subset if subset else [self.arr_shape, self.arr_pos]
assert subset[0] in ['cube', 'block'], "'%s' subset is not supported." % subset[0]
rS, rE, cS, cE = list(GEOP.get_subsetProps_from_subsetArg(self.shape_fullArr, subset).values())[3:7]
#print(subset, self.arr_pos)
self.log_for_fullArr_or_firstTile(temp_logger,'Calculating acquisition and illumination geometry arrays.',subset)
if hasattr(self, 'mask_1bit') and isinstance(self.mask_1bit, np.ndarray):
mask_1bit_temp = self.mask_1bit[rS:rE + 1, cS:cE + 1]
else:
path_masks = PG.path_generator(self.__dict__.copy()).get_path_maskdata()
path_masks = PG.path_generator(self.__dict__.copy(),proc_level='L1B').get_path_maskdata()
mask_1bit_temp = INP_R.read_mask_subset(path_masks, 'mask_1bit', temp_logger, subset)
fillVal = HLP_F.get_outFillZeroSaturated(np.float32)[0]
assert self.meta, "Missing 'meta' attribute. Run self.Meta2ODict() first!"
if 'ViewingAngle_arrProv' in self.meta:
if 'ViewingAngle_arrProv' in self.meta and self.meta['ViewingAngle_arrProv']:
VZA_arr = GEOP.adjust_acquisArrProv_to_shapeFullArr(self.meta['ViewingAngle_arrProv'],self.shape_fullArr,
subset,bandwise=0)
else:
@@ -118,7 +116,7 @@ class L1C_object(L1B_object):
accurracy=job.SZA_SAA_calculation_accurracy,
lonlat_arr=self.lonlat_arr
if job.SZA_SAA_calculation_accurracy == 'fine' else None)
if 'IncidenceAngle_arrProv' in self.meta:
if 'IncidenceAngle_arrProv' in self.meta and self.meta['IncidenceAngle_arrProv']:
pass # FIXME Sentinel-2s viewing azimuth array is currently not used (instead a static VAA is used)
RAA_arr = GEOP.calc_RAA_array(self.trueDataCornerLonLat, SAA_arr, self.VAA_mean, mask_1bit_temp, fillVal)
# -*- coding: utf-8 -*-
###############################################################################
#