Commit 7d82a28a authored by Daniel Scheffler

Refactored GEOPROCESSING, METADATA, Input_reader, Output_writer and SpatialIndexMediator. Deleted a lot of deprecated/unused code:

GEOPROCESSING:
- deleted many unused functions written by Robert Behling

L1A_P:
- deleted decompress()
- deleted Layerstacking()

L1B_P:
- deleted functions related to SIFT/Orfeo Toolbox co-registration

L2A_P:
- deleted get_DESHIFTER_configs()

- deleted envifilehandling.py
- deleted SRF_DB_2_PKL.py (content moved to output_writer)

gms_object:
- deleted calc_mask_nodataOLD()

METADATA:
- deleted some unused functions written by Robert Behling

- deleted sandbox module
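
The module renames in this commit change the import paths of everything built on gms_preprocessing. A minimal sketch of the old vs. new imports, assuming the subpackage layout implied by the hunks below (the INP_R/OUT_W aliases are illustrative):

# Old import paths (pre-refactoring):
# from gms_preprocessing.algorithms import GEOPROCESSING as GEOP
# from gms_preprocessing.model import METADATA as META
# from gms_preprocessing.io import Input_reader as INP_R, Output_writer as OUT_W
# from gms_preprocessing.misc.SpatialIndexMediator import SpatialIndexMediator

# New import paths (post-refactoring):
from gms_preprocessing.algorithms import geoprocessing as GEOP
from gms_preprocessing.model import metadata as META
from gms_preprocessing.io import input_reader as INP_R, output_writer as OUT_W
from gms_preprocessing.misc.spatial_index_mediator import SpatialIndexMediator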


Former-commit-id: 36ab28d1
parent f867b21f
@@ -51,7 +51,7 @@ clean-test: ## remove test and coverage artifacts
lint: ## check style with flake8
flake8 --max-line-length=120 gms_preprocessing tests > ./tests/linting/flake8.log
pycodestyle gms_preprocessing --exclude="*.ipynb,*.ipynb*,envifilehandling.py" --max-line-length=120 > ./tests/linting/pycodestyle.log
pycodestyle gms_preprocessing --exclude="*.ipynb,*.ipynb*" --max-line-length=120 > ./tests/linting/pycodestyle.log
-pydocstyle gms_preprocessing > ./tests/linting/pydocstyle.log
test: ## run tests quickly with the default Python
......
@@ -23,14 +23,14 @@ from py_tools_ds.geo.map_info import mapinfo2geotransform
from py_tools_ds.geo.projection import EPSG2WKT
from ..config import GMS_config as CFG
from . import GEOPROCESSING as GEOP
from . import geoprocessing as GEOP
from . import gms_cloud_classifier as CLD_P # Cloud Processor
from ..io import Output_writer as OUT_W
from ..io import output_writer as OUT_W
from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG
from ..misc.definition_dicts import get_outFillZeroSaturated, is_dataset_provided_as_fullScene, get_mask_classdefinition
from ..model.gms_object import GMS_object
from ..model import METADATA as META
from ..model import metadata as META
__author__ = 'Daniel Scheffler'
@@ -70,236 +70,6 @@ class L1A_object(GMS_object):
% (self.satellite, self.sensor,
(' ' + self.subsystem) if self.subsystem not in [None, ''] else '', self.entity_ID))
# <editor-fold desc="decompress(...) - not used anymore" >
# def decompress(self, compressed_file, outputpath=None):
# """Decompresses ZIP, TAR, TAR.GZ and TGZ archives to a given output path.
# :param compressed_file:
# :param outputpath:
# """
# (filepath, filename) = os.path.split(compressed_file)
# self.logger.info('Extracting ' + filename + '...')
# outputpath = outputpath if outputpath is not None else os.path.join(filepath, filename.partition(".")[0])
# if not os.path.exists(outputpath): os.makedirs(outputpath)
#
# if compressed_file.endswith(".zip"):
#             assert zipfile.is_zipfile(compressed_file), \
# self.logger.critical(compressed_file+" is not a valid zipfile!")
# zf = zipfile.ZipFile(compressed_file)
# names = zf.namelist()
# count_extracted = 0
# for n in names:
# if os.path.exists(os.path.join(outputpath, n)) and \
# zipfile.ZipFile.getinfo(zf, n).file_size == os.stat(os.path.join(outputpath, n)).st_size:
# self.logger.warning("file '%s' from '%s' already exists in the directory: '%s'" \
# %(n,filename,outputpath))
# else:
# written = 0
# while written == 0:
# try:
# zf.extract(n, outputpath)
# self.logger.info("Extracting %s..." % n)
# count_extracted += 1
# written = 1
# except OSError as e:
# if e.errno == 28:
# print('No space left on device. Waiting..')
# else:
# raise
# if count_extracted == 0:
# self.logger.warning("No files of %s have been decompressed.\n" % filename)
# else:
#                     self.logger.info("Extraction of '" + filename + "' was successful\n")
# zf.close()
# elif compressed_file.endswith(".tar") or compressed_file.endswith(".tar.gz") or \
# compressed_file.endswith(".tgz"):
# tf = tarfile.open(compressed_file)
# names, members = tf.getnames(), tf.getmembers()
# count_extracted = 0
# for n, m in zip(names, members):
#                 if os.path.exists(os.path.join(outputpath, n)) and \
# m.size==os.stat(os.path.join(outputpath, n)).st_size:
# self.logger.warning("file '%s' from '%s' already exists in the directory: '%s'" \
# %(n,filename,outputpath))
# else:
# written = 0
# while written == 0:
# try:
# tf.extract(n, outputpath)
# self.logger.info("Extracting %s..." % n)
# count_extracted += 1
# written = 1
# except OSError as e:
# if e.errno == 28:
# print('No space left on device. Waiting..')
# else:
# raise
# if count_extracted == 0:
# self.logger.warning("No files of %s have been decompressed.\n" % filename)
# else:
#                     self.logger.info("Extraction of '" + filename + "' was successful\n")
# tf.close()
# </editor-fold>
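# For reference, a minimal stdlib-based sketch of what the deleted decompress()
# did (assumption: only the archive types handled above are needed; the original
# retry-on-ENOSPC loop is omitted). Not part of this commit.
#
# import os
# import shutil
#
# def decompress(compressed_file, outputpath=None):
#     """Decompress ZIP, TAR, TAR.GZ and TGZ archives to a given output path."""
#     filepath, filename = os.path.split(compressed_file)
#     outputpath = outputpath or os.path.join(filepath, filename.partition(".")[0])
#     os.makedirs(outputpath, exist_ok=True)
#     # shutil.unpack_archive infers the format (.zip/.tar/.tar.gz/.tgz) from the extension
#     shutil.unpack_archive(compressed_file, outputpath)
#     return outputpath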
# <editor-fold desc="Layerstacking(...) - deprecated">
# def Layerstacking(self, InFolderOrFile, outFile=None, sensor=None, ullr=None):
# assert os.path.isfile(InFolderOrFile) or os.path.isdir(InFolderOrFile)
# InFolder,inFile = (os.path.dirname(InFolderOrFile), InFolderOrFile) if os.path.isfile(InFolderOrFile) else \
# (InFolderOrFile, '')
# if outFile is None:
# root_dir, folderName = os.path.split(InFolder)
# if folderName == "":
# root_dir, folderName = os.path.split(InFolder[:-1])
# outFile = os.path.join(InFolder, folderName + "_stack.bsq")
# if not os.path.isdir(os.path.dirname(outFile)): os.makedirs(os.path.dirname(outFile))
# self.logger.info('%s layerstacking started for target-file %s' % (self.satellite, os.path.split(outFile)[1]))
#
# if os.path.exists(outFile):
# re_LayerBandsAssignment_str = re.search("LayerBandsAssignment[\s]*=[\s]* {([\s\S]*)}",
# open(outFile[:-4] + '.hdr', "r").read(), re.I)
# if re_LayerBandsAssignment_str is None:
# self.logger.warning("""\n\t\tWARNING: There is already a layerstacking result at %s but no Layer-Bands
# Assignment could be extracted. Layerstacking has been restarted.\n""" % outFile)
# os.remove(outFile)
# os.remove(outFile[:-4] + '.hdr')
# self.Layerstacking(InFolder, outFile, sensor, ullr)
# else:
#                 LayerBandsAssignment_existingFile = \
# [i.strip() for i in re_LayerBandsAssignment_str.group(1).split(',')]
# if self.LayerBandsAssignment != LayerBandsAssignment_existingFile:
# self.logger.warning("""\n\t\tWARNING: There is already a layerstacking result at %s but
# Layer-Bands Assignment does not match. Layerstacking has been restarted.\n""" % outFile)
# os.remove(outFile)
# os.remove(outFile[:-4] + '.hdr')
# self.Layerstacking(InFolder, outFile, sensor, ullr)
# else:
#                     self.logger.warning(
#                         "\n\t\tWARNING: File %s already exists. File has not been overwritten.\n" % outFile)
# else:
# try:
# if re.search('Terra', self.satellite):
# # Workaround for incompatibility of HDF4 to GDAL 2.x.x and Python3.4
# if sys.version_info[0] == 3 and sys.version_info[1] > 3 and inFile.endswith('.hdf'):
# path_inFile_converted = os.path.splitext(inFile)[0]+'.h5'
# os.system('h4toh5 %s %s' % (inFile, path_inFile_converted))
# inFile = path_inFile_converted
# rasObj = GEOP.GEOPROCESSING(inFile, self.logger)
# if rasObj.drname_s == "ENVI":
# pass
# elif rasObj.drname_s in ['HDF4', 'HDF5']:
# # check how subdatasets are named in HDF-File
# subsystem_identifier = 'VNIR' if self.subsystem in ['VNIR1','VNIR2'] else 'SWIR' \
# if self.subsystem == 'SWIR' else 'TIR'
# if rasObj.drname_s == 'HDF4':
# var1 = ['HDF4_EOS:EOS_SWATH:"%s":%s_Swath:ImageData%s' % (inFile, subsystem_identifier, i)
# for i in self.LayerBandsAssignment]
# var2 = ['HDF4_EOS:EOS_SWATH:"%s":%s_Band%s:ImageData' % (inFile, subsystem_identifier, i)
# for i in self.LayerBandsAssignment]
# else: # HDF5
# var1 = ['HDF5:"%s"://%s/%s_Swath/Data_Fields/ImageData%s' % (inFile, subsystem_identifier,
# subsystem_identifier, i) for i in self.LayerBandsAssignment]
# var2 = ['HDF5:"%s"://%s/%s_Band%s/Data_Fields/ImageData' % (inFile, subsystem_identifier,
# subsystem_identifier, i) for i in self.LayerBandsAssignment]
# ds = gdal.Open(inFile,gdalconst.GA_ReadOnly)
# if var1[0] in str(ds.GetSubDatasets()):
# subds_names = var1
# elif var2[0] in str(ds.GetSubDatasets()):
# subds_names = var2
# else:
# self.logger.error(
# 'Subdatasets seem to have unknown names in HDF file. Layerstacking failed.')
# sys.exit()
#
# # calculate a DN subsystem_stack from the hdf data
# layers2merge = \
# ["%s__b%s.bsq" %(os.path.splitext(outFile)[0], i) for i in self.LayerBandsAssignment]
# for subds_name, layerPath in zip(subds_names, layers2merge):
# # convert hdf subdatasets in ENVI bsq files
# subprocess.Popen(["gdal_translate", "-q", "-of", "ENVI",subds_name,layerPath]).wait()
#
# # create DN stack for current subsystem
# if os.path.isfile(outFile): os.remove(outFile)
# subprocess.Popen(
# ["gdal_merge.py", "-q", "-o", outFile, "-of", "ENVI", "-seperate"] + layers2merge).wait()
#
# else:
# '''Landsat / ALOS'''
# Files = os.listdir(InFolder)
# filtered_files = []
# for File in HLP_F.sorted_nicely(Files):
# if re.search('ALOS', self.satellite) and \
# re.search("IMG-0[0-9]-[\s\S]*", File) or \
# re.search('Landsat', self.satellite) and \
# re.search("[\S]*_B[1-9][0-9]?[\S]*.TIF", File):
# filtered_files.append(File)
#
# assert len(self.LayerBandsAssignment) == len(filtered_files), \
# self.logger.warning("%s data should have %s bands (%s found in archive). Execution stopped." \
# % (sensor, len(self.LayerBandsAssignment), len(filtered_files)))
#
# band_names = []
# for LayerNr, band in zip(self.LayerBandsAssignment, filtered_files):
# GEOP_o = GEOP.GEOPROCESSING(os.path.join(InFolder, band), self.logger)
# if ullr:
# kwargs = {}
# if LayerNr == self.LayerBandsAssignment[0]:
# assert isinstance(ullr,list) and len(ullr) ==4, self.logger.critical(""" ERROR not
# the correct number of coordinates given! 4 Required: ulx,uly,lrx,lry Given: %s
# different coordinates ->Clip_batch.py""" % (len(ullr)))
# ulx, uly, lrx, lry = ullr
# #cols = GEOP_o.cols
# #rows = GEOP_o.rows
# pixWidth = GEOP_o.pixelWidth
# pixHeight = GEOP_o.pixelHeight
# rot1 = GEOP_o.rot1
# rot2 = GEOP_o.rot2
# #ulx_orig = GEOP_o.originX
# #uly_orig = GEOP_o.originY
# #geotransform_orig = [ulx_orig, pixWidth, rot1, uly_orig, rot2, pixHeight]
# projection_orig = GEOP_o.projection
# kwargs = {'UL':[ulx, uly], 'LR':[lrx, lry]}
# band_nd = GEOP_o.tondarray(**kwargs)
# else:
# band_nd = GEOP_o.tondarray()
# if LayerNr == self.LayerBandsAssignment[0]:
# bands = band_nd
# band_names.append(band)
# self.logger.info(band)
# elif CFG.usecase.skip_thermal and META.isTHERMAL(self.GMS_identifier, LayerNr):
# self.logger.info('Band %s skipped because Layerstacking has been called with '
# 'skipthermal = True.' % band)
# elif META.isPAN(self.GMS_identifier, LayerNr):
# self.logger.info('Band %s skipped because it is a panchromatic band.' % band)
# else:
# try:
# bands = np.append(bands, band_nd, axis=0)
# band_names.append(band)
# self.logger.info(band)
# except ValueError:
# self.logger.info("""The spatial resolution of band >%s< does not match. Thus it could
# not be added.""" % band)
#
# if re.search('ALOS', self.satellite):
#                 '''First 22 lines are nodata - maybe due to an issue of the GDAL CEOS driver.
# But: UL of metadata refers to bands[:,:,21]! So the imported GeoTransform is correct when
# the first 21 columns are deleted.'''
# bands = bands[:, :, 21:] # [bands,lines,columns]
# if ullr:
# GEOP.ndarray2gdal(bands, outPath=outFile, geotransform=[float(ulx), pixWidth, rot1,
# float(uly), rot2, pixHeight], projection=projection_orig, v=1)
# else:
# GEOP.ndarray2gdal(bands, outPath=outFile, importFile=os.path.join(InFolder, band_names[0]))
# hdr_o = ef.ReadEnviHeader(outFile[:-4] + ".hdr")
# hdr_o.bNames = band_names
# ef.WriteEnviHeader(hdr_o, outFile[:-4] + ".hdr")
# with open(outFile[:-4] + '.hdr', "a") as stack_hdr_fp:
# stack_hdr_fp.write('LayerBandsAssignment = {%s}\n' % str(self.LayerBandsAssignment). \
# replace('[', '').replace(']','').replace("'", '').replace(' ', ''))
# self.logger.info('Layerstacking was successful.')
# except:
# self.logger.exception('Error during layerstacking!')
# self.logger.warning("\n\t\tWARNING: %s is not processed.\n" % self.path_archive)
# </editor-fold>
def import_rasterdata(self):
if re.search("ALOS", self.satellite, re.I):
'''First 22 lines are nodata - maybe due to an issue of the GDAL CEOS driver.
......
@@ -3,8 +3,6 @@
Level 1B Processor:
Detection of global/local geometric displacements.
Written by Daniel Scheffler
"""
@@ -33,164 +31,10 @@ from .L1A_P import L1A_object
from ..misc import database_tools as DB_T
from ..misc import helper_functions as HLP_F
from ..misc import path_generator as PG
from ..misc.SpatialIndexMediator import SpatialIndexMediator
from ..misc.spatial_index_mediator import SpatialIndexMediator
from ..misc.definition_dicts import get_GMS_sensorcode, get_outFillZeroSaturated
# if socket.gethostname() == 'geoms':
# sys.path.append('/usr/lib/otb/python/')
# os.environ['ITK_AUTOLOAD_PATH'] = "/usr/lib/otb/applications"
# sys.path.append('/usr/lib/python2.7/dist-packages') # cv2
# sys.path.append('/usr/local/lib/python2.7/site-packages')
# sys.path.append('/home/gfz-fe/scheffler/python')
# if socket.gethostname() == 'mefe18':
# sys.path.append('/usr/lib64/otb/python/')
# os.environ['ITK_AUTOLOAD_PATH'] = "/usr/lib64/otb/applications"
# sys.path.append('/misc/hy5/scheffler/python')
# try: import otbApplication
# except ImportError: print('otbApplication-lib missing..')
# except SyntaxError: print('The installed otbApplication-lib throws syntax errors.. Maybe too old?')
# try: import cv2
# except ImportError: print('cv2-lib missing..')
# <editor-fold desc="deprecated/unused functions">
# def calculate_TiePoints(im_ref,im_rpc,distance = 200):
# detector = cv2.FeatureDetector_create ("SIFT")
# descriptor = cv2.DescriptorExtractor_create("SIFT")
#
#     TieP_ref = detector.detect(im_ref) # returns a list of keypoints -> type(skp[0]): cv2.KeyPoint
#     # Detector = numpy array - shape: number of keypoints x 128
# TieP_ref, Detector_ref = descriptor.compute(im_ref, TieP_ref)
# print('%s temporary master tie points found.' %len(TieP_ref))
# TieP_rpc = detector.detect(im_rpc)
# TieP_rpc, Detector_rpc = descriptor.compute(im_rpc, TieP_rpc)
# print('%s temporary slave tie points found.' %len(TieP_rpc))
#
# flann_params = dict(algorithm=1, trees=4)
# flann = cv2.flann_Index(Detector_ref, flann_params)
# idx, dist = flann.knnSearch(Detector_rpc, 1, params={})
# del flann
# dist = dist[:,0]/2500.0
# dist = dist.reshape(-1,).tolist()
# idx = idx.reshape(-1).tolist()
# indices = range(len(dist))
# indices.sort(key=lambda i: dist[i])
# dist = [dist[i] for i in indices]
# idx = [idx[i] for i in indices]
# TieP_ref_final = []
# for i, dis in itertools.izip(idx, dist):
# if dis < distance:
# TieP_ref_final.append(TieP_ref[i])
#
# flann = cv2.flann_Index(Detector_rpc, flann_params)
# idx, dist = flann.knnSearch(Detector_ref, 1, params={})
# del flann
# dist = dist[:,0]/2500.0
# dist = dist.reshape(-1,).tolist()
# idx = idx.reshape(-1).tolist()
# indices = range(len(dist))
# indices.sort(key=lambda i: dist[i])
# dist = [dist[i] for i in indices]
# idx = [idx[i] for i in indices]
# TieP_rpc_final = []
# for i, dis in itertools.izip(idx, dist):
# if dis < distance:
# TieP_rpc_final.append(TieP_rpc[i])
#
# return TieP_ref_final,TieP_rpc_final
#
# def Orfeo_homologous_points_extraction(im_ref,im_rpc):
# HomologousPointsExtraction = otbApplication.Registry.CreateApplication("HomologousPointsExtraction")
# # The following lines set all the application parameters:
# # HomologousPointsExtraction.SetParameterString("in1", "sensor_stereo_left.tif")
# # HomologousPointsExtraction.SetParameterString("in2", "sensor_stereo_right.tif")
# HomologousPointsExtraction.SetParameterString("in1", im_ref)
# HomologousPointsExtraction.SetParameterString("in2", im_rpc)
# HomologousPointsExtraction.SetParameterString("mode","full")
# HomologousPointsExtraction.SetParameterString("out", "homologous.txt")
# # The following line execute the application
# HomologousPointsExtraction.ExecuteAndWriteOutput()
#
# def generate_RPCs(RSD_L1A, DGM_L1A, masks_L1A,path_out_baseN =''):
# ''' Generates RPC model and returns RPC points as list. '''
# print('\n##################### Level 1B Processing #####################')
# logging.info('Level 1B Processing started.')
# if isinstance(RSD_L1A,np.ndarray):
# # The following line creates an instance of the GenerateRPCSensorModel application
# GenerateRPCSensorModel = otbApplication.Registry.CreateApplication("GenerateRPCSensorModel")
#
# # The following lines set all the application parameters:
# GenerateRPCSensorModel.SetParameterString("outgeom", "output.geom")
# GenerateRPCSensorModel.SetParameterString("inpoints", "points.txt")
# GenerateRPCSensorModel.SetParameterString("map","epsg")
# GenerateRPCSensorModel.SetParameterInt ("map.epsg.code", 32631)
#
# # The following line execute the application
# GenerateRPCSensorModel.ExecuteAndWriteOutput()
# else:
# logging.info('L1B-Processor accepts only numpy arrays as first input argument. Execution stopped.')
# raise ValueError('L1B-Processor accepts only numpy arrays as first input argument.')
#
# print('Generating dummy RPCs...')
# logging.info('Generating dummy RPCs...')
# list_RPCs = [0]*93
# return list_RPCs
#
# def update_metadata(list_RPCs, L1A_meta2update, path_out_baseN =''):
# ''' Adds RPC points to metadata of RS data, masks, atmospheric layers and DGM. '''
# if isinstance(L1A_meta2update,dict):
# # metadata dictionary updater
# L1A_meta2update['rpc coeffs'] = list_RPCs
# L1B_meta = L1A_meta2update
# # L1B_meta = L1A_meta2update.update({'rpc coeffs':list_RPCs}) # result = None
# return L1B_meta
#
# elif isinstance(L1A_meta2update,str) and os.path.splitext(L1A_meta2update)[1] in ['.BSQ', '.bsq'] and \
# os.path.isfile(os.path.splitext(L1A_meta2update)[0] + '.hdr'):
# # header file updater
# hdr_path = os.path.splitext(L1A_meta2update)[0] + '.hdr'
# L1A_meta2update = envi.read_envi_header(hdr_path)
# L1A_meta2update['rpc coeffs'] = list_RPCs
# L1B_meta = L1A_meta2update
# return L1B_meta
#
# else:
# logging.info('L1B-Processor accepts only L1A metadata dictionaries or path-strings to L1A header files as '
# 'second input argument. Execution stopped.')
# raise ValueError('L1B-Processor accepts only L1A metadata dictionaries or path-strings to L1A header files '
# 'as second input argument.')
#
# def convert_to_8bit(Inimage,In_dtype):
# image_min,image_max = (np.min(Inimage), np.max(Inimage))
# # lut = np.arange(np.iinfo(In_dtype).max, dtype=In_dtype)
# lut = np.arange(2**16, dtype='uint16')
# Outimage = np.array(lut, copy=True)
# Outimage.clip(image_min, image_max, out=Outimage)
# Outimage -= image_min
# Outimage //= (image_max - image_min + 1) / 256.
# lut = Outimage.astype(np.uint8)
# return np.take(lut, Inimage)
#
# def L1B_P__main(L1A_Instances):
# for i in L1A_Instances:
# if i.image_type == 'RSD':
# if i.georef == 'Master':
# if i.arr_shape == 'cube':
# im_ref = convert_to_8bit(i.arr[:,:,0],i.arr.dtype)
# else:
# im_ref = convert_to_8bit(i.arr,i.arr.dtype)
# print('Calling tie point calculation with master image %s' %i.entity_ID)
# else:
# if i.arr_shape == 'cube':
# im_rpc = convert_to_8bit(i.arr[:,:,0],i.arr.dtype)
# else:
# im_rpc = convert_to_8bit(i.arr,i.arr.dtype)
# print('Calling tie point calculation with slave image %s' %i.entity_ID)
# print(im_ref.shape,im_rpc.shape)
#         # Orfeo_homologous_points_extraction(im_ref,im_rpc) # only works from disk
# TieP_ref,TieP_rpc = calculate_TiePoints(im_ref,im_rpc)
# print('%s master tie points calculated.' %len(TieP_ref))
# </editor-fold>
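# For reference, a hedged sketch of the same SIFT tie point matching using the
# modern OpenCV (>= 4.4) API, replacing the removed cv2 2.x calls above; the
# function name and ratio test threshold are illustrative, not part of this commit.
#
# import cv2
# import numpy as np
#
# def calculate_tiepoints(im_ref, im_tgt, ratio=0.75):
#     sift = cv2.SIFT_create()  # replaces cv2.FeatureDetector_create("SIFT")
#     kp_ref, desc_ref = sift.detectAndCompute(im_ref, None)
#     kp_tgt, desc_tgt = sift.detectAndCompute(im_tgt, None)
#     flann = cv2.FlannBasedMatcher(dict(algorithm=1, trees=4), dict(checks=50))
#     matches = flann.knnMatch(desc_ref, desc_tgt, k=2)
#     # Lowe's ratio test instead of the fixed distance threshold used above
#     good = [m for m, n in matches if m.distance < ratio * n.distance]
#     pts_ref = np.float32([kp_ref[m.queryIdx].pt for m in good])
#     pts_tgt = np.float32([kp_tgt[m.trainIdx].pt for m in good])
#     return pts_ref, pts_tgt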
__author__ = 'Daniel Scheffler'
class Scene_finder(object):
@@ -332,7 +176,7 @@ class Scene_finder(object):
GDF = self.GDF_ref_scenes
if not GDF.empty:
# compare projections of target and reference image
from ..io.Input_reader import read_ENVIhdr_to_dict
from ..io.input_reader import read_ENVIhdr_to_dict
def get_prj(path_binary):
return read_ENVIhdr_to_dict(os.path.splitext(path_binary)[0] + '.hdr')['coordinate system string']
......
@@ -14,11 +14,11 @@ from geoarray import GeoArray
from py_tools_ds.geo.map_info import mapinfo2geotransform
from ..config import GMS_config as CFG
from . import GEOPROCESSING as GEOP
from . import geoprocessing as GEOP
from .L1B_P import L1B_object
from ..model.METADATA import get_LayerBandsAssignment
from ..model.metadata import get_LayerBandsAssignment
from ..misc.definition_dicts import get_outFillZeroSaturated, proc_chain, get_mask_classdefinition
from ..io.Input_reader import SRF
from ..io.input_reader import SRF
# from .cloud_masking import Cloud_Mask_Creator # circular dependencies
from sicor.sicor_ac import ac_gms
......
# -*- coding: utf-8 -*-
"""Level 2A Processor: Spatial homogenization"""
import collections
import os
import warnings
import numpy as np
from geoarray import GeoArray
from py_tools_ds.geo.map_info import mapinfo2geotransform
from ..config import GMS_config as CFG
from .L1C_P import L1C_object
__author__ = 'Daniel Scheffler'
def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False, paramsFromUsecase=True, **kwargs):
"""
Get list of argument and keyword argument tuples as input for DESHIFTER class.
:param dicts_GMS_obj: list of the copied dictionaries of GMS objects, containing the attribute 'coreg_info'
:param attrnames2deshift: list of attribute names of the GMS object containing the array to be shifted (or a path)
:param proc_bandwise: True: configurations for each band of the array to be shifted are returned
(if DESHIFTER will be called in multiprocessing), default = False
:param paramsFromUsecase: True: respect the usecase parameters align_coord_grids, target_gsd and match_gsd when
executing DESHIFTER class, default = True
:Keyword Arguments:
- band2process (int): The index of the band to be processed within the given array (starts with 1),
default = None (all bands are processed)
- out_gsd (float): output pixel size in units of the reference coordinate system (default = pixel size
of the input array)
- align_grids (bool): True: align the input coordinate grid to the reference (does not affect the
output pixel size as long as input and output pixel sizes are compatible
(5:30 or 10:30 but not 4:30)), default = False
- match_gsd (bool): True: match the input pixel size to the reference pixel size,
default = False
- no_resamp (bool): True: force avoiding of any resampling (shifts are corrected via ENVI map info),
default = False
- cliptoextent (bool): True: clip the input image to its actual bounds while deleting possible no data
areas outside of the actual bounds, default = True
"""
# FIXME this method must define the target grid, even if no reference is available!
illegal_kw = [i for i in kwargs if i not in ['align_grids', 'out_gsd', 'match_gsd', 'no_resamp', 'cliptoextent']]
assert illegal_kw == [], "'%s' is not a legal keyword argument for L1B_P.get_DESHIFTER_configs()" % illegal_kw[0]
dicts_GMS_obj = [dicts_GMS_obj] if not isinstance(dicts_GMS_obj, list) else dicts_GMS_obj
attrnames2deshift = [attrnames2deshift] if not isinstance(attrnames2deshift, list) else attrnames2deshift
# get general kwargs
gen_kwargs = collections.OrderedDict()
if paramsFromUsecase:
gen_kwargs.update({'align_grids': CFG.usecase.align_coord_grids})
gen_kwargs.update({'out_gsd': CFG.usecase.target_gsd})
gen_kwargs.update({'match_gsd': CFG.usecase.match_gsd})
else:
[gen_kwargs.update({kw: kwargs.get(kw)}) for kw in ['align_grids', 'out_gsd', 'match_gsd'] if kw in kwargs]
[gen_kwargs.update({kw: kwargs.get(kw)}) for kw in ['no_resamp', 'cliptoextent'] if kw in kwargs]
config_dicts = []
for obj in dicts_GMS_obj:
# FIXME workaround for missing reference geotransform
# FIXME -> actually the target grid should be computed here, not the geotransform
assert isinstance(obj, dict)
if not obj['coreg_info']['reference geotransform']:
obj['coreg_info']['reference geotransform'] = mapinfo2geotransform(
obj['coreg_info']['original map info'])
obj['coreg_info']['reference geotransform'][1] = CFG.usecase.target_gsd[0]
obj['coreg_info']['reference geotransform'][5] = -abs(CFG.usecase.target_gsd[1])
item2add = [obj]
for attrname in attrnames2deshift:
attrVal = obj[attrname]
attritem2add = item2add + [attrname]
if isinstance(attrVal, np.ndarray) or isinstance(attrVal, GeoArray) and attrVal.is_inmem:
bands = attrVal.shape[2] if attrVal.ndim == 3 else None
elif isinstance(attrVal, GeoArray) and not attrVal.is_inmem:
if os.path.exists(attrVal):
bands = attrVal.bands
else:
warnings.warn('get_DESHIFTER_configs: Missing file %s. File skipped.' % attrVal)
continue
elif attrVal is None:
continue
else:
raise Exception('Unexpected attribute type %s in attribute %s.' % (type(attrVal), attrname))
if proc_bandwise and bands is not None and 'band2process' not in kwargs:
for bI in range(bands):
kwargs2add = collections.OrderedDict()
kwargs2add.update({'band2process': bI + 1})
kwargs2add.update(gen_kwargs)
banditem2add = attritem2add + [kwargs2add]
config_dicts.append(banditem2add)
elif 'band2process' in kwargs and kwargs.get('band2process') is not None:
assert isinstance(kwargs.get('band2process'), int), "'band2process' must contain an integer."
kwargs2add = collections.OrderedDict({'band2process': kwargs.get('band2process')})
kwargs2add.update(gen_kwargs)
attritem2add.append(kwargs2add)
config_dicts.append(attritem2add)
else:
kwargs2add = collections.OrderedDict(gen_kwargs)
attritem2add.append(kwargs2add)
config_dicts.append(attritem2add)
return config_dicts
class L2A_object(L1C_object):
def __init__(self, L1C_obj=None):
super(L2A_object, self).__init__()
......
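For reference, the deleted get_DESHIFTER_configs() was invoked roughly as sketched below, reconstructed from its docstring; the object dictionaries and attribute names are placeholders, not the verified GMS API:

# Hedged usage sketch for the removed helper (placeholders, see above).
gms_obj_dicts = []  # copied dictionaries of GMS objects, each containing 'coreg_info'

configs = get_DESHIFTER_configs(
    gms_obj_dicts,
    ['arr', 'masks'],        # attribute names holding arrays or file paths
    proc_bandwise=True,      # one config per band, suited for multiprocessing
    paramsFromUsecase=True,  # take align_coord_grids/target_gsd/match_gsd from CFG.usecase
)

# Each returned item is a list [obj_dict, attrname, kwargs] meant as the
# argument tuple for one DESHIFTER call.
for obj_dict, attrname, deshift_kwargs in configs:
    print(attrname, dict(deshift_kwargs))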
@@ -10,7 +10,7 @@ import matplotlib.pyplot as plt
from typing import TypeVar
from ..config import GMS_config as CFG
from ..io.Input_reader import SRF
from ..io.input_reader import SRF
from .L2A_P import L2A_object
__author__ = 'Daniel Scheffler'
......
@@ -7,7 +7,7 @@
#
###############################################################################
from . import GEOPROCESSING
from . import geoprocessing
from . import gms_cloud_classifier
from . import L1A_P
from . import L1B_P
@@ -16,7 +16,7 @@ from . import L2A_P
from . import L2B_P
from . import L2C_P
__all__ = ['GEOPROCESSING',
__all__ = ['geoprocessing',
'gms_cloud_classifier',
'L1A_P',
'L1B_P',
......