Commit 9142f468 authored by Daniel Scheffler's avatar Daniel Scheffler
Browse files

First prototype of algorithms for geometrical and spectral resolution working...

First prototype of algorithms for geometrical and spectral resolution working in map-reduce context (L2A, L2B).
GEOP:
    - renamed GEOPROCESSING_BD.py to GEOPROCESSING.py
    - moved get_prjLonLat(), get_proj4info(), corner_coord_to_minmax() to GEOP
    - added docstrings to DN2Rad(), DN2TOARef(), TOARad2Kelvin_fastforward(), DN2DegreesCelsius_fastforward()
L1A_P:
    - L1A_object.fill_arr_from_disk(): further silencing of console outputs
L1B_P:
    - moved get_DESHIFTER_configs() and class DESHIFTER() to L2A_P
    - adjusted initial values for COREG attributes related to reference image (not None anymore in order to make L2A_P work if shift calculation failed)
    - increased database statement timeouts for queries within get_reference_image_params() to 25 sec
    - L1B_object():
        - added attribute "deshift_results"
        - removed deprecated code
        - added join_deshift_results()
        - revised apply_deshift_results()
L2A_P:
    - added get_DESHIFTER_configs() and class DESHIFTER() from L1B_P
    - fixed two bugs in DESHIFTER.correct_shifts() where DESHIFTER.band2process was not respected and whole image cube was read instead of only one band
    - added class L2A_object()
L2B_P:
    - added class L2B_object()
    - L2B_object():
        - added interpolate_cube_linear()
        - added spectral_homogenization()
META:
    - renamed METADATA_BD.py to METADATA.py
INP_R:
    - added quiet mode to read_ENVIfile()
OUT_W:
    - added enviHdr_keyOrder using list from reorder_envi_header()
    - fixed a bug in reorder_ENVI_header() that caused repetitions of header keys
    - adjusted print_dict within mask_to_ENVI_Classification() in order to also support L2A and L2B
HLP_F:
    - added parent objects for L2A and L2B in parentObjDict
    - added type hints to cut_GMS_obj_into_blocks() and merge_GMS_tiles_to_GMS_obj()
GITIGNORE:
    - updated .gitignore file
CFG:
    - added virtual_sensor_id, datasetid_spectral_ref, target_CWL, target_FWHM to usecase class by querying the database
PC:
    - added type hints to mapper functions
    - revised L2A_map_2()
    - added L2B_map_1()
    - revised/added L2A algorithm calls (only Flink mode is supported so far)
    - added L2B algorithm calls (only Flink mode is supported so far)
pgDB:
    - added Sentinel-2A virtual sensors to virtual_sensors table (different spatial resolutions)
    - added wavelength positions and band widths to virtual_sensors table
parent 8abcf2ed
......@@ -2,19 +2,8 @@
.idea/
BAK/
OLD/
database/old/
database/cloud_classifier/
database/sampledata/
database/metadata/
database/processed_data/
testing/out/
algorithms/OLD/
*.pyc
gms_io/robert/
gms_io/robert UNIX-Format konvertiert/
gms_io/robert ALT/
gms_io/landsat_downloader/
algorithms/METADATA.py
......@@ -210,7 +210,7 @@ class GEOPROCESSING(object):
self.originY = self.geotransform[3]
self.pixelWidth = self.geotransform[1]
self.pixelHeight = self.geotransform[5]
self.rot1 = self.geotransform[2]
self.rot1 = self.geotransform[2] # FIXME check
self.rot2 = self.geotransform[4]
self.extent = [self.originX, self.originY, self.originX + (self.cols * self.pixelWidth),
self.originY + (self.rows * self.pixelHeight)] # [ulx, uly, lrx, lry]
......@@ -318,8 +318,8 @@ class GEOPROCESSING(object):
self.update_dataset_related_attributes()
def DN2TOARefOLI(self, offsetsRef, gainsRef, zenith, outPath=None, fill=None, zero=None, saturated=None, cutNeg=True,
optshift=None, v=0):
def DN2TOARefOLI(self, offsetsRef, gainsRef, zenith, outPath=None, fill=None, zero=None, saturated=None,
cutNeg=True, optshift=None, v=0):
"""----METHOD_3a----------------------------------------------------------
converts OLI DN data to TOA Reflectance. http://landsat.usgs.gov/Landsat8_Using_Product.php
......@@ -3143,6 +3143,38 @@ def get_corner_coordinates(gdal_ds=None, gt=None, cols=None, rows=None):
gdal_ds_GT = None
return ext
def get_prjLonLat(fmt='wkt'):
    # type: (str) -> str
    """Returns standard geographic projection (EPSG 4326) in the WKT or PROJ4 format.

    :param fmt:  <str> target format - 'WKT' or 'PROJ4' (matched case-insensitively)
    :return:     <str> the EPSG 4326 projection as a WKT or PROJ4 string, depending on 'fmt'
    :raises AssertionError:  if 'fmt' matches neither 'wkt' nor 'proj4'
    """
    assert re.search('wkt',fmt,re.I) or re.search('Proj4',fmt,re.I), 'unsupported output format'
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)
    # NOTE: WKT wins if 'fmt' happens to match both patterns
    return srs.ExportToWkt() if re.search('wkt',fmt,re.I) else srs.ExportToProj4()
def get_proj4info(ds=None,proj=None):
    # type: (gdal.Dataset,str) -> str
    """Returns PROJ4 formatted projection info for the given gdal.Dataset or projection respectively,
    e.g. '+proj=utm +zone=43 +datum=WGS84 +units=m +no_defs '

    :param ds:    <gdal.Dataset> the gdal dataset to get PROJ4 info for (takes precedence over 'proj')
    :param proj:  <str> the WKT projection string to get PROJ4 formatted info for
    :return:      <str> PROJ4 formatted projection info
    :raises AssertionError:  if neither 'ds' nor 'proj' is given
    """
    assert ds or proj, "Specify at least one of the arguments 'ds' or 'proj'"
    srs = osr.SpatialReference()
    # the dataset's projection is preferred if a dataset was passed
    srs.ImportFromWkt(ds.GetProjection() if ds else proj)
    return srs.ExportToProj4()
def corner_coord_to_minmax(corner_coords):
    # type: (list) -> (float,float,float,float)
    """Convert a list of corner coordinates to a bounding-box tuple.

    :param corner_coords: list of coordinate pairs like [[x1,y1],[x2,y2],...]
    :return: (xmin, xmax, ymin, ymax); each value is truncated to an integer
             (callers use these as pixel indices)
    """
    xs = [int(pt[0]) for pt in corner_coords]
    ys = [int(pt[1]) for pt in corner_coords]
    return min(xs), max(xs), min(ys), max(ys)
def get_footprint_polygon(CornerLonLat):
""" Converts a list of coordinates into a shapely polygon object.
:param CornerLonLat: a list of coordinate tuples like [[lon,lat], [lon. lat], ..]
......@@ -3169,6 +3201,21 @@ def get_overlap_polygon(poly1, poly2):
return {'overlap poly':None, 'overlap percentage':0, 'overlap area':0}
def DN2Rad(ndarray, offsets, gains, inFill=None, inZero=None, inSaturated=None, cutNeg=True):
#type: (np.ndarray,list,list,int,int,int,bool) -> np.ndarray
"""Convert DN to Radiance [W * m-2 * sr-1 * micrometer-1]
!!! InputGains and Offsets should be in [W * m-2 * sr-1 * micrometer-1]
:param ndarray: <np.ndarray> array of DNs to be converted into radiance
:param offsets: [W * m-2 * sr-1 * micrometer-1]:
list that includes the offsets of the individual rasterbands [offset_band1, offset_band2,
... ,offset_bandn] or optional input as number if Dataset has only 1 Band
:param gains: [W * m-2 * sr-1 * micrometer-1]:
list that includes the gains of the individual rasterbands [gain_band1, gain_band2, ... ,
gain_bandn] or optional input as number if Dataset has only 1 Band
:param inFill: pixelvalues allocated to background/dummy/fill pixels
:param inZero: pixelvalues allocated to zero radiance
:param inSaturated: pixelvalues allocated to saturated pixels
:param cutNeg: cutNegvalues -> all negative values set to 0
"""
assert isinstance(offsets,list) and isinstance(gains,list), \
"Offset and Gain parameters have to be provided as two lists containing gains and offsets for \
each band in ascending order. Got offsets as type '%s' and gains as type '%s'." %(type(offsets),type(gains))
......@@ -3200,6 +3247,25 @@ def DN2Rad(ndarray, offsets, gains, inFill=None, inZero=None, inSaturated=None,
def DN2TOARef(ndarray, offsets, gains, irradiances, zenith, earthSunDist,
inFill=None, inZero=None, inSaturated=None, cutNeg=True):
#type: (np.ndarray,list,list,list,float,float,int,int,int,bool) -> np.ndarray
"""Converts DN data to TOA Reflectance.
:param ndarray: <np.ndarray> array of DNs to be converted into TOA reflectance
:param offsets: list: of offsets of each rasterband [W * m-2 * sr-1 * micrometer-1]
[offset_band1, offset_band2, ... ,offset_bandn] or optional as number if Dataset has
only 1 Band
:param gains: list: of gains of each rasterband [W * m-2 * sr-1 * micrometer-1]
[gain_band1, gain_band2, ... ,gain_bandn] or optional as number if Dataset has
only 1 Band
:param irradiances: list: of irradiance of each band [W * m-2 * micrometer-1]
[irradiance_band1, irradiance_band2, ... ,irradiance_bandn]
:param zenith: number: sun zenith angle
:param earthSunDist: earth-sun- distance for a certain day
:param inFill: number: pixelvalues allocated to background/dummy/fill pixels
:param inZero: number: pixelvalues allocated to zero radiance
:param inSaturated: number: pixelvalues allocated to saturated pixles
:param cutNeg: bool: if true. all negative values turned to zero. default: True
:return: Int16 TOA_Reflectance in [0-10000]
"""
assert isinstance(offsets,list) and isinstance(gains,list) and isinstance(irradiances, list), \
"Offset, Gain, Irradiance parameters have to be provided as three lists containing gains, offsets and " \
"irradiance for each band in ascending order. Got offsets as type '%s', gains as type '%s' and irradiance as " \
......@@ -3231,7 +3297,16 @@ def DN2TOARef(ndarray, offsets, gains, irradiances, zenith, earthSunDist,
def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZero=None, inSaturated=None):
# type: (np.ndarray,list,list,float,int,int,int) -> np.ndarray
"""Convert top-of-atmosphere radiances of thermal bands to temperatures in Kelvin
by applying the inverse of the Planck function.
:param ndarray: <np.ndarray> array of TOA radiance values to be converted into Kelvin
:param K1:
:param K2:
:param emissivity:
:param inFill:
:param inZero:
:param inSaturated:
"""
bands = 1 if len(ndarray.shape)==2 else ndarray.shape[2]
for arg,argname in zip([K1,K2],['K1', 'K2']):
assert isinstance(arg[0],float) or isinstance(arg[0],int), "TOARad2Kelvin_fastforward: Expected float or " \
......@@ -3261,7 +3336,19 @@ def TOARad2Kelvin_fastforward(ndarray, K1, K2, emissivity=0.95, inFill=None, inZ
def DN2DegreesCelsius_fastforward(ndarray,offsets, gains, K1, K2, emissivity=0.95,
inFill=None, inZero=None, inSaturated=None):
"""Convert thermal DNs to temperatures in degrees Celsius
by calculating TOARadiance and applying the inverse of the Planck function.
:param ndarray: <np.ndarray> array of DNs to be converted into Degrees Celsius
:param offsets:
:param gains:
:param K1:
:param K2:
:param emissivity:
:param inFill:
:param inZero:
:param inSaturated:
"""
bands = 1 if len(ndarray.shape)==2 else ndarray.shape[2]
for arg,argname in zip([offsets,gains,K1,K2],['Offset', 'Gain','K1','K2']):
assert isinstance(offsets,list) and isinstance(gains,list), \
......@@ -3721,17 +3808,4 @@ def get_subsetProps_from_subsetArg(shape_fullArr,subset):
return collections.OrderedDict(zip(
['rows','cols','bands','rowStart','rowEnd','colStart','colEnd','bandStart','bandEnd','bandsList'],
[ rows , cols , bands , rowStart , rowEnd , colStart , colEnd , bandStart , bandEnd , bandsList ]))
[ rows , cols , bands , rowStart , rowEnd , colStart , colEnd , bandStart , bandEnd , bandsList ]))
\ No newline at end of file
......@@ -24,7 +24,7 @@ from gms_io import Input_reader as INP_R
from misc import helper_functions as HLP_F
from misc import database_tools as DB_T
from misc import path_generator as PG
from algorithms.METADATA_BD import get_LayerBandsAssignment
from algorithms.METADATA import get_LayerBandsAssignment
########################### core functions ####################################
def get_entity_IDs_within_AOI(): # called in console mode
......
......@@ -36,8 +36,8 @@ import matplotlib.pyplot as plt
from pyhdf import SD
from spectral.io import envi
from algorithms import METADATA_BD as META
from algorithms import GEOPROCESSING_BD as GEOP
from algorithms import METADATA as META
from algorithms import GEOPROCESSING as GEOP
from algorithms import gms_cloud_classifier as CLD_P # Cloud Processor
from algorithms import py_tools_ah
from gms_io import envifilehandling_BD as ef
......@@ -194,11 +194,11 @@ class L1A_object(object):
path_masks = PG_obj.get_path_maskdata()
path_maskClouds = PG_obj.get_path_cloudmaskdata()
if job.exec_mode=='Flink':
self.arr = INP_R.read_ENVIfile(path_arr,self.arr_shape,self.arr_pos,self.logger)
self.arr = INP_R.read_ENVIfile(path_arr,self.arr_shape,self.arr_pos,self.logger,q=1)
self.mask_1bit = INP_R.read_mask_subset(path_masks,'mask_1bit', self.logger,tuple_GMS_subset[1])
self.mask_clouds = INP_R.read_mask_subset(path_masks,'mask_clouds',self.logger,tuple_GMS_subset[1])
if self.arr_pos: self.logger.info('Reading file: %s @ position %s' %(self.baseN, self.arr_pos))
else: self.logger.info('Reading file: %s' %self.baseN)
self.log_for_fullArr_or_firstTile(self.logger,'Reading file %s as tiles...' %self.baseN \
if self.arr_pos else 'Reading file %s...' %self.baseN)
#self.masks is only needed by Output writer to masks combined -> generated there and on demand
else: # job.exec_mode=='Python'
self.arr = path_arr
......
......@@ -16,29 +16,23 @@
########################### Library import ####################################
#from __future__ import (division, print_function, unicode_literals,absolute_import)
import itertools
import logging
import builtins
import collections
import glob
import json
import os
import re
import socket
import subprocess
import sys
import time
from datetime import timedelta
import builtins
import rasterio
import collections
import warnings
import shutil
import glob
import json
from scipy import interpolate as intp
from datetime import timedelta
from spectral.io import envi
import gdal
import numpy as np
import osr
import pyfftw
import rasterio
if socket.gethostname() == 'geoms':
sys.path.append('/usr/lib/otb/python/')
......@@ -57,12 +51,12 @@ except ImportError: print('cv2-lib missing..')
job, usecase, GMS_call_type = builtins.GMS_config.job, builtins.GMS_config.usecase, builtins.GMS_config.GMS_call_type
from gms_io import Output_writer as OUT_W
from misc import helper_functions as HLP_F
from algorithms import GEOPROCESSING_BD as GEOP
from algorithms import GEOPROCESSING as GEOP
from misc import path_generator as PG
from misc import database_tools as DB_T
from algorithms.L1A_P import L1A_object
#from algorithms.L2A_P import get_DESHIFTER_configs, DESHIFTER
########################### core functions ####################################
# <editor-fold desc="deprecated/unused functions">
# def calculate_TiePoints(im_ref,im_rpc,distance = 200):
# detector = cv2.FeatureDetector_create ("SIFT")
......@@ -279,7 +273,7 @@ class COREG(object):
assert self.shift_prj , 'The image to be shifted has no projection.'
assert not re.search('LOCAL_CS',self.shift_prj), 'The image to be shifted is not georeferenced.'
assert self.shift_gt , 'The image to be shifted has no map information.'
assert get_proj4info(self.ds_imref) == get_proj4info(self.ds_im2shift),\
assert GEOP.get_proj4info(self.ds_imref) == GEOP.get_proj4info(self.ds_im2shift),\
'Input projections are not equal. Different projections are currently not supported.'
"""optionally write shapes (always LonLat because GMS shapely polygons are always LonLat"""
......@@ -288,9 +282,9 @@ class COREG(object):
self.verbose_out = os.path.join(PG.path_generator(dict_L1A_Instance).get_path_procdata(),
'CoReg_verboseOut__%s__shifted_to__%s' %(get_baseN(self.path_im2shift), get_baseN(self.path_imref)))
if not os.path.isdir(self.verbose_out): os.makedirs(self.verbose_out)
OUT_W.write_shp(self.imref_footprint_poly, os.path.join(self.verbose_out,'poly_imref.shp'), get_prjLonLat())
OUT_W.write_shp(self.footprint_poly, os.path.join(self.verbose_out,'poly_im2shift.shp'), get_prjLonLat())
OUT_W.write_shp(self.overlap_poly, os.path.join(self.verbose_out,'overlap_poly.shp'), get_prjLonLat())
OUT_W.write_shp(self.imref_footprint_poly, os.path.join(self.verbose_out,'poly_imref.shp'), GEOP.get_prjLonLat())
OUT_W.write_shp(self.footprint_poly, os.path.join(self.verbose_out,'poly_im2shift.shp'), GEOP.get_prjLonLat())
OUT_W.write_shp(self.overlap_poly, os.path.join(self.verbose_out,'overlap_poly.shp'), GEOP.get_prjLonLat())
"""get bands to use for matching"""
self.get_opt_bands4matching(target_cwlPos_nm=550)
......@@ -299,6 +293,11 @@ class COREG(object):
self.get_opt_winpos_winsize()
self.success = True
else:
self.x_shift_px, self.y_shift_px = 0,0
self.x_shift_map, self.y_shift_map = 0,0
self.updated_map_info = self.map_info_to_update
def get_reference_image_params(self):
......@@ -323,7 +322,7 @@ class COREG(object):
query_scenes = lambda condlist: DB_T.get_overlapping_scenes_from_postgreSQLdb(job.conn_database,
table='scenes', trueDataCornerLonLat=self.trueDataCornerLonLat,
conditions=condlist, add_cmds='ORDER BY scenes.cloudcover ASC')
conditions=condlist, add_cmds='ORDER BY scenes.cloudcover ASC', timeout=30000)
conds_descImportance = [dataset_cond, cloudcov_cond, dayrange_cond]
temp_logger = HLP_F.setup_logger('log__' + self.baseN, self.im2shift_objDict['path_logfile'],append=1)
......@@ -336,7 +335,7 @@ class COREG(object):
res = DB_T.get_overlapping_scenes_from_postgreSQLdb(
job.conn_database, trueDataCornerLonLat=self.trueDataCornerLonLat,
conditions=['datasetid=%s' %usecase.datasetid_spatial_ref],
add_cmds='ORDER BY scenes.cloudcover ASC') # das ist nur Ergebnis aus scenes_proc -> dort liegt nur eine referenz, wenn die szene schon bei job-Beginn in Datensatzliste drin war
add_cmds='ORDER BY scenes.cloudcover ASC',timeout=25000) # das ist nur Ergebnis aus scenes_proc -> dort liegt nur eine referenz, wenn die szene schon bei job-Beginn in Datensatzliste drin war
filt_overlap_scenes = self.sceneIDList_to_filt_overlap_scenes([i[0] for i in res[:50]],20.)
else:
......@@ -463,7 +462,7 @@ class COREG(object):
overlap_center_pos_x, overlap_center_pos_y = self.overlap_poly.centroid.coords.xy
# transform to reference projection if needed (get_image_windows_to_match expects win_pos as ref_prj
if get_proj4info(proj=self.ref_prj) != get_prjLonLat(fmt='Proj4'): # reference has no LonLat projection
if GEOP.get_proj4info(proj=self.ref_prj) != GEOP.get_prjLonLat(fmt='Proj4'): # reference has no LonLat projection
srs = osr.SpatialReference()
srs.ImportFromWkt(self.ref_prj)
assert srs.IsProjected(), 'L1B processor only takes LonLat or UTM datasets' # FIXME Bug
......@@ -918,392 +917,13 @@ class COREG(object):
self.updated_map_info[4] = str(float(self.map_info_to_update[4]) + self.y_shift_map)
print('Updated map info:',self.updated_map_info)
def get_prjLonLat(fmt='wkt'):
    """Return the standard geographic projection (EPSG 4326) as WKT or PROJ4 string."""
    wants_wkt = re.search('wkt', fmt, re.I) is not None
    assert wants_wkt or re.search('Proj4', fmt, re.I), 'unsupported output format'
    srs = osr.SpatialReference()
    srs.ImportFromEPSG(4326)
    if wants_wkt:
        return srs.ExportToWkt()
    return srs.ExportToProj4()
def get_proj4info(ds=None, proj=None):
    """Return PROJ4-formatted projection info for a gdal.Dataset or a WKT projection string."""
    # equivalent (De Morgan) to: ds is not None or proj is not None
    assert not (ds is None and proj is None), "Specify at least one of the arguments 'ds' or 'proj'"
    wkt = proj if ds is None else ds.GetProjection()
    srs = osr.SpatialReference()
    srs.ImportFromWkt(wkt)
    return srs.ExportToProj4()
#=====================================================================================================================
def get_DESHIFTER_configs(dicts_GMS_obj, attrnames2deshift, proc_bandwise=False, paramsFromUsecase=True, **kwargs):
    """
    Get list of argument and keyword argument tuples as input for DESHIFTER class.

    Each returned item has the form [GMS_obj_dict, attrname, kwargs_dict] and can be
    passed on to DESHIFTER (one item per object/attribute, or per band if
    'proc_bandwise' is set).

    :param dicts_GMS_obj:     list of the copied dictionaries of GMS objects, containing the attribute 'coreg_info'
    :param attrnames2deshift: list of attribute names of the GMS object containing the array to be shifted (or a path)
    :param proc_bandwise:     True: configurations for each band of the array to be shifted are returned
                              (if DESHIFTER will be called in multiprocessing), default = False
    :param paramsFromUsecase: True: respect the usecase parameters align_coord_grids, target_gsd and match_gsd when
                              executing DESHIFTER class, default = True
    :Keyword Arguments:
        - band2process (int):  The index of the band to be processed within the given array (starts with 1),
                               default = None (all bands are processed)
        - out_gsd (float):     output pixel size in units of the reference coordinate system (default = pixel size
                               of the input array)
        - align_grids (bool):  True: align the input coordinate grid to the reference (does not affect the
                               output pixel size as long as input and output pixel sizes are compatible
                               (5:30 or 10:30 but not 4:30), default = False
        - match_gsd (bool):    True: match the input pixel size to the reference pixel size,
                               default = False
        - no_resamp (bool):    True: force avoiding of any resampling (shifts are corrected via ENVI map info),
                               default = False
        - cliptoextent (bool): True: clip the input image to its actual bounds while deleting possible no data
                               areas outside of the actual bounds, default = True
    :return: list of [GMS_obj_dict, attrname, OrderedDict_of_kwargs] configurations
    """
    # reject unknown keyword arguments early
    # NOTE(review): the message still says 'L1B_P' although the function moved to L2A_P
    illegal_kw = [i for i in kwargs if i not in ['align_grids','out_gsd','match_gsd','no_resamp','cliptoextent']]
    assert illegal_kw == [], "'%s' is not a legal keyword argument for L1B_P.get_DESHIFTER_configs()" %illegal_kw[0]
    # allow passing single objects/names instead of lists
    dicts_GMS_obj = [dicts_GMS_obj] if not isinstance(dicts_GMS_obj,list) else dicts_GMS_obj
    attrnames2deshift = [attrnames2deshift] if not isinstance(attrnames2deshift,list) else attrnames2deshift
    # get general kwargs (shared by every generated configuration)
    gen_kwargs = collections.OrderedDict()
    if paramsFromUsecase:
        # take align_grids/out_gsd/match_gsd from the global usecase configuration
        gen_kwargs.update({'align_grids':usecase.align_coord_grids})
        gen_kwargs.update({'out_gsd'    :usecase.target_gsd})
        gen_kwargs.update({'match_gsd'  :usecase.match_gsd})
    else:
        # otherwise forward only the explicitly given kwargs
        [gen_kwargs.update({kw:kwargs.get(kw)}) for kw in ['align_grids','out_gsd','match_gsd'] if kw in kwargs]
    [gen_kwargs.update({kw:kwargs.get(kw)}) for kw in ['no_resamp','cliptoextent'] if kw in kwargs]
    config_dicts = []
    for obj in dicts_GMS_obj:
        item2add = [obj]
        for attrname in attrnames2deshift:
            attritem2add = item2add+[attrname]
            # determine the band count of the attribute; None means "single band / whole array"
            if isinstance(obj[attrname],np.ndarray):
                bands = obj[attrname].shape[2] if len(obj[attrname].shape)==3 else None
            elif isinstance(obj[attrname],str):
                # attribute holds a file path -> query the band count via GDAL
                if os.path.exists(obj[attrname]):
                    bands = gdal.Open(obj[attrname]).RasterCount
                    bands = bands if bands > 1 else None
                else:
                    warnings.warn('get_DESHIFTER_configs: Missing file %s. File skipped.' % obj[attrname])
                    continue
            else:
                raise Exception('Unsupported attribute type %s in attribute %s.' %(type(obj[attrname]),attrname))
            if proc_bandwise and bands is not None and 'band2process' not in kwargs:
                # one configuration per band (band indices start at 1)
                for bI in range(bands):
                    kwargs2add = collections.OrderedDict()
                    kwargs2add.update({'band2process':bI+1})
                    kwargs2add.update(gen_kwargs)
                    banditem2add = attritem2add+[kwargs2add]
                    config_dicts.append(banditem2add)
            elif 'band2process' in kwargs and kwargs.get('band2process') is not None:
                # a specific band was requested -> single configuration for that band
                assert isinstance(kwargs.get('band2process'),int), "'band2process' must contain an integer."
                kwargs2add = collections.OrderedDict({'band2process':kwargs.get('band2process')})
                kwargs2add.update(gen_kwargs)
                attritem2add.append(kwargs2add)
                config_dicts.append(attritem2add)
            else:
                # whole array in one go
                kwargs2add = collections.OrderedDict(gen_kwargs)
                attritem2add.append(kwargs2add)
                config_dicts.append(attritem2add)
    return config_dicts
class DESHIFTER(object):
def __init__(self,dict_GMS_obj, attrname2deshift, **kwargs):
"""
Deshift an image array or one of its products by applying the coregistration info calculated by COREG class.
:param dict_GMS_obj: the copied dictionary of a GMS object, containing the attribute 'coreg_info'
:param attrname2deshift: attribute name of the GMS object containing the array to be shifted (or a its path)
:Keyword Arguments:
- band2process (int): The index of the band to be processed within the given array (starts with 1),
default = None (all bands are processed)
- out_gsd (float): output pixel size in units of the reference coordinate system (default = pixel size
of the input array), given values are overridden by match_gsd=True
- align_grids (bool): True: align the input coordinate grid to the reference (does not affect the
output pixel size as long as input and output pixel sizes are compatible
(5:30 or 10:30 but not 4:30), default = False
- match_gsd (bool): True: match the input pixel size to the reference pixel size,
default = False
- no_resamp (bool): True: force avoiding of any resampling (shifts are corrected via ENVI map info),
default = False
- cliptoextent (bool): True: clip the input image to its actual bounds while deleting possible no data
areas outside of the actual bounds, default = True
"""
# unpack kwargs
self.band2process = kwargs.get('band2process',None)
out_gsd = kwargs.get('out_gsd' ,None)
self.align_grids = kwargs.get('align_grids' ,False)
self.match_gsd = kwargs.get('match_gsd' ,False)
tempAsENVI = kwargs.get('tempAsENVI' ,False)
self.outFmt = 'VRT' if not tempAsENVI else 'ENVI'
self.no_resamp = kwargs.get('no_resamp' ,False) # Fixme deprecated?
self.cliptoextent = kwargs.get('cliptoextent',True)
# unpack args
self.shift_scene_ID = dict_GMS_obj['scene_ID']
self.shift_entity_ID = dict_GMS_obj['entity_ID']
self.shift_prj = dict_GMS_obj['meta']['coordinate system string']
self.shift_gt = GEOP.mapinfo2geotransform(dict_GMS_obj['meta']['map info'])
self.shift_trueDataCornerPos = dict_GMS_obj['trueDataCornerPos']
self.shift_trueDataCornerLonLat = dict_GMS_obj['trueDataCornerLonLat']
self.im2shift = dict_GMS_obj[attrname2deshift] # TODO checken obs funktioniert, weil ja nur dict übergeben wird, statt dem obj
self.shift_shape = self.get_dims_im2shift()
self.ds_im2shift = None # from disk or from L1A object; set by self.get_ds_im2shift() called from self.correct shifts)
self.attrname2deshift = attrname2deshift
self.updated_map_info = dict_GMS_obj['coreg_info']['updated map info']
self.original_map_info= dict_GMS_obj['coreg_info']['updated map info']
self.updated_gt = GEOP.mapinfo2geotransform(self.updated_map_info)
self.ref_scene_ID = dict_GMS_obj['coreg_info']['reference scene ID']
self.ref_entity_ID = dict_GMS_obj['coreg_info']['reference entity ID']
self.ref_prj = dict_GMS_obj['coreg_info']['reference projection']
self.ref_gt = dict_GMS_obj['coreg_info']['reference geotransform']
self.ref_cols = dict_GMS_obj['coreg_info']['reference extent']['cols']
self.ref_rows = dict_GMS_obj['coreg_info']['reference extent']['rows']
self.is_shifted = dict_GMS_obj['coreg_info']['is shifted']
self.is_resampled = dict_GMS_obj['coreg_info']['is resampled']
self.updated_projection = self.ref_prj
# set the rest
self.ref_xgsd,self.shift_xgsd = abs(self.ref_gt[1]), abs(self.shift_gt[1])
self.ref_ygsd,self.shift_ygsd = abs(self.ref_gt[5]), abs(self.shift_gt[5])
if not isinstance(out_gsd,int) or (isinstance(out_gsd,list) and len(out_gsd)==2): raise ValueError
self.out_gsd = [self.shift_xgsd, self.shift_ygsd] if out_gsd is None else \
[self.ref_xgsd, self.ref_ygsd] if self.match_gsd else \
[out_gsd, out_gsd] if isinstance(out_gsd,int) else out_gsd
tmpBaseN = '%s__shifted_to__%s' %(dict_GMS_obj['entity_ID'],self.ref_entity_ID)
tempdir = '/dev/shm/GeoMultiSens/' if os.path.exists('/dev/shm/GeoMultiSens/') else\
PG.path_generator(dict_GMS_obj).get_path_procdata()
self.path_tmp = os.path.join(tempdir, '%s.bsq' %tmpBaseN) if tempAsENVI else PG.get_tempfile(ext='.vrt',tgt_dir='/dev/shm/') # FIXME: müsste in PG
self.path_tmp = '/dev/shm/%s.bsq' %tmpBaseN # FIXME
self.outFmt = 'ENVI' # FIXME
self.arr_shifted = None # set by self.correct_shifts
self.deshift_results = None # set by self.correct_shifts
def get_ds_im2shift(self):
print('im2shift', self.im2shift)
if isinstance(self.im2shift,str):
assert os.path.exists(self.im2shift), '%s: The image to be shifted can not be found at %s.'\
%(self.shift_entity_ID, self.im2shift)
ds = gdal.Open(self.im2shift)
assert ds, 'The image %s can not be read by GDAL.' %self.shift_entity_ID
elif isinstance(self.im2shift,np.ndarray):
path_src = PG.get_tempfile(ext='.bsq') # FIXME schreibt .bsq in tempfile
arr_ds = GEOP.ndarray2gdal(self.im2shift, path_src, geotransform=self.shift_gt,
projection=self.shift_prj, direction=3)
path_vrt = PG.get_tempfile(ext='.bsq') # FIXME schreibt .bsq in tempfile
ds = gdal.GetDriverByName('VRT').CreateCopy(path_vrt, arr_ds,0) # FIXME warum überhaupt noch ein VRT schreiben
arr_ds = None
gdal.Unlink(path_src)
else:
raise TypeError('DESHIFTER.im2shift must contain a path or a numpy array. Got %s' &type(self.im2shift))
return ds
def get_dims_im2shift(self):
if isinstance(self.im2shift,str):
assert os.path.exists(self.im2shift), '%s: The image to be shifted can not be found at %s.'\
%(self.shift_entity_ID, self.im2shift)
try: ds = rasterio.open(self.im2shift)
except: raise Exception('The image %s can not be read by GDAL.' %self.shift_entity_ID)
rows, cols, bands = ds.height, ds.width, ds.count
elif isinstance(self.im2shift,np.ndarray):
rows, cols = self.im2shift.shape[:2]
bands = self.im2shift.shape[2] if len(self.im2shift.shape)==3 else 1
else:
raise TypeError('DESHIFTER.im2shift must contain a path or a numpy array. Got %s' %type(self.im2shift))
return rows, cols, bands
def correct_shifts(self):
t0 = time.time()
equal_prj = get_proj4info(proj=self.ref_prj)==get_proj4info(proj=self.shift_prj)
if equal_prj and not self.align_grids and self.out_gsd in [None,[self.shift_xgsd,self.shift_ygsd]]:
"""NO RESAMPLING NEEDED"""
print("DESHIFT: Only the map info of %s has been updated because 'align_grids' is turned off, the pixel "
"sizes keep the same and source and target projections are equal. " %self.shift_entity_ID)
self.is_shifted = True
if self.cliptoextent:
# get true bounds in pixel coordinates
df_xmin,df_xmax,df_ymin,df_ymax = 0, self.shift_shape[1], 0, self.shift_shape[0] # defaults
xmin,xmax,ymin,ymax = corner_coord_to_minmax([[c[1],c[0]] for c in self.shift_trueDataCornerPos])
xmin,xmax,ymin,ymax = [c if c>=0 else df_c for c,df_c in # replace eventually negative px-coordinates
zip([xmin,xmax,ymin,ymax],[df_xmin,df_xmax,df_ymin,df_ymax])]
# overwrite self.arr_shifted with subset read from L1A array
if isinstance(self.im2shift,str): # self.im2shift contains path to disk
tmp_arr = rasterio.open(self.im2shift).read(self.band2process,
window=((ymin,ymax+1), (xmin, xmax+1))) # reads all bands if self.band2process is None
in_arr = np.swapaxes(np.swapaxes(tmp_arr,0,2),0,1) if len(tmp_arr.shape)==3 else tmp_arr
elif isinstance(self.im2shift,np.ndarray): # self.im2shift contains array
in_arr = self.im2shift[ymin:ymax+1, xmin:xmax+1]
else:
raise Exception('Unexpected attribute type for attribute DESHIFTER.im2shift.')
self.arr_shifted = in_arr
# update gt
self.updated_gt[3],self.updated_gt[0] =\
GEOP.pixelToMapYX([xmin,ymin],geotransform=self.updated_gt,projection=self.updated_projection)[0]
self.updated_map_info = GEOP.geotransform2mapinfo(self.updated_gt,self.updated_projection)
else: # FIXME equal_prj==False ist noch NICHT implementiert
"""RESAMPLING NEEDED"""
xgsd, ygsd = self.out_gsd
self.updated_gt[1], self.updated_gt[5] = xgsd, -ygsd
if self.cliptoextent:
XY = [[c[1],c[0]] for c in self.shift_trueDataCornerPos]
XY = [[c[1],c[0]] for c in GEOP.pixelToMapYX(XY, geotransform=self.shift_gt,projection=self.shift_prj)]
s_xmin,s_xmax,s_ymin,s_ymax = corner_coord_to_minmax(XY)
else:
s_xmin,s_xmax,s_ymin,s_ymax = corner_coord_to_minmax(GEOP.get_corner_coordinates(
gt=self.shift_gt,cols=self.shift_shape[1],rows=self.shift_shape[0]))
if self.align_grids:
is_alignable = lambda gsd1,gsd2: max(gsd1,gsd2)%min(gsd1,gsd2)==0 # checks if pixel sizes are divisible
if not is_alignable(self.ref_xgsd,xgsd) or not is_alignable(self.ref_ygsd,ygsd):
print("\nWARNING: The coordinate grids of the reference image and the image to be shifted cannot "
"be aligned because their pixel sizes are not exact multiples of each other (ref [X/Y]: "
"%s %s; shift [X/Y]: %s %s). Therefore the pixel size of the reference image is "