Commit e06e354b authored by Daniel Scheffler


added interactive visualization for comparing reference and target image coregistration before and after de-shifting

components.CoReg.COREG:
- __init__(): added assertion that rejects GeoArray objects where isinstance(geoArr, GeoArray) returns False due to Jupyter Notebook autoreload magic
- _set_outpathes(): added assertion that the input images are passed as file paths or GeoArray instances
- added function show_matchWin() for visualizing reference and target image coregistration before and after de-shifting

components.CoReg_local.COREG_LOCAL:
- __init__(): added assertion that rejects GeoArray objects where isinstance(geoArr, GeoArray) returns False due to Jupyter Notebook autoreload magic

components.Geom_Quality_Grid.Geom_Quality_Grid:
- separated RANSAC outlier detection from _flag_outliers()
- added _ransac_outlier_detection(): still a prototype

- updated __version__
parent 868d070e
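A minimal usage sketch of the new visualization, intended for a Jupyter Notebook. The import path follows the module names above; the constructor arguments and the calculate_spatial_shifts() call are assumptions for illustration, not taken from this commit:

# Hedged usage sketch, not part of this commit. Assumes the package root is on sys.path,
# that COREG accepts reference/target image paths and that calculate_spatial_shifts() exists.
from components.CoReg import COREG

CR = COREG('/path/to/reference_image.bsq', '/path/to/target_image.bsq')
CR.calculate_spatial_shifts()         # estimate the X/Y shift within the matching window

CR.show_matchWin(deshifted=False)     # reference vs. target window before de-shifting
CR.show_matchWin(deshifted=True)      # reference vs. target window after de-shifting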
@@ -9,7 +9,7 @@ from .components import utilities
from .components import geometry
__author__ = 'Daniel Scheffler'
-__version__= '2016-11-15_01'
+__version__= '2016-11-17_01'
__all__=['COREG',
         'COREG_LOCAL',
...
@@ -20,6 +20,7 @@ try:
except ImportError:
    pyfftw = None
from shapely.geometry import Point, Polygon
from skimage.exposure import rescale_intensity
# internal modules
from .DeShifter import DESHIFTER, _dict_rspAlg_rsp_Int
@@ -59,6 +60,11 @@ class imParamObj(object):
        self.GeoArray.progress = CoReg_params['progress']
        self.GeoArray.q = CoReg_params['q']
        assert isinstance(self.GeoArray, GeoArray), \
            'Something went wrong with the creation of the GeoArray instance for the %s. The created ' \
            'instance does not seem to belong to the GeoArray class. If you are working in Jupyter Notebook, ' \
            'reset the kernel and try again.' % self.imName
        # set title to be used in plots
        self.title = os.path.basename(self.GeoArray.filePath) if self.GeoArray.filePath else self.imName
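For context on the assertion above: a self-contained illustration (not the project's code) of how a module reload, such as the one Jupyter's %autoreload performs when source files change, can make isinstance() fail for objects created before the reload:

# Illustration only: after a reload, the module binds a *new* GeoArray class object,
# while instances created earlier still reference the old one, so isinstance() returns False.
import importlib, pathlib, sys, tempfile

tmpdir = tempfile.mkdtemp()
pathlib.Path(tmpdir, 'fakegeo.py').write_text('class GeoArray:\n    pass\n')
sys.path.insert(0, tmpdir)

import fakegeo
arr = fakegeo.GeoArray()                   # instance created before the reload
importlib.reload(fakegeo)                  # plain reload; %autoreload can leave stale class objects in a similar way

print(isinstance(arr, fakegeo.GeoArray))   # -> False, which the new assertions detect early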
@@ -325,6 +331,10 @@ class COREG(object):
    def _set_outpathes(self, im_ref, im_tgt):
        assert isinstance(im_ref, (GeoArray, str)) and isinstance(im_tgt, (GeoArray, str)), \
            'COREG._set_outpathes() expects two file paths (strings) or two instances of the ' \
            'GeoArray class. Received %s and %s.' % (type(im_ref), type(im_tgt))
        get_baseN = lambda path: os.path.splitext(os.path.basename(path))[0]
        # get input paths
@@ -422,6 +432,72 @@
        return m
    def show_matchWin(self, interactive=True, deshifted=False):
        if interactive:
            warnings.warn(UserWarning('This function is still under construction and may not work as expected!'))
            # use Holoviews
            try:
                import holoviews as hv
            except ImportError:
                hv = None
            if not hv:
                raise ImportError(
                    "This method requires the library 'holoviews'. It can be installed for Anaconda with "
                    "the shell command 'conda install -c ioam holoviews bokeh'.")
            warnings.filterwarnings('ignore')
            hv.notebook_extension('matplotlib')
            hv.Store.add_style_opts(hv.Image, ['vmin','vmax'])
            #hv.Store.option_setters.options().Image = hv.Options('style', cmap='gnuplot2')
            #hv.Store.add_style_opts(hv.Image, ['cmap'])
            #renderer = hv.Store.renderers['matplotlib'].instance(fig='svg', holomap='gif')
            #RasterPlot = renderer.plotting_class(hv.Image)
            #RasterPlot.cmap = 'gray'
            matchWin_orig = self.matchWin.data
            otherWin_orig = self.otherWin.data
            otherWin_corr = self._get_deshifted_otherWin()[:]
            xmin,xmax,ymin,ymax = self.matchWin.boundsMap
            get_vmin = lambda arr: np.percentile(arr, 2)
            get_vmax = lambda arr: np.percentile(arr, 98)
            get_arr = lambda arr: rescale_intensity(arr, in_range=(get_vmin(arr), get_vmax(arr)))
            get_hv_image = lambda arr: hv.Image(get_arr(arr), bounds=(xmin,ymin,xmax,ymax))(
                style={'cmap':'gray',
                       'vmin':get_vmin(arr), 'vmax':get_vmax(arr), # does not work
                       'interpolation':'none'},
                plot={'fig_inches':(15,15), 'show_grid':True})
                #plot={'fig_size':100, 'show_grid':True})
            imgs_orig = {1 : get_hv_image(matchWin_orig),
                         2 : get_hv_image(otherWin_orig)
                         }
            imgs_corr = {1: get_hv_image(matchWin_orig),
                         2: get_hv_image(otherWin_corr)
                         }
            #layout = get_hv_image(matchWin_orig) + get_hv_image(matchWin_orig)
            imgs = {1 : get_hv_image(matchWin_orig) + get_hv_image(matchWin_orig),
                    2 : get_hv_image(otherWin_orig) + get_hv_image(otherWin_corr)
                    }
            # Construct a HoloMap by evaluating the function over all the keys
            hmap_orig = hv.HoloMap(imgs_orig, kdims=['image'])
            hmap_corr = hv.HoloMap(imgs_corr, kdims=['image'])
            hmap = hv.HoloMap(imgs, kdims=['image']).collate().cols(1) # displaying this results in a too small figure
            #hmap = hv.HoloMap(imgs_corr, kdims=['image']) + hv.HoloMap(imgs_corr, kdims=['image'])
            ## Construct a HoloMap by defining the sampling on the Dimension
            #dmap = hv.DynamicMap(image_slice, kdims=[hv.Dimension('z_axis', values=keys)])
            warnings.filterwarnings('default')
            #return hmap
            return hmap_orig if not deshifted else hmap_corr
    def _get_opt_winpos_winsize(self):
        # type: (tuple,tuple) -> tuple,tuple
        """Calculates optimal window position and size in reference image units according to DGM, cloud_mask and
...
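The display scaling in show_matchWin() above is essentially a 2-98 percentile contrast stretch; a standalone sketch of that pattern on random data (illustration only, not part of the commit):

# Standalone sketch of the 2%/98% percentile stretch applied before plotting in show_matchWin().
import numpy as np
from skimage.exposure import rescale_intensity

arr = np.random.gamma(shape=2.0, scale=50.0, size=(100, 100))   # synthetic image window
vmin, vmax = np.percentile(arr, 2), np.percentile(arr, 98)
stretched = rescale_intensity(arr, in_range=(vmin, vmax))       # mapped to [0, 1] here (non-negative float input)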
@@ -156,6 +156,11 @@ class COREG_LOCAL(object):
        self.progress = progress if not q else False # overridden by v
        self.ignErr = ignore_errors
        assert isinstance(self.imref, GeoArray) and isinstance(self.im2shift, GeoArray), \
            'Something went wrong with the creation of the GeoArray instances for the reference or target image. ' \
            'The created instances do not seem to belong to the GeoArray class. If you are working in Jupyter ' \
            'Notebook, reset the kernel and try again.'
        COREG.__dict__['_set_outpathes'](self, self.imref, self.im2shift)
        # make sure that the output directory of the coregistered image is the project directory if a project directory is given
        if path_out and projectDir and os.path.basename(self.path_out):
...
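A hedged sketch of how COREG_LOCAL might be instantiated so that the experimental RANSAC tie point filtering warned about in the Geom_Quality_Grid hunks below is enabled. Only tieP_filter_level=2 is taken from this commit; every other argument is an illustrative assumption:

# Hedged sketch, not part of this commit: enabling the experimental RANSAC outlier detection.
from components.CoReg_local import COREG_LOCAL

CRL = COREG_LOCAL('/path/to/reference_image.bsq', '/path/to/target_image.bsq',
                  grid_res=200,            # tie point grid resolution (assumed value)
                  window_size=(256, 256),  # matching window size (assumed parameter/value)
                  tieP_filter_level=2)     # level 2 activates the RANSAC outlier detection
# the resulting tie point table is then expected to carry the 'OUTLIER' flag set by _flag_outliers()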
@@ -312,7 +312,10 @@
        :param inplace:   <bool> whether to overwrite CoRegPoints_table directly (True) or not (False). If False, the
                          resulting GeoDataFrame is returned.
        """
        if not algorithm in ['RANSAC']: raise ValueError
warnings.warn("The currently implemented RANSAC outlier detection is still very experimental. You enabled it "
"by passing 'tieP_filter_level=2' to COREG_LOCAL. Use it on your own risk!")
        GDF = self.CoRegPoints_table.copy()
@@ -324,15 +327,8 @@
        dst = src + xyShift
        if algorithm=='RANSAC':
-            # robustly estimate affine transform model with RANSAC
-            #model_robust, inliers = ransac((src, dst), PolynomialTransform, min_samples=3,
-            model_robust, inliers = ransac((src, dst), AffineTransform,
-                                           min_samples=3,
-                                           residual_threshold=10,
-                                           max_trials=1000,
-                                           #stop_sample_num=int(0.9*len(GDF))
-                                           )
-            outliers = inliers == False
+            model_robust, outliers = self._ransac_outlier_detection(src, dst)
+            #print(np.count_nonzero(outliers), 'marked as outliers')
            records = GDF[['POINT_ID']].copy()
            records['OUTLIER'] = outliers
@@ -346,6 +342,71 @@
        return GDF
    @staticmethod
    def _ransac_outlier_detection(src_coords, est_coords, max_outlier_percentage=10, tolerance=2.5, max_iter=15,
                                  timeout=20):
        """Detect geometric outliers between point cloud of source and estimated coordinates using RANSAC algorithm.

        :param src_coords:              <np.ndarray> source coordinate point cloud as array with shape [Nx2]
        :param est_coords:              <np.ndarray> estimated coordinate point cloud as array with shape [Nx2]
        :param max_outlier_percentage:  <float, int> maximum percentage of outliers to be detected
        :param tolerance:               <float, int> percentage tolerance for max_outlier_percentage
        :param max_iter:                <int> maximum iterations for finding the best RANSAC threshold
        :param timeout:                 <float, int> timeout for the iteration loop in seconds
        :return:
        """
        for co, n in zip([src_coords, est_coords], ['src_coords', 'est_coords']):
            assert co.ndim==2 and co.shape[1]==2, "'%s' must have shape [Nx2]. Got shape %s." % (n, co.shape)
        if max_outlier_percentage > 100: raise ValueError
        min_inlier_percentage = 100-max_outlier_percentage

        class PolyTF_1(PolynomialTransform):
            def estimate(*data):
                return PolynomialTransform.estimate(*data, order=1)

        # robustly estimate affine transform model with RANSAC
        # eliminates not more than the given maximum outlier percentage of the tie points
        model_robust, inliers = None, None
        count_inliers = None
        th = 5  # start value
        th_checked = {}  # dict of thresholds that already have been tried + calculated inlier percentage
        th_substract = 2
        count_iter = 0
        time_start = time.time()

        while True:
            if count_iter > max_iter or time.time()-time_start > timeout:
                break  # keep last values and break while loop

            if th_checked:
                if min_inlier_percentage-tolerance <= th_checked[th] <= min_inlier_percentage+tolerance:
                    th_substract /= 2
                if abs(th_checked[th] - min_inlier_percentage) <= tolerance:
                    break  # keep last values and break while loop

                # check if more or fewer outliers have been detected than the given percentage
                if count_inliers >= min_inlier_percentage * src_coords.shape[0] / 100:
                    th -= th_substract  # too few outliers found -> decrease threshold
                else:
                    th += th_substract  # too many outliers found -> increase threshold

            # model_robust, inliers = ransac((src, dst), PolynomialTransform, min_samples=3,
            model_robust, inliers = ransac((src_coords, est_coords), AffineTransform,
                                           min_samples=6,
                                           residual_threshold=th,
                                           max_trials=3000,
                                           stop_sample_num=int((min_inlier_percentage-tolerance)/100*src_coords.shape[0]),
                                           stop_residuals_sum=int((max_outlier_percentage-tolerance)/100*src_coords.shape[0])
                                           )
            count_inliers = np.count_nonzero(inliers)
            th_checked[th] = count_inliers / src_coords.shape[0] * 100
            print(th_checked)
            count_iter += 1

        outliers = inliers == False
        return model_robust, outliers
    def dump_CoRegPoints_table(self, path_out=None):
        path_out = path_out if path_out else get_generic_outpath(dir_out=self.dir_out,
            fName_out="CoRegPoints_table_grid%s_ws(%s_%s)__T_%s__R_%s.pkl" % (self.grid_res, self.COREG_obj.win_size_XY[0],
...
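To make the behaviour of the new _ransac_outlier_detection() prototype above easier to follow, a standalone sketch of the underlying scikit-image call on synthetic tie points (not the project's code; all values are arbitrary):

# Standalone sketch of the RANSAC principle used by _ransac_outlier_detection():
# fit an affine transform to the tie points and flag points with large residuals as outliers.
import numpy as np
from skimage.measure import ransac
from skimage.transform import AffineTransform

rng = np.random.RandomState(0)
src = rng.uniform(0, 1000, (100, 2))                  # source tie point coordinates [Nx2]
true_tf = AffineTransform(translation=(12.3, -4.5))
dst = true_tf(src) + rng.normal(0, 0.3, src.shape)    # shifted coordinates with small noise
dst[:10] += rng.uniform(50, 80, (10, 2))              # corrupt 10 points -> geometric outliers

model, inliers = ransac((src, dst), AffineTransform,
                        min_samples=6, residual_threshold=5, max_trials=3000)
outliers = ~inliers
print('%d of %d points flagged as outliers' % (np.count_nonzero(outliers), len(src)))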