Commit d6dd51a0 authored by Niklas Bohn's avatar Niklas Bohn

Merge remote-tracking branch 'origin/enhancement/snow_isofit_approach' into enhancement/snow_isofit_approach

Merge remote-tracking branch 'origin/enhancement/snow_isofit_approach' into enhancement/snow_isofit_approach
parents f35a8659 e7a28e07
......@@ -38,10 +38,11 @@ from tqdm import tqdm
import os
import pkgutil
import platform
from scipy.interpolate import interp1d
from py_tools_ds.processing.progress_mon import ProgressBar
from enpt.processors.spatial_transform.spatial_transform import VNIR_SWIR_SensorGeometryTransformer
# from enpt.processors.spatial_transform.spatial_transform import VNIR_SWIR_SensorGeometryTransformer
from sicor.Tools.EnMAP.metadata import varsol
from sicor.Tools.EnMAP.LUT import get_data_file, read_lut_enmap_formatted, interpol_lut_c
......@@ -162,7 +163,15 @@ class FoGen(object):
# wvl
self.wvl = np.array(options["sensor"]["wvl_center"])
self.fwhm = np.array(options["sensor"]["fwhm"])
self.snr = np.array(options["sensor"]["snr"])
# snr
if self.instrument == "AVIRIS":
self.noise_file = options["sensor"]["snr"]
coeffs = np.loadtxt(self.noise_file, delimiter=' ', comments='#')
p_a, p_b, p_c = [interp1d(coeffs[:, 0], coeffs[:, col], fill_value='extrapolate') for col in (1, 2, 3)]
self.snr = np.array([[p_a(w), p_b(w), p_c(w)] for w in self.wvl])
else:
self.snr = np.array(options["sensor"]["snr"])
# load RT LUT
self.logger.info("Loading RT LUT...")
......@@ -325,10 +334,10 @@ class FoGen(object):
:return: measurement error covariance matrix
"""
if self.instrument == "AVIRIS":
nedl = abs(snr[:, 0] * sp.sqrt(snr[:, 1] + dt) + snr[:, 2])
sy = np.identity(num_bd) * nedl
nedl = abs(snr[:, 0] * np.sqrt(snr[:, 1] + dt) + snr[:, 2])
sy = np.identity(num_bd) * nedl**2
else:
sy = np.identity(num_bd) * ((dt / snr) ** 2)
sy = np.identity(num_bd) * ((dt / snr)**2)
if not unknowns:
return sy
......@@ -848,8 +857,9 @@ def __minimize__(fo, opt_func, unknowns=False, logger=None):
# avoid unrealistic values in deep water absorption features
# 1370-1394 nm, 1813-1960 nm
surf_fg[:, :, 129:132] = 0.01
surf_fg[:, :, 169:185] = 0.01
# surf_fg[:, :, 129:132] = 0.01
# surf_fg[:, :, 169:185] = 0.01
surf_fg[np.isnan(surf_fg)] = 0.01
# determine appropriate prior mean and covariance by assigning the first guess to the closest cluster
logger.info("Determining appropriate surface prior mean and covariance...")
......
......@@ -69,7 +69,7 @@ class MultiComponentSurface(object):
if self.normalize == 'Euclidean':
self.e_norm = lambda r: norm(r)
elif self.normalize == 'RMS':
self.e_norm = lambda r: s.sqrt(s.mean(pow(r, 2)))
self.e_norm = lambda r: np.sqrt(np.mean(pow(r, 2)))
elif self.normalize == 'None':
self.e_norm = lambda r: 1.0
else:
......@@ -87,16 +87,16 @@ class MultiComponentSurface(object):
# Reference values are used for normalizing the reflectances.
# in the VSWIR regime, reflectances are normalized so that the model
# is agnostic to absolute magnitude.
self.refwl = s.squeeze(model_dict['refwl'])
self.idx_ref = [s.argmin(abs(self.wl-w))
for w in s.squeeze(self.refwl)]
self.idx_ref = s.array(self.idx_ref)
self.refwl = np.squeeze(model_dict['refwl'])
self.idx_ref = [np.argmin(abs(self.wl-w))
for w in np.squeeze(self.refwl)]
self.idx_ref = np.array(self.idx_ref)
# Cache some important computations
self.Covs, self.Cinvs, self.mus = [], [], []
for i in range(self.n_comp):
Cov = self.components[i][1]
self.Covs.append(s.array([Cov[j, self.idx_ref]
self.Covs.append(np.array([Cov[j, self.idx_ref]
for j in self.idx_ref]))
self.Cinvs.append(svd_inv(self.Covs[-1]))
self.mus.append(self.components[i][0][self.idx_ref])
......@@ -107,7 +107,7 @@ class MultiComponentSurface(object):
self.bounds = [[rmin, rmax] for w in self.wl]
self.scale = [1.0 for w in self.wl]
self.init = [0.15 * (rmax-rmin)+rmin for v in self.wl]
self.idx_lamb = s.arange(self.n_wl)
self.idx_lamb = np.arange(self.n_wl)
self.n_state = len(self.statevec)
def component(self, x):
......@@ -137,7 +137,7 @@ class MultiComponentSurface(object):
else:
md = sum(pow(lamb_ref - ref_mu, 2))
mds.append(md)
closest = s.argmin(mds)
closest = np.argmin(mds)
return closest
......@@ -194,7 +194,7 @@ def surface_model(config):
outfile = expand_path(configdir, config['output_model_file'])
# load wavelengths file
q = s.loadtxt(wavelength_file)
q = np.loadtxt(wavelength_file)
if q.shape[1] > 2:
q = q[:, 1:]
if q[0, 0] < 100:
......@@ -207,7 +207,7 @@ def surface_model(config):
for wi, window in enumerate(reference_windows):
active_wl = aand(wl >= window[0], wl < window[1])
refwl.extend(wl[active_wl])
refwl = s.array(refwl, dtype=float)
refwl = np.array(refwl, dtype=float)
# create basic model template
model = {
......@@ -248,7 +248,7 @@ def surface_model(config):
rfl = envi.open(hdrfile, infile)
nl, nb, ns = [int(rfl.metadata[n])
for n in ('lines', 'bands', 'samples')]
swl = s.array([float(f) for f in rfl.metadata['wavelength']])
swl = np.array([float(f) for f in rfl.metadata['wavelength']])
# Maybe convert to nanometers
if swl[0] < 100:
......@@ -256,9 +256,9 @@ def surface_model(config):
rfl_mm = rfl.open_memmap(interleave='source', writable=True)
if rfl.metadata['interleave'] == 'bip':
x = s.array(rfl_mm[:, :, :])
x = np.array(rfl_mm[:, :, :])
if rfl.metadata['interleave'] == 'bil':
x = s.array(rfl_mm[:, :, :]).transpose((0, 2, 1))
x = np.array(rfl_mm[:, :, :]).transpose((0, 2, 1))
x = x.reshape(nl * ns, nb)
# import spectra and resample
......@@ -275,12 +275,12 @@ def surface_model(config):
s2, m2 = spectra[int(s.rand() * n)], 1.0 - m1
spectra.append(m1 * s1 + m2 * s2)
spectra = s.array(spectra)
use = s.all(s.isfinite(spectra), axis=1)
spectra = np.array(spectra)
use = np.all(np.isfinite(spectra), axis=1)
spectra = spectra[use, :]
# accumulate total list of window indices
window_idx = -s.ones((nchan), dtype=int)
window_idx = -np.ones((nchan), dtype=int)
for wi, win in enumerate(windows):
active_wl = aand(wl >= win['interval'][0], wl < win['interval'][1])
window_idx[active_wl] = wi
......@@ -296,8 +296,8 @@ def surface_model(config):
for ci in range(ncomp):
m = s.mean(spectra[Z == ci, :], axis=0)
C = s.cov(spectra[Z == ci, :], rowvar=False)
m = np.mean(spectra[Z == ci, :], axis=0)
C = np.cov(spectra[Z == ci, :], rowvar=False)
for i in range(nchan):
window = windows[window_idx[i]]
......@@ -314,9 +314,9 @@ def surface_model(config):
# Normalize the component spectrum if desired
if normalize == 'Euclidean':
z = s.sqrt(s.sum(pow(m[:len(wl)], 2)))
z = np.sqrt(np.sum(pow(m[:len(wl)], 2)))
elif normalize == 'RMS':
z = s.sqrt(s.mean(pow(m[:len(wl)], 2)))
z = np.sqrt(np.mean(pow(m[:len(wl)], 2)))
elif normalize == 'None':
z = 1.0
else:
......@@ -334,8 +334,8 @@ def surface_model(config):
model['means'].append(m_norm)
model['covs'].append(C_norm)
model['means'] = s.array(model['means'])
model['covs'] = s.array(model['covs'])
model['means'] = np.array(model['means'])
model['covs'] = np.array(model['covs'])
s.io.savemat(outfile, model)
print("saving results to", outfile)
......
{
"output_model_file": "/misc/fluo6/niklas_bohn/phd/2nd_paper/Snow_EnMAP/Data/BioSNICAR_GO_EeteS/Testcase_3_Glacier_Ice/snow_surface_model.mat",
"wavelength_file": "/misc/fluo6/niklas_bohn/phd/2nd_paper/Snow_EnMAP/Data/BioSNICAR_GO_EeteS/EnMAP_wavelengths_fwhm.txt",
"output_model_file": "/Users/niklasbohn/Desktop/2nd_paper/model/snow_surface_model.mat",
"wavelength_file": "/Users/niklasbohn/Desktop/2nd_paper/data/aviris_ng/20170320_ang20170228_wavelength_fit.txt",
"normalize":"Euclidean",
"reference_windows":[[400,1300],[1450,1700],[2100,2450]],
"sources":
......@@ -8,7 +8,7 @@
{
"input_spectrum_files":
[
"/misc/fluo6/niklas_bohn/phd/2nd_paper/Snow_EnMAP/Data/BioSNICAR_GO_EeteS/Testcase_3_Glacier_Ice/glacier_ice_surface_model_sld30_ga121_ga221.img"
"/Users/niklasbohn/Desktop/2nd_paper/model/glacier_ice_surface_model_sld30_ga121_ga221.img"
],
"n_components": 8,
"windows": [
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment