Commit 8e108d99 authored by Pablo Iturrieta Rebolledo

added inhomogeneous pointprocess

parent 3f7a1fb7
@@ -2,7 +2,7 @@
 <module type="PYTHON_MODULE" version="4">
   <component name="NewModuleRootManager">
     <content url="file://$MODULE_DIR$" />
-    <orderEntry type="jdk" jdkName="Python 3.8 (venv)" jdkType="Python SDK" />
+    <orderEntry type="inheritedJdk" />
     <orderEntry type="sourceFolder" forTests="false" />
   </component>
   <component name="TestRunnerService">
......
 <?xml version="1.0" encoding="UTF-8"?>
 <project version="4">
-  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (venv)" project-jdk-type="Python SDK" />
+  <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.7 (venv)" project-jdk-type="Python SDK" />
 </project>
\ No newline at end of file
@@ -155,6 +155,6 @@ def run(use_saved=False):
 if __name__ == '__main__':
     use_saved = False
-    a =run(use_saved)
-    import n_test
-    import m_test
-    import s_test
-    import l_test
-    import cl_test
-    import t_test
-    import pgs
-    use_saved = False
-    # n_test.run(use_saved)
-    # m_test.run(use_saved)
-    # s_test.run(use_saved)
-    # l_test.run(use_saved)
-    # cl_test.run(use_saved)
-    # t_test.run(use_saved)
-    pgs.run(use_saved)
+    a = run(use_saved)
+    from codes.csep_tests import n_test, m_test, s_test, l_test, cl_test, pgs, t_test
+    n_test.run()
+    s_test.run()
+    m_test.run()
+    l_test.run()
+    cl_test.run()
+    t_test.run()
+    pgs.run()
\ No newline at end of file
@@ -163,7 +163,7 @@ def filter_10yr_01_2010(cat):
     start_date = time_utils.strptime_to_utc_epoch('2010-01-01 00:00:00.00')
     end_date = time_utils.strptime_to_utc_epoch('2020-01-01 00:00:00.00')
     min_mag = 4.95
-    max_depth = 30
+    max_depth = 300
     cat.filter([f'origin_time >= {start_date}',
                 f'origin_time < {end_date}',
......
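The only substantive change in this hunk is the depth cutoff, raised from 30 km to 300 km, which admits deeper hypocentres into the test catalog. A toy sketch of the effect (plain numpy; the depth values are hypothetical, not from the catalog):

    import numpy as np

    depths = np.array([5., 12., 45., 120., 450.])  # hypothetical hypocentre depths, km
    print(np.sum(depths <= 30))   # 2 events kept with the old cutoff
    print(np.sum(depths <= 300))  # 4 events kept with the new cutoff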
@@ -16,7 +16,16 @@ plot_properties = {'grid_labels': True,
 # forecasts = get_models.five_years()
 # figures_dir = paths.forecast_figures[5]
+#
+# for model_i in forecasts:
+#     plot_properties['filename'] = os.path.join(figures_dir, model_i.name)
+#     plot_properties['title'] = model_i.name + ' ' + model_i.author
+#     print(plot_properties['title'])
+#     model_i.plot(plot_args=plot_properties)
+#
+# forecasts = get_models.ten_years()
+# figures_dir = paths.forecast_figures[10]
 # for model_i in forecasts:
 #     plot_properties['filename'] = os.path.join(figures_dir, model_i.name)
 #     plot_properties['title'] = model_i.name + ' ' + model_i.author
@@ -24,11 +33,19 @@ plot_properties = {'grid_labels': True,
 #     model_i.plot(plot_args=plot_properties)

+### Contours
+plot_properties = {'grid_labels': True,
+                   'borders': True,
+                   'feature_lw': 0.5,
+                   'basemap': 'stock_img',
+                   'cmap': 'rainbow',
+                   'clim': [-8, 0],
+                   'projection': cartopy.crs.Mercator()}
 forecasts = get_models.ten_years()
 figures_dir = paths.forecast_figures[10]
 for model_i in forecasts:
     plot_properties['filename'] = os.path.join(figures_dir, 'fig_bins/%s_8-0-15' % model_i.name)
     plot_properties['title'] = model_i.name + ' ' + model_i.author
     print(plot_properties['title'])
-    model_i.plot(show=True, levels=9, plot_args=plot_properties)
+    model_i.plot(plot_args=plot_properties)
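On the new 'clim': [-8, 0] setting: gridded rate forecasts span many orders of magnitude per cell, so the colour scale is pinned to log10 rates between 1e-8 and 1. A minimal matplotlib-only sketch of the same idea (dummy data; cartopy, the basemap, and the pycsep plot wrapper are omitted):

    import numpy as np
    import matplotlib.pyplot as plt

    rates = np.random.lognormal(mean=-10, sigma=2, size=(60, 80))  # dummy rate grid
    log_rates = np.log10(rates)
    levels = np.linspace(-8, 0, 9)  # mirrors 'clim': [-8, 0]
    cs = plt.contourf(log_rates, levels=levels, cmap='rainbow', extend='both')
    plt.colorbar(cs, label='log10 expected rate per cell')
    plt.show()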
@@ -41,14 +41,14 @@ def entropy2d(a):
     else:
         return 0

 def entropy1d(a):
-    b = a[~np.isnan(a)]
-    if b.size:
-        b = b[np.where(b.ravel() != 0.)]
-        return reduce((lambda x, y: x - y * np.log(y)), b)
-    else:
-        return 0
+    Hx = 0
+    for i in a:
+        if i != 0:
+            Hx += -i * np.log2(i)
+    return Hx

 def get_NSMI_2D(A, B, bins, range_):
     m = np.size(A, 0)
@@ -69,38 +69,49 @@ def get_NSMI_2D(A, B, bins, range_):
     B_l = np.hstack((B[:, 1:, :], B[:, 0:1, :])).reshape((m * n * p))

     h2d_x_y = np.histogram2d(a, b, bins, [range_, range_], density=True)
-    # print(h2d_x_y[0].shape)
     bin = np.diff(h2d_x_y[1])[0]
-    h2d_x_y = h2d_x_y[0]*bin
-    h2d_x_yu = np.histogram2d(a, B_u, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_xu_y = np.histogram2d(A_u, b, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_x_yd = np.histogram2d(a, B_d, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_xd_y = np.histogram2d(A_d, b, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_x_yl = np.histogram2d(a, B_l, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_xl_y = np.histogram2d(A_l, b, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_x_yr = np.histogram2d(a, B_r, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_xr_y = np.histogram2d(A_r, b, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_xu_yl = np.histogram2d(A_u, B_l, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_xl_yu = np.histogram2d(A_l, B_u, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_xd_yl = np.histogram2d(A_d, B_l, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_xl_yd = np.histogram2d(A_l, B_l, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_xu_yr = np.histogram2d(A_u, B_r, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_xr_yu = np.histogram2d(A_r, B_u, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_xd_yr = np.histogram2d(A_d, B_r, bins, [range_, range_], density=True)[0]*bin**2
-    h2d_xr_yd = np.histogram2d(A_r, B_d, bins, [range_, range_], density=True)[0]*bin**2
-    print(np.max(h2d_xu_y.ravel()))
-    SMI = -m * n * entropy2d(h2d_x_y) + m * n / 4.0 * (
-        entropy2d(np.hstack((h2d_x_yu, h2d_xu_y, h2d_x_yd, h2d_xd_y, h2d_x_yl, h2d_xl_y, h2d_x_yr, h2d_xr_y)))) - \
-        m * n / 8.0 * (entropy2d(
-        np.hstack((h2d_xu_yl, h2d_xl_yu, h2d_xd_yl, h2d_xl_yd, h2d_xu_yr, h2d_xr_yu, h2d_xd_yr, h2d_xr_yd))))
-    return SMI + 3000
+    scale = bin**2
+    h2d_x_y = h2d_x_y[0]*scale
+
+    h2d_x_yu = np.histogram2d(a, B_u, bins, [range_, range_], density=True)[0]*scale
+    h2d_xu_y = np.histogram2d(A_u, b, bins, [range_, range_], density=True)[0]*scale
+    h2d_x_yd = np.histogram2d(a, B_d, bins, [range_, range_], density=True)[0]*scale
+    h2d_xd_y = np.histogram2d(A_d, b, bins, [range_, range_], density=True)[0]*scale
+    h2d_x_yl = np.histogram2d(a, B_l, bins, [range_, range_], density=True)[0]*scale
+    h2d_xl_y = np.histogram2d(A_l, b, bins, [range_, range_], density=True)[0]*scale
+    h2d_x_yr = np.histogram2d(a, B_r, bins, [range_, range_], density=True)[0]*scale
+    h2d_xr_y = np.histogram2d(A_r, b, bins, [range_, range_], density=True)[0]*scale
+
+    h2d_xu_yl = np.histogram2d(A_u, B_l, bins, [range_, range_], density=True)[0]*scale
+    h2d_xl_yu = np.histogram2d(A_l, B_u, bins, [range_, range_], density=True)[0]*scale
+    h2d_xd_yl = np.histogram2d(A_d, B_l, bins, [range_, range_], density=True)[0]*scale
+    h2d_xl_yd = np.histogram2d(A_l, B_l, bins, [range_, range_], density=True)[0]*scale
+    h2d_xu_yr = np.histogram2d(A_u, B_r, bins, [range_, range_], density=True)[0]*scale
+    h2d_xr_yu = np.histogram2d(A_r, B_u, bins, [range_, range_], density=True)[0]*scale
+    h2d_xd_yr = np.histogram2d(A_d, B_r, bins, [range_, range_], density=True)[0]*scale
+    h2d_xr_yd = np.histogram2d(A_r, B_d, bins, [range_, range_], density=True)[0]*scale
+
+    # print(m*n)
+    # print(entropy2d(h2d_x_y))
+    # print(entropy2d(np.hstack((h2d_x_yu, h2d_xu_y, h2d_x_yd, h2d_xd_y, h2d_x_yl, h2d_xl_y, h2d_x_yr, h2d_xr_y))))
+    # print(entropy2d(np.hstack((h2d_xu_yl, h2d_xl_yu, h2d_xd_yl, h2d_xl_yd, h2d_xu_yr, h2d_xr_yu, h2d_xd_yr, h2d_xr_yd))))
+
+    # SMI = (-m * n * entropy2d(h2d_x_y) +
+    #        m * n / 2.0 * (entropy2d(np.hstack((h2d_x_yu, h2d_xu_y, h2d_x_yd, h2d_xd_y, h2d_x_yl, h2d_xl_y, h2d_x_yr, h2d_xr_y)))) -
+    #        m * n / 4.0 * (entropy2d(np.hstack((h2d_xu_yl, h2d_xl_yu, h2d_xd_yl, h2d_xl_yd, h2d_xu_yr, h2d_xr_yu, h2d_xd_yr, h2d_xr_yd))))) / m / n
+    SMI = (-m * n * entropy2d(h2d_x_y) +
+           m * n / 2.0 * (entropy2d(h2d_x_yu) + entropy2d(h2d_xu_y) + entropy2d(h2d_x_yl) + entropy2d(h2d_xl_y)) -
+           m * n / 4.0 * (entropy2d(h2d_xu_yl) + entropy2d(h2d_xl_yu) + entropy2d(h2d_xu_yr) - entropy2d(h2d_xr_yu))) / m / n
+    return SMI
......
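A minimal, self-contained sketch of the pieces used in get_NSMI_2D (assumptions: per the loop in the new entropy1d, the entropy helpers treat their input as a discrete probability mass function and return Shannon entropy in bits; the A_u/A_l/B_r/... arrays are wrap-around shifts of the grid, matching the hstack pattern shown above):

    import numpy as np

    def entropy1d(a):
        # Shannon entropy (bits) of a discrete distribution; empty bins contribute 0
        p = np.asarray(a, dtype=float)
        p = p[p > 0]
        return float(-np.sum(p * np.log2(p)))

    # A fair four-sided die: entropy = log2(4) = 2 bits
    print(entropy1d([0.25, 0.25, 0.25, 0.25]))  # -> 2.0

    # The wrap-around column shift built with hstack is equivalent to np.roll
    B = np.arange(24).reshape((2, 4, 3))
    B_l = np.hstack((B[:, 1:, :], B[:, 0:1, :]))
    assert (B_l == np.roll(B, -1, axis=1)).all()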
# import scipy.spatial
# import libpysal as ps
# import numpy as np
# from pointpats import PointPattern, PoissonPointProcess, as_window, G, F, J, K, L, Genv, Fenv, Jenv, Kenv, Lenv
#
# import matplotlib.pyplot as plt

import numpy as np  # NumPy package for arrays, random number generation, etc.
import matplotlib.pyplot as plt  # for plotting
from scipy.optimize import minimize  # for optimizing
from scipy import integrate

# Simulation window parameters
xMin = -1
xMax = 1
yMin = -1
yMax = 1
xDelta = xMax - xMin
yDelta = yMax - yMin  # rectangle dimensions
areaTotal = xDelta * yDelta
s = 0.5  # scale parameter

# Point process parameters
def fun_lambda(x, y):
    return 100 * np.exp(-(x ** 2 + y ** 2) / s ** 2)  # intensity function

### START -- find maximum lambda -- START ###
# For an intensity function lambda, given by fun_lambda, find the maximum
# of lambda over the rectangular region [xMin, xMax] x [yMin, yMax].
def fun_Neg(x):
    return -fun_lambda(x[0], x[1])  # negative of lambda

xy0 = [(xMin + xMax) / 2, (yMin + yMax) / 2]  # initial value (i.e. centre)
# Find the largest lambda value
resultsOpt = minimize(fun_Neg, xy0, bounds=((xMin, xMax), (yMin, yMax)))
lambdaNegMin = resultsOpt.fun  # retrieve minimum value found by minimize
lambdaMax = -lambdaNegMin
### END -- find maximum lambda -- END ###

# Define the thinning probability function
def fun_p(x, y):
    return fun_lambda(x, y) / lambdaMax

# Simulate a homogeneous Poisson point process at rate lambdaMax
numbPoints = np.random.poisson(lambdaMax * areaTotal)  # Poisson number of points
xx = np.random.uniform(0, xDelta, (numbPoints, 1)) + xMin  # x coordinates of Poisson points
yy = np.random.uniform(0, yDelta, (numbPoints, 1)) + yMin  # y coordinates of Poisson points

# Calculate the spatially dependent thinning probabilities
p = fun_p(xx, yy)

# Generate Bernoulli variables (i.e. coin flips) for thinning
booleRetained = np.random.uniform(0, 1, (numbPoints, 1)) < p  # points to retain

# x/y locations of the retained points
xxRetained = xx[booleRetained]
yyRetained = yy[booleRetained]

# Plotting
plt.scatter(xxRetained, yyRetained, edgecolor='b', facecolor='none', alpha=0.5)
plt.xlabel("x")
plt.ylabel("y")
plt.show()
\ No newline at end of file
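The script above simulates an inhomogeneous Poisson point process by thinning: a homogeneous process is generated at the maximal rate lambdaMax, then each point is kept with probability fun_lambda(x, y)/lambdaMax. Since scipy's integrate is imported but never used, a sanity check is a natural follow-up (a sketch, not part of the commit): the expected number of retained points equals the integral of the intensity over the window.

    # Sketch: dblquad(func, a, b, gfun, hfun) integrates func(y, x), y first;
    # the result should be close to xxRetained.size on average.
    expectedNumb, _ = integrate.dblquad(lambda y, x: fun_lambda(x, y),
                                        xMin, xMax, lambda x: yMin, lambda x: yMax)
    print('retained: %d, expected: %.1f' % (xxRetained.size, expectedNumb))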
@@ -4,39 +4,240 @@ import os
 import entropy_calc as ec
 from codes import get_models
 from codes import paths
-Dir = '/media/pablo/Pandapan/gfz/PhD/codes/'
-data_dir = os.path.join(Dir,'data')
-res_dir = os.path.join(paths.results,'smi')
-os.makedirs(res_dir, exist_ok=True)
+import numpy
+import matplotlib
+import matplotlib.pyplot as plt
+from matplotlib.colors import BoundaryNorm
+from matplotlib.ticker import MaxNLocator
+import numpy as np
+from codes import get_catalogs
+import csep
+
+#################### CATALOG
+plt.close('all')
+cat = get_catalogs.filter_10yr_01_2010(get_catalogs.bollettino())
+region = cat.region
+counts = cat.spatial_counts()
+A = np.zeros(region.idx_map.shape)
+A[A==0] = np.nan
+for i in range(A.shape[0]):
+    for j in range(A.shape[1]):
+        if not np.isnan(region.idx_map[i,j]):
+            A[i,j] = counts[int(region.idx_map[i,j])]
+
+names = []
+H_models = {}
+SMI_models = {}
+Datas = []
+
+cmap = plt.get_cmap('jet')
+fig = plt.figure()
+# a = plt.imshow(np.flipud(A), cmap=cmap)
+lons, lats = numpy.meshgrid(numpy.append(region.xs, region.xs[-1] + region.dh),
+                            numpy.append(region.ys, region.ys[-1] + region.dh))
+a = plt.contourf(lons[:-1, :-1], lats[:-1, :-1], A, cmap=cmap)
+fig.colorbar(a)
+plt.title('Catalog')
+plt.show()
+
+A[np.isnan(A)] = 0
+x, y = np.where(A != 0)
+
+################# Forecasts
 models = get_models.ten_years()
+names = []
+cmap = plt.get_cmap('jet')
+
+####### Cat
+model_i = 11
+forecast_data = models[model_i].spatial_counts()
+observed_data = cat.spatial_counts()
+n_obs = numpy.sum(observed_data)
+n_fore = numpy.sum(forecast_data)
+scale = n_obs / n_fore
+expected_forecast_count = int(n_obs)
+num_events_to_simulate = expected_forecast_count
+
+sampling_weights = numpy.cumsum(forecast_data.ravel()) / numpy.sum(forecast_data)
+
+# data structures to store results
+n_sims = [1, 100, 200, 500, 1000, 2000, 5000, 10000]
+n_sim = 1
+
+AA = A.reshape((A.shape[0], A.shape[1], 1))
+DD = models[model_i].spatial_counts(cartesian=True).reshape((A.shape[0], A.shape[1], 1))
+
+B = np.zeros(region.idx_map.shape)
+B[B==0] = np.nan
+sim = np.zeros(sampling_weights.shape)
+for i in range(n_sim):
+    sim_fore = numpy.zeros(sampling_weights.shape)
+    sim += csep.core.poisson_evaluations._simulate_catalog(num_events_to_simulate, sampling_weights, sim_fore)
+
+for i in range(B.shape[0]):
+    for j in range(B.shape[1]):
+        if not np.isnan(region.idx_map[i,j]):
+            B[i,j] = sim[int(region.idx_map[i,j])]
+B /= n_sim
+BB = B.reshape((B.shape[0], B.shape[1], 1))
+print(models[model_i].name)
+
+# smi = ec.get_NSMI_2D(AA, BB, 200, (0, 2))
+B[np.isnan(B)] = 0
+x, y = np.where(B != 0)
+
+#######
 for i, Forecast in enumerate(models):
     names.append(Forecast.name)
-    A = np.log10(Forecast.spatial_counts(cartesian=True))
-    A[np.abs(A)==np.inf] = -15
-    hist = np.histogram(A, density=True, bins=100,
-                        range=(np.nanquantile(A,0.05), np.nanquantile(A,0.95)))
+    Data = Forecast.spatial_counts(cartesian=True)
+    levels = MaxNLocator(nbins=20).tick_values(0, 0.02)
+    region = Forecast.region
+    fig = plt.figure()
+    plt.title(Forecast.name)
+    lons, lats = numpy.meshgrid(numpy.append(region.xs, region.xs[-1] + region.dh),
+                                numpy.append(region.ys, region.ys[-1] + region.dh))
+
+    ############# Simulate Catalog
+    # forecast_data = models[0].spatial_counts()
+    # observed_data = cat.spatial_counts()
+    # n_obs = numpy.sum(observed_data)
+    # n_fore = numpy.sum(forecast_data)
+    # scale = n_obs / n_fore
+    # expected_forecast_count = int(n_obs)
+    # num_events_to_simulate = expected_forecast_count
+    #
+    # sampling_weights = numpy.cumsum(forecast_data.ravel()) / numpy.sum(forecast_data)
+    #
+    # # data structures to store results
+    #
+    # n_sims = [100, 200, 500, 1000, 2000, 5000, 10000]
+    # AA = A.reshape((A.shape[0], A.shape[1], 1))
+    #
+    # for n_sim in n_sims:
+    #     B = np.zeros(region.idx_map.shape)
+    #     B[B==0] = np.nan
+    #     sim = np.zeros(sampling_weights.shape)
+    #     for i in range(n_sim):
+    #         sim_fore = numpy.zeros(sampling_weights.shape)
+    #         sim += csep.core.poisson_evaluations._simulate_catalog(num_events_to_simulate, sampling_weights, sim_fore)
+    #
+    #     for i in range(B.shape[0]):
+    #         for j in range(B.shape[1]):
+    #             if not np.isnan(region.idx_map[i,j]):
+    #                 B[i,j] = sim[int(region.idx_map[i,j])]
+    #     B /= n_sim
+    #     BB = B.reshape((B.shape[0], B.shape[1], 1))
+    #     smi = ec.get_NSMI_2D(AA, BB, 200, (0, 2))
+    #     print(smi)
+    #     cmap = plt.get_cmap('jet')
+    #     fig = plt.figure()
+    #     b = plt.imshow(np.flipud(B), cmap=cmap)
+    #     fig.colorbar(b)
+    #     plt.show()
+
+    CC = Forecast.spatial_counts(cartesian=True).reshape((A.shape[0], A.shape[1], 1))
+    # smi = ec.get_NSMI_2D(AA, CC, 200, (0, 2))
+    smi = ec.get_NSMI_2D(DD, CC, 200, (0, 0.1))
+    a = plt.contourf(lons[:-1, :-1], lats[:-1, :-1], Data, levels=levels, cmap=cmap)
+    plt.plot(lons[x-1, y-1], lats[x-1, y-1], 'o', color='black', markersize=3)
+    fig.colorbar(a)
+    plt.title(Forecast.name + ' - %.5f' % smi)
+    plt.show()
+    print(smi)
+# Dir = '/media/pablo/Pandapan/gfz/PhD/codes/'
+# data_dir = os.path.join(Dir,'data')
+# res_dir = os.path.join(paths.results,'smi')
+# os.makedirs(res_dir, exist_ok=True)
+#
+# names = []
+# H_models = {}
+# SMI_models = {}
+# Datas = []
+#
+# models = get_models.ten_years()
+#
+#
+# Hx = []
+# Hx_model = {}
+# names = []
+# n_bins = 100
+# n_cells = 8993
+#
+# cmap = plt.cm.get_cmap('jet')
+# for i, Forecast in enumerate(models):
+#     names.append(Forecast.name)
+#     Data = Forecast.spatial_counts(cartesian=True)
+#     hist = np.histogram(Data[~np.isnan(Data)], bins=n_bins)
+#     entropy = ec.entropy1d(hist[0]/n_cells)
+#     Hx.append(ec.entropy1d(hist[0]/n_cells))
+#     Hx_model[Forecast.name] = entropy
+#     map = plt.get_cmap('jet')
+#     levels = MaxNLocator(nbins=20).tick_values(0, 0.02)
+#     region = Forecast.region
+#
+#     fig = plt.figure()
+#     plt.title(Forecast.name)
+#     lons, lats = numpy.meshgrid(numpy.append(region.xs, region.xs[-1] + region.dh),
+#                                 numpy.append(region.ys, region.ys[-1] + region.dh))
+#     a = plt.contourf(lons[:-1, :-1], lats[:-1, :-1], Data, levels=levels, cmap=cmap)
+#     fig.colorbar(a)
+#     plt.show()
+
+# Hx = np.array(Hx)
+# order = np.argsort(Hx)
+# for i in order:
+#     print(i)
+#     Data = models[i].spatial_counts(cartesian=True)
+#     name = names[i]
+#     # logData = np.log10(Forecast.spatial_counts(cartesian=True))
+#
+#     # A[np.abs(A)==np.inf] = -15
+#     n_bins = 50
+#     # plt.figure(figsize=(8,6))
+#     hist = plt.hist(Data[~np.isnan(Data)], bins=100, log=True, range=(0, 0.01))
+#     plt.ylim((0, 5000))
+#     plt.title(models[i].name + ' - %.3f' % Hx[i])
+#     plt.show()

 # hist = np.histogram(data, density=True, bins=150, range=(-7.06,-1.29))
-    dbin = np.diff(hist[1])[0]
-    hist_freq = hist[0]*dbin
-    H = ec.entropy1d(hist_freq)
-    H_models[Forecast.name] = np.exp(H)
+# dbin = np.diff(hist[1])[0]
+# hist_freq = hist[0]*dbin
+# H = ec.entropy1d(hist_freq)
+# H_models[Forecast.name] = np.exp(H)
 # B = A.reshape( A.shape[0], A.shape[1], -1)
-    B = A.reshape(A.shape[1], -1, A.shape[0])
-    SMI = ec.get_NSMI_2D(B, B, 100, (-10,0))
-    SMI_models[Forecast.name] = SMI
+# B = A.reshape(A.shape[1], -1, A.shape[0])
+# SMI = ec.get_NSMI_2D(B, B, 100, (-10,0))
+# SMI_models[Forecast.name] = SMI
 # Datas.append(data)
 # plt.figure()
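On the simulation block in this file: sampling_weights is the cumulative distribution over grid cells implied by the forecast, and csep.core.poisson_evaluations._simulate_catalog is a private pycsep helper. A plain-numpy sketch of the same inverse-transform idea (an assumption about what that helper does, not pycsep's actual code):

    import numpy as np

    def simulate_counts(num_events, sampling_weights, rng=np.random):
        # Draw uniforms, locate each one in the cumulative cell weights (the CDF),
        # and tally one event per sampled cell.
        u = rng.uniform(0, 1, num_events)
        cells = np.searchsorted(sampling_weights, u)
        counts = np.zeros(sampling_weights.size)
        np.add.at(counts, cells, 1)
        return counts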
@@ -54,31 +255,31 @@ for i, Forecast in enumerate(models):
 # SMI_models['HiResSmoSeis-m2'], SMI_models['HiResSmoSeis-m1'] = SMI_models['HiResSmoSeis-m1'], SMI_models['HiResSmoSeis-m2']
-T={}
+# T={}
+T = {row[0].sim_name[1]: np.sum([i.observed_statistic for i in row])
+     for row in paths.get_csep_result('T', 10)[0]}
+L = {i.sim_name: i.observed_statistic for i in paths.get_csep_result('L', 10)}
+CL = {i.sim_name: i.observed_statistic for i in paths.get_csep_result('CL', 10)}
+pgs = {i.sim_name: i.test_distribution[0] for i in paths.get_csep_result('PGS', 10, 'avg')}
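A final note on this file: the nested i/j loop that scatters flat per-cell values onto the region grid appears twice above (once for A, once for B). A vectorized equivalent is shorter (a sketch, assuming region.idx_map holds NaN outside the testing region and flat cell indices inside, as the loop's isnan guard implies):

    valid = ~np.isnan(region.idx_map)
    A = np.full(region.idx_map.shape, np.nan)
    A[valid] = counts[region.idx_map[valid].astype(int)]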