Commit 681f50db authored by Pablo Iturrieta Rebolledo's avatar Pablo Iturrieta Rebolledo
Browse files

Modified K-Ripley to calculate using the Mercator projection

parent fff9b48f
stored_csep = join(store, 'csep_tests')
spatent_logs = join(store, 'spatent')
# Per-duration storage directories, keyed by experiment length in years.
# join() first builds 'dir/%iyr', then the '%' operator fills in the year.
csep_store = {i: join(stored_csep, '%iyr') % i for i in years}
spatent_store = {i: join(spatent_logs, '%iyr') % i for i in years}
# NOTE(review): kripley_store points at the same base directory as
# spatent_store (spatent_logs) -- confirm whether K-Ripley results should
# have their own directory instead.
kripley_store = {i: join(spatent_logs, '%iyr') % i for i in years}
tests = ['N', 'M', 'S', 'L', 'CL', 'T', 'PGS']
# Ensure every storage directory exists up front.
for i, j in csep_store.items():
    os.makedirs(j, exist_ok=True)
for i, j in spatent_store.items():
    os.makedirs(j, exist_ok=True)
for i, j in kripley_store.items():
    os.makedirs(j, exist_ok=True)
# Figures
def get_csep_result_path(test, year, type=False):
    """
    Get the file path of a stored CSEP test result.

    :param test: Name of the test
    :param year: Year duration of the test
    :param type: Particular type of test; appended to the filename when truthy
    :return: path string inside the per-year CSEP store
    """
    # NOTE(review): the `if type:` guard is reconstructed -- the visible
    # `else:` branch implies it, but the original line is not in view.
    if type:
        type = '_' + str(type)
    else:
        type = ''
    return join(csep_store[year], test + type)
def get_spatent_result_path(method, year):
    """
    Get the file path of a spatial-entropy log file.

    :param method: Name of the spatial-entropy method
    :param year: Year duration of the experiment (coerced to int for lookup)
    :return: path to 'log_<method>_<year>.txt' inside the per-year store
    """
    return join(spatent_store[int(year)], 'log_%s_%s.txt' % (method, str(year)))
def get_kripley_result_path(method, year):
    """
    Get the file path of a K-Ripley result object.

    :param method: Name of the method/run (used in the filename)
    :param year: Year duration of the experiment (coerced to int for lookup)
    :return: path to 'log_<method>_<year>.txt' inside the per-year store
    """
    return join(kripley_store[int(year)], 'log_%s_%s.txt' % (method, str(year)))
def get_csep_result(test, year, type=False):
    """
    Get the saved CSEPEvaluationResult object of a test.

    :param test: Name of the test
    :param year: Year duration of the test
    :param type: Particular type of test
    :return: the unpickled result object
    """
    # NOTE(review): pickle.load on stored files -- safe only if the store is
    # trusted; never point this at externally supplied data.
    with open(get_csep_result_path(test, year, type), 'rb') as file_:
        binary = pickle.load(file_)
    return binary
def get_csep_figpath(test, year, type=None):
def get_sentropy_figpath(analysis, year, model):
    """
    Get the figure path for a spatial-entropy analysis.

    :param analysis: Name of the analysis
    :param year: Year duration of the experiment
    :param model: Model name
    :return: path to '<analysis>_<year>_<model>.png' in the figures directory
    """
    return join(spat_entropy_figs, "%s_%s_%s.png" % (analysis, str(year), model))
def get_kripley_figpath(analysis, year, model):
    """
    Get the figure path for a K-Ripley analysis.

    :param analysis: Name of the analysis (e.g. 'K', 'L', 'K_sim')
    :param year: Year duration of the experiment
    :param model: Model name
    :return: path to '<analysis>_<year>_<model>.png'
    """
    # NOTE(review): callers invoke paths.get_ripleyk_figpath -- that name does
    # not match this one; reconcile or the call raises AttributeError.
    # NOTE(review): reuses the spatial-entropy figure directory
    # (spat_entropy_figs) -- confirm K-Ripley figures should not have their
    # own directory.
    return join(spat_entropy_figs, "%s_%s_%s.png" % (analysis, str(year), model))
import fiona
import matplotlib.pyplot as plt
import numpy as np
import os
import entropy_calc as ec
from codes import get_models
from codes import paths
import numpy
import matplotlib
import pickle
import matplotlib.pyplot as plt
from matplotlib.colors import BoundaryNorm
from matplotlib.ticker import MaxNLocator
import numpy as np
from codes import get_catalogs
import csep
import itertools
import pickle
import time
from rpy2 import robjects
import rpy2.robjects.packages as rpackages
import shapely
from rpy2.robjects import globalenv
import cartopy
def get_simulations(model, catalog, lons, lats):
......@@ -75,60 +69,158 @@ def K_r(Dists, rates, r):
K_r /= n**2
return K_r
if __name__ == "__main__":
with open('obj', 'rb') as file_:
model, catalog = pickle.load(file_)
def lonlat2mercator(points):
    """
    Project geodetic (lon, lat) points to the RDN2008 / Italy CRS (EPSG:6875).

    :param points: (N, 2) array-like of [lon, lat] in degrees
    :return: (N, 2) ndarray of projected [x, y] coordinates, floored to
        whole units (metres for this CRS -- sub-metre precision is dropped)
    """
    src_crs = cartopy.crs.Geodetic()
    dest_crs = cartopy.crs.epsg(6875)
    # transform_points returns (N, 3); keep only x, y.
    projected = dest_crs.transform_points(src_crs, points[:, 0], points[:, 1])
    return np.floor(projected[:, :2])
def K_r_spatstat(points, model, normpower=1, plot=False, r=None):
    """
    Inhomogeneous Ripley K function of a catalog with respect to a forecast
    intensity, computed with R's spatstat (via rpy2) in projected km.

    :param points: (N, 2) array of catalog [lon, lat] epicentres
    :param model: forecast exposing .region (grid) and .spatial_counts()
    :param normpower: 'normpower' argument forwarded to spatstat's Kinhom
    :param plot: if True, draw the intensity image and point pattern via R
    :param r: optional distance vector (km) at which K is evaluated; when
        None, spatstat chooses the distances automatically
    :return: (K, r_out) -- Kinhom estimates and the distances used, as lists
    """
    # Import R spatstat modules.
    spat_geom = rpackages.importr("spatstat.geom")
    spat_core = rpackages.importr("spatstat.core")

    # Grid geometry of the forecast region, projected to km.
    region = model.region
    midpoints = region.midpoints()
    lon_r = np.sort(np.unique(midpoints[:, 0]))
    lat_r = np.sort(np.unique(midpoints[:, 1]))
    # Project the unique grid longitudes/latitudes along a fixed latitude /
    # longitude line respectively; divide by 1000 to work in km.
    X_r = lonlat2mercator(np.stack([lon_r, np.repeat(min(lat_r), lon_r.shape[0])]).T)[:, 0] / 1000
    Y_r = lonlat2mercator(np.stack([np.repeat(min(lon_r), lat_r.shape[0]), lat_r]).T)[:, 1] / 1000
    Xregion_array = robjects.FloatVector(np.sort(np.unique(X_r)))
    Yregion_array = robjects.FloatVector(np.sort(np.unique(Y_r)))

    # Forecast rates as an R image: the intensity surface for Kinhom's lambda.
    rates = model.spatial_counts(cartesian=True)
    rates_array = robjects.FloatVector(rates.T.ravel())
    rates_mat = robjects.r['matrix'](rates_array, nrow=rates.shape[0])
    image = spat_geom.im(rates_mat, Xregion_array, Yregion_array)

    # Observation window: the Italy polygon, projected to km.
    italy_region = fiona.open(paths.italy_region)
    polygon = np.array(italy_region[0]['geometry']['coordinates'][0])
    polygon = lonlat2mercator(polygon) / 1000.
    # flipud reverses the vertex order and the last (repeated) vertex is
    # dropped -- presumably to meet spatstat's anticlockwise, non-closed
    # polygon convention; TODO confirm against the source shapefile.
    poly_array = robjects.FloatVector(np.flipud(polygon[:-1, :]).T.ravel())
    poly_mat = robjects.r['matrix'](poly_array, nrow=polygon.shape[0] - 1)
    poly_array_xrange = robjects.FloatVector([np.min(polygon[:, 0]), np.max(polygon[:, 0])])
    poly_array_yrange = robjects.FloatVector([np.min(polygon[:, 1]), np.max(polygon[:, 1])])
    window = spat_geom.owin(xrange=poly_array_xrange, yrange=poly_array_yrange, poly=poly_mat)

    # Catalog as an R planar point pattern (ppp) inside the window.
    Points = lonlat2mercator(points) / 1000
    point_array_x = robjects.FloatVector(Points[:, 0])
    point_array_y = robjects.FloatVector(Points[:, 1])
    PPP_R = spat_geom.ppp(point_array_x, point_array_y, window)

    # Build Kinhom arguments: r-vector user-defined or automatic.
    if r is None:
        args = (('X', PPP_R), ('lambda', image), ('correction', 'best'),
                ('normpower', normpower))
    else:
        args = (('X', PPP_R), ('lambda', image), ('correction', 'best'),
                ('normpower', normpower), ('r', robjects.FloatVector(r)))

    # Evaluate Kinhom in the R global environment. Columns 0 and 2 of the
    # returned fv object are read as r and the K estimate respectively.
    kinhom = spat_core.Kinhom
    K_results = kinhom.rcall(args, globalenv)
    K = list(K_results[2])
    r_out = list(K_results[0])

    if plot:
        spat_geom.plot_im(image, 'asd', multiplot=True)
        spat_geom.plot_ppp(PPP_R, 'asd', add=True)
    return K, r_out
if __name__ == "__main__":
    models = get_models.ten_years()
    catalog = get_catalogs.filter_10yr_01_2010(get_catalogs.bollettino())
    # Results accumulator, one entry per model name.
    K_total = dict.fromkeys([i.name for i in models])

    for model in models[1:2]:  # NOTE(review): only the second model is run
        print(model.name)
        # Observed catalog as an (N, 2) lon/lat array.
        points = np.stack((catalog.get_longitudes(), catalog.get_latitudes())).T
        r = np.linspace(0, 800, 50)  # distances (km) at which K is evaluated
        K_cat, r_cat = K_r_spatstat(points, model, r=r)

        lons = model.get_longitudes()
        lats = model.get_latitudes()
        nsim = 100
        sim_catalogs = iter([get_simulations(model, catalog, lons, lats)
                             for i in range(nsim)])

        # K estimates for each simulated catalog, and the r-vectors used.
        K_sims = []
        R_sims = []
        for cat, inds in sim_catalogs:
            K_i, r_i = K_r_spatstat(cat, model, r=r, plot=True)
            K_sims.append(K_i)
            # Fixed: the original appended the list to itself
            # (R_sims.append(R_sims)); the per-simulation r-vector is intended.
            R_sims.append(r_i)
            # Every evaluation must share the catalog's distance vector.
            assert r_i == r_cat
            plt.plot(r_i, K_i, '.-')
        plt.title('Model %s - All simulations' % model.name)
        plt.grid()
        plt.savefig(paths.get_ripleyk_figpath('K_sim', 10, model.name))
        plt.show()

        # K function: catalog vs simulation envelope (1%-99% quantiles).
        K = K_sims
        K_avg = np.mean(K, axis=0)
        K_up = np.quantile(K, 0.99, axis=0)
        K_down = np.quantile(K, 0.01, axis=0)
        plt.plot(r_cat, K_cat, color='r')
        plt.plot(r_cat, K_avg)
        plt.fill_between(r_cat, K_down, K_up, color='gray', alpha=0.2)
        plt.title('Model %s - K function' % model.name)
        plt.grid()
        plt.savefig(paths.get_ripleyk_figpath('K', 10, model.name))
        plt.show()

        # L function (variance-stabilised transform): L = sqrt(K / pi).
        L = np.divide(np.sqrt(K_sims), np.sqrt(np.pi))
        L_avg = np.mean(L, axis=0)
        L_cat = np.sqrt(np.array(K_cat) / np.pi)
        L_up = np.quantile(L, 0.99, axis=0)
        L_down = np.quantile(L, 0.01, axis=0)
        plt.plot(r_cat, L_cat, color='r')
        plt.plot(r_cat, L_avg)
        plt.fill_between(r_cat, L_down, L_up, color='gray', alpha=0.2)
        plt.title('Model %s - L function' % model.name)
        plt.grid()
        plt.savefig(paths.get_ripleyk_figpath('L', 10, model.name))
        plt.show()

        K_total[model.name] = {'K_sims': K,
                               'K_cat': K_cat,
                               'K_avg': K_avg,
                               'K_up': K_up,  # fixed: 'K_up' key was missing
                               'K_down': K_down,
                               'L_sims': L,
                               'L_cat': L_cat,
                               'L_avg': L_avg,
                               'L_down': L_down,
                               'L_up': L_up}

    # NOTE(review): persists (model, catalog) as in the original -- K_total
    # looks like the intended payload; confirm before relying on this file.
    with open(paths.get_kripley_result_path('Normpower2', 10), 'wb') as file_:
        pickle.dump((model, catalog), file_)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment