Commit bdead6ae authored by Tara Evaz Zadeh's avatar Tara Evaz Zadeh
Browse files

Added newer versions of losslib.py and the newer versions of the...

Added newer versions of losslib.py and lossCalculator.py, and deleted the extra functions that are already included in losslib.
parent 98fd22ba
import csv
def Taxonomy_to_Fragility(gmDict, taxonomyToFragilitySource, fragilityFileDir):
    """
    Creates an extended map of taxonomies to fragility functions.

    The input map 'taxonomyToFragilitySource' contains the mapping for each
    taxonomy to a fragility-function file, from which the ground-motion type
    is read to be written to the extended map 'taxonomyToFragilityMap'.

    Input:
    gmDict: dictionary (key: ground-motion type; value: column number in the
        ground-motion field file)
    taxonomyToFragilitySource: taxonomy to fragility-function file map,
        following the format:
        [taxonomy_string, fragility-function_filename, 'weight'], [...]
        Example file extract:
        ['CR/LDUAL+CDM/HBET:6-/11.0', 'CR_LDUAL-DUM_H6', '1']
        ['CR/LDUAL+CDM/HBET:6-/5.0', 'CR_LDUAL-DUL_H6', '1']
        ['CR/LDUAL+CDM/HBET:6-/SOS/11.0', 'CR_LDUAL-DUL_H6', '1']
        ...
    fragilityFileDir: directory of fragility-function files

    Output:
    Dictionary containing the taxonomy to fragility-function map considering
    the ground-motion types. It follows the format:
        {taxonomy_string: [fragility-function_filename, column of
         ground-motion_type in ground-motion_field file]}
        Example extract:
        {'CR/LDUAL+CDM/HBET:6-/11.0': ['CR_LDUAL-DUM_H6', 4],
         'CR/LDUAL+CDM/HBET:6-/5.0': ['CR_LDUAL-DUL_H6', 4], ... }
    """
    # Prepare return variable
    taxonomyToFragilityMap = {}
    # Loop through the taxonomy-to-fragility-function map
    for mappingItem in taxonomyToFragilitySource:
        # Keep only the FIRST fragility function per taxonomy to stay
        # unambiguous.  (The original fell through after the check and
        # overwrote the entry, contradicting its own comment.)
        if mappingItem[0] in taxonomyToFragilityMap:
            continue
        # Open the fragility-function file corresponding to the taxonomy in
        # 'mappingItem[1]'; its first cell holds the ground-motion type
        # (e.g. 'PGA', 'SA(0.3)').  Use a context manager so the file
        # handle is not leaked.
        with open(fragilityFileDir + "/" + mappingItem[1] + ".csv") as fragFile:
            fragilityFunction = list(csv.reader(fragFile))
        # Create the entry in the extended map, adding the column index of
        # the ground-motion type 'fragilityFunction[0][0]'
        taxonomyToFragilityMap[mappingItem[0]] = [
            mappingItem[1], gmDict[fragilityFunction[0][0]]]
    return taxonomyToFragilityMap
#!/usr/bin/env python
def Get_Full_GMF(groundMotionField, lons, lats, method='linear'):
    """
    Returns ground-motion values at (lons, lats) using 2-D interpolation.

    Input:
    groundMotionField: numpy ndarray of shape (m, n); column 0 is longitude,
        column 1 is latitude, columns 2..n-1 are ground-motion values (one
        column per ground-motion type).  If there is more than one value
        column, interpolation is performed once per column.
    lons: longitudes of the locations to interpolate over
        (pandas Series or numpy 1-d array of length a)
    lats: latitudes of the locations to interpolate over
        (pandas Series or numpy 1-d array of length a)
    method: interpolation method passed to scipy.interpolate.griddata
        ('linear', 'nearest' or 'cubic'); 'linear' by default.

    Output:
    fullGroundMotionField: numpy ndarray of shape (a, n) holding longitudes,
        latitudes and the interpolated value columns.
    """
    import numpy as np
    from scipy.interpolate import griddata
    # Known sample locations (lon, lat) from the ground-motion field.
    points_given = groundMotionField[:, :2]
    # Target locations; also the first two columns of the result.
    points_todo = np.column_stack((np.asarray(lons), np.asarray(lats)))
    fullGroundMotionField = points_todo
    # One griddata pass per ground-motion value column.
    for gmType in range(2, groundMotionField.shape[1]):
        interpolated = griddata(points_given, groundMotionField[:, gmType],
                                points_todo, method=method)
        fullGroundMotionField = np.column_stack(
            (fullGroundMotionField, interpolated))
    return fullGroundMotionField
#!/usr/bin/env python
import matplotlib.pyplot as plt
import numpy as np
import os
import pandas as pd
from csv import writer
#from get_gm_at_location_02 import get_gm_at_location
from get_gm_at_location_02 import interp_gm_at_location
#from interpolate_Karsten import interp_gm_at_location_karsten
#from rbf import rbf_gm_at_location
from get_fragility_function import get_fragility_function
#from interp_gm_at_location_griddata import interp_gm_at_location_griddata
from get_PoE import get_PoE
import csv
def LossCalculatorWrap(Result_path, fragilities_path, exposures_path,
                       taxonomy_conversion_path, shakemap_path):
    """
    Compute per-asset damage numbers and write them to a result CSV.

    Input:
    Result_path: path of the output CSV (recreated on each run)
    fragilities_path: directory holding the fragility-function CSV files
    exposures_path: exposure CSV; columns id, lon, lat, taxonomy, number
        are read
    taxonomy_conversion_path: CSV mapping exposure taxonomies to
        fragility-function file names (consumed by get_fragility_function)
    shakemap_path: ground-motion field CSV

    Output: none; a header row plus one row per asset is appended to
    Result_path.
    """
    # Start from a fresh result file.
    if os.path.exists(Result_path):
        os.remove(Result_path)
    # Read input files.  (The original also read taxonomy_conversion_path
    # here into an unused variable; get_fragility_function reads it itself.)
    exposures = pd.read_csv(exposures_path)
    # Exposure columns used per asset.
    taxonomies = exposures.taxonomy
    tot_num_buildings_row = exposures.number
    lons = exposures.lon
    lats = exposures.lat
    assetids = exposures.id
    # Prepare the result file with its header row.
    def append_list_as_row(file_name, list_of_elem):
        # Append a single CSV row to the result file.
        with open(file_name, 'a+', newline='') as write_obj:
            csv_writer = writer(write_obj)
            csv_writer.writerow(list_of_elem)
    title=['asset_id','lon','lat','taxonomy','gmfType','gmfValue','Ppoe','PoOs','tot_num_buildings','structural_No-damage','structural_Slight','structural_moderate','structural_extensive','structural_complete']
    append_list_as_row(Result_path, title)
    # Per-asset computation.
    for asset in range(exposures.shape[0]):
        print(asset)
        taxonomy = taxonomies[asset]
        tot_num_buildings = tot_num_buildings_row[asset]
        lon = lons[asset]
        lat = lats[asset]
        asset_id = assetids[asset]
        print(lon, lat)
        # Fragility function and fragility-file path for this taxonomy.
        fragility_function, new_tax = get_fragility_function(
            taxonomy, fragilities_path, taxonomy_conversion_path)
        # The first cell of the fragility file names the ground-motion type;
        # prefix with 'gmv_' to obtain the shakemap column name.  Use a
        # context manager so the file handle is not leaked.
        with open(new_tax) as frag_file:
            data = list(csv.reader(frag_file))
        gm_type = "gmv_" + data[0][0]
        print(gm_type)
        print(taxonomy)
        # Interpolated ground-motion value at the asset location.
        gm_value = interp_gm_at_location(lon, lat, shakemap_path, gm_type)
        print('gmv=', gm_value)
        [PoEs, PoOs] = get_PoE(fragility_function, gm_value)
        # Expected number of buildings in each damage state.
        dmg_by_asset = [i * tot_num_buildings for i in PoOs]
        # Append the result row.
        arr0 = [asset_id, lon, lat, taxonomy, gm_type, gm_value,
                PoEs, PoOs, tot_num_buildings]
        arr0.extend(dmg_by_asset)
        append_list_as_row(Result_path, arr0)
def get_PoE(fragility_function, gm_value):
    """
    Compute probabilities of exceedance (PoEs) and of occurrence (PoOs) of
    the damage states for one ground-motion value.

    Input:
    fragility_function: 2-D numpy array; row 0 holds the intensity measure
        levels (IMLs, assumed ascending), rows 1-4 hold the PoE curves of
        the slight/moderate/extensive/complete damage states.
    gm_value: ground-motion value at the asset location.

    Output:
    (PoEs, PoOs):
        PoEs = [1, PoE_slight, PoE_moderate, PoE_extensive, PoE_complete]
        PoOs[i] = PoEs[i] - PoEs[i+1] for the first four states and
        PoOs[-1] = PoEs[-1]; i.e. probabilities of ending in each state,
        starting with 'no damage'.
    """
    import numpy as np
    from scipy import interpolate
    imls = fragility_function[0]
    damage_curves = fragility_function[1:5]
    print(imls)
    print(gm_value)
    if gm_value < imls[0]:
        # Below the curve range: no damage state is exceeded.
        PoEs = [1, 0, 0, 0, 0]
    elif gm_value > imls[-1]:
        # Above the curve range: linearly extrapolate each curve from its
        # last two points.
        x = [imls[-2], imls[-1]]
        PoEs = [1]
        for curve in damage_curves:
            f = interpolate.interp1d(x, [curve[-2], curve[-1]],
                                     fill_value='extrapolate')
            PoEs.append(float(f(gm_value)))
    else:
        # Within range (boundaries included): piecewise-linear
        # interpolation.  np.interp also handles gm_value exactly equal to
        # imls[0] or imls[-1]; the original max()/min() over a filtered
        # array raised on those boundary values.
        PoEs = [1]
        for curve in damage_curves:
            PoEs.append(float(np.interp(gm_value, imls, curve)))
    # Occurrence probabilities: differences of consecutive PoEs, plus the
    # probability of the most severe state.  (Computed once instead of
    # duplicating the loop in each branch.)
    PoOs = [PoEs[i] - PoEs[i + 1] for i in range(4)]
    PoOs.append(PoEs[-1])
    return (PoEs, PoOs)
#!/usr/bin/env python
def get_fragility_function(taxonomy, fragilities_path, taxonomy_conversion_path):
    """
    Look up the fragility function for an exposure taxonomy.

    Input:
    taxonomy: taxonomy string as used in the exposure file
    fragilities_path: directory containing the fragility-function CSV files
    taxonomy_conversion_path: CSV with columns 'taxonomy' and 'conversion'
        mapping each taxonomy to a fragility file name (without extension)

    Output:
    (frag, new_tax):
        frag: numpy array of the numeric part of the fragility file
            (columns 1..100; column 0 holds the ground-motion type)
        new_tax: full path of the fragility CSV file
    """
    import pandas as pd
    import numpy as np
    taxonomy_conversion_file = pd.read_csv(taxonomy_conversion_path)
    # Translate the exposure taxonomy to its fragility file name.
    n = taxonomy_conversion_file.loc[
        taxonomy_conversion_file['taxonomy'] == taxonomy, 'conversion'].iloc[0]
    new_tax = fragilities_path + "/" + n + ".csv"
    # Columns 1..100 hold the numeric data; column 0 is the gm-type label.
    cls = range(1, 101)
    # Reuse new_tax instead of rebuilding the same path string.
    frag = np.loadtxt(new_tax, delimiter=",", usecols=cls)
    return (frag, new_tax)
#!/usr/bin/env python
#NOTE(review): interpolate the GMFs so there are enough rows to match the exposure file (the exposure can contain few lines for the same location); either use SDC_006.py, or have the script read only the lines it needs from a GMF file that contains unique locations only.
def interp_gm_at_location(lon, lat, shakemap_path, gm_type):
    """
    Return the ground-motion value of type gm_type at (lon, lat).

    If the exact location is present in the shakemap file its value is
    returned directly; otherwise the value is linearly interpolated from
    the shakemap points.

    Input:
    lon, lat: coordinates of the asset location
    shakemap_path: CSV with columns 'lon', 'lat' and ground-motion value
        columns (e.g. 'gmv_PGA')
    gm_type: name of the ground-motion column to read

    Output:
    gm_value: scalar ground-motion value at (lon, lat)
    """
    import pandas as pd
    from scipy.interpolate import griddata
    shakemapFile = pd.read_csv(shakemap_path)
    # Exact-location lookup: test that the (lon, lat) PAIR exists.  The
    # original `lon in shake_lons` tested membership in the Series *index*
    # (row numbers), not in the coordinate values, and tested lon and lat
    # independently of each other.
    exact = (shakemapFile['lon'] == lon) & (shakemapFile['lat'] == lat)
    if exact.any():
        gm_value = shakemapFile.loc[exact, gm_type].iloc[0]
    else:
        # scipy.interpolate.interp2d is deprecated and removed in
        # SciPy >= 1.14; griddata provides piecewise-linear interpolation
        # over the scattered shakemap points.
        points = shakemapFile[['lon', 'lat']].to_numpy()
        values = shakemapFile[gm_type].to_numpy()
        [gm_value] = griddata(points, values, [(lon, lat)], method='linear')
    return gm_value
#################################################
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy.spatial.distance import cdist
def get_gm_at_location(lon, lat, shakemap_path, gm_type):
    """
    Return the ground-motion value of type gm_type at (lon, lat).

    If the exact location is present in the shakemap file its value is
    returned; otherwise the value of the closest shakemap point (smallest
    Euclidean distance in degrees) is used.

    Input:
    lon, lat: coordinates of the asset location
    shakemap_path: CSV with columns 'lon', 'lat' and ground-motion value
        columns (e.g. 'gmv_PGA')
    gm_type: name of the ground-motion column to read

    Output:
    gm_value: scalar ground-motion value at (or nearest to) (lon, lat)
    """
    shakemapFile = pd.read_csv(shakemap_path)
    # Exact-location lookup: test that the (lon, lat) PAIR exists.  The
    # original `lon in shake_lons` tested membership in the Series *index*,
    # not in the coordinate values.
    exact = (shakemapFile['lon'] == lon) & (shakemapFile['lat'] == lat)
    if exact.any():
        return shakemapFile.loc[exact, gm_type].iloc[0]
    # Fall back to the nearest shakemap grid point.
    points = list(zip(shakemapFile.lon, shakemapFile.lat))
    (ln, lt) = points[cdist([(lon, lat)], points).argmin()]
    nearest = (shakemapFile['lon'] == ln) & (shakemapFile['lat'] == lt)
    return shakemapFile.loc[nearest, gm_type].iloc[0]
#!/usr/bin/env python
import numpy as np
import os
import pandas as pd
import csv
import datetime
import losslib
def LossCalculatorWrap9(Result_path,fragilityFileDir,exposures_path,\
    taxonomy_conversion_path,shakemap_path,polygonSource_path,exposureType,method):
    """
    Prepare the inputs and call the losslib functions needed to compute
    per-asset damage estimates, writing one row per asset to Result_path.

    Input:
    Result_path: output CSV path (recreated on each run)
    fragilityFileDir: directory of the fragility-function CSV files
    exposures_path: exposure CSV; columns id, lon, lat, taxonomy, number
        and origin_id are read
    taxonomy_conversion_path: taxonomy-to-fragility-file CSV (header row
        skipped)
    shakemap_path: ground-motion field CSV (header row skipped)
    polygonSource_path: ';'-delimited CSV mapping origin ids to polygons
        (header row skipped)
    exposureType: 'OBM' selects the extended output format that also
        carries a relative cell id per asset
    method: interpolation method forwarded to losslib.Get_Full_GMF

    Output: none; a header row plus one row per asset is appended to
    Result_path, and the execution time is printed.
    """
    # This function prepares the inputs and calls the functions needed.
    startTime = datetime.datetime.now()
    print(startTime)
    # Start from a fresh result file.
    if os.path.exists(Result_path):
        os.remove(Result_path)
    # Read Files as Numpy arrays or Pandas data-frames
    exposures = pd.read_csv(exposures_path)
    groundMotionField = np.loadtxt(shakemap_path,delimiter=',',skiprows=1)
    taxonomyToFragilitySource = csv.reader(open(taxonomy_conversion_path))
    # Skip the header row of the taxonomy-conversion file.
    next(taxonomyToFragilitySource, None)
    PolygonSource = csv.reader(open(polygonSource_path),delimiter=';')
    # Skip the header row of the polygon file.
    next(PolygonSource, None)
    # Read Columns
    taxonomies=exposures.taxonomy
    tot_num_buildings_row=exposures.number
    lons=exposures.lon
    lats=exposures.lat
    assetids=exposures.id
    originids=exposures.origin_id
    #originids=exposures.id_3
    # Prepare Result File
    def append_list_as_row(file_name, list_of_elem):
        # Append a single CSV row to the result file.
        with open(file_name, 'a+', newline='') as write_obj:
            csv_writer = csv.writer(write_obj)
            csv_writer.writerow(list_of_elem)
    # Header row; empty strings act as spacer columns.  The 'OBM' exposure
    # type carries an extra 'RelativeCellid' column.
    if exposureType == 'OBM':
        title=['polygon','','origin_id','','RelativeCellid','','asset_id',\
            '','lon','','lat','','taxonomy','','gmfValue','','PoEs',\
            '','PoOs','','tot_num_buildings','','structural_No-damage','',\
            'structural_Slight','','structural_Moderate','','structural_Extensive'\
            ,'','structural_Complete']
    else:
        title=['polygon','','origin_id','','asset_id','','lon','','lat','',\
            'taxonomy','','gmfValue','','PoEs','','PoOs','','tot_num_buildings',\
            '','structural_No-damage','','structural_Slight','',\
            'structural_Moderate','','structural_Extensive','',\
            'structural_Complete']
    append_list_as_row(Result_path, title)
    # COMPUTATION
    # Interpolate the ground-motion field onto all asset locations at once.
    fullGroundMotionField = losslib.Get_Full_GMF(groundMotionField,lons,lats,method)
    # Column index of each ground-motion type in the gm-field file.
    # NOTE(review): assumes the gm-field columns are
    # lon,lat,PGA,SA(0.3),SA(0.6),SA(1.0) -- confirm against the shakemap
    # file format.
    gmDict = {'PGA' : 2 , 'SA(0.3)' : 3, 'SA(0.6)' : 4, 'SA(1.0)' : 5\
        , 'SA(1)' : 5}
    taxonomyToFragilityMap = losslib.Taxonomy_to_Fragility(gmDict,\
        taxonomyToFragilitySource,fragilityFileDir)
    OriginIdToPolygonMap = losslib.OriginId_to_Polygon(PolygonSource,exposureType)
    #number of columns that contain the data in the fragiliy function files.
    cls = range(1,101)
    # Positions at which empty spacer cells are inserted into the damage
    # list so the row lines up with the spacer columns of the header.
    a=[0,2,4,6,8]
    for asset in range(exposures.shape[0]):
        taxonomy=taxonomies[asset]
        fragilityFileName = taxonomyToFragilityMap[taxonomy][0] + ".csv"
        tot_num_buildings=tot_num_buildings_row[asset]
        lon=lons[asset]
        lat=lats[asset]
        asset_id=assetids[asset]
        origin_id=originids[asset]
        if exposureType == 'OBM':
            [polygon,relativeCellid] = OriginIdToPolygonMap[origin_id]
        else:
            polygon = OriginIdToPolygonMap[origin_id]
        # Find new taxonomy names
        fragility_function = np.loadtxt(fragilityFileDir+"/"+fragilityFileName\
            , delimiter=",", usecols=cls)
        # Interpolated gm value of the gm type required by this taxonomy
        # (column index stored in taxonomyToFragilityMap[taxonomy][1]).
        gm_value = fullGroundMotionField[asset,\
            taxonomyToFragilityMap[taxonomy][1]]
        [PoEs, PoOs] = losslib.get_PoEs(fragility_function, gm_value)
        # Compute damages by assets
        dmg_by_asset = [i * tot_num_buildings for i in PoOs]
        for h in a:
            dmg_by_asset.insert(h,'')
        # Append results
        if exposureType == 'OBM':
            arr0=[polygon,'',origin_id,'','cell_' + relativeCellid,'',asset_id\
                ,'',lon,'',lat,'',taxonomy,'',gm_value,'',PoEs,'',PoOs\
                ,'',tot_num_buildings]
        else:
            arr0=[polygon,'',origin_id,'',asset_id,'',lon,'',lat,'',taxonomy\
                ,'',gm_value,'',PoEs,'',PoOs,'',tot_num_buildings]
        arr0.extend(dmg_by_asset)
        append_list_as_row(Result_path, arr0)
    #print('time now',datetime.datetime.now)
    print('Execution time of the script',(datetime.datetime.now() - startTime))
......@@ -5,70 +5,66 @@ import csv
from scipy import interpolate
def Get_Full_GMF(groundMotionField, lons, lats, method='linear'):
"""
Returns ground-motion values using 2-dimensional interpolation for the
given longitudes and latitudes. The default interpolation method is linear.
Input:
------
- groundMotionField: contains locations and the ground-motion values of
each location in the format of lon,lat,gmValueofType1,gmValueofType2,etc.
Please note that if the groundMotionField contains values for more than one
ground motion type, This function will do interpolation multiple times and
each time using one of the gmValueTypes (Numpy nd-array of shape (m,n))
with the format as below.
[lon,lat,gmValueofType1,gmValueofType2...]
As an example:
[23.6875,38.3402777778,'PGA','SA(0.3)','SA(0.6)','SA(1.0)']
Example extract:
>>> groundMotionField
array([[2.289028e+01, 3.631806e+01, 4.822530e-03, 1.255729e-02,
8.736364e-03, 5.612176e-03],
[2.289028e+01, 3.632083e+01, 4.830462e-03, 1.257595e-02,
8.748890e-03, 5.619673e-03],
...,
[2.410694e+01, 3.818194e+01, 4.961839e-02, 1.078893e-01,
7.710517e-02, 4.852522e-02]])
- lons: longitudes of the locations we want to interpolate over
(pandas.DataFrame(series) OR numpy-1darray of len a)
Input:
------
- groundMotionField: (Numpy nd-array of shape(m,n); m records with long,
lat, and n-2 gm-types)
contains locations and the ground-motion values of each location
in the format of lon, lat, gmValueofType1, gmValueofType2, ...,
gmValueofTypeN.
Please note that if the groundMotionField contains values for
more than one ground motion type, This function will do
interpolation multiple times and each time using one of the
gmValueTypes with the format as below:
[lon, lat, gmValueofType1, gmValueofType2, ..., gmValueofTypeN]
As an example:
[23.6875,38.3402777778,'PGA','SA(0.3)','SA(0.6)','SA(1.0)']
Example extract:
>>> groundMotionField
array([
[2.289e+01, 3.631e+01, 4.822e-03, 1.255e-02, 8.736e-03, 5.612e-03],
[2.289e+01, 3.632e+01, 4.830e-03, 1.257e-02, 8.748e-03, 5.619e-03],
...,
[2.410e+01, 3.818e+01, 4.961e-02, 1.078e-01, 7.710e-02, 4.852e-02]
])
- lons: (pandas.DataFrame(series) OR numpy-1darray of len a)
longitudes of the locations we want to interpolate over.
Example extract:
>>> lons
23.6875
...
24.0264
Example extract:
>>> lons
23.687500
23.687500
...
24.026389
24.026389
- lats: (pandas.DataFrame(series) OR numpy-1darray of len a)
latitudes of the locations we want to interpolate over.
Example extract:
>>> lats
38.340278
...
37.651389
- lats: latitudes of the locations we want to interpolate over
(pandas.DataFrame(series) OR numpy-1darray of len a)
Example extract:
>>> lats
38.340278
38.340278
...
37.651389
37.651389
- method: (string), optional
{‘linear’, ‘nearest’, ‘cubic’}, optional'. Linear by default.
Output:
-------
- fullGroundMotionField: (numpy-ndarray of shape(a,n))
an array of longitudes and latitudes along with their interpolated values
Example extract:
>>> fullGroundMotionField
array([
[2.36e+01, 3.83e+01, 1.79e-01, 3.62e-01, 2.52e-01, 1.52e-01],
...,
[2.40e+01, 3.76e+01, 2.03e-02, 4.61e-02, 3.07e-02, 1.81e-02]
])
"""
- method: {‘linear’, ‘nearest’, ‘cubic’}, optional'. Linear by default.
Output:
------
- fullGroundMotionField: an array of longitudes and latitudes along with
their interpolated values (numpy-ndarray of shape(a,n))
Example extract:
>>> fullGroundMotionField
array([[2.36875000e+01, 3.83402778e+01, 1.79051641e-01, 3.62318501e-01,
2.52607434e-01, 1.52471069e-01],
...,
[2.40263889e+01, 3.76513889e+01, 2.03267876e-02, 4.61331339e-02,
3.06813504e-02, 1.81060137e-02]])
"""
# points_given: the input points for interpolation
points_given = np.vstack((groundMotionField[:,0], groundMotionField[:,1]))\
.transpose()
...