Commit ff68b063 authored by Tara Evaz Zadeh

Changed result file delimiter to semicolon

parent 780b670b
Pipeline #19548 passed with stage in 1 minute and 36 seconds
-geometry,,origin_id,,asset_id,,lon,,lat,,taxonomy,,gm_value,,PoEs,,PoOs,,num_buildings,,structural_no_damage,,structural_slight,,structural_moderate,,structural_extensive,,structural_complete
+geometry;origin_id;asset_id;lon;lat;taxonomy;gm_value;PoEs;PoOs;num_buildings;structural_no_damage;structural_slight;structural_moderate;structural_extensive;structural_complete
-"POLYGON ((23.63055555555556 38.0611111111111, 23.63333333333335 38.0611111111111, 23.63333333333335 38.0638888888889, 23.63055555555556 38.0638888888889, 23.63055555555556 38.0611111111111))",,cell_2423204507,,GDE_Com_940019,,23.631944444400002,,38.0625,,CR/LDUAL+CDH/H:1/15.0,,0.12688663132861894,,"[8.474877104881386e-06, 0.0, 0.0, 0.0]",,"[0.9999915251228951, 8.474877104881386e-06, -0.0, -0.0, 0.0]",,28.3,,28.299760160977932,,0.00023983902206814323,,-0.0,,-0.0,,0.0
+POLYGON ((23.63055555555556 38.0611111111111, 23.63333333333335 38.0611111111111, 23.63333333333335 38.0638888888889, 23.63055555555556 38.0638888888889, 23.63055555555556 38.0611111111111));cell_2423204507;GDE_Com_940019;23.631944444400002;38.0625;CR/LDUAL+CDH/H:1/15.0;0.12688663132861894;[8.474877104881386e-06, 0.0, 0.0, 0.0];[0.9999915251228951, 8.474877104881386e-06, -0.0, -0.0, 0.0];28.3;28.299760160977932;0.00023983902206814323;-0.0;-0.0;0.0
-"POLYGON ((23.63055555555556 38.0611111111111, 23.63333333333335 38.0611111111111, 23.63333333333335 38.0638888888889, 23.63055555555556 38.0638888888889, 23.63055555555556 38.0611111111111))",,cell_2423204507,,GDE_Com_940036,,23.631944444400002,,38.0625,,CR/LDUAL+CDM/H:1/8.0,,0.12688663132861894,,"[8.474877104881386e-06, 0.0, 0.0, 0.0]",,"[0.9999915251228951, 8.474877104881386e-06, -0.0, -0.0, 0.0]",,12.5,,12.499894064036189,,0.00010593596381101732,,-0.0,,-0.0,,0.0
+POLYGON ((23.63055555555556 38.0611111111111, 23.63333333333335 38.0611111111111, 23.63333333333335 38.0638888888889, 23.63055555555556 38.0638888888889, 23.63055555555556 38.0611111111111));cell_2423204507;GDE_Com_940036;23.631944444400002;38.0625;CR/LDUAL+CDM/H:1/8.0;0.12688663132861894;[8.474877104881386e-06, 0.0, 0.0, 0.0];[0.9999915251228951, 8.474877104881386e-06, -0.0, -0.0, 0.0];12.5;12.499894064036189;0.00010593596381101732;-0.0;-0.0;0.0
-"POLYGON ((23.63055555555556 38.0611111111111, 23.63333333333335 38.0611111111111, 23.63333333333335 38.0638888888889, 23.63055555555556 38.0638888888889, 23.63055555555556 38.0611111111111))",,cell_2423204507,,GDE_Res_1036159,,23.631944444400002,,38.0625,,CR/LDUAL+CDL/HBET:3-5/SOS/8.0,,0.251555124927799,,"[0.13663294980106422, 0.00180695837185944, 0.0001277873923228429, 1.821617605136137e-05]",,"[0.8633670501989358, 0.1348259914292048, 0.001679170979536597, 0.00010957121627148152, 1.821617605136137e-05]",,36.2,,31.253887217201477,,4.880700889737214,,0.060785989459224817,,0.003966478029027631,,0.0006594255730592816
+POLYGON ((23.63055555555556 38.0611111111111, 23.63333333333335 38.0611111111111, 23.63333333333335 38.0638888888889, 23.63055555555556 38.0638888888889, 23.63055555555556 38.0611111111111));cell_2423204507;GDE_Res_1036159;23.631944444400002;38.0625;CR/LDUAL+CDL/HBET:3-5/SOS/8.0;0.251555124927799;[0.13663294980106422, 0.00180695837185944, 0.0001277873923228429, 1.821617605136137e-05];[0.8633670501989358, 0.1348259914292048, 0.001679170979536597, 0.00010957121627148152, 1.821617605136137e-05];36.2;31.253887217201477;4.880700889737214;0.060785989459224817;0.003966478029027631;0.0006594255730592816
-"POLYGON ((23.63055555555556 38.0611111111111, 23.63333333333335 38.0611111111111, 23.63333333333335 38.0638888888889, 23.63055555555556 38.0638888888889, 23.63055555555556 38.0611111111111))",,cell_2423204507,,GDE_Ind_65405,,23.631944444400002,,38.0625,,MUR/LWAL+CDN/H:1,,0.251555124927799,,"[0.10411764976388133, 0.008593118352523686, 0.001504757955404525, 0.000383871840953854]",,"[0.8958823502361186, 0.09552453141135764, 0.007088360397119161, 0.001120886114450671, 0.000383871840953854]",,60.9,,54.55923512937962,,5.81744396295168,,0.4316811481845569,,0.06826196437004586,,0.02337779511408971
+POLYGON ((23.63055555555556 38.0611111111111, 23.63333333333335 38.0611111111111, 23.63333333333335 38.0638888888889, 23.63055555555556 38.0638888888889, 23.63055555555556 38.0611111111111));cell_2423204507;GDE_Ind_65405;23.631944444400002;38.0625;MUR/LWAL+CDN/H:1;0.251555124927799;[0.10411764976388133, 0.008593118352523686, 0.001504757955404525, 0.000383871840953854];[0.8958823502361186, 0.09552453141135764, 0.007088360397119161, 0.001120886114450671, 0.000383871840953854];60.9;54.55923512937962;5.81744396295168;0.4316811481845569;0.06826196437004586;0.02337779511408971
@@ -60,77 +60,49 @@ def get_exposure_per_tile(
         # key and value, respectively.
         ground_motion_type_index_map[gm_types[i]] = i + 2
     cell_ids = pd.read_csv(cell_id_source_filepath)
-    with open(result_filepath, "a+", newline="") as write_obj:
-        csv_writer = csv.writer(write_obj)
+    with open(result_filepath, mode="w") as write_obj:
         if exposure_type == "building":
-            title = [
+            field_names = [
                 "geometry",
-                "",
                 "origin_id",
-                "",
                 "respective_cell_id",
-                "",
                 "asset_id",
-                "",
                 "lon",
-                "",
                 "lat",
-                "",
                 "taxonomy",
-                "",
                 "gm_value",
-                "",
                 "PoEs",
-                "",
                 "PoOs",
-                "",
                 "num_buildings",
-                "",
                 "structural_no_damage",
-                "",
                 "structural_slight",
-                "",
                 "structural_moderate",
-                "",
                 "structural_extensive",
-                "",
                 "structural_complete",
             ]
             tile_id = "respective_cell_id"
         else:
-            title = [
+            field_names = [
                 "geometry",
-                "",
                 "origin_id",
-                "",
                 "asset_id",
-                "",
                 "lon",
-                "",
                 "lat",
-                "",
                 "taxonomy",
-                "",
                 "gm_value",
-                "",
                 "PoEs",
-                "",
                 "PoOs",
-                "",
                 "num_buildings",
-                "",
                 "structural_no_damage",
-                "",
                 "structural_slight",
-                "",
                 "structural_moderate",
-                "",
                 "structural_extensive",
-                "",
                 "structural_complete",
             ]
             tile_id = "origin_id"
-        csv_writer.writerow(title)
+        writer = csv.DictWriter(write_obj, fieldnames=field_names, delimiter=";")
+        writer.writeheader()
         for i in range(cell_ids.shape[0]):
            cell_id = cell_ids.loc[i][0]
...
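For context, the header-writing change above swaps a plain csv.writer plus a padded title list for csv.DictWriter with an explicit delimiter. A standalone sketch of that pattern, with a shortened field list and a hypothetical file name for illustration only (newline="" is added here as the csv module's documentation recommends):

import csv

# Assumption: a shortened field list just for illustration; the real code
# uses the full field_names list shown in the diff above.
field_names = ["geometry", "origin_id", "asset_id", "gm_value"]

with open("exposure_header_demo.csv", mode="w", newline="") as write_obj:
    writer = csv.DictWriter(write_obj, fieldnames=field_names, delimiter=";")
    writer.writeheader()  # writes: geometry;origin_id;asset_id;gm_value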
@@ -169,13 +169,10 @@ def damageCalculator_TileVersion(
     origin_id_to_geometry_map = losslib.origin_id_to_geometry(geometry_source, exposure_type)
     # Define number of columns that contain the data in the fragility function files.
     cls = range(1, 101)
-    # Just a trick to have multiple commas between each result element, since we do
-    # not want a single comma as the delimiter due to having geometries as a result element.
-    a = [0, 2, 4, 6, 8]
     # Looping through each line of the exposure file to run the computations asset by asset.
     with open(result_filepath, "a+", newline="") as write_obj:
-        csv_writer = csv.writer(write_obj)
+        csv_writer = csv.writer(write_obj, delimiter=";")
         for asset in range(exposure.shape[0]):
             taxonomy = taxonomies.iloc[asset]
             fragilityfunction_filename = taxonomy_to_fragility_map[taxonomy][0] + ".csv"
@@ -208,56 +205,36 @@ def damageCalculator_TileVersion(
             [PoEs, PoOs] = losslib.get_PoEs(fragility_function, gm_value)
             # Compute damage by assets
-            dmg_by_asset = [i * num_buildings for i in PoOs]
-            for h in a:
-                dmg_by_asset.insert(h, "")
+            damage_by_asset = [i * num_buildings for i in PoOs]
             # Append results
             if exposure_type == "building":
-                asset = [
+                result_asset_info = [
                     geometry,
-                    "",
                     origin_id,
-                    "",
                     respective_cell_id,
-                    "",
                     asset_id,
-                    "",
                     lon,
-                    "",
                     lat,
-                    "",
                     taxonomy,
-                    "",
                     gm_value,
-                    "",
                     PoEs,
-                    "",
                     PoOs,
-                    "",
                     num_buildings,
                 ]
             else:
-                asset = [
+                result_asset_info = [
                     geometry,
-                    "",
                     origin_id,
-                    "",
                     asset_id,
-                    "",
                     lon,
-                    "",
                     lat,
-                    "",
                     taxonomy,
-                    "",
                     gm_value,
-                    "",
                     PoEs,
-                    "",
                     PoOs,
-                    "",
                     num_buildings,
                 ]
-            asset.extend(dmg_by_asset)
-            csv_writer.writerow(asset)
+            for i in range(len(damage_by_asset)):
+                result_asset_info.append(damage_by_asset[i])
+            csv_writer.writerow(result_asset_info)

 print("Execution time of the script", (datetime.datetime.now() - startTime))