Commit

Format
tomalrussell committed Jul 25, 2023
1 parent b8502d0 commit 60fa4a7
Showing 6 changed files with 133 additions and 94 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -192,10 +192,10 @@ The `snail.core.intersections` module is built using `pybind11` with
>
> Copyright (c) 2020-23 Tom Russell and all [snail contributors](https://github.com/nismod/snail/graphs/contributors)
This library is developed by researchers in the [Oxford Programme for Sustainable
Infrastructure Systems](https://opsis.eci.ox.ac.uk/) at the University of Oxford,
funded by multiple research projects.

This research received funding from the FCDO Climate Compatible Growth Programme.
The views expressed here do not necessarily reflect the UK government's official
policies.
2 changes: 1 addition & 1 deletion src/snail/core/__init__.py
@@ -1,4 +1,4 @@
from .intersections import (get_cell_indices, split_linestring, split_polygon)
from .intersections import get_cell_indices, split_linestring, split_polygon

__all__ = [
"get_cell_indices",
40 changes: 23 additions & 17 deletions tutorials/01-data-preparation-ghana.ipynb
@@ -136,7 +136,7 @@
"outputs": [],
"source": [
"roads = gpd.read_file(\n",
" data_folder / \"ghana-latest-free.shp\" / \"gis_osm_roads_free_1.shp\"\n",
" data_folder / \"ghana-latest-free.shp\" / \"gis_osm_roads_free_1.shp\"\n",
")"
]
},
@@ -763,12 +763,7 @@
"metadata": {},
"outputs": [],
"source": [
"job_id = client.job_submit(\n",
" country_iso,\n",
" [\n",
" \"wri_aqueduct.version_2\"\n",
" ]\n",
")"
"job_id = client.job_submit(country_iso, [\"wri_aqueduct.version_2\"])"
]
},
{
@@ -800,10 +795,8 @@
" country_iso,\n",
" data_folder / \"flood_layer\",\n",
" # there may be other datasets available, but only download the following\n",
" dataset_filter=[\n",
" \"wri_aqueduct.version_2\"\n",
" ],\n",
" overwrite=True\n",
" dataset_filter=[\"wri_aqueduct.version_2\"],\n",
" overwrite=True,\n",
")"
]
},
@@ -862,13 +855,20 @@
"for root, dirs, files in os.walk(os.path.join(data_folder, \"flood_layer\")):\n",
" print(\"Looking in\", root)\n",
" for file_ in sorted(files):\n",
" if file_.endswith(\".tif\") and not file_.endswith(f\"-{country_iso}.tif\"):\n",
" if file_.endswith(\".tif\") and not file_.endswith(\n",
" f\"-{country_iso}.tif\"\n",
" ):\n",
" print(\"Found tif file\", file_)\n",
" stem = file_[:-4]\n",
" input_file = os.path.join(root, file_)\n",
"\n",
" # Clip file to bounds\n",
" clip_file = os.path.join(root, \"gha\", \"wri_aqueduct_version_2\", f\"{stem}-{country_iso}.tif\")\n",
" clip_file = os.path.join(\n",
" root,\n",
" \"gha\",\n",
" \"wri_aqueduct_version_2\",\n",
" f\"{stem}-{country_iso}.tif\",\n",
" )\n",
" try:\n",
" os.remove(clip_file)\n",
" except FileNotFoundError:\n",
@@ -887,7 +887,7 @@
" p = subprocess.run(cmd, capture_output=True)\n",
" print(p.stdout.decode(\"utf8\"))\n",
" print(p.stderr.decode(\"utf8\"))\n",
" print(clip_file)\n"
" print(clip_file)"
]
},
{
@@ -970,9 +970,13 @@
"\n",
"prepared = snail.intersection.prepare_linestrings(roads)\n",
"flood_intersections = snail.intersection.split_linestrings(prepared, grid)\n",
"flood_intersections = snail.intersection.apply_indices(flood_intersections, grid)\n",
"flood_intersections = snail.intersection.apply_indices(\n",
" flood_intersections, grid\n",
")\n",
"flood_data = snail.io.read_raster_band_data(flood_path)\n",
"flood_intersections[\"inunriver__epoch_historical__rcp_baseline__rp_100\"] = snail.intersection.get_raster_values_for_splits(\n",
"flood_intersections[\n",
" \"inunriver__epoch_historical__rcp_baseline__rp_100\"\n",
"] = snail.intersection.get_raster_values_for_splits(\n",
" flood_intersections, flood_data\n",
")"
]
@@ -1131,7 +1135,9 @@
}
],
"source": [
"exposed_1m = flood_intersections[flood_intersections.inunriver__epoch_historical__rcp_baseline__rp_100 >= 1]\n",
"exposed_1m = flood_intersections[\n",
" flood_intersections.inunriver__epoch_historical__rcp_baseline__rp_100 >= 1\n",
"]\n",
"exposed_length_km = exposed_1m.flood_length_m.sum() * 1e-3\n",
"exposed_length_km"
]
135 changes: 83 additions & 52 deletions tutorials/02-assess-damage-and-disruption.ipynb
@@ -254,7 +254,9 @@
}
],
"source": [
"hazard_paths = sorted(glob(str(data_folder / \"flood_layer/gha/wri_aqueduct_version_2/wri*.tif\")))\n",
"hazard_paths = sorted(\n",
" glob(str(data_folder / \"flood_layer/gha/wri_aqueduct_version_2/wri*.tif\"))\n",
")\n",
"hazard_files = pd.DataFrame({\"path\": hazard_paths})\n",
"hazard_files[\"key\"] = [Path(path).stem for path in hazard_paths]\n",
"hazard_files, grids = snail.io.extend_rasters_metadata(hazard_files)\n",
@@ -385,14 +387,20 @@
"flood_intersections = snail.intersection.split_linestrings(prepared, grid)\n",
"\n",
"# push into split_linestrings\n",
"flood_intersections = snail.intersection.apply_indices(flood_intersections, grid, index_i=\"i_0\", index_j=\"j_0\")\n",
"flood_intersections = snail.intersection.apply_indices(\n",
" flood_intersections, grid, index_i=\"i_0\", index_j=\"j_0\"\n",
")\n",
"\n",
"flood_intersections = snail.io.associate_raster_files(flood_intersections, hazard_files)\n",
"flood_intersections = snail.io.associate_raster_files(\n",
" flood_intersections, hazard_files\n",
")\n",
"\n",
"# calculate the length of each stretch of road\n",
"# don't include in snail wrapper top-level function\n",
"geod = Geod(ellps=\"WGS84\")\n",
"flood_intersections[\"length_m\"] = flood_intersections.geometry.apply(geod.geometry_length)"
"flood_intersections[\"length_m\"] = flood_intersections.geometry.apply(\n",
" geod.geometry_length\n",
")"
]
},
{
@@ -633,14 +641,20 @@
],
"source": [
"# find any max depth and filter > 0\n",
"all_intersections = flood_intersections[flood_intersections[data_cols].max(axis=1) > 0]\n",
"all_intersections = flood_intersections[\n",
" flood_intersections[data_cols].max(axis=1) > 0\n",
"]\n",
"# subset columns\n",
"all_intersections = all_intersections.drop(columns=[\n",
" 'osm_id', 'name', 'from_id', 'to_id', 'geometry', 'i_0', 'j_0'\n",
"])\n",
"all_intersections = all_intersections.drop(\n",
" columns=[\"osm_id\", \"name\", \"from_id\", \"to_id\", \"geometry\", \"i_0\", \"j_0\"]\n",
")\n",
"# melt and check again for depth\n",
"all_intersections = all_intersections.melt(id_vars=['id', 'split', 'road_type', 'length_m'], value_vars=data_cols, var_name='key', value_name='depth_m') \\\n",
" .query('depth_m > 0')\n",
"all_intersections = all_intersections.melt(\n",
" id_vars=[\"id\", \"split\", \"road_type\", \"length_m\"],\n",
" value_vars=data_cols,\n",
" var_name=\"key\",\n",
" value_name=\"depth_m\",\n",
").query(\"depth_m > 0\")\n",
"all_intersections"
]
},
@@ -651,12 +665,16 @@
"metadata": {},
"outputs": [],
"source": [
"river = all_intersections[all_intersections.key.str.contains('inunriver')]\n",
"coast = all_intersections[all_intersections.key.str.contains('inuncoast')]\n",
"river = all_intersections[all_intersections.key.str.contains(\"inunriver\")]\n",
"coast = all_intersections[all_intersections.key.str.contains(\"inuncoast\")]\n",
"\n",
"coast_keys = coast.key.str.extract(r'wri_aqueduct-version_2-(?P<hazard>\\w+)_(?P<rcp>[^_]+)_(?P<sub>[^_]+)_(?P<epoch>[^_]+)_rp(?P<rp>[^-]+)-gha')\n",
"coast_keys = coast.key.str.extract(\n",
" r\"wri_aqueduct-version_2-(?P<hazard>\\w+)_(?P<rcp>[^_]+)_(?P<sub>[^_]+)_(?P<epoch>[^_]+)_rp(?P<rp>[^-]+)-gha\"\n",
")\n",
"coast = pd.concat([coast, coast_keys], axis=1)\n",
"river_keys = river.key.str.extract(r'wri_aqueduct-version_2-(?P<hazard>\\w+)_(?P<rcp>[^_]+)_(?P<gcm>[^_]+)_(?P<epoch>[^_]+)_rp(?P<rp>[^-]+)-gha')\n",
"river_keys = river.key.str.extract(\n",
" r\"wri_aqueduct-version_2-(?P<hazard>\\w+)_(?P<rcp>[^_]+)_(?P<gcm>[^_]+)_(?P<epoch>[^_]+)_rp(?P<rp>[^-]+)-gha\"\n",
")\n",
"river = pd.concat([river, river_keys], axis=1)"
]
},
@@ -1303,7 +1321,8 @@
],
"source": [
"summary = (\n",
" river[river.depth_m >= 2.0].drop(columns=[\"id\", \"split\", \"road_type\", \"key\"])\n",
" river[river.depth_m >= 2.0]\n",
" .drop(columns=[\"id\", \"split\", \"road_type\", \"key\"])\n",
" .groupby([\"hazard\", \"rcp\", \"gcm\", \"epoch\", \"rp\"])\n",
" .sum()\n",
" .drop(columns=[\"depth_m\"])\n",
@@ -1496,7 +1515,7 @@
],
"source": [
"plot_data = summary.reset_index()\n",
"plot_data = plot_data[plot_data.epoch.isin(['1980', '2080'])]\n",
"plot_data = plot_data[plot_data.epoch.isin([\"1980\", \"2080\"])]\n",
"plot_data.rp = plot_data.rp.apply(lambda rp: int(rp.lstrip(\"0\")))\n",
"plot_data[\"probability\"] = 1 / plot_data.rp\n",
"plot_data"
@@ -1537,7 +1556,7 @@
" hue=\"gcm\",\n",
" col=\"rcp\",\n",
" kind=\"line\",\n",
" marker=\"o\"\n",
" marker=\"o\",\n",
")"
]
},
@@ -1583,10 +1602,20 @@
],
"source": [
"paved = snail.damages.PiecewiseLinearDamageCurve(\n",
" pd.DataFrame({\"intensity\": [0.0, 0.999999999, 1, 2, 3], \"damage\": [0.0, 0.0, 0.1, 0.3, 0.5]})\n",
" pd.DataFrame(\n",
" {\n",
" \"intensity\": [0.0, 0.999999999, 1, 2, 3],\n",
" \"damage\": [0.0, 0.0, 0.1, 0.3, 0.5],\n",
" }\n",
" )\n",
")\n",
"unpaved = snail.damages.PiecewiseLinearDamageCurve(\n",
" pd.DataFrame({\"intensity\": [0.0, 0.999999999, 1, 2, 3], \"damage\": [0.0, 0.0, 0.9, 1.0, 1.0]})\n",
" pd.DataFrame(\n",
" {\n",
" \"intensity\": [0.0, 0.999999999, 1, 2, 3],\n",
" \"damage\": [0.0, 0.0, 0.9, 1.0, 1.0],\n",
" }\n",
" )\n",
")\n",
"paved, unpaved"
]
@@ -1927,13 +1956,13 @@
"metadata": {},
"outputs": [],
"source": [
"paved_depths = river.loc[river.paved, 'depth_m']\n",
"paved_depths = river.loc[river.paved, \"depth_m\"]\n",
"paved_damage = paved.damage_fraction(paved_depths)\n",
"river.loc[river.paved, 'proportion_damaged'] = paved_damage\n",
"river.loc[river.paved, \"proportion_damaged\"] = paved_damage\n",
"\n",
"unpaved_depths = river.loc[~river.paved, 'depth_m']\n",
"unpaved_depths = river.loc[~river.paved, \"depth_m\"]\n",
"unpaved_damage = paved.damage_fraction(unpaved_depths)\n",
"river.loc[~river.paved, 'proportion_damaged'] = unpaved_damage"
"river.loc[~river.paved, \"proportion_damaged\"] = unpaved_damage"
]
},
{
@@ -2057,11 +2086,7 @@
}
],
"source": [
"river[\"damage_usd\"] = (\n",
" river.length_m\n",
" * river.cost_usd_per_km\n",
" * 1e-3\n",
")\n",
"river[\"damage_usd\"] = river.length_m * river.cost_usd_per_km * 1e-3\n",
"river.head(2)"
]
},
@@ -2211,11 +2236,20 @@
],
"source": [
"summary = (\n",
" river\n",
" .drop(columns=[\"id\", \"split\", \"length_m\", \"key\", \"depth_m\", \"paved\", \"kind\", \"cost_usd_per_km\", \"proportion_damaged\"])\n",
" .groupby(\n",
" [\"road_type\", \"hazard\", \"rcp\", \"gcm\", \"epoch\", \"rp\"]\n",
" river.drop(\n",
" columns=[\n",
" \"id\",\n",
" \"split\",\n",
" \"length_m\",\n",
" \"key\",\n",
" \"depth_m\",\n",
" \"paved\",\n",
" \"kind\",\n",
" \"cost_usd_per_km\",\n",
" \"proportion_damaged\",\n",
" ]\n",
" )\n",
" .groupby([\"road_type\", \"hazard\", \"rcp\", \"gcm\", \"epoch\", \"rp\"])\n",
" .sum()\n",
")\n",
"summary"
@@ -2248,9 +2282,7 @@
"metadata": {},
"outputs": [],
"source": [
"historical = river[\n",
" river.rcp == \"historical\"\n",
"][[\"id\", \"rp\", \"damage_usd\"]]"
"historical = river[river.rcp == \"historical\"][[\"id\", \"rp\", \"damage_usd\"]]"
]
},
{
@@ -2355,11 +2387,7 @@
}
],
"source": [
"historical = (\n",
" historical.groupby([\"id\", \"rp\"])\n",
" .sum()\n",
" .reset_index()\n",
")\n",
"historical = historical.groupby([\"id\", \"rp\"]).sum().reset_index()\n",
"historical = historical.pivot(index=\"id\", columns=\"rp\").replace(\n",
" float(\"NaN\"), 0\n",
")\n",
@@ -2479,14 +2507,17 @@
],
"source": [
"def calculate_ead(df):\n",
" rp_cols = sorted(list(df.columns), key=lambda col: 1/int(col.replace(\"rp\", \"\")))\n",
" rp_cols = sorted(\n",
" list(df.columns), key=lambda col: 1 / int(col.replace(\"rp\", \"\"))\n",
" )\n",
" rps = np.array([int(col.replace(\"rp\", \"\")) for col in rp_cols])\n",
" probabilities = 1 / rps\n",
" rp_damages = df[rp_cols]\n",
" return simpson(rp_damages, x=probabilities, axis=1)\n",
"\n",
"\n",
"historical[\"ead_usd\"] = calculate_ead(historical)\n",
"historical.head(2)\n"
"historical.head(2)"
]
},
{
Expand Down Expand Up @@ -2521,9 +2552,7 @@
"metadata": {},
"outputs": [],
"source": [
"future = river[\n",
" [\"id\", \"rp\", \"rcp\", \"gcm\", \"epoch\", \"damage_usd\"]\n",
"].copy()"
"future = river[[\"id\", \"rp\", \"rcp\", \"gcm\", \"epoch\", \"damage_usd\"]].copy()"
]
},
{
@@ -2606,9 +2635,7 @@
],
"source": [
"future = (\n",
" future.groupby([\"id\", \"rp\", \"rcp\", \"gcm\", \"epoch\"])\n",
" .sum()\n",
" .reset_index()\n",
" future.groupby([\"id\", \"rp\", \"rcp\", \"gcm\", \"epoch\"]).sum().reset_index()\n",
")\n",
"future.head(2)"
]
@@ -2745,9 +2772,9 @@
}
],
"source": [
"future = future.pivot(index=[\"id\", \"rcp\", \"gcm\", \"epoch\"], columns=\"rp\").replace(\n",
" float(\"NaN\"), 0\n",
")\n",
"future = future.pivot(\n",
" index=[\"id\", \"rcp\", \"gcm\", \"epoch\"], columns=\"rp\"\n",
").replace(float(\"NaN\"), 0)\n",
"future.columns = [f\"rp{int(rp)}\" for _, rp in future.columns]\n",
"future.head(2)"
]
@@ -3372,7 +3399,11 @@
],
"source": [
"sns.lmplot(\n",
" data=summary, col=\"rcp\", x=\"epoch\", y=\"ead_usd\", hue=\"gcm\", #fit_reg=False\n",
" data=summary,\n",
" col=\"rcp\",\n",
" x=\"epoch\",\n",
" y=\"ead_usd\",\n",
" hue=\"gcm\", # fit_reg=False\n",
")"
]
}
