From f118bb1adec31d12435ca6112b30f2ec00436b1e Mon Sep 17 00:00:00 2001 From: Joe Moorhouse Date: Sun, 17 Sep 2023 20:43:32 +0100 Subject: [PATCH] Onboard Iris wind model (#146) Signed-off-by: Joe Moorhouse --- README.md | 2 +- docs/handbook/onboarding.rst | 2 +- ...C global flood depth-damage functions.json | 193 ++++++++++++++++-- .../onboard.ipynb | 6 +- src/physrisk/data/hazard_data_provider.py | 22 +- src/physrisk/data/image_creator.py | 2 +- .../data/pregenerated_hazard_model.py | 20 +- .../data/static/hazard/inventory.json | 74 ++++++- src/physrisk/data/zarr_reader.py | 4 +- src/physrisk/hazard_models/core_hazards.py | 27 ++- src/physrisk/kernel/curve.py | 6 +- src/physrisk/kernel/exposure.py | 23 ++- src/physrisk/kernel/hazards.py | 2 +- .../kernel/vulnerability_matrix_provider.py | 46 +++-- src/physrisk/kernel/vulnerability_model.py | 43 +++- src/physrisk/requests.py | 11 +- .../real_estate_models.py | 39 +--- src/test/api/test_data_requests.py | 17 +- 18 files changed, 427 insertions(+), 112 deletions(-) diff --git a/README.md b/README.md index 0fd3f7dc..b34d90af 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ pip install physrisk-lib Access to hazard event data requires setting of environment variables specifying the S3 Bucket, for example: ``` -OSC_S3_BUCKET=redhat-osc-physical-landing-647521352890 +OSC_S3_BUCKET=physrisk-hazard-indicators OSC_S3_ACCESS_KEY=**********6I OSC_S3_SECRET_KEY=**********mS ``` diff --git a/docs/handbook/onboarding.rst b/docs/handbook/onboarding.rst index 65c1c39c..5da648af 100644 --- a/docs/handbook/onboarding.rst +++ b/docs/handbook/onboarding.rst @@ -55,7 +55,7 @@ Specific types of model also exist for common modelling approaches. In particula On-boarding a model based on a damage/disruption curve ------------------------------------------------------ -A vulnerability distribition can be inferred directly from a damage/disruption curve: +A vulnerability distribution can be inferred directly from a damage/disruption curve: .. 
image:: onboarding/disruption_curve.png :width: 500 diff --git a/notebooks/vulnerability_onboarding/EU JRC global flood depth-damage functions/EU JRC global flood depth-damage functions.json b/notebooks/vulnerability_onboarding/EU JRC global flood depth-damage functions/EU JRC global flood depth-damage functions.json index ed3fef45..490c7228 100644 --- a/notebooks/vulnerability_onboarding/EU JRC global flood depth-damage functions/EU JRC global flood depth-damage functions.json +++ b/notebooks/vulnerability_onboarding/EU JRC global flood depth-damage functions/EU JRC global flood depth-damage functions.json @@ -256,10 +256,30 @@ { "asset_type": "Buildings/Commercial", "event_type": "RiverineInundation", - "impact_mean": [], + "impact_mean": [ + 0.0, + 0.15, + 0.3, + 0.45, + 0.55, + 0.75, + 0.9, + 1.0, + 1.0 + ], "impact_std": [], "impact_type": "Damage", - "intensity": [], + "intensity": [ + 0.0, + 0.5, + 1.0, + 1.5, + 2.0, + 3.0, + 4.0, + 5.0, + 6.0 + ], "intensity_units": "m", "location": "Europe" }, @@ -267,15 +287,30 @@ "asset_type": "Buildings/Commercial", "event_type": "RiverineInundation", "impact_mean": [ - 0.0 - ], - "impact_std": [ - 0.0 + 0.0, + 0.018404908, + 0.239263804, + 0.374233129, + 0.466257669, + 0.552147239, + 0.687116564, + 0.82208589, + 0.90797546, + 1.0 ], + "impact_std": [], "impact_type": "Damage", "intensity": [ 0.0, - 0.01 + 0.01, + 0.5, + 1.0, + 1.5, + 2.0, + 3.0, + 4.0, + 5.0, + 6.0 ], "intensity_units": "m", "location": "North America" @@ -283,20 +318,80 @@ { "asset_type": "Buildings/Commercial", "event_type": "RiverineInundation", - "impact_mean": [], - "impact_std": [], + "impact_mean": [ + 0.0, + 0.611477587, + 0.839531094, + 0.923588457, + 0.991972477, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "impact_std": [ + 0.0, + 0.077023435, + 0.035924027, + 0.026876525, + 0.016055046, + 0.0, + 0.0, + 0.0, + 0.0 + ], "impact_type": "Damage", - "intensity": [], + "intensity": [ + 0.0, + 0.5, + 1.0, + 1.5, + 2.0, + 3.0, + 4.0, + 5.0, + 6.0 + ], "intensity_units": "m", "location": "South America" }, { "asset_type": "Buildings/Commercial", "event_type": "RiverineInundation", - "impact_mean": [], - "impact_std": [], + "impact_mean": [ + 0.0, + 0.376789623, + 0.537681619, + 0.659336684, + 0.762845232, + 0.883348656, + 0.941854895, + 0.98075938, + 1.0 + ], + "impact_std": [ + 0.0, + 0.240462285, + 0.240596279, + 0.243605156, + 0.250253511, + 0.171703625, + 0.11240992, + 0.052781064, + 0.0 + ], "impact_type": "Damage", - "intensity": [], + "intensity": [ + 0.0, + 0.5, + 1.0, + 1.5, + 2.0, + 3.0, + 4.0, + 5.0, + 6.0 + ], "intensity_units": "m", "location": "Asia" }, @@ -306,27 +401,87 @@ "impact_mean": [], "impact_std": [], "impact_type": "Damage", - "intensity": [], + "intensity": [ + 0.0, + 0.5, + 1.0, + 1.5, + 2.0, + 3.0, + 4.0, + 5.0, + 6.0 + ], "intensity_units": "m", "location": "Africa" }, { "asset_type": "Buildings/Commercial", "event_type": "RiverineInundation", - "impact_mean": [], - "impact_std": [], + "impact_mean": [ + 0.0, + 0.238953575, + 0.481199682, + 0.673795091, + 0.864583333, + 1.0, + 1.0, + 1.0, + 1.0 + ], + "impact_std": [ + 0.0, + 0.142878204, + 0.204113206, + 0.190903594, + 0.178000078, + 0.0, + 0.0, + 0.0, + 0.0 + ], "impact_type": "Damage", - "intensity": [], + "intensity": [ + 0.0, + 0.5, + 1.0, + 1.5, + 2.0, + 3.0, + 4.0, + 5.0, + 6.0 + ], "intensity_units": "m", "location": "Oceania" }, { "asset_type": "Buildings/Commercial", "event_type": "RiverineInundation", - "impact_mean": [], + "impact_mean": [ + 0.0, + 0.323296918, + 0.506529105, + 0.63459558, + 
0.744309656, + 0.864093044, + 0.932788157, + 0.977746968, + 1.0 + ], "impact_std": [], "impact_type": "Damage", - "intensity": [], + "intensity": [ + 0.0, + 0.5, + 1.0, + 1.5, + 2.0, + 3.0, + 4.0, + 5.0, + 6.0 + ], "intensity_units": "m", "location": "Global" }, diff --git a/notebooks/vulnerability_onboarding/EU JRC global flood depth-damage functions/onboard.ipynb b/notebooks/vulnerability_onboarding/EU JRC global flood depth-damage functions/onboard.ipynb index 23123036..0e5ab42e 100644 --- a/notebooks/vulnerability_onboarding/EU JRC global flood depth-damage functions/onboard.ipynb +++ b/notebooks/vulnerability_onboarding/EU JRC global flood depth-damage functions/onboard.ipynb @@ -18,7 +18,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 8, "metadata": {}, "outputs": [], "source": [ @@ -31,7 +31,7 @@ "# consistent with physrisk continent definition\n", "location_mappings = { \"Europe\": \"Europe\", \"North America\": \"North America\", \"Central & South America\": \"South America\", \"Asia\": \"Asia\", \"Africa\": \"Africa\", \"Oceania\": \"Oceania\", \"Global\": \"Global\" }\n", "type_mappings = { \"Residential buildings\": \"Buildings/Residential\",\n", - " \"Commerical buildings\": \"Buildings/Commercial\",\n", + " \"Commercial buildings\": \"Buildings/Commercial\",\n", " \"Industrial buildings\": \"Buildings/Industrial\"\n", "}\n", "\n", @@ -89,7 +89,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.10" + "version": "3.8.13" }, "orig_nbformat": 4 }, diff --git a/src/physrisk/data/hazard_data_provider.py b/src/physrisk/data/hazard_data_provider.py index fdc98622..4bcdae96 100644 --- a/src/physrisk/data/hazard_data_provider.py +++ b/src/physrisk/data/hazard_data_provider.py @@ -67,7 +67,14 @@ def __init__( super().__init__(get_source_path, store=store, zarr_reader=zarr_reader, interpolation=interpolation) def get_intensity_curves( - self, longitudes: List[float], latitudes: List[float], *, indicator_id: str, scenario: str, year: int + self, + longitudes: List[float], + latitudes: List[float], + *, + indicator_id: str, + scenario: str, + year: int, + hint: Optional[HazardDataHint] = None, ): """Get intensity curve for each latitude and longitude coordinate pair. @@ -83,7 +90,7 @@ def get_intensity_curves( return_periods: return periods in years. """ - path = self._get_source_path(indicator_id=indicator_id, scenario=scenario, year=year) + path = self._get_source_path(indicator_id=indicator_id, scenario=scenario, year=year, hint=hint) curves, return_periods = self._reader.get_curves( path, longitudes, latitudes, self._interpolation ) # type: ignore @@ -104,7 +111,14 @@ def __init__( super().__init__(get_source_path, store=store, zarr_reader=zarr_reader, interpolation=interpolation) def get_parameters( - self, longitudes: List[float], latitudes: List[float], *, indicator_id: str, scenario: str, year: int + self, + longitudes: List[float], + latitudes: List[float], + *, + indicator_id: str, + scenario: str, + year: int, + hint: Optional[HazardDataHint] = None, ): """Get hazard parameters for each latitude and longitude coordinate pair. 
@@ -119,6 +133,6 @@ def get_parameters(
             parameters: numpy array of parameters
         """

-        path = self._get_source_path(indicator_id=indicator_id, scenario=scenario, year=year)
+        path = self._get_source_path(indicator_id=indicator_id, scenario=scenario, year=year, hint=hint)
         parameters, _ = self._reader.get_curves(path, longitudes, latitudes, self._interpolation)
         return parameters[:, 0]
diff --git a/src/physrisk/data/image_creator.py b/src/physrisk/data/image_creator.py
index daa0d19d..183b8c76 100644
--- a/src/physrisk/data/image_creator.py
+++ b/src/physrisk/data/image_creator.py
@@ -95,7 +95,7 @@ def _to_image(
         # (from zarr 2.16.0 we can also use block indexing)
         data = data[index, 256 * tile.y : 256 * (tile.y + 1), 256 * tile.x : 256 * (tile.x + 1)]

-        if any(dim > 1500 for dim in data.shape):
-            raise Exception("dimension too large (over 1500).")
+        if any(dim > 4000 for dim in data.shape):
+            raise Exception("dimension too large (over 4000).")

         map_defn = colormap_provider.colormap(colormap)
diff --git a/src/physrisk/data/pregenerated_hazard_model.py b/src/physrisk/data/pregenerated_hazard_model.py
index 50121682..d2963820 100644
--- a/src/physrisk/data/pregenerated_hazard_model.py
+++ b/src/physrisk/data/pregenerated_hazard_model.py
@@ -1,6 +1,8 @@
 from collections import defaultdict
 from typing import Dict, List, Mapping, MutableMapping, Optional, cast

+import numpy as np
+
 from physrisk.data.zarr_reader import ZarrReader
 from physrisk.kernel.hazards import Hazard, HazardKind
@@ -40,24 +42,26 @@ def get_hazard_events(self, requests: List[HazardDataRequest]) -> Mapping[Hazard
         responses: MutableMapping[HazardDataRequest, HazardDataResponse] = {}
         for key in batches.keys():
             batch: List[HazardDataRequest] = batches[key]
-            event_type, indicator_id, scenario, year = (
+            hazard_type, indicator_id, scenario, year, hint = (
                 batch[0].hazard_type,
                 batch[0].indicator_id,
                 batch[0].scenario,
                 batch[0].year,
+                batch[0].hint,
             )
             longitudes = [req.longitude for req in batch]
             latitudes = [req.latitude for req in batch]
-            if event_type.kind == HazardKind.acute:  # type: ignore
-                intensities, return_periods = self.acute_hazard_data_providers[event_type].get_intensity_curves(
-                    longitudes, latitudes, indicator_id=indicator_id, scenario=scenario, year=year
+            if hazard_type.kind == HazardKind.acute:  # type: ignore
+                intensities, return_periods = self.acute_hazard_data_providers[hazard_type].get_intensity_curves(
+                    longitudes, latitudes, indicator_id=indicator_id, scenario=scenario, year=year, hint=hint
                 )
                 for i, req in enumerate(batch):
-                    responses[req] = HazardEventDataResponse(return_periods, intensities[i, :])
-            elif event_type.kind == HazardKind.chronic:  # type: ignore
-                parameters = self.chronic_hazard_data_providers[event_type].get_parameters(
-                    longitudes, latitudes, indicator_id=indicator_id, scenario=scenario, year=year
+                    valid = ~np.isnan(intensities[i, :])
+                    responses[req] = HazardEventDataResponse(return_periods[valid], intensities[i, :][valid])
+            elif hazard_type.kind == HazardKind.chronic:  # type: ignore
+                parameters = self.chronic_hazard_data_providers[hazard_type].get_parameters(
+                    longitudes, latitudes, indicator_id=indicator_id, scenario=scenario, year=year, hint=hint
                 )
                 for i, req in enumerate(batch):
diff --git a/src/physrisk/data/static/hazard/inventory.json b/src/physrisk/data/static/hazard/inventory.json
index 3560351f..434fefc8 100644
--- a/src/physrisk/data/static/hazard/inventory.json
+++ b/src/physrisk/data/static/hazard/inventory.json
@@ -1269,8 +1269,8 @@
       "hazard_type": "Wind",
       "group_id": "jupiter_osc",
       "path": 
"wind/jupiter/v1/max_1min_{scenario}_{year}", - "indicator_id": "max/1min", - "indicator_model_id": null, + "indicator_id": "max_speed", + "indicator_model_id": "1min", "indicator_model_gcm": "unknown", "params": {}, "display_name": "Max 1 minute sustained wind speed (Jupiter)", @@ -1588,6 +1588,76 @@ } ], "units": "days/year" + }, + { + "hazard_type": "Wind", + "group_id": "iris_osc", + "path": "wind/iris/v1/max_speed_{scenario}_{year}", + "indicator_id": "max_speed", + "indicator_model_id": null, + "indicator_model_gcm": "combined", + "params": {}, + "display_name": "Max wind speed (IRIS)", + "display_groups": [], + "description": "Assessing tropical cyclone risk on a global scale given the infrequency of landfalling tropical cyclones and the short period of reliable observations remains a challenge. Synthetic tropical cyclone datasets can help overcome these problems. Here we present a new global dataset created by IRIS, the ImpeRIal college Storm Model. IRIS is novel because, unlike other synthetic TC models, it only simulates the decay from the point of lifetime maximum intensity. This minimises the bias in the dataset. It takes input from 42 years of observed tropical cyclones and creates a 10,000 year synthetic dataset which is then validated against the observations. IRIS captures important statistical characteristics of the observed data. The return periods of the landfall maximum wind speed (1 minute sustained in m/s) are realistic globally. Climate model projections are used to adjust the life-time maximum intensity. \nhttps://www.imperial.ac.uk/grantham/research/climate-science/modelling-tropical-cyclones/\n", + "map": { + "colormap": { + "min_index": 1, + "min_value": 0.0, + "max_index": 255, + "max_value": 120.0, + "name": "heating", + "nodata_index": 0, + "units": "m/s" + }, + "path": "wind/iris/v1/max_speed_{scenario}_{year}_map", + "bounds": [ + [ + -180.0, + 60.0 + ], + [ + 180.0, + 60.0 + ], + [ + 180.0, + -60.0 + ], + [ + -180.0, + -60.0 + ] + ], + "source": "map_array" + }, + "scenarios": [ + { + "id": "historical", + "years": [ + 2010 + ] + }, + { + "id": "ssp119", + "years": [ + 2050 + ] + }, + { + "id": "ssp245", + "years": [ + 2050 + ] + }, + { + "id": "ssp585", + "years": [ + 2050 + ] + } + ], + "units": "m/s" } ] } \ No newline at end of file diff --git a/src/physrisk/data/zarr_reader.py b/src/physrisk/data/zarr_reader.py index b8125a02..622c6ab9 100644 --- a/src/physrisk/data/zarr_reader.py +++ b/src/physrisk/data/zarr_reader.py @@ -60,7 +60,7 @@ def all_data(self, set_id: str): def create_s3_zarr_store(cls, get_env: Callable[[str, Optional[str]], str] = get_env): access_key = get_env(cls.__access_key, None) secret_key = get_env(cls.__secret_key, None) - s3_bucket = get_env(cls.__S3_bucket, "redhat-osc-physical-landing-647521352890") + s3_bucket = get_env(cls.__S3_bucket, "physrisk-hazard-indicators") zarr_path = get_env(cls.__zarr_path, "hazard/hazard.zarr") s3 = s3fs.S3FileSystem(anon=False, key=access_key, secret=secret_key) @@ -92,7 +92,7 @@ def get_curves(self, set_id, longitudes, latitudes, interpolation="floor"): path = self._path_provider(set_id) if self._path_provider is not None else set_id z = self._root[path] # e.g. 
inundation/wri/v2/ - # OSC-specific attributes contain tranform and return periods + # OSC-specific attributes contain transform and return periods t = z.attrs["transform_mat3x3"] # type: ignore transform = Affine(t[0], t[1], t[2], t[3], t[4], t[5]) diff --git a/src/physrisk/hazard_models/core_hazards.py b/src/physrisk/hazard_models/core_hazards.py index 55a26ef1..72697b90 100644 --- a/src/physrisk/hazard_models/core_hazards.py +++ b/src/physrisk/hazard_models/core_hazards.py @@ -14,6 +14,9 @@ def __init__(self, resources: Iterable[HazardResource]): def first(self): return next(r for r in self.resources) + def match(self, hint: HazardDataHint): + return next(r for r in self.resources if r.path == hint.path) + def with_model_gcm(self, gcm): return ResourceSubset(r for r in self.resources if r.indicator_model_gcm == gcm) @@ -26,7 +29,9 @@ class ResourceSelector(Protocol): defines the rule for selecting a resource from all matches. The selection rule depends on scenario and year.""" - def __call__(self, *, candidates: ResourceSubset, scenario: str, year: int) -> HazardResource: + def __call__( + self, *, candidates: ResourceSubset, scenario: str, year: int, hint: Optional[HazardDataHint] = None + ) -> HazardResource: ... @@ -64,9 +69,11 @@ def _get_source_path(*, indicator_id: str, scenario: str, year: int, hint: Optio self._no_selector, ) resources = self._inventory.resources_by_type_id[(hazard_type, indicator_id)] - resource = selector(candidates=ResourceSubset(resources), scenario=scenario, year=year) + candidates = ResourceSubset(resources) if hint is not None: - return hint.path + resource = candidates.match(hint) + else: + resource = selector(candidates=candidates, scenario=scenario, year=year, hint=hint) proxy_scenario = cmip6_scenario_to_rcp(scenario) if resource.scenarios[0].id.startswith("rcp") else scenario if scenario == "historical": year = next(y for y in next(s for s in resource.scenarios if s.id == "historical").years) @@ -75,7 +82,7 @@ def _get_source_path(*, indicator_id: str, scenario: str, year: int, hint: Optio return _get_source_path @staticmethod - def _no_selector(candidates: ResourceSubset, scenario: str, year: int): + def _no_selector(candidates: ResourceSubset, scenario: str, year: int, hint: Optional[HazardDataHint] = None): return candidates.first() @@ -92,11 +99,15 @@ def resources_with(self, *, hazard_type: type, indicator_id: str): return ResourceSubset(self._inventory.resources_by_type_id[(hazard_type.__name__, indicator_id)]) @staticmethod - def _select_chronic_heat(candidates: ResourceSubset, scenario: str, year: int): + def _select_chronic_heat( + candidates: ResourceSubset, scenario: str, year: int, hint: Optional[HazardDataHint] = None + ): return candidates.with_model_gcm("ACCESS-CM2").first() @staticmethod - def _select_coastal_inundation(candidates: ResourceSubset, scenario: str, year: int): + def _select_coastal_inundation( + candidates: ResourceSubset, scenario: str, year: int, hint: Optional[HazardDataHint] = None + ): return ( candidates.with_model_id("nosub").first() if scenario == "historical" @@ -104,7 +115,9 @@ def _select_coastal_inundation(candidates: ResourceSubset, scenario: str, year: ) @staticmethod - def _select_riverine_inundation(candidates: ResourceSubset, scenario: str, year: int): + def _select_riverine_inundation( + candidates: ResourceSubset, scenario: str, year: int, hint: Optional[HazardDataHint] = None + ): return ( candidates.with_model_gcm("historical").first() if scenario == "historical" diff --git 
a/src/physrisk/kernel/curve.py b/src/physrisk/kernel/curve.py index 14b0691f..e9e5b9ca 100644 --- a/src/physrisk/kernel/curve.py +++ b/src/physrisk/kernel/curve.py @@ -150,15 +150,15 @@ def get_value(self, prob): return np.interp(prob, self.probs[::-1], self.values[::-1]) def get_probability_bins(self): - r"""Convert from exceedance (cumulative) probability to bins of constant probability. + r"""Convert from exceedance (cumulative) probability to bins of constant probability density. This is equivalent to the assumption of linear interpolation of exceedance points. .. math:: p^\text{b}_i = p^\text{e}_{i + 1} - p^\text{e}_i Returns: - value_bins (ndarray), probs: the contiguous bin lower and upper values, probabilities of each bin - If value_bins is of lenth n then ther are n-1 bins and n-1 probabilities + value_bins (ndarray), probs: The contiguous bin lower and upper values, probabilities of each bin. + If value_bins is of length n then there are n-1 bins and n-1 probabilities """ # value bins are contiguous diff --git a/src/physrisk/kernel/exposure.py b/src/physrisk/kernel/exposure.py index 90e2af81..faacfc1e 100644 --- a/src/physrisk/kernel/exposure.py +++ b/src/physrisk/kernel/exposure.py @@ -8,7 +8,13 @@ import numpy as np from physrisk.kernel.assets import Asset -from physrisk.kernel.hazard_model import HazardDataRequest, HazardDataResponse, HazardModel, HazardParameterDataResponse +from physrisk.kernel.hazard_model import ( + HazardDataRequest, + HazardDataResponse, + HazardEventDataResponse, + HazardModel, + HazardParameterDataResponse, +) from physrisk.kernel.hazards import ChronicHeat, CombinedInundation, Drought, Fire, Hail, Wind from physrisk.kernel.impact import _request_consolidated from physrisk.kernel.vulnerability_model import DataRequester @@ -66,14 +72,19 @@ def get_data_requests(self, asset: Asset, *, scenario: str, year: int) -> Iterab def get_exposures(self, asset: Asset, data_responses: Iterable[HazardDataResponse]): result: Dict[type, Tuple[Category, float]] = {} for (k, v), resp in zip(self.exposure_bins.items(), data_responses): - assert isinstance(resp, HazardParameterDataResponse) # should all be parameters + if isinstance(resp, HazardParameterDataResponse): + param = resp.parameter + elif isinstance(resp, HazardEventDataResponse): + if len(resp.intensities) > 1: + raise ValueError("single-value curve expected") + param = resp.intensities[0] (hazard_type, _) = k (lower_bounds, categories) = v - if math.isnan(resp.parameter): - result[hazard_type] = (Category.NODATA, float(resp.parameter)) + if math.isnan(param): + result[hazard_type] = (Category.NODATA, float(param)) else: - index = np.searchsorted(lower_bounds, resp.parameter, side="right") - 1 - result[hazard_type] = (categories[index], float(resp.parameter)) + index = np.searchsorted(lower_bounds, param, side="right") - 1 + result[hazard_type] = (categories[index], float(param)) return result def get_exposure_bins(self): diff --git a/src/physrisk/kernel/hazards.py b/src/physrisk/kernel/hazards.py index cfd1f1cc..2bdb350b 100644 --- a/src/physrisk/kernel/hazards.py +++ b/src/physrisk/kernel/hazards.py @@ -65,7 +65,7 @@ class RiverineInundation(Inundation): class Wind(Hazard): - kind = HazardKind.chronic + kind = HazardKind.acute pass diff --git a/src/physrisk/kernel/vulnerability_matrix_provider.py b/src/physrisk/kernel/vulnerability_matrix_provider.py index 8b339763..60317b97 100644 --- a/src/physrisk/kernel/vulnerability_matrix_provider.py +++ b/src/physrisk/kernel/vulnerability_matrix_provider.py @@ -1,4 
+1,4 @@ -from typing import List, Union +from typing import Callable, Sequence import numpy as np @@ -10,37 +10,49 @@ def __init__(self, mean, std_dev): class VulnMatrixProvider: - """Provides the impact on an asset, an impact being either a fractional damage or disruption, - that occurs as a result of a hazard event of a given intensity.""" - - __slots__ = ["intensities", "impact_cdfs"] + __slots__ = ["intensity_bin_centres", "impact_cdfs"] def __init__( self, - intensities: Union[List[float], np.ndarray], - impact_cdfs=None, + intensity_bin_centres: np.ndarray, + impact_cdfs: Sequence[Callable[[np.ndarray], np.ndarray]], ): - """Create a new asset event distribution. + """Via to_prob_matrix method, provides the probability that the impact on an asset falls within + a specified bin, an impact being either a fractional damage or disruption that occurs as a result of a + hazard event of a given intensity. Args: - intensities: possible intensities of hazard event. - impacts: fractional damage or fractional average disruption occurring as a result - of hazard event of given intensity. - distributions: provides the pdf and optiononally cdf of the impact distribution + intensity_bin_centres (Iterable[float]): The centres of the intensity bins. + impact_cdfs (Iterable[Callable[[float], float]]): For each intensity bin centre, provides a function + that takes parameter, d, and returns the probability that the impact is less than d. This is used to + construct the probability matrix. """ - if not np.all(np.diff(intensities) >= 0): + if not np.all(np.diff(intensity_bin_centres) >= 0): raise ValueError("intensities must be sorted and increasing") - self.intensities = np.array(intensities) + if len(intensity_bin_centres) != len(impact_cdfs): + raise ValueError("one impact_cdf expected for each intensity_bin_centre") + + self.intensity_bin_centres = np.array(intensity_bin_centres) self.impact_cdfs = impact_cdfs - def to_prob_matrix(self, impact_bin_edges): + def to_prob_matrix(self, impact_bin_edges: np.ndarray) -> np.ndarray: + """Return probability matrix, p with dimension (number intensity bins, number impact bins) + where p[i, j] is the conditional probability that given the intensity falls in bin i, the impact is + in bin j. + + Args: + impact_bin_edges (Iterable[float]): Bin edges of the impact bins. + + Returns: + np.ndarray: Probability matrix. 
+ """ # construct a cdf probability matrix at each intensity point # the probability is the prob that the impact is greater than the specified - cdf_matrix = np.empty([len(self.intensities), len(impact_bin_edges)]) + cdf_matrix = np.empty([len(self.intensity_bin_centres), len(impact_bin_edges)]) - for i, _ in enumerate(self.intensities): + for i, _ in enumerate(self.intensity_bin_centres): cdf_matrix[i, :] = self.impact_cdfs[i](impact_bin_edges) # type: ignore prob_matrix = cdf_matrix[:, 1:] - cdf_matrix[:, :-1] diff --git a/src/physrisk/kernel/vulnerability_model.py b/src/physrisk/kernel/vulnerability_model.py index e524f8a7..32b4ec4d 100644 --- a/src/physrisk/kernel/vulnerability_model.py +++ b/src/physrisk/kernel/vulnerability_model.py @@ -4,11 +4,12 @@ from typing import Iterable, List, Protocol, Tuple, Union import numpy as np +from scipy import stats import physrisk.data.static.vulnerability from physrisk.kernel.impact_distrib import ImpactDistrib -from ..api.v1.common import VulnerabilityCurves +from ..api.v1.common import VulnerabilityCurve, VulnerabilityCurves from .assets import Asset from .curve import ExceedanceCurve from .hazard_event_distrib import HazardEventDistrib @@ -164,11 +165,47 @@ def get_distributions( return vul, event @abstractmethod - def get_impact_curve(self, intensities, asset) -> VulnMatrixProvider: - """Get the impact curves for the intensities specified.""" + def get_impact_curve(self, intensity_bin_centres: np.ndarray, asset: Asset) -> VulnMatrixProvider: + """Defines a VulnMatrixProvider. The VulnMatrixProvider returns probabilities of specified impact bins + for the intensity bin centres.""" ... +class CurveBasedVulnerabilityModel(VulnerabilityModel): + def get_impact_curve(self, intensity_bin_centres: np.ndarray, asset: Asset) -> VulnMatrixProvider: + curve: VulnerabilityCurve = self.get_vulnerability_curve(asset) + impact_means = np.interp(intensity_bin_centres, curve.intensity, curve.impact_mean) + impact_stddevs = np.interp(intensity_bin_centres, curve.intensity, curve.impact_std) + return VulnMatrixProvider( + intensity_bin_centres, + impact_cdfs=[checked_beta_distrib(m, s) for m, s in zip(impact_means, impact_stddevs)], + ) + + @abstractmethod + def get_vulnerability_curve(self, asset: Asset) -> VulnerabilityCurve: + ... + + +def delta_cdf(y): + return lambda x: np.where(x >= y, 1, 0) + + +def checked_beta_distrib(mean, std): + if mean == 0: + return delta_cdf(0) + if mean == 1.0: + return delta_cdf(1) + else: + return beta_distrib(mean, std) + + +def beta_distrib(mean, std): + cv = std / mean + a = (1 - mean) / (cv * cv) - mean + b = a * (1 - mean) / mean + return lambda x, a=a, b=b: stats.beta.cdf(x, a, b) + + class DeterministicVulnerabilityModel(VulnerabilityModelAcuteBase): """A vulnerability model that requires only specification of a damage/disruption curve. 
This simple model contains no uncertainty around damage/disruption.""" diff --git a/src/physrisk/requests.py b/src/physrisk/requests.py index c846c687..2717202a 100644 --- a/src/physrisk/requests.py +++ b/src/physrisk/requests.py @@ -66,7 +66,7 @@ def __init__( def get(self, *, request_id, request_dict): if request_id == "get_hazard_data": request = HazardDataRequest(**request_dict) - return json.dumps(_get_hazard_data(request, hazard_model=self.hazard_model).dict()) + return json.dumps(_get_hazard_data(request, hazard_model=self.hazard_model).dict()) # , allow_nan=False) elif request_id == "get_hazard_data_availability": request = HazardAvailabilityRequest(**request_dict) return json.dumps(_get_hazard_data_availability(request, self.inventory, self.colormaps).dict()) @@ -91,9 +91,12 @@ def get_image(self, *, request_dict): if not _read_permitted(request.group_ids, inventory.resources[request.resource]): raise PermissionError() model = inventory.resources[request.resource] - path = str(PosixPath(model.path).with_name(model.map.path)).format( - scenario=request.scenarioId, year=request.year - ) + len(PosixPath(model.map.path).parts) + path = ( + str(PosixPath(model.path).with_name(model.map.path)) + if len(PosixPath(model.map.path).parts) == 1 + else model.map.path + ).format(scenario=request.scenarioId, year=request.year) colormap = request.colormap if request.colormap is not None else model.map.colormap.name creator = ImageCreator(zarr_reader) # store=ImageCreator.test_store(path)) return creator.convert( diff --git a/src/physrisk/vulnerability_models/real_estate_models.py b/src/physrisk/vulnerability_models/real_estate_models.py index ee61c48e..7e839e31 100644 --- a/src/physrisk/vulnerability_models/real_estate_models.py +++ b/src/physrisk/vulnerability_models/real_estate_models.py @@ -2,14 +2,14 @@ from typing import Dict, Tuple import numpy as np -import scipy.stats as stats from physrisk.api.v1.common import VulnerabilityCurve, VulnerabilityCurves -from physrisk.kernel.assets import RealEstateAsset -from physrisk.kernel.vulnerability_model import VulnerabilityModel, VulnMatrixProvider +from physrisk.kernel.assets import Asset, RealEstateAsset +from physrisk.kernel.vulnerability_matrix_provider import VulnMatrixProvider +from physrisk.kernel.vulnerability_model import VulnerabilityModel from ..kernel.hazards import CoastalInundation, RiverineInundation -from ..kernel.vulnerability_model import applies_to_events, get_vulnerability_curves_from_resource +from ..kernel.vulnerability_model import applies_to_events, checked_beta_distrib, get_vulnerability_curves_from_resource class RealEstateInundationModel(VulnerabilityModel): @@ -46,9 +46,9 @@ def __init__( # global circulation parameter 'model' is a hint; can be overriden by hazard model super().__init__(indicator_id=indicator_id, hazard_type=event_type, impact_bin_edges=impact_bin_edges) - def get_impact_curve(self, intensities, asset: RealEstateAsset): + def get_impact_curve(self, intensity_bin_centres: np.ndarray, asset: Asset): # we interpolate the mean and standard deviation and use this to construct distributions - # assert asset is RealEstateAsset + assert isinstance(asset, RealEstateAsset) key = (asset.location, asset.type) curve = self.vulnerability_curves[key] @@ -59,11 +59,12 @@ def get_impact_curve(self, intensities, asset: RealEstateAsset): self.proxy_curves[key] = self.closest_curve_of_type(curve, asset) std_curve = self.proxy_curves[key] - impact_means = np.interp(intensities, curve.intensity, curve.impact_mean) - 
impact_stddevs = np.interp(intensities, std_curve.intensity, std_curve.impact_std) + impact_means = np.interp(intensity_bin_centres, curve.intensity, curve.impact_mean) + impact_stddevs = np.interp(intensity_bin_centres, std_curve.intensity, std_curve.impact_std) return VulnMatrixProvider( - intensities, impact_cdfs=[checked_beta_distrib(m, s) for m, s in zip(impact_means, impact_stddevs)] + intensity_bin_centres, + impact_cdfs=[checked_beta_distrib(m, s) for m, s in zip(impact_means, impact_stddevs)], ) def closest_curve_of_type(self, curve: VulnerabilityCurve, asset: RealEstateAsset): @@ -77,26 +78,6 @@ def sum_square_diff(self, curve1: VulnerabilityCurve, curve2: VulnerabilityCurve return np.sum((curve1.impact_mean - np.interp(curve1.intensity, curve2.intensity, curve2.impact_mean)) ** 2) -def delta_cdf(y): - return lambda x: np.where(x >= y, 1, 0) - - -def checked_beta_distrib(mean, std): - if mean == 0: - return delta_cdf(0) - if mean == 1.0: - return delta_cdf(1) - else: - return beta_distrib(mean, std) - - -def beta_distrib(mean, std): - cv = std / mean - a = (1 - mean) / (cv * cv) - mean - b = a * (1 - mean) / mean - return lambda x, a=a, b=b: stats.beta.cdf(x, a, b) - - @applies_to_events([CoastalInundation]) class RealEstateCoastalInundationModel(RealEstateInundationModel): def __init__( diff --git a/src/test/api/test_data_requests.py b/src/test/api/test_data_requests.py index 67f96378..7aee8fab 100644 --- a/src/test/api/test_data_requests.py +++ b/src/test/api/test_data_requests.py @@ -13,6 +13,7 @@ from physrisk import requests from physrisk.container import Container +from physrisk.data.hazard_data_provider import HazardDataHint from physrisk.data.inventory import EmbeddedInventory from physrisk.data.pregenerated_hazard_model import ZarrHazardModel from physrisk.data.zarr_reader import ZarrReader @@ -49,9 +50,17 @@ def test_generic_source_path(self): result_flood_hist = source_paths[RiverineInundation]( indicator_id="flood_depth", scenario="historical", year=2080 ) + result_heat_hint = source_paths[ChronicHeat]( + indicator_id="mean_degree_days/above/32c", + scenario="rcp8p5", + year=2050, + hint=HazardDataHint(path="chronic_heat/osc/v2/mean_degree_days_v2_above_32c_CMCC-ESM2_{scenario}_{year}"), + ) + assert result_heat == "chronic_heat/osc/v2/mean_degree_days_v2_above_32c_ACCESS-CM2_rcp8p5_2050" assert result_flood == "inundation/wri/v2/inunriver_rcp8p5_MIROC-ESM-CHEM_2050" assert result_flood_hist == "inundation/wri/v2/inunriver_historical_000000000WATCH_1980" + assert result_heat_hint == "chronic_heat/osc/v2/mean_degree_days_v2_above_32c_CMCC-ESM2_rcp8p5_2050" def test_zarr_reading(self): request_dict = { @@ -110,6 +119,12 @@ def test_zarr_reading_chronic(self): ) numpy.testing.assert_array_almost_equal_nulp(result.items[0].intensity_curve_set[0].intensities[0], 600.0) + # request_with_hint = request.copy() + # request_with_hint.items[0].path = "chronic_heat/osc/v2/mean_degree_days_v2_above_32c_CMCC-ESM2_rcp8p5_2050" + # result = requests._get_hazard_data( + # request_with_hint, ZarrHazardModel(source_paths=source_paths, reader=ZarrReader(store)) + # ) + @unittest.skip("requires OSC environment variables set") def test_zarr_reading_live(self): # needs valid OSC_S3_BUCKET, OSC_S3_ACCESS_KEY, OSC_S3_SECRET_KEY @@ -125,7 +140,7 @@ def test_zarr_reading_live(self): "latitudes": TestData.latitudes, "year": 2030, "scenario": "ssp585", - "model": "mean_work_loss/high", + "indicator_id": "mean_work_loss/high", } ], }
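
Note for reviewers: the snippet below is a minimal, self-contained sketch (not part of the patch) of how the helpers moved into vulnerability_model.py fit together. checked_beta_distrib turns an interpolated damage mean and standard deviation into an impact CDF, and the last few lines mirror what VulnMatrixProvider.to_prob_matrix does: difference those CDFs at the impact bin edges to obtain the conditional probability matrix. The intensity bin centres and mean/std arrays are illustrative values only, not data from the JRC curves.

```python
# Sketch of the beta-distribution impact matrix used by CurveBasedVulnerabilityModel /
# RealEstateInundationModel (illustrative inputs; mirrors the helpers in vulnerability_model.py).
import numpy as np
from scipy import stats


def delta_cdf(y):
    # Degenerate CDF: all probability mass at impact y (used when the mean is exactly 0 or 1).
    return lambda x: np.where(x >= y, 1, 0)


def beta_distrib(mean, std):
    # Moment-match a beta distribution on [0, 1] to the interpolated damage mean and std.
    cv = std / mean
    a = (1 - mean) / (cv * cv) - mean
    b = a * (1 - mean) / mean
    return lambda x, a=a, b=b: stats.beta.cdf(x, a, b)


def checked_beta_distrib(mean, std):
    # Guard the degenerate cases before falling back to the beta fit.
    if mean == 0:
        return delta_cdf(0)
    if mean == 1.0:
        return delta_cdf(1)
    return beta_distrib(mean, std)


# Illustrative intensity bin centres (inundation depth in metres) with damage mean/std
# interpolated from a vulnerability curve, as get_impact_curve does.
intensity_bin_centres = np.array([0.25, 0.75, 1.5, 3.0])
impact_means = np.array([0.05, 0.25, 0.5, 0.85])
impact_stds = np.array([0.03, 0.1, 0.15, 0.1])
impact_cdfs = [checked_beta_distrib(m, s) for m, s in zip(impact_means, impact_stds)]

# Equivalent of VulnMatrixProvider.to_prob_matrix: p[i, j] is the probability that the
# impact falls in bin j given that the intensity falls in bin i.
impact_bin_edges = np.linspace(0.0, 1.0, 11)
cdf_matrix = np.vstack([cdf(impact_bin_edges) for cdf in impact_cdfs])
prob_matrix = cdf_matrix[:, 1:] - cdf_matrix[:, :-1]

print(prob_matrix.shape)        # (4, 10)
print(prob_matrix.sum(axis=1))  # each row sums to ~1
```

Fitting a two-parameter beta keeps the impact distribution on [0, 1] while honouring the curve's mean and standard deviation; the delta fallback avoids the division by zero that would otherwise occur when a curve pins damage at exactly 0 or 1.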