Commit

Merge branch 'gui-updates' of https://github.com/Deltares/hydromt_fiat into gui-updates
Santonia27 committed Dec 14, 2023
2 parents ee0b77d + 11e743d commit 60d0374
Showing 8 changed files with 85 additions and 30 deletions.
Binary file not shown.
Binary file not shown.
Binary file not shown.
32 changes: 29 additions & 3 deletions hydromt_fiat/fiat.py
@@ -10,6 +10,8 @@
import pandas as pd
from hydromt.models.model_grid import GridModel
from shapely.geometry import box
import os
import shutil

from hydromt_fiat import DATADIR
from hydromt_fiat.config import Config
@@ -102,6 +104,7 @@ def setup_output(
output_dir: str = "output",
output_csv_name: str = "output.csv",
output_vector_name: Union[str, List[str]] = "spatial.gpkg",

) -> None:
"""Setup Delft-FIAT output folder and files.
@@ -501,7 +504,7 @@ def setup_hazard(
# read maps and retrieve their attributes
da_map_fn, da_name, da_type = read_maps(params, da_map_fn, idx)

- da = self.data_catalog.get_rasterdataset(da_map_fn, geom=self.region)
+ da = self.data_catalog.get_rasterdataset(da_map_fn) # removed geom=self.region because it is not always there

# Convert to units of the exposure data if required
if (
@@ -653,7 +656,8 @@ def setup_social_vulnerability_index(
county_numbers = get_us_county_numbers(counties, us_states_counties)

# Create SVI object
- svi = SocialVulnerabilityIndex(self.data_catalog, self.logger)
+ save_folder = str(Path(self.root) / "exposure" / "SVI")
+ svi = SocialVulnerabilityIndex(self.data_catalog, self.logger, save_folder)

# Call functionalities of SVI
svi.set_up_census_key(census_key)
@@ -741,7 +745,8 @@ def setup_equity_data(
county_numbers = get_us_county_numbers(counties, us_states_counties)

# Create equity object
- equity = EquityData(self.data_catalog, self.logger)
+ save_folder = str(Path(self.root) / "equity")
+ equity = EquityData(self.data_catalog, self.logger, save_folder)

# Call functionalities of equity
equity.set_up_census_key(census_key)
@@ -781,6 +786,18 @@ def setup_aggregation_areas(
self.exposure.exposure_db = join_exposure_aggregation_areas(
exposure_gdf, aggregation_area_fn, attribute_names, label_names
)

# Create additional attributes folder in root
additional_att_input = Path(self.root).joinpath("additional_attributes")
if not os.path.exists(additional_att_input):
os.makedirs(additional_att_input)

if isinstance(aggregation_area_fn,list):
for file in aggregation_area_fn:
shutil.copy2(file, additional_att_input)
else:
shutil.copy2(aggregation_area_fn, additional_att_input)


def setup_building_footprint(
self,
@@ -807,6 +824,15 @@
building_footprint_fn,
attribute_name,
)
# Create BF folder in Exposure
building_footprints_exp = Path(self.root).joinpath("exposure" , "building_footprints")
if not os.path.exists(building_footprints_exp):
os.makedirs(building_footprints_exp)
if isinstance(building_footprint_fn,list):
for file in building_footprint_fn:
shutil.copy2(file, building_footprints_exp)
else:
shutil.copy2(building_footprint_fn, building_footprints_exp)

# Update functions
def update_all(self):
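
The two new blocks in setup_aggregation_areas and setup_building_footprint follow the same pattern: create a target folder under the model root if it does not exist, then copy either a single input file or a list of input files into it with shutil.copy2. A minimal stand-alone sketch of that pattern (the helper name copy_inputs_to_folder and the example paths are illustrative, not part of hydromt_fiat):

import os
import shutil
from pathlib import Path
from typing import List, Union

def copy_inputs_to_folder(fn: Union[str, Path, List[str], List[Path]], target: Union[str, Path]) -> None:
    # Create the target folder if it does not exist yet (mirrors the os.makedirs calls above)
    os.makedirs(target, exist_ok=True)
    # Accept a single file or a list of files, as the new blocks do with isinstance(..., list)
    files = fn if isinstance(fn, list) else [fn]
    for file in files:
        shutil.copy2(file, target)  # copy2 also preserves file metadata such as timestamps

# Hypothetical usage, mirroring setup_aggregation_areas:
# copy_inputs_to_folder("block_groups.gpkg", Path("fiat_model") / "additional_attributes")
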
14 changes: 6 additions & 8 deletions hydromt_fiat/workflows/equity_data.py
@@ -12,8 +12,9 @@


class EquityData:
- def __init__(self, data_catalog: DataCatalog, logger: Logger):
+ def __init__(self, data_catalog: DataCatalog, logger: Logger, save_folder: str):
self.data_catalog = data_catalog
self.save_folder = save_folder
self.census_key = Census
self.download_codes = {}
self.state_fips = []
@@ -26,8 +27,6 @@ def __init__(self, data_catalog: DataCatalog, logger: Logger):
self.logger = logger
self.svi_data_shp = gpd.GeoDataFrame()
self.block_groups = gpd.GeoDataFrame()
- self.temp_folder = []


def set_up_census_key(self, census_key: str):
"""The Census key can be inputted in the ini file.
@@ -55,7 +54,7 @@ def set_up_state_code(self, state_abbreviation: List[str]):
states_done = []
for state in state_abbreviation:
if state not in states_done:
- self.logger.info(f"The state abbreviation specified is: {state}")
+ self.logger.info(f"The states for which census data will be downloaded is (it's an abbreviation): {state}")
state_obj = getattr(states, state)
self.state_fips.append(state_obj.fips)
states_done.append(state)
@@ -135,7 +134,7 @@ def download_and_unzip(self, url, extract_to='.'):
zipfile = ZipFile(BytesIO(http_response.read()))
zipfile.extractall(path=extract_to)
except Exception as e:
- print(f"Error during download and unzip: {e}")
+ self.logger.warning(f"Error during download and unzip: {e}")

def download_shp_geom(self, year_data: int, counties: List[str]):
"""Downloading the shapefiles from the government Tiger website
@@ -161,16 +160,15 @@
return

# Save shapefiles
- folder = f'Shapefiles/{sf}{county}/{year_data}'
- self.temp_folder.append(folder)
+ folder = Path(self.save_folder) / "shapefiles" / (sf + county) / str(year_data)
self.logger.info(f"Downloading the county shapefile for {str(year_data)}")
self.download_and_unzip(url, folder)
shapefiles = list(Path(folder).glob("*.shp"))
if shapefiles:
shp = gpd.read_file(shapefiles[0])
self.logger.info("The shapefile was downloaded")
else:
- print("No shapefile found in the directory.")
+ self.logger.warning("No county shapefile found in the directory.")

# Dissolve shapefile based on block groups
code = "20"
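
With save_folder passed into EquityData, the TIGER shapefile download location is now derived from the model root instead of the previous relative 'Shapefiles/...' path, and the temp_folder bookkeeping is dropped. A short sketch of the resulting path construction (the values for root, sf, county and year are illustrative assumptions, not taken from the repository):

from pathlib import Path

root = Path("fiat_model")                          # assumed model root
save_folder = str(root / "equity")                 # as built in setup_equity_data above
sf, county, year_data = "tl_2021_", "45019", 2021  # placeholder prefix, county code and year

# Same expression as in download_shp_geom above
folder = Path(save_folder) / "shapefiles" / (sf + county) / str(year_data)
print(folder)  # fiat_model/equity/shapefiles/tl_2021_45019/2021
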
42 changes: 32 additions & 10 deletions hydromt_fiat/workflows/exposure_vector.py
@@ -36,6 +36,8 @@
get_road_lengths,
)

from hydromt_fiat.workflows.aggregation_areas import join_exposure_aggregation_areas


class ExposureVector(Exposure):
_REQUIRED_COLUMNS = ["Object ID", "Extraction Method", "Ground Floor Height"]
@@ -913,9 +915,10 @@ def setup_new_composite_areas(
elevation_reference: str,
path_ref: str = None,
attr_ref: str = None,
- ground_elevation: Union[
- int, float, None, str, Path, List[str], List[Path]
- ] = None,
+ ground_elevation: Union[None, str, Path] = None,
+ aggregation_area_fn: Union[List[str], List[Path], str, Path] = None,
+ attribute_names: Union[List[str], str] = None,
+ label_names: Union[List[str], str] = None,
) -> None:
"""Adds one or multiple (polygon) areas to the exposure database with
a composite damage function and a percentage of the total damage.
@@ -1015,7 +1018,9 @@
# TODO: Take ground elevation from DEM?
# For water level calculation this will not take into account the
# non-flooded cells separately, just averaged over the whole area.
- self.logger.warning("The ground elevation is set to 0.")
+ self.logger.warning(
+ "The ground elevation is set to 0 if no DEM is supplied."
+ )

# Idea: Reduction factor for the part of the area is not build-up?

@@ -1079,17 +1084,34 @@
self.crs,
)

# Update the exposure_geoms
self.set_geom_names("new_development_area")
self.set_exposure_geoms(_new_exposure_geoms)

# If the user supplied ground elevation data, assign that to the new
# composite areas
if ground_elevation is not None:
new_objects["Ground Elevation"] = ground_elevation_from_dem(
ground_elevation=ground_elevation,
exposure_db=new_objects,
exposure_geoms=_new_exposure_geoms,
)

# If the user supplied aggregation area data, assign that to the
# new composite areas
if aggregation_area_fn is not None:
new_objects = join_exposure_aggregation_areas(
_new_exposure_geoms.merge(new_objects, on="Object ID"),
aggregation_area_fn=aggregation_area_fn,
attribute_names=attribute_names,
label_names=label_names,
)

# Update the exposure_db
self.exposure_db = pd.concat([self.exposure_db, new_objects]).reset_index(
drop=True
)

- # Update the exposure_geoms
- self.set_exposure_geoms(_new_exposure_geoms)

- # Adding elevation data into the new objects
- self.setup_ground_elevation(ground_elevation)

def link_exposure_vulnerability(
self,
exposure_linking_table: pd.DataFrame,
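
The extended signature lets callers attach aggregation-area labels to newly added composite areas in the same call, instead of only via setup_aggregation_areas afterwards. A hedged call sketch: the keyword values mirror the test case further below, the leading arguments (unchanged by this commit and not shown in the diff) are elided, and the file paths are illustrative:

new_area_kwargs = dict(
    elevation_reference="datum",
    path_ref=None,
    attr_ref=None,
    ground_elevation=None,                                       # or a DEM file, e.g. "charleston_14m.tif"
    aggregation_area_fn="aggregation_areas/block_groups.gpkg",   # illustrative path
    attribute_names="GEOID_short",
    label_names="Aggregation Label: Census Block",
)
# fm.exposure.setup_new_composite_areas(..., **new_area_kwargs)  # fm: an initialized FiatModel (assumption)
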
9 changes: 4 additions & 5 deletions hydromt_fiat/workflows/social_vulnerability_index.py
@@ -71,8 +71,9 @@ def list_of_states():
return states_inverted

class SocialVulnerabilityIndex:
- def __init__(self, data_catalog: DataCatalog, logger: Logger):
+ def __init__(self, data_catalog: DataCatalog, logger: Logger, save_folder: str):
self.data_catalog = data_catalog
self.save_folder = save_folder
self.census_key = Census
self.download_codes = {}
self.state_fips = []
@@ -90,7 +91,6 @@ def __init__(self, data_catalog: DataCatalog, logger: Logger):
self.logger = logger
self.svi_data_shp = gpd.GeoDataFrame()
self.block_groups = gpd.GeoDataFrame()
- self.temp_folder = []

def read_dataset(self, path: str):
"""If you have your own dataset (e.g. already downloaded census data), you can use this to load it from a csv
@@ -144,7 +144,7 @@ def set_up_state_code(self, state_abbreviation: List[str]):
states_done = []
for state in state_abbreviation:
if state not in states_done:
- self.logger.info(f"The state abbreviation specified is: {state}")
+ self.logger.info(f"The states for which census data will be downloaded is (it's an abbreviation): {state}")
state_obj = getattr(states, state)
self.state_fips.append(state_obj.fips)
states_done.append(state)
@@ -581,8 +581,7 @@ def download_shp_geom(self, year_data: int, counties: List[str]):
return

# Save shapefiles
- folder = f'Shapefiles/{sf}{county}/{year_data}'
- self.temp_folder.append(folder)
+ folder = Path(self.save_folder) / "shapefiles" / (sf + county) / str(year_data)
self.logger.info(f"Downloading the county shapefile for {str(year_data)}")
self.download_and_unzip(url, folder)
shapefiles = list(Path(folder).glob("*.shp"))
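
Because save_folder is now a required constructor argument, code that builds the SVI workflow directly must pass a writable folder, following the same layout used in setup_social_vulnerability_index above. A minimal sketch (data_catalog, logger, root and census_key are assumed to exist in the calling scope):

from pathlib import Path
from hydromt_fiat.workflows.social_vulnerability_index import SocialVulnerabilityIndex

save_folder = str(Path(root) / "exposure" / "SVI")   # same folder layout as in fiat.py above
svi = SocialVulnerabilityIndex(data_catalog, logger, save_folder)
svi.set_up_census_key(census_key)                    # subsequent SVI calls are unchanged
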
18 changes: 14 additions & 4 deletions tests/test_setup_new_composite_areas_ground_elevation.py
@@ -23,8 +23,10 @@
"type": "datum",
"path_ref": None,
"attr_ref": None,
- "ground_elevation_file": DATADIRDEM
- / "charleston_14m.tif",
+ "ground_elevation_file": None,
+ "aggregation_area_fn": None,
+ "attribute_names": None,
+ "label_names": None,
},
"setup_new_composite_area_geom": {
"dir": "test_read",
@@ -33,8 +35,10 @@
"type": "geom",
"path_ref": DATADIR / "new_composite_areas" / "reference_groundHeight_test.shp",
"attr_ref": "bfe",
- "ground_elevation_file": DATADIRDEM
- / "charleston_14m.tif",
+ "ground_elevation_file": None,
+ "aggregation_area_fn": None,
+ "attribute_names": None,
+ "label_names": None,
},
"setup_new_composite_area_elevation": {
"dir": "test_read",
@@ -45,6 +49,9 @@
"attr_ref": None,
"ground_elevation_file": DATADIRDEM
/ "charleston_14m.tif",
"aggregation_area_fn": EXAMPLEDIR.joinpath("test_read", "exposure", "aggregation_areas", "block_groups.gpkg"),
"attribute_names": "GEOID_short",
"label_names": "Aggregation Label: Census Block",
},
}

@@ -70,6 +77,9 @@ def test_setup_new_composite_areas_ground_elevation(case):
path_ref=_cases[case]["path_ref"],
attr_ref=_cases[case]["attr_ref"],
ground_elevation=_cases[case]["ground_elevation_file"],
aggregation_area_fn=_cases[case]["aggregation_area_fn"],
attribute_names=_cases[case]["attribute_names"],
label_names=_cases[case]["label_names"],
)

if _cases[case]["new_root"].exists():
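
The new dictionary keys feed straight into the call in test_setup_new_composite_areas_ground_elevation above, so a single case can be selected by name when the test is parametrized over _cases (a standard pytest pattern; the exact markers are not shown in this diff). A possible invocation from the repository root:

import pytest

# Select only the case that exercises the new aggregation-area keywords
pytest.main(["tests/test_setup_new_composite_areas_ground_elevation.py", "-k", "setup_new_composite_area_elevation"])
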
