Skip to content

Commit

Permalink
Merge pull request #120 from Geosyntec/patch-land-surfaces
Browse files Browse the repository at this point in the history
land surface improvements, bump dependencies
  • Loading branch information
austinorr authored Apr 7, 2022
2 parents 438c1ca + ce8f266 commit c4166af
Show file tree
Hide file tree
Showing 8 changed files with 164 additions and 125 deletions.
2 changes: 1 addition & 1 deletion nereid/nereid/api/api_v1/models/land_surface_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ class LandSurfaceDetails(LandSurfaceBase):


class LandSurfaceResults(BaseModel):
summary: List[LandSurfaceSummary]
summary: Optional[List[LandSurfaceSummary]] = None
details: Optional[List[LandSurfaceDetails]] = None
errors: Optional[List[str]] = None

Expand Down
7 changes: 7 additions & 0 deletions nereid/nereid/src/land_surface/loading.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,13 @@ def detailed_volume_loading_results(df: pandas.DataFrame) -> pandas.DataFrame:

df = df.loc[df["area_acres"] > 0]

if "precip_depth_inches" in df:
if "imp_ro_depth_inches" not in df: # pragma: no branch
df["imp_ro_depth_inches"] = df["precip_depth_inches"] * df["imp_ro_coeff"]

if "perv_ro_depth_inches" not in df: # pragma: no branch
df["perv_ro_depth_inches"] = df["precip_depth_inches"] * df["perv_ro_coeff"]

# method chaining with 'df.assign' looks better, but it's much less memory efficient
df["imp_pct"] = 100 * df["imp_area_acres"] / df["area_acres"]
df["perv_area_acres"] = df["area_acres"] - df["imp_area_acres"]
Expand Down
103 changes: 55 additions & 48 deletions nereid/nereid/src/land_surface/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,56 +22,63 @@ def land_surface_loading(
aggregate the load to each node_id, and are always returned.
"""

response: Dict[str, Any] = {}
response["errors"] = []
response: Dict[str, Any] = {"errors": []}

land_surface_list = land_surfaces.get("land_surfaces") or []

if land_surface_list: # pragma: no branch
df = pandas.DataFrame(land_surface_list)
df["imp_pct"] = 100 * df["imp_area_acres"] / df["area_acres"]

df, messages = parse_configuration_logic(
df=df,
config_section="api_recognize",
config_object="land_surfaces",
context=context,
)

# TODO: add validator function to ensure config & request are complete.

if len(messages) > 0:
response["errors"].extend(messages)

wet_weather_parameters = init_wq_parameters("land_surface_emc_table", context)
dry_weather_parameters = init_wq_parameters(
"dry_weather_land_surface_emc_table", context
)

seasons = (
context.get("project_reference_data", {})
.get("dry_weather_flow_table", {})
.get("seasons", {})
)

detailed_results = detailed_loading_results(
df,
wet_weather_parameters,
dry_weather_parameters,
seasons,
)
summary_results = summary_loading_results(
detailed_results,
wet_weather_parameters,
dry_weather_parameters,
season_names=seasons.keys(),
)

response["summary"] = summary_results.fillna(0).to_dict(orient="records")

if details:
response["details"] = detailed_results.fillna(0).to_dict(orient="records")
else: # pragma: no cover
response["errors"].append("ERROR: no land surface input data provided.")
try:
if land_surface_list: # pragma: no branch
df = pandas.DataFrame(land_surface_list)
df["imp_pct"] = 100 * df["imp_area_acres"] / df["area_acres"]

df, messages = parse_configuration_logic(
df=df,
config_section="api_recognize",
config_object="land_surfaces",
context=context,
)

# TODO: add validator function to ensure config & request are complete.

if len(messages) > 0:
response["errors"].extend(messages)

wet_weather_parameters = init_wq_parameters(
"land_surface_emc_table", context
)
dry_weather_parameters = init_wq_parameters(
"dry_weather_land_surface_emc_table", context
)

seasons = (
context.get("project_reference_data", {})
.get("dry_weather_flow_table", {})
.get("seasons", {})
)

detailed_results = detailed_loading_results(
df,
wet_weather_parameters,
dry_weather_parameters,
seasons,
)
summary_results = summary_loading_results(
detailed_results,
wet_weather_parameters,
dry_weather_parameters,
season_names=seasons.keys(),
)

response["summary"] = summary_results.fillna(0).to_dict(orient="records")

if details:
response["details"] = detailed_results.fillna(0).to_dict(
orient="records"
)
else: # pragma: no cover
response["errors"].append("ERROR: no land surface input data provided.")

except Exception as e: # pragma: no cover
response["errors"].append(str(e))

return response
36 changes: 20 additions & 16 deletions nereid/nereid/src/treatment_facility/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,26 +17,30 @@ def initialize_treatment_facilities(
context: Dict[str, Any],
) -> Dict[str, Any]:

treatment_facility_list = treatment_facilities.get("treatment_facilities") or []
if not pre_validated:
treatment_facility_list = validate_treatment_facility_models(
treatment_facility_list, context
)
response: Dict[str, Any] = {"errors": []}

df, messages = parse_configuration_logic(
df=pandas.DataFrame(treatment_facility_list),
config_section="api_recognize",
config_object="treatment_facility",
context=context,
)
try:
treatment_facility_list = treatment_facilities.get("treatment_facilities") or []
if not pre_validated:
treatment_facility_list = validate_treatment_facility_models(
treatment_facility_list, context
)

df, messages = parse_configuration_logic(
df=pandas.DataFrame(treatment_facility_list),
config_section="api_recognize",
config_object="treatment_facility",
context=context,
)

treatment_facility_nodes = build_treatment_facility_nodes(df=df)
treatment_facility_nodes = build_treatment_facility_nodes(df=df)

response: Dict[str, Any] = {"errors": []}
if len(messages) > 0:
response["errors"] = messages

if len(messages) > 0:
response["errors"] = messages
response["treatment_facilities"] = treatment_facility_nodes

response["treatment_facilities"] = treatment_facility_nodes
except Exception as e: # pragma: no cover
response["errors"].append(str(e))

return response
103 changes: 54 additions & 49 deletions nereid/nereid/src/treatment_site/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,58 +9,63 @@ def initialize_treatment_sites(

response: Dict[str, Any] = {"errors": []}

sites = treatment_sites.get("treatment_sites") or []

# tmnt_map connects the facility name with the treatment
# key for the influent -> effluent concentration transformation
# e.g. {"bioretention": "Biofiltration"}
tmnt_map = {
k: dct["tmnt_performance_facility_type"]
for k, dct in (
context["api_recognize"]
.get("treatment_facility", {})
.get("facility_type", {})
.items()
)
}

tmnt_sites = []

if sites:

_df = pandas.DataFrame(sites)

remainder_data = []
for node, g in _df.groupby("node_id"):
remainder = 100 - g["area_pct"].sum()
remainder_data.append(
{
"node_id": node,
"area_pct": remainder,
"facility_type": "no_treatment",
"eliminate_all_dry_weather_flow_override": False,
}
try:

sites = treatment_sites.get("treatment_sites") or []

# tmnt_map connects the facility name with the treatment
# key for the influent -> effluent concentration transformation
# e.g. {"bioretention": "Biofiltration"}
tmnt_map = {
k: dct["tmnt_performance_facility_type"]
for k, dct in (
context["api_recognize"]
.get("treatment_facility", {})
.get("facility_type", {})
.items()
)
}

tmnt_sites = []

if sites:

_df = pandas.DataFrame(sites)

df = (
pandas.concat([_df, pandas.DataFrame(remainder_data)])
.fillna(0)
.assign(
tmnt_performance_facility_type=lambda df: df["facility_type"].replace(
tmnt_map
remainder_data = []
for node, g in _df.groupby("node_id"):
remainder = 100 - g["area_pct"].sum()
remainder_data.append(
{
"node_id": node,
"area_pct": remainder,
"facility_type": "no_treatment",
"eliminate_all_dry_weather_flow_override": False,
}
)

df = (
pandas.concat([_df, pandas.DataFrame(remainder_data)])
.fillna(0)
.assign(
tmnt_performance_facility_type=lambda df: df[
"facility_type"
].replace(tmnt_map)
)
)
)

tmnt_sites = [
{
"node_id": key,
"treatment_facilities": g.to_dict("records"),
"node_type": "site_based",
}
for key, g in df.groupby("node_id")
]

response["treatment_sites"] = tmnt_sites

tmnt_sites = [
{
"node_id": key,
"treatment_facilities": g.to_dict("records"),
"node_type": "site_based",
}
for key, g in df.groupby("node_id")
]

response["treatment_sites"] = tmnt_sites

except Exception as e: # pragma: no cover
response["errors"].append(str(e))

return response
18 changes: 17 additions & 1 deletion nereid/nereid/tests/test_src/test_land_surface/test_loading.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,23 @@ def test_detailed_land_surface_loading_results(
dry_weather_parameters,
seasons,
)
assert t["area_acres"].sum() == land_surfaces_df["area_acres"].sum()

land_surfaces_df_reduced = land_surfaces_df.drop(
columns=["imp_ro_depth_inches", "perv_ro_depth_inches"]
).assign(precip_depth_inches=12)

t_precip = detailed_loading_results(
land_surfaces_df_reduced,
wet_weather_parameters,
dry_weather_parameters,
seasons,
)

assert (
t["area_acres"].sum()
== land_surfaces_df["area_acres"].sum()
== t_precip["area_acres"].sum()
)
assert len(t) == len(land_surfaces_list)
if not "no_joins" in key and not "no_params" in key:
assert any(["conc" in c for c in t.columns])
Expand Down
14 changes: 7 additions & 7 deletions nereid/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
aiofiles==0.8.0
celery==5.2.3
fastapi==0.75.0
celery==5.2.6
fastapi==0.75.1
graphviz==0.19.1
jinja2==3.0.3
jinja2==3.1.1
matplotlib==3.5.1
networkx==2.7.1
orjson==3.6.7
pandas==1.4.1
pint==0.18
pandas==1.4.2
pint==0.19.1
pydot==1.4.2
python-dotenv==0.19.2
python-dotenv==0.20.0
pyyaml==6.0
redis==4.1.4
redis==4.2.2
scipy==1.8.0
6 changes: 3 additions & 3 deletions nereid/requirements_tests.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
black==22.1.0
black==22.3.0
codecov==2.1.12
coverage==6.3.2
isort==5.10.1
mypy==0.941
pytest==7.1.0
mypy==0.942
pytest==7.1.1
requests==2.27.1

0 comments on commit c4166af

Please sign in to comment.