Test multi-ghe network on CI (#15)
* experiment with 2_ghe test on new github runners

* downselect geojson to only include buildings from sys-params file

* update TN version identifier in ghe sys-params test files

* more debug logging to check run time

* TEMPORARY: notebook code to run examples a little more granularly

* don't run coverage every time, and adapt to new tests dir location

* move tests folder up to project root

* upgrade ruff to v0.2.1

* restore coverage, now outputting to `htmlcov` dir

* TEMPORARY: updates to notebook to generate experimental test files

* update GHED & ruff versions

* use new continue_if_design_unmet parameter of GHED

* add more comments to building downselect code

* point test sys-param file to the test buildings that exist

* move tests to their own top-level dir. Did I not already do this?

* add max_boreholes parameter to the GHED call

* Revert "TEMPORARY: updates to notebook to generate experimental test files"

This reverts commit 7207374.

* Revert "TEMPORARY: notebook code to run examples a little more granularly"

This reverts commit 132585a.

* lint & format
vtnate authored Feb 29, 2024
1 parent 985f548 · commit f323d74
Showing 12 changed files with 52 additions and 47 deletions.
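The commit message mentions downselecting the GeoJSON so it only includes the buildings listed in the sys-param file, but that helper is not part of the hunks shown below. As a rough illustration of the idea, here is a minimal sketch: the function name and the assumption that the sys-param file lists buildings under a "buildings" key with a "geojson_id" field are hypothetical, while the feature property names ("type", "id") match the ones used in the notebook code later in this diff.

import json
from pathlib import Path


def downselect_geojson_buildings(geojson_path: Path, sys_param_path: Path) -> dict:
    """Hypothetical sketch: keep only building features referenced by the sys-param file."""
    geojson_data = json.loads(geojson_path.read_text())
    sys_param_data = json.loads(sys_param_path.read_text())

    # Assumption: the sys-param file lists buildings under "buildings", each carrying
    # the GeoJSON feature id in "geojson_id". The real key names may differ.
    building_ids = {b["geojson_id"] for b in sys_param_data.get("buildings", [])}

    kept = []
    for feature in geojson_data["features"]:
        props = feature["properties"]
        # Keep every non-building feature (connectors, junctions, district systems);
        # keep a building only if the sys-param file references it.
        if props.get("type") != "Building" or props.get("id") in building_ids:
            kept.append(feature)

    geojson_data["features"] = kept
    return geojson_data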
2 changes: 1 addition & 1 deletion .gitignore
@@ -136,7 +136,7 @@ dmypy.json
.vscode/

# Test output files
thermalnetwork/tests/test_outputs
tests/test_outputs

output/
tmp/
3 changes: 2 additions & 1 deletion .pre-commit-config.yaml
@@ -24,10 +24,10 @@ repos:
# hooks:
# - id: check-useless-excludes # Ensure the exclude syntax is correct
# - id: check-hooks-apply # Fails if a hook doesn't apply to any file
# Run the Ruff linter
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.3.0
hooks:
# Run the Ruff linter
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
types_or: [python, pyi, jupyter]
@@ -37,3 +37,4 @@ repos:
rev: v0.3.0
hooks:
- id: ruff-format
types_or: [python, pyi, jupyter]
(changed file, name not captured in this view: one of the GHE sys-param test files)
@@ -88,7 +88,7 @@
"district_system": {
"fifth_generation": {
"ghe_parameters": {
"version": "1.0",
"version": "0.2.3",
"ghe_dir": "tests\\management\\data\\sdk_project_scraps\\run\\baseline_scenario\\ghe_dir",
"fluid": {
"fluid_name": "Water",
(changed file, name not captured in this view: another GHE sys-param test file)
@@ -88,7 +88,7 @@
"district_system": {
"fifth_generation": {
"ghe_parameters": {
"version": "1.0",
"version": "0.2.3",
"ghe_dir": "tests\\management\\data\\sdk_project_scraps\\run\\baseline_scenario\\ghe_dir",
"fluid": {
"fluid_name": "Water",
(changed file, name not captured in this view: a third GHE sys-param test file)
@@ -88,7 +88,7 @@
"district_system": {
"fifth_generation": {
"ghe_parameters": {
"version": "1.0",
"version": "0.2.3",
"ghe_dir": "ghe\\run\\baseline_scenario\\ghe_dir",
"fluid": {
"fluid_name": "Water",
32 changes: 15 additions & 17 deletions pyproject.toml
@@ -56,36 +56,34 @@ dev = [
[tool.setuptools.dynamic]
readme = {file = "README.md", content-type = "text/markdown"}

# https://setuptools-scm.readthedocs.io/
# Presence of this command tells it to find the version from GitHub
[tool.setuptools_scm]

# https://docs.pytest.org/en/6.2.x/customize.html#pyproject-toml
[tool.pytest.ini_options]
minversion = "6.0"
testpaths = "thermalnetwork/tests"
addopts = ["--cov=thermalnetwork"]
testpaths = "tests"
# Manually add these flags to `pytest` when running locally for coverage details.
addopts = ["--cov=thermalnetwork", "--cov-report=html"]


# https://pytest-cov.readthedocs.io/en/latest/config.html
# https://coverage.readthedocs.io/en/latest/config.html
[tool.coverage.run]
omit = [
"thermalnetwork/tests/**"
]

# https://docs.astral.sh/ruff/settings/
# https://docs.astral.sh/ruff/tutorial/#configuration
[tool.ruff]
fix = true # automatically fix problems if possible
select = ["RUF", "E", "F", "I", "UP", "N", "S", "BLE", "A", "C4", "T10", "ISC", "ICN", "PT",
line-length = 120

# https://docs.astral.sh/ruff/linter/#rule-selection
[tool.ruff.lint]
extend-select = ["RUF", "E", "F", "I", "UP", "N", "S", "BLE", "A", "C4", "T10", "ISC", "ICN", "PT",
"Q", "SIM", "TID", "ARG", "DTZ", "PD", "PGH", "PLC", "PLE", "PLR", "PLW", "PIE", "COM"] # Enable these rules
ignore = ["PLR0913", "PLR2004", "PLR0402", "COM812", "COM819", "SIM108", "ARG002", "ISC001"] # except for these specific errors
line-length = 120

# https://docs.astral.sh/ruff/settings/#format
[tool.ruff.lint.per-file-ignores]
"tests/*" = ["S101", "S607", "S603"] # assert statements are allowed in tests, and paths are safe

# https://docs.astral.sh/ruff/formatter/#configuration
[tool.ruff.format]
# quote-style = "double"

[tool.ruff.per-file-ignores]
"thermalnetwork/tests/*" = ["S101", "S607", "S603"] # assert statements are allowed in tests, and paths are safe

[project.scripts]
thermalnetwork = "thermalnetwork.network:run_sizer_from_cli"
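The updated pytest configuration above points testpaths at the top-level tests directory, and its comment suggests adding the coverage flags when running locally to get an HTML report in the htmlcov directory. A quick way to do the same from a Python session, simply mirroring the flags shown in addopts, is:

import pytest

# Collect the top-level tests/ directory, measure coverage of the thermalnetwork
# package, and write an HTML report (pytest-cov defaults to the htmlcov/ directory).
exit_code = pytest.main(["tests", "--cov=thermalnetwork", "--cov-report=html"])
raise SystemExit(exit_code)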
2 changes: 1 addition & 1 deletion thermalnetwork/tests/test_base.py → tests/test_base.py
@@ -8,7 +8,7 @@ def setUp(self) -> None:
here = Path(__file__).parent

# -- Input paths
self.demos_path = here.parent.parent / "demos"
self.demos_path = here.parent / "demos"

self.geojson_file_path_1_ghe = (self.demos_path / "sdk_output_skeleton_1_ghe" / "network.geojson").resolve()
self.scenario_directory_path_1_ghe = (
(changed file, name not captured in this view: the relocated GHE test module)
@@ -1,5 +1,5 @@
from tests.test_base import BaseCase
from thermalnetwork.ground_heat_exchanger import GHE
from thermalnetwork.tests.test_base import BaseCase


class TestGHE(BaseCase):
(changed file, name not captured in this view: the relocated heat pump test module)
@@ -1,7 +1,7 @@
import pytest

from tests.test_base import BaseCase
from thermalnetwork.heat_pump import HeatPump
from thermalnetwork.tests.test_base import BaseCase


class TestHeatPump(BaseCase):
(changed file, name not captured in this view: the relocated network test module)
@@ -1,9 +1,7 @@
import json

import pytest

from tests.test_base import BaseCase
from thermalnetwork.network import run_sizer_from_cli_worker
from thermalnetwork.tests.test_base import BaseCase


class TestNetwork(BaseCase):
@@ -43,7 +41,6 @@ def test_network_one_ghe(self):
# Restore the trailing newline
sys_param_file.write("\n")

@pytest.mark.skip(reason="Test consumes too much memory/cpu for GHA runners. Please run locally instead")
def test_network_two_ghe(self):
# -- Set up
output_path = self.test_outputs_path / "two_ghe"
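With the @pytest.mark.skip marker removed above, test_network_two_ghe now runs on CI, which is the point of this PR. To exercise just that test locally without assuming the module's file name (not captured in this view), it can be selected by keyword:

import pytest

# -k selects tests by name, so no assumption about the module path is needed;
# -v prints the selected test ids as they run.
pytest.main(["tests", "-k", "test_network_two_ghe", "-v"])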
6 changes: 6 additions & 0 deletions thermalnetwork/ground_heat_exchanger.py
@@ -56,6 +56,7 @@ def ghe_size(self, total_space_loads, output_path: Path) -> float:
max_height=self.json_data["geometric_constraints"]["max_height"],
min_height=self.json_data["geometric_constraints"]["min_height"],
continue_if_design_unmet=True,
max_boreholes=2500,
)
ghe.set_ground_loads_from_hourly_list(self.json_data["loads"]["ground_loads"])
ghe.set_geometry_constraints_rectangle(
@@ -83,13 +84,18 @@ def ghe_size(self, total_space_loads, output_path: Path) -> float:
file_name = output_file_directory / "ground_loads.csv"
logger.info(f"saving loads to: {file_name}")
ground_loads_df.to_csv(file_name, index=False)
logger.debug("loads saved to csv file")

ghe.find_design()
logger.debug("design found")
ghe.prepare_results("Project Name", "Notes", "Author", "Iteration Name")
logger.debug("results prepared for writing to output directory")

ghe.write_output_files(output_file_directory, "")
logger.debug("output written to output directory")
u_tube_height = ghe.results.output_dict["ghe_system"]["active_borehole_length"]["value"]
# selected_coordinates = ghe.results.borehole_location_data_rows # includes a header row
logger.debug("Done writing output")
return u_tube_height

def get_atlanta_loads(self) -> list[float]:
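The commit message says the extra logger.debug calls were added to check run time, and they only serve that purpose if the log output carries timestamps. Assuming the module uses the standard logging library (which the logger.info/logger.debug calls above suggest), a minimal way to get timestamped debug output when driving the sizer from a script is:

import logging

# Timestamped debug output, so the messages added above ("design found",
# "output written to output directory", ...) double as coarse timing marks.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s %(levelname)s %(name)s: %(message)s",
)

logging.getLogger("thermalnetwork").debug("debug logging with timestamps enabled")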
39 changes: 21 additions & 18 deletions topology/final.ipynb
@@ -8,7 +8,7 @@
"outputs": [],
"source": [
"import json\n",
"from pathlib import Path\n"
"from pathlib import Path"
]
},
{
@@ -20,7 +20,7 @@
"source": [
"geojson_path = Path.cwd().parent / \"demos\" / \"sdk_output_skeleton_1_ghe\" / \"network.geojson\"\n",
"with open(geojson_path) as f:\n",
" geojson_data = json.load(f)\n"
" geojson_data = json.load(f)"
]
},
{
Expand All @@ -32,7 +32,7 @@
"source": [
"geojson_path = Path.cwd().parent / \"demos\" / \"sdk_output_skeleton_2_ghe_sequential\" / \"network.geojson\"\n",
"with open(geojson_path) as f:\n",
" geojson_data2 = json.load(f)\n"
" geojson_data2 = json.load(f)"
]
},
{
Expand All @@ -44,7 +44,7 @@
"source": [
"geojson_path = Path.cwd().parent / \"demos\" / \"sdk_output_skeleton_2_ghe_staggered\" / \"network.geojson\"\n",
"with open(geojson_path) as f:\n",
" geojson_data2_staggered = json.load(f)\n"
" geojson_data2_staggered = json.load(f)"
]
},
{
@@ -73,21 +73,21 @@
"metadata": {},
"outputs": [],
"source": [
"\n",
"#THIS IS GOOD\n",
"# THIS IS GOOD\n",
"def find_startloop_feature_id(features):\n",
" for feature in features:\n",
" if feature[\"properties\"].get(\"is_ghe_start_loop\") == \"true\":\n",
" start_feature_id = feature[\"properties\"].get(\"buildingId\") or feature[\"properties\"].get(\"DSId\")\n",
" return start_feature_id\n",
" return None\n",
"\n",
"\n",
"def get_connected_features(geojson_data):\n",
" features = geojson_data[\"features\"]\n",
" connectors = [feature for feature in features if feature[\"properties\"][\"type\"] == \"ThermalConnector\"]\n",
" connected_features = []\n",
"\n",
" #get the id of the building or ds from the thermaljunction that has startloop: true\n",
" # get the id of the building or ds from the thermaljunction that has startloop: true\n",
" startloop_feature_id = find_startloop_feature_id(features)\n",
"\n",
" # Start with the first connector\n",
@@ -113,19 +113,22 @@
" for feature in features:\n",
" feature_id = feature[\"properties\"][\"id\"]\n",
" if feature_id in connected_features and feature[\"properties\"][\"type\"] in [\"Building\", \"District System\"]:\n",
" connected_objects.append({\n",
" \"id\": feature_id,\n",
" \"type\": feature[\"properties\"][\"type\"],\n",
" \"name\": feature[\"properties\"].get(\"name\", \"\"),\n",
" \"start_loop\": \"true\" if feature_id == startloop_feature_id else None\n",
" })\n",
" connected_objects.append(\n",
" {\n",
" \"id\": feature_id,\n",
" \"type\": feature[\"properties\"][\"type\"],\n",
" \"name\": feature[\"properties\"].get(\"name\", \"\"),\n",
" \"start_loop\": \"true\" if feature_id == startloop_feature_id else None,\n",
" }\n",
" )\n",
"\n",
" return connected_objects\n",
"\n",
"\n",
"def reorder_connected_features(features):\n",
" while features[0].get(\"start_loop\") != \"true\":\n",
" features.append(features.pop(0))\n",
" return features\n"
" return features"
]
},
{
@@ -135,7 +138,7 @@
"metadata": {},
"outputs": [],
"source": [
"connected_features = get_connected_features(geojson_data2)\n"
"connected_features = get_connected_features(geojson_data2)"
]
},
{
@@ -159,7 +162,7 @@
"source": [
"connected_features = get_connected_features(geojson_data2)\n",
"for feature in connected_features:\n",
" print(feature)\n"
" print(feature)"
]
},
{
@@ -169,7 +172,7 @@
"metadata": {},
"outputs": [],
"source": [
"reordered_features = reorder_connected_features(connected_features)\n"
"reordered_features = reorder_connected_features(connected_features)"
]
},
{
@@ -200,7 +203,7 @@
}
],
"source": [
"reordered_features\n"
"reordered_features"
]
},
{
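The reorder_connected_features helper in the notebook above rotates the feature list until the entry flagged as the loop start comes first. A tiny standalone example of that behavior, with the function body copied from the cell above and three made-up features for illustration:

def reorder_connected_features(features):
    # Rotate the list until the start-loop feature sits at index 0.
    while features[0].get("start_loop") != "true":
        features.append(features.pop(0))
    return features


features = [
    {"id": "b1", "start_loop": None},
    {"id": "ghe1", "start_loop": "true"},
    {"id": "b2", "start_loop": None},
]
print(reorder_connected_features(features))  # loop order: ghe1, b2, b1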
