
Commit

Improve test coverage
autoSteve committed Jan 29, 2025
1 parent 6d641d3 commit 7d96329
Showing 2 changed files with 36 additions and 14 deletions.
17 changes: 7 additions & 10 deletions custom_components/solcast_solar/solcastapi.py
@@ -2157,7 +2157,10 @@ def next_update():
         async def set_metadata_and_serialise(data):
             data["last_updated"] = dt.now(datetime.UTC).replace(microsecond=0)
             data["last_attempt"] = last_attempt
-            data["auto_updated"] = self.auto_update_divisions if self.options.auto_update > 0 else 0
+            # Set to the division count when auto update is enabled and not forced, to 99999 when forced, otherwise zero.
+            data["auto_updated"] = (
+                self.auto_update_divisions if self.options.auto_update > 0 and not force else 0 if not force else 99999
+            )
             return await self.serialise_data(data, self._filename if data == self._data else self._filename_undampened)
 
         s_status = await set_metadata_and_serialise(self._data)
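
The chained conditional above packs three outcomes into one expression. A minimal standalone sketch of the same decision with explicit branches (the helper and argument names are illustrative, not part of the integration):

```python
def auto_updated_value(auto_update: int, force: bool, divisions: int) -> int:
    """Unpack the chained conditional into explicit branches (illustrative helper)."""
    if force:
        return 99999  # Sentinel marking a forced update.
    if auto_update > 0:
        return divisions  # Auto update enabled: record the division count.
    return 0  # Auto update disabled.

# The three outcomes match the ternary in set_metadata_and_serialise:
assert auto_updated_value(auto_update=1, force=True, divisions=10) == 99999
assert auto_updated_value(auto_update=1, force=False, divisions=10) == 10
assert auto_updated_value(auto_update=0, force=False, divisions=10) == 0
```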
@@ -2357,10 +2360,7 @@ async def __http_data_call(
                 )
                 await self.tasks["fetch"]
             finally:
-                if self.tasks.get("fetch") is not None:
-                    response = self.tasks.pop("fetch").result()
-                else:
-                    response = None
+                response = self.tasks.pop("fetch").result() if self.tasks.get("fetch") is not None else None
             if not isinstance(response, dict):
                 _LOGGER.error(
                     "No valid data was returned for estimated_actuals so this will cause issues (API limit may be exhausted, or Solcast might have a problem)"
@@ -2403,10 +2403,7 @@ async def __http_data_call(
                 )
                 await self.tasks["fetch"]
             finally:
-                if self.tasks.get("fetch") is not None:
-                    response = self.tasks.pop("fetch").result()
-                else:
-                    response = None
+                response = self.tasks.pop("fetch").result() if self.tasks.get("fetch") is not None else None
             if response is None:
                 _LOGGER.error("No data was returned for forecasts")
                 return DataCallStatus.FAIL, "No data returned for forecasts"
@@ -2908,7 +2905,7 @@ async def build_data( # noqa: C901
                     }
                     site_data_forecasts[site] = sorted(site_forecasts.values(), key=itemgetter("period_start"))
                     if update_tally:
-                        rounded_tally = round(tally, 4)
+                        rounded_tally = round(tally, 4) if tally is not None else 0.0
                         if tally is not None:
                             siteinfo["tally"] = rounded_tally
                             self._tally[site] = rounded_tally
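The guard matters because `round(None, 4)` raises `TypeError`; previously the rounding ran unconditionally whenever `update_tally` was set, even though the result was only stored when `tally` was not `None`. A minimal sketch of the guarded form:

```python
from typing import Optional


def safe_rounded_tally(tally: Optional[float]) -> float:
    # Round only when a tally exists; default to 0.0 otherwise.
    return round(tally, 4) if tally is not None else 0.0


assert safe_rounded_tally(1.23456) == 1.2346
assert safe_rounded_tally(None) == 0.0
```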
33 changes: 29 additions & 4 deletions tests/test_integration.py
@@ -553,8 +553,11 @@ async def test_integration(
     # Test force, force abort because running and clear data actions
     await _exec_update(hass, solcast, caplog, "force_update_forecasts", wait=False)
     caplog.clear()
-    await _exec_update(hass, solcast, caplog, "force_update_forecasts", wait=False)  # Twice to cover abort
+    await _exec_update(hass, solcast, caplog, "force_update_forecasts", wait=False)  # Twice to cover abort of a forced update
     await _wait_for_abort(caplog)
+    await _exec_update(hass, solcast, caplog, "update_forecasts", wait=False)  # Thrice to cover abort of a normal update
+    await _wait_for_abort(caplog)
+    await hass.async_block_till_done()
     await _exec_update(hass, solcast, caplog, "clear_all_solcast_data")  # Will cancel active fetch
 
     # Test update within ten seconds of prior update
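
`_exec_update` and `_wait_for_abort` are project-local test helpers whose bodies are not part of this diff. As a rough idea of the shape of an abort-waiter, here is a hypothetical sketch that polls pytest's captured log until an abort message appears (the matched text and timeout are assumptions, not the real helper):

```python
import asyncio

import pytest


async def wait_for_abort_sketch(caplog: pytest.LogCaptureFixture, timeout: float = 5.0) -> None:
    """Poll captured log output until an abort message shows up (hypothetical sketch)."""
    async with asyncio.timeout(timeout):  # Python 3.11+
        while "abort" not in caplog.text.lower():  # Illustrative match; the real helper may differ.
            await asyncio.sleep(0.01)
```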
@@ -1006,7 +1009,7 @@ async def test_integration_scenarios(
     data_file = Path(f"{config_dir}/solcast.json")
     original_data = json.loads(data_file.read_text(encoding="utf-8"))
 
-    def alter_last_updated():
+    def alter_last_updated_as_stale():
         data = json.loads(data_file.read_text(encoding="utf-8"))
         data["last_updated"] = (dt.now(datetime.UTC) - timedelta(days=5)).isoformat()
         data["last_attempt"] = data["last_updated"]
@@ -1017,12 +1020,20 @@ def alter_last_updated():
         data_file.write_text(json.dumps(data), encoding="utf-8")
         session_reset_usage()
 
+    def alter_last_updated_as_fresh(last_update: str):
+        data = json.loads(data_file.read_text(encoding="utf-8"))
+        data["last_updated"] = dt.now(datetime.UTC).isoformat()
+        data["last_updated"] = data["last_updated"].split("T")[0] + "T" + last_update + "+10:00"
+        data["last_attempt"] = data["last_updated"]
+        data["auto_updated"] = 10
+        data_file.write_text(json.dumps(data), encoding="utf-8")
+
     def restore_data():
         data_file.write_text(json.dumps(original_data), encoding="utf-8")
 
     # Test stale start with auto update enabled
     _LOGGER.debug("Testing stale start with auto update enabled")
-    alter_last_updated()
+    alter_last_updated_as_stale()
     coordinator, solcast = await _reload(hass, entry)
     await _wait_for_update(caplog)
     assert "is older than expected, should be" in caplog.text
@@ -1037,13 +1048,27 @@ def restore_data():
     opt[AUTO_UPDATE] = 0
     hass.config_entries.async_update_entry(entry, options=opt)
     await hass.async_block_till_done()
-    alter_last_updated()
+    alter_last_updated_as_stale()
     coordinator, solcast = await _reload(hass, entry)
     _no_exception(caplog)
     caplog.clear()
 
     restore_data()
 
+    # Re-load integration, test forecast is fresh
+    opt = {**entry.options}
+    opt[AUTO_UPDATE] = 1
+    hass.config_entries.async_update_entry(entry, options=opt)
+    await hass.async_block_till_done()
+    last_update = ""
+    for line in caplog.messages:
+        if line.startswith("Previous auto update would have been at "):
+            last_update = line[-8:]
+            break
+    alter_last_updated_as_fresh(last_update)
+    coordinator, solcast = await _reload(hass, entry)
+    assert "Auto update forecast is fresh" in caplog.text
+
     # Test API key change, start with an API failure and invalid sites cache
     # Verify API key change removes sites, and migrates undampened history for new site
     _LOGGER.debug("Testing API key change")
