Skip to content

Commit

Permalink
Code readability improvements
Browse files Browse the repository at this point in the history
  • Loading branch information
dummylabs committed Jul 3, 2024
1 parent 410bab9 commit 023cac8
Show file tree
Hide file tree
Showing 8 changed files with 192 additions and 108 deletions.
47 changes: 28 additions & 19 deletions custom_components/watchman/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,13 @@
CONF_TEST_MODE,
EVENT_AUTOMATION_RELOADED,
EVENT_SCENE_RELOADED,
HASS_DATA_CANCEL_HANDLERS,
HASS_DATA_COORDINATOR,
HASS_DATA_FILES_IGNORED,
HASS_DATA_FILES_PARSED,
HASS_DATA_PARSE_DURATION,
HASS_DATA_PARSED_ENTITY_LIST,
HASS_DATA_PARSED_SERVICE_LIST,
TRACKED_EVENT_DOMAINS,
MONITORED_STATES,
PLATFORMS,
Expand Down Expand Up @@ -122,7 +129,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
raise ConfigEntryNotReady

hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator
hass.data[DOMAIN]["coordinator"] = coordinator
hass.data[DOMAIN][HASS_DATA_COORDINATOR] = coordinator
hass.data[DOMAIN_DATA] = entry.options # TODO: refactor

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
Expand All @@ -148,7 +155,7 @@ async def update_listener(hass: HomeAssistant, entry: ConfigEntry):

async def async_unload_entry(hass: HomeAssistant, config_entry): # pylint: disable=unused-argument
"""Handle integration unload"""
for cancel_handle in hass.data[DOMAIN].get("cancel_handlers", []):
for cancel_handle in hass.data[DOMAIN].get(HASS_DATA_CANCEL_HANDLERS, []):
if cancel_handle:
cancel_handle()

Expand Down Expand Up @@ -256,7 +263,7 @@ async def async_schedule_refresh_states(hass, delay):
async def async_delayed_refresh_states(timedate):  # pylint: disable=unused-argument
    """Refresh the watchman sensors via the coordinator.

    NOTE: parse_config must have been invoked beforehand so the parsed
    entity/service lists are already present in hass.data.
    """
    await hass.data[DOMAIN][HASS_DATA_COORDINATOR].async_refresh()

async def async_on_home_assistant_started(event): # pylint: disable=unused-argument
Expand All @@ -274,19 +281,19 @@ async def async_on_configuration_changed(event):
"reload",
]:
await parse_config(hass, reason="configuration changes")
coordinator = hass.data[DOMAIN]["coordinator"]
coordinator = hass.data[DOMAIN][HASS_DATA_COORDINATOR]
await coordinator.async_refresh()

elif typ in [EVENT_AUTOMATION_RELOADED, EVENT_SCENE_RELOADED]:
await parse_config(hass, reason="configuration changes")
coordinator = hass.data[DOMAIN]["coordinator"]
coordinator = hass.data[DOMAIN][HASS_DATA_COORDINATOR]
await coordinator.async_refresh()

async def async_on_service_changed(event):
    """Trigger a sensor refresh when a monitored service is registered/removed.

    Only services that appear in the parsed service list (collected by
    parse_config) cause a coordinator refresh; everything else is ignored.
    """
    changed_service = f"{event.data['domain']}.{event.data['service']}"
    monitored = hass.data[DOMAIN].get(HASS_DATA_PARSED_SERVICE_LIST, [])
    if changed_service not in monitored:
        return
    _LOGGER.debug("Monitored service changed: %s", changed_service)
    await hass.data[DOMAIN][HASS_DATA_COORDINATOR].async_refresh()

async def async_on_state_changed(event):
Expand All @@ -296,14 +303,16 @@ def state_or_missing(state_id):
"""return missing state if entity not found"""
return "missing" if not event.data[state_id] else event.data[state_id].state

if event.data["entity_id"] in hass.data[DOMAIN].get("entity_list", []):
if event.data["entity_id"] in hass.data[DOMAIN].get(
HASS_DATA_PARSED_ENTITY_LIST, []
):
ignored_states = get_config(hass, CONF_IGNORED_STATES, [])
old_state = state_or_missing("old_state")
new_state = state_or_missing("new_state")
checked_states = set(MONITORED_STATES) - set(ignored_states)
if new_state in checked_states or old_state in checked_states:
_LOGGER.debug("Monitored entity changed: %s", event.data["entity_id"])
coordinator = hass.data[DOMAIN]["coordinator"]
coordinator = hass.data[DOMAIN][HASS_DATA_COORDINATOR]
await coordinator.async_refresh()

# hass is not started yet, schedule config parsing once it loaded
Expand All @@ -327,7 +336,7 @@ def state_or_missing(state_id):
)
hdlr.append(hass.bus.async_listen(EVENT_SERVICE_REMOVED, async_on_service_changed))
hdlr.append(hass.bus.async_listen(EVENT_STATE_CHANGED, async_on_state_changed))
hass.data[DOMAIN]["cancel_handlers"] = hdlr
hass.data[DOMAIN][HASS_DATA_CANCEL_HANDLERS] = hdlr


async def parse_config(hass: HomeAssistant, reason=None):
Expand All @@ -337,19 +346,19 @@ async def parse_config(hass: HomeAssistant, reason=None):
included_folders = get_included_folders(hass)
ignored_files = hass.data[DOMAIN_DATA].get(CONF_IGNORED_FILES, None)

entity_list, service_list, files_parsed, files_ignored = await parse(
parsed_entity_list, parsed_service_list, files_parsed, files_ignored = await parse(
hass, included_folders, ignored_files, hass.config.config_dir
)
hass.data[DOMAIN]["entity_list"] = entity_list
hass.data[DOMAIN]["service_list"] = service_list
hass.data[DOMAIN]["files_parsed"] = files_parsed
hass.data[DOMAIN]["files_ignored"] = files_ignored
hass.data[DOMAIN]["parse_duration"] = time.time() - start_time
hass.data[DOMAIN][HASS_DATA_PARSED_ENTITY_LIST] = parsed_entity_list
hass.data[DOMAIN][HASS_DATA_PARSED_SERVICE_LIST] = parsed_service_list
hass.data[DOMAIN][HASS_DATA_FILES_PARSED] = files_parsed
hass.data[DOMAIN][HASS_DATA_FILES_IGNORED] = files_ignored
hass.data[DOMAIN][HASS_DATA_PARSE_DURATION] = time.time() - start_time
_LOGGER.info(
"%s files parsed and %s files ignored in %.2fs. due to %s",
files_parsed,
files_ignored,
hass.data[DOMAIN]["parse_duration"],
hass.data[DOMAIN][HASS_DATA_PARSE_DURATION],
reason,
)

Expand All @@ -375,7 +384,7 @@ def get_included_folders(hass):

async def async_report_to_file(hass, path, test_mode):
"""save report to a file"""
coordinator = hass.data[DOMAIN]["coordinator"]
coordinator = hass.data[DOMAIN][HASS_DATA_COORDINATOR]
await coordinator.async_refresh()
report_chunks = await report(
hass, table_renderer, chunk_size=0, test_mode=test_mode
Expand Down Expand Up @@ -415,7 +424,7 @@ async def async_report_to_notification(hass, service_str, service_data, chunk_si

data = {} if service_data is None else json.loads(service_data)

coordinator = hass.data[DOMAIN]["coordinator"]
coordinator = hass.data[DOMAIN][HASS_DATA_COORDINATOR]
await coordinator.async_refresh()
report_chunks = await report(hass, text_renderer, chunk_size)
for chunk in report_chunks:
Expand Down
20 changes: 20 additions & 0 deletions custom_components/watchman/const.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,26 @@
DEFAULT_HEADER = "-== WATCHMAN REPORT ==- "
DEFAULT_CHUNK_SIZE = 3500

# Keys for values stored in hass.data[DOMAIN]; written by parse_config()
# and by the coordinator's update cycle.
HASS_DATA_PARSED_ENTITY_LIST = "entity_list"
HASS_DATA_PARSED_SERVICE_LIST = "service_list"
HASS_DATA_FILES_PARSED = "files_parsed"
HASS_DATA_FILES_IGNORED = "files_ignored"
HASS_DATA_PARSE_DURATION = "parse_duration"
HASS_DATA_CANCEL_HANDLERS = "cancel_handlers"
HASS_DATA_COORDINATOR = "coordinator"
HASS_DATA_MISSING_ENTITIES = "entities_missing"
HASS_DATA_MISSING_SERVICES = "services_missing"
HASS_DATA_CHECK_DURATION = "check_duration"

# Keys for the dict the coordinator exposes via coordinator.data;
# consumed by the sensor entities.
COORD_DATA_MISSING_ENTITIES = "entities_missing"
COORD_DATA_MISSING_SERVICES = "services_missing"
COORD_DATA_LAST_UPDATE = "last_update"
COORD_DATA_SERVICE_ATTRS = "service_attrs"
COORD_DATA_ENTITY_ATTRS = "entity_attrs"

# Report entry types — presumably used to select which parsed list a
# report section renders; TODO(review): confirm against report code.
REPORT_ENTRY_TYPE_SERVICE = "service_list"
REPORT_ENTRY_TYPE_ENTITY = "entity_list"

CONF_IGNORED_FILES = "ignored_files"
CONF_HEADER = "report_header"
CONF_REPORT_PATH = "report_path"
Expand Down
38 changes: 25 additions & 13 deletions custom_components/watchman/coordinator.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,19 @@
import time
from homeassistant.util import dt as dt_util
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .const import DOMAIN
from .const import (
COORD_DATA_ENTITY_ATTRS,
COORD_DATA_LAST_UPDATE,
COORD_DATA_MISSING_ENTITIES,
COORD_DATA_MISSING_SERVICES,
COORD_DATA_SERVICE_ATTRS,
DOMAIN,
HASS_DATA_CHECK_DURATION,
HASS_DATA_MISSING_ENTITIES,
HASS_DATA_MISSING_SERVICES,
HASS_DATA_PARSED_ENTITY_LIST,
HASS_DATA_PARSED_SERVICE_LIST,
)
from .utils import check_entitites, check_services, get_entity_state, fill


Expand All @@ -29,38 +41,38 @@ async def _async_update_data(self) -> None:
start_time = time.time()
services_missing = check_services(self.hass)
entities_missing = check_entitites(self.hass)
self.hass.data[DOMAIN]["check_duration"] = time.time() - start_time
self.hass.data[DOMAIN]["entities_missing"] = entities_missing
self.hass.data[DOMAIN]["services_missing"] = services_missing
self.hass.data[DOMAIN][HASS_DATA_CHECK_DURATION] = time.time() - start_time
self.hass.data[DOMAIN][HASS_DATA_MISSING_ENTITIES] = entities_missing
self.hass.data[DOMAIN][HASS_DATA_MISSING_SERVICES] = services_missing

# build entity attributes map for missing_entities sensor
entity_attrs = []
entity_list = self.hass.data[DOMAIN]["entity_list"]
parsed_entity_list = self.hass.data[DOMAIN][HASS_DATA_PARSED_ENTITY_LIST]
for entity in entities_missing:
state, name = get_entity_state(self.hass, entity, friendly_names=True)
entity_attrs.append(
{
"id": entity,
"state": state,
"friendly_name": name or "",
"occurrences": fill(entity_list[entity], 0),
"occurrences": fill(parsed_entity_list[entity], 0),
}
)

# build service attributes map for missing_services sensor
service_attrs = []
service_list = self.hass.data[DOMAIN]["service_list"]
parsed_service_list = self.hass.data[DOMAIN][HASS_DATA_PARSED_SERVICE_LIST]
for service in services_missing:
service_attrs.append(
{"id": service, "occurrences": fill(service_list[service], 0)}
{"id": service, "occurrences": fill(parsed_service_list[service], 0)}
)

self.data = {
"entities_missing": len(entities_missing),
"services_missing": len(services_missing),
"last_update": dt_util.now(),
"service_attrs": service_attrs,
"entity_attrs": entity_attrs,
COORD_DATA_MISSING_ENTITIES: len(entities_missing),
COORD_DATA_MISSING_SERVICES: len(services_missing),
COORD_DATA_LAST_UPDATE: dt_util.now(),
COORD_DATA_SERVICE_ATTRS: service_attrs,
COORD_DATA_ENTITY_ATTRS: entity_attrs,
}

_LOGGER.debug("Watchman sensors updated")
Expand Down
25 changes: 15 additions & 10 deletions custom_components/watchman/sensor.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,11 @@
from .entity import WatchmanEntity

from .const import (
COORD_DATA_ENTITY_ATTRS,
COORD_DATA_LAST_UPDATE,
COORD_DATA_MISSING_ENTITIES,
COORD_DATA_MISSING_SERVICES,
COORD_DATA_SERVICE_ATTRS,
DOMAIN,
SENSOR_LAST_UPDATE,
SENSOR_MISSING_ENTITIES,
Expand Down Expand Up @@ -71,15 +76,15 @@ def should_poll(self) -> bool:
def native_value(self):
"""Return the native value of the sensor."""
if self.coordinator.data:
return self.coordinator.data["last_update"]
return self.coordinator.data[COORD_DATA_LAST_UPDATE]
else:
return self._attr_native_value

@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if self.coordinator.data:
self._attr_native_value = self.coordinator.data["last_update"]
self._attr_native_value = self.coordinator.data[COORD_DATA_LAST_UPDATE]
self.async_write_ha_state()
super()._handle_coordinator_update()

Expand All @@ -100,25 +105,25 @@ def should_poll(self) -> bool:
def native_value(self):
"""Return the native value of the sensor."""
if self.coordinator.data:
return self.coordinator.data["entities_missing"]
return self.coordinator.data[COORD_DATA_MISSING_ENTITIES]
else:
return self._attr_native_value

@property
def extra_state_attributes(self):
"""Return the state attributes."""
if self.coordinator.data:
return {"entities": self.coordinator.data["entity_attrs"]}
return {"entities": self.coordinator.data[COORD_DATA_ENTITY_ATTRS]}
else:
return {}

@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if self.coordinator.data:
self._attr_native_value = self.coordinator.data["entities_missing"]
self._attr_native_value = self.coordinator.data[COORD_DATA_MISSING_ENTITIES]
self._attr_extra_state_attributes = {
"entities": self.coordinator.data["entity_attrs"]
"entities": self.coordinator.data[COORD_DATA_ENTITY_ATTRS]
}
self.async_write_ha_state()
super()._handle_coordinator_update()
Expand All @@ -140,25 +145,25 @@ def should_poll(self) -> bool:
def native_value(self):
"""Return the native value of the sensor."""
if self.coordinator.data:
return self.coordinator.data["services_missing"]
return self.coordinator.data[COORD_DATA_MISSING_SERVICES]
else:
return self._attr_native_value

@property
def extra_state_attributes(self):
"""Return the state attributes."""
if self.coordinator.data:
return {"entities": self.coordinator.data["service_attrs"]}
return {"entities": self.coordinator.data[COORD_DATA_SERVICE_ATTRS]}
else:
return {}

@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
if self.coordinator.data:
self._attr_native_value = self.coordinator.data["services_missing"]
self._attr_native_value = self.coordinator.data[COORD_DATA_MISSING_SERVICES]
self._attr_extra_state_attributes = {
"services": self.coordinator.data["service_attrs"]
"services": self.coordinator.data[COORD_DATA_SERVICE_ATTRS]
}
self.async_write_ha_state()
super()._handle_coordinator_update()
Loading

0 comments on commit 023cac8

Please sign in to comment.