1 """Helpers for listening to events."""
3 from __future__
import annotations
6 from collections
import defaultdict
7 from collections.abc
import Callable, Coroutine, Iterable, Mapping, Sequence
9 from dataclasses
import dataclass
10 from datetime
import datetime, timedelta
11 from functools
import partial, wraps
13 from random
import randint
15 from typing
import TYPE_CHECKING, Any, Concatenate, Generic, TypeVar
18 EVENT_CORE_CONFIG_UPDATE,
29 EventStateChangedData
as EventStateChangedData,
31 EventStateReportedData,
47 from .device_registry
import (
48 EVENT_DEVICE_REGISTRY_UPDATED,
49 EventDeviceRegistryUpdatedData,
51 from .entity_registry
import (
52 EVENT_ENTITY_REGISTRY_UPDATED,
53 EventEntityRegistryUpdatedData,
55 from .ratelimit
import KeyedRateLimit
56 from .sun
import get_astral_event_next
57 from .template
import RenderInfo, Template, result_as_boolean
58 from .typing
import TemplateVarsType
# hass.data keys under which each keyed tracker stores its shared
# _KeyedEventData (one bus listener + per-key callback registry).
_TRACK_STATE_CHANGE_DATA: HassKey[_KeyedEventData[EventStateChangedData]] = HassKey(
    "track_state_change_data"
)
_TRACK_STATE_REPORT_DATA: HassKey[_KeyedEventData[EventStateReportedData]] = HassKey(
    "track_state_report_data"
)
_TRACK_STATE_ADDED_DOMAIN_DATA: HassKey[_KeyedEventData[EventStateChangedData]] = (
    HassKey("track_state_added_domain_data")
)
_TRACK_STATE_REMOVED_DOMAIN_DATA: HassKey[_KeyedEventData[EventStateChangedData]] = (
    HassKey("track_state_removed_domain_data")
)
_TRACK_ENTITY_REGISTRY_UPDATED_DATA: HassKey[
    _KeyedEventData[EventEntityRegistryUpdatedData]
] = HassKey("track_entity_registry_updated_data")
_TRACK_DEVICE_REGISTRY_UPDATED_DATA: HassKey[
    _KeyedEventData[EventDeviceRegistryUpdatedData]
] = HassKey("track_device_registry_updated_data")

# Listener-registry keys used by the filtered state-change tracker.
# NOTE(review): the _ALL_LISTENER definition was lost from this chunk but the
# name is referenced alongside the two below — confirm its value ("all")
# against the original file.
_ALL_LISTENER = "all"
_DOMAINS_LISTENER = "domains"
_ENTITIES_LISTENER = "entities"

_LOGGER = logging.getLogger(__name__)

# Microsecond jitter bounds — presumably used to spread scheduled time
# callbacks so they do not all fire at the same instant; TODO confirm usage
# (the consuming code is not visible in this chunk).
RANDOM_MICROSECOND_MIN = 50000
RANDOM_MICROSECOND_MAX = 500000

# TypeVars for the generic keyed event tracking helpers below.
_TypedDictT = TypeVar("_TypedDictT", bound=Mapping[str, Any])
_StateEventDataT = TypeVar("_StateEventDataT", bound=EventStateEventData)
@dataclass(slots=True, frozen=True)
class _KeyedEventTracker(Generic[_TypedDictT]):
    """Class to track events by key."""

    # hass.data key under which the shared _KeyedEventData is stored.
    key: HassKey[_KeyedEventData[_TypedDictT]]
    # Bus event type this tracker subscribes to.
    event_type: EventType[_TypedDictT] | str
    # Routes a matched event to the jobs registered for its key.
    dispatcher_callable: Callable[
        [
            HomeAssistant,
            dict[str, list[HassJob[[Event[_TypedDictT]], Any]]],
            Event[_TypedDictT],
        ],
        None,
    ]
    # Pre-dispatch filter: True when some registered key matches the event.
    filter_callable: Callable[
        [
            HomeAssistant,
            dict[str, list[HassJob[[Event[_TypedDictT]], Any]]],
            _TypedDictT,
        ],
        bool,
    ]
@dataclass(slots=True, frozen=True)
class _KeyedEventData(Generic[_TypedDictT]):
    """Class to track data for events by key."""

    # Unsubscribe callback for the single shared bus listener.
    listener: CALLBACK_TYPE
    # Per-key registry of jobs to run when a matching event arrives.
    callbacks: defaultdict[str, list[HassJob[[Event[_TypedDictT]], Any]]]
128 @dataclass(slots=True)
130 """Class for keeping track of states being tracked.
132 all_states: All states on the system are being tracked
133 entities: Lowercased entities to track
134 domains: Lowercased domains to track
142 @dataclass(slots=True)
144 """Class for keeping track of a template with variables.
146 The template is template to calculate.
147 The variables are variables to pass to the template.
148 The rate_limit is a rate limit on how often the template is re-rendered.
152 variables: TemplateVarsType
153 rate_limit: float |
None =
None
156 @dataclass(slots=True)
158 """Class for result of template tracking.
161 The template that has changed.
163 The output from the template on the last successful run, or None
164 if no previous successful run.
166 Result from the template run. This will be a string or an
167 TemplateError if the template resulted in an error.
175 def threaded_listener_factory[**_P](
176 async_factory: Callable[Concatenate[HomeAssistant, _P], Any],
177 ) -> Callable[Concatenate[HomeAssistant, _P], CALLBACK_TYPE]:
178 """Convert an async event helper to a threaded one."""
180 @wraps(async_factory)
182 hass: HomeAssistant, *args: _P.args, **kwargs: _P.kwargs
184 """Call async event helper safely."""
185 if not isinstance(hass, HomeAssistant):
186 raise TypeError(
"First parameter needs to be a hass instance")
188 async_remove = run_callback_threadsafe(
189 hass.loop, partial(async_factory, hass, *args, **kwargs)
193 """Threadsafe removal."""
194 run_callback_threadsafe(hass.loop, async_remove).result()
205 entity_ids: str | Iterable[str],
207 [str, State |
None, State |
None], Coroutine[Any, Any,
None] |
None
209 from_state: str | Iterable[str] |
None =
None,
210 to_state: str | Iterable[str] |
None =
None,
212 """Track specific state changes.
214 entity_ids, from_state and to_state can be string or list.
215 Use list to match multiple.
217 Returns a function that can be called to remove the listener.
219 If entity_ids are not MATCH_ALL along with from_state and to_state
220 being None, async_track_state_change_event should be used instead
221 as it is slightly faster.
223 This function is deprecated and will be removed in Home Assistant 2025.5.
225 Must be run within the event loop.
228 "calls `async_track_state_change` instead of `async_track_state_change_event`"
229 " which is deprecated and will be removed in Home Assistant 2025.5",
230 core_behavior=frame.ReportBehavior.LOG,
233 if from_state
is not None:
235 if to_state
is not None:
239 if entity_ids == MATCH_ALL:
241 elif isinstance(entity_ids, str):
242 entity_ids = (entity_ids.lower(),)
244 entity_ids =
tuple(entity_id.lower()
for entity_id
in entity_ids)
246 job = HassJob(action, f
"track state change {entity_ids} {from_state} {to_state}")
249 def state_change_filter(event_data: EventStateChangedData) -> bool:
250 """Handle specific state changes."""
251 if from_state
is not None:
252 old_state_str: str |
None =
None
253 if (old_state := event_data[
"old_state"])
is not None:
254 old_state_str = old_state.state
256 if not match_from_state(old_state_str):
259 if to_state
is not None:
260 new_state_str: str |
None =
None
261 if (new_state := event_data[
"new_state"])
is not None:
262 new_state_str = new_state.state
264 if not match_to_state(new_state_str):
270 def state_change_dispatcher(event: Event[EventStateChangedData]) ->
None:
271 """Handle specific state changes."""
272 hass.async_run_hass_job(
274 event.data[
"entity_id"],
275 event.data[
"old_state"],
276 event.data[
"new_state"],
280 def state_change_listener(event: Event[EventStateChangedData]) ->
None:
281 """Handle specific state changes."""
282 if not state_change_filter(event.data):
285 state_change_dispatcher(event)
287 if entity_ids != MATCH_ALL:
298 return hass.bus.async_listen(
300 state_change_dispatcher,
301 event_filter=state_change_filter,
# Synchronous (thread-safe) wrapper around async_track_state_change.
track_state_change = threaded_listener_factory(async_track_state_change)
311 entity_ids: str | Iterable[str],
312 action: Callable[[Event[EventStateChangedData]], Any],
313 job_type: HassJobType |
None =
None,
315 """Track specific state change events indexed by entity_id.
317 Unlike async_track_state_change, async_track_state_change_event
318 passes the full event to the callback.
320 In order to avoid having to iterate a long list
321 of EVENT_STATE_CHANGED and fire and create a job
322 for each one, we keep a dict of entity ids that
323 care about the state change events so we can
324 do a fast dict lookup to route events.
325 The passed in entity_ids will be automatically lower cased.
327 EVENT_STATE_CHANGED is fired on each occasion the state is updated
328 and changed, opposite of EVENT_STATE_REPORTED.
331 return _remove_empty_listener
def _async_dispatch_entity_id_event_soon(
    hass: HomeAssistant,
    callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]],
    event: Event[_StateEventDataT],
) -> None:
    """Dispatch to listeners soon to ensure one event loop runs before dispatch."""
    hass.loop.call_soon(_async_dispatch_entity_id_event, hass, callbacks, event)
348 callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]],
349 event: Event[_StateEventDataT],
351 """Dispatch to listeners."""
352 if not (callbacks_list := callbacks.get(event.data[
"entity_id"])):
354 for job
in callbacks_list.copy():
356 hass.async_run_hass_job(job, event)
359 "Error while dispatching event for %s to %s",
360 event.data[
"entity_id"],
368 callbacks: dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]],
369 event_data: _StateEventDataT,
371 """Filter state changes by entity_id."""
372 return event_data[
"entity_id"]
in callbacks
# Shared tracker config for EVENT_STATE_CHANGED keyed by entity_id; dispatch
# is deferred one loop iteration via the *_soon dispatcher.
_KEYED_TRACK_STATE_CHANGE = _KeyedEventTracker(
    key=_TRACK_STATE_CHANGE_DATA,
    event_type=EVENT_STATE_CHANGED,
    dispatcher_callable=_async_dispatch_entity_id_event_soon,
    filter_callable=_async_state_filter,
)
386 entity_ids: str | Iterable[str],
387 action: Callable[[Event[EventStateChangedData]], Any],
388 job_type: HassJobType |
None,
390 """Faster version of async_track_state_change_event.
392 The passed in entity_ids will not be automatically lower cased.
395 _KEYED_TRACK_STATE_CHANGE, hass, entity_ids, action, job_type
# Shared tracker config for EVENT_STATE_REPORTED keyed by entity_id.
_KEYED_TRACK_STATE_REPORT = _KeyedEventTracker(
    key=_TRACK_STATE_REPORT_DATA,
    event_type=EVENT_STATE_REPORTED,
    dispatcher_callable=_async_dispatch_entity_id_event,
    filter_callable=_async_state_filter,
)
409 entity_ids: str | Iterable[str],
410 action: Callable[[Event[EventStateReportedData]], Any],
411 job_type: HassJobType |
None =
None,
413 """Track EVENT_STATE_REPORTED by entity_ids.
415 EVENT_STATE_REPORTED is fired on each occasion the state is updated
416 but not changed, opposite of EVENT_STATE_CHANGED.
419 _KEYED_TRACK_STATE_REPORT, hass, entity_ids, action, job_type
425 """Remove a listener that does nothing."""
431 tracker: _KeyedEventTracker[_TypedDictT],
433 job: HassJob[[Event[_TypedDictT]], Any],
434 callbacks: dict[str, list[HassJob[[Event[_TypedDictT]], Any]]],
436 """Remove listener."""
438 callbacks[key].
remove(job)
439 if not callbacks[key]:
443 hass.data.pop(tracker.key).listener()
449 tracker: _KeyedEventTracker[_TypedDictT],
451 keys: str | Iterable[str],
452 action: Callable[[Event[_TypedDictT]],
None],
453 job_type: HassJobType |
None,
455 """Track an event by a specific key.
457 This function is intended for internal use only.
460 return _remove_empty_listener
462 hass_data = hass.data
463 tracker_key = tracker.key
464 if tracker_key
in hass_data:
465 event_data = hass_data[tracker_key]
466 callbacks = event_data.callbacks
468 callbacks = defaultdict(list)
469 listener = hass.bus.async_listen(
471 partial(tracker.dispatcher_callable, hass, callbacks),
472 event_filter=partial(tracker.filter_callable, hass, callbacks),
475 hass_data[tracker_key] = event_data
477 job = HassJob(action, f
"track {tracker.event_type} event {keys}", job_type=job_type)
479 if isinstance(keys, str):
485 callbacks[keys].append(job)
489 callbacks[key].append(job)
491 return partial(_remove_listener, hass, tracker, keys, job, callbacks)
497 callbacks: dict[str, list[HassJob[[Event[EventEntityRegistryUpdatedData]], Any]]],
498 event: Event[EventEntityRegistryUpdatedData],
500 """Dispatch to listeners."""
502 callbacks_list := callbacks.get(
503 event.data.get(
"old_entity_id", event.data[
"entity_id"])
507 for job
in callbacks_list.copy():
509 hass.async_run_hass_job(job, event)
512 "Error while dispatching event for %s to %s",
513 event.data.get(
"old_entity_id", event.data[
"entity_id"]),
521 callbacks: dict[str, list[HassJob[[Event[EventEntityRegistryUpdatedData]], Any]]],
522 event_data: EventEntityRegistryUpdatedData,
524 """Filter entity registry updates by entity_id."""
525 return event_data.get(
"old_entity_id", event_data[
"entity_id"])
in callbacks
# Shared tracker config for entity registry updates keyed by entity_id
# (old_entity_id preferred on renames).
_KEYED_TRACK_ENTITY_REGISTRY_UPDATED = _KeyedEventTracker(
    key=_TRACK_ENTITY_REGISTRY_UPDATED_DATA,
    event_type=EVENT_ENTITY_REGISTRY_UPDATED,
    dispatcher_callable=_async_dispatch_old_entity_id_or_entity_id_event,
    filter_callable=_async_entity_registry_updated_filter,
)
540 entity_ids: str | Iterable[str],
541 action: Callable[[Event[EventEntityRegistryUpdatedData]], Any],
542 job_type: HassJobType |
None =
None,
544 """Track specific entity registry updated events indexed by entity_id.
546 Entities must be lower case.
548 Similar to async_track_state_change_event.
551 _KEYED_TRACK_ENTITY_REGISTRY_UPDATED, hass, entity_ids, action, job_type
558 callbacks: dict[str, list[HassJob[[Event[EventDeviceRegistryUpdatedData]], Any]]],
559 event_data: EventDeviceRegistryUpdatedData,
561 """Filter device registry updates by device_id."""
562 return event_data[
"device_id"]
in callbacks
568 callbacks: dict[str, list[HassJob[[Event[EventDeviceRegistryUpdatedData]], Any]]],
569 event: Event[EventDeviceRegistryUpdatedData],
571 """Dispatch to listeners."""
572 if not (callbacks_list := callbacks.get(event.data[
"device_id"])):
574 for job
in callbacks_list.copy():
576 hass.async_run_hass_job(job, event)
579 "Error while dispatching event for %s to %s",
580 event.data[
"device_id"],
# Shared tracker config for device registry updates keyed by device_id.
_KEYED_TRACK_DEVICE_REGISTRY_UPDATED = _KeyedEventTracker(
    key=_TRACK_DEVICE_REGISTRY_UPDATED_DATA,
    event_type=EVENT_DEVICE_REGISTRY_UPDATED,
    dispatcher_callable=_async_dispatch_device_id_event,
    filter_callable=_async_device_registry_updated_filter,
)
596 device_ids: str | Iterable[str],
597 action: Callable[[Event[EventDeviceRegistryUpdatedData]], Any],
598 job_type: HassJobType |
None =
None,
600 """Track specific device registry updated events indexed by device_id.
602 Similar to async_track_entity_registry_updated_event.
605 _KEYED_TRACK_DEVICE_REGISTRY_UPDATED, hass, device_ids, action, job_type
def _async_dispatch_domain_event(
    hass: HomeAssistant,
    callbacks: dict[str, list[HassJob[[Event[EventStateChangedData]], Any]]],
    event: Event[EventStateChangedData],
) -> None:
    """Dispatch domain event listeners."""
    # NOTE(review): the line deriving `domain` was lost from this chunk; an
    # entity_id is "<domain>.<object_id>", so the prefix is the domain —
    # confirm the exact helper used against the original file.
    domain = event.data["entity_id"].partition(".")[0]
    for job in callbacks.get(domain, []) + callbacks.get(MATCH_ALL, []):
        try:
            hass.async_run_hass_job(job, event)
        except Exception:
            # Never let one bad listener stop dispatch to the rest.
            _LOGGER.exception(
                "Error while processing event %s for domain %s", event, domain
            )
def _async_domain_added_filter(
    hass: HomeAssistant,
    callbacks: dict[str, list[HassJob[[Event[EventStateChangedData]], Any]]],
    event_data: EventStateChangedData,
) -> bool:
    """Filter state changes by entity_id."""
    # old_state is None exactly when the entity was just added; new_state is
    # then the added entity's state.
    return event_data["old_state"] is None and (
        MATCH_ALL in callbacks
        or event_data["new_state"].domain in callbacks
    )
645 domains: str | Iterable[str],
646 action: Callable[[Event[EventStateChangedData]], Any],
647 job_type: HassJobType |
None =
None,
649 """Track state change events when an entity is added to domains."""
651 return _remove_empty_listener
# Shared tracker config for entity-added state changes keyed by domain.
_KEYED_TRACK_STATE_ADDED_DOMAIN = _KeyedEventTracker(
    key=_TRACK_STATE_ADDED_DOMAIN_DATA,
    event_type=EVENT_STATE_CHANGED,
    dispatcher_callable=_async_dispatch_domain_event,
    filter_callable=_async_domain_added_filter,
)
666 domains: str | Iterable[str],
667 action: Callable[[Event[EventStateChangedData]], Any],
668 job_type: HassJobType |
None,
670 """Track state change events when an entity is added to domains."""
672 _KEYED_TRACK_STATE_ADDED_DOMAIN, hass, domains, action, job_type
def _async_domain_removed_filter(
    hass: HomeAssistant,
    callbacks: dict[str, list[HassJob[[Event[EventStateChangedData]], Any]]],
    event_data: EventStateChangedData,
) -> bool:
    """Filter state changes by entity_id."""
    # new_state is None exactly when the entity was just removed; old_state
    # is then the removed entity's last state.
    return event_data["new_state"] is None and (
        MATCH_ALL in callbacks
        or event_data["old_state"].domain in callbacks
    )
# Shared tracker config for entity-removed state changes keyed by domain.
_KEYED_TRACK_STATE_REMOVED_DOMAIN = _KeyedEventTracker(
    key=_TRACK_STATE_REMOVED_DOMAIN_DATA,
    event_type=EVENT_STATE_CHANGED,
    dispatcher_callable=_async_dispatch_domain_event,
    filter_callable=_async_domain_removed_filter,
)
703 domains: str | Iterable[str],
704 action: Callable[[Event[EventStateChangedData]], Any],
705 job_type: HassJobType |
None =
None,
707 """Track state change events when an entity is removed from domains."""
709 _KEYED_TRACK_STATE_REMOVED_DOMAIN, hass, domains, action, job_type
715 if isinstance(instr, str):
716 return [instr.lower()]
718 return [mstr.lower()
for mstr
in instr]
722 """Handle removal / refresh of tracker."""
727 track_states: TrackStates,
728 action: Callable[[Event[EventStateChangedData]], Any],
730 """Handle removal / refresh of tracker init."""
734 action, f
"track state change filtered {track_states}"
736 self._listeners: dict[str, Callable[[],
None]] = {}
741 """Create listeners to track states."""
745 not track_states.all_states
746 and not track_states.domains
747 and not track_states.entities
751 if track_states.all_states:
760 """State changes that will cause a re-render."""
763 _ALL_LISTENER: track_states.all_states,
764 _ENTITIES_LISTENER: track_states.entities,
765 _DOMAINS_LISTENER: track_states.domains,
770 """Update the listeners based on the new TrackStates."""
774 had_all_listener = last_track_states.all_states
776 if new_track_states.all_states:
787 domains_changed = new_track_states.domains != last_track_states.domains
789 if had_all_listener
or domains_changed:
790 domains_changed =
True
797 or new_track_states.entities != last_track_states.entities
801 new_track_states.domains, new_track_states.entities
806 """Cancel the listeners."""
807 for key
in list(self._listeners):
808 self._listeners.pop(key)()
812 if listener_name
not in self._listeners:
815 self._listeners.pop(listener_name)()
820 entities = entities.copy()
821 entities.update(self.
hasshass.states.async_entity_ids(domains))
850 self._listeners[_ALL_LISTENER] = self.
hasshass.bus.async_listen(
851 EVENT_STATE_CHANGED, self.
_action_action
859 track_states: TrackStates,
860 action: Callable[[Event[EventStateChangedData]], Any],
861 ) -> _TrackStateChangeFiltered:
862 """Track state changes with a TrackStates filter that can be updated.
867 Home assistant object.
869 A TrackStates data class.
871 Callable to call with results.
875 Object used to update the listeners (async_update_listeners) with a new
876 TrackStates or cancel the tracking (async_remove).
880 tracker.async_setup()
890 [str, State |
None, State |
None], Coroutine[Any, Any,
None] |
None
892 variables: TemplateVarsType |
None =
None,
894 """Add a listener that fires when a template evaluates to 'true'.
896 Listen for the result of the template becoming true, or a true-like
897 string result, such as 'On', 'Open', or 'Yes'. If the template results
898 in an error state when the value changes, this will be logged and not
901 If the initial check of the template is invalid and results in an
902 exception, the listener will still be registered but will only
903 fire if the template result becomes true without an exception.
908 ID of the entity that triggered the state change.
910 The old state of the entity that changed.
912 New state of the entity that changed.
917 Home assistant object.
919 The template to calculate.
921 Callable to call with results. See above for arguments.
923 Variables to pass to the template.
927 Callable to unregister the listener.
930 job = HassJob(action, f
"track template {template}")
933 def _template_changed_listener(
934 event: Event[EventStateChangedData] |
None,
935 updates: list[TrackTemplateResult],
937 """Check if condition is correct and run action."""
938 track_result = updates.pop()
940 template = track_result.template
941 last_result = track_result.last_result
942 result = track_result.result
944 if isinstance(result, TemplateError):
946 "Error while processing template: %s",
953 not isinstance(last_result, TemplateError)
959 hass.async_run_hass_job(
961 event
and event.data[
"entity_id"],
962 event
and event.data[
"old_state"],
963 event
and event.data[
"new_state"],
967 hass, [
TrackTemplate(template, variables)], _template_changed_listener
970 return info.async_remove
# Synchronous (thread-safe) wrapper around async_track_template.
track_template = threaded_listener_factory(async_track_template)
977 """Handle removal / refresh of tracker."""
982 track_templates: Sequence[TrackTemplate],
983 action: TrackTemplateResultListener,
984 has_super_template: bool =
False,
986 """Handle removal / refresh of tracker init."""
988 self.
_job_job = HassJob(action, f
"track template result {track_templates}")
993 self._last_result: dict[Template, bool | str | TemplateError] = {}
995 for track_template_
in track_templates:
996 if track_template_.template.hass:
1000 "calls async_track_template_result with template without hass",
1001 core_behavior=frame.ReportBehavior.LOG,
1002 breaks_in_ha_version=
"2025.10",
1004 track_template_.template.hass = hass
1007 self._info: dict[Template, RenderInfo] = {}
1009 self._time_listeners: dict[Template, Callable[[],
None]] = {}
1012 """Return the representation."""
1013 return f
"<TrackTemplateResultInfo {self._info}>"
1017 strict: bool =
False,
1018 log_fn: Callable[[int, str],
None] |
None =
None,
1020 """Activation of template tracking."""
1021 block_render =
False
1025 if super_template
is not None:
1026 template = super_template.template
1027 variables = super_template.variables
1028 self._info[template] = info = template.async_render_to_info(
1029 variables, strict=strict, log_fn=log_fn
1034 super_result: str | TemplateError = info.result()
1035 except TemplateError
as ex:
1038 super_result
is not None
1045 if block_render
or track_template_ == super_template:
1047 template = track_template_.template
1048 variables = track_template_.variables
1049 self._info[template] = info = template.async_render_to_info(
1050 variables, strict=strict, log_fn=log_fn
1056 "Error while processing template: %s",
1057 track_template_.template,
1058 exc_info=info.exception,
1061 log_fn(logging.ERROR,
str(info.exception))
1069 "Template group %s listens for %s, first render blocked by super"
1079 """State changes that will cause a re-render."""
1083 "time": bool(self._time_listeners),
1089 if template
in self._time_listeners:
1091 self._time_listeners.pop(template)()
1094 if template
in self._time_listeners:
1100 if track_template_.template == template
1104 def _refresh_from_time(now: datetime) ->
None:
1105 self.
_refresh_refresh(
None, track_templates=track_templates)
1108 self.
hasshass, _refresh_from_time, second=0
1113 for template, info
in self._info.items():
1118 """Cancel the listener."""
1122 for template
in list(self._time_listeners):
1123 self._time_listeners.pop(template)()
1127 """Force recalculate the template."""
1132 track_template_: TrackTemplate,
1134 event: Event[EventStateChangedData] |
None,
1135 ) -> bool | TrackTemplateResult:
1136 """Re-render the template if conditions match.
1138 Returns False if the template was not re-rendered.
1140 Returns True if the template re-rendered and did not
1143 Returns TrackTemplateResult if the template re-render
1144 generates a new result.
1146 template = track_template_.template
1149 info = self._info[template]
1154 had_timer = self.
_rate_limit_rate_limit.async_has_timer(template)
1156 if self.
_rate_limit_rate_limit.async_schedule_action(
1165 return not had_timer
1168 "Template update %s triggered by event: %s",
1173 self.
_rate_limit_rate_limit.async_triggered(template, now)
1174 self._info[template] = info = template.async_render_to_info(
1175 track_template_.variables
1179 result: str | TemplateError = info.result()
1180 except TemplateError
as ex:
1183 last_result = self._last_result.
get(template)
1186 if result == last_result
and template
in self._last_result:
1189 if isinstance(result, TemplateError)
and isinstance(last_result, TemplateError):
1196 """Return True if the result is truthy or a TemplateError."""
1197 if isinstance(result, TemplateError):
1205 updates: list[TrackTemplateResult],
1206 update: bool | TrackTemplateResult,
1209 """Handle updates of a tracked template."""
1215 if isinstance(update, TrackTemplateResult):
1216 updates.append(update)
1223 event: Event[EventStateChangedData] |
None,
1224 track_templates: Iterable[TrackTemplate] |
None =
None,
1225 replayed: bool |
None =
False,
1227 """Refresh the template.
1229 The event is the state_changed event that caused the refresh
1232 track_templates is an optional list of TrackTemplate objects
1233 to refresh. If not provided, all tracked templates will be
1236 replayed is True if the event is being replayed because the
1239 updates: list[TrackTemplateResult] = []
1240 info_changed =
False
1241 now = event.time_fired_timestamp
if not replayed
and event
else time.time()
1243 block_updates =
False
1249 if super_template
is not None:
1251 info_changed |= self.
_apply_update_apply_update(updates, update, super_template.template)
1253 if isinstance(update, TrackTemplateResult):
1254 super_result = update.result
1256 super_result = self._last_result.
get(super_template.template)
1260 super_result
is not None
1263 block_updates =
True
1266 isinstance(update, TrackTemplateResult)
1276 if not block_updates:
1277 for track_template_
in track_templates:
1278 if track_template_ == super_template:
1283 updates, update, track_template_.template
1292 if self.
_rate_limit_rate_limit.async_has_timer(template)
1294 for template, info
in self._info.items()
1300 "Template group %s listens for %s, re-render blocked by super"
1311 for track_result
in updates:
1312 self._last_result[track_result.template] = track_result.result
1314 self.
hasshass.async_run_hass_job(self.
_job_job, event, updates)
1317 type TrackTemplateResultListener = Callable[
1319 Event[EventStateChangedData] |
None,
1320 list[TrackTemplateResult],
1322 Coroutine[Any, Any,
None] |
None,
1324 """Type for the listener for template results.
1329 Event that caused the template to change output. None if not
1330 triggered by an event.
1332 A list of TrackTemplateResult
1339 hass: HomeAssistant,
1340 track_templates: Sequence[TrackTemplate],
1341 action: TrackTemplateResultListener,
1342 strict: bool =
False,
1343 log_fn: Callable[[int, str],
None] |
None =
None,
1344 has_super_template: bool =
False,
1345 ) -> TrackTemplateResultInfo:
1346 """Add a listener that fires when the result of a template changes.
1348 The action will fire with the initial result from the template, and
1349 then whenever the output from the template changes. The template will
1350 be reevaluated if any states referenced in the last run of the
1351 template change, or if manually triggered. If the result of the
1352 evaluation is different from the previous run, the listener is passed
1355 If the template results in an TemplateError, this will be returned to
1356 the listener the first time this happens but not for subsequent errors.
1357 Once the template returns to a non-error condition the result is sent
1358 to the action as usual.
1363 Home assistant object.
1365 An iterable of TrackTemplate.
1367 Callable to call with results.
1369 When set to True, raise on undefined variables.
1371 If not None, template error messages will logging by calling log_fn
1372 instead of the normal logging facility.
1374 When set to True, the first template will block rendering of other
1375 templates if it doesn't render as True.
1379 Info object used to unregister the listener, and refresh the template.
1383 tracker.async_setup(strict=strict, log_fn=log_fn)
1390 hass: HomeAssistant,
1392 action: Callable[[], Coroutine[Any, Any,
None] |
None],
1393 async_check_same_func: Callable[[str, State |
None, State |
None], bool],
1394 entity_ids: str | Iterable[str] = MATCH_ALL,
1396 """Track the state of entities for a period and run an action.
1398 If async_check_func is None it use the state of orig_value.
1399 Without entity_ids we track all state changes.
1401 async_remove_state_for_cancel: CALLBACK_TYPE |
None =
None
1402 async_remove_state_for_listener: CALLBACK_TYPE |
None =
None
1404 job = HassJob(action, f
"track same state {period} {entity_ids}")
1407 def clear_listener() -> None:
1408 """Clear all unsub listener."""
1409 nonlocal async_remove_state_for_cancel, async_remove_state_for_listener
1411 if async_remove_state_for_listener
is not None:
1412 async_remove_state_for_listener()
1413 async_remove_state_for_listener =
None
1414 if async_remove_state_for_cancel
is not None:
1415 async_remove_state_for_cancel()
1416 async_remove_state_for_cancel =
None
1419 def state_for_listener(now: Any) ->
None:
1420 """Fire on state changes after a delay and calls action."""
1421 nonlocal async_remove_state_for_listener
1422 async_remove_state_for_listener =
None
1424 hass.async_run_hass_job(job)
1427 def state_for_cancel_listener(event: Event[EventStateChangedData]) ->
None:
1428 """Fire on changes and cancel for listener if changed."""
1429 entity = event.data[
"entity_id"]
1430 from_state = event.data[
"old_state"]
1431 to_state = event.data[
"new_state"]
1433 if not async_check_same_func(entity, from_state, to_state):
1436 async_remove_state_for_listener =
async_call_later(hass, period, state_for_listener)
1438 if entity_ids == MATCH_ALL:
1439 async_remove_state_for_cancel = hass.bus.async_listen(
1440 EVENT_STATE_CHANGED, state_for_cancel_listener
1446 state_for_cancel_listener,
1449 return clear_listener
# Synchronous (thread-safe) wrapper around async_track_same_state.
track_same_state = threaded_listener_factory(async_track_same_state)
1458 hass: HomeAssistant,
1459 action: HassJob[[datetime], Coroutine[Any, Any,
None] |
None]
1460 | Callable[[datetime], Coroutine[Any, Any,
None] |
None],
1461 point_in_time: datetime,
1463 """Add a listener that fires once at or after a specific point in time.
1465 The listener is passed the time it fires in local time.
1469 if isinstance(action, HassJob)
1470 else HassJob(action, f
"track point in time {point_in_time}")
1474 def utc_converter(utc_now: datetime) ->
None:
1475 """Convert passed in UTC now to local now."""
1476 hass.async_run_hass_job(job, dt_util.as_local(utc_now))
1478 track_job = HassJob(
1480 name=f
"{job.name} UTC converter",
1481 cancel_on_shutdown=job.cancel_on_shutdown,
1482 job_type=HassJobType.Callback,
# Synchronous (thread-safe) wrapper around async_track_point_in_time.
track_point_in_time = threaded_listener_factory(async_track_point_in_time)
1490 @dataclass(slots=True)
1493 job: HassJob[[datetime], Coroutine[Any, Any,
None] |
None]
1494 utc_point_in_time: datetime
1495 expected_fire_timestamp: float
1496 _cancel_callback: asyncio.TimerHandle |
None =
None
1499 """Initialize track job."""
1500 loop = self.hass.loop
1502 loop.time() + self.expected_fire_timestamp - time.time(), self
1509 We implement this as __call__ so when debug logging logs the object
1510 it shows the name of the job. This is especially helpful when asyncio
1511 debug logging is enabled as we can see the name of the job that is
1512 being called that is blocking the event loop.
1520 _LOGGER.debug(
"Called %f seconds too early, rearming", delta)
1521 loop = self.hass.loop
1522 self.
_cancel_callback_cancel_callback = loop.call_at(loop.time() + delta, self)
1525 self.hass.async_run_hass_job(self.job, self.utc_point_in_time)
1529 """Cancel the call_at."""
1538 hass: HomeAssistant,
1539 action: HassJob[[datetime], Coroutine[Any, Any,
None] |
None]
1540 | Callable[[datetime], Coroutine[Any, Any,
None] |
None],
1541 point_in_time: datetime,
1543 """Add a listener that fires once at or after a specific point in time.
1545 The listener is passed the time it fires in UTC time.
1548 utc_point_in_time = dt_util.as_utc(point_in_time)
1549 expected_fire_timestamp = utc_point_in_time.timestamp()
1552 if isinstance(action, HassJob)
1553 else HassJob(action, f
"track point in utc time {utc_point_in_time}")
1556 track.async_attach()
1557 return track.async_cancel
# Synchronous (thread-safe) wrapper around async_track_point_in_utc_time.
track_point_in_utc_time = threaded_listener_factory(async_track_point_in_utc_time)
1564 hass: HomeAssistant, job: HassJob[[datetime], Coroutine[Any, Any,
None] |
None]
1573 hass: HomeAssistant,
1574 action: HassJob[[datetime], Coroutine[Any, Any,
None] |
None]
1575 | Callable[[datetime], Coroutine[Any, Any,
None] |
None],
1578 """Add a listener that fires at or after <loop_time>.
1580 The listener is passed the time it fires in UTC time.
1584 if isinstance(action, HassJob)
1585 else HassJob(action, f
"call_at {loop_time}")
1587 return hass.loop.call_at(loop_time, _run_async_call_action, hass, job).cancel
1593 hass: HomeAssistant,
1594 delay: float | timedelta,
1595 action: HassJob[[datetime], Coroutine[Any, Any,
None] |
None]
1596 | Callable[[datetime], Coroutine[Any, Any,
None] |
None],
1598 """Add a listener that fires at or after <delay>.
1600 The listener is passed the time it fires in UTC time.
1602 if isinstance(delay, timedelta):
1603 delay = delay.total_seconds()
1606 if isinstance(action, HassJob)
1607 else HassJob(action, f
"call_later {delay}")
1610 return loop.call_at(loop.time() + delay, _run_async_call_action, hass, job).cancel
# Synchronous (thread-safe) wrapper around async_call_later.
call_later = threaded_listener_factory(async_call_later)
1616 @dataclass(slots=True)
1618 """Helper class to help listen to time interval events."""
1623 action: Callable[[datetime], Coroutine[Any, Any,
None] |
None]
1624 cancel_on_shutdown: bool |
None
1625 _track_job: HassJob[[datetime], Coroutine[Any, Any,
None] |
None] |
None =
None
1626 _run_job: HassJob[[datetime], Coroutine[Any, Any,
None] |
None] |
None =
None
1627 _timer_handle: asyncio.TimerHandle |
None =
None
1630 """Initialize track job."""
1634 job_type=HassJobType.Callback,
1635 cancel_on_shutdown=self.cancel_on_shutdown,
1639 f
"track time interval {self.seconds}",
1640 cancel_on_shutdown=self.cancel_on_shutdown,
1645 """Schedule the timer."""
1656 """Handle elapsed intervals."""
1658 assert self.
_run_job_run_job
is not None
1660 self.hass.async_run_hass_job(self.
_run_job_run_job, dt_util.utcnow(), background=
True)
1664 """Cancel the call_at."""
@callback
def async_track_time_interval(
    hass: HomeAssistant,
    action: Callable[[datetime], Coroutine[Any, Any, None] | None],
    interval: timedelta,
    *,
    name: str | None = None,
    cancel_on_shutdown: bool | None = None,
) -> CALLBACK_TYPE:
    """Add a listener that fires repetitively at every timedelta interval.

    The listener is passed the time it fires in UTC time.
    """
    seconds = interval.total_seconds()
    job_name = f"track time interval {seconds} {action}"
    if name:
        job_name = f"{name}: {job_name}"
    # NOTE(review): _TrackTimeInterval construction reconstructed from the visible
    # dataclass fields — confirm the positional order against the class definition.
    track = _TrackTimeInterval(hass, seconds, job_name, action, cancel_on_shutdown)
    track.async_attach()
    return track.async_cancel
1693 track_time_interval = threaded_listener_factory(async_track_time_interval)
1696 @dataclass(slots=True)
1698 """Helper class to help listen to sun events."""
1701 job: HassJob[[], Coroutine[Any, Any,
None] |
None]
1703 offset: timedelta |
None
1704 _unsub_sun: CALLBACK_TYPE |
None =
None
1705 _unsub_config: CALLBACK_TYPE |
None =
None
1709 """Attach a sun listener."""
1720 """Detach the sun listener."""
1731 """Set up the sun event listener."""
1742 """Handle solar event."""
1745 self.hass.async_run_hass_job(self.job, background=
True)
1749 """Handle core config update."""
@callback
def async_track_sunrise(
    hass: HomeAssistant, action: Callable[[], None], offset: timedelta | None = None
) -> CALLBACK_TYPE:
    """Add a listener that will fire a specified offset from sunrise daily."""
    listener = _TrackSunListener(
        hass, HassJob(action, "track sunrise"), SUN_EVENT_SUNRISE, offset
    )
    listener.async_attach()
    # Detaching removes both the sun listener and any config-update listener.
    return listener.async_detach
1769 track_sunrise = threaded_listener_factory(async_track_sunrise)
@callback
def async_track_sunset(
    hass: HomeAssistant, action: Callable[[], None], offset: timedelta | None = None
) -> CALLBACK_TYPE:
    """Add a listener that will fire a specified offset from sunset daily."""
    listener = _TrackSunListener(
        hass, HassJob(action, "track sunset"), SUN_EVENT_SUNSET, offset
    )
    listener.async_attach()
    # Detaching removes both the sun listener and any config-update listener.
    return listener.async_detach
1785 track_sunset = threaded_listener_factory(async_track_sunset)
# Module-level indirection to the time sources used by the time trackers.
# NOTE(review): presumably exposed at module level so tests can patch the time
# source without touching dt_util/time directly — confirm against the test suite.
time_tracker_utcnow = dt_util.utcnow
time_tracker_timestamp = time.time
1792 @dataclass(slots=True)
1795 time_match_expression: tuple[list[int], list[int], list[int]]
1798 job: HassJob[[datetime], Coroutine[Any, Any,
None] |
None]
1799 listener_job_name: str
1800 _pattern_time_change_listener_job: HassJob[[datetime],
None] |
None =
None
1801 _cancel_callback: CALLBACK_TYPE |
None =
None
1804 """Initialize track job."""
1807 self.listener_job_name,
1808 job_type=HassJobType.Callback,
1817 """Calculate and set the next time the trigger should fire."""
1818 localized_now = dt_util.as_local(utc_now)
if self.local
else utc_now
1819 return dt_util.find_next_time_expression_time(
1820 localized_now, *self.time_match_expression
1821 ).replace(microsecond=self.microsecond)
1825 """Listen for matching time_changed events."""
1830 localized_now = dt_util.as_local(utc_now)
if self.local
else utc_now
1838 hass.async_run_hass_job(self.job, localized_now, background=
True)
1842 """Cancel the call_at."""
@callback
def async_track_utc_time_change(
    hass: HomeAssistant,
    action: Callable[[datetime], Coroutine[Any, Any, None] | None],
    hour: Any | None = None,
    minute: Any | None = None,
    second: Any | None = None,
    local: bool = False,
) -> CALLBACK_TYPE:
    """Add a listener that will fire every time the UTC or local time matches a pattern.

    The listener is passed the time it fires in UTC or local time.
    """
    # With no pattern at all there is nothing to match against.
    # NOTE(review): this fallback branch is reconstructed — confirm the exact
    # no-pattern behavior (one-second interval tracking) against upstream.
    if all(val is None or val == "*" for val in (hour, minute, second)):
        return async_track_time_interval(hass, action, timedelta(seconds=1))

    job = HassJob(action, f"track time change {hour}:{minute}:{second} local={local}")
    matching_seconds = dt_util.parse_time_expression(second, 0, 59)
    matching_minutes = dt_util.parse_time_expression(minute, 0, 59)
    matching_hours = dt_util.parse_time_expression(hour, 0, 23)
    # Randomize the sub-second offset so many trackers with the same pattern
    # do not all fire on the exact same microsecond (thundering herd).
    microsecond = randint(RANDOM_MICROSECOND_MIN, RANDOM_MICROSECOND_MAX)
    listener_job_name = f"time change listener {hour}:{minute}:{second} {action}"
    pattern_time_change_tracker = _TrackUTCTimePattern(
        hass,
        (matching_seconds, matching_minutes, matching_hours),
        local,
        microsecond,
        job,
        listener_job_name,
    )
    pattern_time_change_tracker.async_attach()
    return pattern_time_change_tracker.async_cancel
1893 track_utc_time_change = threaded_listener_factory(async_track_utc_time_change)
@callback
def async_track_time_change(
    hass: HomeAssistant,
    action: Callable[[datetime], Coroutine[Any, Any, None] | None],
    hour: Any | None = None,
    minute: Any | None = None,
    second: Any | None = None,
) -> CALLBACK_TYPE:
    """Add a listener that will fire every time the local time matches a pattern.

    The listener is passed the time it fires in local time.
    """
    # Thin wrapper: same pattern matching, evaluated in local time.
    return async_track_utc_time_change(hass, action, hour, minute, second, local=True)
1912 track_time_change = threaded_listener_factory(async_track_time_change)
@callback
def process_state_match(
    parameter: str | Iterable[str] | None, invert: bool = False
) -> Callable[[str | None], bool]:
    """Convert parameter to function that matches input against parameter."""
    if parameter is None or parameter == MATCH_ALL:
        # No constraint (or the explicit wildcard): everything matches.
        def _match_all(_: str | None) -> bool:
            return not invert

        return _match_all

    if isinstance(parameter, str) or not hasattr(parameter, "__iter__"):
        # Single value (or any non-iterable): direct equality comparison.
        def _match_single(state: str | None) -> bool:
            return invert is not (state == parameter)

        return _match_single

    # Iterable of values: materialize once for O(1) membership tests.
    parameter_set = set(parameter)

    def _match_set(state: str | None) -> bool:
        return invert is not (state in parameter_set)

    return _match_set
1931 render_infos: Iterable[RenderInfo],
1932 ) -> tuple[set[str], set[str]]:
1933 """Combine from multiple RenderInfo."""
1934 entities: set[str] = set()
1935 domains: set[str] = set()
1937 for render_info
in render_infos:
1938 if render_info.entities:
1939 entities.update(render_info.entities)
1940 if render_info.domains:
1941 domains.update(render_info.domains)
1942 if render_info.domains_lifecycle:
1943 domains.update(render_info.domains_lifecycle)
1944 return entities, domains
1949 """Determine if an all listener is needed from RenderInfo."""
1950 for render_info
in render_infos:
1952 if render_info.all_states
or render_info.all_states_lifecycle:
1960 """Create a TrackStates dataclass from the latest RenderInfo."""
1969 event: Event[EventStateChangedData], info: RenderInfo
1971 """Determine if a template should be re-rendered from an event."""
1972 entity_id = event.data[
"entity_id"]
1974 if info.filter(entity_id):
1977 if event.data[
"new_state"]
is not None and event.data[
"old_state"]
is not None:
1980 return bool(info.filter_lifecycle(entity_id))
1985 event: Event[EventStateChangedData],
1987 track_template_: TrackTemplate,
1989 """Determine the rate limit for an event."""
1992 if event.data[
"entity_id"]
in info.entities:
1995 if track_template_.rate_limit
is not None:
1996 return track_template_.rate_limit
1998 rate_limit: float |
None = info.rate_limit
2003 """Remove the domains and all_states from render info during a ratelimit."""
2004 rate_limited_render_info = copy.copy(render_info)
2005 rate_limited_render_info.all_states =
False
2006 rate_limited_render_info.all_states_lifecycle =
False
2007 rate_limited_render_info.domains = set()
2008 rate_limited_render_info.domains_lifecycle = set()
2009 return rate_limited_render_info
None _handle_config_event(self, Any _event)
None _handle_sun_event(self, Any _now)
None _listen_next_sun_event(self)
None _refresh(self, Event[EventStateChangedData]|None event, Iterable[TrackTemplate]|None track_templates=None, bool|None replayed=False)
None _setup_time_listener(self, Template template, bool has_time)
dict[str, bool|set[str]] listeners(self)
None _update_time_listeners(self)
None async_setup(self, bool strict=False, Callable[[int, str], None]|None log_fn=None)
None __init__(self, HomeAssistant hass, Sequence[TrackTemplate] track_templates, TrackTemplateResultListener action, bool has_super_template=False)
bool|TrackTemplateResult _render_template_if_ready(self, TrackTemplate track_template_, float now, Event[EventStateChangedData]|None event)
bool _apply_update(self, list[TrackTemplateResult] updates, bool|TrackTemplateResult update, Template template)
bool _super_template_as_boolean(bool|str|TemplateError result)
None _setup_domains_listener(self, set[str] domains)
None _setup_all_listener(self)
None __init__(self, HomeAssistant hass, TrackStates track_states, Callable[[Event[EventStateChangedData]], Any] action)
None _cancel_listener(self, str listener_name)
None _setup_entities_listener(self, set[str] domains, set[str] entities)
None async_update_listeners(self, TrackStates new_track_states)
dict[str, bool|set[str]] listeners(self)
None _state_added(self, Event[EventStateChangedData] event)
None _schedule_timer(self)
None _interval_listener(self, Any _)
datetime _calculate_next(self, datetime utc_now)
None _pattern_time_change_listener(self, datetime _)
_pattern_time_change_listener_job
bool remove(self, _T matcher)
web.Response get(self, web.Request request, str config_key)
tuple[str, str] split_entity_id(str entity_id)
tuple[set[str], set[str]] _entities_domains_from_render_infos(Iterable[RenderInfo] render_infos)
None _async_dispatch_device_id_event(HomeAssistant hass, dict[str, list[HassJob[[Event[EventDeviceRegistryUpdatedData]], Any]]] callbacks, Event[EventDeviceRegistryUpdatedData] event)
CALLBACK_TYPE _async_track_state_change_event(HomeAssistant hass, str|Iterable[str] entity_ids, Callable[[Event[EventStateChangedData]], Any] action, HassJobType|None job_type)
CALLBACK_TYPE async_track_time_change(HomeAssistant hass, Callable[[datetime], Coroutine[Any, Any, None]|None] action, Any|None hour=None, Any|None minute=None, Any|None second=None)
CALLBACK_TYPE async_track_device_registry_updated_event(HomeAssistant hass, str|Iterable[str] device_ids, Callable[[Event[EventDeviceRegistryUpdatedData]], Any] action, HassJobType|None job_type=None)
_TrackStateChangeFiltered async_track_state_change_filtered(HomeAssistant hass, TrackStates track_states, Callable[[Event[EventStateChangedData]], Any] action)
bool _async_device_registry_updated_filter(HomeAssistant hass, dict[str, list[HassJob[[Event[EventDeviceRegistryUpdatedData]], Any]]] callbacks, EventDeviceRegistryUpdatedData event_data)
CALLBACK_TYPE async_track_sunset(HomeAssistant hass, Callable[[], None] action, timedelta|None offset=None)
None _async_dispatch_domain_event(HomeAssistant hass, dict[str, list[HassJob[[Event[EventStateChangedData]], Any]]] callbacks, Event[EventStateChangedData] event)
CALLBACK_TYPE async_track_template(HomeAssistant hass, Template template, Callable[[str, State|None, State|None], Coroutine[Any, Any, None]|None] action, TemplateVarsType|None variables=None)
bool _async_domain_added_filter(HomeAssistant hass, dict[str, list[HassJob[[Event[EventStateChangedData]], Any]]] callbacks, EventStateChangedData event_data)
CALLBACK_TYPE async_track_state_added_domain(HomeAssistant hass, str|Iterable[str] domains, Callable[[Event[EventStateChangedData]], Any] action, HassJobType|None job_type=None)
float|None _rate_limit_for_event(Event[EventStateChangedData] event, RenderInfo info, TrackTemplate track_template_)
None _run_async_call_action(HomeAssistant hass, HassJob[[datetime], Coroutine[Any, Any, None]|None] job)
CALLBACK_TYPE async_call_later(HomeAssistant hass, float|timedelta delay, HassJob[[datetime], Coroutine[Any, Any, None]|None]|Callable[[datetime], Coroutine[Any, Any, None]|None] action)
CALLBACK_TYPE _async_track_state_added_domain(HomeAssistant hass, str|Iterable[str] domains, Callable[[Event[EventStateChangedData]], Any] action, HassJobType|None job_type)
TrackStates _render_infos_to_track_states(Iterable[RenderInfo] render_infos)
CALLBACK_TYPE async_track_same_state(HomeAssistant hass, timedelta period, Callable[[], Coroutine[Any, Any, None]|None] action, Callable[[str, State|None, State|None], bool] async_check_same_func, str|Iterable[str] entity_ids=MATCH_ALL)
CALLBACK_TYPE async_track_state_report_event(HomeAssistant hass, str|Iterable[str] entity_ids, Callable[[Event[EventStateReportedData]], Any] action, HassJobType|None job_type=None)
CALLBACK_TYPE async_track_utc_time_change(HomeAssistant hass, Callable[[datetime], Coroutine[Any, Any, None]|None] action, Any|None hour=None, Any|None minute=None, Any|None second=None, bool local=False)
None _async_dispatch_old_entity_id_or_entity_id_event(HomeAssistant hass, dict[str, list[HassJob[[Event[EventEntityRegistryUpdatedData]], Any]]] callbacks, Event[EventEntityRegistryUpdatedData] event)
CALLBACK_TYPE async_track_entity_registry_updated_event(HomeAssistant hass, str|Iterable[str] entity_ids, Callable[[Event[EventEntityRegistryUpdatedData]], Any] action, HassJobType|None job_type=None)
CALLBACK_TYPE _async_track_event(_KeyedEventTracker[_TypedDictT] tracker, HomeAssistant hass, str|Iterable[str] keys, Callable[[Event[_TypedDictT]], None] action, HassJobType|None job_type)
CALLBACK_TYPE async_track_state_change_event(HomeAssistant hass, str|Iterable[str] entity_ids, Callable[[Event[EventStateChangedData]], Any] action, HassJobType|None job_type=None)
None _remove_empty_listener()
CALLBACK_TYPE async_track_sunrise(HomeAssistant hass, Callable[[], None] action, timedelta|None offset=None)
bool _async_entity_registry_updated_filter(HomeAssistant hass, dict[str, list[HassJob[[Event[EventEntityRegistryUpdatedData]], Any]]] callbacks, EventEntityRegistryUpdatedData event_data)
Callable[[str|None], bool] process_state_match(str|Iterable[str]|None parameter, bool invert=False)
CALLBACK_TYPE async_track_point_in_utc_time(HomeAssistant hass, HassJob[[datetime], Coroutine[Any, Any, None]|None]|Callable[[datetime], Coroutine[Any, Any, None]|None] action, datetime point_in_time)
CALLBACK_TYPE async_track_point_in_time(HomeAssistant hass, HassJob[[datetime], Coroutine[Any, Any, None]|None]|Callable[[datetime], Coroutine[Any, Any, None]|None] action, datetime point_in_time)
bool _async_state_filter(HomeAssistant hass, dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]] callbacks, _StateEventDataT event_data)
bool _event_triggers_rerender(Event[EventStateChangedData] event, RenderInfo info)
CALLBACK_TYPE async_track_state_removed_domain(HomeAssistant hass, str|Iterable[str] domains, Callable[[Event[EventStateChangedData]], Any] action, HassJobType|None job_type=None)
CALLBACK_TYPE async_call_at(HomeAssistant hass, HassJob[[datetime], Coroutine[Any, Any, None]|None]|Callable[[datetime], Coroutine[Any, Any, None]|None] action, float loop_time)
list[str] _async_string_to_lower_list(str|Iterable[str] instr)
RenderInfo _suppress_domain_all_in_render_info(RenderInfo render_info)
bool _async_domain_removed_filter(HomeAssistant hass, dict[str, list[HassJob[[Event[EventStateChangedData]], Any]]] callbacks, EventStateChangedData event_data)
TrackTemplateResultInfo async_track_template_result(HomeAssistant hass, Sequence[TrackTemplate] track_templates, TrackTemplateResultListener action, bool strict=False, Callable[[int, str], None]|None log_fn=None, bool has_super_template=False)
CALLBACK_TYPE async_track_time_interval(HomeAssistant hass, Callable[[datetime], Coroutine[Any, Any, None]|None] action, timedelta interval, *str|None name=None, bool|None cancel_on_shutdown=None)
None _async_dispatch_entity_id_event_soon(HomeAssistant hass, dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]] callbacks, Event[_StateEventDataT] event)
None _remove_listener(HomeAssistant hass, _KeyedEventTracker[_TypedDictT] tracker, Iterable[str] keys, HassJob[[Event[_TypedDictT]], Any] job, dict[str, list[HassJob[[Event[_TypedDictT]], Any]]] callbacks)
bool _render_infos_needs_all_listener(Iterable[RenderInfo] render_infos)
None _async_dispatch_entity_id_event(HomeAssistant hass, dict[str, list[HassJob[[Event[_StateEventDataT]], Any]]] callbacks, Event[_StateEventDataT] event)
CALLBACK_TYPE async_track_state_change(HomeAssistant hass, str|Iterable[str] entity_ids, Callable[[str, State|None, State|None], Coroutine[Any, Any, None]|None] action, str|Iterable[str]|None from_state=None, str|Iterable[str]|None to_state=None)
datetime.datetime get_astral_event_next(HomeAssistant hass, str event, datetime.datetime|None utc_point_in_time=None, datetime.timedelta|None offset=None)
bool result_as_boolean(Any|None template_result)