1 """Support for statistics for sensor values."""
3 from __future__
import annotations
5 from collections
import deque
6 from collections.abc
import Callable, Mapping
8 from datetime
import datetime, timedelta
12 from typing
import Any, cast
14 import voluptuous
as vol
19 DEVICE_CLASS_STATE_CLASSES,
21 PLATFORM_SCHEMA
as SENSOR_PLATFORM_SCHEMA,
29 ATTR_UNIT_OF_MEASUREMENT,
40 EventStateChangedData,
41 EventStateReportedData,
51 async_track_point_in_utc_time,
52 async_track_state_change_event,
53 async_track_state_report_event,
60 from .
import DOMAIN, PLATFORMS
62 _LOGGER = logging.getLogger(__name__)
# Keys for extra state attributes exposed by the sensor.
STAT_AGE_COVERAGE_RATIO = "age_coverage_ratio"
STAT_BUFFER_USAGE_RATIO = "buffer_usage_ratio"
STAT_SOURCE_VALUE_VALID = "source_value_valid"

# Names of the supported statistical characteristics. Each value maps to a
# `_stat_<name>` (or `_stat_binary_<name>`) method on the sensor entity.
STAT_AVERAGE_LINEAR = "average_linear"
STAT_AVERAGE_STEP = "average_step"
STAT_AVERAGE_TIMELESS = "average_timeless"
STAT_CHANGE = "change"
STAT_CHANGE_SAMPLE = "change_sample"
STAT_CHANGE_SECOND = "change_second"
STAT_COUNT_BINARY_ON = "count_on"
STAT_COUNT_BINARY_OFF = "count_off"
STAT_DATETIME_NEWEST = "datetime_newest"
STAT_DATETIME_OLDEST = "datetime_oldest"
STAT_DATETIME_VALUE_MAX = "datetime_value_max"
STAT_DATETIME_VALUE_MIN = "datetime_value_min"
STAT_DISTANCE_95P = "distance_95_percent_of_values"
STAT_DISTANCE_99P = "distance_99_percent_of_values"
STAT_DISTANCE_ABSOLUTE = "distance_absolute"
STAT_MEAN_CIRCULAR = "mean_circular"
STAT_MEDIAN = "median"
STAT_NOISINESS = "noisiness"
STAT_PERCENTILE = "percentile"
STAT_STANDARD_DEVIATION = "standard_deviation"
STAT_SUM_DIFFERENCES = "sum_differences"
STAT_SUM_DIFFERENCES_NONNEGATIVE = "sum_differences_nonnegative"
STAT_VALUE_MAX = "value_max"
STAT_VALUE_MIN = "value_min"
STAT_VARIANCE = "variance"
101 STATS_NUMERIC_SUPPORT = {
104 STAT_AVERAGE_TIMELESS,
109 STAT_DATETIME_NEWEST,
110 STAT_DATETIME_OLDEST,
111 STAT_DATETIME_VALUE_MAX,
112 STAT_DATETIME_VALUE_MIN,
115 STAT_DISTANCE_ABSOLUTE,
121 STAT_STANDARD_DEVIATION,
123 STAT_SUM_DIFFERENCES,
124 STAT_SUM_DIFFERENCES_NONNEGATIVE,
132 STATS_BINARY_SUPPORT = {
134 STAT_AVERAGE_TIMELESS,
136 STAT_COUNT_BINARY_ON,
137 STAT_COUNT_BINARY_OFF,
138 STAT_DATETIME_NEWEST,
139 STAT_DATETIME_OLDEST,
143 STATS_NOT_A_NUMBER = {
144 STAT_DATETIME_NEWEST,
145 STAT_DATETIME_OLDEST,
146 STAT_DATETIME_VALUE_MAX,
147 STAT_DATETIME_VALUE_MIN,
151 STAT_DATETIME_NEWEST,
152 STAT_DATETIME_OLDEST,
153 STAT_DATETIME_VALUE_MAX,
154 STAT_DATETIME_VALUE_MIN,
158 STATS_NUMERIC_RETAIN_UNIT = {
161 STAT_AVERAGE_TIMELESS,
165 STAT_DISTANCE_ABSOLUTE,
171 STAT_STANDARD_DEVIATION,
173 STAT_SUM_DIFFERENCES,
174 STAT_SUM_DIFFERENCES_NONNEGATIVE,
181 STATS_BINARY_PERCENTAGE = {
183 STAT_AVERAGE_TIMELESS,
# YAML/config-entry option keys for this platform.
CONF_STATE_CHARACTERISTIC = "state_characteristic"
CONF_SAMPLES_MAX_BUFFER_SIZE = "sampling_size"
CONF_MAX_AGE = "max_age"
CONF_KEEP_LAST_SAMPLE = "keep_last_sample"
CONF_PRECISION = "precision"
CONF_PERCENTILE = "percentile"

DEFAULT_NAME = "Statistical characteristic"
# Number of decimal places used when rounding the computed value.
DEFAULT_PRECISION = 2
ICON = "mdi:calculator"
200 """Validate that the characteristic selected is valid for the source sensor type, throw if it isn't."""
201 is_binary =
split_entity_id(config[CONF_ENTITY_ID])[0] == BINARY_SENSOR_DOMAIN
202 characteristic = cast(str, config[CONF_STATE_CHARACTERISTIC])
203 if (is_binary
and characteristic
not in STATS_BINARY_SUPPORT)
or (
204 not is_binary
and characteristic
not in STATS_NUMERIC_SUPPORT
206 raise vol.ValueInvalid(
207 f
"The configured characteristic '{characteristic}' is not supported "
208 "for the configured source sensor"
214 """Validate that max_age, sampling_size, or both are provided."""
217 config.get(CONF_SAMPLES_MAX_BUFFER_SIZE)
is None
218 and config.get(CONF_MAX_AGE)
is None
220 raise vol.RequiredFieldInvalid(
221 "The sensor configuration must provide 'max_age' and/or 'sampling_size'"
227 """Validate that if keep_last_sample is set, max_age must also be set."""
229 if config.get(CONF_KEEP_LAST_SAMPLE)
is True and config.get(CONF_MAX_AGE)
is None:
230 raise vol.RequiredFieldInvalid(
231 "The sensor configuration must provide 'max_age' if 'keep_last_sample' is True"
236 _PLATFORM_SCHEMA_BASE = SENSOR_PLATFORM_SCHEMA.extend(
238 vol.Required(CONF_ENTITY_ID): cv.entity_id,
239 vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
240 vol.Optional(CONF_UNIQUE_ID): cv.string,
241 vol.Required(CONF_STATE_CHARACTERISTIC): cv.string,
242 vol.Optional(CONF_SAMPLES_MAX_BUFFER_SIZE): vol.All(
243 vol.Coerce(int), vol.Range(min=1)
245 vol.Optional(CONF_MAX_AGE): cv.time_period,
246 vol.Optional(CONF_KEEP_LAST_SAMPLE, default=
False): cv.boolean,
247 vol.Optional(CONF_PRECISION, default=DEFAULT_PRECISION): vol.Coerce(int),
248 vol.Optional(CONF_PERCENTILE, default=50): vol.All(
249 vol.Coerce(int), vol.Range(min=1, max=99)
253 PLATFORM_SCHEMA = vol.All(
254 _PLATFORM_SCHEMA_BASE,
255 valid_state_characteristic_configuration,
256 valid_boundary_configuration,
257 valid_keep_last_sample,
264 async_add_entities: AddEntitiesCallback,
265 discovery_info: DiscoveryInfoType |
None =
None,
267 """Set up the Statistics sensor."""
275 source_entity_id=config[CONF_ENTITY_ID],
276 name=config[CONF_NAME],
277 unique_id=config.get(CONF_UNIQUE_ID),
278 state_characteristic=config[CONF_STATE_CHARACTERISTIC],
279 samples_max_buffer_size=config.get(CONF_SAMPLES_MAX_BUFFER_SIZE),
280 samples_max_age=config.get(CONF_MAX_AGE),
281 samples_keep_last=config[CONF_KEEP_LAST_SAMPLE],
282 precision=config[CONF_PRECISION],
283 percentile=config[CONF_PERCENTILE],
286 update_before_add=
True,
293 async_add_entities: AddEntitiesCallback,
295 """Set up the Statistics sensor entry."""
296 sampling_size = entry.options.get(CONF_SAMPLES_MAX_BUFFER_SIZE)
298 sampling_size =
int(sampling_size)
301 if max_age_input := entry.options.get(CONF_MAX_AGE):
303 hours=max_age_input[
"hours"],
304 minutes=max_age_input[
"minutes"],
305 seconds=max_age_input[
"seconds"],
312 source_entity_id=entry.options[CONF_ENTITY_ID],
313 name=entry.options[CONF_NAME],
314 unique_id=entry.entry_id,
315 state_characteristic=entry.options[CONF_STATE_CHARACTERISTIC],
316 samples_max_buffer_size=sampling_size,
317 samples_max_age=max_age,
318 samples_keep_last=entry.options[CONF_KEEP_LAST_SAMPLE],
319 precision=
int(entry.options[CONF_PRECISION]),
320 percentile=
int(entry.options[CONF_PERCENTILE]),
328 """Representation of a Statistics sensor."""
330 _attr_should_poll =
False
336 source_entity_id: str,
338 unique_id: str |
None,
339 state_characteristic: str,
340 samples_max_buffer_size: int |
None,
341 samples_max_age: timedelta |
None,
342 samples_keep_last: bool,
346 """Initialize the Statistics sensor."""
347 self._attr_name: str = name
348 self._attr_unique_id: str |
None = unique_id
349 self._source_entity_id: str = source_entity_id
354 self.is_binary: bool = (
358 self._samples_max_buffer_size: int |
None = samples_max_buffer_size
359 self._samples_max_age: timedelta |
None = samples_max_age
360 self.samples_keep_last: bool = samples_keep_last
362 self._percentile: int = percentile
365 self.states: deque[float | bool] = deque(maxlen=self._samples_max_buffer_size)
366 self.ages: deque[datetime] = deque(maxlen=self._samples_max_buffer_size)
369 self._state_characteristic_fn: Callable[[], float | int | datetime |
None] = (
374 self.
_preview_callback_preview_callback: Callable[[str, Mapping[str, Any]],
None] |
None =
None
378 preview_callback: Callable[[str, Mapping[str, Any]],
None],
380 """Render a preview."""
384 if not self._source_entity_id
or (
385 self._samples_max_buffer_size
is None and self._samples_max_age
is None
389 preview_callback(calculated_state.state, calculated_state.attributes)
399 reported_state: State |
None,
401 """Handle the sensor state changes."""
402 if (new_state := reported_state)
is None:
409 self.
_preview_callback_preview_callback(calculated_state.state, calculated_state.attributes)
417 event: Event[EventStateChangedData],
424 event: Event[EventStateReportedData],
429 """Add listener and get recorded state.
431 Historical data needs to be loaded from the database first before we
432 can start accepting new incoming changes.
433 This is needed to ensure that the buffer is properly sorted by time.
435 _LOGGER.debug(
"Startup for %s", self.
entity_identity_id)
436 if "recorder" in self.
hasshass.config.components:
441 [self._source_entity_id],
448 [self._source_entity_id],
454 """Register callbacks."""
458 """Add the state to the queue."""
463 self.
_attr_available_attr_available = new_state.state != STATE_UNAVAILABLE
464 if new_state.state == STATE_UNAVAILABLE:
467 if new_state.state
in (STATE_UNKNOWN,
None,
""):
473 assert new_state.state
in (
"on",
"off")
474 self.states.append(new_state.state ==
"on")
476 self.states.append(
float(new_state.state))
477 self.ages.append(new_state.last_reported)
482 "%s: parsing error. Expected number or binary state, but received '%s'",
491 """Set the entity state attributes."""
502 """Return the calculated unit of measurement.
504 The unit of measurement is that of the source sensor, adjusted based on the
505 state characteristics.
508 base_unit: str |
None = new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
509 unit: str |
None =
None
521 STAT_COUNT_BINARY_ON,
522 STAT_COUNT_BINARY_OFF,
527 unit = base_unit +
"²"
529 unit = base_unit +
"/sample"
531 unit = base_unit +
"/s"
536 self, new_state: State, unit: str |
None
537 ) -> SensorDeviceClass |
None:
538 """Return the calculated device class.
540 The device class is calculated based on the state characteristics,
541 the source device class and the unit of measurement is
542 in the device class units list.
545 device_class: SensorDeviceClass |
None =
None
547 return SensorDeviceClass.TIMESTAMP
549 device_class = new_state.attributes.get(ATTR_DEVICE_CLASS)
550 if device_class
is None:
553 sensor_device_class := try_parse_enum(SensorDeviceClass, device_class)
559 sensor_state_classes := DEVICE_CLASS_STATE_CLASSES.get(
563 and sensor_state_classes
564 and SensorStateClass.MEASUREMENT
not in sensor_state_classes
567 if device_class
not in DEVICE_CLASS_UNITS:
570 device_class
in DEVICE_CLASS_UNITS
571 and unit
not in DEVICE_CLASS_UNITS[device_class]
578 """Return the calculated state class.
580 Will be None if the characteristics is not numerical, otherwise
581 SensorStateClass.MEASUREMENT.
585 return SensorStateClass.MEASUREMENT
588 """Remove states which are older than a given age."""
589 now = dt_util.utcnow()
592 "%s: purging records older then %s(%s)(keep_last_sample: %s)",
594 dt_util.as_local(now - max_age),
595 self._samples_max_age,
596 self.samples_keep_last,
599 while self.ages
and (now - self.ages[0]) > max_age:
600 if self.samples_keep_last
and len(self.ages) == 1:
606 "%s: preserving expired record with datetime %s(%s)",
608 dt_util.as_local(self.ages[0]),
609 (now - self.ages[0]),
614 "%s: purging record with datetime %s(%s)",
616 dt_util.as_local(self.ages[0]),
617 (now - self.ages[0]),
620 self.states.popleft()
624 """Find the timestamp when the next purge would occur."""
625 if self.ages
and self._samples_max_age:
626 if self.samples_keep_last
and len(self.ages) == 1:
631 "%s: skipping purge cycle for last record with datetime %s(%s)",
633 dt_util.as_local(self.ages[0]),
634 (dt_util.utcnow() - self.ages[0]),
640 return self.ages[0] + self._samples_max_age
644 """Get the latest data and updates the states."""
648 """Purge old states, update the sensor and schedule the next update."""
649 _LOGGER.debug(
"%s: updating statistics", self.
entity_identity_id)
650 if self._samples_max_age
is not None:
660 _LOGGER.debug(
"%s: scheduling update at %s", self.
entity_identity_id, timestamp)
668 """Cancel the scheduled update listener."""
675 """Timer callback for sensor update."""
676 _LOGGER.debug(
"%s: executing scheduled update", self.
entity_identity_id)
684 """Fetch the states from the database."""
685 _LOGGER.debug(
"%s: initializing values from the database", self.
entity_identity_id)
686 lower_entity_id = self._source_entity_id.lower()
687 if self._samples_max_age
is not None:
689 dt_util.utcnow() - self._samples_max_age -
timedelta(microseconds=1)
692 "%s: retrieve records not older then %s",
697 start_date = datetime.fromtimestamp(0, tz=dt_util.UTC)
698 _LOGGER.debug(
"%s: retrieving all records", self.
entity_identity_id)
699 return history.state_changes_during_period(
702 entity_id=lower_entity_id,
704 limit=self._samples_max_buffer_size,
705 include_start_time_state=
False,
706 ).
get(lower_entity_id, [])
709 """Initialize the list of states from the database.
711 The query will get the list of states in DESCENDING order so that we
712 can limit the result to self._sample_size. Afterwards reverse the
713 list so that we get it in the right order again.
715 If MaxAge is provided then query will restrict to entries younger then
716 current datetime - MaxAge.
721 for state
in reversed(states):
729 self.
_preview_callback_preview_callback(calculated_state.state, calculated_state.attributes)
732 _LOGGER.debug(
"%s: initializing from database completed", self.
entity_identity_id)
735 """Calculate and update the various attributes."""
736 if self._samples_max_buffer_size
is not None:
738 len(self.states) / self._samples_max_buffer_size, 2
741 if self._samples_max_age
is not None:
742 if len(self.states) >= 1:
744 (self.ages[-1] - self.ages[0]).total_seconds()
745 / self._samples_max_age.total_seconds(),
752 """Front to call the right statistical characteristics functions.
754 One of the _stat_*() functions is represented by self._state_characteristic_fn().
757 value = self._state_characteristic_fn()
759 "Updating value: states: %s, ages: %s => %s", self.states, self.ages, value
762 with contextlib.suppress(TypeError):
763 value = round(cast(float, value), self.
_precision_precision)
769 self, characteristic: str
770 ) -> Callable[[], float | int | datetime |
None]:
771 """Return the function callable of one characteristic function."""
772 function: Callable[[], float | int | datetime |
None] = getattr(
774 f
"_stat_binary_{characteristic}"
776 else f
"_stat_{characteristic}",
783 if len(self.states) == 1:
784 return self.states[0]
785 if len(self.states) >= 2:
787 for i
in range(1, len(self.states)):
790 * (self.states[i] + self.states[i - 1])
791 * (self.ages[i] - self.ages[i - 1]).total_seconds()
793 age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds()
794 return area / age_range_seconds
798 if len(self.states) == 1:
799 return self.states[0]
800 if len(self.states) >= 2:
802 for i
in range(1, len(self.states)):
805 * (self.ages[i] - self.ages[i - 1]).total_seconds()
807 age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds()
808 return area / age_range_seconds
815 if len(self.states) > 0:
816 return self.states[-1] - self.states[0]
820 if len(self.states) > 1:
821 return (self.states[-1] - self.states[0]) / (len(self.states) - 1)
825 if len(self.states) > 1:
826 age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds()
827 if age_range_seconds > 0:
828 return (self.states[-1] - self.states[0]) / age_range_seconds
832 return len(self.states)
835 if len(self.states) > 0:
840 if len(self.states) > 0:
845 if len(self.states) > 0:
846 return self.ages[self.states.index(
max(self.states))]
850 if len(self.states) > 0:
851 return self.ages[self.states.index(
min(self.states))]
855 if len(self.states) >= 1:
860 if len(self.states) >= 1:
865 if len(self.states) > 0:
866 return max(self.states) -
min(self.states)
870 if len(self.states) > 0:
871 return statistics.mean(self.states)
875 if len(self.states) > 0:
876 sin_sum = sum(math.sin(math.radians(x))
for x
in self.states)
877 cos_sum = sum(math.cos(math.radians(x))
for x
in self.states)
878 return (math.degrees(math.atan2(sin_sum, cos_sum)) + 360) % 360
882 if len(self.states) > 0:
883 return statistics.median(self.states)
887 if len(self.states) == 1:
889 if len(self.states) >= 2:
894 if len(self.states) == 1:
895 return self.states[0]
896 if len(self.states) >= 2:
897 percentiles = statistics.quantiles(self.states, n=100, method=
"exclusive")
898 return percentiles[self._percentile - 1]
902 if len(self.states) == 1:
904 if len(self.states) >= 2:
905 return statistics.stdev(self.states)
909 if len(self.states) > 0:
910 return sum(self.states)
914 if len(self.states) == 1:
916 if len(self.states) >= 2:
919 for i, j
in zip(
list(self.states),
list(self.states)[1:], strict=
False)
924 if len(self.states) == 1:
926 if len(self.states) >= 2:
928 (j - i
if j >= i
else j - 0)
929 for i, j
in zip(
list(self.states),
list(self.states)[1:], strict=
False)
937 if len(self.states) > 0:
938 return max(self.states)
942 if len(self.states) > 0:
943 return min(self.states)
947 if len(self.states) == 1:
949 if len(self.states) >= 2:
950 return statistics.variance(self.states)
956 if len(self.states) == 1:
957 return 100.0 *
int(self.states[0]
is True)
958 if len(self.states) >= 2:
959 on_seconds: float = 0
960 for i
in range(1, len(self.states)):
961 if self.states[i - 1]
is True:
962 on_seconds += (self.ages[i] - self.ages[i - 1]).total_seconds()
963 age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds()
964 return 100 / age_range_seconds * on_seconds
971 return len(self.states)
974 return self.states.count(
True)
977 return self.states.count(
False)
986 if len(self.states) > 0:
987 return 100.0 / len(self.states) * self.states.count(
True)
None _async_stats_sensor_startup(self)
None _async_stats_sensor_state_report_listener(self, Event[EventStateReportedData] event)
StateType _stat_binary_average_timeless(self)
StateType _stat_change(self)
datetime|None _stat_datetime_value_min(self)
StateType _stat_sum(self)
StateType _stat_total(self)
None async_added_to_hass(self)
StateType _stat_sum_differences_nonnegative(self)
StateType _stat_median(self)
StateType _stat_standard_deviation(self)
StateType _stat_binary_average_step(self)
StateType _stat_sum_differences(self)
str|None _calculate_unit_of_measurement(self, State new_state)
None _async_purge_update_and_schedule(self)
StateType _stat_variance(self)
StateType _stat_binary_count_on(self)
StateType _stat_binary_count_off(self)
None _update_extra_state_attributes(self)
StateType _stat_binary_count(self)
datetime|None _async_next_to_purge_timestamp(self)
StateType _stat_percentile(self)
None _async_stats_sensor_state_change_listener(self, Event[EventStateChangedData] event)
None _async_handle_new_state(self, State|None reported_state)
SensorStateClass|None _calculate_state_class(self, State new_state)
StateType _stat_count(self)
SensorDeviceClass|None _calculate_device_class(self, State new_state, str|None unit)
StateType _stat_change_second(self)
None _calculate_state_attributes(self, State new_state)
StateType _stat_mean_circular(self)
CALLBACK_TYPE async_start_preview(self, Callable[[str, Mapping[str, Any]], None] preview_callback)
list[State] _fetch_states_from_database(self)
StateType _stat_average_step(self)
StateType _stat_average_timeless(self)
_attr_native_unit_of_measurement
StateType _stat_noisiness(self)
StateType _stat_distance_95_percent_of_values(self)
StateType _stat_value_min(self)
None _purge_old_states(self, timedelta max_age)
None _add_state_to_queue(self, State new_state)
StateType _stat_distance_99_percent_of_values(self)
datetime|None _stat_datetime_oldest(self)
datetime|None _stat_datetime_value_max(self)
datetime|None _stat_binary_datetime_newest(self)
None _async_scheduled_update(self, datetime now)
None __init__(self, HomeAssistant hass, str source_entity_id, str name, str|None unique_id, str state_characteristic, int|None samples_max_buffer_size, timedelta|None samples_max_age, bool samples_keep_last, int precision, int percentile)
datetime|None _stat_binary_datetime_oldest(self)
StateType _stat_mean(self)
StateType _stat_distance_absolute(self)
datetime|None _stat_datetime_newest(self)
None _initialize_from_database(self)
_attr_extra_state_attributes
StateType _stat_change_sample(self)
StateType _stat_average_linear(self)
StateType _stat_binary_mean(self)
Callable[[], float|int|datetime|None] _callable_characteristic_fn(self, str characteristic)
None _async_cancel_update_listener(self)
StateType _stat_value_max(self)
None async_write_ha_state(self)
CalculatedState _async_calculate_state(self)
None _call_on_remove_callbacks(self)
None async_on_remove(self, CALLBACK_TYPE func)
web.Response get(self, web.Request request, str config_key)
None async_setup_platform(HomeAssistant hass, ConfigType config, AddEntitiesCallback async_add_entities, DiscoveryInfoType|None discovery_info=None)
dict[str, Any] valid_keep_last_sample(dict[str, Any] config)
dict[str, Any] valid_state_characteristic_configuration(dict[str, Any] config)
dict[str, Any] valid_boundary_configuration(dict[str, Any] config)
None async_setup_entry(HomeAssistant hass, ConfigEntry entry, AddEntitiesCallback async_add_entities)
tuple[str, str] split_entity_id(str entity_id)
dr.DeviceInfo|None async_device_info_to_link_from_entity(HomeAssistant hass, str entity_id_or_uuid)
CALLBACK_TYPE async_track_state_report_event(HomeAssistant hass, str|Iterable[str] entity_ids, Callable[[Event[EventStateReportedData]], Any] action, HassJobType|None job_type=None)
CALLBACK_TYPE async_track_state_change_event(HomeAssistant hass, str|Iterable[str] entity_ids, Callable[[Event[EventStateChangedData]], Any] action, HassJobType|None job_type=None)
CALLBACK_TYPE async_track_point_in_utc_time(HomeAssistant hass, HassJob[[datetime], Coroutine[Any, Any, None]|None]|Callable[[datetime], Coroutine[Any, Any, None]|None] action, datetime point_in_time)
Recorder get_instance(HomeAssistant hass)
None async_setup_reload_service(HomeAssistant hass, str domain, Iterable[str] platforms)