Home Assistant Unofficial Reference 2024.12.1
sensor.py
Go to the documentation of this file.
1 """Support for statistics for sensor values."""
2 
3 from __future__ import annotations
4 
5 from collections import deque
6 from collections.abc import Callable, Mapping
7 import contextlib
8 from datetime import datetime, timedelta
9 import logging
10 import math
11 import statistics
12 from typing import Any, cast
13 
14 import voluptuous as vol
15 
16 from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
17 from homeassistant.components.recorder import get_instance, history
19  DEVICE_CLASS_STATE_CLASSES,
20  DEVICE_CLASS_UNITS,
21  PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
22  SensorDeviceClass,
23  SensorEntity,
24  SensorStateClass,
25 )
26 from homeassistant.config_entries import ConfigEntry
27 from homeassistant.const import (
28  ATTR_DEVICE_CLASS,
29  ATTR_UNIT_OF_MEASUREMENT,
30  CONF_ENTITY_ID,
31  CONF_NAME,
32  CONF_UNIQUE_ID,
33  PERCENTAGE,
34  STATE_UNAVAILABLE,
35  STATE_UNKNOWN,
36 )
37 from homeassistant.core import (
38  CALLBACK_TYPE,
39  Event,
40  EventStateChangedData,
41  EventStateReportedData,
42  HomeAssistant,
43  State,
44  callback,
45  split_entity_id,
46 )
47 from homeassistant.helpers import config_validation as cv
48 from homeassistant.helpers.device import async_device_info_to_link_from_entity
49 from homeassistant.helpers.entity_platform import AddEntitiesCallback
50 from homeassistant.helpers.event import (
51  async_track_point_in_utc_time,
52  async_track_state_change_event,
53  async_track_state_report_event,
54 )
55 from homeassistant.helpers.reload import async_setup_reload_service
56 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
57 from homeassistant.util import dt as dt_util
58 from homeassistant.util.enum import try_parse_enum
59 
60 from . import DOMAIN, PLATFORMS
61 
62 _LOGGER = logging.getLogger(__name__)
63 
# Stats for attributes only
STAT_AGE_COVERAGE_RATIO = "age_coverage_ratio"
STAT_BUFFER_USAGE_RATIO = "buffer_usage_ratio"
STAT_SOURCE_VALUE_VALID = "source_value_valid"

# All sensor statistics
STAT_AVERAGE_LINEAR = "average_linear"
STAT_AVERAGE_STEP = "average_step"
STAT_AVERAGE_TIMELESS = "average_timeless"
STAT_CHANGE = "change"
STAT_CHANGE_SAMPLE = "change_sample"
STAT_CHANGE_SECOND = "change_second"
STAT_COUNT = "count"
STAT_COUNT_BINARY_ON = "count_on"
STAT_COUNT_BINARY_OFF = "count_off"
STAT_DATETIME_NEWEST = "datetime_newest"
STAT_DATETIME_OLDEST = "datetime_oldest"
STAT_DATETIME_VALUE_MAX = "datetime_value_max"
STAT_DATETIME_VALUE_MIN = "datetime_value_min"
STAT_DISTANCE_95P = "distance_95_percent_of_values"
STAT_DISTANCE_99P = "distance_99_percent_of_values"
STAT_DISTANCE_ABSOLUTE = "distance_absolute"
STAT_MEAN = "mean"
STAT_MEAN_CIRCULAR = "mean_circular"
STAT_MEDIAN = "median"
STAT_NOISINESS = "noisiness"
STAT_PERCENTILE = "percentile"
STAT_STANDARD_DEVIATION = "standard_deviation"
STAT_SUM = "sum"
STAT_SUM_DIFFERENCES = "sum_differences"
STAT_SUM_DIFFERENCES_NONNEGATIVE = "sum_differences_nonnegative"
STAT_TOTAL = "total"
STAT_VALUE_MAX = "value_max"
STAT_VALUE_MIN = "value_min"
STAT_VARIANCE = "variance"

# Statistics supported by a sensor source (numeric)
STATS_NUMERIC_SUPPORT = {
    STAT_AVERAGE_LINEAR,
    STAT_AVERAGE_STEP,
    STAT_AVERAGE_TIMELESS,
    STAT_CHANGE_SAMPLE,
    STAT_CHANGE_SECOND,
    STAT_CHANGE,
    STAT_COUNT,
    STAT_DATETIME_NEWEST,
    STAT_DATETIME_OLDEST,
    STAT_DATETIME_VALUE_MAX,
    STAT_DATETIME_VALUE_MIN,
    STAT_DISTANCE_95P,
    STAT_DISTANCE_99P,
    STAT_DISTANCE_ABSOLUTE,
    STAT_MEAN,
    STAT_MEAN_CIRCULAR,
    STAT_MEDIAN,
    STAT_NOISINESS,
    STAT_PERCENTILE,
    STAT_STANDARD_DEVIATION,
    STAT_SUM,
    STAT_SUM_DIFFERENCES,
    STAT_SUM_DIFFERENCES_NONNEGATIVE,
    STAT_TOTAL,
    STAT_VALUE_MAX,
    STAT_VALUE_MIN,
    STAT_VARIANCE,
}

# Statistics supported by a binary_sensor source
STATS_BINARY_SUPPORT = {
    STAT_AVERAGE_STEP,
    STAT_AVERAGE_TIMELESS,
    STAT_COUNT,
    STAT_COUNT_BINARY_ON,
    STAT_COUNT_BINARY_OFF,
    STAT_DATETIME_NEWEST,
    STAT_DATETIME_OLDEST,
    STAT_MEAN,
}

# Characteristics whose native value is not numeric (all datetimes)
STATS_NOT_A_NUMBER = {
    STAT_DATETIME_NEWEST,
    STAT_DATETIME_OLDEST,
    STAT_DATETIME_VALUE_MAX,
    STAT_DATETIME_VALUE_MIN,
}

STATS_DATETIME = {
    STAT_DATETIME_NEWEST,
    STAT_DATETIME_OLDEST,
    STAT_DATETIME_VALUE_MAX,
    STAT_DATETIME_VALUE_MIN,
}

# Statistics which retain the unit of the source entity
STATS_NUMERIC_RETAIN_UNIT = {
    STAT_AVERAGE_LINEAR,
    STAT_AVERAGE_STEP,
    STAT_AVERAGE_TIMELESS,
    STAT_CHANGE,
    STAT_DISTANCE_95P,
    STAT_DISTANCE_99P,
    STAT_DISTANCE_ABSOLUTE,
    STAT_MEAN,
    STAT_MEAN_CIRCULAR,
    STAT_MEDIAN,
    STAT_NOISINESS,
    STAT_PERCENTILE,
    STAT_STANDARD_DEVIATION,
    STAT_SUM,
    STAT_SUM_DIFFERENCES,
    STAT_SUM_DIFFERENCES_NONNEGATIVE,
    STAT_TOTAL,
    STAT_VALUE_MAX,
    STAT_VALUE_MIN,
}

# Statistics which produce percentage ratio from binary_sensor source entity
STATS_BINARY_PERCENTAGE = {
    STAT_AVERAGE_STEP,
    STAT_AVERAGE_TIMELESS,
    STAT_MEAN,
}

# Configuration keys
CONF_STATE_CHARACTERISTIC = "state_characteristic"
CONF_SAMPLES_MAX_BUFFER_SIZE = "sampling_size"
CONF_MAX_AGE = "max_age"
CONF_KEEP_LAST_SAMPLE = "keep_last_sample"
CONF_PRECISION = "precision"
CONF_PERCENTILE = "percentile"

DEFAULT_NAME = "Statistical characteristic"
DEFAULT_PRECISION = 2
ICON = "mdi:calculator"
197 
198 
def valid_state_characteristic_configuration(config: dict[str, Any]) -> dict[str, Any]:
    """Validate that the characteristic selected is valid for the source sensor type, throw if it isn't."""
    # binary_sensor sources support only a subset of the characteristics
    is_binary = split_entity_id(config[CONF_ENTITY_ID])[0] == BINARY_SENSOR_DOMAIN
    characteristic = cast(str, config[CONF_STATE_CHARACTERISTIC])
    if (is_binary and characteristic not in STATS_BINARY_SUPPORT) or (
        not is_binary and characteristic not in STATS_NUMERIC_SUPPORT
    ):
        raise vol.ValueInvalid(
            f"The configured characteristic '{characteristic}' is not supported "
            "for the configured source sensor"
        )
    return config
211 
212 
def valid_boundary_configuration(config: dict[str, Any]) -> dict[str, Any]:
    """Validate that max_age, sampling_size, or both are provided."""
    # Without at least one bound the sample buffer would grow forever.
    if (
        config.get(CONF_SAMPLES_MAX_BUFFER_SIZE) is None
        and config.get(CONF_MAX_AGE) is None
    ):
        raise vol.RequiredFieldInvalid(
            "The sensor configuration must provide 'max_age' and/or 'sampling_size'"
        )
    return config
224 
225 
def valid_keep_last_sample(config: dict[str, Any]) -> dict[str, Any]:
    """Validate that if keep_last_sample is set, max_age must also be set."""
    # keep_last_sample only alters purge behavior, which requires max_age.
    if config.get(CONF_KEEP_LAST_SAMPLE) is True and config.get(CONF_MAX_AGE) is None:
        raise vol.RequiredFieldInvalid(
            "The sensor configuration must provide 'max_age' if 'keep_last_sample' is True"
        )
    return config
234 
235 
_PLATFORM_SCHEMA_BASE = SENSOR_PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_ENTITY_ID): cv.entity_id,
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
        vol.Optional(CONF_UNIQUE_ID): cv.string,
        vol.Required(CONF_STATE_CHARACTERISTIC): cv.string,
        vol.Optional(CONF_SAMPLES_MAX_BUFFER_SIZE): vol.All(
            vol.Coerce(int), vol.Range(min=1)
        ),
        vol.Optional(CONF_MAX_AGE): cv.time_period,
        vol.Optional(CONF_KEEP_LAST_SAMPLE, default=False): cv.boolean,
        vol.Optional(CONF_PRECISION, default=DEFAULT_PRECISION): vol.Coerce(int),
        vol.Optional(CONF_PERCENTILE, default=50): vol.All(
            vol.Coerce(int), vol.Range(min=1, max=99)
        ),
    }
)
# Cross-field validators run after the base schema has coerced the values.
PLATFORM_SCHEMA = vol.All(
    _PLATFORM_SCHEMA_BASE,
    valid_state_characteristic_configuration,
    valid_boundary_configuration,
    valid_keep_last_sample,
)
259 
260 
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Set up the Statistics sensor from YAML configuration."""

    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)

    # update_before_add so the entity has a value before first write
    async_add_entities(
        new_entities=[
            StatisticsSensor(
                hass=hass,
                source_entity_id=config[CONF_ENTITY_ID],
                name=config[CONF_NAME],
                unique_id=config.get(CONF_UNIQUE_ID),
                state_characteristic=config[CONF_STATE_CHARACTERISTIC],
                samples_max_buffer_size=config.get(CONF_SAMPLES_MAX_BUFFER_SIZE),
                samples_max_age=config.get(CONF_MAX_AGE),
                samples_keep_last=config[CONF_KEEP_LAST_SAMPLE],
                precision=config[CONF_PRECISION],
                percentile=config[CONF_PERCENTILE],
            )
        ],
        update_before_add=True,
    )
288 
289 
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the Statistics sensor from a config entry."""
    # Options come from the UI flow as strings/dicts; coerce to native types.
    sampling_size = entry.options.get(CONF_SAMPLES_MAX_BUFFER_SIZE)
    if sampling_size:
        sampling_size = int(sampling_size)

    max_age = None
    if max_age_input := entry.options.get(CONF_MAX_AGE):
        max_age = timedelta(
            hours=max_age_input["hours"],
            minutes=max_age_input["minutes"],
            seconds=max_age_input["seconds"],
        )

    async_add_entities(
        [
            StatisticsSensor(
                hass=hass,
                source_entity_id=entry.options[CONF_ENTITY_ID],
                name=entry.options[CONF_NAME],
                unique_id=entry.entry_id,
                state_characteristic=entry.options[CONF_STATE_CHARACTERISTIC],
                samples_max_buffer_size=sampling_size,
                samples_max_age=max_age,
                samples_keep_last=entry.options[CONF_KEEP_LAST_SAMPLE],
                precision=int(entry.options[CONF_PRECISION]),
                percentile=int(entry.options[CONF_PERCENTILE]),
            )
        ],
        True,
    )
325 
326 
class StatisticsSensor(SensorEntity):
    """Representation of a Statistics sensor."""

    # State is pushed from source-entity events; no polling needed.
    _attr_should_poll = False
    _attr_icon = ICON
332 
333  def __init__(
334  self,
335  hass: HomeAssistant,
336  source_entity_id: str,
337  name: str,
338  unique_id: str | None,
339  state_characteristic: str,
340  samples_max_buffer_size: int | None,
341  samples_max_age: timedelta | None,
342  samples_keep_last: bool,
343  precision: int,
344  percentile: int,
345  ) -> None:
346  """Initialize the Statistics sensor."""
347  self._attr_name: str = name
348  self._attr_unique_id: str | None = unique_id
349  self._source_entity_id: str = source_entity_id
351  hass,
352  source_entity_id,
353  )
354  self.is_binary: bool = (
355  split_entity_id(self._source_entity_id)[0] == BINARY_SENSOR_DOMAIN
356  )
357  self._state_characteristic_state_characteristic: str = state_characteristic
358  self._samples_max_buffer_size: int | None = samples_max_buffer_size
359  self._samples_max_age: timedelta | None = samples_max_age
360  self.samples_keep_last: bool = samples_keep_last
361  self._precision_precision: int = precision
362  self._percentile: int = percentile
363  self._attr_available_attr_available: bool = False
364 
365  self.states: deque[float | bool] = deque(maxlen=self._samples_max_buffer_size)
366  self.ages: deque[datetime] = deque(maxlen=self._samples_max_buffer_size)
367  self._attr_extra_state_attributes_attr_extra_state_attributes = {}
368 
369  self._state_characteristic_fn: Callable[[], float | int | datetime | None] = (
370  self._callable_characteristic_fn_callable_characteristic_fn(self._state_characteristic_state_characteristic)
371  )
372 
373  self._update_listener_update_listener: CALLBACK_TYPE | None = None
374  self._preview_callback_preview_callback: Callable[[str, Mapping[str, Any]], None] | None = None
375 
377  self,
378  preview_callback: Callable[[str, Mapping[str, Any]], None],
379  ) -> CALLBACK_TYPE:
380  """Render a preview."""
381  # abort early if there is no entity_id
382  # as without we can't track changes
383  # or either size or max_age is not set
384  if not self._source_entity_id or (
385  self._samples_max_buffer_size is None and self._samples_max_age is None
386  ):
387  self._attr_available_attr_available = False
388  calculated_state = self._async_calculate_state_async_calculate_state()
389  preview_callback(calculated_state.state, calculated_state.attributes)
390  return self._call_on_remove_callbacks_call_on_remove_callbacks
391 
392  self._preview_callback_preview_callback = preview_callback
393 
394  await self._async_stats_sensor_startup_async_stats_sensor_startup()
395  return self._call_on_remove_callbacks_call_on_remove_callbacks
396 
398  self,
399  reported_state: State | None,
400  ) -> None:
401  """Handle the sensor state changes."""
402  if (new_state := reported_state) is None:
403  return
404  self._add_state_to_queue_add_state_to_queue(new_state)
405  self._async_purge_update_and_schedule_async_purge_update_and_schedule()
406 
407  if self._preview_callback_preview_callback:
408  calculated_state = self._async_calculate_state_async_calculate_state()
409  self._preview_callback_preview_callback(calculated_state.state, calculated_state.attributes)
410  # only write state to the state machine if we are not in preview mode
411  if not self._preview_callback_preview_callback:
412  self.async_write_ha_stateasync_write_ha_state()
413 
414  @callback
416  self,
417  event: Event[EventStateChangedData],
418  ) -> None:
419  self._async_handle_new_state_async_handle_new_state(event.data["new_state"])
420 
421  @callback
423  self,
424  event: Event[EventStateReportedData],
425  ) -> None:
426  self._async_handle_new_state_async_handle_new_state(event.data["new_state"])
427 
428  async def _async_stats_sensor_startup(self) -> None:
429  """Add listener and get recorded state.
430 
431  Historical data needs to be loaded from the database first before we
432  can start accepting new incoming changes.
433  This is needed to ensure that the buffer is properly sorted by time.
434  """
435  _LOGGER.debug("Startup for %s", self.entity_identity_id)
436  if "recorder" in self.hasshass.config.components:
437  await self._initialize_from_database_initialize_from_database()
438  self.async_on_removeasync_on_remove(
440  self.hasshass,
441  [self._source_entity_id],
442  self._async_stats_sensor_state_change_listener_async_stats_sensor_state_change_listener,
443  )
444  )
445  self.async_on_removeasync_on_remove(
447  self.hasshass,
448  [self._source_entity_id],
449  self._async_stats_sensor_state_report_listener_async_stats_sensor_state_report_listener,
450  )
451  )
452 
453  async def async_added_to_hass(self) -> None:
454  """Register callbacks."""
455  await self._async_stats_sensor_startup_async_stats_sensor_startup()
456 
457  def _add_state_to_queue(self, new_state: State) -> None:
458  """Add the state to the queue."""
459 
460  # Attention: it is not safe to store the new_state object,
461  # since the "last_reported" value will be updated over time.
462  # Here we make a copy the current value, which is okay.
463  self._attr_available_attr_available = new_state.state != STATE_UNAVAILABLE
464  if new_state.state == STATE_UNAVAILABLE:
465  self._attr_extra_state_attributes_attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = None
466  return
467  if new_state.state in (STATE_UNKNOWN, None, ""):
468  self._attr_extra_state_attributes_attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = False
469  return
470 
471  try:
472  if self.is_binary:
473  assert new_state.state in ("on", "off")
474  self.states.append(new_state.state == "on")
475  else:
476  self.states.append(float(new_state.state))
477  self.ages.append(new_state.last_reported)
478  self._attr_extra_state_attributes_attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = True
479  except ValueError:
480  self._attr_extra_state_attributes_attr_extra_state_attributes[STAT_SOURCE_VALUE_VALID] = False
481  _LOGGER.error(
482  "%s: parsing error. Expected number or binary state, but received '%s'",
483  self.entity_identity_id,
484  new_state.state,
485  )
486  return
487 
488  self._calculate_state_attributes_calculate_state_attributes(new_state)
489 
490  def _calculate_state_attributes(self, new_state: State) -> None:
491  """Set the entity state attributes."""
492 
493  self._attr_native_unit_of_measurement_attr_native_unit_of_measurement = self._calculate_unit_of_measurement_calculate_unit_of_measurement(
494  new_state
495  )
496  self._attr_device_class_attr_device_class = self._calculate_device_class_calculate_device_class(
497  new_state, self._attr_native_unit_of_measurement_attr_native_unit_of_measurement
498  )
499  self._attr_state_class_attr_state_class = self._calculate_state_class_calculate_state_class(new_state)
500 
501  def _calculate_unit_of_measurement(self, new_state: State) -> str | None:
502  """Return the calculated unit of measurement.
503 
504  The unit of measurement is that of the source sensor, adjusted based on the
505  state characteristics.
506  """
507 
508  base_unit: str | None = new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
509  unit: str | None = None
510  if self.is_binary and self._state_characteristic_state_characteristic in STATS_BINARY_PERCENTAGE:
511  unit = PERCENTAGE
512  elif not base_unit:
513  unit = None
514  elif self._state_characteristic_state_characteristic in STATS_NUMERIC_RETAIN_UNIT:
515  unit = base_unit
516  elif (
517  self._state_characteristic_state_characteristic in STATS_NOT_A_NUMBER
518  or self._state_characteristic_state_characteristic
519  in (
520  STAT_COUNT,
521  STAT_COUNT_BINARY_ON,
522  STAT_COUNT_BINARY_OFF,
523  )
524  ):
525  unit = None
526  elif self._state_characteristic_state_characteristic == STAT_VARIANCE:
527  unit = base_unit + "²"
528  elif self._state_characteristic_state_characteristic == STAT_CHANGE_SAMPLE:
529  unit = base_unit + "/sample"
530  elif self._state_characteristic_state_characteristic == STAT_CHANGE_SECOND:
531  unit = base_unit + "/s"
532 
533  return unit
534 
536  self, new_state: State, unit: str | None
537  ) -> SensorDeviceClass | None:
538  """Return the calculated device class.
539 
540  The device class is calculated based on the state characteristics,
541  the source device class and the unit of measurement is
542  in the device class units list.
543  """
544 
545  device_class: SensorDeviceClass | None = None
546  if self._state_characteristic_state_characteristic in STATS_DATETIME:
547  return SensorDeviceClass.TIMESTAMP
548  if self._state_characteristic_state_characteristic in STATS_NUMERIC_RETAIN_UNIT:
549  device_class = new_state.attributes.get(ATTR_DEVICE_CLASS)
550  if device_class is None:
551  return None
552  if (
553  sensor_device_class := try_parse_enum(SensorDeviceClass, device_class)
554  ) is None:
555  return None
556  if (
557  sensor_device_class
558  and (
559  sensor_state_classes := DEVICE_CLASS_STATE_CLASSES.get(
560  sensor_device_class
561  )
562  )
563  and sensor_state_classes
564  and SensorStateClass.MEASUREMENT not in sensor_state_classes
565  ):
566  return None
567  if device_class not in DEVICE_CLASS_UNITS:
568  return None
569  if (
570  device_class in DEVICE_CLASS_UNITS
571  and unit not in DEVICE_CLASS_UNITS[device_class]
572  ):
573  return None
574 
575  return device_class
576 
577  def _calculate_state_class(self, new_state: State) -> SensorStateClass | None:
578  """Return the calculated state class.
579 
580  Will be None if the characteristics is not numerical, otherwise
581  SensorStateClass.MEASUREMENT.
582  """
583  if self._state_characteristic_state_characteristic in STATS_NOT_A_NUMBER:
584  return None
585  return SensorStateClass.MEASUREMENT
586 
587  def _purge_old_states(self, max_age: timedelta) -> None:
588  """Remove states which are older than a given age."""
589  now = dt_util.utcnow()
590 
591  _LOGGER.debug(
592  "%s: purging records older then %s(%s)(keep_last_sample: %s)",
593  self.entity_identity_id,
594  dt_util.as_local(now - max_age),
595  self._samples_max_age,
596  self.samples_keep_last,
597  )
598 
599  while self.ages and (now - self.ages[0]) > max_age:
600  if self.samples_keep_last and len(self.ages) == 1:
601  # Under normal circumstance this will not be executed, as a purge will not
602  # be scheduled for the last value if samples_keep_last is enabled.
603  # If this happens to be called outside normal scheduling logic or a
604  # source sensor update, this ensures the last value is preserved.
605  _LOGGER.debug(
606  "%s: preserving expired record with datetime %s(%s)",
607  self.entity_identity_id,
608  dt_util.as_local(self.ages[0]),
609  (now - self.ages[0]),
610  )
611  break
612 
613  _LOGGER.debug(
614  "%s: purging record with datetime %s(%s)",
615  self.entity_identity_id,
616  dt_util.as_local(self.ages[0]),
617  (now - self.ages[0]),
618  )
619  self.ages.popleft()
620  self.states.popleft()
621 
622  @callback
623  def _async_next_to_purge_timestamp(self) -> datetime | None:
624  """Find the timestamp when the next purge would occur."""
625  if self.ages and self._samples_max_age:
626  if self.samples_keep_last and len(self.ages) == 1:
627  # Preserve the most recent entry if it is the only value.
628  # Do not schedule another purge. When a new source
629  # value is inserted it will restart purge cycle.
630  _LOGGER.debug(
631  "%s: skipping purge cycle for last record with datetime %s(%s)",
632  self.entity_identity_id,
633  dt_util.as_local(self.ages[0]),
634  (dt_util.utcnow() - self.ages[0]),
635  )
636  return None
637  # Take the oldest entry from the ages list and add the configured max_age.
638  # If executed after purging old states, the result is the next timestamp
639  # in the future when the oldest state will expire.
640  return self.ages[0] + self._samples_max_age
641  return None
642 
643  async def async_update(self) -> None:
644  """Get the latest data and updates the states."""
645  self._async_purge_update_and_schedule_async_purge_update_and_schedule()
646 
648  """Purge old states, update the sensor and schedule the next update."""
649  _LOGGER.debug("%s: updating statistics", self.entity_identity_id)
650  if self._samples_max_age is not None:
651  self._purge_old_states_purge_old_states(self._samples_max_age)
652 
653  self._update_extra_state_attributes_update_extra_state_attributes()
654  self._update_value_update_value()
655 
656  # If max_age is set, ensure to update again after the defined interval.
657  # By basing updates off the timestamps of sampled data we avoid updating
658  # when none of the observed entities change.
659  if timestamp := self._async_next_to_purge_timestamp_async_next_to_purge_timestamp():
660  _LOGGER.debug("%s: scheduling update at %s", self.entity_identity_id, timestamp)
661  self._async_cancel_update_listener_async_cancel_update_listener()
663  self.hasshass, self._async_scheduled_update_async_scheduled_update, timestamp
664  )
665 
666  @callback
667  def _async_cancel_update_listener(self) -> None:
668  """Cancel the scheduled update listener."""
669  if self._update_listener_update_listener:
670  self._update_listener_update_listener()
671  self._update_listener_update_listener = None
672 
673  @callback
674  def _async_scheduled_update(self, now: datetime) -> None:
675  """Timer callback for sensor update."""
676  _LOGGER.debug("%s: executing scheduled update", self.entity_identity_id)
677  self._async_cancel_update_listener_async_cancel_update_listener()
678  self._async_purge_update_and_schedule_async_purge_update_and_schedule()
679  # only write state to the state machine if we are not in preview mode
680  if not self._preview_callback_preview_callback:
681  self.async_write_ha_stateasync_write_ha_state()
682 
683  def _fetch_states_from_database(self) -> list[State]:
684  """Fetch the states from the database."""
685  _LOGGER.debug("%s: initializing values from the database", self.entity_identity_id)
686  lower_entity_id = self._source_entity_id.lower()
687  if self._samples_max_age is not None:
688  start_date = (
689  dt_util.utcnow() - self._samples_max_age - timedelta(microseconds=1)
690  )
691  _LOGGER.debug(
692  "%s: retrieve records not older then %s",
693  self.entity_identity_id,
694  start_date,
695  )
696  else:
697  start_date = datetime.fromtimestamp(0, tz=dt_util.UTC)
698  _LOGGER.debug("%s: retrieving all records", self.entity_identity_id)
699  return history.state_changes_during_period(
700  self.hasshass,
701  start_date,
702  entity_id=lower_entity_id,
703  descending=True,
704  limit=self._samples_max_buffer_size,
705  include_start_time_state=False,
706  ).get(lower_entity_id, [])
707 
708  async def _initialize_from_database(self) -> None:
709  """Initialize the list of states from the database.
710 
711  The query will get the list of states in DESCENDING order so that we
712  can limit the result to self._sample_size. Afterwards reverse the
713  list so that we get it in the right order again.
714 
715  If MaxAge is provided then query will restrict to entries younger then
716  current datetime - MaxAge.
717  """
718  if states := await get_instance(self.hasshass).async_add_executor_job(
719  self._fetch_states_from_database_fetch_states_from_database
720  ):
721  for state in reversed(states):
722  self._add_state_to_queue_add_state_to_queue(state)
723  self._calculate_state_attributes_calculate_state_attributes(state)
724  self._async_purge_update_and_schedule_async_purge_update_and_schedule()
725 
726  # only write state to the state machine if we are not in preview mode
727  if self._preview_callback_preview_callback:
728  calculated_state = self._async_calculate_state_async_calculate_state()
729  self._preview_callback_preview_callback(calculated_state.state, calculated_state.attributes)
730  else:
731  self.async_write_ha_stateasync_write_ha_state()
732  _LOGGER.debug("%s: initializing from database completed", self.entity_identity_id)
733 
735  """Calculate and update the various attributes."""
736  if self._samples_max_buffer_size is not None:
737  self._attr_extra_state_attributes_attr_extra_state_attributes[STAT_BUFFER_USAGE_RATIO] = round(
738  len(self.states) / self._samples_max_buffer_size, 2
739  )
740 
741  if self._samples_max_age is not None:
742  if len(self.states) >= 1:
743  self._attr_extra_state_attributes_attr_extra_state_attributes[STAT_AGE_COVERAGE_RATIO] = round(
744  (self.ages[-1] - self.ages[0]).total_seconds()
745  / self._samples_max_age.total_seconds(),
746  2,
747  )
748  else:
749  self._attr_extra_state_attributes_attr_extra_state_attributes[STAT_AGE_COVERAGE_RATIO] = 0
750 
751  def _update_value(self) -> None:
752  """Front to call the right statistical characteristics functions.
753 
754  One of the _stat_*() functions is represented by self._state_characteristic_fn().
755  """
756 
757  value = self._state_characteristic_fn()
758  _LOGGER.debug(
759  "Updating value: states: %s, ages: %s => %s", self.states, self.ages, value
760  )
761  if self._state_characteristic_state_characteristic not in STATS_NOT_A_NUMBER:
762  with contextlib.suppress(TypeError):
763  value = round(cast(float, value), self._precision_precision)
764  if self._precision_precision == 0:
765  value = int(value)
766  self._attr_native_value_attr_native_value = value
767 
769  self, characteristic: str
770  ) -> Callable[[], float | int | datetime | None]:
771  """Return the function callable of one characteristic function."""
772  function: Callable[[], float | int | datetime | None] = getattr(
773  self,
774  f"_stat_binary_{characteristic}"
775  if self.is_binary
776  else f"_stat_{characteristic}",
777  )
778  return function
779 
780  # Statistics for numeric sensor
781 
782  def _stat_average_linear(self) -> StateType:
783  if len(self.states) == 1:
784  return self.states[0]
785  if len(self.states) >= 2:
786  area: float = 0
787  for i in range(1, len(self.states)):
788  area += (
789  0.5
790  * (self.states[i] + self.states[i - 1])
791  * (self.ages[i] - self.ages[i - 1]).total_seconds()
792  )
793  age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds()
794  return area / age_range_seconds
795  return None
796 
797  def _stat_average_step(self) -> StateType:
798  if len(self.states) == 1:
799  return self.states[0]
800  if len(self.states) >= 2:
801  area: float = 0
802  for i in range(1, len(self.states)):
803  area += (
804  self.states[i - 1]
805  * (self.ages[i] - self.ages[i - 1]).total_seconds()
806  )
807  age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds()
808  return area / age_range_seconds
809  return None
810 
811  def _stat_average_timeless(self) -> StateType:
812  return self._stat_mean_stat_mean()
813 
814  def _stat_change(self) -> StateType:
815  if len(self.states) > 0:
816  return self.states[-1] - self.states[0]
817  return None
818 
819  def _stat_change_sample(self) -> StateType:
820  if len(self.states) > 1:
821  return (self.states[-1] - self.states[0]) / (len(self.states) - 1)
822  return None
823 
824  def _stat_change_second(self) -> StateType:
825  if len(self.states) > 1:
826  age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds()
827  if age_range_seconds > 0:
828  return (self.states[-1] - self.states[0]) / age_range_seconds
829  return None
830 
831  def _stat_count(self) -> StateType:
832  return len(self.states)
833 
834  def _stat_datetime_newest(self) -> datetime | None:
835  if len(self.states) > 0:
836  return self.ages[-1]
837  return None
838 
839  def _stat_datetime_oldest(self) -> datetime | None:
840  if len(self.states) > 0:
841  return self.ages[0]
842  return None
843 
844  def _stat_datetime_value_max(self) -> datetime | None:
845  if len(self.states) > 0:
846  return self.ages[self.states.index(max(self.states))]
847  return None
848 
849  def _stat_datetime_value_min(self) -> datetime | None:
850  if len(self.states) > 0:
851  return self.ages[self.states.index(min(self.states))]
852  return None
853 
854  def _stat_distance_95_percent_of_values(self) -> StateType:
855  if len(self.states) >= 1:
856  return 2 * 1.96 * cast(float, self._stat_standard_deviation_stat_standard_deviation())
857  return None
858 
859  def _stat_distance_99_percent_of_values(self) -> StateType:
860  if len(self.states) >= 1:
861  return 2 * 2.58 * cast(float, self._stat_standard_deviation_stat_standard_deviation())
862  return None
863 
864  def _stat_distance_absolute(self) -> StateType:
865  if len(self.states) > 0:
866  return max(self.states) - min(self.states)
867  return None
868 
869  def _stat_mean(self) -> StateType:
870  if len(self.states) > 0:
871  return statistics.mean(self.states)
872  return None
873 
874  def _stat_mean_circular(self) -> StateType:
875  if len(self.states) > 0:
876  sin_sum = sum(math.sin(math.radians(x)) for x in self.states)
877  cos_sum = sum(math.cos(math.radians(x)) for x in self.states)
878  return (math.degrees(math.atan2(sin_sum, cos_sum)) + 360) % 360
879  return None
880 
881  def _stat_median(self) -> StateType:
882  if len(self.states) > 0:
883  return statistics.median(self.states)
884  return None
885 
886  def _stat_noisiness(self) -> StateType:
887  if len(self.states) == 1:
888  return 0.0
889  if len(self.states) >= 2:
890  return cast(float, self._stat_sum_differences_stat_sum_differences()) / (len(self.states) - 1)
891  return None
892 
893  def _stat_percentile(self) -> StateType:
894  if len(self.states) == 1:
895  return self.states[0]
896  if len(self.states) >= 2:
897  percentiles = statistics.quantiles(self.states, n=100, method="exclusive")
898  return percentiles[self._percentile - 1]
899  return None
900 
901  def _stat_standard_deviation(self) -> StateType:
902  if len(self.states) == 1:
903  return 0.0
904  if len(self.states) >= 2:
905  return statistics.stdev(self.states)
906  return None
907 
908  def _stat_sum(self) -> StateType:
909  if len(self.states) > 0:
910  return sum(self.states)
911  return None
912 
913  def _stat_sum_differences(self) -> StateType:
914  if len(self.states) == 1:
915  return 0.0
916  if len(self.states) >= 2:
917  return sum(
918  abs(j - i)
919  for i, j in zip(list(self.states), list(self.states)[1:], strict=False)
920  )
921  return None
922 
923  def _stat_sum_differences_nonnegative(self) -> StateType:
924  if len(self.states) == 1:
925  return 0.0
926  if len(self.states) >= 2:
927  return sum(
928  (j - i if j >= i else j - 0)
929  for i, j in zip(list(self.states), list(self.states)[1:], strict=False)
930  )
931  return None
932 
933  def _stat_total(self) -> StateType:
934  return self._stat_sum_stat_sum()
935 
936  def _stat_value_max(self) -> StateType:
937  if len(self.states) > 0:
938  return max(self.states)
939  return None
940 
941  def _stat_value_min(self) -> StateType:
942  if len(self.states) > 0:
943  return min(self.states)
944  return None
945 
946  def _stat_variance(self) -> StateType:
947  if len(self.states) == 1:
948  return 0.0
949  if len(self.states) >= 2:
950  return statistics.variance(self.states)
951  return None
952 
953  # Statistics for binary sensor
954 
955  def _stat_binary_average_step(self) -> StateType:
956  if len(self.states) == 1:
957  return 100.0 * int(self.states[0] is True)
958  if len(self.states) >= 2:
959  on_seconds: float = 0
960  for i in range(1, len(self.states)):
961  if self.states[i - 1] is True:
962  on_seconds += (self.ages[i] - self.ages[i - 1]).total_seconds()
963  age_range_seconds = (self.ages[-1] - self.ages[0]).total_seconds()
964  return 100 / age_range_seconds * on_seconds
965  return None
966 
967  def _stat_binary_average_timeless(self) -> StateType:
968  return self._stat_binary_mean_stat_binary_mean()
969 
    def _stat_binary_count(self) -> StateType:
        """Return the number of binary samples currently buffered."""
        return len(self.states)
972 
973  def _stat_binary_count_on(self) -> StateType:
974  return self.states.count(True)
975 
976  def _stat_binary_count_off(self) -> StateType:
977  return self.states.count(False)
978 
979  def _stat_binary_datetime_newest(self) -> datetime | None:
980  return self._stat_datetime_newest_stat_datetime_newest()
981 
982  def _stat_binary_datetime_oldest(self) -> datetime | None:
983  return self._stat_datetime_oldest_stat_datetime_oldest()
984 
985  def _stat_binary_mean(self) -> StateType:
986  if len(self.states) > 0:
987  return 100.0 / len(self.states) * self.states.count(True)
988  return None
None _async_stats_sensor_state_report_listener(self, Event[EventStateReportedData] event)
Definition: sensor.py:425
str|None _calculate_unit_of_measurement(self, State new_state)
Definition: sensor.py:501
None _async_stats_sensor_state_change_listener(self, Event[EventStateChangedData] event)
Definition: sensor.py:418
None _async_handle_new_state(self, State|None reported_state)
Definition: sensor.py:400
SensorStateClass|None _calculate_state_class(self, State new_state)
Definition: sensor.py:577
SensorDeviceClass|None _calculate_device_class(self, State new_state, str|None unit)
Definition: sensor.py:537
None _calculate_state_attributes(self, State new_state)
Definition: sensor.py:490
CALLBACK_TYPE async_start_preview(self, Callable[[str, Mapping[str, Any]], None] preview_callback)
Definition: sensor.py:379
None __init__(self, HomeAssistant hass, str source_entity_id, str name, str|None unique_id, str state_characteristic, int|None samples_max_buffer_size, timedelta|None samples_max_age, bool samples_keep_last, int precision, int percentile)
Definition: sensor.py:345
Callable[[], float|int|datetime|None] _callable_characteristic_fn(self, str characteristic)
Definition: sensor.py:770
CalculatedState _async_calculate_state(self)
Definition: entity.py:1059
None async_on_remove(self, CALLBACK_TYPE func)
Definition: entity.py:1331
web.Response get(self, web.Request request, str config_key)
Definition: view.py:88
None async_setup_platform(HomeAssistant hass, ConfigType config, AddEntitiesCallback async_add_entities, DiscoveryInfoType|None discovery_info=None)
Definition: sensor.py:266
dict[str, Any] valid_keep_last_sample(dict[str, Any] config)
Definition: sensor.py:226
dict[str, Any] valid_state_characteristic_configuration(dict[str, Any] config)
Definition: sensor.py:199
dict[str, Any] valid_boundary_configuration(dict[str, Any] config)
Definition: sensor.py:213
None async_setup_entry(HomeAssistant hass, ConfigEntry entry, AddEntitiesCallback async_add_entities)
Definition: sensor.py:294
tuple[str, str] split_entity_id(str entity_id)
Definition: core.py:214
dr.DeviceInfo|None async_device_info_to_link_from_entity(HomeAssistant hass, str entity_id_or_uuid)
Definition: device.py:28
CALLBACK_TYPE async_track_state_report_event(HomeAssistant hass, str|Iterable[str] entity_ids, Callable[[Event[EventStateReportedData]], Any] action, HassJobType|None job_type=None)
Definition: event.py:412
CALLBACK_TYPE async_track_state_change_event(HomeAssistant hass, str|Iterable[str] entity_ids, Callable[[Event[EventStateChangedData]], Any] action, HassJobType|None job_type=None)
Definition: event.py:314
CALLBACK_TYPE async_track_point_in_utc_time(HomeAssistant hass, HassJob[[datetime], Coroutine[Any, Any, None]|None]|Callable[[datetime], Coroutine[Any, Any, None]|None] action, datetime point_in_time)
Definition: event.py:1542
Recorder get_instance(HomeAssistant hass)
Definition: recorder.py:74
None async_setup_reload_service(HomeAssistant hass, str domain, Iterable[str] platforms)
Definition: reload.py:191