Home Assistant — unofficial source reference, version 2024.12.1.
File: homeassistant/components/sensor/recorder.py (statistics helper for the sensor integration).
This listing was extracted from generated documentation; line numbers embedded in the text are from that generator.
1 """Statistics helper for sensor."""
2 
3 from __future__ import annotations
4 
5 from collections import defaultdict
6 from collections.abc import Callable, Iterable
7 from contextlib import suppress
8 import datetime
9 import itertools
10 import logging
11 import math
12 from typing import Any
13 
14 from sqlalchemy.orm.session import Session
15 
from homeassistant.components.recorder import (
    DOMAIN as RECORDER_DOMAIN,
    get_instance,
    history,
    statistics,
)
from homeassistant.components.recorder.models import (
    StatisticData,
    StatisticMetaData,
    StatisticResult,
)
27 from homeassistant.const import (
28  ATTR_UNIT_OF_MEASUREMENT,
29  REVOLUTIONS_PER_MINUTE,
30  UnitOfIrradiance,
31  UnitOfSoundPressure,
32  UnitOfVolume,
33 )
34 from homeassistant.core import HomeAssistant, State, callback, split_entity_id
35 from homeassistant.exceptions import HomeAssistantError
36 from homeassistant.helpers import issue_registry as ir
37 from homeassistant.helpers.entity import entity_sources
38 from homeassistant.helpers.typing import UNDEFINED, UndefinedType
39 from homeassistant.loader import async_suggest_report_issue
40 from homeassistant.util import dt as dt_util
41 from homeassistant.util.async_ import run_callback_threadsafe
42 from homeassistant.util.enum import try_parse_enum
43 from homeassistant.util.hass_dict import HassKey
44 
45 from .const import (
46  ATTR_LAST_RESET,
47  ATTR_STATE_CLASS,
48  DOMAIN,
49  SensorStateClass,
50  UnitOfVolumeFlowRate,
51 )
52 
_LOGGER = logging.getLogger(__name__)

# Which statistics are compiled for each sensor state class.
DEFAULT_STATISTICS = {
    SensorStateClass.MEASUREMENT: {"mean", "min", "max"},
    SensorStateClass.TOTAL: {"sum"},
    SensorStateClass.TOTAL_INCREASING: {"sum"},
}

# Alternative spellings of units mapped to their canonical constants, so a
# sensor switching between equivalent spellings is not treated as a unit change.
EQUIVALENT_UNITS = {
    "BTU/(h×ft²)": UnitOfIrradiance.BTUS_PER_HOUR_SQUARE_FOOT,
    "dBa": UnitOfSoundPressure.WEIGHTED_DECIBEL_A,
    "RPM": REVOLUTIONS_PER_MINUTE,
    "ft3": UnitOfVolume.CUBIC_FEET,
    "m3": UnitOfVolume.CUBIC_METERS,
    "ft³/m": UnitOfVolumeFlowRate.CUBIC_FEET_PER_MINUTE,
}


# Keep track of entities for which a warning about decreasing value has been logged
SEEN_DIP: HassKey[set[str]] = HassKey(f"{DOMAIN}_seen_total_increasing_dip")
WARN_DIP: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_total_increasing_dip")
# Keep track of entities for which a warning about negative value has been logged
WARN_NEGATIVE: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_total_increasing_negative")
# Keep track of entities for which a warning about unsupported unit has been logged
WARN_UNSUPPORTED_UNIT: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_unsupported_unit")
WARN_UNSTABLE_UNIT: HassKey[set[str]] = HassKey(f"{DOMAIN}_warn_unstable_unit")
# Link to dev statistics where issues around LTS can be fixed
LINK_DEV_STATISTICS = "https://my.home-assistant.io/redirect/developer_statistics"
81 
82 
def _get_sensor_states(hass: HomeAssistant) -> list[State]:
    """Get the current state of all sensors for which to compile statistics."""
    instance = get_instance(hass)
    # The state-class check is cheap; the recorder's entity filter callback is
    # comparatively expensive, so it is evaluated last.
    entity_filter = instance.entity_filter
    selected: list[State] = []
    for state in hass.states.all(DOMAIN):
        state_class = state.attributes.get(ATTR_STATE_CLASS)
        if not state_class:
            continue
        if type(state_class) is not SensorStateClass and not try_parse_enum(
            SensorStateClass, state_class
        ):
            continue
        if entity_filter and not entity_filter(state.entity_id):
            continue
        selected.append(state)
    return selected
100 
101 
103  fstates: list[tuple[float, State]], start: datetime.datetime, end: datetime.datetime
104 ) -> float:
105  """Calculate a time weighted average.
106 
107  The average is calculated by weighting the states by duration in seconds between
108  state changes.
109  Note: there's no interpolation of values between state changes.
110  """
111  old_fstate: float | None = None
112  old_start_time: datetime.datetime | None = None
113  accumulated = 0.0
114 
115  for fstate, state in fstates:
116  # The recorder will give us the last known state, which may be well
117  # before the requested start time for the statistics
118  start_time = max(state.last_updated, start)
119  if old_start_time is None:
120  # Adjust start time, if there was no last known state
121  start = start_time
122  else:
123  duration = start_time - old_start_time
124  # Accumulate the value, weighted by duration until next state change
125  assert old_fstate is not None
126  accumulated += old_fstate * duration.total_seconds()
127 
128  old_fstate = fstate
129  old_start_time = start_time
130 
131  if old_fstate is not None:
132  # Accumulate the value, weighted by duration until end of the period
133  assert old_start_time is not None
134  duration = end - old_start_time
135  accumulated += old_fstate * duration.total_seconds()
136 
137  period_seconds = (end - start).total_seconds()
138  if period_seconds == 0:
139  # If the only state changed that happened was at the exact moment
140  # at the end of the period, we can't calculate a meaningful average
141  # so we return 0.0 since it represents a time duration smaller than
142  # we can measure. This probably means the precision of statistics
143  # column schema in the database is incorrect but it is actually possible
144  # to happen if the state change event fired at the exact microsecond
145  return 0.0
146  return accumulated / period_seconds
147 
148 
149 def _get_units(fstates: list[tuple[float, State]]) -> set[str | None]:
150  """Return a set of all units."""
151  return {item[1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) for item in fstates}
152 
153 
154 def _equivalent_units(units: set[str | None]) -> bool:
155  """Return True if the units are equivalent."""
156  if len(units) == 1:
157  return True
158  units = {
159  EQUIVALENT_UNITS[unit] if unit in EQUIVALENT_UNITS else unit # noqa: SIM401
160  for unit in units
161  }
162  return len(units) == 1
163 
164 
166  entity_history: Iterable[State],
167 ) -> list[tuple[float, State]]:
168  """Return a list of (float, state) tuples for the given entity."""
169  float_states: list[tuple[float, State]] = []
170  append = float_states.append
171  isfinite = math.isfinite
172  for state in entity_history:
173  try:
174  if (float_state := float(state.state)) is not None and isfinite(
175  float_state
176  ):
177  append((float_state, state))
178  except (ValueError, TypeError):
179  pass
180  return float_states
181 
182 
183 def _is_numeric(state: State) -> bool:
184  """Return if the state is numeric."""
185  with suppress(ValueError, TypeError):
186  if (num_state := float(state.state)) is not None and math.isfinite(num_state):
187  return True
188  return False
189 
190 
def _normalize_states(
    hass: HomeAssistant,
    old_metadatas: dict[str, tuple[int, StatisticMetaData]],
    fstates: list[tuple[float, State]],
    entity_id: str,
) -> tuple[str | None, list[tuple[float, State]]]:
    """Normalize units.

    Returns the unit statistics are tracked in and the states converted to that
    unit. Returns (None, []) when the unit is unstable and not convertible.
    """
    state_unit: str | None = None
    statistics_unit: str | None
    state_unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT)
    old_metadata = old_metadatas[entity_id][1] if entity_id in old_metadatas else None
    if not old_metadata:
        # We've not seen this sensor before, the first valid state determines the unit
        # used for statistics
        statistics_unit = state_unit
    else:
        # We have seen this sensor before, use the unit from metadata
        statistics_unit = old_metadata["unit_of_measurement"]

    if statistics_unit not in statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER:
        # The unit used by this sensor doesn't support unit conversion

        all_units = _get_units(fstates)
        if not _equivalent_units(all_units):
            # Unstable unit without conversion support: suppress statistics and
            # warn once per entity.
            if WARN_UNSTABLE_UNIT not in hass.data:
                hass.data[WARN_UNSTABLE_UNIT] = set()
            if entity_id not in hass.data[WARN_UNSTABLE_UNIT]:
                hass.data[WARN_UNSTABLE_UNIT].add(entity_id)
                extra = ""
                if old_metadata:
                    extra = (
                        " and matches the unit of already compiled statistics "
                        f"({old_metadata['unit_of_measurement']})"
                    )
                _LOGGER.warning(
                    (
                        "The unit of %s is changing, got multiple %s, generation of"
                        " long term statistics will be suppressed unless the unit is"
                        " stable%s. Go to %s to fix this"
                    ),
                    entity_id,
                    all_units,
                    extra,
                    LINK_DEV_STATISTICS,
                )
            return None, []

        return state_unit, fstates

    converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER[statistics_unit]
    valid_fstates: list[tuple[float, State]] = []
    convert: Callable[[float], float] | None = None
    last_unit: str | None | UndefinedType = UNDEFINED
    valid_units = converter.VALID_UNITS

    for fstate, state in fstates:
        state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
        # Exclude states with unsupported unit from statistics
        if state_unit not in valid_units:
            if WARN_UNSUPPORTED_UNIT not in hass.data:
                hass.data[WARN_UNSUPPORTED_UNIT] = set()
            if entity_id not in hass.data[WARN_UNSUPPORTED_UNIT]:
                hass.data[WARN_UNSUPPORTED_UNIT].add(entity_id)
                _LOGGER.warning(
                    (
                        "The unit of %s (%s) cannot be converted to the unit of"
                        " previously compiled statistics (%s). Generation of long term"
                        " statistics will be suppressed unless the unit changes back to"
                        " %s or a compatible unit. Go to %s to fix this"
                    ),
                    entity_id,
                    state_unit,
                    statistics_unit,
                    statistics_unit,
                    LINK_DEV_STATISTICS,
                )
            continue

        if state_unit != last_unit:
            # The unit of measurement has changed since the last state change
            # recreate the converter factory
            if state_unit == statistics_unit:
                convert = None
            else:
                convert = converter.converter_factory(state_unit, statistics_unit)
            last_unit = state_unit

        if convert is not None:
            fstate = convert(fstate)

        valid_fstates.append((fstate, state))

    return statistics_unit, valid_fstates
284 
285 
def _suggest_report_issue(hass: HomeAssistant, entity_id: str) -> str:
    """Suggest to report an issue, attributed to the entity's integration."""
    entity_info = entity_sources(hass).get(entity_id)
    # entity_info may be missing for entities not registered via the helper.
    return async_suggest_report_issue(
        hass, integration_domain=entity_info["domain"] if entity_info else None
    )
293 
294 
def warn_dip(
    hass: HomeAssistant, entity_id: str, state: State, previous_fstate: float
) -> None:
    """Log a warning once if a sensor with state_class_total has a decreasing value.

    The log will be suppressed until two dips have been seen to prevent warning due to
    rounding issues with databases storing the state as a single precision float, which
    was fixed in recorder DB version 20.
    """
    if SEEN_DIP not in hass.data:
        hass.data[SEEN_DIP] = set()
    if entity_id not in hass.data[SEEN_DIP]:
        # First dip seen: remember it, but do not warn yet.
        hass.data[SEEN_DIP].add(entity_id)
        return
    if WARN_DIP not in hass.data:
        hass.data[WARN_DIP] = set()
    if entity_id not in hass.data[WARN_DIP]:
        hass.data[WARN_DIP].add(entity_id)
        entity_info = entity_sources(hass).get(entity_id)
        domain = entity_info["domain"] if entity_info else None
        # These integrations are known to dip; suppress the warning for them.
        if domain in ["energy", "growatt_server", "solaredge"]:
            return
        _LOGGER.warning(
            (
                "Entity %s %shas state class total_increasing, but its state is not"
                " strictly increasing. Triggered by state %s (%s) with last_updated set"
                " to %s. Please %s"
            ),
            entity_id,
            f"from integration {domain} " if domain else "",
            state.state,
            previous_fstate,
            state.last_updated.isoformat(),
            _suggest_report_issue(hass, entity_id),
        )
330 
331 
def warn_negative(hass: HomeAssistant, entity_id: str, state: State) -> None:
    """Log a warning once if a sensor with state_class_total has a negative value."""
    warned: set[str] = hass.data.setdefault(WARN_NEGATIVE, set())
    if entity_id in warned:
        # Already warned for this entity; stay silent.
        return
    warned.add(entity_id)
    entity_info = entity_sources(hass).get(entity_id)
    domain = entity_info["domain"] if entity_info else None
    _LOGGER.warning(
        (
            "Entity %s %shas state class total_increasing, but its state is "
            "negative. Triggered by state %s with last_updated set to %s. Please %s"
        ),
        entity_id,
        f"from integration {domain} " if domain else "",
        state.state,
        state.last_updated.isoformat(),
        _suggest_report_issue(hass, entity_id),
    )
351 
352 
def reset_detected(
    hass: HomeAssistant,
    entity_id: str,
    fstate: float,
    previous_fstate: float | None,
    state: State,
) -> bool:
    """Test if a total_increasing sensor has been reset.

    Raises HomeAssistantError when the state is negative, after logging a
    warning. A drop of less than 10% is treated as a dip (warned about) rather
    than a reset.
    """
    if previous_fstate is None:
        return False

    # Small dip (less than 10%): warn but do not treat as a reset.
    if 0.9 * previous_fstate <= fstate < previous_fstate:
        warn_dip(hass, entity_id, state, previous_fstate)

    if fstate < 0:
        warn_negative(hass, entity_id, state)
        raise HomeAssistantError

    return fstate < 0.9 * previous_fstate
372 
373 
374 def _wanted_statistics(sensor_states: list[State]) -> dict[str, set[str]]:
375  """Prepare a dict with wanted statistics for entities."""
376  return {
377  state.entity_id: DEFAULT_STATISTICS[state.attributes[ATTR_STATE_CLASS]]
378  for state in sensor_states
379  }
380 
381 
382 def _last_reset_as_utc_isoformat(last_reset_s: Any, entity_id: str) -> str | None:
383  """Parse last_reset and convert it to UTC."""
384  if last_reset_s is None:
385  return None
386  if isinstance(last_reset_s, str):
387  last_reset = dt_util.parse_datetime(last_reset_s)
388  else:
389  last_reset = None
390  if last_reset is None:
391  _LOGGER.warning(
392  "Ignoring invalid last reset '%s' for %s", last_reset_s, entity_id
393  )
394  return None
395  return dt_util.as_utc(last_reset).isoformat()
396 
397 
398 def _timestamp_to_isoformat_or_none(timestamp: float | None) -> str | None:
399  """Convert a timestamp to ISO format or return None."""
400  if timestamp is None:
401  return None
402  return dt_util.utc_from_timestamp(timestamp).isoformat()
403 
404 
def compile_statistics(  # noqa: C901
    hass: HomeAssistant,
    session: Session,
    start: datetime.datetime,
    end: datetime.datetime,
) -> statistics.PlatformCompiledStatistics:
    """Compile statistics for all entities during start-end.

    Fetches history for the period, normalizes units, computes min/max/mean for
    measurement sensors and running sums for total/total_increasing sensors,
    and returns the compiled statistics together with the metadata used.
    """
    result: list[StatisticResult] = []

    sensor_states = _get_sensor_states(hass)
    wanted_statistics = _wanted_statistics(sensor_states)
    # Get history between start and end
    # Sum-type sensors need every state change, not only significant ones.
    entities_full_history = [
        i.entity_id for i in sensor_states if "sum" in wanted_statistics[i.entity_id]
    ]
    history_list: dict[str, list[State]] = {}
    if entities_full_history:
        history_list = history.get_full_significant_states_with_session(
            hass,
            session,
            start - datetime.timedelta.resolution,
            end,
            entity_ids=entities_full_history,
            significant_changes_only=False,
        )
    entities_significant_history = [
        i.entity_id
        for i in sensor_states
        if "sum" not in wanted_statistics[i.entity_id]
    ]
    if entities_significant_history:
        _history_list = history.get_full_significant_states_with_session(
            hass,
            session,
            start - datetime.timedelta.resolution,
            end,
            entity_ids=entities_significant_history,
        )
        history_list = {**history_list, **_history_list}

    entities_with_float_states: dict[str, list[tuple[float, State]]] = {}
    for _state in sensor_states:
        entity_id = _state.entity_id
        # If there are no recent state changes, the sensor's state may already be pruned
        # from the recorder. Get the state from the state machine instead.
        if not (entity_history := history_list.get(entity_id, [_state])):
            continue
        if not (float_states := _entity_history_to_float_and_state(entity_history)):
            continue
        entities_with_float_states[entity_id] = float_states

    # Only lookup metadata for entities that have valid float states
    # since it will result in cache misses for statistic_ids
    # that are not in the metadata table and we are not working
    # with them anyway.
    old_metadatas = statistics.get_metadata_with_session(
        get_instance(hass), session, statistic_ids=set(entities_with_float_states)
    )
    to_process: list[tuple[str, str | None, str, list[tuple[float, State]]]] = []
    to_query: set[str] = set()
    for _state in sensor_states:
        entity_id = _state.entity_id
        if not (maybe_float_states := entities_with_float_states.get(entity_id)):
            continue
        statistics_unit, valid_float_states = _normalize_states(
            hass,
            old_metadatas,
            maybe_float_states,
            entity_id,
        )
        if not valid_float_states:
            continue
        state_class: str = _state.attributes[ATTR_STATE_CLASS]
        to_process.append((entity_id, statistics_unit, state_class, valid_float_states))
        if "sum" in wanted_statistics[entity_id]:
            to_query.add(entity_id)

    # Previous short-term statistics seed the running sums below.
    last_stats = statistics.get_latest_short_term_statistics_with_session(
        hass, session, to_query, {"last_reset", "state", "sum"}, metadata=old_metadatas
    )
    for (  # pylint: disable=too-many-nested-blocks
        entity_id,
        statistics_unit,
        state_class,
        valid_float_states,
    ) in to_process:
        # Check metadata
        if old_metadata := old_metadatas.get(entity_id):
            if not _equivalent_units(
                {old_metadata[1]["unit_of_measurement"], statistics_unit}
            ):
                # Unit changed incompatibly: skip this entity, warn once.
                if WARN_UNSTABLE_UNIT not in hass.data:
                    hass.data[WARN_UNSTABLE_UNIT] = set()
                if entity_id not in hass.data[WARN_UNSTABLE_UNIT]:
                    hass.data[WARN_UNSTABLE_UNIT].add(entity_id)
                    _LOGGER.warning(
                        (
                            "The unit of %s (%s) cannot be converted to the unit of"
                            " previously compiled statistics (%s). Generation of long"
                            " term statistics will be suppressed unless the unit"
                            " changes back to %s or a compatible unit. Go to %s to fix"
                            " this"
                        ),
                        entity_id,
                        statistics_unit,
                        old_metadata[1]["unit_of_measurement"],
                        old_metadata[1]["unit_of_measurement"],
                        LINK_DEV_STATISTICS,
                    )
                continue

        # Set meta data
        meta: StatisticMetaData = {
            "has_mean": "mean" in wanted_statistics[entity_id],
            "has_sum": "sum" in wanted_statistics[entity_id],
            "name": None,
            "source": RECORDER_DOMAIN,
            "statistic_id": entity_id,
            "unit_of_measurement": statistics_unit,
        }

        # Make calculations
        stat: StatisticData = {"start": start}
        if "max" in wanted_statistics[entity_id]:
            # islice(zip(...), 1) yields the tuple of float values only.
            stat["max"] = max(
                *itertools.islice(zip(*valid_float_states, strict=False), 1)
            )
        if "min" in wanted_statistics[entity_id]:
            stat["min"] = min(
                *itertools.islice(zip(*valid_float_states, strict=False), 1)
            )

        if "mean" in wanted_statistics[entity_id]:
            stat["mean"] = _time_weighted_average(valid_float_states, start, end)

        if "sum" in wanted_statistics[entity_id]:
            last_reset = old_last_reset = None
            new_state = old_state = None
            _sum = 0.0
            if entity_id in last_stats:
                # We have compiled history for this sensor before,
                # use that as a starting point.
                last_stat = last_stats[entity_id][0]
                last_reset = _timestamp_to_isoformat_or_none(last_stat["last_reset"])
                old_last_reset = last_reset
                # If there are no previous values and has_sum
                # was previously false there will be no last_stat
                # for state or sum
                new_state = old_state = last_stat.get("state")
                _sum = last_stat.get("sum") or 0.0

            for fstate, state in valid_float_states:
                reset = False
                if (
                    state_class != SensorStateClass.TOTAL_INCREASING
                    and (
                        last_reset := _last_reset_as_utc_isoformat(
                            state.attributes.get("last_reset"), entity_id
                        )
                    )
                    != old_last_reset
                    and last_reset is not None
                ):
                    # TOTAL sensors signal a new cycle via the last_reset attribute.
                    if old_state is None:
                        _LOGGER.info(
                            (
                                "Compiling initial sum statistics for %s, zero point"
                                " set to %s"
                            ),
                            entity_id,
                            fstate,
                        )
                    else:
                        _LOGGER.info(
                            (
                                "Detected new cycle for %s, last_reset set to %s (old"
                                " last_reset %s)"
                            ),
                            entity_id,
                            last_reset,
                            old_last_reset,
                        )
                    reset = True
                elif old_state is None and last_reset is None:
                    reset = True
                    _LOGGER.info(
                        "Compiling initial sum statistics for %s, zero point set to %s",
                        entity_id,
                        fstate,
                    )
                elif state_class == SensorStateClass.TOTAL_INCREASING:
                    try:
                        # A drop of more than 10% counts as a meter reset.
                        if old_state is None or reset_detected(
                            hass, entity_id, fstate, new_state, state
                        ):
                            reset = True
                            _LOGGER.info(
                                (
                                    "Detected new cycle for %s, value dropped from %s"
                                    " to %s, triggered by state with last_updated set"
                                    " to %s"
                                ),
                                entity_id,
                                new_state,
                                fstate,
                                state.last_updated.isoformat(),
                            )
                    except HomeAssistantError:
                        # Negative state: skip this state entirely.
                        continue

                if reset:
                    # The sensor has been reset, update the sum
                    if old_state is not None and new_state is not None:
                        _sum += new_state - old_state
                    # ..and update the starting point
                    new_state = fstate
                    old_last_reset = last_reset
                    # Force a new cycle for an existing sensor to start at 0
                    if old_state is not None:
                        old_state = 0.0
                    else:
                        old_state = new_state
                else:
                    new_state = fstate

            if new_state is None or old_state is None:
                # No valid updates
                continue

            # Update the sum with the last state
            _sum += new_state - old_state
            if last_reset is not None:
                stat["last_reset"] = dt_util.parse_datetime(last_reset)
            stat["sum"] = _sum
            stat["state"] = new_state

        result.append({"meta": meta, "stat": stat})

    return statistics.PlatformCompiledStatistics(result, old_metadatas)
644 
645 
def list_statistic_ids(
    hass: HomeAssistant,
    statistic_ids: list[str] | tuple[str] | None = None,
    statistic_type: str | None = None,
) -> dict:
    """Return all or filtered statistic_ids and meta data."""
    entities = _get_sensor_states(hass)

    result: dict[str, StatisticMetaData] = {}

    for state in entities:
        entity_id = state.entity_id
        if statistic_ids is not None and entity_id not in statistic_ids:
            continue

        attributes = state.attributes
        state_class = attributes[ATTR_STATE_CLASS]
        provided_statistics = DEFAULT_STATISTICS[state_class]
        if statistic_type is not None and statistic_type not in provided_statistics:
            continue

        # A MEASUREMENT sensor that provides a sum must declare last_reset.
        if (
            (has_sum := "sum" in provided_statistics)
            and ATTR_LAST_RESET not in attributes
            and state_class == SensorStateClass.MEASUREMENT
        ):
            continue

        result[entity_id] = {
            "has_mean": "mean" in provided_statistics,
            "has_sum": has_sum,
            "name": None,
            "source": RECORDER_DOMAIN,
            "statistic_id": entity_id,
            "unit_of_measurement": attributes.get(ATTR_UNIT_OF_MEASUREMENT),
        }

    return result
684 
685 
@callback
def _update_issues(
    report_issue: Callable[[str, str, dict[str, Any]], None],
    sensor_states: list[State],
    metadatas: dict[str, tuple[int, StatisticMetaData]],
) -> None:
    """Update repair issues.

    For each sensor with existing statistics metadata, reports
    'state_class_removed' or 'units_changed' through the supplied callback.
    """
    for state in sensor_states:
        entity_id = state.entity_id
        numeric = _is_numeric(state)
        state_class = try_parse_enum(
            SensorStateClass, state.attributes.get(ATTR_STATE_CLASS)
        )
        state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)

        if metadata := metadatas.get(entity_id):
            if numeric and state_class is None:
                # Sensor no longer has a valid state class
                report_issue(
                    "state_class_removed",
                    entity_id,
                    {"statistic_id": entity_id},
                )

            metadata_unit = metadata[1]["unit_of_measurement"]
            converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER.get(metadata_unit)
            if not converter:
                if numeric and not _equivalent_units({state_unit, metadata_unit}):
                    # The unit has changed, and it's not possible to convert
                    report_issue(
                        "units_changed",
                        entity_id,
                        {
                            "statistic_id": entity_id,
                            "state_unit": state_unit,
                            "metadata_unit": metadata_unit,
                            "supported_unit": metadata_unit,
                        },
                    )
            elif numeric and state_unit not in converter.VALID_UNITS:
                # The state unit can't be converted to the unit in metadata
                valid_units = (unit or "<None>" for unit in converter.VALID_UNITS)
                valid_units_str = ", ".join(sorted(valid_units))
                report_issue(
                    "units_changed",
                    entity_id,
                    {
                        "statistic_id": entity_id,
                        "state_unit": state_unit,
                        "metadata_unit": metadata_unit,
                        "supported_unit": valid_units_str,
                    },
                )
739 
740 
def update_statistics_issues(
    hass: HomeAssistant,
    session: Session,
) -> None:
    """Update repair issues for sensor statistics.

    Runs in the recorder thread: creates issue-registry issues for current
    problems and deletes issues that no longer apply.
    """
    instance = get_instance(hass)
    sensor_states = hass.states.all(DOMAIN)
    metadatas = statistics.get_metadata_with_session(
        instance, session, statistic_source=RECORDER_DOMAIN
    )

    @callback
    def get_sensor_statistics_issues(hass: HomeAssistant) -> set[str]:
        """Return a list of statistics issues."""
        issues = set()
        issue_registry = ir.async_get(hass)
        for issue in issue_registry.issues.values():
            if (
                issue.domain != DOMAIN
                or not (issue_data := issue.data)
                or issue_data.get("issue_type")
                not in ("state_class_removed", "units_changed")
            ):
                continue
            issues.add(issue.issue_id)
        return issues

    # The issue registry must be read from the event loop thread.
    issues = run_callback_threadsafe(
        hass.loop, get_sensor_statistics_issues, hass
    ).result()

    def create_issue_registry_issue(
        issue_type: str, statistic_id: str, data: dict[str, Any]
    ) -> None:
        """Create an issue registry issue."""
        issue_id = f"{issue_type}_{statistic_id}"
        # Still-valid issues are removed from `issues` so they survive cleanup.
        issues.discard(issue_id)
        ir.create_issue(
            hass,
            DOMAIN,
            issue_id,
            data=data | {"issue_type": issue_type},
            is_fixable=False,
            severity=ir.IssueSeverity.WARNING,
            translation_key=issue_type,
            translation_placeholders=data,
        )

    _update_issues(
        create_issue_registry_issue,
        sensor_states,
        metadatas,
    )
    # Whatever remains in `issues` is stale: delete it from the event loop.
    for issue_id in issues:
        hass.loop.call_soon_threadsafe(ir.async_delete_issue, hass, DOMAIN, issue_id)
796 
797 
def validate_statistics(
    hass: HomeAssistant,
) -> dict[str, list[statistics.ValidationIssue]]:
    """Validate statistics.

    Returns a mapping of statistic_id to the validation issues found for it.
    """
    validation_result = defaultdict(list)

    sensor_states = hass.states.all(DOMAIN)
    metadatas = statistics.get_metadata(hass, statistic_source=RECORDER_DOMAIN)
    sensor_entity_ids = {i.entity_id for i in sensor_states}
    sensor_statistic_ids = set(metadatas)
    instance = get_instance(hass)
    entity_filter = instance.entity_filter

    def create_statistic_validation_issue(
        issue_type: str, statistic_id: str, data: dict[str, Any]
    ) -> None:
        """Create a statistic validation issue."""
        validation_result[statistic_id].append(
            statistics.ValidationIssue(issue_type, data)
        )

    _update_issues(
        create_statistic_validation_issue,
        sensor_states,
        metadatas,
    )

    for state in sensor_states:
        entity_id = state.entity_id
        state_class = try_parse_enum(
            SensorStateClass, state.attributes.get(ATTR_STATE_CLASS)
        )

        if entity_id in metadatas:
            if entity_filter and not entity_filter(state.entity_id):
                # Sensor was previously recorded, but no longer is
                validation_result[entity_id].append(
                    statistics.ValidationIssue(
                        "entity_no_longer_recorded",
                        {"statistic_id": entity_id},
                    )
                )
        elif state_class is not None:
            if entity_filter and not entity_filter(state.entity_id):
                # Sensor is not recorded
                validation_result[entity_id].append(
                    statistics.ValidationIssue(
                        "entity_not_recorded",
                        {"statistic_id": entity_id},
                    )
                )

    for statistic_id in sensor_statistic_ids - sensor_entity_ids:
        if split_entity_id(statistic_id)[0] != DOMAIN:
            continue
        # There is no sensor matching the statistics_id
        validation_result[statistic_id].append(
            statistics.ValidationIssue(
                "no_state",
                {
                    "statistic_id": statistic_id,
                },
            )
        )

    return validation_result
bool add(self, _T matcher)
Definition: match.py:185
web.Response get(self, web.Request request, str config_key)
Definition: view.py:88
statistics.PlatformCompiledStatistics compile_statistics(HomeAssistant hass, Session session, datetime.datetime start, datetime.datetime end)
Definition: recorder.py:410
list[State] _get_sensor_states(HomeAssistant hass)
Definition: recorder.py:83
str _suggest_report_issue(HomeAssistant hass, str entity_id)
Definition: recorder.py:286
bool _equivalent_units(set[str|None] units)
Definition: recorder.py:154
float _time_weighted_average(list[tuple[float, State]] fstates, datetime.datetime start, datetime.datetime end)
Definition: recorder.py:104
dict list_statistic_ids(HomeAssistant hass, list[str]|tuple[str]|None statistic_ids=None, str|None statistic_type=None)
Definition: recorder.py:650
None warn_dip(HomeAssistant hass, str entity_id, State state, float previous_fstate)
Definition: recorder.py:297
bool reset_detected(HomeAssistant hass, str entity_id, float fstate, float|None previous_fstate, State state)
Definition: recorder.py:359
dict[str, set[str]] _wanted_statistics(list[State] sensor_states)
Definition: recorder.py:374
None _update_issues(Callable[[str, str, dict[str, Any]], None] report_issue, list[State] sensor_states, dict[str, tuple[int, StatisticMetaData]] metadatas)
Definition: recorder.py:691
None warn_negative(HomeAssistant hass, str entity_id, State state)
Definition: recorder.py:332
dict[str, list[statistics.ValidationIssue]] validate_statistics(HomeAssistant hass)
Definition: recorder.py:800
None update_statistics_issues(HomeAssistant hass, Session session)
Definition: recorder.py:744
set[str|None] _get_units(list[tuple[float, State]] fstates)
Definition: recorder.py:149
str|None _timestamp_to_isoformat_or_none(float|None timestamp)
Definition: recorder.py:398
str|None _last_reset_as_utc_isoformat(Any last_reset_s, str entity_id)
Definition: recorder.py:382
list[tuple[float, State]] _entity_history_to_float_and_state(Iterable[State] entity_history)
Definition: recorder.py:167
tuple[str|None, list[tuple[float, State]]] _normalize_states(HomeAssistant hass, dict[str, tuple[int, StatisticMetaData]] old_metadatas, list[tuple[float, State]] fstates, str entity_id)
Definition: recorder.py:196
tuple[str, str] split_entity_id(str entity_id)
Definition: core.py:214
dict[str, EntityInfo] entity_sources(HomeAssistant hass)
Definition: entity.py:98
Recorder get_instance(HomeAssistant hass)
Definition: recorder.py:74
str async_suggest_report_issue(HomeAssistant|None hass, *Integration|None integration=None, str|None integration_domain=None, str|None module=None)
Definition: loader.py:1752