1 """Models for SQLAlchemy."""
3 from __future__
import annotations
5 from collections.abc
import Callable
6 from datetime
import datetime, timedelta
9 from typing
import Any, Self, cast
12 from fnv_hash_fast
import fnv1a_32
13 from sqlalchemy
import (
32 from sqlalchemy.dialects
import mysql, oracle, postgresql, sqlite
33 from sqlalchemy.engine.interfaces
import Dialect
34 from sqlalchemy.ext.compiler
import compiles
35 from sqlalchemy.orm
import DeclarativeBase, Mapped, aliased, mapped_column, relationship
36 from sqlalchemy.types
import TypeDecorator
42 ATTR_UNIT_OF_MEASUREMENT,
44 MAX_LENGTH_EVENT_EVENT_TYPE,
45 MAX_LENGTH_STATE_ENTITY_ID,
46 MAX_LENGTH_STATE_STATE,
48 from homeassistant.core import Context, Event, EventOrigin, EventStateChangedData, State
52 JSON_DECODE_EXCEPTIONS,
57 from .const
import ALL_DOMAIN_EXCLUDE_ATTRS, SupportedDialect
60 StatisticDataTimestamp,
62 bytes_to_ulid_or_none,
63 bytes_to_uuid_hex_or_none,
64 datetime_to_timestamp_or_none,
66 ulid_to_bytes_or_none,
67 uuid_hex_to_bytes_or_none,
class Base(DeclarativeBase):
    """Base class for tables."""


class LegacyBase(DeclarativeBase):
    """Base class for tables, used for schema migration."""


_LOGGER = logging.getLogger(__name__)
# Table names used by the recorder schema.  Keep these in sync with the
# __tablename__ attributes of the model classes below.
TABLE_EVENTS = "events"
TABLE_EVENT_DATA = "event_data"
TABLE_EVENT_TYPES = "event_types"
TABLE_STATES = "states"
TABLE_STATE_ATTRIBUTES = "state_attributes"
TABLE_STATES_META = "states_meta"
TABLE_RECORDER_RUNS = "recorder_runs"
TABLE_SCHEMA_CHANGES = "schema_changes"
TABLE_STATISTICS = "statistics"
TABLE_STATISTICS_META = "statistics_meta"
TABLE_STATISTICS_RUNS = "statistics_runs"
TABLE_STATISTICS_SHORT_TERM = "statistics_short_term"
TABLE_MIGRATION_CHANGES = "migration_changes"

# The two tables that hold statistics rows (long- and short-term).
STATISTICS_TABLES = ("statistics", "statistics_short_term")

# Serialized payloads larger than these limits are not stored (see the
# shared_*_bytes_from_event helpers below), to protect database performance.
MAX_STATE_ATTRS_BYTES = 16384
MAX_EVENT_DATA_BYTES = 32768

PSQL_DIALECT = SupportedDialect.POSTGRESQL
# NOTE(review): the opening lines of these two collections were lost in the
# source mangling; the member lists are reconstructed from the surviving
# fragments — confirm against the upstream schema before relying on them.
ALL_TABLES = (
    TABLE_STATES,
    TABLE_STATE_ATTRIBUTES,
    TABLE_EVENTS,
    TABLE_EVENT_DATA,
    TABLE_EVENT_TYPES,
    TABLE_RECORDER_RUNS,
    TABLE_SCHEMA_CHANGES,
    TABLE_MIGRATION_CHANGES,
    TABLE_STATES_META,
    TABLE_STATISTICS,
    TABLE_STATISTICS_META,
    TABLE_STATISTICS_RUNS,
    TABLE_STATISTICS_SHORT_TERM,
)

# Subset of tables validated at startup — presumably the integrity check set;
# TODO confirm against callers.
TABLES_TO_CHECK = (
    TABLE_STATES,
    TABLE_EVENTS,
    TABLE_RECORDER_RUNS,
    TABLE_SCHEMA_CHANGES,
)
# Index names for the states/events tables (the *_TS names index the
# float timestamp columns).
LAST_UPDATED_INDEX_TS = "ix_states_last_updated_ts"
METADATA_ID_LAST_UPDATED_INDEX_TS = "ix_states_metadata_id_last_updated_ts"
EVENTS_CONTEXT_ID_BIN_INDEX = "ix_events_context_id_bin"
STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin"
# Index names that only exist on legacy schemas (dropped during migration).
LEGACY_STATES_EVENT_ID_INDEX = "ix_states_event_id"
LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated_ts"

# Context ids are stored as 16-byte binary values (ULID/UUID — see the
# *_id_bin columns and the bytes_to_ulid_or_none helpers).
CONTEXT_ID_BIN_MAX_LENGTH = 16

# MySQL/MariaDB table defaults: utf8mb4 with a case-insensitive unicode
# collation on InnoDB.
MYSQL_COLLATE = "utf8mb4_unicode_ci"
MYSQL_DEFAULT_CHARSET = "utf8mb4"
MYSQL_ENGINE = "InnoDB"
# Default per-table arguments applied to every model (MySQL and MariaDB take
# the same charset/collation/engine settings under their own key prefixes).
# Fix: the dict literal was left unterminated in the source — the closing
# brace was lost in the mangling.
_DEFAULT_TABLE_ARGS = {
    "mysql_default_charset": MYSQL_DEFAULT_CHARSET,
    "mysql_collate": MYSQL_COLLATE,
    "mysql_engine": MYSQL_ENGINE,
    "mariadb_default_charset": MYSQL_DEFAULT_CHARSET,
    "mariadb_collate": MYSQL_COLLATE,
    "mariadb_engine": MYSQL_ENGINE,
}
152 ATTR_UNIT_OF_MEASUREMENT,
# NOTE(review): the class headers were lost in the mangling; the names are
# grounded by the @compiles decorators below, the base classes are
# reconstructed — confirm against upstream.
class UnusedDateTime(DateTime):
    """An unused column type that behaves like a datetime."""


class Unused(CHAR):
    """An unused column type that behaves like a string."""
@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite")
@compiles(Unused, "mysql", "mariadb", "sqlite")
def compile_char_zero(type_: TypeDecorator, compiler: Any, **kw: Any) -> str:
    """Compile UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite."""
    # Fix: the def line and return statement were lost in the mangling;
    # the signature is grounded by the appended signature index.
    return "CHAR(0)"
@compiles(Unused, "postgresql")
def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str:
    """Compile Unused as CHAR(1) on postgresql."""
    # postgresql has no CHAR(0); a one-character CHAR is the smallest stub.
    return "CHAR(1)"
class FAST_PYSQLITE_DATETIME(sqlite.DATETIME):
    """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex."""

    def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None:
        """Offload the datetime parsing to ciso8601."""
        return lambda value: None if value is None else ciso8601.parse_datetime(value)
class NativeLargeBinary(LargeBinary):
    """A faster version of LargeBinary for engines that support python bytes natively."""

    def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None:
        """No conversion needed for engines that support native bytes."""
        return None
# Integer primary-key type: BIGINT everywhere except sqlite, whose INTEGER
# primary keys are already 64-bit rowid aliases.
ID_TYPE = BigInteger().with_variant(sqlite.INTEGER, "sqlite")
# Type wide enough for an unsigned 32-bit value (the fnv1a_32 hash columns);
# MySQL/MariaDB INTEGER is signed 32-bit, so use the unsigned variant there.
UINT_32_TYPE = BigInteger().with_variant(
    mysql.INTEGER(unsigned=True),
    "mysql",
    "mariadb",
)
# Text columns cast to native JSON/JSONB on postgresql for the SQL JSON
# expressions at the bottom of this module.
JSON_VARIANT_CAST = Text().with_variant(
    postgresql.JSON(none_as_null=True),
    "postgresql",
)
JSONB_VARIANT_CAST = Text().with_variant(
    postgresql.JSONB(none_as_null=True),
    "postgresql",
)
# NOTE(review): the assignment names and some variant lines were lost in the
# mangling; reconstructed from the surviving .with_variant fragments —
# confirm against upstream.
DATETIME_TYPE = (
    DateTime(timezone=True)
    .with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql", "mariadb")
    .with_variant(FAST_PYSQLITE_DATETIME(), "sqlite")
)
DOUBLE_TYPE = (
    Float()
    .with_variant(mysql.DOUBLE(asdecimal=False), "mysql", "mariadb")
    .with_variant(oracle.DOUBLE_PRECISION(), "oracle")
    .with_variant(postgresql.DOUBLE_PRECISION(), "postgresql")
)
# Restored: these two definitions were dropped by the mangling but are
# referenced by nearly every model below.
UNUSED_LEGACY_COLUMN = Unused(0)
UNUSED_LEGACY_DATETIME_COLUMN = UnusedDateTime(timezone=True)
UNUSED_LEGACY_INTEGER_COLUMN = SmallInteger()
# Raw SQL type names used by the migration code.
DOUBLE_PRECISION_TYPE_SQL = "DOUBLE PRECISION"
BIG_INTEGER_SQL = "BIGINT"
# 16-byte binary context ids; engines with native bytes skip conversion.
CONTEXT_BINARY_TYPE = LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH).with_variant(
    NativeLargeBinary(CONTEXT_ID_BIN_MAX_LENGTH), "mysql", "mariadb", "sqlite"
)
# Event/state timestamps are stored as double-precision epoch seconds.
TIMESTAMP_TYPE = DOUBLE_TYPE
class JSONLiteral(JSON):
    """Teach SA how to literalize json."""

    def literal_processor(self, dialect: Dialect) -> Callable[[Any], str]:
        """Processor to convert a value to JSON."""

        def process(value: Any) -> str:
            """Dump json."""
            return JSON_DUMP(value)

        return process
248 EVENT_ORIGIN_ORDER = [EventOrigin.local, EventOrigin.remote]
class Events(Base):
    """Event history data."""

    __table_args__ = (
        # Used for fetching events at a specific time (logbook).
        Index(
            "ix_events_event_type_id_time_fired_ts", "event_type_id", "time_fired_ts"
        ),
        Index(
            EVENTS_CONTEXT_ID_BIN_INDEX,
            "context_id_bin",
            mysql_length=CONTEXT_ID_BIN_MAX_LENGTH,
            mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH,
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_EVENTS
    event_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # Columns typed UNUSED_LEGACY_* are kept only for schema compatibility;
    # from_event always writes None into them.
    event_type: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    event_data: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    origin: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    origin_idx: Mapped[int | None] = mapped_column(SmallInteger)
    time_fired: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    time_fired_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True)
    context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    data_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("event_data.data_id"), index=True
    )
    context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    event_type_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("event_types.event_type_id")
    )
    event_data_rel: Mapped[EventData | None] = relationship("EventData")
    event_type_rel: Mapped[EventTypes | None] = relationship("EventTypes")

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.Events("
            f"id={self.event_id}, event_type_id='{self.event_type_id}', "
            f"origin_idx='{self.origin_idx}', time_fired='{self._time_fired_isotime}'"
            f", data_id={self.data_id})>"
        )

    @property
    def _time_fired_isotime(self) -> str | None:
        """Return time_fired as an isotime string."""
        date_time: datetime | None
        if self.time_fired_ts is not None:
            date_time = dt_util.utc_from_timestamp(self.time_fired_ts)
        else:
            # Legacy rows only have the datetime column populated.
            date_time = process_timestamp(self.time_fired)
        if date_time is None:
            return None
        return date_time.isoformat(sep=" ", timespec="seconds")

    @staticmethod
    def from_event(event: Event) -> Events:
        """Create an event database object from a native event."""
        context = event.context
        return Events(
            event_type=None,
            event_data=None,
            origin_idx=event.origin.idx,
            time_fired=None,
            time_fired_ts=event.time_fired_timestamp,
            context_id=None,
            context_id_bin=ulid_to_bytes_or_none(context.id),
            context_user_id=None,
            context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id),
            context_parent_id=None,
            context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
        )

    def to_native(self, validate_entity_id: bool = True) -> Event | None:
        """Convert to a native HA Event."""
        context = Context(
            id=bytes_to_ulid_or_none(self.context_id_bin),
            user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin),
            parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin),
        )
        try:
            return Event(
                self.event_type or "",
                json_loads_object(self.event_data) if self.event_data else {},
                # Legacy rows store the origin string; new rows store the index.
                EventOrigin(self.origin)
                if self.origin
                else EVENT_ORIGIN_ORDER[self.origin_idx or 0],
                self.time_fired_ts or 0,
                context=context,
            )
        except JSON_DECODE_EXCEPTIONS:
            # When event_data is malformed json.
            _LOGGER.exception("Error converting to event: %s", self)
            return None
class EventData(Base):
    """Event data history."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_EVENT_DATA
    data_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # fnv1a_32 of shared_data, used to look up duplicate payloads cheaply.
    hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True)
    shared_data: Mapped[str | None] = mapped_column(
        Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb")
    )

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.EventData("
            f"id={self.data_id}, hash='{self.hash}', data='{self.shared_data}'"
            ")>"
        )

    @staticmethod
    def shared_data_bytes_from_event(
        event: Event, dialect: SupportedDialect | None
    ) -> bytes:
        """Create shared_data from an event."""
        # NULs are stripped for postgresql — presumably because jsonb rejects
        # \u0000; confirm against the json_bytes_strip_null helper.
        encoder = json_bytes_strip_null if dialect == PSQL_DIALECT else json_bytes
        bytes_result = encoder(event.data)
        if len(bytes_result) > MAX_EVENT_DATA_BYTES:
            _LOGGER.warning(
                "Event data for %s exceed maximum size of %s bytes. "
                "This can cause database performance issues; Event data "
                "will not be stored",
                event.event_type,
                MAX_EVENT_DATA_BYTES,
            )
            return b"{}"
        return bytes_result

    @staticmethod
    def hash_shared_data_bytes(shared_data_bytes: bytes) -> int:
        """Return the hash of json encoded shared data."""
        return fnv1a_32(shared_data_bytes)

    def to_native(self) -> dict[str, Any]:
        """Convert to an event data dictionary."""
        shared_data = self.shared_data
        if shared_data is None:
            return {}
        try:
            return cast(dict[str, Any], json_loads(shared_data))
        except JSON_DECODE_EXCEPTIONS:
            _LOGGER.exception("Error converting row to event data: %s", self)
            return {}
class EventTypes(Base):
    """Event type history."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_EVENT_TYPES
    event_type_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    event_type: Mapped[str | None] = mapped_column(
        String(MAX_LENGTH_EVENT_EVENT_TYPE), index=True, unique=True
    )

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.EventTypes("
            f"id={self.event_type_id}, event_type='{self.event_type}'"
            ")>"
        )
class States(Base):
    """State change history."""

    __table_args__ = (
        # Used for fetching the state of entities at a specific time.
        Index(METADATA_ID_LAST_UPDATED_INDEX_TS, "metadata_id", "last_updated_ts"),
        Index(
            STATES_CONTEXT_ID_BIN_INDEX,
            "context_id_bin",
            mysql_length=CONTEXT_ID_BIN_MAX_LENGTH,
            mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH,
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_STATES
    state_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # Columns typed UNUSED_LEGACY_* exist only for schema compatibility;
    # from_event always writes None into them.
    entity_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    state: Mapped[str | None] = mapped_column(String(MAX_LENGTH_STATE_STATE))
    attributes: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    event_id: Mapped[int | None] = mapped_column(UNUSED_LEGACY_INTEGER_COLUMN)
    last_changed: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    last_changed_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE)
    last_reported_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE)
    last_updated: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    last_updated_ts: Mapped[float | None] = mapped_column(
        TIMESTAMP_TYPE, default=time.time, index=True
    )
    old_state_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("states.state_id"), index=True
    )
    attributes_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("state_attributes.attributes_id"), index=True
    )
    context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    # NOTE(review): the column type argument was lost in the mangling;
    # SmallInteger matches Events.origin_idx — confirm upstream.
    origin_idx: Mapped[int | None] = mapped_column(SmallInteger)
    old_state: Mapped[States | None] = relationship("States", remote_side=[state_id])
    state_attributes: Mapped[StateAttributes | None] = relationship("StateAttributes")
    context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    metadata_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("states_meta.metadata_id")
    )
    states_meta_rel: Mapped[StatesMeta | None] = relationship("StatesMeta")

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.States(id={self.state_id}, entity_id='{self.entity_id}'"
            f" metadata_id={self.metadata_id},"
            f" state='{self.state}', event_id='{self.event_id}',"
            f" last_updated='{self._last_updated_isotime}',"
            f" old_state_id={self.old_state_id}, attributes_id={self.attributes_id})>"
        )

    @property
    def _last_updated_isotime(self) -> str | None:
        """Return last_updated as an isotime string."""
        date_time: datetime | None
        if self.last_updated_ts is not None:
            date_time = dt_util.utc_from_timestamp(self.last_updated_ts)
        else:
            # Legacy rows only have the datetime column populated.
            date_time = process_timestamp(self.last_updated)
        if date_time is None:
            return None
        return date_time.isoformat(sep=" ", timespec="seconds")

    @staticmethod
    def from_event(event: Event[EventStateChangedData]) -> States:
        """Create object from a state_changed event."""
        state = event.data["new_state"]
        # A None state means the state was removed from the state machine.
        if state is None:
            state_value = ""
            last_updated_ts = event.time_fired_timestamp
            last_changed_ts = None
            last_reported_ts = None
        else:
            state_value = state.state
            last_updated_ts = state.last_updated_timestamp
            # Store last_changed/last_reported only when they differ from
            # last_updated so the common case stays NULL.
            if state.last_updated == state.last_changed:
                last_changed_ts = None
            else:
                last_changed_ts = state.last_changed_timestamp
            if state.last_updated == state.last_reported:
                last_reported_ts = None
            else:
                last_reported_ts = state.last_reported_timestamp
        context = event.context
        return States(
            state=state_value,
            entity_id=event.data["entity_id"],
            attributes=None,
            context_id=None,
            context_id_bin=ulid_to_bytes_or_none(context.id),
            context_user_id=None,
            context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id),
            context_parent_id=None,
            context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
            origin_idx=event.origin.idx,
            last_updated=None,
            last_changed=None,
            last_updated_ts=last_updated_ts,
            last_changed_ts=last_changed_ts,
            last_reported_ts=last_reported_ts,
        )

    def to_native(self, validate_entity_id: bool = True) -> State | None:
        """Convert to an HA state object."""
        context = Context(
            id=bytes_to_ulid_or_none(self.context_id_bin),
            user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin),
            parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin),
        )
        try:
            attrs = json_loads_object(self.attributes) if self.attributes else {}
        except JSON_DECODE_EXCEPTIONS:
            _LOGGER.exception("Error converting row to state: %s", self)
            return None
        last_updated = dt_util.utc_from_timestamp(self.last_updated_ts or 0)
        if self.last_changed_ts is None or self.last_changed_ts == self.last_updated_ts:
            last_changed = dt_util.utc_from_timestamp(self.last_updated_ts or 0)
        else:
            # Fix: source contained the garbled token
            # "self.last_changed_tslast_changed_ts" here.
            last_changed = dt_util.utc_from_timestamp(self.last_changed_ts or 0)
        if (
            self.last_reported_ts is None
            or self.last_reported_ts == self.last_updated_ts
        ):
            last_reported = dt_util.utc_from_timestamp(self.last_updated_ts or 0)
        else:
            # Fix: same garbled duplication for last_reported_ts.
            last_reported = dt_util.utc_from_timestamp(self.last_reported_ts or 0)
        return State(
            self.entity_id or "",
            self.state,  # type: ignore[arg-type]
            attrs,
            last_changed=last_changed,
            last_reported=last_reported,
            last_updated=last_updated,
            context=context,
            validate_entity_id=validate_entity_id,
        )
class StateAttributes(Base):
    """State attribute change history."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_STATE_ATTRIBUTES
    attributes_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # fnv1a_32 of shared_attrs, used to look up duplicate payloads cheaply.
    hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True)
    shared_attrs: Mapped[str | None] = mapped_column(
        Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb")
    )

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.StateAttributes(id={self.attributes_id}, hash='{self.hash}',"
            f" attributes='{self.shared_attrs}')>"
        )

    @staticmethod
    def shared_attrs_bytes_from_event(
        event: Event[EventStateChangedData],
        dialect: SupportedDialect | None,
    ) -> bytes:
        """Create shared_attrs from a state_changed event."""
        # A None state means the state was removed from the state machine.
        if (state := event.data["new_state"]) is None:
            return b"{}"
        if state_info := state.state_info:
            unrecorded_attributes = state_info["unrecorded_attributes"]
            exclude_attrs = {
                *ALL_DOMAIN_EXCLUDE_ATTRS,
                *unrecorded_attributes,
            }
            if MATCH_ALL in unrecorded_attributes:
                # Everything is excluded except the essentials kept in
                # _MATCH_ALL_KEEP.
                exclude_attrs.update(state.attributes)
                exclude_attrs -= _MATCH_ALL_KEEP
        else:
            exclude_attrs = ALL_DOMAIN_EXCLUDE_ATTRS
        # NULs are stripped for postgresql — presumably because jsonb rejects
        # \u0000; confirm against the json_bytes_strip_null helper.
        encoder = json_bytes_strip_null if dialect == PSQL_DIALECT else json_bytes
        bytes_result = encoder(
            {k: v for k, v in state.attributes.items() if k not in exclude_attrs}
        )
        if len(bytes_result) > MAX_STATE_ATTRS_BYTES:
            _LOGGER.warning(
                "State attributes for %s exceed maximum size of %s bytes. "
                "This can cause database performance issues; Attributes "
                "will not be stored",
                state.entity_id,
                MAX_STATE_ATTRS_BYTES,
            )
            return b"{}"
        return bytes_result

    @staticmethod
    def hash_shared_attrs_bytes(shared_attrs_bytes: bytes) -> int:
        """Return the hash of json encoded shared attributes."""
        return fnv1a_32(shared_attrs_bytes)

    def to_native(self) -> dict[str, Any]:
        """Convert to a state attributes dictionary."""
        shared_attrs = self.shared_attrs
        if shared_attrs is None:
            return {}
        try:
            return cast(dict[str, Any], json_loads(shared_attrs))
        except JSON_DECODE_EXCEPTIONS:
            _LOGGER.exception("Error converting row to state attributes: %s", self)
            return {}
class StatesMeta(Base):
    """Metadata for states."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_STATES_META
    metadata_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    entity_id: Mapped[str | None] = mapped_column(
        String(MAX_LENGTH_STATE_ENTITY_ID), index=True, unique=True
    )

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.StatesMeta("
            f"id={self.metadata_id}, entity_id='{self.entity_id}'"
            ")>"
        )
class StatisticsBase:
    """Statistics base class."""

    id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    created: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    created_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, default=time.time)
    metadata_id: Mapped[int | None] = mapped_column(
        ID_TYPE,
        ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"),
    )
    start: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    start_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True)
    mean: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    min: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    max: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    last_reset: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    last_reset_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE)
    state: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    sum: Mapped[float | None] = mapped_column(DOUBLE_TYPE)

    # Period covered by one row — set by the concrete subclasses.
    # NOTE(review): this declaration was lost in the mangling; confirm upstream.
    duration: timedelta

    @classmethod
    def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self:
        """Create object from a statistics with datetime objects."""
        return cls(  # type: ignore[call-arg]
            metadata_id=metadata_id,
            created=None,
            created_ts=time.time(),
            start=None,
            start_ts=stats["start"].timestamp(),
            mean=stats.get("mean"),
            min=stats.get("min"),
            max=stats.get("max"),
            last_reset=None,
            last_reset_ts=datetime_to_timestamp_or_none(stats.get("last_reset")),
            state=stats.get("state"),
            sum=stats.get("sum"),
        )

    @classmethod
    def from_stats_ts(cls, metadata_id: int, stats: StatisticDataTimestamp) -> Self:
        """Create object from a statistics with timestamps."""
        return cls(  # type: ignore[call-arg]
            metadata_id=metadata_id,
            created=None,
            created_ts=time.time(),
            start=None,
            start_ts=stats["start_ts"],
            mean=stats.get("mean"),
            min=stats.get("min"),
            max=stats.get("max"),
            last_reset=None,
            last_reset_ts=stats.get("last_reset_ts"),
            state=stats.get("state"),
            sum=stats.get("sum"),
        )
class Statistics(Base, StatisticsBase):
    """Long term statistics."""

    duration = timedelta(hours=1)

    __table_args__ = (
        # Used for fetching statistics for a certain entity at a specific time.
        Index(
            "ix_statistics_statistic_id_start_ts",
            "metadata_id",
            "start_ts",
            unique=True,
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_STATISTICS
class _StatisticsShortTerm(StatisticsBase):
    """Short term statistics."""

    duration = timedelta(minutes=5)

    __tablename__ = TABLE_STATISTICS_SHORT_TERM
class StatisticsShortTerm(Base, _StatisticsShortTerm):
    """Short term statistics."""

    __table_args__ = (
        # Used for fetching statistics for a certain entity at a specific time.
        Index(
            "ix_statistics_short_term_statistic_id_start_ts",
            "metadata_id",
            "start_ts",
            unique=True,
        ),
        _DEFAULT_TABLE_ARGS,
    )
class LegacyStatisticsShortTerm(LegacyBase, _StatisticsShortTerm):
    """Short term statistics with 32-bit index, used for schema migration."""

    __table_args__ = (
        Index(
            "ix_statistics_short_term_statistic_id_start_ts",
            "metadata_id",
            "start_ts",
            unique=True,
        ),
        _DEFAULT_TABLE_ARGS,
    )

    # Redeclared so the legacy metadata registry shares the same column.
    metadata_id: Mapped[int | None] = mapped_column(
        ID_TYPE,
        ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"),
        use_existing_column=True,
    )
class _StatisticsMeta:
    """Statistics meta data."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_STATISTICS_META
    id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    statistic_id: Mapped[str | None] = mapped_column(
        String(255), index=True, unique=True
    )
    source: Mapped[str | None] = mapped_column(String(32))
    unit_of_measurement: Mapped[str | None] = mapped_column(String(255))
    has_mean: Mapped[bool | None] = mapped_column(Boolean)
    has_sum: Mapped[bool | None] = mapped_column(Boolean)
    name: Mapped[str | None] = mapped_column(String(255))

    @staticmethod
    def from_meta(meta: StatisticMetaData) -> StatisticsMeta:
        """Create object from meta data."""
        return StatisticsMeta(**meta)
814 """Statistics meta data."""
class LegacyStatisticsMeta(LegacyBase, _StatisticsMeta):
    """Statistics meta data with 32-bit index, used for schema migration."""

    # 32-bit Integer instead of ID_TYPE; use_existing_column keeps it bound
    # to the shared _StatisticsMeta column.
    id: Mapped[int] = mapped_column(
        Integer,
        Identity(),
        primary_key=True,
        use_existing_column=True,
    )
class RecorderRuns(Base):
    """Representation of recorder run."""

    __table_args__ = (
        Index("ix_recorder_runs_start_end", "start", "end"),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_RECORDER_RUNS
    run_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    start: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)
    end: Mapped[datetime | None] = mapped_column(DATETIME_TYPE)
    # True when the run was not closed cleanly (e.g. power loss).
    closed_incorrect: Mapped[bool] = mapped_column(Boolean, default=False)
    created: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        end = (
            f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None
        )
        return (
            f"<recorder.RecorderRuns(id={self.run_id},"
            f" start='{self.start.isoformat(sep=' ', timespec='seconds')}', end={end},"
            f" closed_incorrect={self.closed_incorrect},"
            f" created='{self.created.isoformat(sep=' ', timespec='seconds')}')>"
        )

    def to_native(self, validate_entity_id: bool = True) -> Self:
        """Return self, native format is this model."""
        return self
class MigrationChanges(Base):
    """Representation of migration changes."""

    __tablename__ = TABLE_MIGRATION_CHANGES
    __table_args__ = (_DEFAULT_TABLE_ARGS,)

    migration_id: Mapped[str] = mapped_column(String(255), primary_key=True)
    version: Mapped[int] = mapped_column(SmallInteger)
class SchemaChanges(Base):
    """Representation of schema version changes."""

    __tablename__ = TABLE_SCHEMA_CHANGES
    __table_args__ = (_DEFAULT_TABLE_ARGS,)

    change_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    schema_version: Mapped[int | None] = mapped_column(Integer)
    changed: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.SchemaChanges("
            f"id={self.change_id}, schema_version={self.schema_version}, "
            f"changed='{self.changed.isoformat(sep=' ', timespec='seconds')}'"
            ")>"
        )
class StatisticsRuns(Base):
    """Representation of statistics run."""

    __tablename__ = TABLE_STATISTICS_RUNS
    __table_args__ = (_DEFAULT_TABLE_ARGS,)

    run_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    start: Mapped[datetime] = mapped_column(DATETIME_TYPE, index=True)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.StatisticsRuns(id={self.run_id},"
            f" start='{self.start.isoformat(sep=' ', timespec='seconds')}', )>"
        )
# SQL expressions used by the history/logbook queries.  The *_JSON variants
# cast the text columns to native JSON on engines that support it; the
# OLD_FORMAT_* variants read the legacy columns on un-migrated rows.
EVENT_DATA_JSON = type_coerce(
    EventData.shared_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
)
OLD_FORMAT_EVENT_DATA_JSON = type_coerce(
    Events.event_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
)

SHARED_ATTRS_JSON = type_coerce(
    StateAttributes.shared_attrs.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
)
OLD_FORMAT_ATTRS_JSON = type_coerce(
    States.attributes.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
)

ENTITY_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["entity_id"]
OLD_ENTITY_ID_IN_EVENT: ColumnElement = OLD_FORMAT_EVENT_DATA_JSON["entity_id"]
DEVICE_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["device_id"]
OLD_STATE = aliased(States, name="old_state")

# Prefer the shared (deduplicated) columns, falling back to the legacy
# per-row columns when the shared value is NULL.
SHARED_ATTR_OR_LEGACY_ATTRIBUTES = case(
    (StateAttributes.shared_attrs.is_(None), States.attributes),
    else_=StateAttributes.shared_attrs,
).label("attributes")
SHARED_DATA_OR_LEGACY_EVENT_DATA = case(
    (EventData.shared_data.is_(None), Events.event_data), else_=EventData.shared_data
).label("event_data")
int hash_shared_data_bytes(bytes shared_data_bytes)
dict[str, Any] to_native(self)
bytes shared_data_bytes_from_event(Event event, SupportedDialect|None dialect)
str|None _time_fired_isotime(self)
Event|None to_native(self, bool validate_entity_id=True)
Events from_event(Event event)
Callable|None result_processor(self, Dialect dialect, Any coltype)
Callable[[Any], str] literal_processor(self, Dialect dialect)
Callable|None result_processor(self, Dialect dialect, Any coltype)
Self to_native(self, bool validate_entity_id=True)
bytes shared_attrs_bytes_from_event(Event[EventStateChangedData] event, SupportedDialect|None dialect)
dict[str, Any] to_native(self)
int hash_shared_attrs_bytes(bytes shared_attrs_bytes)
State|None to_native(self, bool validate_entity_id=True)
States from_event(Event[EventStateChangedData] event)
str|None _last_updated_isotime(self)
Self from_stats(cls, int metadata_id, StatisticData stats)
Self from_stats_ts(cls, int metadata_id, StatisticDataTimestamp stats)
str compile_char_zero(TypeDecorator type_, Any compiler, **Any kw)
str compile_char_one(TypeDecorator type_, Any compiler, **Any kw)
str|None bytes_to_uuid_hex_or_none(bytes|None _bytes)
bytes|None uuid_hex_to_bytes_or_none(str|None uuid_hex)
None process_timestamp(None ts)
float|None datetime_to_timestamp_or_none(datetime|None dt)
JsonObjectType json_loads_object(bytes|bytearray|memoryview|str obj)