1 """SQLAlchemy util functions."""
3 from __future__
import annotations
5 from collections.abc
import Callable, Generator, Sequence
7 from contextlib
import contextmanager
8 from datetime
import date, datetime, timedelta
13 from typing
import TYPE_CHECKING, Any, Concatenate, NoReturn
15 from awesomeversion
import (
17 AwesomeVersionException,
18 AwesomeVersionStrategy,
21 from sqlalchemy
import inspect, text
22 from sqlalchemy.engine
import Result, Row
23 from sqlalchemy.engine.interfaces
import DBAPIConnection
24 from sqlalchemy.exc
import OperationalError, SQLAlchemyError, StatementError
25 from sqlalchemy.orm.query
import Query
26 from sqlalchemy.orm.session
import Session
27 from sqlalchemy.sql.lambdas
import StatementLambdaElement
28 import voluptuous
as vol
40 DEFAULT_MAX_BIND_VARS,
43 SQLITE_MODERN_MAX_BIND_VARS,
47 from .db_schema
import (
62 from sqlite3.dbapi2
import Cursor
as SQLiteCursor
64 from .
import Recorder
_LOGGER = logging.getLogger(__name__)

# Seconds to sleep between retries of a failed database query.
QUERY_RETRY_WAIT = 0.1
# Filename suffixes that together make up an sqlite3 database on disk:
# the main db file plus its write-ahead log and shared-memory sidecars.
SQLITE3_POSTFIXES = ["", "-wal", "-shm"]
# Batch size used when streaming query results with yield_per.
DEFAULT_YIELD_STATES_ROWS = 32768
def _simple_version(version: str) -> AwesomeVersion:
    """Return a simple version."""
    strategy = AwesomeVersionStrategy.SIMPLEVER
    return AwesomeVersion(version, ensure_strategy=strategy)
# MySQL/MariaDB error codes treated as transient (lock waits / deadlocks)
# and therefore safe to retry.
RETRYABLE_MYSQL_ERRORS = (1205, 1206, 1213)
# The second Sunday of a month can never fall on a day before the 8th.
FIRST_POSSIBLE_SUNDAY = 8
120 qry: Query, to_native: bool =
False, validate_entity_ids: bool =
True
122 """Query the database and convert the objects to HA native form.
124 This method also retries a few times in the case of stale connections.
126 debug = _LOGGER.isEnabledFor(logging.DEBUG)
127 for tryno
in range(RETRIES):
130 timer_start = time.perf_counter()
136 row.to_native(validate_entity_id=validate_entity_ids)
145 elapsed = time.perf_counter() - timer_start
148 "converting %d rows to native objects took %fs",
154 "querying %d rows took %fs",
159 except SQLAlchemyError
as err:
160 _LOGGER.error(
"Error executing query: %s", err)
162 if tryno == RETRIES - 1:
164 time.sleep(QUERY_RETRY_WAIT)
174 stmt: StatementLambdaElement,
175 start_time: datetime |
None =
None,
176 end_time: datetime |
None =
None,
177 yield_per: int = DEFAULT_YIELD_STATES_ROWS,
178 orm_rows: bool =
True,
179 ) -> Sequence[Row] | Result:
180 """Execute a StatementLambdaElement.
182 If the time window passed is greater than one day
183 the execution method will switch to yield_per to
184 reduce memory pressure.
186 It is not recommended to pass a time window
187 when selecting non-ranged rows (ie selecting
188 specific entities) since they are usually faster
191 use_all =
not start_time
or ((end_time
or dt_util.utcnow()) - start_time).days <= 1
192 for tryno
in range(RETRIES):
195 executed = session.execute(stmt)
197 executed = session.connection().
execute(stmt)
199 return executed.all()
200 return executed.yield_per(yield_per)
201 except SQLAlchemyError
as err:
202 _LOGGER.error(
"Error executing query: %s", err)
203 if tryno == RETRIES - 1:
205 time.sleep(QUERY_RETRY_WAIT)
212 """Ensure that the database is valid or move it away."""
215 if not os.path.exists(dbpath):
def dburl_to_path(dburl: str) -> str:
    """Convert the db url into a filesystem path."""
    # removeprefix is a no-op when the prefix is absent.
    path = dburl.removeprefix(SQLITE_URL_PREFIX)
    return path
232 """Verify the last recorder run was recently clean."""
234 cursor.execute(
"SELECT end FROM recorder_runs ORDER BY start DESC LIMIT 1;")
235 end_time = cursor.fetchone()
237 if not end_time
or not end_time[0]:
241 assert last_run_end_time
is not None
242 now = dt_util.utcnow()
244 _LOGGER.debug(
"The last run ended at: %s (now: %s)", last_run_end_time, now)
246 if last_run_end_time + MAX_RESTART_TIME < now:
253 """Check tables to make sure select does not fail."""
255 for table
in TABLES_TO_CHECK:
256 if table
in (TABLE_RECORDER_RUNS, TABLE_SCHEMA_CHANGES):
257 cursor.execute(f
"SELECT * FROM {table};")
260 f
"SELECT * FROM {table} LIMIT 1;"
267 """Run a quick check on an sqlite database to see if it is corrupt."""
271 conn = sqlite3.connect(dbpath)
274 except sqlite3.DatabaseError:
275 _LOGGER.exception(
"The database at %s is corrupt or malformed", dbpath)
282 """Run checks that will generate a sqlite3 exception if there is corruption."""
286 if sanity_check_passed
and last_run_was_clean:
288 "The system was restarted cleanly and passed the basic sanity check"
292 if not sanity_check_passed:
294 "The database sanity check failed to validate the sqlite3 database at %s",
298 if not last_run_was_clean:
301 "The system could not validate that the sqlite3 database at %s was"
309 """Move away a broken sqlite3 database."""
311 isotime = dt_util.utcnow().isoformat()
312 corrupt_postfix = f
".corrupt.{isotime}"
316 "The system will rename the corrupt database file %s to %s in order to"
317 " allow startup to proceed"
320 f
"{dbfile}{corrupt_postfix}",
323 for postfix
in SQLITE3_POSTFIXES:
324 path = f
"{dbfile}{postfix}"
325 if not os.path.exists(path):
327 os.rename(path, f
"{path}{corrupt_postfix}")
def execute_on_connection(dbapi_connection: DBAPIConnection, statement: str) -> None:
    """Execute a single statement with a dbapi connection.

    The cursor is always closed afterwards so repeated calls do not leak
    driver-side cursors.
    """
    cursor = dbapi_connection.cursor()
    try:
        cursor.execute(statement)
    finally:
        # Release the cursor even when execution raises.
        cursor.close()
def query_on_connection(dbapi_connection: DBAPIConnection, statement: str) -> Any:
    """Execute a single statement with a dbapi connection and return the result.

    The cursor is closed before returning so repeated calls do not leak
    driver-side cursors.
    """
    cursor = dbapi_connection.cursor()
    try:
        cursor.execute(statement)
        result = cursor.fetchall()
    finally:
        # Release the cursor even when execution raises.
        cursor.close()
    return result
347 """Warn about unsupported database version."""
350 "Database %s is not supported; Home Assistant supports %s. "
351 "Starting with Home Assistant 2022.6 this prevents the recorder from "
352 "starting. Please migrate your database to a supported software"
355 "MariaDB ≥ 10.3, MySQL ≥ 8.0, PostgreSQL ≥ 12, SQLite ≥ 3.31.0",
357 raise UnsupportedDialect
361 server_version: str, dialect_name: str, minimum_version: str
363 """Warn about unsupported database version."""
366 "Version %s of %s is not supported; minimum supported version is %s. "
367 "Starting with Home Assistant 2022.6 this prevents the recorder from "
368 "starting. Please upgrade your database software"
374 raise UnsupportedDialect
379 hass: HomeAssistant, dialect_name: str
381 """Delete the issue about upcoming unsupported database version."""
382 ir.async_delete_issue(hass, DOMAIN, f
"{dialect_name}_too_old")
388 server_version: AwesomeVersion,
390 min_version: AwesomeVersion,
392 """Warn about upcoming unsupported database version."""
393 ir.async_create_issue(
396 f
"{dialect_name}_too_old",
398 severity=ir.IssueSeverity.CRITICAL,
399 translation_key=f
"{dialect_name}_too_old",
400 translation_placeholders={
401 "server_version":
str(server_version),
402 "min_version":
str(min_version),
404 breaks_in_ha_version=
"2025.2.0",
def _extract_version_from_server_response_or_raise(
    server_response: str,
) -> AwesomeVersion:
    """Extract version from server response."""
    return AwesomeVersion(
        server_response,
        find_first_match=True,
        ensure_strategy=AwesomeVersionStrategy.SIMPLEVER,
    )
def _extract_version_from_server_response(
    server_response: str,
) -> AwesomeVersion | None:
    """Attempt to extract version from server response."""
    try:
        return _extract_version_from_server_response_or_raise(server_response)
    except AwesomeVersionException:
        # Unparseable version strings are reported as unknown.
        return None
def _datetime_or_none(value: str) -> datetime | None:
    """Fast version of mysqldb DateTime_or_None.

    https://github.com/PyMySQL/mysqlclient/blob/v2.1.0/MySQLdb/times.py#L66
    """
    try:
        return ciso8601.parse_datetime(value)
    except ValueError:
        # Match DateTime_or_None: unparseable values become None, as the
        # declared `datetime | None` return type requires.
        return None
def build_mysqldb_conv() -> dict:
    """Build a MySQLDB conv dict that uses ciso8601 to parse datetimes."""
    # Late import so the MySQLdb driver is only required when actually used.
    from MySQLdb.constants import FIELD_TYPE
    from MySQLdb.converters import conversions

    # Override only the DATETIME converter; keep all other defaults.
    return {**conversions, FIELD_TYPE.DATETIME: _datetime_or_none}
452 hass: HomeAssistant, version: AwesomeVersion
454 """Create an issue for the index range regression in older MariaDB.
456 The range scan issue was fixed in MariaDB 10.5.17, 10.6.9, 10.7.5, 10.8.4 and later.
458 if version >= MARIA_DB_108:
459 min_version = RECOMMENDED_MIN_VERSION_MARIA_DB_108
460 elif version >= MARIA_DB_107:
461 min_version = RECOMMENDED_MIN_VERSION_MARIA_DB_107
462 elif version >= MARIA_DB_106:
463 min_version = RECOMMENDED_MIN_VERSION_MARIA_DB_106
465 min_version = RECOMMENDED_MIN_VERSION_MARIA_DB
466 ir.async_create_issue(
469 "maria_db_range_index_regression",
471 severity=ir.IssueSeverity.CRITICAL,
472 learn_more_url=
"https://jira.mariadb.org/browse/MDEV-25020",
473 translation_key=
"maria_db_range_index_regression",
474 translation_placeholders={
"min_version":
str(min_version)},
481 local_start_time: datetime,
483 """Create an issue when the backup fails because we run out of resources."""
484 ir.async_create_issue(
487 "backup_failed_out_of_resources",
489 severity=ir.IssueSeverity.CRITICAL,
490 learn_more_url=
"https://www.home-assistant.io/integrations/recorder",
491 translation_key=
"backup_failed_out_of_resources",
492 translation_placeholders={
"start_time": local_start_time.strftime(
"%H:%M:%S")},
499 dbapi_connection: DBAPIConnection,
500 first_connection: bool,
501 ) -> DatabaseEngine |
None:
502 """Execute statements needed for dialect connection."""
503 version: AwesomeVersion |
None =
None
504 slow_range_in_select =
False
505 if dialect_name == SupportedDialect.SQLITE:
506 max_bind_vars = SQLITE_MAX_BIND_VARS
508 old_isolation = dbapi_connection.isolation_level
509 dbapi_connection.isolation_level =
None
511 dbapi_connection.isolation_level = old_isolation
516 version_string = result[0][0]
519 if version < MIN_VERSION_SQLITE:
521 version
or version_string,
"SQLite", MIN_VERSION_SQLITE
525 if version < UPCOMING_MIN_VERSION_SQLITE:
526 instance.hass.add_job(
527 _async_create_issue_deprecated_version,
529 version
or version_string,
531 UPCOMING_MIN_VERSION_SQLITE,
534 instance.hass.add_job(
535 _async_delete_issue_deprecated_version, instance.hass, dialect_name
538 if version
and version > MIN_VERSION_SQLITE_MODERN_BIND_VARS:
539 max_bind_vars = SQLITE_MODERN_MAX_BIND_VARS
552 synchronous =
"NORMAL" if instance.commit_interval
else "FULL"
558 elif dialect_name == SupportedDialect.MYSQL:
559 max_bind_vars = DEFAULT_MAX_BIND_VARS
563 version_string = result[0][0]
565 is_maria_db =
"mariadb" in version_string.lower()
568 if not version
or version < MIN_VERSION_MARIA_DB:
570 version
or version_string,
"MariaDB", MIN_VERSION_MARIA_DB
573 (version < RECOMMENDED_MIN_VERSION_MARIA_DB)
574 or (MARIA_DB_106 <= version < RECOMMENDED_MIN_VERSION_MARIA_DB_106)
575 or (MARIA_DB_107 <= version < RECOMMENDED_MIN_VERSION_MARIA_DB_107)
576 or (MARIA_DB_108 <= version < RECOMMENDED_MIN_VERSION_MARIA_DB_108)
578 instance.hass.add_job(
579 _async_create_mariadb_range_index_regression_issue,
584 elif not version
or version < MIN_VERSION_MYSQL:
586 version
or version_string,
"MySQL", MIN_VERSION_MYSQL
589 slow_range_in_select = bool(
591 or version < MARIADB_WITH_FIXED_IN_QUERIES_105
592 or MARIA_DB_106 <= version < MARIADB_WITH_FIXED_IN_QUERIES_106
593 or MARIA_DB_107 <= version < MARIADB_WITH_FIXED_IN_QUERIES_107
594 or MARIA_DB_108 <= version < MARIADB_WITH_FIXED_IN_QUERIES_108
599 elif dialect_name == SupportedDialect.POSTGRESQL:
600 max_bind_vars = DEFAULT_MAX_BIND_VARS
604 version_string = result[0][0]
606 if not version
or version < MIN_VERSION_PGSQL:
608 version
or version_string,
"PostgreSQL", MIN_VERSION_PGSQL
614 if not first_connection:
621 max_bind_vars=max_bind_vars,
626 """End any incomplete recorder runs."""
627 for run
in session.query(RecorderRuns).filter_by(end=
None):
628 run.closed_incorrect =
True
631 "Ended unfinished session (id=%s from %s)", run.run_id, run.start
637 """Return True if the error is retryable."""
638 assert instance.engine
is not None
640 instance.engine.dialect.name == SupportedDialect.MYSQL
641 and isinstance(err.orig, BaseException)
643 and err.orig.args[0]
in RETRYABLE_MYSQL_ERRORS
# A database job implemented as a plain function: the Recorder is the first argument.
type _FuncType[**P, R] = Callable[Concatenate[Recorder, P], R]
# A database job implemented as a method: self first, then the Recorder.
type _MethType[Self, **P, R] = Callable[Concatenate[Self, Recorder, P], R]
# Either of the above with the Recorder position erased from the signature.
type _FuncOrMethType[**_P, _R] = Callable[_P, _R]
652 def retryable_database_job[**_P](
654 ) -> Callable[[_FuncType[_P, bool]], _FuncType[_P, bool]]:
655 """Execute a database job repeatedly until it succeeds.
657 The job should return True if it finished, and False if it needs to be rescheduled.
660 def decorator(job: _FuncType[_P, bool]) -> _FuncType[_P, bool]:
661 return _wrap_retryable_database_job_func_or_meth(job, description,
False)
666 def retryable_database_job_method[_Self, **_P](
668 ) -> Callable[[_MethType[_Self, _P, bool]], _MethType[_Self, _P, bool]]:
669 """Execute a database job repeatedly until it succeeds.
671 The job should return True if it finished, and False if it needs to be rescheduled.
674 def decorator(job: _MethType[_Self, _P, bool]) -> _MethType[_Self, _P, bool]:
675 return _wrap_retryable_database_job_func_or_meth(job, description,
True)
680 def _wrap_retryable_database_job_func_or_meth[**_P](
681 job: _FuncOrMethType[_P, bool], description: str, method: bool
682 ) -> _FuncOrMethType[_P, bool]:
683 recorder_pos = 1
if method
else 0
685 @functools.wraps(job)
686 def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> bool:
687 instance: Recorder = args[recorder_pos]
689 return job(*args, **kwargs)
690 except OperationalError
as err:
692 assert isinstance(err.orig, BaseException)
694 "%s; %s not completed, retrying", err.orig.args[1], description
696 time.sleep(instance.db_retry_wait)
700 _LOGGER.warning(
"Error executing %s: %s", description, err)
708 def database_job_retry_wrapper[**_P, _R](
709 description: str, attempts: int
710 ) -> Callable[[_FuncType[_P, _R]], _FuncType[_P, _R]]:
711 """Execute a database job repeatedly until it succeeds, at most attempts times.
713 This wrapper handles InnoDB deadlocks and lock timeouts.
715 This is different from retryable_database_job in that it will retry the job
716 attempts number of times instead of returning False if the job fails.
720 job: _FuncType[_P, _R],
721 ) -> _FuncType[_P, _R]:
722 return _database_job_retry_wrapper_func_or_meth(
723 job, description, attempts,
False
729 def database_job_retry_wrapper_method[_Self, **_P, _R](
730 description: str, attempts: int
731 ) -> Callable[[_MethType[_Self, _P, _R]], _MethType[_Self, _P, _R]]:
732 """Execute a database job repeatedly until it succeeds, at most attempts times.
734 This wrapper handles InnoDB deadlocks and lock timeouts.
736 This is different from retryable_database_job in that it will retry the job
737 attempts number of times instead of returning False if the job fails.
741 job: _MethType[_Self, _P, _R],
742 ) -> _MethType[_Self, _P, _R]:
743 return _database_job_retry_wrapper_func_or_meth(
744 job, description, attempts,
True
750 def _database_job_retry_wrapper_func_or_meth[**_P, _R](
751 job: _FuncOrMethType[_P, _R],
755 ) -> _FuncOrMethType[_P, _R]:
756 recorder_pos = 1
if method
else 0
758 @functools.wraps(job)
759 def wrapper(*args: _P.args, **kwargs: _P.kwargs) -> _R:
760 instance: Recorder = args[recorder_pos]
761 for attempt
in range(attempts):
763 return job(*args, **kwargs)
764 except OperationalError
as err:
768 assert isinstance(err.orig, BaseException)
769 _LOGGER.info(
"%s; %s failed, retrying", err.orig.args[1], description)
770 time.sleep(instance.db_retry_wait)
772 raise ValueError(
"attempts must be a positive integer")
778 """Run any database cleanups that need to happen periodically.
780 These cleanups will happen nightly or after any purge.
782 assert instance.engine
is not None
783 if instance.engine.dialect.name == SupportedDialect.SQLITE:
785 _LOGGER.debug(
"WAL checkpoint")
786 with instance.engine.connect()
as connection:
787 connection.execute(text(
"PRAGMA wal_checkpoint(TRUNCATE);"))
788 connection.execute(text(
"PRAGMA OPTIMIZE;"))
793 """Lock database for writes."""
794 assert instance.engine
is not None
795 with instance.engine.connect()
as connection:
798 connection.execute(text(
"PRAGMA wal_checkpoint(TRUNCATE)"))
800 _LOGGER.debug(
"Lock database")
801 connection.execute(text(
"BEGIN IMMEDIATE;"))
805 _LOGGER.debug(
"Unlock database")
806 connection.execute(text(
"END;"))
def async_migration_in_progress(hass: HomeAssistant) -> bool:
    """Determine if a migration is in progress.

    This is a thin wrapper that allows us to change
    out the implementation later.
    """
    if DATA_INSTANCE not in hass.data:
        # No recorder instance yet, so no migration can be running.
        return False
    return hass.data[DATA_INSTANCE].migration_in_progress
def async_migration_is_live(hass: HomeAssistant) -> bool:
    """Determine if a migration is live.

    This is a thin wrapper that allows us to change
    out the implementation later.
    """
    if DATA_INSTANCE not in hass.data:
        # No recorder instance yet, so no live migration is possible.
        return False
    return hass.data[DATA_INSTANCE].migration_is_live
def second_sunday(year: int, month: int) -> date:
    """Return the datetime.date for the second sunday of a month."""
    second = date(year, month, FIRST_POSSIBLE_SUNDAY)
    day_of_week = second.weekday()
    if day_of_week == SUNDAY_WEEKDAY:
        # The 8th is itself a Sunday and therefore the second Sunday.
        return second
    # Otherwise advance to the first Sunday on or after the 8th.
    return second.replace(
        day=(FIRST_POSSIBLE_SUNDAY + (SUNDAY_WEEKDAY - day_of_week) % DAYS_IN_WEEK)
    )
def is_second_sunday(date_time: datetime) -> bool:
    """Check if a time is the second sunday of the month."""
    # == already yields a bool; the bool() wrapper was redundant.
    return second_sunday(date_time.year, date_time.month).day == date_time.day
847 PERIOD_SCHEMA = vol.Schema(
849 vol.Exclusive(
"calendar",
"period"): vol.Schema(
851 vol.Required(
"period"): vol.Any(
"hour",
"day",
"week",
"month",
"year"),
852 vol.Optional(
"offset"): int,
855 vol.Exclusive(
"fixed_period",
"period"): vol.Schema(
857 vol.Optional(
"start_time"): vol.All(cv.datetime, dt_util.as_utc),
858 vol.Optional(
"end_time"): vol.All(cv.datetime, dt_util.as_utc),
861 vol.Exclusive(
"rolling_window",
"period"): vol.Schema(
863 vol.Required(
"duration"): cv.time_period_dict,
864 vol.Optional(
"offset"): cv.time_period_dict,
872 period_def: StatisticPeriod,
873 ) -> tuple[datetime |
None, datetime |
None]:
874 """Return start and end datetimes for a statistic period definition."""
878 if "calendar" in period_def:
879 calendar_period = period_def[
"calendar"][
"period"]
880 start_of_day = dt_util.start_of_local_day()
881 cal_offset = period_def[
"calendar"].
get(
"offset", 0)
882 if calendar_period ==
"hour":
883 start_time = dt_util.now().replace(minute=0, second=0, microsecond=0)
884 start_time +=
timedelta(hours=cal_offset)
885 end_time = start_time +
timedelta(hours=1)
886 elif calendar_period ==
"day":
887 start_time = start_of_day
889 end_time = start_time +
timedelta(days=1)
890 elif calendar_period ==
"week":
891 start_time = start_of_day -
timedelta(days=start_of_day.weekday())
892 start_time +=
timedelta(days=cal_offset * 7)
893 end_time = start_time +
timedelta(weeks=1)
894 elif calendar_period ==
"month":
895 start_time = start_of_day.replace(day=28)
897 start_time = (start_time +
timedelta(days=cal_offset * 31)).replace(day=1)
898 end_time = (start_time +
timedelta(days=31)).replace(day=1)
900 start_time = start_of_day.replace(month=12, day=31)
902 start_time = (start_time +
timedelta(days=cal_offset * 366)).replace(
905 end_time = (start_time +
timedelta(days=366)).replace(day=1)
907 start_time = dt_util.as_utc(start_time)
908 end_time = dt_util.as_utc(end_time)
910 elif "fixed_period" in period_def:
911 start_time = period_def[
"fixed_period"].
get(
"start_time")
912 end_time = period_def[
"fixed_period"].
get(
"end_time")
914 elif "rolling_window" in period_def:
915 duration = period_def[
"rolling_window"][
"duration"]
916 now = dt_util.utcnow()
917 start_time = now - duration
918 end_time = start_time + duration
920 if offset := period_def[
"rolling_window"].
get(
"offset"):
924 return (start_time, end_time)
928 """Get an index by name."""
929 connection = session.connection()
930 inspector = inspect(connection)
931 indexes = inspector.get_indexes(table_name)
934 possible_index[
"name"]
935 for possible_index
in indexes
936 if possible_index[
"name"]
938 possible_index[
"name"] == index_name
939 or possible_index[
"name"].endswith(f
"_{index_name}")
947 instance: Recorder, row_type: str
948 ) -> Callable[[Exception], bool]:
949 """Create a filter for unique constraint integrity errors."""
951 def _filter_unique_constraint_integrity_error(err: Exception) -> bool:
952 """Handle unique constraint integrity errors."""
953 if not isinstance(err, StatementError):
956 assert instance.engine
is not None
957 dialect_name = instance.engine.dialect.name
961 dialect_name == SupportedDialect.SQLITE
962 and "UNIQUE constraint failed" in str(err)
966 dialect_name == SupportedDialect.POSTGRESQL
968 and hasattr(err.orig,
"pgcode")
969 and err.orig.pgcode ==
"23505"
973 dialect_name == SupportedDialect.MYSQL
975 and hasattr(err.orig,
"args")
977 with contextlib.suppress(TypeError):
978 if err.orig.args[0] == 1062:
984 "Blocked attempt to insert duplicated %s rows, please report"
988 "https://github.com/home-assistant/core/issues?q=is%3Aopen+is%3Aissue+label%3A%22integration%3A+recorder%22",
994 return _filter_unique_constraint_integrity_error
web.Response get(self, web.Request request, str config_key)
None process_timestamp(None ts)
bool _is_retryable_error(Recorder instance, OperationalError err)
tuple[datetime|None, datetime|None] resolve_period(StatisticPeriod period_def)
DatabaseEngine|None setup_connection_for_dialect(Recorder instance, str dialect_name, DBAPIConnection dbapi_connection, bool first_connection)
None async_create_backup_failure_issue(HomeAssistant hass, datetime local_start_time)
None end_incomplete_runs(Session session, datetime start_time)
None execute_on_connection(DBAPIConnection dbapi_connection, str statement)
bool basic_sanity_check(SQLiteCursor cursor)
date second_sunday(int year, int month)
bool validate_or_move_away_sqlite_database(str dburl)
bool last_run_was_recently_clean(SQLiteCursor cursor)
None _async_delete_issue_deprecated_version(HomeAssistant hass, str dialect_name)
None periodic_db_cleanups(Recorder instance)
NoReturn _raise_if_version_unsupported(str server_version, str dialect_name, str minimum_version)
Generator[None] write_lock_db_sqlite(Recorder instance)
AwesomeVersion|None _extract_version_from_server_response(str server_response)
str dburl_to_path(str dburl)
Any query_on_connection(DBAPIConnection dbapi_connection, str statement)
None _async_create_issue_deprecated_version(HomeAssistant hass, AwesomeVersion server_version, str dialect_name, AwesomeVersion min_version)
bool async_migration_in_progress(HomeAssistant hass)
list[Row] execute(Query qry, bool to_native=False, bool validate_entity_ids=True)
dict build_mysqldb_conv()
bool validate_sqlite_database(str dbpath)
None _async_create_mariadb_range_index_regression_issue(HomeAssistant hass, AwesomeVersion version)
None move_away_broken_database(str dbfile)
str|None get_index_by_name(Session session, str table_name, str index_name)
AwesomeVersion _extract_version_from_server_response_or_raise(str server_response)
None run_checks_on_open_db(str dbpath, SQLiteCursor cursor)
Sequence[Row]|Result execute_stmt_lambda_element(Session session, StatementLambdaElement stmt, datetime|None start_time=None, datetime|None end_time=None, int yield_per=DEFAULT_YIELD_STATES_ROWS, bool orm_rows=True)
bool is_second_sunday(datetime date_time)
datetime|None _datetime_or_none(str value)
Callable[[Exception], bool] filter_unique_constraint_integrity_error(Recorder instance, str row_type)
AwesomeVersion _simple_version(str version)
NoReturn _fail_unsupported_dialect(str dialect_name)
bool async_migration_is_live(HomeAssistant hass)