# Home Assistant 2024.12.1 — homeassistant/bootstrap.py
# (recovered from an HTML code-reference export; navigation text removed)
"""Provide methods to bootstrap a Home Assistant instance."""

from __future__ import annotations

import asyncio
from collections import defaultdict
import contextlib
from functools import partial
from itertools import chain
import logging
from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler
import mimetypes
from operator import contains, itemgetter
import os
import platform
import sys
import threading
from time import monotonic
from typing import TYPE_CHECKING, Any

# Import cryptography early since import openssl is not thread-safe
# _frozen_importlib._DeadlockError: deadlock detected by _ModuleLock('cryptography.hazmat.backends.openssl.backend')
import cryptography.hazmat.backends.openssl.backend  # noqa: F401
import voluptuous as vol
import yarl

from . import (
    block_async_io,
    config as conf_util,
    config_entries,
    core,
    loader,
    requirements,
)

# Pre-import frontend deps which have no requirements here to avoid
# loading them at run time and blocking the event loop. We do this ahead
# of time so that we do not have to flag frontend deps with `import_executor`
# as it would create a thundering herd of executor jobs trying to import
# frontend deps at the same time.
from .components import (
    api as api_pre_import,  # noqa: F401
    auth as auth_pre_import,  # noqa: F401
    config as config_pre_import,  # noqa: F401
    default_config as default_config_pre_import,  # noqa: F401
    device_automation as device_automation_pre_import,  # noqa: F401
    diagnostics as diagnostics_pre_import,  # noqa: F401
    file_upload as file_upload_pre_import,  # noqa: F401
    group as group_pre_import,  # noqa: F401
    history as history_pre_import,  # noqa: F401
    http,  # not named pre_import since it has requirements
    image_upload as image_upload_import,  # noqa: F401 - not named pre_import since it has requirements
    logbook as logbook_pre_import,  # noqa: F401
    lovelace as lovelace_pre_import,  # noqa: F401
    onboarding as onboarding_pre_import,  # noqa: F401
    recorder as recorder_import,  # noqa: F401 - not named pre_import since it has requirements
    repairs as repairs_pre_import,  # noqa: F401
    search as search_pre_import,  # noqa: F401
    sensor as sensor_pre_import,  # noqa: F401
    system_log as system_log_pre_import,  # noqa: F401
    webhook as webhook_pre_import,  # noqa: F401
    websocket_api as websocket_api_pre_import,  # noqa: F401
)
from .components.sensor import recorder as sensor_recorder  # noqa: F401
from .const import (
    BASE_PLATFORMS,
    FORMAT_DATETIME,
    KEY_DATA_LOGGING as DATA_LOGGING,
    REQUIRED_NEXT_PYTHON_HA_RELEASE,
    REQUIRED_NEXT_PYTHON_VER,
    SIGNAL_BOOTSTRAP_INTEGRATIONS,
)
from .core_config import async_process_ha_core_config
from .exceptions import HomeAssistantError
from .helpers import (
    area_registry,
    category_registry,
    config_validation as cv,
    device_registry,
    entity,
    entity_registry,
    floor_registry,
    issue_registry,
    label_registry,
    recorder,
    restore_state,
    template,
    translation,
)
from .helpers.dispatcher import async_dispatcher_send_internal
from .helpers.storage import get_internal_store_manager
from .helpers.system_info import async_get_system_info, is_official_image
from .helpers.typing import ConfigType
from .setup import (
    # _setup_started is marked as protected to make it clear
    # that it is not part of the public API and should not be used
    # by integrations. It is only used for internal tracking of
    # which integrations are being set up.
    _setup_started,
    async_get_setup_timings,
    async_notify_setup_error,
    async_set_domains_to_be_loaded,
    async_setup_component,
)
from .util.async_ import create_eager_task
from .util.hass_dict import HassKey
from .util.logging import async_activate_log_queue_handler
from .util.package import async_get_user_site, is_docker_env, is_virtual_env

with contextlib.suppress(ImportError):
    # Ensure anyio backend is imported to avoid it being imported in the event loop
    from anyio._backends import _asyncio  # noqa: F401
113 
114 
if TYPE_CHECKING:
    from .runner import RuntimeConfig

_LOGGER = logging.getLogger(__name__)

# Sort key used to start base platforms (sensor, switch, ...) before
# everything else: `contains(BASE_PLATFORMS, domain)` is truthy for them.
SETUP_ORDER_SORT_KEY = partial(contains, BASE_PLATFORMS)


ERROR_LOG_FILENAME = "home-assistant.log"

# hass.data key for logging information.
DATA_REGISTRIES_LOADED: HassKey[None] = HassKey("bootstrap_registries_loaded")

LOG_SLOW_STARTUP_INTERVAL = 60
SLOW_STARTUP_CHECK_INTERVAL = 1

STAGE_1_TIMEOUT = 120
STAGE_2_TIMEOUT = 300
WRAP_UP_TIMEOUT = 300
COOLDOWN_TIME = 60


DEBUGGER_INTEGRATIONS = {"debugpy"}

# Core integrations are unconditionally loaded
CORE_INTEGRATIONS = {"homeassistant", "persistent_notification"}

# Integrations that are loaded right after the core is set up
LOGGING_AND_HTTP_DEPS_INTEGRATIONS = {
    # isal is loaded right away before `http` to ensure if its
    # enabled, that `isal` is up to date.
    "isal",
    # Set log levels
    "logger",
    # Error logging
    "system_log",
    "sentry",
}
FRONTEND_INTEGRATIONS = {
    # Get the frontend up and running as soon as possible so problem
    # integrations can be removed and database migration status is
    # visible in frontend
    "frontend",
}
RECORDER_INTEGRATIONS = {
    # Setup after frontend
    # To record data
    "recorder",
}
DISCOVERY_INTEGRATIONS = ("bluetooth", "dhcp", "ssdp", "usb", "zeroconf")
STAGE_1_INTEGRATIONS = {
    # We need to make sure discovery integrations
    # update their deps before stage 2 integrations
    # load them inadvertently before their deps have
    # been updated which leads to using an old version
    # of the dep, or worse (import errors).
    *DISCOVERY_INTEGRATIONS,
    # To make sure we forward data to other instances
    "mqtt_eventstream",
    # To provide account link implementations
    "cloud",
    # Ensure supervisor is available
    "hassio",
}
DEFAULT_INTEGRATIONS = {
    # These integrations are set up unless recovery mode is activated.
    #
    # Integrations providing core functionality:
    "analytics",  # Needed for onboarding
    "application_credentials",
    "backup",
    "frontend",
    "hardware",
    "logger",
    "network",
    "system_health",
    #
    # Key-feature:
    "automation",
    "person",
    "scene",
    "script",
    "tag",
    "zone",
    #
    # Built-in helpers:
    "counter",
    "input_boolean",
    "input_button",
    "input_datetime",
    "input_number",
    "input_select",
    "input_text",
    "schedule",
    "timer",
}
DEFAULT_INTEGRATIONS_RECOVERY_MODE = {
    # These integrations are set up if recovery mode is activated.
    "frontend",
}
DEFAULT_INTEGRATIONS_SUPERVISOR = {
    # These integrations are set up if using the Supervisor
    "hassio",
}
CRITICAL_INTEGRATIONS = {
    # Recovery mode is activated if these integrations fail to set up
    "frontend",
}

SETUP_ORDER = (
    # Load logging and http deps as soon as possible
    ("logging, http deps", LOGGING_AND_HTTP_DEPS_INTEGRATIONS),
    # Setup frontend
    ("frontend", FRONTEND_INTEGRATIONS),
    # Setup recorder
    ("recorder", RECORDER_INTEGRATIONS),
    # Start up debuggers. Start these first in case they want to wait.
    ("debugger", DEBUGGER_INTEGRATIONS),
)

#
# Storage keys we are likely to load during startup
# in order of when we expect to load them.
#
# If they do not exist they will not be loaded
#
PRELOAD_STORAGE = [
    "core.logger",
    "core.network",
    "http.auth",
    "image",
    "lovelace_dashboards",
    "lovelace_resources",
    "core.uuid",
    "lovelace.map",
    "bluetooth.passive_update_processor",
    "bluetooth.remote_scanners",
    "assist_pipeline.pipelines",
    "core.analytics",
    "auth_module.totp",
]
256 
257 
async def async_setup_hass(
    runtime_config: RuntimeConfig,
) -> core.HomeAssistant | None:
    """Set up Home Assistant.

    Returns the fully bootstrapped HomeAssistant instance, or None if the
    configuration path could not be prepared. On configuration errors the
    instance is recreated in recovery mode rather than aborting.
    """

    async def create_hass() -> core.HomeAssistant:
        """Create the hass object and do basic setup."""
        hass = core.HomeAssistant(runtime_config.config_dir)
        loader.async_setup(hass)

        await async_enable_logging(
            hass,
            runtime_config.verbose,
            runtime_config.log_rotate_days,
            runtime_config.log_file,
            runtime_config.log_no_color,
        )

        if runtime_config.debug or hass.loop.get_debug():
            hass.config.debug = True

        hass.config.safe_mode = runtime_config.safe_mode
        hass.config.skip_pip = runtime_config.skip_pip
        hass.config.skip_pip_packages = runtime_config.skip_pip_packages

        return hass

    async def stop_hass(hass: core.HomeAssistant) -> None:
        """Stop hass."""
        # Ask integrations to shut down. It's messy but we can't
        # do a clean stop without knowing what is broken
        with contextlib.suppress(TimeoutError):
            async with hass.timeout.async_timeout(10):
                await hass.async_stop()

    hass = await create_hass()

    if runtime_config.skip_pip or runtime_config.skip_pip_packages:
        _LOGGER.warning(
            "Skipping pip installation of required modules. This may cause issues"
        )

    if not await conf_util.async_ensure_config_exists(hass):
        _LOGGER.error("Error getting configuration path")
        return None

    _LOGGER.info("Config directory: %s", runtime_config.config_dir)

    block_async_io.enable()

    config_dict = None
    basic_setup_success = False

    if not (recovery_mode := runtime_config.recovery_mode):
        await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)

        try:
            config_dict = await conf_util.async_hass_config_yaml(hass)
        except HomeAssistantError as err:
            _LOGGER.error(
                "Failed to parse configuration.yaml: %s. Activating recovery mode",
                err,
            )
        else:
            if not is_virtual_env():
                await async_mount_local_lib_path(runtime_config.config_dir)

            basic_setup_success = (
                await async_from_config_dict(config_dict, hass) is not None
            )

    if config_dict is None:
        recovery_mode = True
        await stop_hass(hass)
        hass = await create_hass()

    elif not basic_setup_success:
        _LOGGER.warning("Unable to set up core integrations. Activating recovery mode")
        recovery_mode = True
        await stop_hass(hass)
        hass = await create_hass()

    elif any(domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS):
        _LOGGER.warning(
            "Detected that %s did not load. Activating recovery mode",
            ",".join(CRITICAL_INTEGRATIONS),
        )

        # Carry over settings that survive the restart into recovery mode
        old_config = hass.config
        old_logging = hass.data.get(DATA_LOGGING)

        recovery_mode = True
        await stop_hass(hass)
        hass = await create_hass()

        if old_logging:
            hass.data[DATA_LOGGING] = old_logging
        hass.config.debug = old_config.debug
        hass.config.skip_pip = old_config.skip_pip
        hass.config.skip_pip_packages = old_config.skip_pip_packages
        hass.config.internal_url = old_config.internal_url
        hass.config.external_url = old_config.external_url
        # Setup loader cache after the config dir has been set
        loader.async_setup(hass)

    if recovery_mode:
        _LOGGER.info("Starting in recovery mode")
        hass.config.recovery_mode = True

        http_conf = (await http.async_get_last_config(hass)) or {}

        await async_from_config_dict(
            {"recovery_mode": {}, "http": http_conf},
            hass,
        )
    elif hass.config.safe_mode:
        _LOGGER.info("Starting in safe mode")

    if runtime_config.open_ui:
        hass.add_job(open_hass_ui, hass)

    return hass
380 
381 
def open_hass_ui(hass: core.HomeAssistant) -> None:
    """Open the Home Assistant UI in the default web browser.

    Logs a warning (and does nothing) when the HTTP API is not configured,
    the frontend is not loaded, or the browser could not be launched.
    """
    import webbrowser  # pylint: disable=import-outside-toplevel

    if hass.config.api is None or "frontend" not in hass.config.components:
        _LOGGER.warning("Cannot launch the UI because frontend not loaded")
        return

    scheme = "https" if hass.config.api.use_ssl else "http"
    url = str(
        yarl.URL.build(scheme=scheme, host="127.0.0.1", port=hass.config.api.port)
    )

    if not webbrowser.open(url):
        _LOGGER.warning(
            "Unable to open the Home Assistant UI in a browser. Open it yourself at %s",
            url,
        )
400 
401 
def _init_blocking_io_modules_in_executor() -> None:
    """Initialize modules that do blocking I/O in executor."""
    # Cache the result of platform.uname().processor in the executor.
    # Multiple modules call this function at startup which
    # executes a blocking subprocess call. This is a problem for the
    # asyncio event loop. By priming the cache of uname we can
    # avoid the blocking call in the event loop.
    _ = platform.uname().processor
    # Initialize the mimetypes module to avoid blocking calls
    # to the filesystem to load the mime.types file.
    mimetypes.init()
    # Initialize is_official_image and is_docker_env to avoid blocking calls
    # to the filesystem.
    is_official_image()
    is_docker_env()
417 
418 
async def async_load_base_functionality(hass: core.HomeAssistant) -> None:
    """Load the registries and modules that will do blocking I/O.

    Idempotent: a marker key in hass.data ensures the registries are only
    loaded once per instance. All loads run concurrently as eager tasks.
    """
    if DATA_REGISTRIES_LOADED in hass.data:
        return
    hass.data[DATA_REGISTRIES_LOADED] = None
    translation.async_setup(hass)
    entity.async_setup(hass)
    template.async_setup(hass)
    await asyncio.gather(
        create_eager_task(get_internal_store_manager(hass).async_initialize()),
        create_eager_task(area_registry.async_load(hass)),
        create_eager_task(category_registry.async_load(hass)),
        create_eager_task(device_registry.async_load(hass)),
        create_eager_task(entity_registry.async_load(hass)),
        create_eager_task(floor_registry.async_load(hass)),
        create_eager_task(issue_registry.async_load(hass)),
        create_eager_task(label_registry.async_load(hass)),
        hass.async_add_executor_job(_init_blocking_io_modules_in_executor),
        create_eager_task(template.async_load_custom_templates(hass)),
        create_eager_task(restore_state.async_load(hass)),
        create_eager_task(hass.config_entries.async_initialize()),
        create_eager_task(async_get_system_info(hass)),
    )
442 
443 
async def async_from_config_dict(
    config: ConfigType, hass: core.HomeAssistant
) -> core.HomeAssistant | None:
    """Try to configure Home Assistant from a configuration dictionary.

    Dynamically loads required components and its dependencies.
    This method is a coroutine.
    """
    start = monotonic()

    hass.config_entries = config_entries.ConfigEntries(hass, config)
    # Prime custom component cache early so we know if registry entries are tied
    # to a custom integration
    await loader.async_get_custom_components(hass)

    # Set up core.
    _LOGGER.debug("Setting up %s", CORE_INTEGRATIONS)

    if not all(
        await asyncio.gather(
            *(
                create_eager_task(
                    async_setup_component(hass, domain, config),
                    name=f"bootstrap setup {domain}",
                    loop=hass.loop,
                )
                for domain in CORE_INTEGRATIONS
            )
        )
    ):
        _LOGGER.error("Home Assistant core failed to initialize. ")
        return None

    _LOGGER.debug("Home Assistant core initialized")

    core_config = config.get(core.DOMAIN, {})

    try:
        await async_process_ha_core_config(hass, core_config)
    except vol.Invalid as config_err:
        conf_util.async_log_schema_error(config_err, core.DOMAIN, core_config, hass)
        async_notify_setup_error(hass, core.DOMAIN)
        return None
    except HomeAssistantError:
        _LOGGER.error(
            "Home Assistant core failed to initialize. Further initialization aborted"
        )
        return None

    await _async_set_up_integrations(hass, config)

    stop = monotonic()
    _LOGGER.info("Home Assistant initialized in %.2fs", stop - start)

    # Warn (and file a repair issue) when running on a Python version that
    # the next HA release will drop support for.
    if (
        REQUIRED_NEXT_PYTHON_HA_RELEASE
        and sys.version_info[:3] < REQUIRED_NEXT_PYTHON_VER
    ):
        current_python_version = ".".join(str(x) for x in sys.version_info[:3])
        required_python_version = ".".join(str(x) for x in REQUIRED_NEXT_PYTHON_VER[:2])
        _LOGGER.warning(
            (
                "Support for the running Python version %s is deprecated and "
                "will be removed in Home Assistant %s; "
                "Please upgrade Python to %s"
            ),
            current_python_version,
            REQUIRED_NEXT_PYTHON_HA_RELEASE,
            required_python_version,
        )
        issue_registry.async_create_issue(
            hass,
            core.DOMAIN,
            f"python_version_{required_python_version}",
            is_fixable=False,
            severity=issue_registry.IssueSeverity.WARNING,
            breaks_in_ha_version=REQUIRED_NEXT_PYTHON_HA_RELEASE,
            translation_key="python_version",
            translation_placeholders={
                "current_python_version": current_python_version,
                "required_python_version": required_python_version,
                "breaks_in_ha_version": REQUIRED_NEXT_PYTHON_HA_RELEASE,
            },
        )

    return hass
531 
532 
async def async_enable_logging(
    hass: core.HomeAssistant,
    verbose: bool = False,
    log_rotate_days: int | None = None,
    log_file: str | None = None,
    log_no_color: bool = False,
) -> None:
    """Set up the logging.

    This method must be run in the event loop.
    """
    fmt = (
        "%(asctime)s.%(msecs)03d %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    )

    if not log_no_color:
        try:
            # pylint: disable-next=import-outside-toplevel
            from colorlog import ColoredFormatter

            # basicConfig must be called after importing colorlog in order to
            # ensure that the handlers it sets up wraps the correct streams.
            logging.basicConfig(level=logging.INFO)

            colorfmt = f"%(log_color)s{fmt}%(reset)s"
            logging.getLogger().handlers[0].setFormatter(
                ColoredFormatter(
                    colorfmt,
                    datefmt=FORMAT_DATETIME,
                    reset=True,
                    log_colors={
                        "DEBUG": "cyan",
                        "INFO": "green",
                        "WARNING": "yellow",
                        "ERROR": "red",
                        "CRITICAL": "red",
                    },
                )
            )
        except ImportError:
            pass

    # If the above initialization failed for any reason, setup the default
    # formatting. If the above succeeds, this will result in a no-op.
    logging.basicConfig(format=fmt, datefmt=FORMAT_DATETIME, level=logging.INFO)

    # Capture warnings.warn(...) and friends messages in logs.
    # The standard destination for them is stderr, which may end up unnoticed.
    # This way they're where other messages are, and can be filtered as usual.
    logging.captureWarnings(True)

    # Suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
    logging.getLogger("httpx").setLevel(logging.WARNING)

    sys.excepthook = lambda *args: logging.getLogger().exception(
        "Uncaught exception", exc_info=args
    )
    threading.excepthook = lambda args: logging.getLogger().exception(
        "Uncaught thread exception",
        exc_info=(  # type: ignore[arg-type]
            args.exc_type,
            args.exc_value,
            args.exc_traceback,
        ),
    )

    # Log errors to a file if we have write access to file or config dir
    if log_file is None:
        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
    else:
        err_log_path = os.path.abspath(log_file)

    err_path_exists = os.path.isfile(err_log_path)
    err_dir = os.path.dirname(err_log_path)

    # Check if we can write to the error log if it exists or that
    # we can create files in the containing directory if not.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
        not err_path_exists and os.access(err_dir, os.W_OK)
    ):
        # File creation/rollover does blocking I/O, so run it in the executor.
        err_handler = await hass.async_add_executor_job(
            _create_log_file, err_log_path, log_rotate_days
        )

        err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))

        logger = logging.getLogger()
        logger.addHandler(err_handler)
        logger.setLevel(logging.INFO if verbose else logging.WARNING)

        # Save the log file location for access by other components.
        hass.data[DATA_LOGGING] = err_log_path
    else:
        _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)

    # Move log emission off the event loop thread onto a queue/listener.
    async_activate_log_queue_handler(hass)
632 
633 
def _create_log_file(
    err_log_path: str, log_rotate_days: int | None
) -> RotatingFileHandler | TimedRotatingFileHandler:
    """Create log file and do roll over.

    With log_rotate_days set, rotate at midnight keeping that many backups;
    otherwise use a rollover-on-start handler keeping a single backup.
    """
    err_handler: RotatingFileHandler | TimedRotatingFileHandler
    if log_rotate_days:
        err_handler = TimedRotatingFileHandler(
            err_log_path, when="midnight", backupCount=log_rotate_days
        )
    else:
        err_handler = _RotatingFileHandlerWithoutShouldRollOver(
            err_log_path, backupCount=1
        )

    try:
        err_handler.doRollover()
    except OSError as err:
        _LOGGER.error("Error rolling over log file: %s", err)

    return err_handler
654 
655 
class _RotatingFileHandlerWithoutShouldRollOver(RotatingFileHandler):
    """RotatingFileHandler that does not check if it should roll over on every log."""

    def shouldRollover(self, record: logging.LogRecord) -> bool:
        """Never roll over.

        The shouldRollover check is expensive because it has to stat
        the log file for every log record. Since we do not set maxBytes
        the result of this check is always False.
        """
        return False
667 
668 
async def async_mount_local_lib_path(config_dir: str) -> str:
    """Add local library to Python Path.

    This function is a coroutine. Returns the deps directory path
    (not the site-packages dir that was inserted into sys.path).
    """
    deps_dir = os.path.join(config_dir, "deps")
    if (lib_dir := await async_get_user_site(deps_dir)) not in sys.path:
        sys.path.insert(0, lib_dir)
    return deps_dir
678 
679 
@core.callback
def _get_domains(hass: core.HomeAssistant, config: dict[str, Any]) -> set[str]:
    """Get domains of components to set up."""
    # Filter out the repeating and common config section [homeassistant]
    domains = {
        domain for key in config if (domain := cv.domain_key(key)) != core.DOMAIN
    }

    # Add config entry and default domains
    if not hass.config.recovery_mode:
        domains.update(DEFAULT_INTEGRATIONS)
        domains.update(hass.config_entries.async_domains())
    else:
        domains.update(DEFAULT_INTEGRATIONS_RECOVERY_MODE)

    # Add domains depending on if the Supervisor is used or not
    if "SUPERVISOR" in os.environ:
        domains.update(DEFAULT_INTEGRATIONS_SUPERVISOR)

    return domains
700 
701 
class _WatchPendingSetups:
    """Periodic log and dispatch of setups that are pending."""

    def __init__(
        self,
        hass: core.HomeAssistant,
        setup_started: dict[tuple[str, str | None], float],
    ) -> None:
        """Initialize the WatchPendingSetups class."""
        self._hass = hass
        # Maps (domain, group) -> monotonic start time of the setup
        self._setup_started = setup_started
        self._duration_count = 0
        self._handle: asyncio.TimerHandle | None = None
        self._previous_was_empty = True
        self._loop = hass.loop

    def _async_watch(self) -> None:
        """Periodic log of setups that are pending."""
        now = monotonic()
        self._duration_count += SLOW_STARTUP_CHECK_INTERVAL

        # Aggregate elapsed time per domain across its setup groups
        remaining_with_setup_started: defaultdict[str, float] = defaultdict(float)
        for integration_group, start_time in self._setup_started.items():
            domain, _ = integration_group
            remaining_with_setup_started[domain] += now - start_time

        if remaining_with_setup_started:
            _LOGGER.debug("Integration remaining: %s", remaining_with_setup_started)
        elif waiting_tasks := self._hass._active_tasks:  # noqa: SLF001
            _LOGGER.debug("Waiting on tasks: %s", waiting_tasks)
        self._async_dispatch(remaining_with_setup_started)
        if (
            self._setup_started
            and self._duration_count % LOG_SLOW_STARTUP_INTERVAL == 0
        ):
            # We log every LOG_SLOW_STARTUP_INTERVAL until all integrations are done
            # once we take over LOG_SLOW_STARTUP_INTERVAL (60s) to start up
            _LOGGER.warning(
                "Waiting on integrations to complete setup: %s",
                self._setup_started,
            )

        _LOGGER.debug("Running timeout Zones: %s", self._hass.timeout.zones)
        self._async_schedule_next()

    def _async_dispatch(self, remaining_with_setup_started: dict[str, float]) -> None:
        """Dispatch the signal."""
        # Also dispatch once more after everything finished so listeners
        # see the transition to an empty set.
        if remaining_with_setup_started or not self._previous_was_empty:
            async_dispatcher_send_internal(
                self._hass, SIGNAL_BOOTSTRAP_INTEGRATIONS, remaining_with_setup_started
            )
        self._previous_was_empty = not remaining_with_setup_started

    def _async_schedule_next(self) -> None:
        """Schedule the next call."""
        self._handle = self._loop.call_later(
            SLOW_STARTUP_CHECK_INTERVAL, self._async_watch
        )

    def async_start(self) -> None:
        """Start watching."""
        self._async_schedule_next()

    def async_stop(self) -> None:
        """Stop watching."""
        self._async_dispatch({})
        if self._handle:
            self._handle.cancel()
            self._handle = None
771 
772 
async def async_setup_multi_components(
    hass: core.HomeAssistant,
    domains: set[str],
    config: dict[str, Any],
) -> None:
    """Set up multiple domains. Log on failure."""
    # Avoid creating tasks for domains that were setup in a previous stage
    domains_not_yet_setup = domains - hass.config.components
    # Create setup tasks for base platforms first since everything will have
    # to wait to be imported, and the sooner we can get the base platforms
    # loaded the sooner we can start loading the rest of the integrations.
    futures = {
        domain: hass.async_create_task_internal(
            async_setup_component(hass, domain, config),
            f"setup component {domain}",
            eager_start=True,
        )
        for domain in sorted(
            domains_not_yet_setup, key=SETUP_ORDER_SORT_KEY, reverse=True
        )
    }
    results = await asyncio.gather(*futures.values(), return_exceptions=True)
    for idx, domain in enumerate(futures):
        result = results[idx]
        if isinstance(result, BaseException):
            _LOGGER.error(
                "Error setting up integration %s - received exception",
                domain,
                exc_info=(type(result), result, result.__traceback__),
            )
803 
804 
async def _async_resolve_domains_to_setup(
    hass: core.HomeAssistant, config: dict[str, Any]
) -> tuple[set[str], dict[str, loader.Integration]]:
    """Resolve all dependencies and return list of domains to set up."""
    domains_to_setup = _get_domains(hass, config)
    needed_requirements: set[str] = set()
    platform_integrations = conf_util.extract_platform_integrations(
        config, BASE_PLATFORMS
    )
    # Ensure base platforms that have platform integrations are added to
    # to `domains_to_setup so they can be setup first instead of
    # discovering them when later when a config entry setup task
    # notices its needed and there is already a long line to use
    # the import executor.
    #
    # For example if we have
    # sensor:
    #   - platform: template
    #
    # `template` has to be loaded to validate the config for sensor
    # so we want to start loading `sensor` as soon as we know
    # it will be needed. The more platforms under `sensor:`, the longer
    # it will take to finish setup for `sensor` because each of these
    # platforms has to be imported before we can validate the config.
    #
    # Thankfully we are migrating away from the platform pattern
    # so this will be less of a problem in the future.
    domains_to_setup.update(platform_integrations)

    # Load manifests for base platforms and platform based integrations
    # that are defined under base platforms right away since we do not require
    # the manifest to list them as dependencies and we want to avoid the lock
    # contention when multiple integrations try to load them at once
    additional_manifests_to_load = {
        *BASE_PLATFORMS,
        *chain.from_iterable(platform_integrations.values()),
    }

    translations_to_load = additional_manifests_to_load.copy()

    # Resolve all dependencies so we know all integrations
    # that will have to be loaded and start right-away
    integration_cache: dict[str, loader.Integration] = {}
    to_resolve: set[str] = domains_to_setup
    while to_resolve or additional_manifests_to_load:
        old_to_resolve: set[str] = to_resolve
        to_resolve = set()

        if additional_manifests_to_load:
            to_get = {*old_to_resolve, *additional_manifests_to_load}
            additional_manifests_to_load.clear()
        else:
            to_get = old_to_resolve

        manifest_deps: set[str] = set()
        resolve_dependencies_tasks: list[asyncio.Task[bool]] = []
        integrations_to_process: list[loader.Integration] = []

        for domain, itg in (await loader.async_get_integrations(hass, to_get)).items():
            if not isinstance(itg, loader.Integration):
                # Lookup failed (e.g. integration not found); skip it
                continue
            integration_cache[domain] = itg
            needed_requirements.update(itg.requirements)

            # Make sure manifests for dependencies are loaded in the next
            # loop to try to group as many as manifest loads in a single
            # call to avoid the creating one-off executor jobs later in
            # the setup process
            additional_manifests_to_load.update(
                dep
                for dep in chain(itg.dependencies, itg.after_dependencies)
                if dep not in integration_cache
            )

            if domain not in old_to_resolve:
                continue

            integrations_to_process.append(itg)
            manifest_deps.update(itg.dependencies)
            manifest_deps.update(itg.after_dependencies)
            if not itg.all_dependencies_resolved:
                resolve_dependencies_tasks.append(
                    create_eager_task(
                        itg.resolve_dependencies(),
                        name=f"resolve dependencies {domain}",
                        loop=hass.loop,
                    )
                )

        if unseen_deps := manifest_deps - integration_cache.keys():
            # If there are dependencies, try to preload all
            # the integrations manifest at once and add them
            # to the list of requirements we need to install
            # so we can try to check if they are already installed
            # in a single call below which avoids each integration
            # having to wait for the lock to do it individually
            deps = await loader.async_get_integrations(hass, unseen_deps)
            for dependant_domain, dependant_itg in deps.items():
                if isinstance(dependant_itg, loader.Integration):
                    integration_cache[dependant_domain] = dependant_itg
                    needed_requirements.update(dependant_itg.requirements)

        if resolve_dependencies_tasks:
            await asyncio.gather(*resolve_dependencies_tasks)

        for itg in integrations_to_process:
            try:
                all_deps = itg.all_dependencies
            except RuntimeError:
                # Integration.all_dependencies raises RuntimeError if
                # dependencies could not be resolved
                continue
            for dep in all_deps:
                if dep in domains_to_setup:
                    continue
                domains_to_setup.add(dep)
                to_resolve.add(dep)

    _LOGGER.info("Domains to be set up: %s", domains_to_setup)

    # Optimistically check if requirements are already installed
    # ahead of setting up the integrations so we can prime the cache
    # We do not wait for this since its an optimization only
    hass.async_create_background_task(
        requirements.async_load_installed_versions(hass, needed_requirements),
        "check installed requirements",
        eager_start=True,
    )

    #
    # Only add the domains_to_setup after we finish resolving
    # as new domains are likely to added in the process
    #
    translations_to_load.update(domains_to_setup)
    # Start loading translations for all integrations we are going to set up
    # in the background so they are ready when we need them. This avoids a
    # lot of waiting for the translation load lock and a thundering herd of
    # tasks trying to load the same translations at the same time as each
    # integration is loaded.
    #
    # We do not wait for this since as soon as the task runs it will
    # hold the translation load lock and if anything is fast enough to
    # wait for the translation load lock, loading will be done by the
    # time it gets to it.
    hass.async_create_background_task(
        translation.async_load_integrations(hass, translations_to_load),
        "load translations",
        eager_start=True,
    )

    # Preload storage for all integrations we are going to set up
    # so we do not have to wait for it to be loaded when we need it
    # in the setup process.
    hass.async_create_background_task(
        get_internal_store_manager(hass).async_preload(
            [*PRELOAD_STORAGE, *domains_to_setup]
        ),
        "preload storage",
        eager_start=True,
    )

    return domains_to_setup, integration_cache
967 
968 
async def _async_set_up_integrations(
    hass: core.HomeAssistant, config: dict[str, Any]
) -> None:
    """Set up all the integrations.

    Resolves the full set of domains to set up, then sets them up in
    ordered phases: the pre-stage groups from SETUP_ORDER first, then
    stage 1 (core infrastructure integrations and their dependencies),
    then stage 2 (everything else), each stage bounded by a timeout.

    Args:
        hass: The Home Assistant instance being bootstrapped.
        config: The parsed YAML configuration dict.
    """
    # Track integrations whose setup is still pending so slow setups
    # get logged periodically while bootstrap is running.
    watcher = _WatchPendingSetups(hass, _setup_started(hass))
    watcher.async_start()

    domains_to_setup, integration_cache = await _async_resolve_domains_to_setup(
        hass, config
    )

    # Initialize recorder
    if "recorder" in domains_to_setup:
        recorder.async_initialize_recorder(hass)

    # Intersect each ordered pre-stage group with the domains we will
    # actually set up; empty groups are skipped in the loop below.
    pre_stage_domains = [
        (name, domains_to_setup & domain_group) for name, domain_group in SETUP_ORDER
    ]

    # calculate what components to setup in what stage
    stage_1_domains: set[str] = set()

    # Find all dependencies of any dependency of any stage 1 integration that
    # we plan on loading and promote them to stage 1. This is done only to not
    # get misleading log messages
    deps_promotion: set[str] = STAGE_1_INTEGRATIONS
    while deps_promotion:
        old_deps_promotion = deps_promotion
        deps_promotion = set()

        for domain in old_deps_promotion:
            if domain not in domains_to_setup or domain in stage_1_domains:
                continue

            stage_1_domains.add(domain)

            # Integration may be missing from the cache if its manifest
            # could not be resolved earlier; nothing to promote then.
            if (dep_itg := integration_cache.get(domain)) is None:
                continue

            deps_promotion.update(dep_itg.all_dependencies)

    stage_2_domains = domains_to_setup - stage_1_domains

    for name, domain_group in pre_stage_domains:
        if domain_group:
            # Anything handled in a pre-stage group must not be set up
            # again in stage 2.
            stage_2_domains -= domain_group
            _LOGGER.info("Setting up %s: %s", name, domain_group)
            to_be_loaded = domain_group.copy()
            to_be_loaded.update(
                dep
                for domain in domain_group
                if (integration := integration_cache.get(domain)) is not None
                for dep in integration.all_dependencies
            )
            async_set_domains_to_be_loaded(hass, to_be_loaded)
            await async_setup_multi_components(hass, domain_group, config)

    # Enables after dependencies when setting up stage 1 domains
    async_set_domains_to_be_loaded(hass, stage_1_domains)

    # Start setup
    if stage_1_domains:
        _LOGGER.info("Setting up stage 1: %s", stage_1_domains)
        try:
            async with hass.timeout.async_timeout(
                STAGE_1_TIMEOUT, cool_down=COOLDOWN_TIME
            ):
                await async_setup_multi_components(hass, stage_1_domains, config)
        except TimeoutError:
            # A timeout here is not fatal; continue bootstrapping and
            # report which tasks were still pending.
            _LOGGER.warning(
                "Setup timed out for stage 1 waiting on %s - moving forward",
                hass._active_tasks,  # noqa: SLF001
            )

    # Add after dependencies when setting up stage 2 domains
    async_set_domains_to_be_loaded(hass, stage_2_domains)

    if stage_2_domains:
        _LOGGER.info("Setting up stage 2: %s", stage_2_domains)
        try:
            async with hass.timeout.async_timeout(
                STAGE_2_TIMEOUT, cool_down=COOLDOWN_TIME
            ):
                await async_setup_multi_components(hass, stage_2_domains, config)
        except TimeoutError:
            _LOGGER.warning(
                "Setup timed out for stage 2 waiting on %s - moving forward",
                hass._active_tasks,  # noqa: SLF001
            )

    # Wrap up startup
    _LOGGER.debug("Waiting for startup to wrap up")
    try:
        async with hass.timeout.async_timeout(WRAP_UP_TIMEOUT, cool_down=COOLDOWN_TIME):
            await hass.async_block_till_done()
    except TimeoutError:
        _LOGGER.warning(
            "Setup timed out for bootstrap waiting on %s - moving forward",
            hass._active_tasks,  # noqa: SLF001
        )

    watcher.async_stop()

    # Only compute and sort the per-integration timings when debug
    # logging is actually enabled.
    if _LOGGER.isEnabledFor(logging.DEBUG):
        setup_time = async_get_setup_timings(hass)
        _LOGGER.debug(
            "Integration setup times: %s",
            dict(sorted(setup_time.items(), key=itemgetter(1), reverse=True)),
        )
None __init__(self, core.HomeAssistant hass, dict[tuple[str, str|None], float] setup_started)
Definition: bootstrap.py:709
None _async_dispatch(self, dict[str, float] remaining_with_setup_started)
Definition: bootstrap.py:747
RotatingFileHandler|TimedRotatingFileHandler _create_log_file(str err_log_path, int|None log_rotate_days)
Definition: bootstrap.py:636
set[str] _get_domains(core.HomeAssistant hass, dict[str, Any] config)
Definition: bootstrap.py:681
None async_enable_logging(core.HomeAssistant hass, bool verbose=False, int|None log_rotate_days=None, str|None log_file=None, bool log_no_color=False)
Definition: bootstrap.py:539
None open_hass_ui(core.HomeAssistant hass)
Definition: bootstrap.py:382
None _init_blocking_io_modules_in_executor()
Definition: bootstrap.py:402
str async_mount_local_lib_path(str config_dir)
Definition: bootstrap.py:669
None _async_set_up_integrations(core.HomeAssistant hass, dict[str, Any] config)
Definition: bootstrap.py:971
tuple[set[str], dict[str, loader.Integration]] _async_resolve_domains_to_setup(core.HomeAssistant hass, dict[str, Any] config)
Definition: bootstrap.py:807
None async_load_base_functionality(core.HomeAssistant hass)
Definition: bootstrap.py:419
core.HomeAssistant|None async_from_config_dict(ConfigType config, core.HomeAssistant hass)
Definition: bootstrap.py:446
None async_setup_multi_components(core.HomeAssistant hass, set[str] domains, dict[str, Any] config)
Definition: bootstrap.py:777
core.HomeAssistant|None async_setup_hass(RuntimeConfig runtime_config)
Definition: bootstrap.py:260
None async_process_ha_core_config(HomeAssistant hass, dict config)
Definition: core_config.py:327
_StoreManager get_internal_store_manager(HomeAssistant hass)
Definition: storage.py:93
dict[str, Any] async_get_system_info(HomeAssistant hass)
Definition: system_info.py:44
None async_notify_setup_error(HomeAssistant hass, str component, str|None display_link=None)
Definition: setup.py:99
None async_set_domains_to_be_loaded(core.HomeAssistant hass, set[str] domains)
Definition: setup.py:127
dict[str, float] async_get_setup_timings(core.HomeAssistant hass)
Definition: setup.py:798
dict[tuple[str, str|None], float] _setup_started(core.HomeAssistant hass)
Definition: setup.py:685
bool async_setup_component(core.HomeAssistant hass, str domain, ConfigType config)
Definition: setup.py:147
None async_activate_log_queue_handler(HomeAssistant hass)
Definition: logging.py:57
str async_get_user_site(str deps_dir)
Definition: package.py:162