1 """Coordinator for the scrape component."""
from __future__ import annotations

from datetime import timedelta
import logging

from bs4 import BeautifulSoup
15 _LOGGER = logging.getLogger(__name__)
19 """Scrape Coordinator."""
24 config_entry: ConfigEntry |
None,
26 update_interval: timedelta,
28 """Initialize Scrape coordinator."""
32 config_entry=config_entry,
33 name=
"Scrape Coordinator",
34 update_interval=update_interval,
39 """Fetch data from Rest."""
41 if (data := self.
_rest_rest.data)
is None:
43 soup = await self.
hasshass.async_add_executor_job(BeautifulSoup, data,
"lxml")
44 _LOGGER.debug(
"Raw beautiful soup: %s", soup)
# Method signatures (recovered from extraction residue):
#   __init__(self, hass: HomeAssistant, config_entry: ConfigEntry | None,
#            rest: RestData, update_interval: timedelta) -> None
#   _async_update_data(self) -> BeautifulSoup