2019-04-13 20:17:01 +00:00
|
|
|
"""Validate dependencies."""
|
2021-03-18 21:58:19 +00:00
|
|
|
from __future__ import annotations
|
|
|
|
|
2019-12-16 07:22:20 +00:00
|
|
|
import ast
|
2023-02-27 01:25:29 +00:00
|
|
|
from collections import deque
|
2023-03-30 09:25:14 +00:00
|
|
|
import multiprocessing
|
2019-12-19 13:00:22 +00:00
|
|
|
from pathlib import Path
|
2019-04-13 20:17:01 +00:00
|
|
|
|
2021-12-03 08:31:17 +00:00
|
|
|
from homeassistant.const import Platform
|
2019-12-10 08:24:49 +00:00
|
|
|
from homeassistant.requirements import DISCOVERY_INTEGRATIONS
|
|
|
|
|
2022-11-23 18:05:31 +00:00
|
|
|
from .model import Config, Integration
|
2019-04-13 20:17:01 +00:00
|
|
|
|
|
|
|
|
2019-12-16 07:22:20 +00:00
|
|
|
class ImportCollector(ast.NodeVisitor):
    """Collect all integrations referenced."""

    def __init__(self, integration: Integration) -> None:
        """Initialize the import collector."""
        self.integration = integration
        # Per-file mapping of relative path -> referenced integration domains.
        self.referenced: dict[Path, set[str]] = {}

        # Current file or dir we're inspecting
        self._cur_fil_dir: Path | None = None

    def collect(self) -> None:
        """Collect imports from a source file."""
        for source_file in self.integration.path.glob("**/*.py"):
            if not source_file.is_file():
                continue

            self._cur_fil_dir = source_file.relative_to(self.integration.path)
            self.referenced[self._cur_fil_dir] = set()
            self.visit(ast.parse(source_file.read_text()))
            self._cur_fil_dir = None

    def _add_reference(self, domain: str) -> None:
        """Record a referenced integration domain for the current file."""
        assert self._cur_fil_dir
        self.referenced[self._cur_fil_dir].add(domain)

    def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
        """Visit ImportFrom node."""
        module = node.module
        if module is None:
            return

        # Exception: we will allow importing the sign path code.
        imported = node.names
        if (
            module == "homeassistant.components.http.auth"
            and len(imported) == 1
            and imported[0].name == "async_sign_path"
        ):
            return

        if module.startswith("homeassistant.components."):
            # from homeassistant.components.alexa.smart_home import EVENT_ALEXA_SMART_HOME
            # from homeassistant.components.logbook import bla
            self._add_reference(module.split(".")[2])
        elif module == "homeassistant.components":
            # from homeassistant.components import sun
            for alias in imported:
                self._add_reference(alias.name)

    def visit_Import(self, node: ast.Import) -> None:
        """Visit Import node."""
        # import homeassistant.components.hue as hue
        prefix = "homeassistant.components."
        for alias in node.names:
            if alias.name.startswith(prefix):
                self._add_reference(alias.name.split(".")[2])

    def visit_Attribute(self, node: ast.Attribute) -> None:
        """Visit Attribute node."""
        # Matches attribute chains of the shape:
        #   hass.components.<domain>.<anything>
        # as well as:
        #   self.hass.components.<domain>.<anything>
        #   self._hass.components.<domain>.<anything>
        parent = node.value
        if isinstance(parent, ast.Attribute) and parent.attr == "components":
            owner = parent.value
            owner_is_hass = (
                isinstance(owner, ast.Name) and owner.id == "hass"
            ) or (
                isinstance(owner, ast.Attribute) and owner.attr in ("hass", "_hass")
            )
            if owner_is_hass:
                self._add_reference(node.attr)
                return

        # Have it visit other kids
        self.generic_visit(node)
|
2019-04-13 20:17:01 +00:00
|
|
|
|
|
|
|
|
|
|
|
# Domains that any integration may reference without declaring them in its
# manifest (unioned into the allowed set by calc_allowed_references below).
ALLOWED_USED_COMPONENTS = {
    # All entity platforms (sensor, light, switch, ...)
    *{platform.value for platform in Platform},
    # Internal integrations
    "alert",
    "automation",
    "conversation",
    "device_automation",
    "frontend",
    "group",
    "hassio",
    "homeassistant",
    "input_boolean",
    "input_button",
    "input_datetime",
    "input_number",
    "input_select",
    "input_text",
    "media_source",
    "onboarding",
    "panel_custom",
    "persistent_notification",
    "person",
    "script",
    "shopping_list",
    "sun",
    "system_health",
    "system_log",
    "timer",
    "webhook",
    "websocket_api",
    "zone",
    # Other
    "mjpeg",  # base class, has no reqs or component to load.
    "stream",  # Stream cannot install on all systems, can be imported without reqs.
}
|
|
|
|
|
2020-02-12 11:59:59 +00:00
|
|
|
# Whitelisted dependency violations. Entries are either a
# (domain, referenced_domain) tuple, exempting that single reference, or a
# bare domain string, exempting the whole integration from dependency
# validation (see _compute_integration_dependencies).
IGNORE_VIOLATIONS = {
    # Has same requirement, gets defaults.
    ("sql", "recorder"),
    # Sharing a base class
    ("lutron_caseta", "lutron"),
    ("ffmpeg_noise", "ffmpeg_motion"),
    # Demo
    ("demo", "manual"),
    # This would be a circular dep
    ("http", "network"),
    # This would be a circular dep
    ("zha", "homeassistant_hardware"),
    ("zha", "homeassistant_sky_connect"),
    ("zha", "homeassistant_yellow"),
    # This should become a helper method that integrations can submit data to
    ("websocket_api", "lovelace"),
    ("websocket_api", "shopping_list"),
    "logbook",
}
|
2019-12-05 09:15:28 +00:00
|
|
|
|
2019-04-13 20:17:01 +00:00
|
|
|
|
2021-03-18 21:58:19 +00:00
|
|
|
def calc_allowed_references(integration: Integration) -> set[str]:
    """Return a set of allowed references."""
    manifest = integration.manifest
    # Start from the globally-allowed components, then fold in everything
    # the manifest declares explicitly.
    allowed = set(ALLOWED_USED_COMPONENTS)
    allowed.update(manifest.get("dependencies", []))
    allowed.update(manifest.get("after_dependencies", []))

    # bluetooth_adapters is a wrapper to ensure
    # that all the integrations that provide bluetooth
    # adapters are setup before loading integrations
    # that use them.
    if "bluetooth_adapters" in allowed:
        allowed.add("bluetooth")

    # Discovery requirements are ok if referenced in manifest
    for discovery_domain, manifest_keys in DISCOVERY_INTEGRATIONS.items():
        if any(key in manifest for key in manifest_keys):
            allowed.add(discovery_domain)

    return allowed
|
|
|
|
|
|
|
|
|
|
|
|
def find_non_referenced_integrations(
    integrations: dict[str, Integration],
    integration: Integration,
    references: dict[Path, set[str]],
) -> set[str]:
    """Find integrations that are not allowed to be referenced."""
    allowed_references = calc_allowed_references(integration)
    violations: set[str] = set()

    for path, refs in references.items():
        # climate.py is stored as climate,
        # climate/__init__.py is stored as climate
        root = path.stem if len(path.parts) == 1 else path.parts[0]
        is_platform_other_integration = root in integrations

        for ref in refs:
            # We are always allowed to import from ourselves
            if ref == integration.domain:
                continue

            # These references are approved based on the manifest
            if ref in allowed_references:
                continue

            # Some violations are whitelisted
            if (integration.domain, ref) in IGNORE_VIOLATIONS:
                continue

            # If it's a platform for another integration, the other integration is ok
            if is_platform_other_integration and root == ref:
                continue

            # These have a platform specified in this integration
            if not is_platform_other_integration and (
                (integration.path / f"{ref}.py").is_file()
                # Platform dir
                or (integration.path / ref).is_dir()
            ):
                continue

            violations.add(ref)

    return violations
|
|
|
|
|
|
|
|
|
2023-03-30 09:25:14 +00:00
|
|
|
def _compute_integration_dependencies(
    integration: Integration,
) -> tuple[str, dict[Path, set[str]] | None]:
    """Compute integration dependencies."""
    domain = integration.domain

    # Some integrations are allowed to have violations.
    if domain in IGNORE_VIOLATIONS:
        return (domain, None)

    # Find usage of hass.components
    imports = ImportCollector(integration)
    imports.collect()
    return (domain, imports.referenced)
|
2019-12-19 13:00:22 +00:00
|
|
|
|
2019-04-13 20:17:01 +00:00
|
|
|
|
2023-03-30 09:25:14 +00:00
|
|
|
def _validate_dependency_imports(
    integrations: dict[str, Integration],
) -> None:
    """Validate all dependencies."""

    # Find integration dependencies with multiprocessing
    # (because it takes some time to parse thousands of files)
    with multiprocessing.Pool() as pool:
        integration_imports = dict(
            pool.imap_unordered(
                _compute_integration_dependencies,
                integrations.values(),
                chunksize=10,
            )
        )

    for integration in integrations.values():
        referenced = integration_imports[integration.domain]
        # Either ignored (None) or has no references at all.
        if not referenced:
            continue

        offenders = find_non_referenced_integrations(
            integrations, integration, referenced
        )
        for domain in sorted(offenders):
            integration.add_error(
                "dependencies",
                f"Using component {domain} but it's not in 'dependencies' "
                "or 'after_dependencies'",
            )
|
|
|
|
|
2023-02-27 01:25:29 +00:00
|
|
|
|
|
|
|
def _check_circular_deps(
    integrations: dict[str, Integration],
    start_domain: str,
    integration: Integration,
    checked: set[str],
    checking: deque[str],
) -> None:
    """Check for circular dependencies pointing at starting_domain.

    Depth-first walk of the dependency graph rooted at *integration*:
    `checked` holds domains already fully explored, `checking` is the
    current DFS path (also rendered into the error message to show the
    cycle). An error is reported on *start_domain* whenever a path leads
    back to it.
    """

    # Already fully explored, or already on the current path: stop.
    if integration.domain in checked or integration.domain in checking:
        return

    checking.append(integration.domain)
    for domain in integration.manifest.get("dependencies", []):
        if domain == start_domain:
            integrations[start_domain].add_error(
                "dependencies",
                f"Found a circular dependency with {integration.domain} ({', '.join(checking)})",
            )
            break

        _check_circular_deps(
            integrations, start_domain, integrations[domain], checked, checking
        )
    # NOTE: for/else -- after_dependencies are only examined when the loop
    # above completed without breaking (i.e. no cycle found via
    # "dependencies").
    else:
        for domain in integration.manifest.get("after_dependencies", []):
            if domain == start_domain:
                integrations[start_domain].add_error(
                    "dependencies",
                    f"Found a circular dependency with after dependencies of {integration.domain} ({', '.join(checking)})",
                )
                break

            _check_circular_deps(
                integrations, start_domain, integrations[domain], checked, checking
            )
    checked.add(integration.domain)
    checking.remove(integration.domain)
|
|
|
|
|
2019-04-13 20:17:01 +00:00
|
|
|
|
2023-03-30 09:25:14 +00:00
|
|
|
def _validate_circular_dependencies(integrations: dict[str, Integration]) -> None:
    """Run the circular-dependency check for every integration."""
    for integration in integrations.values():
        # Integrations exempted from dependency validation are skipped.
        if integration.domain in IGNORE_VIOLATIONS:
            continue

        _check_circular_deps(
            integrations, integration.domain, integration, set(), deque()
        )
|
2019-04-13 20:17:01 +00:00
|
|
|
|
2023-03-30 09:25:14 +00:00
|
|
|
|
2023-03-30 10:05:11 +00:00
|
|
|
def _validate_dependencies(
    integrations: dict[str, Integration],
) -> None:
    """Check that all referenced dependencies exist and are not duplicated."""
    for integration in integrations.values():
        manifest = integration.manifest
        if not manifest:
            # Nothing to validate without a parsed manifest.
            continue

        after_dependencies = manifest.get("after_dependencies", [])
        for dependency in manifest.get("dependencies", []):
            if dependency in after_dependencies:
                integration.add_error(
                    "dependencies",
                    f"Dependency {dependency} is both in dependencies and after_dependencies",
                )

            if dependency not in integrations:
                integration.add_error(
                    "dependencies", f"Dependency {dependency} does not exist"
                )
|
2023-03-30 09:25:14 +00:00
|
|
|
|
|
|
|
|
|
|
|
def validate(
    integrations: dict[str, Integration],
    config: Config,
) -> None:
    """Handle dependencies for integrations."""
    _validate_dependency_imports(integrations)

    if config.specific_integrations:
        # Manifest-level checks need the full integration set.
        return

    _validate_dependencies(integrations)
    _validate_circular_dependencies(integrations)
|