#!/usr/bin/env python3
"""Generate an updated requirements_all.txt."""
import difflib
import importlib
import os
from pathlib import Path
import pkgutil
import re
import sys

from script.hassfest.model import Integration

from homeassistant.util.yaml.loader import load_yaml

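# Requirements that are commented out in the generated requirements files
# because they don't install on all systems (see comment_requirement below).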
COMMENT_REQUIREMENTS = (
    "Adafruit_BBIO",
    "Adafruit-DHT",
    "avion",
    "beacontools",
    "blinkt",
    "bluepy",
    "bme680",
    "credstash",
    "decora",
    "envirophat",
    "evdev",
    "face_recognition",
    "i2csense",
    "opencv-python-headless",
    "py_noaa",
    "pybluez",
    "pycups",
    "PySwitchbot",
    "pySwitchmate",
    "python-eq3bt",
    "python-gammu",
    "python-lirc",
    "pyuserinput",
    "raspihats",
    "rpi-rf",
    "RPi.GPIO",
    "smbus-cffi",
    "tensorflow",
    "VL53L1X2",
)

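# Requirements that are exempt from the exact-version pin check
# in process_requirements.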
IGNORE_PIN = ("colorlog>2.1,<3", "keyring>=9.3,<10.0", "urllib3")

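# Documentation URL included in the error reported for unpinned requirements.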
URL_PIN = (
    "https://developers.home-assistant.io/docs/"
    "creating_platform_code_review.html#1-requirements"
)


CONSTRAINT_PATH = os.path.join(
    os.path.dirname(__file__), "../homeassistant/package_constraints.txt"
)
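# Extra constraints appended to the generated package_constraints.txt.
# Pinning a package to the non-existent version 1000000000.0.0 effectively
# blocks it from being installed under these constraints.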
CONSTRAINT_BASE = """
pycryptodome>=3.6.6

# Not needed for our supported Python versions
enum34==1000000000.0.0

# This is an old unmaintained library and is replaced with pycryptodome
pycrypto==1000000000.0.0
"""

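# pre-commit hook ids to skip when deriving pip requirements from the
# pre-commit configuration.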
IGNORE_PRE_COMMIT_HOOK_ID = ("check-json",)


def has_tests(module: str):
    """Test if a module has tests.

    Module format: homeassistant.components.hue
    Test if exists: tests/components/hue
    """
    path = Path(module.replace(".", "/").replace("homeassistant", "tests"))
    if not path.exists():
        return False

    if not path.is_dir():
        return True

    # Dev environments might have stale directories around
    # from removed tests. Check for that.
    content = [f.name for f in path.glob("*")]

    # Directories need to contain more than `__pycache__`
    # to exist in Git and so be seen by CI.
    return content != ["__pycache__"]


def explore_module(package, explore_children):
    """Explore the modules."""
    module = importlib.import_module(package)

    found = []

    if not hasattr(module, "__path__"):
        return found

    for _, name, _ in pkgutil.iter_modules(module.__path__, package + "."):
        found.append(name)

        if explore_children:
            found.extend(explore_module(name, False))

    return found


def core_requirements():
    """Gather core requirements out of setup.py."""
    reqs_raw = re.search(
        r"REQUIRES = \[(.*?)\]", Path("setup.py").read_text(), re.S
    ).group(1)
    return [x[1] for x in re.findall(r"(['\"])(.*?)\1", reqs_raw)]


def gather_recursive_requirements(domain, seen=None):
    """Recursively gather requirements from a module."""
    if seen is None:
        seen = set()

    seen.add(domain)
    integration = Integration(Path(f"homeassistant/components/{domain}"))
    integration.load_manifest()
    reqs = set(integration.manifest["requirements"])
    for dep_domain in integration.manifest["dependencies"]:
        reqs.update(gather_recursive_requirements(dep_domain, seen))
    return reqs


def comment_requirement(req):
    """Comment out requirement. Some don't install on all systems."""
    return any(ign in req for ign in COMMENT_REQUIREMENTS)


def gather_modules():
    """Collect the information."""
    reqs = {}

    errors = []

    gather_requirements_from_manifests(errors, reqs)
    gather_requirements_from_modules(errors, reqs)

    for key in reqs:
        reqs[key] = sorted(reqs[key], key=lambda name: (len(name.split(".")), name))

    if errors:
        print("******* ERROR")
        print("Errors while importing: ", ", ".join(errors))
        return None

    return reqs


def gather_requirements_from_manifests(errors, reqs):
    """Gather all of the requirements from manifests."""
    integrations = Integration.load_dir(Path("homeassistant/components"))
    for domain in sorted(integrations):
        integration = integrations[domain]

        if not integration.manifest:
            errors.append(f"The manifest for integration {domain} is invalid.")
            continue

        process_requirements(
            errors,
            integration.manifest["requirements"],
            f"homeassistant.components.{domain}",
            reqs,
        )


def gather_requirements_from_modules(errors, reqs):
    """Collect the requirements from the modules directly."""
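    # Integration requirements come from manifests (see
    # gather_requirements_from_manifests); here only the scripts and auth
    # packages are scanned for a module-level REQUIREMENTS constant.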
    for package in sorted(
        explore_module("homeassistant.scripts", True)
        + explore_module("homeassistant.auth", True)
    ):
        try:
            module = importlib.import_module(package)
        except ImportError as err:
            print("{}.py: {}".format(package.replace(".", "/"), err))
            errors.append(package)
            continue

        if getattr(module, "REQUIREMENTS", None):
            process_requirements(errors, module.REQUIREMENTS, package, reqs)


def process_requirements(errors, module_requirements, package, reqs):
    """Process all of the requirements."""
    for req in module_requirements:
        if "://" in req:
            errors.append(f"{package}[Only pypi dependencies are allowed: {req}]")
        if req.partition("==")[1] == "" and req not in IGNORE_PIN:
            errors.append(f"{package}[Please pin requirement {req}, see {URL_PIN}]")
        reqs.setdefault(req, []).append(package)


def generate_requirements_list(reqs):
    """Generate a pip file based on requirements."""
    output = []
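    # Each requirement is preceded by comment lines listing the modules that
    # need it; requirements in COMMENT_REQUIREMENTS are themselves commented out.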
    for pkg, requirements in sorted(reqs.items(), key=lambda item: item[0]):
        for req in sorted(requirements):
            output.append(f"\n# {req}")

        if comment_requirement(pkg):
            output.append(f"\n# {pkg}\n")
        else:
            output.append(f"\n{pkg}\n")
    return "".join(output)


def requirements_all_output(reqs):
    """Generate output for requirements_all."""
    output = []
    output.append("# Home Assistant core")
    output.append("\n")
    output.append("\n".join(core_requirements()))
    output.append("\n")
    output.append(generate_requirements_list(reqs))

    return "".join(output)


def requirements_test_output(reqs):
    """Generate output for test_requirements."""
    output = []
    output.append("# Home Assistant tests, full dependency set\n")
    output.append(
        f"# Automatically generated by {Path(__file__).name}, do not edit\n\n"
    )
    output.append("-r requirements_test.txt\n")

    filtered = {
        requirement: modules
        for requirement, modules in reqs.items()
        if any(
            # Always install requirements that are not part of integrations
            not mdl.startswith("homeassistant.components.") or
            # Install tests for integrations that have tests
            has_tests(mdl)
            for mdl in modules
        )
    }
    output.append(generate_requirements_list(filtered))

    return "".join(output)


def requirements_pre_commit_output():
    """Generate output for pre-commit dependencies."""
    source = ".pre-commit-config-all.yaml"
    pre_commit_conf = load_yaml(source)
    reqs = []
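    # Repos without a rev (e.g. local hooks) are skipped; every remaining
    # hook id becomes a "package==rev" requirement.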
    for repo in (x for x in pre_commit_conf["repos"] if x.get("rev")):
        for hook in repo["hooks"]:
            if hook["id"] not in IGNORE_PRE_COMMIT_HOOK_ID:
                reqs.append(f"{hook['id']}=={repo['rev']}")
                reqs.extend(x for x in hook.get("additional_dependencies", ()))
    output = [
        f"# Automatically generated "
        f"from {source} by {Path(__file__).name}, do not edit",
        "",
    ]
    output.extend(sorted(reqs))
    return "\n".join(output) + "\n"


def gather_constraints():
    """Construct output for constraint file."""
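    # Constraints are the core requirements plus everything needed by the
    # default_config integration, followed by CONSTRAINT_BASE.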
    return (
        "\n".join(
            sorted(
                core_requirements()
                + list(gather_recursive_requirements("default_config"))
            )
            + [""]
        )
        + CONSTRAINT_BASE
    )


def diff_file(filename, content):
    """Diff a file."""
    return list(
        difflib.context_diff(
            [f"{line}\n" for line in Path(filename).read_text().split("\n")],
            [f"{line}\n" for line in content.split("\n")],
            filename,
            "generated",
        )
    )


def main(validate):
    """Run the script."""
    if not os.path.isfile("requirements_all.txt"):
        print("Run this from HA root dir")
        return 1

    data = gather_modules()

    if data is None:
        return 1

    reqs_file = requirements_all_output(data)
    reqs_test_file = requirements_test_output(data)
    reqs_pre_commit_file = requirements_pre_commit_output()
    constraints = gather_constraints()

    files = (
        ("requirements_all.txt", reqs_file),
        ("requirements_test_pre_commit.txt", reqs_pre_commit_file),
        ("requirements_test_all.txt", reqs_test_file),
        ("homeassistant/package_constraints.txt", constraints),
    )

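    # In validate mode, only diff the generated content against the files
    # on disk; nothing is written.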
    if validate:
        errors = []

        for filename, content in files:
            diff = diff_file(filename, content)
            if diff:
                errors.append("".join(diff))

        if errors:
            print("ERROR - FOUND THE FOLLOWING DIFFERENCES")
            print()
            print()
            print("\n\n".join(errors))
            print()
            print("Please run python3 -m script.gen_requirements_all")
            return 1

        return 0

    for filename, content in files:
        Path(filename).write_text(content)

    return 0


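# Pass "validate" as the last command line argument to check the generated
# files without rewriting them.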
if __name__ == "__main__":
    _VAL = sys.argv[-1] == "validate"
    sys.exit(main(_VAL))