Enable Ruff PTH for the script directory (#124441)

* Enable Ruff PTH for the script directory

* Address review comments

* Fix translations script

* Update script/hassfest/config_flow.py

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>

---------

Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
Sid 2024-09-06 11:33:01 +02:00 committed by GitHub
parent 7752789c3a
commit 1db68327f9
18 changed files with 125 additions and 163 deletions
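
Most of the hunks below repeat a small set of os.path-to-pathlib conversions that the newly enabled PTH rules flag. A minimal before/after sketch, condensed from the changes in this commit (illustrative only, not an exact excerpt of any one file):

    import os
    from pathlib import Path

    # os.path style (the pattern being removed):
    constraint = os.path.join(
        os.path.dirname(__file__), "../homeassistant/package_constraints.txt"
    )
    if os.path.isfile(constraint):
        with open(constraint) as fp:
            content = fp.read()

    # pathlib style (the pattern the PTH rules push toward):
    constraint = (
        Path(__file__).parent.parent / "homeassistant" / "package_constraints.txt"
    )
    if constraint.is_file():
        content = constraint.read_text()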


@@ -734,6 +734,7 @@ select = [
"PIE", # flake8-pie
"PL", # pylint
"PT", # flake8-pytest-style
"PTH", # flake8-pathlib
"PYI", # flake8-pyi
"RET", # flake8-return
"RSE", # flake8-raise
@@ -905,5 +906,9 @@ split-on-trailing-comma = false
"homeassistant/scripts/*" = ["T201"]
"script/*" = ["T20"]
# Temporary
"homeassistant/**" = ["PTH"]
"tests/**" = ["PTH"]
[tool.ruff.lint.mccabe]
max-complexity = 25


@@ -6,7 +6,6 @@ from __future__ import annotations
import difflib
import importlib
from operator import itemgetter
import os
from pathlib import Path
import pkgutil
import re
@@ -82,8 +81,8 @@ URL_PIN = (
)
CONSTRAINT_PATH = os.path.join(
os.path.dirname(__file__), "../homeassistant/package_constraints.txt"
CONSTRAINT_PATH = (
Path(__file__).parent.parent / "homeassistant" / "package_constraints.txt"
)
CONSTRAINT_BASE = """
# Constrain pycryptodome to avoid vulnerability
@@ -256,8 +255,7 @@ def explore_module(package: str, explore_children: bool) -> list[str]:
def core_requirements() -> list[str]:
"""Gather core requirements out of pyproject.toml."""
with open("pyproject.toml", "rb") as fp:
data = tomllib.load(fp)
data = tomllib.loads(Path("pyproject.toml").read_text())
dependencies: list[str] = data["project"]["dependencies"]
return dependencies
@@ -528,7 +526,7 @@ def diff_file(filename: str, content: str) -> list[str]:
def main(validate: bool, ci: bool) -> int:
"""Run the script."""
if not os.path.isfile("requirements_all.txt"):
if not Path("requirements_all.txt").is_file():
print("Run this from HA root dir")
return 1
@@ -590,7 +588,7 @@ def main(validate: bool, ci: bool) -> int:
def _get_hassfest_config() -> Config:
"""Get hassfest config."""
return Config(
root=Path(".").absolute(),
root=Path().absolute(),
specific_integrations=None,
action="validate",
requirements=True,


@@ -4,7 +4,7 @@ from __future__ import annotations
import argparse
from operator import attrgetter
import pathlib
from pathlib import Path
import sys
from time import monotonic
@@ -63,9 +63,9 @@ ALL_PLUGIN_NAMES = [
]
def valid_integration_path(integration_path: pathlib.Path | str) -> pathlib.Path:
def valid_integration_path(integration_path: Path | str) -> Path:
"""Test if it's a valid integration."""
path = pathlib.Path(integration_path)
path = Path(integration_path)
if not path.is_dir():
raise argparse.ArgumentTypeError(f"{integration_path} is not a directory.")
@@ -109,8 +109,8 @@ def get_config() -> Config:
)
parser.add_argument(
"--core-integrations-path",
type=pathlib.Path,
default=pathlib.Path("homeassistant/components"),
type=Path,
default=Path("homeassistant/components"),
help="Path to core integrations",
)
parsed = parser.parse_args()
@@ -123,14 +123,11 @@ def get_config() -> Config:
"Generate is not allowed when limiting to specific integrations"
)
if (
not parsed.integration_path
and not pathlib.Path("requirements_all.txt").is_file()
):
if not parsed.integration_path and not Path("requirements_all.txt").is_file():
raise RuntimeError("Run from Home Assistant root")
return Config(
root=pathlib.Path(".").absolute(),
root=Path().absolute(),
specific_integrations=parsed.integration_path,
action=parsed.action,
requirements=parsed.requirements,


@@ -34,19 +34,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
if config.specific_integrations:
return
with open(str(bluetooth_path)) as fp:
current = fp.read()
if current != content:
config.add_error(
"bluetooth",
"File bluetooth.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
return
if bluetooth_path.read_text() != content:
config.add_error(
"bluetooth",
"File bluetooth.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
def generate(integrations: dict[str, Integration], config: Config) -> None:
"""Generate bluetooth file."""
bluetooth_path = config.root / "homeassistant/generated/bluetooth.py"
with open(str(bluetooth_path), "w") as fp:
fp.write(f"{config.cache['bluetooth']}")
bluetooth_path.write_text(f"{config.cache['bluetooth']}")


@@ -98,18 +98,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
if config.specific_integrations:
return
with open(str(codeowners_path)) as fp:
if fp.read().strip() != content:
config.add_error(
"codeowners",
"File CODEOWNERS is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
return
if codeowners_path.read_text() != content + "\n":
config.add_error(
"codeowners",
"File CODEOWNERS is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
def generate(integrations: dict[str, Integration], config: Config) -> None:
"""Generate CODEOWNERS."""
codeowners_path = config.root / "CODEOWNERS"
with open(str(codeowners_path), "w") as fp:
fp.write(f"{config.cache['codeowners']}\n")
codeowners_path.write_text(f"{config.cache['codeowners']}\n")


@@ -3,7 +3,6 @@
from __future__ import annotations
import json
import pathlib
from typing import Any
from .brand import validate as validate_brands
@@ -216,36 +215,31 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
if config.specific_integrations:
return
brands = Brand.load_dir(pathlib.Path(config.root / "homeassistant/brands"), config)
brands = Brand.load_dir(config.root / "homeassistant/brands", config)
validate_brands(brands, integrations, config)
with open(str(config_flow_path)) as fp:
if fp.read() != content:
config.add_error(
"config_flow",
"File config_flows.py is not up to date. "
"Run python3 -m script.hassfest",
fixable=True,
)
if config_flow_path.read_text() != content:
config.add_error(
"config_flow",
"File config_flows.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
config.cache["integrations"] = content = _generate_integrations(
brands, integrations, config
)
with open(str(integrations_path)) as fp:
if fp.read() != content + "\n":
config.add_error(
"config_flow",
"File integrations.json is not up to date. "
"Run python3 -m script.hassfest",
fixable=True,
)
if integrations_path.read_text() != content + "\n":
config.add_error(
"config_flow",
"File integrations.json is not up to date. "
"Run python3 -m script.hassfest",
fixable=True,
)
def generate(integrations: dict[str, Integration], config: Config) -> None:
"""Generate config flow file."""
config_flow_path = config.root / "homeassistant/generated/config_flows.py"
integrations_path = config.root / "homeassistant/generated/integrations.json"
with open(str(config_flow_path), "w") as fp:
fp.write(f"{config.cache['config_flow']}")
with open(str(integrations_path), "w") as fp:
fp.write(f"{config.cache['integrations']}\n")
config_flow_path.write_text(f"{config.cache['config_flow']}")
integrations_path.write_text(f"{config.cache['integrations']}\n")


@@ -32,19 +32,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
if config.specific_integrations:
return
with open(str(dhcp_path)) as fp:
current = fp.read()
if current != content:
config.add_error(
"dhcp",
"File dhcp.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
return
if dhcp_path.read_text() != content:
config.add_error(
"dhcp",
"File dhcp.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
def generate(integrations: dict[str, Integration], config: Config) -> None:
"""Generate dhcp file."""
dhcp_path = config.root / "homeassistant/generated/dhcp.py"
with open(str(dhcp_path), "w") as fp:
fp.write(f"{config.cache['dhcp']}")
dhcp_path.write_text(f"{config.cache['dhcp']}")


@@ -103,9 +103,9 @@ LABEL "com.github.actions.color"="gray-dark"
"""
def _get_package_versions(file: str, packages: set[str]) -> dict[str, str]:
def _get_package_versions(file: Path, packages: set[str]) -> dict[str, str]:
package_versions: dict[str, str] = {}
with open(file, encoding="UTF-8") as fp:
with file.open(encoding="UTF-8") as fp:
for _, line in enumerate(fp):
if package_versions.keys() == packages:
return package_versions
@@ -173,10 +173,10 @@ def _generate_files(config: Config) -> list[File]:
) * 1000
package_versions = _get_package_versions(
"requirements_test.txt", {"pipdeptree", "tqdm", "uv"}
Path("requirements_test.txt"), {"pipdeptree", "tqdm", "uv"}
)
package_versions |= _get_package_versions(
"requirements_test_pre_commit.txt", {"ruff"}
Path("requirements_test_pre_commit.txt"), {"ruff"}
)
return [


@@ -10,8 +10,7 @@ from .model import Config, Integration
def validate(integrations: dict[str, Integration], config: Config) -> None:
"""Validate project metadata keys."""
metadata_path = config.root / "pyproject.toml"
with open(metadata_path, "rb") as fp:
data = tomllib.load(fp)
data = tomllib.loads(metadata_path.read_text())
try:
if data["project"]["version"] != __version__:


@@ -33,17 +33,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
if config.specific_integrations:
return
with open(str(mqtt_path)) as fp:
if fp.read() != content:
config.add_error(
"mqtt",
"File mqtt.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
if mqtt_path.read_text() != content:
config.add_error(
"mqtt",
"File mqtt.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
def generate(integrations: dict[str, Integration], config: Config) -> None:
"""Generate MQTT file."""
mqtt_path = config.root / "homeassistant/generated/mqtt.py"
with open(str(mqtt_path), "w") as fp:
fp.write(f"{config.cache['mqtt']}")
mqtt_path.write_text(f"{config.cache['mqtt']}")


@@ -33,17 +33,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
if config.specific_integrations:
return
with open(str(ssdp_path)) as fp:
if fp.read() != content:
config.add_error(
"ssdp",
"File ssdp.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
if ssdp_path.read_text() != content:
config.add_error(
"ssdp",
"File ssdp.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
def generate(integrations: dict[str, Integration], config: Config) -> None:
"""Generate ssdp file."""
ssdp_path = config.root / "homeassistant/generated/ssdp.py"
with open(str(ssdp_path), "w") as fp:
fp.write(f"{config.cache['ssdp']}")
ssdp_path.write_text(f"{config.cache['ssdp']}")


@@ -35,19 +35,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
if config.specific_integrations:
return
with open(str(usb_path)) as fp:
current = fp.read()
if current != content:
config.add_error(
"usb",
"File usb.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
return
if usb_path.read_text() != content:
config.add_error(
"usb",
"File usb.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
def generate(integrations: dict[str, Integration], config: Config) -> None:
"""Generate usb file."""
usb_path = config.root / "homeassistant/generated/usb.py"
with open(str(usb_path), "w") as fp:
fp.write(f"{config.cache['usb']}")
usb_path.write_text(f"{config.cache['usb']}")


@@ -90,19 +90,15 @@ def validate(integrations: dict[str, Integration], config: Config) -> None:
if config.specific_integrations:
return
with open(str(zeroconf_path)) as fp:
current = fp.read()
if current != content:
config.add_error(
"zeroconf",
"File zeroconf.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
return
if zeroconf_path.read_text() != content:
config.add_error(
"zeroconf",
"File zeroconf.py is not up to date. Run python3 -m script.hassfest",
fixable=True,
)
def generate(integrations: dict[str, Integration], config: Config) -> None:
"""Generate zeroconf file."""
zeroconf_path = config.root / "homeassistant/generated/zeroconf.py"
with open(str(zeroconf_path), "w") as fp:
fp.write(f"{config.cache['zeroconf']}")
zeroconf_path.write_text(f"{config.cache['zeroconf']}")


@@ -2,7 +2,7 @@
"""Inspect all component SCHEMAS."""
import importlib
import os
from pathlib import Path
import pkgutil
from homeassistant.config import _identify_config_schema
@@ -20,7 +20,7 @@ def explore_module(package):
def main():
"""Run the script."""
if not os.path.isfile("requirements_all.txt"):
if not Path("requirements_all.txt").is_file():
print("Run this from HA root dir")
return


@@ -9,6 +9,7 @@ from collections import namedtuple
from contextlib import suppress
import itertools
import os
from pathlib import Path
import re
import shlex
import sys
@@ -63,7 +64,7 @@ async def async_exec(*args, display=False):
"""Execute, return code & log."""
argsp = []
for arg in args:
if os.path.isfile(arg):
if Path(arg).is_file():
argsp.append(f"\\\n {shlex.quote(arg)}")
else:
argsp.append(shlex.quote(arg))
@@ -132,7 +133,7 @@ async def ruff(files):
async def lint(files):
"""Perform lint."""
files = [file for file in files if os.path.isfile(file)]
files = [file for file in files if Path(file).is_file()]
res = sorted(
itertools.chain(
*await asyncio.gather(
@@ -164,7 +165,7 @@ async def lint(files):
async def main():
"""Run the main loop."""
# Ensure we are in the homeassistant root
os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
os.chdir(Path(__file__).parent.parent)
files = await git()
if not files:
@@ -194,7 +195,7 @@ async def main():
gen_req = True # requirements script for components
# Find test files...
if fname.startswith("tests/"):
if "/test_" in fname and os.path.isfile(fname):
if "/test_" in fname and Path(fname).is_file():
# All test helpers should be excluded
test_files.add(fname)
else:
@@ -207,7 +208,7 @@ async def main():
else:
parts[-1] = f"test_{parts[-1]}"
fname = "/".join(parts)
if os.path.isfile(fname):
if Path(fname).is_file():
test_files.add(fname)
if gen_req:


@@ -66,7 +66,7 @@ class BucketHolder:
def create_ouput_file(self) -> None:
"""Create output file."""
with open("pytest_buckets.txt", "w") as file:
with Path("pytest_buckets.txt").open("w") as file:
for idx, bucket in enumerate(self._buckets):
print(f"Bucket {idx+1} has {bucket.total_tests} tests")
file.write(bucket.get_paths_line())


@@ -4,8 +4,7 @@
from __future__ import annotations
import json
import os
import pathlib
from pathlib import Path
import re
import subprocess
@@ -14,7 +13,7 @@ from .error import ExitApp
from .util import get_lokalise_token, load_json_from_path
FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
DOWNLOAD_DIR = pathlib.Path("build/translations-download").absolute()
DOWNLOAD_DIR = Path("build/translations-download").absolute()
def run_download_docker():
@@ -56,35 +55,32 @@ def run_download_docker():
raise ExitApp("Failed to download translations")
def save_json(filename: str, data: list | dict):
"""Save JSON data to a file.
Returns True on success.
"""
data = json.dumps(data, sort_keys=True, indent=4)
with open(filename, "w", encoding="utf-8") as fdesc:
fdesc.write(data)
return True
return False
def save_json(filename: Path, data: list | dict) -> None:
"""Save JSON data to a file."""
filename.write_text(json.dumps(data, sort_keys=True, indent=4), encoding="utf-8")
def get_component_path(lang, component):
def get_component_path(lang, component) -> Path | None:
"""Get the component translation path."""
if os.path.isdir(os.path.join("homeassistant", "components", component)):
return os.path.join(
"homeassistant", "components", component, "translations", f"{lang}.json"
if (Path("homeassistant") / "components" / component).is_dir():
return (
Path("homeassistant")
/ "components"
/ component
/ "translations"
/ f"{lang}.json"
)
return None
def get_platform_path(lang, component, platform):
def get_platform_path(lang, component, platform) -> Path:
"""Get the platform translation path."""
return os.path.join(
"homeassistant",
"components",
component,
"translations",
f"{platform}.{lang}.json",
return (
Path("homeassistant")
/ "components"
/ component
/ "translations"
/ f"{platform}.{lang}.json"
)
@@ -107,7 +103,7 @@ def save_language_translations(lang, translations):
f"Skipping {lang} for {component}, as the integration doesn't seem to exist."
)
continue
os.makedirs(os.path.dirname(path), exist_ok=True)
path.parent.mkdir(parents=True, exist_ok=True)
save_json(path, base_translations)
if "platform" not in component_translations:
@@ -117,7 +113,7 @@ def save_language_translations(lang, translations):
"platform"
].items():
path = get_platform_path(lang, component, platform)
os.makedirs(os.path.dirname(path), exist_ok=True)
path.parent.mkdir(parents=True, exist_ok=True)
save_json(path, platform_translations)
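
For the "Fix translations script" bullet above: the reworked helpers now pass pathlib.Path objects end to end. A short usage sketch, assuming the functions from the hunk above are in scope; the "en"/"hue" arguments and the payload are made-up examples, not values from the commit:

    # Mirrors save_language_translations above; values are illustrative.
    path = get_component_path("en", "hue")
    if path is not None:  # None when the integration directory does not exist
        path.parent.mkdir(parents=True, exist_ok=True)
        save_json(path, {"title": "Example"})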


@@ -2,6 +2,7 @@
"""Helper script to bump the current version."""
import argparse
from pathlib import Path
import re
import subprocess
@@ -110,8 +111,7 @@ def bump_version(
def write_version(version):
"""Update Home Assistant constant file with new version."""
with open("homeassistant/const.py") as fil:
content = fil.read()
content = Path("homeassistant/const.py").read_text()
major, minor, patch = str(version).split(".", 2)
@@ -125,25 +125,21 @@ def write_version(version):
"PATCH_VERSION: Final = .*\n", f'PATCH_VERSION: Final = "{patch}"\n', content
)
with open("homeassistant/const.py", "w") as fil:
fil.write(content)
Path("homeassistant/const.py").write_text(content)
def write_version_metadata(version: Version) -> None:
"""Update pyproject.toml file with new version."""
with open("pyproject.toml", encoding="utf8") as fp:
content = fp.read()
content = Path("pyproject.toml").read_text(encoding="utf8")
content = re.sub(r"(version\W+=\W).+\n", f'\\g<1>"{version}"\n', content, count=1)
with open("pyproject.toml", "w", encoding="utf8") as fp:
fp.write(content)
Path("pyproject.toml").write_text(content, encoding="utf8")
def write_ci_workflow(version: Version) -> None:
"""Update ci workflow with new version."""
with open(".github/workflows/ci.yaml") as fp:
content = fp.read()
content = Path(".github/workflows/ci.yaml").read_text()
short_version = ".".join(str(version).split(".", maxsplit=2)[:2])
content = re.sub(
@@ -153,8 +149,7 @@ def write_ci_workflow(version: Version) -> None:
count=1,
)
with open(".github/workflows/ci.yaml", "w") as fp:
fp.write(content)
Path(".github/workflows/ci.yaml").write_text(content)
def main() -> None: