Improve error messages from translation script (#102098)

Co-authored-by: Robert Resch <robert@resch.dev>
Erik Montnemery 2023-10-22 23:45:27 +02:00 committed by GitHub
parent bc45de627a
commit 164872e1af
8 changed files with 59 additions and 24 deletions
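
All eight files make the same substitution: direct json.loads(path.read_text()) calls in the translation scripts are routed through a new load_json_from_path helper, which re-raises json.JSONDecodeError as a new JSONDecodeErrorWithPath so that parse failures name the file that could not be parsed.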

View File

@@ -1,11 +1,10 @@
"""Find translation keys that are in Lokalise but no longer defined in source."""
import argparse
-import json
from .const import CORE_PROJECT_ID, FRONTEND_DIR, FRONTEND_PROJECT_ID, INTEGRATIONS_DIR
from .error import ExitApp
from .lokalise import get_api
-from .util import get_base_arg_parser
+from .util import get_base_arg_parser, load_json_from_path
def get_arguments() -> argparse.Namespace:
@@ -46,9 +45,9 @@ def find_core():
translations = int_dir / "translations" / "en.json"
-strings_json = json.loads(strings.read_text())
+strings_json = load_json_from_path(strings)
if translations.is_file():
-translations_json = json.loads(translations.read_text())
+translations_json = load_json_from_path(translations)
else:
translations_json = {}
@@ -69,8 +68,8 @@ def find_frontend():
missing_keys = []
find_extra(
-json.loads(source.read_text()),
-json.loads(translated.read_text()),
+load_json_from_path(source),
+load_json_from_path(translated),
"",
missing_keys,
)

View File

@@ -9,7 +9,7 @@ from homeassistant.const import Platform
from . import upload
from .develop import flatten_translations
-from .util import get_base_arg_parser
+from .util import get_base_arg_parser, load_json_from_path
def get_arguments() -> argparse.Namespace:
@@ -101,7 +101,7 @@ def run():
for component in components:
comp_strings_path = Path(STRINGS_PATH.format(component))
-strings[component] = json.loads(comp_strings_path.read_text(encoding="utf-8"))
+strings[component] = load_json_from_path(comp_strings_path)
for path, value in update_keys.items():
parts = path.split("::")

View File

@@ -10,7 +10,7 @@ import subprocess
from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR
from .error import ExitApp
-from .util import get_lokalise_token
+from .util import get_lokalise_token, load_json_from_path
FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
DOWNLOAD_DIR = pathlib.Path("build/translations-download").absolute()
@@ -122,7 +122,7 @@ def write_integration_translations():
"""Write integration translations."""
for lang_file in DOWNLOAD_DIR.glob("*.json"):
lang = lang_file.stem
-translations = json.loads(lang_file.read_text())
+translations = load_json_from_path(lang_file)
save_language_translations(lang, translations)

View File

@@ -1,4 +1,5 @@
"""Errors for translations."""
+import json
class ExitApp(Exception):
@@ -8,3 +9,28 @@ class ExitApp(Exception):
"""Initialize the exit app exception."""
self.reason = reason
self.exit_code = exit_code
+
+
+class JSONDecodeErrorWithPath(json.JSONDecodeError):
+    """Subclass of JSONDecodeError with additional properties.
+
+    Additional properties:
+    path: Path to the JSON document being parsed
+    """
+
+    def __init__(self, msg, doc, pos, path):
+        """Initialize."""
+        lineno = doc.count("\n", 0, pos) + 1
+        colno = pos - doc.rfind("\n", 0, pos)
+        errmsg = f"{msg}: file: {path} line {lineno} column {colno} (char {pos})"
+        ValueError.__init__(self, errmsg)
+        self.msg = msg
+        self.doc = doc
+        self.pos = pos
+        self.lineno = lineno
+        self.colno = colno
+        self.path = path
+
+    def __reduce__(self):
+        """Reduce."""
+        return self.__class__, (self.msg, self.doc, self.pos, self.path)
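
As an aside, a small self-contained sketch of how the new exception behaves; the class body is copied from the hunk above, while the broken JSON document and the file path are invented for illustration. The __reduce__ override is what keeps the exception picklable: json.JSONDecodeError defines its own __reduce__ with a three-argument constructor call, so a subclass that takes a fourth constructor argument would otherwise fail to unpickle.

import json
import pickle


class JSONDecodeErrorWithPath(json.JSONDecodeError):
    """Stand-alone copy of the class added above, for demonstration only."""

    def __init__(self, msg, doc, pos, path):
        """Initialize."""
        lineno = doc.count("\n", 0, pos) + 1
        colno = pos - doc.rfind("\n", 0, pos)
        errmsg = f"{msg}: file: {path} line {lineno} column {colno} (char {pos})"
        ValueError.__init__(self, errmsg)
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.lineno = lineno
        self.colno = colno
        self.path = path

    def __reduce__(self):
        """Make the extra path argument survive pickling."""
        return self.__class__, (self.msg, self.doc, self.pos, self.path)


# A strings.json body with a trailing comma; the path is hypothetical.
broken = '{"title": "Demo",}'
try:
    json.loads(broken)
except json.JSONDecodeError as err:
    wrapped = JSONDecodeErrorWithPath(
        err.msg, err.doc, err.pos, "homeassistant/components/demo/strings.json"
    )

# Prints one line naming the offending file:
#   Expecting property name enclosed in double quotes: file:
#   homeassistant/components/demo/strings.json line 1 column 18 (char 17)
print(wrapped)

# Round-trips through pickle thanks to __reduce__; the base class version
# would reconstruct with only three arguments and fail.
assert pickle.loads(pickle.dumps(wrapped)).path.endswith("strings.json")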

View File

@@ -4,7 +4,7 @@ import json
from .const import FRONTEND_DIR
from .download import DOWNLOAD_DIR, run_download_docker
-from .util import get_base_arg_parser
+from .util import get_base_arg_parser, load_json_from_path
FRONTEND_BACKEND_TRANSLATIONS = FRONTEND_DIR / "translations/backend"
@@ -29,7 +29,7 @@ def run():
run_download_docker()
for lang_file in DOWNLOAD_DIR.glob("*.json"):
-translations = json.loads(lang_file.read_text())
+translations = load_json_from_path(lang_file)
to_write_translations = {"component": {}}

View File

@@ -6,6 +6,7 @@ import re
from .const import CORE_PROJECT_ID, FRONTEND_PROJECT_ID, INTEGRATIONS_DIR
from .lokalise import get_api
+from .util import load_json_from_path
FRONTEND_REPO = pathlib.Path("../frontend/")
@@ -164,7 +165,7 @@ def find_and_rename_keys():
if not strings_file.is_file():
continue
-strings = json.loads(strings_file.read_text())
+strings = load_json_from_path(strings_file)
if "title" in strings.get("config", {}):
from_key = f"component::{integration.name}::config::title"
@@ -194,12 +195,12 @@ def interactive_update():
if not strings_file.is_file():
continue
-strings = json.loads(strings_file.read_text())
+strings = load_json_from_path(strings_file)
if "title" not in strings:
continue
-manifest = json.loads((integration / "manifest.json").read_text())
+manifest = load_json_from_path(integration / "manifest.json")
print("Processing", manifest["name"])
print("Translation title", strings["title"])
@@ -247,9 +248,8 @@ def find_frontend_states():
Source key -> target key
Add key to integrations strings.json
"""
-frontend_states = json.loads(
-    (FRONTEND_REPO / "src/translations/en.json").read_text()
-)["state"]
+path = FRONTEND_REPO / "src/translations/en.json"
+frontend_states = load_json_from_path(path)["state"]
# domain => state object
to_write = {}
@@ -307,7 +307,7 @@ def find_frontend_states():
for domain, state in to_write.items():
strings = INTEGRATIONS_DIR / domain / "strings.json"
if strings.is_file():
-content = json.loads(strings.read_text())
+content = load_json_from_path(strings)
else:
content = {}
@@ -326,7 +326,7 @@ def find_frontend_states():
def apply_data_references(to_migrate):
"""Apply references."""
for strings_file in INTEGRATIONS_DIR.glob("*/strings.json"):
-strings = json.loads(strings_file.read_text())
+strings = load_json_from_path(strings_file)
steps = strings.get("config", {}).get("step")
if not steps:

View File

@@ -8,7 +8,7 @@ import subprocess
from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR
from .error import ExitApp
-from .util import get_current_branch, get_lokalise_token
+from .util import get_current_branch, get_lokalise_token, load_json_from_path
FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
LOCAL_FILE = pathlib.Path("build/translations-upload.json").absolute()
@@ -52,7 +52,7 @@ def run_upload_docker():
def generate_upload_data():
"""Generate the data for uploading."""
-translations = json.loads((INTEGRATIONS_DIR.parent / "strings.json").read_text())
+translations = load_json_from_path(INTEGRATIONS_DIR.parent / "strings.json")
translations["component"] = {}
for path in INTEGRATIONS_DIR.glob(f"*{os.sep}strings*.json"):
@@ -66,7 +66,7 @@ def generate_upload_data():
platforms = parent.setdefault("platform", {})
parent = platforms.setdefault(platform, {})
-parent.update(json.loads(path.read_text()))
+parent.update(load_json_from_path(path))
return translations

View File

@@ -1,10 +1,12 @@
"""Translation utils."""
import argparse
+import json
import os
import pathlib
import subprocess
+from typing import Any
-from .error import ExitApp
+from .error import ExitApp, JSONDecodeErrorWithPath
def get_base_arg_parser() -> argparse.ArgumentParser:
@@ -55,3 +57,11 @@ def get_current_branch():
.stdout.decode()
.strip()
)
+
+
+def load_json_from_path(path: pathlib.Path) -> Any:
+    """Load JSON from path."""
+    try:
+        return json.loads(path.read_text())
+    except json.JSONDecodeError as err:
+        raise JSONDecodeErrorWithPath(err.msg, err.doc, err.pos, path) from err
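
And a usage sketch of the helper at a call site. To stay self-contained it re-declares the helper locally and raises a plain ValueError carrying the same message format; the real helper above raises JSONDecodeErrorWithPath instead. The temporary file and its contents are invented for the example.

import json
import pathlib
import tempfile
from typing import Any


def load_json_from_path(path: pathlib.Path) -> Any:
    """Local stand-in for the helper above (simplified error type)."""
    try:
        return json.loads(path.read_text())
    except json.JSONDecodeError as err:
        raise ValueError(
            f"{err.msg}: file: {path} line {err.lineno} column {err.colno} (char {err.pos})"
        ) from err


with tempfile.TemporaryDirectory() as tmp:
    bad = pathlib.Path(tmp) / "strings.json"
    bad.write_text('{"config": {"step": }}')  # invalid JSON

    # Before this change, json.loads(bad.read_text()) would only report
    #   Expecting value: line 1 column 21 (char 20)
    # leaving you to guess which of the many strings.json files is broken.
    try:
        load_json_from_path(bad)
    except ValueError as err:
        print(err)  # Expecting value: file: /tmp/.../strings.json line 1 column 21 (char 20)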