Migrate translations upload (#33926)
* Migrate translations upload * Fix token in download command * Minor cleanup (pull/33930/head)
parent
2edfa82237
commit
e9c412bac6
|
@ -37,7 +37,7 @@ jobs:
|
|||
export LOKALISE_TOKEN="$(lokaliseToken)"
|
||||
export AZURE_BRANCH="$(Build.SourceBranchName)"
|
||||
|
||||
./script/translations_upload
|
||||
python3 -m script.translations upload
|
||||
displayName: 'Upload Translation'
|
||||
|
||||
- job: 'Download'
|
||||
|
|
|
@ -3,13 +3,13 @@ import argparse
|
|||
from pathlib import Path
|
||||
import sys
|
||||
|
||||
from . import download, error
|
||||
from . import download, error, upload
|
||||
|
||||
|
||||
def get_arguments() -> argparse.Namespace:
|
||||
"""Get parsed passed in arguments."""
|
||||
parser = argparse.ArgumentParser(description="Home Assistant Scaffolder")
|
||||
parser.add_argument("action", type=str, choices=["download"])
|
||||
parser.add_argument("action", type=str, choices=["download", "upload"])
|
||||
parser.add_argument("--debug", action="store_true", help="Enable log output")
|
||||
|
||||
arguments = parser.parse_args()
|
||||
|
@ -27,6 +27,8 @@ def main():
|
|||
|
||||
if args.action == "download":
|
||||
download.run(args)
|
||||
elif args.action == "upload":
|
||||
upload.run(args)
|
||||
|
||||
return 0
|
||||
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
"""Translation constants."""
|
||||
import pathlib
|
||||
|
||||
PROJECT_ID = "130246255a974bd3b5e8a1.51616605"
|
||||
DOCKER_IMAGE = "b8329d20280263cad04f65b843e54b9e8e6909a348a678eac959550b5ef5c75f"
|
||||
INTEGRATIONS_DIR = pathlib.Path("homeassistant/components")
|
||||
|
|
|
@ -16,10 +16,8 @@ FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
|
|||
LOCAL_DIR = pathlib.Path("build/translations-download").absolute()
|
||||
|
||||
|
||||
def run_download_docker(args):
|
||||
def run_download_docker():
|
||||
"""Run the Docker image to download the translations."""
|
||||
pipe_null = {} if args.debug else {"stdout": subprocess.DEVNULL}
|
||||
|
||||
print("Running Docker to download latest translations.")
|
||||
run = subprocess.run(
|
||||
[
|
||||
|
@ -31,10 +29,10 @@ def run_download_docker(args):
|
|||
f"lokalise/lokalise-cli@sha256:{DOCKER_IMAGE}",
|
||||
# Lokalise command
|
||||
"lokalise",
|
||||
"export",
|
||||
PROJECT_ID,
|
||||
"--token",
|
||||
get_lokalise_token(),
|
||||
"export",
|
||||
PROJECT_ID,
|
||||
"--export_empty",
|
||||
"skip",
|
||||
"--type",
|
||||
|
@ -42,7 +40,6 @@ def run_download_docker(args):
|
|||
"--unzip_to",
|
||||
"/opt/dest",
|
||||
],
|
||||
**pipe_null,
|
||||
)
|
||||
print()
|
||||
|
||||
|
@ -140,7 +137,7 @@ def run(args):
|
|||
"""Run the script."""
|
||||
LOCAL_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
run_download_docker(args)
|
||||
run_download_docker()
|
||||
|
||||
paths = glob.iglob("build/translations-download/*.json")
|
||||
for path in paths:
|
||||
|
|
|
@ -0,0 +1,77 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Merge all translation sources into a single JSON file."""
|
||||
import json
|
||||
import os
|
||||
import pathlib
|
||||
import re
|
||||
import subprocess
|
||||
|
||||
from .const import DOCKER_IMAGE, INTEGRATIONS_DIR, PROJECT_ID
|
||||
from .error import ExitApp
|
||||
from .util import get_current_branch, get_lokalise_token
|
||||
|
||||
FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
|
||||
LOCAL_FILE = pathlib.Path("build/translations-upload.json").absolute()
|
||||
CONTAINER_FILE = "/opt/src/build/translations-upload.json"
|
||||
LANG_ISO = "en"
|
||||
|
||||
|
||||
def run_upload_docker():
    """Run the Lokalise CLI Docker image to upload the merged translations.

    Mounts the locally built translations file into the container and runs
    the `lokalise import` command against the Home Assistant project.

    Raises:
        ExitApp: If the Docker/Lokalise command exits non-zero.
    """
    print("Running Docker to upload latest translations.")
    # Named `result` (not `run`) to avoid shadowing the module-level run().
    result = subprocess.run(
        [
            "docker",
            "run",
            "-v",
            # Mount the merged JSON file read-only inside the container.
            f"{LOCAL_FILE}:{CONTAINER_FILE}",
            "--rm",
            f"lokalise/lokalise-cli@sha256:{DOCKER_IMAGE}",
            # Lokalise command
            "lokalise",
            "--token",
            get_lokalise_token(),
            "import",
            PROJECT_ID,
            "--file",
            CONTAINER_FILE,
            "--lang_iso",
            LANG_ISO,
            # Keep placeholders verbatim; HA uses its own placeholder syntax.
            "--convert_placeholders",
            "0",
            # Replace existing remote strings with the uploaded ones.
            "--replace",
            "1",
        ],
    )
    print()

    if result.returncode != 0:
        # Bug fix: this is the upload path — the message said "download".
        raise ExitApp("Failed to upload translations")
|
||||
|
||||
|
||||
def run(args):
    """Merge all integration strings files and upload them to Lokalise.

    Args:
        args: Parsed CLI arguments (unused here beyond the shared signature).

    Raises:
        ExitApp: If not run from the dev branch, or the upload fails.
    """
    # Uploads overwrite the remote project, so only allow a clean dev checkout.
    # AZURE_BRANCH covers CI, where HEAD may be detached.
    if get_current_branch() != "dev" and os.environ.get("AZURE_BRANCH") != "dev":
        raise ExitApp(
            "Please only run the translations upload script from a clean checkout of dev."
        )

    translations = {"component": {}}

    for path in INTEGRATIONS_DIR.glob(f"*{os.sep}strings*.json"):
        component = path.parent.name
        match = FILENAME_FORMAT.search(path.name)
        # strings.json -> component-level; strings.<platform>.json -> platform.
        platform = match.group("suffix") if match else None

        parent = translations["component"].setdefault(component, {})

        if platform:
            platforms = parent.setdefault("platform", {})
            parent = platforms.setdefault(platform, {})

        parent.update(json.loads(path.read_text()))

    LOCAL_FILE.parent.mkdir(parents=True, exist_ok=True)
    LOCAL_FILE.write_text(json.dumps(translations, indent=4, sort_keys=True))

    # Bug fix: this call was commented out, so the script built the merged
    # JSON but never actually uploaded it — the whole point of the command.
    run_upload_docker()
|
|
@ -1,6 +1,7 @@
|
|||
"""Translation utils."""
|
||||
import os
|
||||
import pathlib
|
||||
import subprocess
|
||||
|
||||
from .error import ExitApp
|
||||
|
||||
|
@ -20,3 +21,14 @@ def get_lokalise_token():
|
|||
)
|
||||
|
||||
return token_file.read_text().strip()
|
||||
|
||||
|
||||
def get_current_branch():
    """Get the name of the currently checked-out git branch.

    Returns:
        The branch name as reported by `git rev-parse --abbrev-ref HEAD`.

    Raises:
        subprocess.CalledProcessError: If git fails (e.g. not a repository).
    """
    return (
        subprocess.run(
            ["git", "rev-parse", "--abbrev-ref", "HEAD"],
            stdout=subprocess.PIPE,
            # Fail loudly: without check=True a git failure silently returned
            # "", which made the dev-branch guard raise a misleading error.
            check=True,
        )
        .stdout.decode()
        .strip()
    )
|
||||
|
|
|
@ -1,45 +0,0 @@
|
|||
#!/usr/bin/env bash
# Merge all translation source files and upload them to Lokalise.
# NOTE(review): superseded by `python3 -m script.translations upload`.

# Safe bash settings
# -e Exit on command fail
# -u Exit on unset variable
# -o pipefail Exit if piped command has error code
set -eu -o pipefail

# Always run from the repository root, regardless of invocation directory.
cd "$(dirname "$0")/.."

# A Lokalise token must come from the environment or a .lokalise_token file.
if [ -z "${LOKALISE_TOKEN-}" ] && [ ! -f .lokalise_token ] ; then
    echo "Lokalise API token is required to download the latest set of" \
        "translations. Please create an account by using the following link:" \
        "https://lokalise.co/signup/130246255a974bd3b5e8a1.51616605/all/" \
        "Place your token in a new file \".lokalise_token\" in the repo" \
        "root directory."
    exit 1
fi

# Load token from file if not already in the environment
[ -z "${LOKALISE_TOKEN-}" ] && LOKALISE_TOKEN="$(<.lokalise_token)"

PROJECT_ID="130246255a974bd3b5e8a1.51616605"
LOCAL_FILE="$(pwd)/build/translations-upload.json"
LANG_ISO=en

CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)

# Check Travis and Azure environment as well
# Uploads overwrite the remote project, so only allow running from dev.
if [ "${CURRENT_BRANCH-}" != "dev" ] && [ "${AZURE_BRANCH-}" != "dev" ]; then
    echo "Please only run the translations upload script from a clean checkout of dev."
    exit 1
fi

# Build the merged translations-upload.json consumed by the docker run below.
script/translations_upload_merge.py

docker run \
    -v ${LOCAL_FILE}:/opt/src/${LOCAL_FILE} \
    lokalise/lokalise-cli@sha256:2198814ebddfda56ee041a4b427521757dd57f75415ea9693696a64c550cef21 lokalise \
    --token ${LOKALISE_TOKEN} \
    import ${PROJECT_ID} \
    --file /opt/src/${LOCAL_FILE} \
    --lang_iso ${LANG_ISO} \
    --convert_placeholders 0 \
    --replace 1
|
|
@ -1,99 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Merge all translation sources into a single JSON file."""
|
||||
import glob
|
||||
import itertools
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
from typing import Dict, List, Union
|
||||
|
||||
FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
|
||||
|
||||
|
||||
def load_json(filename: str) -> Union[List, Dict]:
    """Load JSON data from a file and return as dict or list.

    Defaults to returning empty dict if file is not found.
    """
    # Bug fix: the original `return {}` after the with-block was unreachable;
    # a missing file raised FileNotFoundError instead of returning {} as the
    # docstring promised.
    try:
        with open(filename, encoding="utf-8") as fdesc:
            return json.loads(fdesc.read())
    except FileNotFoundError:
        return {}
|
||||
|
||||
|
||||
def save_json(filename: str, data: Union[List, Dict]):
    """Save JSON data to a file.

    Returns True on success.

    Raises:
        OSError: If the file cannot be written.
    """
    # Bug fix: the original `return False` after the with-block was
    # unreachable (write failures raise OSError); removed. Also avoid
    # rebinding the `data` parameter to its serialized form.
    serialized = json.dumps(data, sort_keys=True, indent=4)
    with open(filename, "w", encoding="utf-8") as fdesc:
        fdesc.write(serialized)
    return True
|
||||
|
||||
|
||||
def find_strings_files():
    """Return the paths of the strings source files.

    Yields top-level strings*.json files first, then those one directory
    deep, relative to the current working directory.
    """
    patterns = ("strings*.json", f"*{os.sep}strings*.json")
    return itertools.chain.from_iterable(glob.iglob(pat) for pat in patterns)
|
||||
|
||||
|
||||
def get_component_platform(path):
    """Get the component and platform name from the path.

    A path inside a directory means <component>/strings[.<platform>].json;
    a bare filename means strings.<component>.json at the top level.
    """
    directory, filename = os.path.split(path)
    match = FILENAME_FORMAT.search(filename)
    suffix = match.group("suffix") if match else None
    return (directory, suffix) if directory else (suffix, None)
|
||||
|
||||
|
||||
def get_translation_dict(translations, component, platform):
    """Return the dict to hold component translations.

    Creates the intermediate component/platform levels on demand and
    returns the innermost dict that strings should be merged into.
    """
    if not component:
        return translations["component"]

    component_dict = translations["component"].setdefault(component, {})

    if not platform:
        return component_dict

    # Platform strings nest one level deeper under "platform".
    return component_dict.setdefault("platform", {}).setdefault(platform, {})
|
||||
|
||||
|
||||
def main():
    """Run the script."""
    # requirements_all.txt only exists at the repository root, so use it as
    # a cheap sanity check for the working directory.
    if not os.path.isfile("requirements_all.txt"):
        print("Run this from HA root dir")
        return

    root = os.getcwd()
    # Work from inside the components dir so find_strings_files() globs
    # produce paths relative to it (component name == first path segment).
    os.chdir(os.path.join("homeassistant", "components"))

    translations = {"component": {}}

    paths = find_strings_files()
    for path in paths:
        component, platform = get_component_platform(path)
        parent = get_translation_dict(translations, component, platform)
        strings = load_json(path)
        # Merge this file's strings into the appropriate nesting level.
        parent.update(strings)

    # Restore the original working directory before writing output.
    os.chdir(root)

    os.makedirs("build", exist_ok=True)

    save_json(os.path.join("build", "translations-upload.json"), translations)


if __name__ == "__main__":
    main()
|
Loading…
Reference in New Issue