Support serving of backend translations (#12453)
* Add view to support backend translation fetching * Load backend translations from component json * Translations for season sensor * Scripts to merge and unpack Lokalise translations * Fix copy paste error * Serve post-lokalise translations to frontend * Linting * Auto-deploy translations with Travis * Commit post-lokalise translation files * Split logic into more helper functions * Fall back to English for missing keys * Move local translation copies to `.translations` * Linting * Initial tests * Remove unnecessary file check * Convert translation helper to async/await * Convert translation helper tests to async/await * Use set subtraction to find missing_components * load_translation_files use component->file mapping * Remove duplicated resources fetching. Get to take advantage of the slick Python 3.5 dict merging here. * Switch to live project ID
parent
a60712d826
commit
b434ffba2d
|
@ -103,3 +103,6 @@ desktop.ini
|
|||
|
||||
# mypy
|
||||
/.mypy_cache/*
|
||||
|
||||
# Secrets
|
||||
.lokalise_token
|
||||
|
|
|
@ -28,4 +28,13 @@ cache:
|
|||
install: pip install -U tox coveralls
|
||||
language: python
|
||||
script: travis_wait 30 tox --develop
|
||||
services:
|
||||
- docker
|
||||
before_deploy:
|
||||
- docker pull lokalise/lokalise-cli
|
||||
deploy:
|
||||
provider: script
|
||||
script: script/travis_deploy
|
||||
on:
|
||||
branch: dev
|
||||
after_success: coveralls
|
||||
|
|
|
@ -21,6 +21,7 @@ from homeassistant.components.http.const import KEY_AUTHENTICATED
|
|||
from homeassistant.config import find_config_file, load_yaml_config_file
|
||||
from homeassistant.const import CONF_NAME, EVENT_THEMES_UPDATED
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.translation import async_get_translations
|
||||
from homeassistant.loader import bind_hass
|
||||
|
||||
REQUIREMENTS = ['home-assistant-frontend==20180228.1']
|
||||
|
@ -379,6 +380,8 @@ def async_setup(hass, config):
|
|||
|
||||
async_setup_themes(hass, conf.get(CONF_THEMES))
|
||||
|
||||
hass.http.register_view(TranslationsView)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
@ -541,6 +544,23 @@ class ThemesView(HomeAssistantView):
|
|||
})
|
||||
|
||||
|
||||
class TranslationsView(HomeAssistantView):
    """View to return backend defined translations."""

    url = '/api/translations/{language}'
    name = 'api:translations'

    async def get(self, request, language):
        """Return translations for the requested language.

        Missing keys fall back to English via async_get_translations.
        """
        hass = request.app['hass']

        # Use a native coroutine for consistency with the rest of the
        # translation code, instead of the legacy @asyncio.coroutine /
        # yield from style.
        resources = await async_get_translations(hass, language)
        return self.json({
            'resources': resources,
        })
|
||||
|
||||
|
||||
def _fingerprint(path):
|
||||
"""Fingerprint a file."""
|
||||
with open(path) as fil:
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
{
|
||||
"state": {
|
||||
"autumn": "Autumn",
|
||||
"spring": "Spring",
|
||||
"summer": "Summer",
|
||||
"winter": "Winter"
|
||||
}
|
||||
}
|
|
@ -21,10 +21,10 @@ _LOGGER = logging.getLogger(__name__)
|
|||
NORTHERN = 'northern'
|
||||
SOUTHERN = 'southern'
|
||||
EQUATOR = 'equator'
|
||||
STATE_SPRING = 'Spring'
|
||||
STATE_SUMMER = 'Summer'
|
||||
STATE_AUTUMN = 'Autumn'
|
||||
STATE_WINTER = 'Winter'
|
||||
STATE_SPRING = 'spring'
|
||||
STATE_SUMMER = 'summer'
|
||||
STATE_AUTUMN = 'autumn'
|
||||
STATE_WINTER = 'winter'
|
||||
TYPE_ASTRONOMICAL = 'astronomical'
|
||||
TYPE_METEOROLOGICAL = 'meteorological'
|
||||
VALID_TYPES = [TYPE_ASTRONOMICAL, TYPE_METEOROLOGICAL]
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
{
|
||||
"state": {
|
||||
"spring": "Spring",
|
||||
"summer": "Summer",
|
||||
"autumn": "Autumn",
|
||||
"winter": "Winter"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,126 @@
|
|||
"""Translation string lookup helpers."""
|
||||
import logging
|
||||
# pylint: disable=unused-import
|
||||
from typing import Optional # NOQA
|
||||
from os import path
|
||||
|
||||
from homeassistant.loader import get_component, bind_hass
|
||||
from homeassistant.util.json import load_json
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
TRANSLATION_STRING_CACHE = 'translation_string_cache'
|
||||
|
||||
|
||||
def recursive_flatten(prefix, data):
    """Return a flattened representation of dict data.

    Nested dict keys are joined with '.' and prefixed with `prefix`.
    """
    flattened = {}
    for key, value in data.items():
        full_key = '{}{}'.format(prefix, key)
        if isinstance(value, dict):
            # Recurse into nested dicts, extending the key prefix.
            flattened.update(recursive_flatten(full_key + '.', value))
        else:
            flattened[full_key] = value
    return flattened
|
||||
|
||||
|
||||
def flatten(data):
    """Return a flattened representation of dict data.

    Nested keys are joined with '.', e.g. {'a': {'b': 1}} -> {'a.b': 1}.
    """
    return recursive_flatten('', data)
|
||||
|
||||
|
||||
def component_translation_file(component, language):
    """Return the translation json file location for a component.

    For platform components ('<domain>.<platform>') the platform part
    names the file; package components use just '<language>.json'.
    """
    # Platform components are addressed as '<domain>.<platform>'.
    name = component.split('.', 1)[1] if '.' in component else component

    module = get_component(component)
    component_path = path.dirname(module.__file__)

    # A component that is a package (__init__.py) keeps no name prefix
    # on its translation files.
    if module.__name__ == module.__package__:
        filename = '{}.json'.format(language)
    else:
        filename = '{}.{}.json'.format(name, language)

    return path.join(component_path, '.translations', filename)
|
||||
|
||||
|
||||
def load_translations_files(translation_files):
    """Load and parse translation.json files.

    `translation_files` maps component name -> json file path.
    Returns a dict mapping component name -> parsed translation data.
    """
    # Dict comprehension instead of a manual append-style loop.
    return {
        component: load_json(translation_file)
        for component, translation_file in translation_files.items()
    }
|
||||
|
||||
|
||||
def build_resources(translation_cache, components):
    """Build the resources response for the given components.

    `translation_cache` maps component name -> translation dict.
    Returns a dict keyed by domain with all component/platform
    translations for that domain merged together.
    """
    resources = {}
    for component in components:
        # 'switch.test' -> 'switch'; a plain domain maps to itself.
        domain = component.split('.', 1)[0]

        # Add the translations for this component to the domain
        # resources. Since clients cannot determine which platform an
        # entity belongs to, all translations for a domain are
        # returned together.
        resources.setdefault(domain, {}).update(
            translation_cache[component])

    return resources
|
||||
|
||||
|
||||
@bind_hass
async def async_get_component_resources(hass, language):
    """Return translation resources for all loaded components.

    Results are cached per language in hass.data; only components whose
    translations have not been loaded yet hit the filesystem.
    """
    # Lazily create the per-language cache structure.
    if TRANSLATION_STRING_CACHE not in hass.data:
        hass.data[TRANSLATION_STRING_CACHE] = {}
    if language not in hass.data[TRANSLATION_STRING_CACHE]:
        hass.data[TRANSLATION_STRING_CACHE][language] = {}
    translation_cache = hass.data[TRANSLATION_STRING_CACHE][language]

    # Get the set of currently loaded components
    components = hass.config.components

    # Calculate which components are missing from the cache
    missing_components = components - set(translation_cache)
    missing_files = {}
    for component in missing_components:
        missing_files[component] = component_translation_file(
            component, language)

    # Load missing files off the event loop (blocking file I/O)
    if missing_files:
        loaded_translations = await hass.async_add_job(
            load_translations_files, missing_files)

        # Update cache
        for component, translation_data in loaded_translations.items():
            translation_cache[component] = translation_data

    resources = build_resources(translation_cache, components)

    # Return the component translations resources under the 'component'
    # translation namespace
    return flatten({'component': resources})
|
||||
|
||||
|
||||
@bind_hass
async def async_get_translations(hass, language):
    """Return all backend translations.

    Falls back to the English strings for keys missing from the
    requested language.
    """
    resources = await async_get_component_resources(hass, language)
    if language == 'en':
        return resources

    # Fetch the English resources, as a fallback for missing keys
    base_resources = await async_get_component_resources(hass, 'en')
    return {**base_resources, **resources}
|
|
@ -32,13 +32,13 @@ def load_json(filename: str, default: Union[List, Dict] = _UNDEFINED) \
|
|||
return {} if default is _UNDEFINED else default
|
||||
|
||||
|
||||
def save_json(filename: str, config: Union[List, Dict]):
|
||||
def save_json(filename: str, data: Union[List, Dict]):
|
||||
"""Save JSON data to a file.
|
||||
|
||||
Returns True on success.
|
||||
"""
|
||||
try:
|
||||
data = json.dumps(config, sort_keys=True, indent=4)
|
||||
data = json.dumps(data, sort_keys=True, indent=4)
|
||||
with open(filename, 'w', encoding='utf-8') as fdesc:
|
||||
fdesc.write(data)
|
||||
return True
|
||||
|
|
|
@ -0,0 +1,39 @@
|
|||
#!/usr/bin/env bash

# Download the latest translations from Lokalise and unpack them into
# per-component .translations directories.

# Safe bash settings
# -e Exit on command fail
# -u Exit on unset variable
# -o pipefail Exit if piped command has error code
set -eu -o pipefail

cd "$(dirname "$0")/.."

# A Lokalise token is required, either in the environment or in a
# .lokalise_token file at the repo root.
if [ -z "${LOKALISE_TOKEN-}" ] && [ ! -f .lokalise_token ] ; then
    echo "Lokalise API token is required to download the latest set of" \
        "translations. Please create an account by using the following link:" \
        "https://lokalise.co/signup/130246255a974bd3b5e8a1.51616605/all/" \
        "Place your token in a new file \".lokalise_token\" in the repo" \
        "root directory."
    exit 1
fi

# Load token from file if not already in the environment
[ -z "${LOKALISE_TOKEN-}" ] && LOKALISE_TOKEN="$(<.lokalise_token)"

PROJECT_ID="130246255a974bd3b5e8a1.51616605"
LOCAL_DIR="$(pwd)/build/translations-download"
FILE_FORMAT=json

mkdir -p ${LOCAL_DIR}

# Export translations via the dockerized Lokalise CLI; keys with empty
# translations are skipped and the archive is unzipped into LOCAL_DIR.
docker pull lokalise/lokalise-cli
docker run \
    -v ${LOCAL_DIR}:/opt/dest/locale \
    lokalise/lokalise-cli lokalise \
    --token ${LOKALISE_TOKEN} \
    export ${PROJECT_ID} \
    --export_empty skip \
    --type json \
    --unzip_to /opt/dest

# Split the downloaded per-language files into per-component files.
script/translations_download_split.py
|
|
@ -0,0 +1,81 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Split the downloaded translation files into per-component files."""
|
||||
import glob
|
||||
import os
|
||||
import re
|
||||
|
||||
from homeassistant.util import json as json_util
|
||||
|
||||
FILENAME_FORMAT = re.compile(r'strings\.(?P<suffix>\w+)\.json')
|
||||
|
||||
|
||||
def get_language(path):
    """Get the language code for the given file path."""
    # The filename (without its .json extension) is the language code.
    filename = os.path.basename(path)
    language, _extension = os.path.splitext(filename)
    return language
|
||||
|
||||
|
||||
def get_component_path(lang, component):
    """Get the component translation path.

    Package components get their own .translations directory;
    single-file components share components/.translations.
    """
    base = os.path.join("homeassistant", "components")
    if os.path.isdir(os.path.join(base, component)):
        return os.path.join(
            base, component, ".translations", "{}.json".format(lang))
    return os.path.join(
        base, ".translations", "{}.{}.json".format(component, lang))
|
||||
|
||||
|
||||
def get_platform_path(lang, component, platform):
    """Get the platform translation path.

    Package platforms get their own .translations directory;
    single-file platforms share the component's .translations.
    """
    platform_dir = os.path.join(
        "homeassistant", "components", component, platform)
    if os.path.isdir(platform_dir):
        return os.path.join(
            platform_dir, ".translations", "{}.json".format(lang))
    return os.path.join(
        "homeassistant", "components", component, ".translations",
        "{}.{}.json".format(platform, lang))
|
||||
|
||||
|
||||
def get_component_translations(translations):
    """Get the component level translations.

    Returns a copy of `translations` without the nested per-platform
    'platform' entry; the input dict is not modified.
    """
    return {
        key: value for key, value in translations.items()
        if key != 'platform'
    }
|
||||
|
||||
|
||||
def save_language_translations(lang, translations):
    """Distribute the translations for this language.

    Writes component-level strings and each platform's strings to their
    respective .translations json files, creating directories as needed.
    """
    components = translations.get('component', {})
    for component, component_translations in components.items():
        # Component-level strings (everything except 'platform').
        base_translations = get_component_translations(component_translations)
        if base_translations:
            path = get_component_path(lang, component)
            os.makedirs(os.path.dirname(path), exist_ok=True)
            json_util.save_json(path, base_translations)

        # Per-platform strings nested under 'platform'.
        for platform, platform_translations in component_translations.get(
                'platform', {}).items():
            path = get_platform_path(lang, component, platform)
            os.makedirs(os.path.dirname(path), exist_ok=True)
            json_util.save_json(path, platform_translations)
|
||||
|
||||
|
||||
def main():
    """Main section of the script.

    Reads each downloaded per-language file from
    build/translations-download and distributes its strings to the
    per-component .translations files.
    """
    # Sanity check that we run from the repo root.
    if not os.path.isfile("requirements_all.txt"):
        print("Run this from HA root dir")
        return

    paths = glob.iglob("build/translations-download/*.json")
    for path in paths:
        lang = get_language(path)
        translations = json_util.load_json(path)
        save_language_translations(lang, translations)
|
|
@ -0,0 +1,44 @@
|
|||
#!/usr/bin/env bash

# Merge the English translation strings and upload them to Lokalise.

# Safe bash settings
# -e Exit on command fail
# -u Exit on unset variable
# -o pipefail Exit if piped command has error code
set -eu -o pipefail

cd "$(dirname "$0")/.."

# A Lokalise token is required, either in the environment or in a
# .lokalise_token file at the repo root.
if [ -z "${LOKALISE_TOKEN-}" ] && [ ! -f .lokalise_token ] ; then
    echo "Lokalise API token is required to download the latest set of" \
        "translations. Please create an account by using the following link:" \
        "https://lokalise.co/signup/130246255a974bd3b5e8a1.51616605/all/" \
        "Place your token in a new file \".lokalise_token\" in the repo" \
        "root directory."
    exit 1
fi

# Load token from file if not already in the environment
[ -z "${LOKALISE_TOKEN-}" ] && LOKALISE_TOKEN="$(<.lokalise_token)"

PROJECT_ID="130246255a974bd3b5e8a1.51616605"
LOCAL_FILE="$(pwd)/build/translations-upload.json"
LANG_ISO=en

CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)

# Uploading replaces strings in Lokalise, so only allow it from dev
# (locally or on Travis).
if [ "${CURRENT_BRANCH-}" != "dev" ] && [ "${TRAVIS_BRANCH-}" != "dev" ] ; then
    echo "Please only run the translations upload script from a clean checkout of dev."
    exit 1
fi

# Merge all strings*.json sources into build/translations-upload.json.
script/translations_upload_merge.py

# Import the merged file into Lokalise, replacing existing values.
# NOTE(review): LOCAL_FILE is an absolute path, so the container-side
# path /opt/src/${LOCAL_FILE} contains a doubled absolute path — confirm
# this volume mapping is intended.
docker pull lokalise/lokalise-cli
docker run \
    -v ${LOCAL_FILE}:/opt/src/${LOCAL_FILE} \
    lokalise/lokalise-cli lokalise \
    --token ${LOKALISE_TOKEN} \
    import ${PROJECT_ID} \
    --file /opt/src/${LOCAL_FILE} \
    --lang_iso ${LANG_ISO} \
    --replace 1
|
|
@ -0,0 +1,81 @@
|
|||
#!/usr/bin/env python3
|
||||
"""Merge all translation sources into a single JSON file."""
|
||||
import glob
|
||||
import itertools
|
||||
import os
|
||||
import re
|
||||
|
||||
from homeassistant.util import json as json_util
|
||||
|
||||
FILENAME_FORMAT = re.compile(r'strings\.(?P<suffix>\w+)\.json')
|
||||
|
||||
|
||||
def find_strings_files():
    """Return the paths of the strings source files.

    Looks for strings*.json at the top level (single-file components)
    and one directory down (package components / platforms). The
    current working directory must be homeassistant/components.
    """
    top_level = glob.iglob("strings*.json")
    nested = glob.iglob("*{}strings*.json".format(os.sep))
    return itertools.chain(top_level, nested)
|
||||
|
||||
|
||||
def get_component_platform(path):
    """Get the component and platform name from the path.

    '<component>/strings.<platform>.json' -> (component, platform);
    'strings.<component>.json' -> (component, None).
    """
    directory, filename = os.path.split(path)
    match = FILENAME_FORMAT.search(filename)
    suffix = match.group('suffix') if match else None
    if not directory:
        # Top-level file: the suffix is the component itself.
        return suffix, None
    return directory, suffix
|
||||
|
||||
|
||||
def get_translation_dict(translations, component, platform):
    """Return the dict to hold translations for component/platform.

    `translations` is the accumulated {'component': {...}} structure.
    Intermediate dicts are created on demand; repeated calls for the
    same component return the same dict.
    """
    if not component:
        return translations['component']

    # Bug fix: membership must be checked in the 'component' mapping,
    # not the top-level dict (which only has the key 'component').
    # The old check was always true, so a second call for the same
    # component clobbered previously merged translations.
    if component not in translations['component']:
        translations['component'][component] = {}

    component_dict = translations['component'][component]

    if not platform:
        return component_dict

    if 'platform' not in component_dict:
        component_dict['platform'] = {}

    if platform not in component_dict['platform']:
        component_dict['platform'][platform] = {}

    return component_dict['platform'][platform]
|
||||
|
||||
|
||||
def main():
    """Main section of the script.

    Collects all strings*.json sources under homeassistant/components
    and merges them into build/translations-upload.json.
    """
    # Sanity check that we run from the repo root.
    if not os.path.isfile("requirements_all.txt"):
        print("Run this from HA root dir")
        return

    root = os.getcwd()
    # The glob patterns in find_strings_files are relative to the
    # components directory.
    os.chdir(os.path.join("homeassistant", "components"))

    translations = {
        'component': {}
    }

    paths = find_strings_files()
    for path in paths:
        component, platform = get_component_platform(path)
        parent = get_translation_dict(translations, component, platform)
        strings = json_util.load_json(path)
        parent.update(strings)

    os.chdir(root)

    os.makedirs("build", exist_ok=True)

    json_util.save_json(
        os.path.join("build", "translations-upload.json"), translations)
|
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env bash

# Travis deploy hook: upload the current translation strings to
# Lokalise (runs on the dev branch only, see .travis.yml).

# Safe bash settings
# -e Exit on command fail
# -u Exit on unset variable
# -o pipefail Exit if piped command has error code
set -eu -o pipefail

cd "$(dirname "$0")/.."

script/translations_upload
|
|
@ -0,0 +1,108 @@
|
|||
"""Test the translation helper."""
|
||||
# pylint: disable=protected-access
|
||||
from os import path
|
||||
|
||||
import homeassistant.helpers.translation as translation
|
||||
from homeassistant.setup import async_setup_component
|
||||
|
||||
|
||||
def test_flatten():
    """Test the flatten function."""
    nested = {
        "parent1": {
            "child1": "data1",
            "child2": "data2",
        },
        "parent2": "data3",
    }

    # Nested keys are joined with '.'; scalar values pass through.
    assert translation.flatten(nested) == {
        "parent1.child1": "data1",
        "parent1.child2": "data2",
        "parent2": "data3",
    }
|
||||
|
||||
|
||||
async def test_component_translation_file(hass):
    """Test the component translation file function."""
    assert await async_setup_component(hass, 'switch', {
        'switch': {'platform': 'test'}
    })
    # NOTE(review): the configs below are set literals ({'test_standalone'}),
    # not dicts — confirm async_setup_component accepts a set here,
    # otherwise these should be {'test_standalone': {}} / {'test_package': {}}.
    assert await async_setup_component(hass, 'test_standalone', {
        'test_standalone'
    })
    assert await async_setup_component(hass, 'test_package', {
        'test_package'
    })

    # Platform component: <domain>/.translations/<platform>.<lang>.json
    assert path.normpath(translation.component_translation_file(
        'switch.test', 'en')) == path.normpath(hass.config.path(
            'custom_components', 'switch', '.translations', 'test.en.json'))

    # Single-file component: .translations/<component>.<lang>.json
    assert path.normpath(translation.component_translation_file(
        'test_standalone', 'en')) == path.normpath(hass.config.path(
            'custom_components', '.translations', 'test_standalone.en.json'))

    # Package component: <component>/.translations/<lang>.json
    assert path.normpath(translation.component_translation_file(
        'test_package', 'en')) == path.normpath(hass.config.path(
            'custom_components', 'test_package', '.translations', 'en.json'))
|
||||
|
||||
|
||||
def test_load_translations_files(hass):
    """Test the load translation files function."""
    # Test one valid and one invalid file: the invalid (missing) file
    # should yield an empty dict rather than raise.
    file1 = hass.config.path(
        'custom_components', 'switch', '.translations', 'test.en.json')
    file2 = hass.config.path(
        'custom_components', 'switch', '.translations', 'invalid.json')
    assert translation.load_translations_files({
        'switch.test': file1,
        'invalid': file2
    }) == {
        'switch.test': {
            'state': {
                'string1': 'Value 1',
                'string2': 'Value 2',
            }
        },
        'invalid': {},
    }
|
||||
|
||||
|
||||
async def test_get_translations(hass):
    """Test the get translations helper."""
    # No components loaded yet -> no translations.
    translations = await translation.async_get_translations(hass, 'en')
    assert translations == {}

    assert await async_setup_component(hass, 'switch', {
        'switch': {'platform': 'test'}
    })

    # English strings come from the test platform's .translations file.
    translations = await translation.async_get_translations(hass, 'en')
    assert translations == {
        'component.switch.state.string1': 'Value 1',
        'component.switch.state.string2': 'Value 2',
    }

    # A fully translated language returns its own values.
    translations = await translation.async_get_translations(hass, 'de')
    assert translations == {
        'component.switch.state.string1': 'German Value 1',
        'component.switch.state.string2': 'German Value 2',
    }

    # Test a partial translation: missing keys fall back to English.
    translations = await translation.async_get_translations(hass, 'es')
    assert translations == {
        'component.switch.state.string1': 'Spanish Value 1',
        'component.switch.state.string2': 'Value 2',
    }

    # Test that an untranslated language falls back to English.
    translations = await translation.async_get_translations(
        hass, 'invalid-language')
    assert translations == {
        'component.switch.state.string1': 'Value 1',
        'component.switch.state.string2': 'Value 2',
    }
|
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"state": {
|
||||
"string1": "German Value 1",
|
||||
"string2": "German Value 2"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"state": {
|
||||
"string1": "Value 1",
|
||||
"string2": "Value 2"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,5 @@
|
|||
{
|
||||
"state": {
|
||||
"string1": "Spanish Value 1"
|
||||
}
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
"""Provide a mock package component."""
|
||||
DOMAIN = 'test_package'
|
||||
|
||||
|
||||
def setup(hass, config):
|
||||
"""Mock a successful setup."""
|
||||
return True
|
|
@ -0,0 +1,7 @@
|
|||
"""Provide a mock standalone component."""
|
||||
DOMAIN = 'test_standalone'
|
||||
|
||||
|
||||
def setup(hass, config):
|
||||
"""Mock a successful setup."""
|
||||
return True
|
Loading…
Reference in New Issue