2017-10-04 06:28:44 +00:00
|
|
|
# Copyright 2017 Mycroft AI Inc.
|
2017-05-02 00:07:40 +00:00
|
|
|
#
|
2017-10-04 06:28:44 +00:00
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
2017-05-02 00:07:40 +00:00
|
|
|
#
|
2017-10-04 06:28:44 +00:00
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
2017-05-02 00:07:40 +00:00
|
|
|
#
|
2017-10-04 06:28:44 +00:00
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
2017-05-02 00:07:40 +00:00
|
|
|
#
|
2017-01-28 02:56:11 +00:00
|
|
|
from copy import copy
|
2017-01-24 19:54:37 +00:00
|
|
|
|
2018-04-03 18:36:29 +00:00
|
|
|
import json
|
2016-08-31 02:55:35 +00:00
|
|
|
import requests
|
2018-03-27 23:51:30 +00:00
|
|
|
from requests import HTTPError, RequestException
|
2018-10-03 02:20:26 +00:00
|
|
|
import os
|
|
|
|
import time
|
2019-06-05 23:45:15 +00:00
|
|
|
from threading import Lock
|
2016-08-31 02:55:35 +00:00
|
|
|
|
2017-09-23 12:13:50 +00:00
|
|
|
from mycroft.configuration import Configuration
|
2017-09-22 06:13:50 +00:00
|
|
|
from mycroft.configuration.config import DEFAULT_CONFIG, SYSTEM_CONFIG, \
|
2017-11-22 23:02:02 +00:00
|
|
|
USER_CONFIG
|
2018-09-28 06:26:33 +00:00
|
|
|
from mycroft.identity import IdentityManager, identity_lock
|
2016-12-06 23:10:34 +00:00
|
|
|
from mycroft.version import VersionManager
|
2018-03-27 23:51:30 +00:00
|
|
|
from mycroft.util import get_arch, connected, LOG
|
2016-08-31 02:55:35 +00:00
|
|
|
|
2018-09-28 06:26:33 +00:00
|
|
|
|
2017-09-13 20:13:18 +00:00
|
|
|
# Module-level cache of the pairing status; set by is_paired() once a
# successful pairing check has been performed (falsy until then).
_paired_cache = False
|
2016-09-01 19:04:33 +00:00
|
|
|
|
2016-08-31 02:55:35 +00:00
|
|
|
|
2018-03-27 23:51:30 +00:00
|
|
|
class BackendDown(RequestException):
    """ The backend is reachable over the network but not responding. """
|
|
|
|
|
|
|
|
|
|
|
|
class InternetDown(RequestException):
    """ No internet connection is available. """
|
|
|
|
|
|
|
|
|
2019-01-10 07:40:47 +00:00
|
|
|
class Api:
    """ Generic class to wrap web APIs """
    # Maps a (path, query) request key to the ETag last received for it
    params_to_etag = {}
    # Maps an ETag to the full response it was received with
    etag_to_response = {}

    def __init__(self, path):
        """ Create a wrapper for the backend API rooted at path.

        Arguments:
            path (str): base path of the wrapped API, e.g. "device"
        """
        self.path = path

        # Load the config, skipping the REMOTE_CONFIG since we are
        # getting the info needed to get to it!
        config = Configuration.get([DEFAULT_CONFIG,
                                    SYSTEM_CONFIG,
                                    USER_CONFIG],
                                   cache=False)
        config_server = config.get("server")
        self.url = config_server.get("url")
        self.version = config_server.get("version")
        self.identity = IdentityManager.get()

    def request(self, params):
        """ Send a request after validating the token and completing path.

        Arguments:
            params (dict): request parameters (path, method, headers, ...)

        Returns:
            data returned by the backend
        """
        self.check_token()
        self.build_path(params)
        # Keep a copy so the request can be replayed after a token refresh
        self.old_params = copy(params)
        return self.send(params)

    def check_token(self):
        """ Reload identity from disk and refresh the token if expired. """
        # If the identity hasn't been loaded, load it
        if not self.identity.has_refresh():
            self.identity = IdentityManager.load()
        # If refresh is needed perform a refresh
        if self.identity.refresh and self.identity.is_expired():
            self.identity = IdentityManager.load()
            # if no one else has updated the token refresh it
            if self.identity.is_expired():
                self.refresh_token()

    def refresh_token(self):
        """ Exchange the refresh token for a new access token.

        Only one thread/process performs the refresh; any other caller
        waits on the identity lock and then reloads the saved credentials.
        """
        LOG.debug('Refreshing token')
        if identity_lock.acquire(blocking=False):
            try:
                data = self.send({
                    "path": "auth/token",
                    "headers": {
                        "Authorization": "Bearer " + self.identity.refresh,
                        "Device": self.identity.uuid
                    }
                })
                IdentityManager.save(data, lock=False)
                LOG.debug('Saved credentials')
            except HTTPError as e:
                if e.response.status_code == 401:
                    LOG.error('Could not refresh token, invalid refresh code.')
                else:
                    raise
            finally:
                identity_lock.release()
        else:  # Someone is updating the identity wait for release
            with identity_lock:
                LOG.debug('Refresh is already in progress, waiting until done')
                time.sleep(1.2)
                os.sync()
                self.identity = IdentityManager.load(lock=False)
                LOG.debug('new credentials loaded')

    def send(self, params, no_refresh=False):
        """ Send request to mycroft backend.

        The method handles Etags and will return a cached response value
        if nothing has changed on the remote.

        Arguments:
            params (dict): request parameters
            no_refresh (bool): optional parameter to disable refreshs of token

        Returns:
            Requests response object.
        """
        query_data = frozenset(params.get('query', {}).items())
        params_key = (params.get('path'), query_data)
        etag = self.params_to_etag.get(params_key)

        method = params.get("method", "GET")
        headers = self.build_headers(params)
        data = self.build_data(params)
        json_body = self.build_json(params)
        query = self.build_query(params)
        url = self.build_url(params)

        # For an introduction to the Etag feature check out:
        # https://en.wikipedia.org/wiki/HTTP_ETag
        if etag:
            headers['If-None-Match'] = etag

        response = requests.request(
            method, url, headers=headers, params=query,
            data=data, json=json_body, timeout=(3.05, 15)
        )
        if response.status_code == 304:
            # Etag matched, use response previously cached
            response = self.etag_to_response[etag]
        elif 'ETag' in response.headers:
            etag = response.headers['ETag'].strip('"')
            # Cache response for future lookup when we receive a 304
            self.params_to_etag[params_key] = etag
            self.etag_to_response[etag] = response

        return self.get_response(response, no_refresh)

    def get_response(self, response, no_refresh=False):
        """ Parse response and extract data from response.

        Will try to refresh the access token if it's expired.

        Arguments:
            response (requests Response object): Response to parse
            no_refresh (bool): Disable refreshing of the token

        Returns:
            data fetched from server

        Raises:
            HTTPError: when status is not a success and recovery by
                refreshing the token was not possible.
        """
        data = self.get_data(response)

        if 200 <= response.status_code < 300:
            return data
        elif (not no_refresh and response.status_code == 401 and not
                response.url.endswith("auth/token") and
                self.identity.is_expired()):
            # Access token expired mid-flight; refresh and replay once
            self.refresh_token()
            return self.send(self.old_params, no_refresh=True)
        raise HTTPError(data, response=response)

    def get_data(self, response):
        """ Return the JSON body of response, falling back to raw text
        when the body isn't valid JSON. """
        try:
            return response.json()
        except Exception:
            return response.text

    def build_headers(self, params):
        """ Fill in default Content-Type and Authorization headers and
        store them back into params. """
        headers = params.get("headers", {})
        self.add_content_type(headers)
        self.add_authorization(headers)
        params["headers"] = headers
        return headers

    def add_content_type(self, headers):
        """ Default the Content-Type header to application/json. """
        # `in` instead of the unidiomatic headers.__contains__(...)
        if "Content-Type" not in headers:
            headers["Content-Type"] = "application/json"

    def add_authorization(self, headers):
        """ Default the Authorization header to the device access token. """
        if "Authorization" not in headers:
            headers["Authorization"] = "Bearer " + self.identity.access

    def build_data(self, params):
        """ Return the raw request body from params, if any. """
        return params.get("data")

    def build_json(self, params):
        """ Normalize the json payload, replacing empty strings with None.

        Returns:
            The (possibly modified) json payload, or None if not present.
        """
        # Named json_data so the module-level `json` import isn't shadowed
        json_data = params.get("json")
        if json_data and params["headers"]["Content-Type"] == "application/json":
            for key, value in json_data.items():
                if value == "":
                    json_data[key] = None
            params["json"] = json_data
        return json_data

    def build_query(self, params):
        """ Return the query parameters from params, if any. """
        return params.get("query")

    def build_path(self, params):
        """ Prefix the request path with the API base path, in place. """
        path = params.get("path", "")
        params["path"] = self.path + path
        return params["path"]

    def build_url(self, params):
        """ Build the full url from server url, API version and path. """
        path = params.get("path", "")
        version = params.get("version", self.version)
        return self.url + "/" + version + "/" + path
|
|
|
|
|
2016-08-31 02:55:35 +00:00
|
|
|
|
|
|
|
class DeviceApi(Api):
    """ Web API wrapper for obtaining device-level information """
    # Guards the shared skill settings cache below
    _skill_settings_lock = Lock()
    # (monotonic fetch time, settings) shared by all DeviceApi instances
    _skill_settings = None

    def __init__(self):
        super(DeviceApi, self).__init__("device")

    @staticmethod
    def _get_platform_info():
        """ Read platform name and build from the local configuration.

        Shared by activate() and update_version(), which previously
        duplicated this logic verbatim.

        Returns:
            tuple: (platform, platform_build)
        """
        platform = "unknown"
        platform_build = ""
        # load just the local configs to get platform info
        config = Configuration.get([SYSTEM_CONFIG,
                                    USER_CONFIG],
                                   cache=False)
        if "enclosure" in config:
            platform = config.get("enclosure").get("platform", "unknown")
            platform_build = config.get("enclosure").get("platform_build", "")
        return platform, platform_build

    def get_code(self, state):
        """ Fetch a pairing code from the backend.

        Arguments:
            state (str): token identifying this pairing session

        Returns:
            pairing code data returned by the backend
        """
        IdentityManager.update()
        return self.request({
            "path": "/code?state=" + state
        })

    def activate(self, state, token):
        """ Activate (pair) this device with the backend.

        Arguments:
            state (str): token identifying this pairing session
            token (str): pairing token received from the backend

        Returns:
            backend response (including the new identity)
        """
        version = VersionManager.get()
        platform, platform_build = self._get_platform_info()

        return self.request({
            "method": "POST",
            "path": "/activate",
            "json": {"state": state,
                     "token": token,
                     "coreVersion": version.get("coreVersion"),
                     "platform": platform,
                     "platform_build": platform_build,
                     "enclosureVersion": version.get("enclosureVersion")}
        })

    def update_version(self):
        """ Report core/enclosure version and platform info to the backend.
        """
        version = VersionManager.get()
        platform, platform_build = self._get_platform_info()

        return self.request({
            "method": "PATCH",
            "path": "/" + self.identity.uuid,
            "json": {"coreVersion": version.get("coreVersion"),
                     "platform": platform,
                     "platform_build": platform_build,
                     "enclosureVersion": version.get("enclosureVersion")}
        })

    def send_email(self, title, body, sender):
        """ Send an email to the account this device is paired to.

        Arguments:
            title (str): subject of the email
            body (str): body text of the email
            sender (str): name of the sending skill/component
        """
        return self.request({
            "method": "PUT",
            "path": "/" + self.identity.uuid + "/message",
            "json": {"title": title, "body": body, "sender": sender}
        })

    def report_metric(self, name, data):
        """ Report a metric to the backend.

        Arguments:
            name (str): metric name, appended to the request path
            data (dict): metric payload
        """
        return self.request({
            "method": "POST",
            "path": "/" + self.identity.uuid + "/metric/" + name,
            "json": data
        })

    def get(self):
        """ Retrieve all device information from the web backend """
        return self.request({
            "path": "/" + self.identity.uuid
        })

    def get_settings(self):
        """ Retrieve device settings information from the web backend

        Returns:
            str: JSON string with user configuration information.
        """
        return self.request({
            "path": "/" + self.identity.uuid + "/setting"
        })

    def get_location(self):
        """ Retrieve device location information from the web backend

        Returns:
            str: JSON string with user location.
        """
        return self.request({
            "path": "/" + self.identity.uuid + "/location"
        })

    def get_subscription(self):
        """
        Get information about type of subscription this unit is connected
        to.

        Returns: dictionary with subscription information
        """
        return self.request({
            'path': '/' + self.identity.uuid + '/subscription'})

    @property
    def is_subscriber(self):
        """
        status of subscription. True if device is connected to a paying
        subscriber.
        """
        try:
            return self.get_subscription().get('@type') != 'free'
        except Exception:
            # If can't retrieve, assume not paired and not a subscriber yet
            return False

    def get_subscriber_voice_url(self, voice=None):
        """ Get the download link for a premium voice.

        Arguments:
            voice: currently unused selector for the voice

        Returns:
            str: url of the voice, or None for unsupported architectures
        """
        self.check_token()
        archs = {'x86_64': 'x86_64', 'armv7l': 'arm', 'aarch64': 'arm'}
        arch = archs.get(get_arch())
        if arch:
            path = '/' + self.identity.uuid + '/voice?arch=' + arch
            return self.request({'path': path})['link']

    def get_oauth_token(self, dev_cred):
        """
        Get Oauth token for dev_credential dev_cred.

        Argument:
            dev_cred: development credentials identifier

        Returns:
            json string containing token and additional information
        """
        return self.request({
            "method": "GET",
            "path": "/" + self.identity.uuid + "/token/" + str(dev_cred)
        })

    def get_skill_settings(self):
        """ Fetch all skill settings.

        The result is cached at class level and refreshed when older
        than 30 seconds; the lock makes concurrent fetches safe.
        """
        with DeviceApi._skill_settings_lock:
            if (DeviceApi._skill_settings is None or
                    time.monotonic() > DeviceApi._skill_settings[0] + 30):
                DeviceApi._skill_settings = (
                    time.monotonic(),
                    self.request({
                        "method": "GET",
                        "path": "/" + self.identity.uuid + "/skill"
                    })
                )
            return DeviceApi._skill_settings[1]

    def upload_skill_metadata(self, settings_meta):
        """ Upload skill metadata.

        Arguments:
            settings_meta (dict): settings_meta typecasted to suite the backend
        """
        return self.request({
            "method": "PUT",
            "path": "/" + self.identity.uuid + "/skill",
            "json": settings_meta
        })

    def delete_skill_metadata(self, skill_gid):
        """ Delete the current skill metadata from backend

        TODO: Real implementation when method exists on backend
        Args:
            skill_gid (str): skill_gid identifying the skill
        """
        try:
            LOG.debug("Deleting remote metadata for {}".format(skill_gid))
            self.request({
                "method": "DELETE",
                "path": ("/" + self.identity.uuid + "/skill" +
                         "/{}".format(skill_gid))
            })
        except Exception as e:
            # Best effort: log and continue (previous message was garbled)
            LOG.error("Failed to delete remote metadata: {}".format(e))

    def upload_skills_data(self, data):
        """ Upload skills.json file. This file contains a manifest of installed
        and failed installations for use with the Marketplace.

        Arguments:
            data: dictionary with skills data from msm

        Raises:
            ValueError: if data is not a dict
        """
        if not isinstance(data, dict):
            raise ValueError('data must be of type dict')

        # Strip the skills.json down to the bare essentials
        to_send = {}
        if 'blacklist' in data:
            to_send['blacklist'] = data['blacklist']
        else:
            LOG.warning('skills manifest lacks blacklist entry')
            to_send['blacklist'] = []

        # Make sure skills doesn't contain duplicates (keep only last)
        if 'skills' in data:
            skills = {s['name']: s for s in data['skills']}
            to_send['skills'] = list(skills.values())
        else:
            LOG.warning('skills manifest lacks skills entry')
            to_send['skills'] = []

        for s in to_send['skills']:
            # Remove optional fields the backend objects to
            if 'update' in s:
                s.pop('update')

            # Finalize skill_gid with uuid if needed
            s['skill_gid'] = s.get('skill_gid', '').replace(
                '@|', '@{}|'.format(self.identity.uuid))

        self.request({
            "method": "PUT",
            "path": "/" + self.identity.uuid + "/skillJson",
            "json": to_send
        })
|
|
|
|
|
2016-09-08 19:40:54 +00:00
|
|
|
|
|
|
|
class STTApi(Api):
    """ Web API wrapper for performing Speech to Text (STT) """

    def __init__(self, path):
        super().__init__(path)

    def stt(self, audio, language, limit):
        """ Web API wrapper for performing Speech to Text (STT)

        Args:
            audio (bytes): The recorded audio, as in a FLAC file
            language (str): A BCP-47 language code, e.g. "en-US"
            limit (int): Maximum minutes to transcribe(?)

        Returns:
            str: JSON structure with transcription results
        """
        stt_request = {
            "method": "POST",
            "headers": {"Content-Type": "audio/x-flac"},
            "query": {"lang": language, "limit": limit},
            "data": audio,
        }
        return self.request(stt_request)
|
2017-06-07 10:00:29 +00:00
|
|
|
|
2017-06-07 16:31:38 +00:00
|
|
|
|
2017-06-13 10:35:06 +00:00
|
|
|
def has_been_paired():
    """ Determine if this device has ever been paired with a web backend

    Returns:
        bool: True if ever paired with backend (not factory reset)
    """
    # This forces a load from the identity file in case the pairing state
    # has recently changed.  (Renamed local from `id`, which shadowed the
    # builtin id().)
    identity = IdentityManager.load()
    return identity.uuid is not None and identity.uuid != ""
|
|
|
|
|
|
|
|
|
2018-03-27 23:51:30 +00:00
|
|
|
def is_paired(ignore_errors=True):
    """Determine if this device is actively paired with a web backend

    Determines if the installation of Mycroft has been paired by the user
    with the backend system, and if that pairing is still active.

    Returns:
        bool: True if paired with backend
    """
    global _paired_cache
    # A truthy cache is trusted: once paired, the unit is assumed to stay
    # paired.  Un-pairing must restart the system (or clear this value);
    # the Mark 1 does perform a restart on RESET.
    if not _paired_cache:
        device_api = DeviceApi()
        uuid = device_api.identity.uuid
        _paired_cache = uuid and check_remote_pairing(ignore_errors)
    return _paired_cache
|
|
|
|
|
|
|
|
|
|
|
|
def check_remote_pairing(ignore_errors):
    """Check that a basic backend endpoint accepts our pairing.

    Arguments:
        ignore_errors (bool): True if errors should be ignored when
            performing the check

    Returns:
        True if pairing checks out, otherwise False.
    """
    error = None
    try:
        DeviceApi().get()
        return True
    except HTTPError as e:
        # A 401 means the backend rejected our identity: not paired
        if e.response.status_code == 401:
            return False
        error = e
    except Exception as e:
        error = e

    LOG.warning('Could not get device info: {}'.format(repr(error)))

    if ignore_errors:
        return False

    # Escalate: distinguish backend trouble from missing connectivity
    if not isinstance(error, HTTPError):
        raise error
    if connected():
        raise BackendDown from error
    raise InternetDown from error
|