2019-02-13 20:21:14 +00:00
|
|
|
"""Support for functionality to download files."""
|
2013-12-11 08:07:30 +00:00
|
|
|
import logging
|
2016-02-19 05:27:50 +00:00
|
|
|
import os
|
2013-12-11 08:07:30 +00:00
|
|
|
import re
|
2014-02-14 19:57:42 +00:00
|
|
|
import threading
|
2013-12-11 08:07:30 +00:00
|
|
|
|
2016-01-29 05:37:08 +00:00
|
|
|
import requests
|
2016-04-13 03:21:27 +00:00
|
|
|
import voluptuous as vol
|
2016-01-29 05:37:08 +00:00
|
|
|
|
2016-04-13 03:21:27 +00:00
|
|
|
import homeassistant.helpers.config_validation as cv
|
2014-12-07 07:57:02 +00:00
|
|
|
from homeassistant.util import sanitize_filename
|
2013-12-11 08:07:30 +00:00
|
|
|
|
2016-09-02 04:31:49 +00:00
|
|
|
_LOGGER = logging.getLogger(__name__)

# Service-call data attributes accepted by the download_file service.
ATTR_FILENAME = "filename"
ATTR_SUBDIR = "subdir"
ATTR_URL = "url"
ATTR_OVERWRITE = "overwrite"

# Configuration key: base directory downloads are written to.
CONF_DOWNLOAD_DIR = "download_dir"

DOMAIN = "downloader"

# Event suffixes fired on the bus as f"{DOMAIN}_{...}" when a download
# fails or completes.
DOWNLOAD_FAILED_EVENT = "download_failed"
DOWNLOAD_COMPLETED_EVENT = "download_completed"

SERVICE_DOWNLOAD_FILE = "download_file"
|
2014-03-25 01:00:57 +00:00
|
|
|
|
2019-07-31 19:25:30 +00:00
|
|
|
# Schema for the download_file service: a required URL plus optional
# subdirectory, explicit filename, and overwrite flag (defaults to False,
# i.e. existing files are kept and a numbered name is used instead).
SERVICE_DOWNLOAD_FILE_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_URL): cv.url,
        vol.Optional(ATTR_SUBDIR): cv.string,
        vol.Optional(ATTR_FILENAME): cv.string,
        vol.Optional(ATTR_OVERWRITE, default=False): cv.boolean,
    }
)
|
2016-04-13 03:21:27 +00:00
|
|
|
|
2019-07-31 19:25:30 +00:00
|
|
|
# Component configuration: only the download directory is required.
CONFIG_SCHEMA = vol.Schema(
    {DOMAIN: vol.Schema({vol.Required(CONF_DOWNLOAD_DIR): cv.string})},
    extra=vol.ALLOW_EXTRA,
)
|
2016-09-02 04:31:49 +00:00
|
|
|
|
2013-12-11 08:07:30 +00:00
|
|
|
|
2014-08-13 12:28:45 +00:00
|
|
|
def setup(hass, config):
    """Register the download_file service.

    Validates the configured download directory (resolving relative paths
    against the Home Assistant config dir) and registers a service that
    downloads the given URL in a background thread, firing
    ``downloader_download_completed`` / ``downloader_download_failed``
    events on the bus.

    Returns True on success, False when the download directory is missing.
    """
    download_path = config[DOMAIN][CONF_DOWNLOAD_DIR]

    # If path is relative, we assume relative to HASS config dir
    if not os.path.isabs(download_path):
        download_path = hass.config.path(download_path)

    if not os.path.isdir(download_path):
        _LOGGER.error(
            "Download path %s does not exist. File Downloader not active", download_path
        )
        return False

    def download_file(service):
        """Start thread to download file specified in the URL."""

        def do_download():
            """Download the file."""
            try:
                url = service.data[ATTR_URL]
                subdir = service.data.get(ATTR_SUBDIR)
                filename = service.data.get(ATTR_FILENAME)
                overwrite = service.data.get(ATTR_OVERWRITE)

                if subdir:
                    subdir = sanitize_filename(subdir)

                # Tracked so a partially written file can be removed on error.
                final_path = None

                req = requests.get(url, stream=True, timeout=10)

                if req.status_code != 200:
                    _LOGGER.warning(
                        "downloading '%s' failed, status_code=%d", url, req.status_code
                    )
                    hass.bus.fire(
                        f"{DOMAIN}_{DOWNLOAD_FAILED_EVENT}",
                        {"url": url, "filename": filename},
                    )

                else:
                    # Derive a filename: explicit service data wins, then the
                    # Content-Disposition header, then the URL basename.
                    if filename is None and "content-disposition" in req.headers:
                        match = re.findall(
                            r"filename=(\S+)", req.headers["content-disposition"]
                        )

                        if match:
                            filename = match[0].strip("'\" ")

                    if not filename:
                        filename = os.path.basename(url).strip()

                    if not filename:
                        filename = "ha_download"

                    # Remove stuff to ruin paths
                    filename = sanitize_filename(filename)

                    # Do we want to download to subdir, create if needed
                    if subdir:
                        subdir_path = os.path.join(download_path, subdir)

                        # exist_ok avoids a race between the isdir check and
                        # makedirs when two downloads target the same subdir.
                        os.makedirs(subdir_path, exist_ok=True)

                        final_path = os.path.join(subdir_path, filename)

                    else:
                        final_path = os.path.join(download_path, filename)

                    path, ext = os.path.splitext(final_path)

                    # If file exist append a number.
                    # We test filename, filename_2..
                    if not overwrite:
                        tries = 1
                        final_path = path + ext
                        while os.path.isfile(final_path):
                            tries += 1

                            # ext from splitext already carries its leading
                            # dot, so no extra "." goes between tries and ext.
                            final_path = f"{path}_{tries}{ext}"

                    _LOGGER.debug("%s -> %s", url, final_path)

                    with open(final_path, "wb") as fil:
                        for chunk in req.iter_content(1024):
                            fil.write(chunk)

                    _LOGGER.debug("Downloading of %s done", url)
                    hass.bus.fire(
                        f"{DOMAIN}_{DOWNLOAD_COMPLETED_EVENT}",
                        {"url": url, "filename": filename},
                    )

            except requests.exceptions.ConnectionError:
                _LOGGER.exception("ConnectionError occurred for %s", url)
                hass.bus.fire(
                    f"{DOMAIN}_{DOWNLOAD_FAILED_EVENT}",
                    {"url": url, "filename": filename},
                )

                # Remove file if we started downloading but failed
                if final_path and os.path.isfile(final_path):
                    os.remove(final_path)

        threading.Thread(target=do_download).start()

    hass.services.register(
        DOMAIN,
        SERVICE_DOWNLOAD_FILE,
        download_file,
        schema=SERVICE_DOWNLOAD_FILE_SCHEMA,
    )

    return True
|