Add integration kraken (#31114)
Co-authored-by: Paulus Schoutsen <paulus@home-assistant.io>
Co-authored-by: Franck Nijhof <frenck@frenck.nl>
Co-authored-by: Paulus Schoutsen <balloob@gmail.com>
parent a8e1a68d1f
commit c220e70008
@@ -255,6 +255,7 @@ homeassistant/components/knx/* @Julius2342 @farmio @marvin-w
homeassistant/components/kodi/* @OnFreund @cgtobi
homeassistant/components/konnected/* @heythisisnate @kit-klein
homeassistant/components/kostal_plenticore/* @stegm
homeassistant/components/kraken/* @eifinger
homeassistant/components/kulersky/* @emlove
homeassistant/components/lametric/* @robbiet480
homeassistant/components/launch_library/* @ludeeus
@@ -0,0 +1,152 @@
"""The kraken integration."""
from __future__ import annotations

import asyncio
from datetime import timedelta
import logging

import async_timeout
import krakenex
import pykrakenapi

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_SCAN_INTERVAL
from homeassistant.core import HomeAssistant
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import (
    CONF_TRACKED_ASSET_PAIRS,
    DEFAULT_SCAN_INTERVAL,
    DEFAULT_TRACKED_ASSET_PAIR,
    DISPATCH_CONFIG_UPDATED,
    DOMAIN,
)
from .utils import get_tradable_asset_pairs

PLATFORMS = ["sensor"]

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Set up kraken from a config entry."""
    kraken_data = KrakenData(hass, config_entry)
    await kraken_data.async_setup()
    hass.data[DOMAIN] = kraken_data
    config_entry.add_update_listener(async_options_updated)
    hass.config_entries.async_setup_platforms(config_entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
    """Unload a config entry."""
    unload_ok = await hass.config_entries.async_unload_platforms(
        config_entry, PLATFORMS
    )
    if unload_ok:
        for unsub_listener in hass.data[DOMAIN].unsub_listeners:
            unsub_listener()
        hass.data.pop(DOMAIN)

    return unload_ok


class KrakenData:
    """Define an object to hold kraken data."""

    def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None:
        """Initialize."""
        self._hass = hass
        self._config_entry = config_entry
        self._api = pykrakenapi.KrakenAPI(krakenex.API(), retry=0, crl_sleep=0)
        self.tradable_asset_pairs = None
        self.coordinator = None
        self.unsub_listeners = []

    async def async_update(self) -> dict | None:
        """Get the latest data from the Kraken.com REST API.

        All tradeable asset pairs are retrieved, not the tracked asset pairs
        selected by the user. This enables us to check for an unknown and
        thus likely removed asset pair in sensor.py and only log a warning
        once.
        """
        try:
            async with async_timeout.timeout(10):
                return await self._hass.async_add_executor_job(self._get_kraken_data)
        except pykrakenapi.pykrakenapi.KrakenAPIError as error:
            if "Unknown asset pair" in str(error):
                _LOGGER.info(
                    "Kraken.com reported an unknown asset pair. Refreshing list of tradable asset pairs"
                )
                await self._async_refresh_tradable_asset_pairs()
            else:
                raise UpdateFailed(
                    f"Unable to fetch data from Kraken.com: {error}"
                ) from error
        except pykrakenapi.pykrakenapi.CallRateLimitError:
            _LOGGER.warning(
                "Exceeded the Kraken.com call rate limit. Increase the update interval to prevent this error"
            )

    def _get_kraken_data(self) -> dict:
        websocket_name_pairs = self._get_websocket_name_asset_pairs()
        ticker_df = self._api.get_ticker_information(websocket_name_pairs)
        # Rename columns to their full name
        ticker_df = ticker_df.rename(
            columns={
                "a": "ask",
                "b": "bid",
                "c": "last_trade_closed",
                "v": "volume",
                "p": "volume_weighted_average",
                "t": "number_of_trades",
                "l": "low",
                "h": "high",
                "o": "opening_price",
            }
        )
        response_dict = ticker_df.transpose().to_dict()
        return response_dict

    async def _async_refresh_tradable_asset_pairs(self) -> None:
        self.tradable_asset_pairs = await self._hass.async_add_executor_job(
            get_tradable_asset_pairs, self._api
        )

    async def async_setup(self) -> None:
        """Set up the Kraken integration."""
        if not self._config_entry.options:
            options = {
                CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
                CONF_TRACKED_ASSET_PAIRS: [DEFAULT_TRACKED_ASSET_PAIR],
            }
            self._hass.config_entries.async_update_entry(
                self._config_entry, options=options
            )
        await self._async_refresh_tradable_asset_pairs()
        await asyncio.sleep(1)  # Wait 1 second to avoid triggering the CallRateLimiter
        self.coordinator = DataUpdateCoordinator(
            self._hass,
            _LOGGER,
            name=DOMAIN,
            update_method=self.async_update,
            update_interval=timedelta(
                seconds=self._config_entry.options[CONF_SCAN_INTERVAL]
            ),
        )
        await self.coordinator.async_config_entry_first_refresh()

    def _get_websocket_name_asset_pairs(self) -> str:
        return ",".join(wsname for wsname in self.tradable_asset_pairs.values())

    def set_update_interval(self, update_interval: int) -> None:
        """Set the coordinator update_interval to the supplied update_interval."""
        self.coordinator.update_interval = timedelta(seconds=update_interval)


async def async_options_updated(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
    """Triggered by config entry options updates."""
    hass.data[DOMAIN].set_update_interval(config_entry.options[CONF_SCAN_INTERVAL])
    async_dispatcher_send(hass, DISPATCH_CONFIG_UPDATED, hass, config_entry)
@@ -0,0 +1,81 @@
"""Config flow for kraken integration."""
import logging

import krakenex
from pykrakenapi.pykrakenapi import KrakenAPI
import voluptuous as vol

from homeassistant import config_entries
from homeassistant.const import CONF_SCAN_INTERVAL
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv

from .const import CONF_TRACKED_ASSET_PAIRS, DEFAULT_SCAN_INTERVAL, DOMAIN
from .utils import get_tradable_asset_pairs

_LOGGER = logging.getLogger(__name__)


class KrakenConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a config flow for kraken."""

    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL

    @staticmethod
    @callback
    def async_get_options_flow(config_entry):
        """Get the options flow for this handler."""
        return KrakenOptionsFlowHandler(config_entry)

    async def async_step_user(self, user_input=None):
        """Handle the initial step."""
        if DOMAIN in self.hass.data:
            return self.async_abort(reason="already_configured")
        if user_input is not None:
            return self.async_create_entry(title=DOMAIN, data=user_input)
        return self.async_show_form(
            step_id="user",
            data_schema=None,
            errors={},
        )


class KrakenOptionsFlowHandler(config_entries.OptionsFlow):
    """Handle Kraken client options."""

    def __init__(self, config_entry):
        """Initialize Kraken options flow."""
        self.config_entry = config_entry

    async def async_step_init(self, user_input=None):
        """Manage the Kraken options."""
        if user_input is not None:
            return self.async_create_entry(title="", data=user_input)

        api = KrakenAPI(krakenex.API(), retry=0, crl_sleep=0)
        tradable_asset_pairs = await self.hass.async_add_executor_job(
            get_tradable_asset_pairs, api
        )
        tradable_asset_pairs_for_multi_select = {
            v: v for v in tradable_asset_pairs.keys()
        }
        options = {
            vol.Optional(
                CONF_SCAN_INTERVAL,
                default=self.config_entry.options.get(
                    CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL
                ),
            ): int,
            vol.Optional(
                CONF_TRACKED_ASSET_PAIRS,
                default=self.config_entry.options.get(CONF_TRACKED_ASSET_PAIRS, []),
            ): cv.multi_select(tradable_asset_pairs_for_multi_select),
        }

        return self.async_show_form(step_id="init", data_schema=vol.Schema(options))


class AlreadyConfigured(HomeAssistantError):
    """Error to indicate the asset pair is already configured."""
@@ -0,0 +1,28 @@
"""Constants for the kraken integration."""

DEFAULT_SCAN_INTERVAL = 60
DEFAULT_TRACKED_ASSET_PAIR = "XBT/USD"
DISPATCH_CONFIG_UPDATED = "kraken_config_updated"

CONF_TRACKED_ASSET_PAIRS = "tracked_asset_pairs"

DOMAIN = "kraken"

SENSOR_TYPES = [
    {"name": "ask", "enabled_by_default": True},
    {"name": "ask_volume", "enabled_by_default": False},
    {"name": "bid", "enabled_by_default": True},
    {"name": "bid_volume", "enabled_by_default": False},
    {"name": "volume_today", "enabled_by_default": False},
    {"name": "volume_last_24h", "enabled_by_default": False},
    {"name": "volume_weighted_average_today", "enabled_by_default": False},
    {"name": "volume_weighted_average_last_24h", "enabled_by_default": False},
    {"name": "number_of_trades_today", "enabled_by_default": False},
    {"name": "number_of_trades_last_24h", "enabled_by_default": False},
    {"name": "last_trade_closed", "enabled_by_default": False},
    {"name": "low_today", "enabled_by_default": True},
    {"name": "low_last_24h", "enabled_by_default": False},
    {"name": "high_today", "enabled_by_default": True},
    {"name": "high_last_24h", "enabled_by_default": False},
    {"name": "opening_price_today", "enabled_by_default": False},
]
@@ -0,0 +1,9 @@
{
  "domain": "kraken",
  "name": "Kraken",
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/kraken",
  "requirements": ["krakenex==2.1.0", "pykrakenapi==0.1.8"],
  "codeowners": ["@eifinger"],
  "iot_class": "cloud_polling"
}
@@ -0,0 +1,230 @@
"""The kraken integration."""
from __future__ import annotations

import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import async_entries_for_config_entry
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import KrakenData
from .const import (
    CONF_TRACKED_ASSET_PAIRS,
    DISPATCH_CONFIG_UPDATED,
    DOMAIN,
    SENSOR_TYPES,
)

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Add kraken entities from a config_entry."""

    @callback
    async def async_update_sensors(
        hass: HomeAssistant, config_entry: ConfigEntry
    ) -> None:
        device_registry = await hass.helpers.device_registry.async_get_registry()

        existing_devices = {
            device.name: device.id
            for device in async_entries_for_config_entry(
                device_registry, config_entry.entry_id
            )
        }

        for tracked_asset_pair in config_entry.options[CONF_TRACKED_ASSET_PAIRS]:
            # Only create new devices
            if create_device_name(tracked_asset_pair) in existing_devices:
                existing_devices.pop(create_device_name(tracked_asset_pair))
            else:
                sensors = []
                for sensor_type in SENSOR_TYPES:
                    sensors.append(
                        KrakenSensor(
                            hass.data[DOMAIN],
                            tracked_asset_pair,
                            sensor_type,
                        )
                    )
                async_add_entities(sensors, True)

        # Remove devices for asset pairs which are no longer tracked
        for device_id in existing_devices.values():
            device_registry.async_remove_device(device_id)

    await async_update_sensors(hass, config_entry)

    hass.data[DOMAIN].unsub_listeners.append(
        async_dispatcher_connect(
            hass,
            DISPATCH_CONFIG_UPDATED,
            async_update_sensors,
        )
    )


class KrakenSensor(CoordinatorEntity):
    """Define a Kraken sensor."""

    def __init__(
        self,
        kraken_data: KrakenData,
        tracked_asset_pair: str,
        sensor_type: dict[str, bool],
    ) -> None:
        """Initialize."""
        super().__init__(kraken_data.coordinator)
        self.tracked_asset_pair_wsname = kraken_data.tradable_asset_pairs[
            tracked_asset_pair
        ]
        self._source_asset = tracked_asset_pair.split("/")[0]
        self._target_asset = tracked_asset_pair.split("/")[1]
        self._sensor_type = sensor_type["name"]
        self._enabled_by_default = sensor_type["enabled_by_default"]
        self._unit_of_measurement = self._target_asset
        self._device_name = f"{self._source_asset} {self._target_asset}"
        self._name = "_".join(
            [
                tracked_asset_pair.split("/")[0],
                tracked_asset_pair.split("/")[1],
                sensor_type["name"],
            ]
        )
        self._received_data_at_least_once = False
        self._available = True
        self._state = None

    @property
    def entity_registry_enabled_default(self):
        """Return if the entity should be enabled when first added to the entity registry."""
        return self._enabled_by_default

    @property
    def name(self):
        """Return the name."""
        return self._name

    @property
    def unique_id(self):
        """Set unique_id for sensor."""
        return self._name.lower()

    @property
    def state(self):
        """Return the state."""
        return self._state

    async def async_added_to_hass(self) -> None:
        """Handle entity which will be added."""
        await super().async_added_to_hass()
        self._update_internal_state()

    def _handle_coordinator_update(self):
        self._update_internal_state()
        return super()._handle_coordinator_update()

    def _update_internal_state(self):
        try:
            self._state = self._try_get_state()
            self._received_data_at_least_once = True  # Received data at least one time.
        except TypeError:
            if self._received_data_at_least_once:
                if self._available:
                    _LOGGER.warning(
                        "Asset Pair %s is no longer available",
                        self._device_name,
                    )
                    self._available = False

    def _try_get_state(self) -> str:
        """Try to get the state or raise a TypeError."""
        if self._sensor_type == "last_trade_closed":
            return self.coordinator.data[self.tracked_asset_pair_wsname][
                "last_trade_closed"
            ][0]
        if self._sensor_type == "ask":
            return self.coordinator.data[self.tracked_asset_pair_wsname]["ask"][0]
        if self._sensor_type == "ask_volume":
            return self.coordinator.data[self.tracked_asset_pair_wsname]["ask"][1]
        if self._sensor_type == "bid":
            return self.coordinator.data[self.tracked_asset_pair_wsname]["bid"][0]
        if self._sensor_type == "bid_volume":
            return self.coordinator.data[self.tracked_asset_pair_wsname]["bid"][1]
        if self._sensor_type == "volume_today":
            return self.coordinator.data[self.tracked_asset_pair_wsname]["volume"][0]
        if self._sensor_type == "volume_last_24h":
            return self.coordinator.data[self.tracked_asset_pair_wsname]["volume"][1]
        if self._sensor_type == "volume_weighted_average_today":
            return self.coordinator.data[self.tracked_asset_pair_wsname][
                "volume_weighted_average"
            ][0]
        if self._sensor_type == "volume_weighted_average_last_24h":
            return self.coordinator.data[self.tracked_asset_pair_wsname][
                "volume_weighted_average"
            ][1]
        if self._sensor_type == "number_of_trades_today":
            return self.coordinator.data[self.tracked_asset_pair_wsname][
                "number_of_trades"
            ][0]
        if self._sensor_type == "number_of_trades_last_24h":
            return self.coordinator.data[self.tracked_asset_pair_wsname][
                "number_of_trades"
            ][1]
        if self._sensor_type == "low_today":
            return self.coordinator.data[self.tracked_asset_pair_wsname]["low"][0]
        if self._sensor_type == "low_last_24h":
            return self.coordinator.data[self.tracked_asset_pair_wsname]["low"][1]
        if self._sensor_type == "high_today":
            return self.coordinator.data[self.tracked_asset_pair_wsname]["high"][0]
        if self._sensor_type == "high_last_24h":
            return self.coordinator.data[self.tracked_asset_pair_wsname]["high"][1]
        if self._sensor_type == "opening_price_today":
            return self.coordinator.data[self.tracked_asset_pair_wsname][
                "opening_price"
            ]

    @property
    def icon(self):
        """Return the icon."""
        if self._target_asset == "EUR":
            return "mdi:currency-eur"
        if self._target_asset == "GBP":
            return "mdi:currency-gbp"
        if self._target_asset == "USD":
            return "mdi:currency-usd"
        if self._target_asset == "JPY":
            return "mdi:currency-jpy"
        if self._target_asset == "XBT":
            return "mdi:currency-btc"
        return "mdi:cash"

    @property
    def unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        if "number_of" not in self._sensor_type:
            return self._unit_of_measurement

    @property
    def available(self):
        """Could the api be accessed during the last update call."""
        return self._available and self.coordinator.last_update_success

    @property
    def device_info(self) -> dict:
        """Return a device description for device registry."""

        return {
            "identifiers": {(DOMAIN, self._source_asset, self._target_asset)},
            "name": self._device_name,
            "manufacturer": "Kraken.com",
            "entry_type": "service",
        }


def create_device_name(tracked_asset_pair: str) -> str:
    """Create the device name for a given tracked asset pair."""
    return f"{tracked_asset_pair.split('/')[0]} {tracked_asset_pair.split('/')[1]}"
@@ -0,0 +1,24 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::single_instance_allowed%]"
    },
    "error": {},
    "step": {
      "user": {
        "data": {},
        "description": "[%key:common::config_flow::description::confirm_setup%]"
      }
    }
  },
  "options": {
    "step": {
      "init": {
        "data": {
          "scan_interval": "Update interval",
          "tracked_asset_pairs": "Tracked Asset Pairs"
        }
      }
    }
  }
}
@@ -0,0 +1,16 @@
"""Utility functions for the kraken integration."""
from __future__ import annotations

from pykrakenapi.pykrakenapi import KrakenAPI


def get_tradable_asset_pairs(kraken_api: KrakenAPI) -> dict[str, str]:
    """Get a list of tradable asset pairs."""
    tradable_asset_pairs = {}
    asset_pairs_df = kraken_api.get_tradable_asset_pairs()
    for pair in zip(asset_pairs_df.index.values, asset_pairs_df["wsname"]):
        if not pair[0].endswith(
            ".d"
        ):  # Remove darkpools https://support.kraken.com/hc/en-us/articles/360001391906-Introducing-the-Kraken-Dark-Pool
            tradable_asset_pairs[pair[1]] = pair[0]
    return tradable_asset_pairs
@@ -131,6 +131,7 @@ FLOWS = [
    "kodi",
    "konnected",
    "kostal_plenticore",
    "kraken",
    "kulersky",
    "life360",
    "lifx",
mypy.ini
@@ -908,6 +908,9 @@ ignore_errors = true
[mypy-homeassistant.components.kostal_plenticore.*]
ignore_errors = true

[mypy-homeassistant.components.kraken.*]
ignore_errors = true

[mypy-homeassistant.components.kulersky.*]
ignore_errors = true
@@ -857,6 +857,9 @@ konnected==1.2.0
# homeassistant.components.kostal_plenticore
kostal_plenticore==0.2.0

# homeassistant.components.kraken
krakenex==2.1.0

# homeassistant.components.eufy
lakeside==0.12

@@ -1499,6 +1502,9 @@ pykmtronic==0.3.0
# homeassistant.components.kodi
pykodi==0.2.5

# homeassistant.components.kraken
pykrakenapi==0.1.8

# homeassistant.components.kulersky
pykulersky==0.5.2

@@ -480,6 +480,9 @@ konnected==1.2.0
# homeassistant.components.kostal_plenticore
kostal_plenticore==0.2.0

# homeassistant.components.kraken
krakenex==2.1.0

# homeassistant.components.dyson
libpurecool==0.6.4

@@ -831,6 +834,9 @@ pykmtronic==0.3.0
# homeassistant.components.kodi
pykodi==0.2.5

# homeassistant.components.kraken
pykrakenapi==0.1.8

# homeassistant.components.kulersky
pykulersky==0.5.2
@@ -115,6 +115,7 @@ IGNORED_MODULES: Final[list[str]] = [
    "homeassistant.components.kodi.*",
    "homeassistant.components.konnected.*",
    "homeassistant.components.kostal_plenticore.*",
    "homeassistant.components.kraken.*",
    "homeassistant.components.kulersky.*",
    "homeassistant.components.lifx.*",
    "homeassistant.components.litejet.*",
@@ -0,0 +1 @@
"""Tests for the kraken integration."""
@@ -0,0 +1,80 @@
"""Constants for kraken tests."""
import pandas

TRADEABLE_ASSET_PAIR_RESPONSE = pandas.DataFrame(
    {"wsname": ["ADA/XBT", "ADA/ETH", "XBT/EUR", "XBT/GBP", "XBT/USD", "XBT/JPY"]},
    columns=["wsname"],
    index=["ADAXBT", "ADAETH", "XBTEUR", "XXBTZGBP", "XXBTZUSD", "XXBTZJPY"],
)

TICKER_INFORMATION_RESPONSE = pandas.DataFrame(
    {
        "a": [
            [0.000349400, 15949, 15949.000],
            [0.000349400, 15949, 15949.000],
            [0.000349400, 15949, 15949.000],
            [0.000349400, 15949, 15949.000],
            [0.000349400, 15949, 15949.000],
            [0.000349400, 15949, 15949.000],
        ],
        "b": [
            [0.000348400, 20792, 20792.000],
            [0.000348400, 20792, 20792.000],
            [0.000348400, 20792, 20792.000],
            [0.000348400, 20792, 20792.000],
            [0.000348400, 20792, 20792.000],
            [0.000348400, 20792, 20792.000],
        ],
        "c": [
            [0.000347800, 2809.36384377],
            [0.000347800, 2809.36384377],
            [0.000347800, 2809.36384377],
            [0.000347800, 2809.36384377],
            [0.000347800, 2809.36384377],
            [0.000347800, 2809.36384377],
        ],
        "h": [
            [0.000351600, 0.000352100],
            [0.000351600, 0.000352100],
            [0.000351600, 0.000352100],
            [0.000351600, 0.000352100],
            [0.000351600, 0.000352100],
            [0.000351600, 0.000352100],
        ],
        "l": [
            [0.000344600, 0.000344600],
            [0.000344600, 0.000344600],
            [0.000344600, 0.000344600],
            [0.000344600, 0.000344600],
            [0.000344600, 0.000344600],
            [0.000344600, 0.000344600],
        ],
        "o": [
            0.000351300,
            0.000351300,
            0.000351300,
            0.000351300,
            0.000351300,
            0.000351300,
        ],
        "p": [
            [0.000348573, 0.000344881],
            [0.000348573, 0.000344881],
            [0.000348573, 0.000344881],
            [0.000348573, 0.000344881],
            [0.000348573, 0.000344881],
            [0.000348573, 0.000344881],
        ],
        "t": [[82, 128], [82, 128], [82, 128], [82, 128], [82, 128], [82, 128]],
        "v": [
            [146300.24906838, 253478.04715403],
            [146300.24906838, 253478.04715403],
            [146300.24906838, 253478.04715403],
            [146300.24906838, 253478.04715403],
            [146300.24906838, 253478.04715403],
            [146300.24906838, 253478.04715403],
        ],
    },
    columns=["a", "b", "c", "h", "l", "o", "p", "t", "v"],
    index=["ADAXBT", "ADAETH", "XBTEUR", "XXBTZGBP", "XXBTZUSD", "XXBTZJPY"],
)
@@ -0,0 +1,101 @@
"""Tests for the kraken config_flow."""
from unittest.mock import patch

from homeassistant.components.kraken.const import CONF_TRACKED_ASSET_PAIRS, DOMAIN
from homeassistant.const import CONF_SCAN_INTERVAL

from .const import TICKER_INFORMATION_RESPONSE, TRADEABLE_ASSET_PAIR_RESPONSE

from tests.common import MockConfigEntry


async def test_config_flow(hass):
    """Test we can finish a config flow."""
    with patch(
        "pykrakenapi.KrakenAPI.get_tradable_asset_pairs",
        return_value=TRADEABLE_ASSET_PAIR_RESPONSE,
    ), patch(
        "pykrakenapi.KrakenAPI.get_ticker_information",
        return_value=TICKER_INFORMATION_RESPONSE,
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "user"}
        )
        assert result["type"] == "form"

        result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
        assert result["type"] == "create_entry"

        await hass.async_block_till_done()
        state = hass.states.get("sensor.xbt_usd_ask")
        assert state


async def test_already_configured(hass):
    """Test we can not add a second config flow."""
    with patch(
        "pykrakenapi.KrakenAPI.get_tradable_asset_pairs",
        return_value=TRADEABLE_ASSET_PAIR_RESPONSE,
    ), patch(
        "pykrakenapi.KrakenAPI.get_ticker_information",
        return_value=TICKER_INFORMATION_RESPONSE,
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "user"}
        )
        assert result["type"] == "form"

        result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
        assert result["type"] == "create_entry"

        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": "user"}
        )
        assert result["type"] == "abort"


async def test_options(hass):
    """Test options for Kraken."""
    with patch(
        "pykrakenapi.KrakenAPI.get_tradable_asset_pairs",
        return_value=TRADEABLE_ASSET_PAIR_RESPONSE,
    ), patch(
        "pykrakenapi.KrakenAPI.get_ticker_information",
        return_value=TICKER_INFORMATION_RESPONSE,
    ):
        entry = MockConfigEntry(
            domain=DOMAIN,
            options={
                CONF_SCAN_INTERVAL: 60,
                CONF_TRACKED_ASSET_PAIRS: [
                    "ADA/XBT",
                    "ADA/ETH",
                    "XBT/EUR",
                    "XBT/GBP",
                    "XBT/USD",
                    "XBT/JPY",
                ],
            },
        )
        entry.add_to_hass(hass)

        await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()

        assert hass.states.get("sensor.xbt_usd_ask")

        result = await hass.config_entries.options.async_init(entry.entry_id)
        result = await hass.config_entries.options.async_configure(
            result["flow_id"],
            {
                CONF_SCAN_INTERVAL: 10,
                CONF_TRACKED_ASSET_PAIRS: ["ADA/ETH"],
            },
        )
        assert result["type"] == "create_entry"
        await hass.async_block_till_done()

        ada_eth_sensor = hass.states.get("sensor.ada_eth_ask")
        assert ada_eth_sensor.state == "0.0003494"

        assert hass.states.get("sensor.xbt_usd_ask") is None
@@ -0,0 +1,66 @@
"""Tests for the kraken integration."""
from unittest.mock import patch

from pykrakenapi.pykrakenapi import CallRateLimitError, KrakenAPIError

from homeassistant.components import kraken
from homeassistant.components.kraken.const import DOMAIN

from .const import TICKER_INFORMATION_RESPONSE, TRADEABLE_ASSET_PAIR_RESPONSE

from tests.common import MockConfigEntry


async def test_unload_entry(hass):
    """Test unload for Kraken."""
    with patch(
        "pykrakenapi.KrakenAPI.get_tradable_asset_pairs",
        return_value=TRADEABLE_ASSET_PAIR_RESPONSE,
    ), patch(
        "pykrakenapi.KrakenAPI.get_ticker_information",
        return_value=TICKER_INFORMATION_RESPONSE,
    ):
        entry = MockConfigEntry(domain=DOMAIN)
        entry.add_to_hass(hass)

        await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()
        assert await kraken.async_unload_entry(hass, entry)
        assert DOMAIN not in hass.data


async def test_unknown_error(hass, caplog):
    """Test that an unknown Kraken API error is logged."""
    with patch(
        "pykrakenapi.KrakenAPI.get_tradable_asset_pairs",
        return_value=TRADEABLE_ASSET_PAIR_RESPONSE,
    ), patch(
        "pykrakenapi.KrakenAPI.get_ticker_information",
        side_effect=KrakenAPIError("EQuery: Error"),
    ):
        entry = MockConfigEntry(domain=DOMAIN)
        entry.add_to_hass(hass)

        await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()
        assert "Unable to fetch data from Kraken.com:" in caplog.text


async def test_callrate_limit(hass, caplog):
    """Test that exceeding the call rate limit is logged."""
    with patch(
        "pykrakenapi.KrakenAPI.get_tradable_asset_pairs",
        return_value=TRADEABLE_ASSET_PAIR_RESPONSE,
    ), patch(
        "pykrakenapi.KrakenAPI.get_ticker_information",
        side_effect=CallRateLimitError(),
    ):
        entry = MockConfigEntry(domain=DOMAIN)
        entry.add_to_hass(hass)

        await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()
        assert (
            "Exceeded the Kraken.com call rate limit. Increase the update interval to prevent this error"
            in caplog.text
        )
@@ -0,0 +1,267 @@
"""Tests for the kraken sensor platform."""
from datetime import timedelta
from unittest.mock import patch

from pykrakenapi.pykrakenapi import KrakenAPIError

from homeassistant.components.kraken.const import (
    CONF_TRACKED_ASSET_PAIRS,
    DEFAULT_SCAN_INTERVAL,
    DEFAULT_TRACKED_ASSET_PAIR,
    DOMAIN,
)
from homeassistant.const import CONF_SCAN_INTERVAL, EVENT_HOMEASSISTANT_START
import homeassistant.util.dt as dt_util

from .const import TICKER_INFORMATION_RESPONSE, TRADEABLE_ASSET_PAIR_RESPONSE

from tests.common import MockConfigEntry, async_fire_time_changed


async def test_sensor(hass):
    """Test that sensor has a value."""
    utcnow = dt_util.utcnow()
    # Patching 'utcnow' to gain more control over the timed update.
    with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
        "pykrakenapi.KrakenAPI.get_tradable_asset_pairs",
        return_value=TRADEABLE_ASSET_PAIR_RESPONSE,
    ), patch(
        "pykrakenapi.KrakenAPI.get_ticker_information",
        return_value=TICKER_INFORMATION_RESPONSE,
    ):
        entry = MockConfigEntry(
            domain=DOMAIN,
            unique_id="0123456789",
            options={
                CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
                CONF_TRACKED_ASSET_PAIRS: [
                    "ADA/XBT",
                    "ADA/ETH",
                    "XBT/EUR",
                    "XBT/GBP",
                    "XBT/USD",
                    "XBT/JPY",
                ],
            },
        )
        entry.add_to_hass(hass)

        registry = await hass.helpers.entity_registry.async_get_registry()

        # Pre-create registry entries for disabled by default sensors
        registry.async_get_or_create(
            "sensor",
            DOMAIN,
            "xbt_usd_ask_volume",
            suggested_object_id="xbt_usd_ask_volume",
            disabled_by=None,
        )

        registry.async_get_or_create(
            "sensor",
            DOMAIN,
            "xbt_usd_last_trade_closed",
            suggested_object_id="xbt_usd_last_trade_closed",
            disabled_by=None,
        )

        registry.async_get_or_create(
            "sensor",
            DOMAIN,
            "xbt_usd_bid_volume",
            suggested_object_id="xbt_usd_bid_volume",
            disabled_by=None,
        )

        registry.async_get_or_create(
            "sensor",
            DOMAIN,
            "xbt_usd_volume_today",
            suggested_object_id="xbt_usd_volume_today",
            disabled_by=None,
        )

        registry.async_get_or_create(
            "sensor",
            DOMAIN,
            "xbt_usd_volume_last_24h",
            suggested_object_id="xbt_usd_volume_last_24h",
            disabled_by=None,
        )

        registry.async_get_or_create(
            "sensor",
            DOMAIN,
            "xbt_usd_volume_weighted_average_today",
            suggested_object_id="xbt_usd_volume_weighted_average_today",
            disabled_by=None,
        )

        registry.async_get_or_create(
            "sensor",
            DOMAIN,
            "xbt_usd_volume_weighted_average_last_24h",
            suggested_object_id="xbt_usd_volume_weighted_average_last_24h",
            disabled_by=None,
        )

        registry.async_get_or_create(
            "sensor",
            DOMAIN,
            "xbt_usd_number_of_trades_today",
            suggested_object_id="xbt_usd_number_of_trades_today",
            disabled_by=None,
        )

        registry.async_get_or_create(
            "sensor",
            DOMAIN,
            "xbt_usd_number_of_trades_last_24h",
            suggested_object_id="xbt_usd_number_of_trades_last_24h",
            disabled_by=None,
        )

        registry.async_get_or_create(
            "sensor",
            DOMAIN,
            "xbt_usd_low_last_24h",
            suggested_object_id="xbt_usd_low_last_24h",
            disabled_by=None,
        )

        registry.async_get_or_create(
            "sensor",
            DOMAIN,
            "xbt_usd_high_last_24h",
            suggested_object_id="xbt_usd_high_last_24h",
            disabled_by=None,
        )

        registry.async_get_or_create(
            "sensor",
            DOMAIN,
            "xbt_usd_opening_price_today",
            suggested_object_id="xbt_usd_opening_price_today",
            disabled_by=None,
        )

        await hass.config_entries.async_setup(entry.entry_id)

        await hass.async_block_till_done()

        hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
        await hass.async_block_till_done()

        xbt_usd_sensor = hass.states.get("sensor.xbt_usd_ask")
        assert xbt_usd_sensor.state == "0.0003494"
        assert xbt_usd_sensor.attributes["icon"] == "mdi:currency-usd"

        xbt_eur_sensor = hass.states.get("sensor.xbt_eur_ask")
        assert xbt_eur_sensor.state == "0.0003494"
        assert xbt_eur_sensor.attributes["icon"] == "mdi:currency-eur"

        ada_xbt_sensor = hass.states.get("sensor.ada_xbt_ask")
        assert ada_xbt_sensor.state == "0.0003494"
        assert ada_xbt_sensor.attributes["icon"] == "mdi:currency-btc"

        xbt_jpy_sensor = hass.states.get("sensor.xbt_jpy_ask")
        assert xbt_jpy_sensor.state == "0.0003494"
        assert xbt_jpy_sensor.attributes["icon"] == "mdi:currency-jpy"

        xbt_gbp_sensor = hass.states.get("sensor.xbt_gbp_ask")
        assert xbt_gbp_sensor.state == "0.0003494"
        assert xbt_gbp_sensor.attributes["icon"] == "mdi:currency-gbp"

        ada_eth_sensor = hass.states.get("sensor.ada_eth_ask")
        assert ada_eth_sensor.state == "0.0003494"
        assert ada_eth_sensor.attributes["icon"] == "mdi:cash"

        xbt_usd_ask_volume = hass.states.get("sensor.xbt_usd_ask_volume")
        assert xbt_usd_ask_volume.state == "15949"

        xbt_usd_last_trade_closed = hass.states.get("sensor.xbt_usd_last_trade_closed")
        assert xbt_usd_last_trade_closed.state == "0.0003478"

        xbt_usd_bid_volume = hass.states.get("sensor.xbt_usd_bid_volume")
        assert xbt_usd_bid_volume.state == "20792"

        xbt_usd_volume_today = hass.states.get("sensor.xbt_usd_volume_today")
        assert xbt_usd_volume_today.state == "146300.24906838"

        xbt_usd_volume_last_24h = hass.states.get("sensor.xbt_usd_volume_last_24h")
        assert xbt_usd_volume_last_24h.state == "253478.04715403"

        xbt_usd_volume_weighted_average_today = hass.states.get(
            "sensor.xbt_usd_volume_weighted_average_today"
        )
        assert xbt_usd_volume_weighted_average_today.state == "0.000348573"

        xbt_usd_volume_weighted_average_last_24h = hass.states.get(
            "sensor.xbt_usd_volume_weighted_average_last_24h"
        )
        assert xbt_usd_volume_weighted_average_last_24h.state == "0.000344881"

        xbt_usd_number_of_trades_today = hass.states.get(
            "sensor.xbt_usd_number_of_trades_today"
        )
        assert xbt_usd_number_of_trades_today.state == "82"

        xbt_usd_number_of_trades_last_24h = hass.states.get(
            "sensor.xbt_usd_number_of_trades_last_24h"
        )
        assert xbt_usd_number_of_trades_last_24h.state == "128"

        xbt_usd_low_last_24h = hass.states.get("sensor.xbt_usd_low_last_24h")
        assert xbt_usd_low_last_24h.state == "0.0003446"

        xbt_usd_high_last_24h = hass.states.get("sensor.xbt_usd_high_last_24h")
        assert xbt_usd_high_last_24h.state == "0.0003521"

        xbt_usd_opening_price_today = hass.states.get(
            "sensor.xbt_usd_opening_price_today"
        )
        assert xbt_usd_opening_price_today.state == "0.0003513"


async def test_missing_pair_marks_sensor_unavailable(hass):
    """Test that a missing tradable asset pair marks the sensor unavailable."""
    utcnow = dt_util.utcnow()
    # Patching 'utcnow' to gain more control over the timed update.
    with patch("homeassistant.util.dt.utcnow", return_value=utcnow), patch(
        "pykrakenapi.KrakenAPI.get_tradable_asset_pairs",
        return_value=TRADEABLE_ASSET_PAIR_RESPONSE,
    ):
        with patch(
            "pykrakenapi.KrakenAPI.get_ticker_information",
            return_value=TICKER_INFORMATION_RESPONSE,
        ):
            entry = MockConfigEntry(
                domain=DOMAIN,
                options={
                    CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL,
                    CONF_TRACKED_ASSET_PAIRS: [DEFAULT_TRACKED_ASSET_PAIR],
                },
            )
            entry.add_to_hass(hass)

            await hass.config_entries.async_setup(entry.entry_id)

            await hass.async_block_till_done()

            hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
            await hass.async_block_till_done()

            sensor = hass.states.get("sensor.xbt_usd_ask")
            assert sensor.state == "0.0003494"

        with patch(
            "pykrakenapi.KrakenAPI.get_ticker_information",
            side_effect=KrakenAPIError("EQuery:Unknown asset pair"),
        ):
            async_fire_time_changed(
                hass, utcnow + timedelta(seconds=DEFAULT_SCAN_INTERVAL * 2)
            )
            await hass.async_block_till_done()

            sensor = hass.states.get("sensor.xbt_usd_ask")
            assert sensor.state == "unavailable"