parent bd9a8ba6f1
commit e4a78420b8

@@ -248,6 +248,7 @@ select = [
     "F",  # pyflakes/autoflake
     "PGH004",  # Use specific rule codes when using noqa
     "PT001",  # Use @pytest.fixture without parentheses
+    "PT013",  # Found incorrect pytest import, use simple import pytest instead
     "SIM105",  # Use contextlib.suppress({exception}) instead of try-except-pass
     "T20",  # flake8-print
     "UP",  # pyupgrade

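For context, a minimal sketch of the import style the newly enabled PT013 rule
enforces, assuming ruff's flake8-pytest-style semantics (the fixture and test
below are illustrative, not from this commit):

    # Flagged by PT013: importing names out of the pytest namespace.
    # from pytest import fixture, raises

    # Preferred: import the module and qualify every use.
    import pytest

    @pytest.fixture
    def payload():
        """Return a sample payload."""
        return {"ok": True}

    def test_missing_key(payload):
        """pytest.raises is the module-qualified spelling used throughout this diff."""
        with pytest.raises(KeyError):
            payload["missing"]
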
@@ -2,16 +2,16 @@
 from unittest.mock import MagicMock, patch

 from canary.api import Api
-from pytest import fixture
+import pytest


-@fixture(autouse=True)
+@pytest.fixture(autouse=True)
 def mock_ffmpeg(hass):
     """Mock ffmpeg is loaded."""
     hass.config.components.add("ffmpeg")


-@fixture
+@pytest.fixture
 def canary(hass):
     """Mock the CanaryApi for easier testing."""
     with patch.object(Api, "login", return_value=True), patch(
@@ -35,7 +35,7 @@ def canary(hass):
     yield mock_canary


-@fixture
+@pytest.fixture
 def canary_config_flow(hass):
     """Mock the CanaryApi for easier config flow testing."""
     with patch.object(Api, "login", return_value=True), patch(

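Only the decorator spelling changes in hunks like the one above; fixture
behavior is identical. A self-contained sketch of the two fixture forms
involved (names are illustrative, not from the commit):

    import pytest

    @pytest.fixture(autouse=True)
    def configure_env(monkeypatch):
        # autouse: applied to every test in the module without being requested.
        monkeypatch.setenv("MODE", "test")

    @pytest.fixture
    def client():
        # Requested explicitly via a test parameter of the same name.
        return {"logged_in": True}

    def test_client(client):
        assert client["logged_in"]
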
@@ -1,12 +1,12 @@
 """Define fixtures available for all tests."""
 from unittest.mock import patch

-from pytest import fixture
+import pytest

 from . import _get_mock_cfupdate


-@fixture
+@pytest.fixture
 def cfupdate(hass):
     """Mock the CloudflareUpdater for easier testing."""
     mock_cfupdate = _get_mock_cfupdate()
@@ -17,7 +17,7 @@ def cfupdate(hass):
     yield mock_api


-@fixture
+@pytest.fixture
 def cfupdate_flow(hass):
     """Mock the CloudflareUpdater for easier config flow testing."""
     mock_cfupdate = _get_mock_cfupdate()

@@ -3,7 +3,7 @@ from __future__ import annotations

 from typing import Any

-from pytest import LogCaptureFixture
+import pytest

 from homeassistant import setup
 from homeassistant.components.binary_sensor import DOMAIN
@@ -116,7 +116,9 @@ async def test_unique_id(hass: HomeAssistant) -> None:
     )


-async def test_return_code(caplog: LogCaptureFixture, hass: HomeAssistant) -> None:
+async def test_return_code(
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
+) -> None:
     """Test setting the state with a template."""
     await setup_test_entity(
         hass,

@@ -6,7 +6,7 @@ import tempfile
 from typing import Any
 from unittest.mock import patch

-from pytest import LogCaptureFixture
+import pytest

 from homeassistant import config as hass_config, setup
 from homeassistant.components.cover import DOMAIN, SCAN_INTERVAL
@@ -38,7 +38,7 @@ async def setup_test_entity(hass: HomeAssistant, config_dict: dict[str, Any]) ->
     await hass.async_block_till_done()


-async def test_no_covers(caplog: LogCaptureFixture, hass: HomeAssistant) -> None:
+async def test_no_covers(caplog: pytest.LogCaptureFixture, hass: HomeAssistant) -> None:
     """Test that the cover does not polls when there's no state command."""

     with patch(
@@ -153,7 +153,7 @@ async def test_reload(hass: HomeAssistant) -> None:


 async def test_move_cover_failure(
-    caplog: LogCaptureFixture, hass: HomeAssistant
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
 ) -> None:
     """Test command failure."""


@@ -7,7 +7,7 @@ import tempfile
 from typing import Any
 from unittest.mock import patch

-from pytest import LogCaptureFixture
+import pytest

 from homeassistant import setup
 from homeassistant.components.notify import DOMAIN
@@ -63,7 +63,7 @@ async def test_command_line_output(hass: HomeAssistant) -> None:


 async def test_error_for_none_zero_exit_code(
-    caplog: LogCaptureFixture, hass: HomeAssistant
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
 ) -> None:
     """Test if an error is logged for non zero exit codes."""
     await setup_test_service(
@@ -80,7 +80,7 @@ async def test_error_for_none_zero_exit_code(
     assert "return code 1" in caplog.text


-async def test_timeout(caplog: LogCaptureFixture, hass: HomeAssistant) -> None:
+async def test_timeout(caplog: pytest.LogCaptureFixture, hass: HomeAssistant) -> None:
     """Test blocking is not forever."""
     await setup_test_service(
         hass,
@@ -96,7 +96,7 @@ async def test_timeout(caplog: LogCaptureFixture, hass: HomeAssistant) -> None:


 async def test_subprocess_exceptions(
-    caplog: LogCaptureFixture, hass: HomeAssistant
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
 ) -> None:
     """Test that notify subprocess exceptions are handled correctly."""


@@ -4,7 +4,7 @@ from __future__ import annotations
 from typing import Any
 from unittest.mock import patch

-from pytest import LogCaptureFixture
+import pytest

 from homeassistant import setup
 from homeassistant.components.sensor import DOMAIN
@@ -101,7 +101,7 @@ async def test_template_render_with_quote(hass: HomeAssistant) -> None:


 async def test_bad_template_render(
-    caplog: LogCaptureFixture, hass: HomeAssistant
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
 ) -> None:
     """Test rendering a broken template."""

@@ -128,7 +128,9 @@ async def test_bad_command(hass: HomeAssistant) -> None:
     assert entity_state.state == "unknown"


-async def test_return_code(caplog: LogCaptureFixture, hass: HomeAssistant) -> None:
+async def test_return_code(
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
+) -> None:
     """Test that an error return code is logged."""
     await setup_test_entities(
         hass,
@@ -159,7 +161,7 @@ async def test_update_with_json_attrs(hass: HomeAssistant) -> None:


 async def test_update_with_json_attrs_no_data(
-    caplog: LogCaptureFixture, hass: HomeAssistant
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
 ) -> None:
     """Test attributes when no JSON result fetched."""

@@ -177,7 +179,7 @@ async def test_update_with_json_attrs_no_data(


 async def test_update_with_json_attrs_not_dict(
-    caplog: LogCaptureFixture, hass: HomeAssistant
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
 ) -> None:
     """Test attributes when the return value not a dict."""

@@ -195,7 +197,7 @@ async def test_update_with_json_attrs_not_dict(


 async def test_update_with_json_attrs_bad_json(
-    caplog: LogCaptureFixture, hass: HomeAssistant
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
 ) -> None:
     """Test attributes when the return value is invalid JSON."""

@@ -213,7 +215,7 @@ async def test_update_with_json_attrs_bad_json(


 async def test_update_with_missing_json_attrs(
-    caplog: LogCaptureFixture, hass: HomeAssistant
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
 ) -> None:
     """Test attributes when an expected key is missing."""

@@ -236,7 +238,7 @@ async def test_update_with_missing_json_attrs(


 async def test_update_with_unnecessary_json_attrs(
-    caplog: LogCaptureFixture, hass: HomeAssistant
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
 ) -> None:
     """Test attributes when an expected key is missing."""


@@ -8,7 +8,7 @@ import tempfile
 from typing import Any
 from unittest.mock import patch

-from pytest import LogCaptureFixture
+import pytest

 from homeassistant import setup
 from homeassistant.components.switch import DOMAIN, SCAN_INTERVAL
@@ -281,7 +281,7 @@ async def test_name_is_set_correctly(hass: HomeAssistant) -> None:


 async def test_switch_command_state_fail(
-    caplog: LogCaptureFixture, hass: HomeAssistant
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
 ) -> None:
     """Test that switch failures are handled correctly."""
     await setup_test_entity(
@@ -318,7 +318,7 @@ async def test_switch_command_state_fail(


 async def test_switch_command_state_code_exceptions(
-    caplog: LogCaptureFixture, hass: HomeAssistant
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
 ) -> None:
     """Test that switch state code exceptions are handled correctly."""

@@ -351,7 +351,7 @@ async def test_switch_command_state_code_exceptions(


 async def test_switch_command_state_value_exceptions(
-    caplog: LogCaptureFixture, hass: HomeAssistant
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
 ) -> None:
     """Test that switch state value exceptions are handled correctly."""

@@ -384,7 +384,9 @@ async def test_switch_command_state_value_exceptions(
     assert "Error trying to exec command" in caplog.text


-async def test_no_switches(caplog: LogCaptureFixture, hass: HomeAssistant) -> None:
+async def test_no_switches(
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
+) -> None:
     """Test with no switches."""

     await setup_test_entity(hass, {})
@@ -426,7 +428,9 @@ async def test_unique_id(hass: HomeAssistant) -> None:
     )


-async def test_command_failure(caplog: LogCaptureFixture, hass: HomeAssistant) -> None:
+async def test_command_failure(
+    caplog: pytest.LogCaptureFixture, hass: HomeAssistant
+) -> None:
     """Test command failure."""

     await setup_test_entity(

@@ -4,7 +4,7 @@ from __future__ import annotations
 from datetime import datetime, timedelta
 from unittest.mock import patch

-from pytest import fixture
+import pytest

 from homeassistant.components.directv.media_player import (
     ATTR_MEDIA_CURRENTLY_RECORDING,
@@ -62,7 +62,7 @@ STANDBY_ENTITY_ID = f"{MP_DOMAIN}.standby_client"
 UNAVAILABLE_ENTITY_ID = f"{MP_DOMAIN}.unavailable_client"


-@fixture
+@pytest.fixture
 def mock_now() -> datetime:
     """Fixture for dtutil.now."""
     return dt_util.utcnow()

@@ -1,18 +1,18 @@
 """Define fixtures available for all tests."""
 from unittest.mock import MagicMock, patch

 from pyezviz import EzvizClient
 from pyezviz.test_cam_rtsp import TestRTSPAuth
-from pytest import fixture
+import pytest


-@fixture(autouse=True)
+@pytest.fixture(autouse=True)
 def mock_ffmpeg(hass):
     """Mock ffmpeg is loaded."""
     hass.config.components.add("ffmpeg")


-@fixture
+@pytest.fixture
 def ezviz_test_rtsp_config_flow(hass):
     """Mock the EzvizApi for easier testing."""
     with patch.object(TestRTSPAuth, "main", return_value=True), patch(
@@ -29,7 +29,7 @@ def ezviz_test_rtsp_config_flow(hass):
     yield mock_ezviz_test_rtsp


-@fixture
+@pytest.fixture
 def ezviz_config_flow(hass):
     """Mock the EzvizAPI for easier config flow testing."""
     with patch.object(EzvizClient, "login", return_value=True), patch(

@@ -3,7 +3,7 @@ from __future__ import annotations

 from unittest.mock import patch

-from pytest import fixture
+import pytest

 from homeassistant import config_entries
 from homeassistant.components.fjaraskupan.const import DOMAIN
@@ -13,7 +13,7 @@ from homeassistant.data_entry_flow import FlowResultType
 from . import COOKER_SERVICE_INFO


-@fixture(name="mock_setup_entry", autouse=True)
+@pytest.fixture(name="mock_setup_entry", autouse=True)
 async def fixture_mock_setup_entry(hass):
     """Fixture for config entry."""


@@ -1,5 +1,5 @@
 """Test the GitHub init file."""
-from pytest import LogCaptureFixture
+import pytest

 from homeassistant.components.github import CONF_REPOSITORIES
 from homeassistant.core import HomeAssistant
@@ -15,7 +15,7 @@ async def test_device_registry_cleanup(
     hass: HomeAssistant,
     mock_config_entry: MockConfigEntry,
     aioclient_mock: AiohttpClientMocker,
-    caplog: LogCaptureFixture,
+    caplog: pytest.LogCaptureFixture,
 ) -> None:
     """Test that we remove untracked repositories from the decvice registry."""
     mock_config_entry.options = {CONF_REPOSITORIES: ["home-assistant/core"]}

@@ -2,7 +2,7 @@

 from unittest.mock import patch

-from pytest import raises
+import pytest

 from homeassistant.components import google_assistant as ga
 from homeassistant.core import Context, HomeAssistant
@@ -43,7 +43,7 @@ async def test_sync_button(hass: HomeAssistant, hass_owner_user: MockUser):
     )
     mock_sync_entities.assert_called_once_with(hass_owner_user.id)

-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         mock_sync_entities.return_value = 400

         await hass.services.async_call(

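The hunk above swaps the bare raises import for the module-qualified call. A
self-contained sketch of the same pattern (the error class here is a stand-in
for homeassistant.exceptions.HomeAssistantError):

    import pytest

    class HomeAssistantError(Exception):
        """Stand-in exception for this sketch."""

    def sync_entities():
        raise HomeAssistantError("sync failed")

    def test_sync_raises():
        # pytest.raises passes only if the block raises the given exception type.
        with pytest.raises(HomeAssistantError):
            sync_entities()
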
@@ -4,7 +4,7 @@ import time
 from unittest.mock import patch

 from httplib2 import Response
-from pytest import fixture
+import pytest

 from homeassistant.components.application_credentials import (
     ClientCredential,
@@ -33,13 +33,13 @@ TITLE = "example@gmail.com"
 TOKEN = "homeassistant.components.google_mail.api.config_entry_oauth2_flow.OAuth2Session.async_ensure_token_valid"


-@fixture(name="scopes")
+@pytest.fixture(name="scopes")
 def mock_scopes() -> list[str]:
     """Fixture to set the scopes present in the OAuth token."""
     return SCOPES


-@fixture(autouse=True)
+@pytest.fixture(autouse=True)
 async def setup_credentials(hass: HomeAssistant) -> None:
     """Fixture to setup credentials."""
     assert await async_setup_component(hass, "application_credentials", {})
@@ -51,13 +51,13 @@ async def setup_credentials(hass: HomeAssistant) -> None:
     )


-@fixture(name="expires_at")
+@pytest.fixture(name="expires_at")
 def mock_expires_at() -> int:
     """Fixture to set the oauth token expiration time."""
     return time.time() + 3600


-@fixture(name="config_entry")
+@pytest.fixture(name="config_entry")
 def mock_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry:
     """Create Google Mail entry in Home Assistant."""
     return MockConfigEntry(
@@ -76,7 +76,7 @@ def mock_config_entry(expires_at: int, scopes: list[str]) -> MockConfigEntry:
     )


-@fixture(autouse=True)
+@pytest.fixture(autouse=True)
 def mock_connection(aioclient_mock: AiohttpClientMocker) -> None:
     """Mock Google Mail connection."""
     aioclient_mock.post(
@@ -90,7 +90,7 @@ def mock_connection(aioclient_mock: AiohttpClientMocker) -> None:
     )


-@fixture(name="setup_integration")
+@pytest.fixture(name="setup_integration")
 async def mock_setup_integration(
     hass: HomeAssistant, config_entry: MockConfigEntry
 ) -> Generator[ComponentSetup, None, None]:

@@ -6,7 +6,6 @@ from typing import Any
 from unittest.mock import patch

 import pytest
-from pytest import LogCaptureFixture

 from homeassistant import config as hass_config
 from homeassistant.components.group import DOMAIN as GROUP_DOMAIN
@@ -240,7 +239,7 @@ async def test_reload(hass: HomeAssistant) -> None:


 async def test_sensor_incorrect_state(
-    hass: HomeAssistant, caplog: LogCaptureFixture
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test the min sensor."""
     config = {

@@ -2,7 +2,7 @@
 from unittest.mock import patch

 from homewizard_energy.errors import DisabledError, RequestError
-from pytest import raises
+import pytest

 from homeassistant.components import button
 from homeassistant.const import ATTR_FRIENDLY_NAME, STATE_UNKNOWN
@@ -123,7 +123,7 @@ async def test_identify_press_catches_requesterror(

     assert api.identify.call_count == 0

-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             button.DOMAIN,
             button.SERVICE_PRESS,
@@ -161,7 +161,7 @@ async def test_identify_press_catches_disablederror(

     assert api.identify.call_count == 0

-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             button.DOMAIN,
             button.SERVICE_PRESS,

@@ -4,7 +4,7 @@ from unittest.mock import AsyncMock, patch

 from homewizard_energy.errors import DisabledError, RequestError
 from homewizard_energy.models import State
-from pytest import raises
+import pytest

 from homeassistant.components import number
 from homeassistant.components.number import ATTR_VALUE, SERVICE_SET_VALUE
@@ -170,7 +170,7 @@ async def test_brightness_level_set_catches_requesterror(
     await hass.async_block_till_done()

     # Set level halfway
-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             number.DOMAIN,
             SERVICE_SET_VALUE,
@@ -206,7 +206,7 @@ async def test_brightness_level_set_catches_disablederror(
     await hass.async_block_till_done()

     # Set level halfway
-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             number.DOMAIN,
             SERVICE_SET_VALUE,
@@ -244,7 +244,7 @@ async def test_brightness_level_set_catches_invalid_value(
     await hass.config_entries.async_setup(entry.entry_id)
     await hass.async_block_till_done()

-    with raises(ValueError):
+    with pytest.raises(ValueError):
         await hass.services.async_call(
             number.DOMAIN,
             SERVICE_SET_VALUE,
@@ -257,7 +257,7 @@ async def test_brightness_level_set_catches_invalid_value(
             blocking=True,
         )

-    with raises(ValueError):
+    with pytest.raises(ValueError):
         await hass.services.async_call(
             number.DOMAIN,
             SERVICE_SET_VALUE,

@@ -4,7 +4,7 @@ from unittest.mock import AsyncMock, patch

 from homewizard_energy.errors import DisabledError, RequestError
 from homewizard_energy.models import State, System
-from pytest import raises
+import pytest

 from homeassistant.components import switch
 from homeassistant.components.switch import SwitchDeviceClass
@@ -375,7 +375,7 @@ async def test_switch_handles_requesterror(
     await hass.async_block_till_done()

     # Power on toggle
-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             switch.DOMAIN,
             SERVICE_TURN_ON,
@@ -383,7 +383,7 @@ async def test_switch_handles_requesterror(
             blocking=True,
         )

-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             switch.DOMAIN,
             SERVICE_TURN_OFF,
@@ -392,7 +392,7 @@ async def test_switch_handles_requesterror(
         )

     # Switch Lock toggle
-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             switch.DOMAIN,
             SERVICE_TURN_ON,
@@ -400,7 +400,7 @@ async def test_switch_handles_requesterror(
             blocking=True,
         )

-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             switch.DOMAIN,
             SERVICE_TURN_OFF,
@@ -409,7 +409,7 @@ async def test_switch_handles_requesterror(
         )

     # Disable Cloud toggle
-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             switch.DOMAIN,
             SERVICE_TURN_ON,
@@ -417,7 +417,7 @@ async def test_switch_handles_requesterror(
             blocking=True,
         )

-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             switch.DOMAIN,
             SERVICE_TURN_OFF,
@@ -452,7 +452,7 @@ async def test_switch_handles_disablederror(
     await hass.async_block_till_done()

     # Power on toggle
-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             switch.DOMAIN,
             SERVICE_TURN_ON,
@@ -460,7 +460,7 @@ async def test_switch_handles_disablederror(
             blocking=True,
         )

-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             switch.DOMAIN,
             SERVICE_TURN_OFF,
@@ -469,7 +469,7 @@ async def test_switch_handles_disablederror(
         )

     # Switch Lock toggle
-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             switch.DOMAIN,
             SERVICE_TURN_ON,
@@ -477,7 +477,7 @@ async def test_switch_handles_disablederror(
             blocking=True,
         )

-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             switch.DOMAIN,
             SERVICE_TURN_OFF,
@@ -486,7 +486,7 @@ async def test_switch_handles_disablederror(
         )

     # Disable Cloud toggle
-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             switch.DOMAIN,
             SERVICE_TURN_ON,
@@ -494,7 +494,7 @@ async def test_switch_handles_disablederror(
             blocking=True,
         )

-    with raises(HomeAssistantError):
+    with pytest.raises(HomeAssistantError):
         await hass.services.async_call(
             switch.DOMAIN,
             SERVICE_TURN_OFF,

@@ -3,7 +3,6 @@ from unittest.mock import MagicMock, patch

 import pykulersky
 import pytest
-from pytest import approx

 from homeassistant.components.kulersky.const import (
     DATA_ADDRESSES,
@@ -219,10 +218,10 @@ async def test_light_update(hass, mock_light):
         ATTR_SUPPORTED_FEATURES: 0,
         ATTR_COLOR_MODE: ColorMode.RGBW,
         ATTR_BRIGHTNESS: 255,
-        ATTR_HS_COLOR: (approx(212.571), approx(68.627)),
+        ATTR_HS_COLOR: (pytest.approx(212.571), pytest.approx(68.627)),
         ATTR_RGB_COLOR: (80, 160, 255),
         ATTR_RGBW_COLOR: (80, 160, 255, 0),
-        ATTR_XY_COLOR: (approx(0.17), approx(0.193)),
+        ATTR_XY_COLOR: (pytest.approx(0.17), pytest.approx(0.193)),
     }

     mock_light.get_color.side_effect = None
@@ -239,10 +238,10 @@ async def test_light_update(hass, mock_light):
         ATTR_SUPPORTED_FEATURES: 0,
         ATTR_COLOR_MODE: ColorMode.RGBW,
         ATTR_BRIGHTNESS: 255,
-        ATTR_HS_COLOR: (approx(199.701), approx(26.275)),
+        ATTR_HS_COLOR: (pytest.approx(199.701), pytest.approx(26.275)),
         ATTR_RGB_COLOR: (188, 233, 255),
         ATTR_RGBW_COLOR: (80, 160, 200, 255),
-        ATTR_XY_COLOR: (approx(0.259), approx(0.306)),
+        ATTR_XY_COLOR: (pytest.approx(0.259), pytest.approx(0.306)),
     }

     mock_light.get_color.side_effect = None
@@ -259,8 +258,8 @@ async def test_light_update(hass, mock_light):
         ATTR_SUPPORTED_FEATURES: 0,
         ATTR_COLOR_MODE: ColorMode.RGBW,
         ATTR_BRIGHTNESS: 240,
-        ATTR_HS_COLOR: (approx(200.0), approx(27.059)),
+        ATTR_HS_COLOR: (pytest.approx(200.0), pytest.approx(27.059)),
         ATTR_RGB_COLOR: (186, 232, 255),
         ATTR_RGBW_COLOR: (85, 170, 212, 255),
-        ATTR_XY_COLOR: (approx(0.257), approx(0.305)),
+        ATTR_XY_COLOR: (pytest.approx(0.257), pytest.approx(0.305)),
     }

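The kulersky and recorder hunks replace the bare approx import with the
module-qualified pytest.approx. A minimal sketch of why approx is used at all
in these assertions (values are illustrative):

    import pytest

    def test_hs_color_tuple():
        hs_color = (212.57142857142858, 68.62745098039215)
        # Exact float equality is brittle; pytest.approx compares each
        # element within a tolerance (here an absolute one of 1e-3).
        assert hs_color == (
            pytest.approx(212.571, abs=1e-3),
            pytest.approx(68.627, abs=1e-3),
        )
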
@@ -2,7 +2,7 @@
 import statistics
 from unittest.mock import patch

-from pytest import LogCaptureFixture
+import pytest

 from homeassistant import config as hass_config
 from homeassistant.components.min_max.const import DOMAIN
@@ -442,7 +442,7 @@ async def test_reload(hass: HomeAssistant) -> None:


 async def test_sensor_incorrect_state(
-    hass: HomeAssistant, caplog: LogCaptureFixture
+    hass: HomeAssistant, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test the min sensor."""
     config = {

@@ -10,7 +10,6 @@ from nibe.exceptions import (
     CoilWriteException,
 )
 import pytest
-from pytest import fixture

 from homeassistant import config_entries
 from homeassistant.components.nibe_heatpump import DOMAIN
@@ -33,7 +32,7 @@ MOCK_FLOW_MODBUS_USERDATA = {
 }


-@fixture(autouse=True, name="mock_setup_entry")
+@pytest.fixture(autouse=True, name="mock_setup_entry")
 async def fixture_mock_setup():
     """Make sure we never actually run setup."""
     with patch(

@@ -1,12 +1,12 @@
 """Define fixtures available for all tests."""
 from unittest.mock import MagicMock, patch

-from pytest import fixture
+import pytest

 from . import MOCK_HISTORY, MOCK_STATUS, MOCK_VERSION


-@fixture
+@pytest.fixture
 def nzbget_api(hass):
     """Mock NZBGetApi for easier testing."""
     with patch("homeassistant.components.nzbget.coordinator.NZBGetAPI") as mock_api:

@@ -2,7 +2,7 @@
 from unittest.mock import AsyncMock, MagicMock, patch

 from open_meteo import OpenMeteoConnectionError
-from pytest import LogCaptureFixture
+import pytest

 from homeassistant.components.open_meteo.const import DOMAIN
 from homeassistant.config_entries import ConfigEntryState
@@ -51,7 +51,7 @@ async def test_config_entry_not_ready(

 async def test_config_entry_zone_removed(
     hass: HomeAssistant,
-    caplog: LogCaptureFixture,
+    caplog: pytest.LogCaptureFixture,
 ) -> None:
     """Test the Open-Meteo configuration entry not ready."""
     mock_config_entry = MockConfigEntry(

@@ -1,7 +1,7 @@
 """Test the PECO Outage Counter config flow."""
 from unittest.mock import patch

-from pytest import raises
+import pytest
 from voluptuous.error import MultipleInvalid

 from homeassistant import config_entries
@@ -45,7 +45,7 @@ async def test_invalid_county(hass: HomeAssistant) -> None:
     assert result["type"] == FlowResultType.FORM
     assert result["errors"] is None

-    with raises(MultipleInvalid):
+    with pytest.raises(MultipleInvalid):
         await hass.config_entries.flow.async_configure(
             result["flow_id"],
             {

@@ -2,7 +2,7 @@
 from unittest.mock import create_autospec, patch

 from haphilipsjs import PhilipsTV
-from pytest import fixture
+import pytest

 from homeassistant.components.philips_js.const import DOMAIN

@@ -11,12 +11,12 @@ from . import MOCK_CONFIG, MOCK_ENTITY_ID, MOCK_NAME, MOCK_SERIAL_NO, MOCK_SYSTE
 from tests.common import MockConfigEntry, mock_device_registry


-@fixture(autouse=True)
+@pytest.fixture(autouse=True)
 async def setup_notification(hass):
     """Configure notification system."""


-@fixture(autouse=True)
+@pytest.fixture(autouse=True)
 def mock_tv():
     """Disable component actual use."""
     tv = create_autospec(PhilipsTV)
@@ -42,7 +42,7 @@ def mock_tv():
     yield tv


-@fixture
+@pytest.fixture
 async def mock_config_entry(hass):
     """Get standard player."""
     config_entry = MockConfigEntry(
@@ -52,13 +52,13 @@ async def mock_config_entry(hass):
     return config_entry


-@fixture
+@pytest.fixture
 def mock_device_reg(hass):
     """Get standard device."""
     return mock_device_registry(hass)


-@fixture
+@pytest.fixture
 async def mock_entity(hass, mock_device_reg, mock_config_entry):
     """Get standard player."""
     assert await hass.config_entries.async_setup(mock_config_entry.entry_id)
@@ -66,7 +66,7 @@ async def mock_entity(hass, mock_device_reg, mock_config_entry):
     return MOCK_ENTITY_ID


-@fixture
+@pytest.fixture
 def mock_device(hass, mock_device_reg, mock_entity, mock_config_entry):
     """Get standard device."""
     return mock_device_reg.async_get_or_create(

@@ -2,7 +2,7 @@
 from unittest.mock import ANY, patch

 from haphilipsjs import PairingFailure
-from pytest import fixture
+import pytest

 from homeassistant import config_entries, data_entry_flow
 from homeassistant.components.philips_js.const import CONF_ALLOW_NOTIFY, DOMAIN
@@ -19,7 +19,7 @@ from . import (
 from tests.common import MockConfigEntry


-@fixture(autouse=True, name="mock_setup_entry")
+@pytest.fixture(autouse=True, name="mock_setup_entry")
 def mock_setup_entry_fixture():
     """Disable component setup."""
     with patch(
@@ -30,7 +30,7 @@ def mock_setup_entry_fixture():
     yield mock_setup_entry


-@fixture
+@pytest.fixture
 async def mock_tv_pairable(mock_tv):
     """Return a mock tv that is pariable."""
     mock_tv.system = MOCK_SYSTEM_UNPAIRED

@@ -3,7 +3,7 @@
 from unittest.mock import AsyncMock, patch

 from pyprosegur.installation import Status
-from pytest import fixture, mark
+import pytest

 from homeassistant.components.alarm_control_panel import DOMAIN as ALARM_DOMAIN
 from homeassistant.const import (
@@ -25,7 +25,7 @@ from .common import CONTRACT, setup_platform
 PROSEGUR_ALARM_ENTITY = f"alarm_control_panel.contract_{CONTRACT}"


-@fixture
+@pytest.fixture
 def mock_auth():
     """Setups authentication."""

@@ -33,7 +33,7 @@ def mock_auth():
     yield


-@fixture(params=list(Status))
+@pytest.fixture(params=list(Status))
 def mock_status(request):
     """Mock the status of the alarm."""

@@ -88,7 +88,7 @@ async def test_connection_error(hass, mock_auth):
     assert state.state == STATE_UNAVAILABLE


-@mark.parametrize(
+@pytest.mark.parametrize(
     "code, alarm_service, alarm_state",
     [
         (Status.ARMED, SERVICE_ALARM_ARM_AWAY, STATE_ALARM_ARMED_AWAY),

@@ -1,7 +1,7 @@
 """Test the Prosegur Alarm config flow."""
 from unittest.mock import MagicMock, patch

-from pytest import mark
+import pytest

 from homeassistant import config_entries
 from homeassistant.components.prosegur.config_flow import CannotConnect, InvalidAuth
@@ -179,7 +179,7 @@ async def test_reauth_flow(hass):
     assert len(mock_setup_entry.mock_calls) == 1


-@mark.parametrize(
+@pytest.mark.parametrize(
     "exception, base_error",
     [
         (CannotConnect, "cannot_connect"),

@@ -1,14 +1,14 @@
 """Tests prosegur setup."""
 from unittest.mock import MagicMock, patch

-from pytest import mark
+import pytest

 from homeassistant.components.prosegur import DOMAIN

 from tests.common import MockConfigEntry


-@mark.parametrize(
+@pytest.mark.parametrize(
     "error",
     [
         ConnectionRefusedError,

@@ -6,7 +6,6 @@ import sys
 from unittest.mock import ANY, DEFAULT, MagicMock, patch, sentinel

 import pytest
-from pytest import approx
 from sqlalchemy import create_engine
 from sqlalchemy.exc import OperationalError
 from sqlalchemy.orm import Session
@@ -82,9 +81,9 @@ def test_compile_hourly_statistics(hass_recorder):
     expected_1 = {
         "start": process_timestamp(zero),
         "end": process_timestamp(zero + timedelta(minutes=5)),
-        "mean": approx(14.915254237288135),
-        "min": approx(10.0),
-        "max": approx(20.0),
+        "mean": pytest.approx(14.915254237288135),
+        "min": pytest.approx(10.0),
+        "max": pytest.approx(20.0),
         "last_reset": None,
         "state": None,
         "sum": None,
@@ -92,9 +91,9 @@ def test_compile_hourly_statistics(hass_recorder):
     expected_2 = {
         "start": process_timestamp(four),
         "end": process_timestamp(four + timedelta(minutes=5)),
-        "mean": approx(20.0),
-        "min": approx(20.0),
-        "max": approx(20.0),
+        "mean": pytest.approx(20.0),
+        "min": pytest.approx(20.0),
+        "max": pytest.approx(20.0),
         "last_reset": None,
         "state": None,
         "sum": None,
@@ -336,9 +335,9 @@ def test_rename_entity(hass_recorder):
     expected_1 = {
         "start": process_timestamp(zero),
         "end": process_timestamp(zero + timedelta(minutes=5)),
-        "mean": approx(14.915254237288135),
-        "min": approx(10.0),
-        "max": approx(20.0),
+        "mean": pytest.approx(14.915254237288135),
+        "min": pytest.approx(10.0),
+        "max": pytest.approx(20.0),
         "last_reset": None,
         "state": None,
         "sum": None,
@@ -402,9 +401,9 @@ def test_rename_entity_collision(hass_recorder, caplog):
     expected_1 = {
         "start": process_timestamp(zero),
         "end": process_timestamp(zero + timedelta(minutes=5)),
-        "mean": approx(14.915254237288135),
-        "min": approx(10.0),
-        "max": approx(20.0),
+        "mean": pytest.approx(14.915254237288135),
+        "min": pytest.approx(10.0),
+        "max": pytest.approx(20.0),
         "last_reset": None,
         "state": None,
         "sum": None,
@@ -539,8 +538,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": last_reset_utc,
-                "state": approx(0.0),
-                "sum": approx(2.0),
+                "state": pytest.approx(0.0),
+                "sum": pytest.approx(2.0),
             },
             {
                 "start": process_timestamp(period2),
@@ -549,8 +548,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": last_reset_utc,
-                "state": approx(1.0),
-                "sum": approx(3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3.0),
             },
         ]
     }
@@ -597,8 +596,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": last_reset_utc,
-                "state": approx(1.0),
-                "sum": approx(3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3.0),
             },
         ]
     }
@@ -622,8 +621,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(5.0),
-                "sum": approx(6.0),
+                "state": pytest.approx(5.0),
+                "sum": pytest.approx(6.0),
             },
             {
                 "start": process_timestamp(period2),
@@ -632,8 +631,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": last_reset_utc,
-                "state": approx(1.0),
-                "sum": approx(3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3.0),
             },
         ]
     }
@@ -684,12 +683,12 @@ async def test_import_statistics(
             {
                 "start": process_timestamp(period1),
                 "end": process_timestamp(period1 + timedelta(hours=1)),
-                "max": approx(1.0),
-                "mean": approx(2.0),
-                "min": approx(3.0),
+                "max": pytest.approx(1.0),
+                "mean": pytest.approx(2.0),
+                "min": pytest.approx(3.0),
                 "last_reset": last_reset_utc,
-                "state": approx(4.0),
-                "sum": approx(5.0),
+                "state": pytest.approx(4.0),
+                "sum": pytest.approx(5.0),
             },
             {
                 "start": process_timestamp(period2),
@@ -698,8 +697,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": last_reset_utc,
-                "state": approx(1.0),
-                "sum": approx(3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3.0),
             },
         ]
     }
@@ -725,12 +724,12 @@ async def test_import_statistics(
             {
                 "start": process_timestamp(period1),
                 "end": process_timestamp(period1 + timedelta(hours=1)),
-                "max": approx(1.0),
-                "mean": approx(2.0),
-                "min": approx(3.0),
+                "max": pytest.approx(1.0),
+                "mean": pytest.approx(2.0),
+                "min": pytest.approx(3.0),
                 "last_reset": last_reset_utc,
-                "state": approx(4.0),
-                "sum": approx(5.0),
+                "state": pytest.approx(4.0),
+                "sum": pytest.approx(5.0),
             },
             {
                 "start": process_timestamp(period2),
@@ -739,8 +738,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": last_reset_utc,
-                "state": approx(1.0),
-                "sum": approx(1000 * 1000 + 3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(1000 * 1000 + 3.0),
             },
         ]
     }
@@ -1122,8 +1121,8 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0),
-                "sum": approx(3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3.0),
             },
             {
                 "start": oct_start,
@@ -1132,8 +1131,8 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(3.0),
-                "sum": approx(5.0),
+                "state": pytest.approx(3.0),
+                "sum": pytest.approx(5.0),
             },
         ]
     }
@@ -1157,8 +1156,8 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0),
-                "sum": approx(3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3.0),
             },
             {
                 "start": oct_start,
@@ -1167,8 +1166,8 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(3.0),
-                "sum": approx(5.0),
+                "state": pytest.approx(3.0),
+                "sum": pytest.approx(5.0),
             },
         ]
     }

@@ -8,7 +8,6 @@ from unittest.mock import ANY, patch

 from freezegun import freeze_time
 import pytest
-from pytest import approx

 from homeassistant.components import recorder
 from homeassistant.components.recorder.db_schema import Statistics, StatisticsShortTerm
@@ -169,9 +168,9 @@ async def test_statistics_during_period(recorder_mock, hass, hass_ws_client):
             {
                 "start": int(now.timestamp() * 1000),
                 "end": int((now + timedelta(minutes=5)).timestamp() * 1000),
-                "mean": approx(10),
-                "min": approx(10),
-                "max": approx(10),
+                "mean": pytest.approx(10),
+                "min": pytest.approx(10),
+                "max": pytest.approx(10),
                 "last_reset": None,
                 "state": None,
                 "sum": None,
@@ -196,7 +195,7 @@ async def test_statistics_during_period(recorder_mock, hass, hass_ws_client):
             {
                 "start": int(now.timestamp() * 1000),
                 "end": int((now + timedelta(minutes=5)).timestamp() * 1000),
-                "mean": approx(10),
+                "mean": pytest.approx(10),
             }
         ]
    }
@@ -918,9 +917,9 @@ async def test_statistics_during_period_unit_conversion(
             {
                 "start": int(now.timestamp() * 1000),
                 "end": int((now + timedelta(minutes=5)).timestamp() * 1000),
-                "mean": approx(value),
-                "min": approx(value),
-                "max": approx(value),
+                "mean": pytest.approx(value),
+                "min": pytest.approx(value),
+                "max": pytest.approx(value),
                 "last_reset": None,
                 "state": None,
                 "sum": None,
@@ -946,9 +945,9 @@ async def test_statistics_during_period_unit_conversion(
             {
                 "start": int(now.timestamp() * 1000),
                 "end": int((now + timedelta(minutes=5)).timestamp() * 1000),
-                "mean": approx(converted_value),
-                "min": approx(converted_value),
-                "max": approx(converted_value),
+                "mean": pytest.approx(converted_value),
+                "min": pytest.approx(converted_value),
+                "max": pytest.approx(converted_value),
                 "last_reset": None,
                 "state": None,
                 "sum": None,
@@ -1014,8 +1013,8 @@ async def test_sum_statistics_during_period_unit_conversion(
                 "min": None,
                 "max": None,
                 "last_reset": None,
-                "state": approx(value),
-                "sum": approx(value),
+                "state": pytest.approx(value),
+                "sum": pytest.approx(value),
             }
         ]
    }
@@ -1042,8 +1041,8 @@ async def test_sum_statistics_during_period_unit_conversion(
                 "min": None,
                 "max": None,
                 "last_reset": None,
-                "state": approx(converted_value),
-                "sum": approx(converted_value),
+                "state": pytest.approx(converted_value),
+                "sum": pytest.approx(converted_value),
             }
         ]
    }
@@ -1169,9 +1168,9 @@ async def test_statistics_during_period_in_the_past(
             {
                 "start": int(stats_start.timestamp() * 1000),
                 "end": int((stats_start + timedelta(minutes=5)).timestamp() * 1000),
-                "mean": approx(10),
-                "min": approx(10),
-                "max": approx(10),
+                "mean": pytest.approx(10),
+                "min": pytest.approx(10),
+                "max": pytest.approx(10),
                 "last_reset": None,
                 "state": None,
                 "sum": None,
@@ -1196,9 +1195,9 @@ async def test_statistics_during_period_in_the_past(
             {
                 "start": int(start_of_day.timestamp() * 1000),
                 "end": int((start_of_day + timedelta(days=1)).timestamp() * 1000),
-                "mean": approx(10),
-                "min": approx(10),
-                "max": approx(10),
+                "mean": pytest.approx(10),
+                "min": pytest.approx(10),
+                "max": pytest.approx(10),
                 "last_reset": None,
                 "state": None,
                 "sum": None,
@@ -1604,9 +1603,9 @@ async def test_clear_statistics(recorder_mock, hass, hass_ws_client):
             {
                 "start": int(now.timestamp() * 1000),
                 "end": int((now + timedelta(minutes=5)).timestamp() * 1000),
-                "mean": approx(value),
-                "min": approx(value),
-                "max": approx(value),
+                "mean": pytest.approx(value),
+                "min": pytest.approx(value),
+                "max": pytest.approx(value),
                 "last_reset": None,
                 "state": None,
                 "sum": None,
@@ -1616,9 +1615,9 @@ async def test_clear_statistics(recorder_mock, hass, hass_ws_client):
             {
                 "start": int(now.timestamp() * 1000),
                 "end": int((now + timedelta(minutes=5)).timestamp() * 1000),
-                "mean": approx(value * 2),
-                "min": approx(value * 2),
-                "max": approx(value * 2),
+                "mean": pytest.approx(value * 2),
+                "min": pytest.approx(value * 2),
+                "max": pytest.approx(value * 2),
                 "last_reset": None,
                 "state": None,
                 "sum": None,
@@ -1628,9 +1627,9 @@ async def test_clear_statistics(recorder_mock, hass, hass_ws_client):
             {
                 "start": int(now.timestamp() * 1000),
                 "end": int((now + timedelta(minutes=5)).timestamp() * 1000),
-                "mean": approx(value * 3),
-                "min": approx(value * 3),
-                "max": approx(value * 3),
+                "mean": pytest.approx(value * 3),
+                "min": pytest.approx(value * 3),
+                "max": pytest.approx(value * 3),
                 "last_reset": None,
                 "state": None,
                 "sum": None,
@@ -2434,8 +2433,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(0.0),
-                "sum": approx(2.0),
+                "state": pytest.approx(0.0),
+                "sum": pytest.approx(2.0),
             },
             {
                 "start": period2,
@@ -2444,8 +2443,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0),
-                "sum": approx(3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3.0),
             },
         ]
    }
@@ -2492,8 +2491,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0),
-                "sum": approx(3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3.0),
             },
         ]
    }
@@ -2529,8 +2528,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(5.0),
-                "sum": approx(6.0),
+                "state": pytest.approx(5.0),
+                "sum": pytest.approx(6.0),
             },
             {
                 "start": period2,
@@ -2539,8 +2538,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0),
-                "sum": approx(3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3.0),
             },
         ]
    }
@@ -2575,12 +2574,12 @@ async def test_import_statistics(
             {
                 "start": period1,
                 "end": period1 + timedelta(hours=1),
-                "max": approx(1.0),
-                "mean": approx(2.0),
-                "min": approx(3.0),
+                "max": pytest.approx(1.0),
+                "mean": pytest.approx(2.0),
+                "min": pytest.approx(3.0),
                 "last_reset": None,
-                "state": approx(4.0),
-                "sum": approx(5.0),
+                "state": pytest.approx(4.0),
+                "sum": pytest.approx(5.0),
             },
             {
                 "start": period2,
@@ -2589,8 +2588,8 @@ async def test_import_statistics(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0),
-                "sum": approx(3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3.0),
             },
         ]
    }
@@ -2661,8 +2660,8 @@ async def test_adjust_sum_statistics_energy(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(0.0),
-                "sum": approx(2.0),
+                "state": pytest.approx(0.0),
+                "sum": pytest.approx(2.0),
             },
             {
                 "start": period2,
@@ -2671,8 +2670,8 @@ async def test_adjust_sum_statistics_energy(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0),
-                "sum": approx(3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3.0),
             },
         ]
    }
@@ -2725,12 +2724,12 @@ async def test_adjust_sum_statistics_energy(
             {
                 "start": period1,
                 "end": period1 + timedelta(hours=1),
-                "max": approx(None),
-                "mean": approx(None),
-                "min": approx(None),
+                "max": pytest.approx(None),
+                "mean": pytest.approx(None),
+                "min": pytest.approx(None),
                 "last_reset": None,
-                "state": approx(0.0),
-                "sum": approx(2.0),
+                "state": pytest.approx(0.0),
+                "sum": pytest.approx(2.0),
             },
             {
                 "start": period2,
@@ -2739,8 +2738,8 @@ async def test_adjust_sum_statistics_energy(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0),
-                "sum": approx(1003.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(1003.0),
             },
         ]
    }
@@ -2766,12 +2765,12 @@ async def test_adjust_sum_statistics_energy(
             {
                 "start": period1,
                 "end": period1 + timedelta(hours=1),
-                "max": approx(None),
-                "mean": approx(None),
-                "min": approx(None),
+                "max": pytest.approx(None),
+                "mean": pytest.approx(None),
+                "min": pytest.approx(None),
                 "last_reset": None,
-                "state": approx(0.0),
-                "sum": approx(2.0),
+                "state": pytest.approx(0.0),
+                "sum": pytest.approx(2.0),
             },
             {
                 "start": period2,
@@ -2780,8 +2779,8 @@ async def test_adjust_sum_statistics_energy(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0),
-                "sum": approx(3003.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3003.0),
             },
         ]
    }
@@ -2852,8 +2851,8 @@ async def test_adjust_sum_statistics_gas(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(0.0),
-                "sum": approx(2.0),
+                "state": pytest.approx(0.0),
+                "sum": pytest.approx(2.0),
             },
             {
                 "start": period2,
@@ -2862,8 +2861,8 @@ async def test_adjust_sum_statistics_gas(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0),
-                "sum": approx(3.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(3.0),
             },
         ]
    }
@@ -2916,12 +2915,12 @@ async def test_adjust_sum_statistics_gas(
             {
                 "start": period1,
                 "end": period1 + timedelta(hours=1),
-                "max": approx(None),
-                "mean": approx(None),
-                "min": approx(None),
+                "max": pytest.approx(None),
+                "mean": pytest.approx(None),
+                "min": pytest.approx(None),
                 "last_reset": None,
-                "state": approx(0.0),
-                "sum": approx(2.0),
+                "state": pytest.approx(0.0),
+                "sum": pytest.approx(2.0),
             },
             {
                 "start": period2,
@@ -2930,8 +2929,8 @@ async def test_adjust_sum_statistics_gas(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0),
-                "sum": approx(1003.0),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(1003.0),
             },
         ]
    }
@@ -2957,12 +2956,12 @@ async def test_adjust_sum_statistics_gas(
             {
                 "start": period1,
                 "end": period1 + timedelta(hours=1),
-                "max": approx(None),
-                "mean": approx(None),
-                "min": approx(None),
+                "max": pytest.approx(None),
+                "mean": pytest.approx(None),
+                "min": pytest.approx(None),
                 "last_reset": None,
-                "state": approx(0.0),
-                "sum": approx(2.0),
+                "state": pytest.approx(0.0),
+                "sum": pytest.approx(2.0),
             },
             {
                 "start": period2,
@@ -2971,8 +2970,8 @@ async def test_adjust_sum_statistics_gas(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0),
-                "sum": approx(1004),
+                "state": pytest.approx(1.0),
+                "sum": pytest.approx(1004),
             },
         ]
    }
@@ -3058,8 +3057,8 @@ async def test_adjust_sum_statistics_errors(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(0.0 * factor),
-                "sum": approx(2.0 * factor),
+                "state": pytest.approx(0.0 * factor),
+                "sum": pytest.approx(2.0 * factor),
             },
             {
                 "start": period2,
@@ -3068,8 +3067,8 @@ async def test_adjust_sum_statistics_errors(
                 "mean": None,
                 "min": None,
                 "last_reset": None,
-                "state": approx(1.0 * factor),
-                "sum": approx(3.0 * factor),
+                "state": pytest.approx(1.0 * factor),
+                "sum": pytest.approx(3.0 * factor),
             },
         ]
    }

@@ -1,7 +1,7 @@
 """Fixtures for Risco tests."""
 from unittest.mock import MagicMock, PropertyMock, patch

-from pytest import fixture
+import pytest

 from homeassistant.components.risco.const import DOMAIN, TYPE_LOCAL
 from homeassistant.const import (
@@ -30,7 +30,7 @@ TEST_LOCAL_CONFIG = {
 }


-@fixture
+@pytest.fixture
 def two_zone_cloud():
     """Fixture to mock alarm with two zones."""
     zone_mocks = {0: zone_mock(), 1: zone_mock()}
@@ -58,7 +58,7 @@ def two_zone_cloud():
     yield zone_mocks


-@fixture
+@pytest.fixture
 def two_zone_local():
     """Fixture to mock alarm with two zones."""
     zone_mocks = {0: zone_mock(), 1: zone_mock()}
@@ -92,19 +92,19 @@ def two_zone_local():
     yield zone_mocks


-@fixture
+@pytest.fixture
 def options():
     """Fixture for default (empty) options."""
     return {}


-@fixture
+@pytest.fixture
 def events():
     """Fixture for default (empty) events."""
     return []


-@fixture
+@pytest.fixture
 def cloud_config_entry(hass, options):
     """Fixture for a cloud config entry."""
     config_entry = MockConfigEntry(
@@ -117,7 +117,7 @@ def cloud_config_entry(hass, options):
     return config_entry


-@fixture
+@pytest.fixture
 def login_with_error(exception):
     """Fixture to simulate error on login."""
     with patch(
@@ -127,7 +127,7 @@ def login_with_error(exception):
     yield


-@fixture
+@pytest.fixture
 async def setup_risco_cloud(hass, cloud_config_entry, events):
     """Set up a Risco integration for testing."""
     with patch(
@@ -151,7 +151,7 @@ async def setup_risco_cloud(hass, cloud_config_entry, events):
     yield cloud_config_entry


-@fixture
+@pytest.fixture
 def local_config_entry(hass, options):
     """Fixture for a local config entry."""
     config_entry = MockConfigEntry(
@@ -161,7 +161,7 @@ def local_config_entry(hass, options):
     return config_entry


-@fixture
+@pytest.fixture
 def connect_with_error(exception):
     """Fixture to simulate error on connect."""
     with patch(
@@ -171,7 +171,7 @@ def connect_with_error(exception):
     yield


-@fixture
+@pytest.fixture
 async def setup_risco_local(hass, local_config_entry):
     """Set up a local Risco integration for testing."""
     with patch(

@@ -5,7 +5,7 @@ from datetime import timedelta
 from unittest.mock import AsyncMock, patch

 from pysensibo.model import SensiboData
-from pytest import MonkeyPatch
+import pytest

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
@@ -18,7 +18,7 @@ async def test_binary_sensor(
     hass: HomeAssistant,
     entity_registry_enabled_by_default: AsyncMock,
     load_int: ConfigEntry,
-    monkeypatch: MonkeyPatch,
+    monkeypatch: pytest.MonkeyPatch,
     get_data: SensiboData,
 ) -> None:
     """Test the Sensibo binary sensor."""

@@ -6,7 +6,7 @@ from unittest.mock import patch

 from freezegun.api import FrozenDateTimeFactory
 from pysensibo.model import SensiboData
-from pytest import MonkeyPatch, raises
+import pytest

 from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS
 from homeassistant.config_entries import ConfigEntry
@@ -21,7 +21,7 @@ from tests.common import async_fire_time_changed
 async def test_button(
     hass: HomeAssistant,
     load_int: ConfigEntry,
-    monkeypatch: MonkeyPatch,
+    monkeypatch: pytest.MonkeyPatch,
     get_data: SensiboData,
     freezer: FrozenDateTimeFactory,
 ) -> None:
@@ -84,7 +84,7 @@ async def test_button(
 async def test_button_failure(
     hass: HomeAssistant,
     load_int: ConfigEntry,
-    monkeypatch: MonkeyPatch,
+    monkeypatch: pytest.MonkeyPatch,
     get_data: SensiboData,
 ) -> None:
     """Test the Sensibo button fails."""
@@ -98,7 +98,7 @@ async def test_button_failure(
         "homeassistant.components.sensibo.util.SensiboClient.async_reset_filter",
         return_value={"status": "failure"},
     ):
-        with raises(HomeAssistantError):
+        with pytest.raises(HomeAssistantError):
             await hass.services.async_call(
                 BUTTON_DOMAIN,
                 SERVICE_PRESS,

@@ -6,7 +6,6 @@ from unittest.mock import AsyncMock, patch

from pysensibo.model import SensiboData
import pytest
from pytest import MonkeyPatch

from homeassistant.components.number import (
ATTR_VALUE,
@@ -26,7 +25,7 @@ async def test_number(
hass: HomeAssistant,
entity_registry_enabled_by_default: AsyncMock,
load_int: ConfigEntry,
monkeypatch: MonkeyPatch,
monkeypatch: pytest.MonkeyPatch,
get_data: SensiboData,
) -> None:
"""Test the Sensibo number."""

@@ -6,7 +6,6 @@ from unittest.mock import patch

from pysensibo.model import SensiboData
import pytest
from pytest import MonkeyPatch

from homeassistant.components.select import (
ATTR_OPTION,
@@ -25,7 +24,7 @@ from tests.common import async_fire_time_changed
async def test_select(
hass: HomeAssistant,
load_int: ConfigEntry,
monkeypatch: MonkeyPatch,
monkeypatch: pytest.MonkeyPatch,
get_data: SensiboData,
) -> None:
"""Test the Sensibo select."""
@@ -54,7 +53,7 @@ async def test_select(
async def test_select_set_option(
hass: HomeAssistant,
load_int: ConfigEntry,
monkeypatch: MonkeyPatch,
monkeypatch: pytest.MonkeyPatch,
get_data: SensiboData,
) -> None:
"""Test the Sensibo select service."""

@@ -5,7 +5,7 @@ from datetime import timedelta
from unittest.mock import AsyncMock, patch

from pysensibo.model import SensiboData
from pytest import MonkeyPatch
import pytest

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
@@ -18,7 +18,7 @@ async def test_sensor(
hass: HomeAssistant,
entity_registry_enabled_by_default: AsyncMock,
load_int: ConfigEntry,
monkeypatch: MonkeyPatch,
monkeypatch: pytest.MonkeyPatch,
get_data: SensiboData,
) -> None:
"""Test the Sensibo sensor."""

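The `monkeypatch: pytest.MonkeyPatch` annotations above follow the same rule for type hints: the class is reachable as an attribute of the module (pytest 6.2 or later), so no separate name import is needed. A minimal illustrative sketch, with a made-up environment variable name:

    import os

    import pytest


    def test_reads_flag(monkeypatch: pytest.MonkeyPatch) -> None:
        # setenv is undone automatically when the test finishes.
        monkeypatch.setenv("MY_FLAG", "1")  # MY_FLAG is illustrative only
        assert os.environ["MY_FLAG"] == "1"
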
@@ -6,7 +6,6 @@ from unittest.mock import patch

from pysensibo.model import SensiboData
import pytest
from pytest import MonkeyPatch

from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.config_entries import ConfigEntry
@@ -27,7 +26,7 @@ from tests.common import async_fire_time_changed
async def test_switch_timer(
hass: HomeAssistant,
load_int: ConfigEntry,
monkeypatch: MonkeyPatch,
monkeypatch: pytest.MonkeyPatch,
get_data: SensiboData,
) -> None:
"""Test the Sensibo switch."""
@@ -107,7 +106,7 @@ async def test_switch_timer(
async def test_switch_pure_boost(
hass: HomeAssistant,
load_int: ConfigEntry,
monkeypatch: MonkeyPatch,
monkeypatch: pytest.MonkeyPatch,
get_data: SensiboData,
) -> None:
"""Test the Sensibo switch."""
@@ -183,7 +182,7 @@ async def test_switch_pure_boost(
async def test_switch_command_failure(
hass: HomeAssistant,
load_int: ConfigEntry,
monkeypatch: MonkeyPatch,
monkeypatch: pytest.MonkeyPatch,
get_data: SensiboData,
) -> None:
"""Test the Sensibo switch fails commands."""
@@ -228,7 +227,7 @@ async def test_switch_command_failure(
async def test_switch_climate_react(
hass: HomeAssistant,
load_int: ConfigEntry,
monkeypatch: MonkeyPatch,
monkeypatch: pytest.MonkeyPatch,
get_data: SensiboData,
) -> None:
"""Test the Sensibo switch for climate react."""
@@ -303,7 +302,7 @@ async def test_switch_climate_react(
async def test_switch_climate_react_no_data(
hass: HomeAssistant,
load_int: ConfigEntry,
monkeypatch: MonkeyPatch,
monkeypatch: pytest.MonkeyPatch,
get_data: SensiboData,
) -> None:
"""Test the Sensibo switch for climate react."""

@@ -5,7 +5,7 @@ from datetime import timedelta
from unittest.mock import patch

from pysensibo.model import SensiboData
from pytest import MonkeyPatch
import pytest

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_OFF, STATE_ON
@@ -18,7 +18,7 @@ from tests.common import async_fire_time_changed
async def test_select(
hass: HomeAssistant,
load_int: ConfigEntry,
monkeypatch: MonkeyPatch,
monkeypatch: pytest.MonkeyPatch,
get_data: SensiboData,
) -> None:
"""Test the Sensibo update."""

@@ -6,7 +6,6 @@ from decimal import Decimal
from typing import Any

import pytest
from pytest import approx

from homeassistant.components.number import NumberDeviceClass
from homeassistant.components.sensor import (
@@ -1161,12 +1160,12 @@ async def test_unit_conversion_priority_suggested_unit_change(

# Registered entity -> Follow automatic unit conversion the first time the entity was seen
state = hass.states.get(entity0.entity_id)
assert float(state.state) == approx(float(original_value))
assert float(state.state) == pytest.approx(float(original_value))
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == original_unit

# Registered entity -> Follow suggested unit the first time the entity was seen
state = hass.states.get(entity1.entity_id)
assert float(state.state) == approx(float(original_value))
assert float(state.state) == pytest.approx(float(original_value))
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == original_unit

@@ -1228,7 +1227,7 @@ async def test_unit_conversion_priority_legacy_conversion_removed(
await hass.async_block_till_done()

state = hass.states.get(entity0.entity_id)
assert float(state.state) == approx(float(original_value))
assert float(state.state) == pytest.approx(float(original_value))
assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == original_unit

@@ -6,7 +6,6 @@ from statistics import mean
from unittest.mock import patch

import pytest
from pytest import approx

from homeassistant import loader
from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, history
@@ -163,9 +162,9 @@ def test_compile_hourly_statistics(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -237,9 +236,9 @@ def test_compile_hourly_statistics_purged_state_changes(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -346,9 +345,9 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(13.050847),
"min": approx(-10.0),
"max": approx(30.0),
"mean": pytest.approx(13.050847),
"min": pytest.approx(-10.0),
"max": pytest.approx(30.0),
"last_reset": None,
"state": None,
"sum": None,
@@ -382,9 +381,9 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(13.050847),
"min": approx(-10.0),
"max": approx(30.0),
"mean": pytest.approx(13.050847),
"min": pytest.approx(-10.0),
"max": pytest.approx(30.0),
"last_reset": None,
"state": None,
"sum": None,
@@ -394,9 +393,9 @@ def test_compile_hourly_statistics_wrong_unit(hass_recorder, caplog, attributes)
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(13.050847),
"min": approx(-10.0),
"max": approx(30.0),
"mean": pytest.approx(13.050847),
"min": pytest.approx(-10.0),
"max": pytest.approx(30.0),
"last_reset": None,
"state": None,
"sum": None,
@@ -506,8 +505,8 @@ async def test_compile_hourly_sum_statistics_amount(
"mean": None,
"min": None,
"last_reset": process_timestamp(period0),
"state": approx(factor * seq[2]),
"sum": approx(factor * 10.0),
"state": pytest.approx(factor * seq[2]),
"sum": pytest.approx(factor * 10.0),
},
{
"start": process_timestamp(period1),
@@ -516,8 +515,8 @@ async def test_compile_hourly_sum_statistics_amount(
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"state": approx(factor * seq[5]),
"sum": approx(factor * 40.0),
"state": pytest.approx(factor * seq[5]),
"sum": pytest.approx(factor * 40.0),
},
{
"start": process_timestamp(period2),
@@ -526,8 +525,8 @@ async def test_compile_hourly_sum_statistics_amount(
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"state": approx(factor * seq[8]),
"sum": approx(factor * 70.0),
"state": pytest.approx(factor * seq[8]),
"sum": pytest.approx(factor * 70.0),
},
]
}
@@ -571,8 +570,8 @@ async def test_compile_hourly_sum_statistics_amount(
assert response["success"]
await async_wait_recording_done(hass)

expected_stats["sensor.test1"][1]["sum"] = approx(factor * 40.0 + 100)
expected_stats["sensor.test1"][2]["sum"] = approx(factor * 70.0 + 100)
expected_stats["sensor.test1"][1]["sum"] = pytest.approx(factor * 40.0 + 100)
expected_stats["sensor.test1"][2]["sum"] = pytest.approx(factor * 70.0 + 100)
stats = statistics_during_period(hass, period0, period="5minute")
assert stats == expected_stats

@@ -591,8 +590,8 @@ async def test_compile_hourly_sum_statistics_amount(
assert response["success"]
await async_wait_recording_done(hass)

expected_stats["sensor.test1"][1]["sum"] = approx(factor * 40.0 + 100)
expected_stats["sensor.test1"][2]["sum"] = approx(factor * 70.0 - 300)
expected_stats["sensor.test1"][1]["sum"] = pytest.approx(factor * 40.0 + 100)
expected_stats["sensor.test1"][2]["sum"] = pytest.approx(factor * 70.0 - 300)
stats = statistics_during_period(hass, period0, period="5minute")
assert stats == expected_stats

@@ -696,8 +695,8 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change(
"mean": None,
"min": None,
"last_reset": process_timestamp(dt_util.as_local(one)),
"state": approx(factor * seq[7]),
"sum": approx(factor * (sum(seq) - seq[0])),
"state": pytest.approx(factor * seq[7]),
"sum": pytest.approx(factor * (sum(seq) - seq[0])),
},
{
"start": process_timestamp(zero + timedelta(minutes=5)),
@@ -706,8 +705,8 @@ def test_compile_hourly_sum_statistics_amount_reset_every_state_change(
"mean": None,
"min": None,
"last_reset": process_timestamp(dt_util.as_local(two)),
"state": approx(factor * seq[7]),
"sum": approx(factor * (2 * sum(seq) - seq[0])),
"state": pytest.approx(factor * seq[7]),
"sum": pytest.approx(factor * (2 * sum(seq) - seq[0])),
},
]
}
@@ -793,8 +792,8 @@ def test_compile_hourly_sum_statistics_amount_invalid_last_reset(
"mean": None,
"min": None,
"last_reset": process_timestamp(dt_util.as_local(one)),
"state": approx(factor * seq[7]),
"sum": approx(factor * (sum(seq) - seq[0] - seq[3])),
"state": pytest.approx(factor * seq[7]),
"sum": pytest.approx(factor * (sum(seq) - seq[0] - seq[3])),
},
]
}
@@ -877,8 +876,10 @@ def test_compile_hourly_sum_statistics_nan_inf_state(
"mean": None,
"min": None,
"last_reset": process_timestamp(one),
"state": approx(factor * seq[7]),
"sum": approx(factor * (seq[2] + seq[3] + seq[4] + seq[6] + seq[7])),
"state": pytest.approx(factor * seq[7]),
"sum": pytest.approx(
factor * (seq[2] + seq[3] + seq[4] + seq[6] + seq[7])
),
},
]
}
@@ -1001,8 +1002,8 @@ def test_compile_hourly_sum_statistics_negative_state(
"mean": None,
"min": None,
"last_reset": None,
"state": approx(seq[7]),
"sum": approx(offset + 15),  # (20 - 15) + (10 - 0)
"state": pytest.approx(seq[7]),
"sum": pytest.approx(offset + 15),  # (20 - 15) + (10 - 0)
},
]
assert "Error while processing event StatisticsTask" not in caplog.text
@@ -1090,8 +1091,8 @@ def test_compile_hourly_sum_statistics_total_no_reset(
"mean": None,
"min": None,
"last_reset": None,
"state": approx(factor * seq[2]),
"sum": approx(factor * 10.0),
"state": pytest.approx(factor * seq[2]),
"sum": pytest.approx(factor * 10.0),
},
{
"start": process_timestamp(period1),
@@ -1100,8 +1101,8 @@ def test_compile_hourly_sum_statistics_total_no_reset(
"mean": None,
"min": None,
"last_reset": None,
"state": approx(factor * seq[5]),
"sum": approx(factor * 30.0),
"state": pytest.approx(factor * seq[5]),
"sum": pytest.approx(factor * 30.0),
},
{
"start": process_timestamp(period2),
@@ -1110,8 +1111,8 @@ def test_compile_hourly_sum_statistics_total_no_reset(
"mean": None,
"min": None,
"last_reset": None,
"state": approx(factor * seq[8]),
"sum": approx(factor * 60.0),
"state": pytest.approx(factor * seq[8]),
"sum": pytest.approx(factor * 60.0),
},
]
}
@@ -1190,8 +1191,8 @@ def test_compile_hourly_sum_statistics_total_increasing(
"mean": None,
"min": None,
"last_reset": None,
"state": approx(factor * seq[2]),
"sum": approx(factor * 10.0),
"state": pytest.approx(factor * seq[2]),
"sum": pytest.approx(factor * 10.0),
},
{
"start": process_timestamp(period1),
@@ -1200,8 +1201,8 @@ def test_compile_hourly_sum_statistics_total_increasing(
"mean": None,
"min": None,
"last_reset": None,
"state": approx(factor * seq[5]),
"sum": approx(factor * 50.0),
"state": pytest.approx(factor * seq[5]),
"sum": pytest.approx(factor * 50.0),
},
{
"start": process_timestamp(period2),
@@ -1210,8 +1211,8 @@ def test_compile_hourly_sum_statistics_total_increasing(
"mean": None,
"min": None,
"last_reset": None,
"state": approx(factor * seq[8]),
"sum": approx(factor * 80.0),
"state": pytest.approx(factor * seq[8]),
"sum": pytest.approx(factor * 80.0),
},
]
}
@@ -1301,8 +1302,8 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
"max": None,
"mean": None,
"min": None,
"state": approx(factor * seq[2]),
"sum": approx(factor * 10.0),
"state": pytest.approx(factor * seq[2]),
"sum": pytest.approx(factor * 10.0),
},
{
"last_reset": None,
@@ -1311,8 +1312,8 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
"max": None,
"mean": None,
"min": None,
"state": approx(factor * seq[5]),
"sum": approx(factor * 30.0),
"state": pytest.approx(factor * seq[5]),
"sum": pytest.approx(factor * 30.0),
},
{
"last_reset": None,
@@ -1321,8 +1322,8 @@ def test_compile_hourly_sum_statistics_total_increasing_small_dip(
"max": None,
"mean": None,
"min": None,
"state": approx(factor * seq[8]),
"sum": approx(factor * 60.0),
"state": pytest.approx(factor * seq[8]),
"sum": pytest.approx(factor * 60.0),
},
]
}
@@ -1393,8 +1394,8 @@ def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog):
"mean": None,
"min": None,
"last_reset": process_timestamp(period0),
"state": approx(20.0),
"sum": approx(10.0),
"state": pytest.approx(20.0),
"sum": pytest.approx(10.0),
},
{
"start": process_timestamp(period1),
@@ -1403,8 +1404,8 @@ def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog):
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"state": approx(40.0),
"sum": approx(40.0),
"state": pytest.approx(40.0),
"sum": pytest.approx(40.0),
},
{
"start": process_timestamp(period2),
@@ -1413,8 +1414,8 @@ def test_compile_hourly_energy_statistics_unsupported(hass_recorder, caplog):
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"state": approx(70.0),
"sum": approx(70.0),
"state": pytest.approx(70.0),
"sum": pytest.approx(70.0),
},
]
}
@@ -1503,8 +1504,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"mean": None,
"min": None,
"last_reset": process_timestamp(period0),
"state": approx(20.0),
"sum": approx(10.0),
"state": pytest.approx(20.0),
"sum": pytest.approx(10.0),
},
{
"start": process_timestamp(period1),
@@ -1513,8 +1514,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"state": approx(40.0),
"sum": approx(40.0),
"state": pytest.approx(40.0),
"sum": pytest.approx(40.0),
},
{
"start": process_timestamp(period2),
@@ -1523,8 +1524,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"state": approx(70.0),
"sum": approx(70.0),
"state": pytest.approx(70.0),
"sum": pytest.approx(70.0),
},
],
"sensor.test2": [
@@ -1535,8 +1536,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"mean": None,
"min": None,
"last_reset": process_timestamp(period0),
"state": approx(130.0),
"sum": approx(20.0),
"state": pytest.approx(130.0),
"sum": pytest.approx(20.0),
},
{
"start": process_timestamp(period1),
@@ -1545,8 +1546,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"state": approx(45.0),
"sum": approx(-65.0),
"state": pytest.approx(45.0),
"sum": pytest.approx(-65.0),
},
{
"start": process_timestamp(period2),
@@ -1555,8 +1556,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"state": approx(75.0),
"sum": approx(-35.0),
"state": pytest.approx(75.0),
"sum": pytest.approx(-35.0),
},
],
"sensor.test3": [
@@ -1567,8 +1568,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"mean": None,
"min": None,
"last_reset": process_timestamp(period0),
"state": approx(5.0),
"sum": approx(5.0),
"state": pytest.approx(5.0),
"sum": pytest.approx(5.0),
},
{
"start": process_timestamp(period1),
@@ -1577,8 +1578,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"state": approx(50.0),
"sum": approx(60.0),
"state": pytest.approx(50.0),
"sum": pytest.approx(60.0),
},
{
"start": process_timestamp(period2),
@@ -1587,8 +1588,8 @@ def test_compile_hourly_energy_statistics_multiple(hass_recorder, caplog):
"mean": None,
"min": None,
"last_reset": process_timestamp(four),
"state": approx(90.0),
"sum": approx(100.0),
"state": pytest.approx(90.0),
"sum": pytest.approx(100.0),
},
],
}
@@ -1644,9 +1645,9 @@ def test_compile_hourly_statistics_unchanged(
{
"start": process_timestamp(four),
"end": process_timestamp(four + timedelta(minutes=5)),
"mean": approx(value),
"min": approx(value),
"max": approx(value),
"mean": pytest.approx(value),
"min": pytest.approx(value),
"max": pytest.approx(value),
"last_reset": None,
"state": None,
"sum": None,
@@ -1676,9 +1677,9 @@ def test_compile_hourly_statistics_partially_unavailable(hass_recorder, caplog):
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(21.1864406779661),
"min": approx(10.0),
"max": approx(25.0),
"mean": pytest.approx(21.1864406779661),
"min": pytest.approx(10.0),
"max": pytest.approx(25.0),
"last_reset": None,
"state": None,
"sum": None,
@@ -1745,9 +1746,9 @@ def test_compile_hourly_statistics_unavailable(
{
"start": process_timestamp(four),
"end": process_timestamp(four + timedelta(minutes=5)),
"mean": approx(value),
"min": approx(value),
"max": approx(value),
"mean": pytest.approx(value),
"min": pytest.approx(value),
"max": pytest.approx(value),
"last_reset": None,
"state": None,
"sum": None,
@@ -1967,9 +1968,9 @@ def test_compile_hourly_statistics_changing_units_1(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2002,9 +2003,9 @@ def test_compile_hourly_statistics_changing_units_1(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2145,9 +2146,9 @@ def test_compile_hourly_statistics_changing_units_3(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2180,9 +2181,9 @@ def test_compile_hourly_statistics_changing_units_3(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2253,9 +2254,9 @@ def test_compile_hourly_statistics_convert_units_1(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2296,9 +2297,9 @@ def test_compile_hourly_statistics_convert_units_1(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean * factor),
"min": approx(min * factor),
"max": approx(max * factor),
"mean": pytest.approx(mean * factor),
"min": pytest.approx(min * factor),
"max": pytest.approx(max * factor),
"last_reset": None,
"state": None,
"sum": None,
@@ -2306,9 +2307,9 @@ def test_compile_hourly_statistics_convert_units_1(
{
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2385,9 +2386,9 @@ def test_compile_hourly_statistics_equivalent_units_1(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2416,9 +2417,9 @@ def test_compile_hourly_statistics_equivalent_units_1(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2426,9 +2427,9 @@ def test_compile_hourly_statistics_equivalent_units_1(
{
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"mean": approx(mean2),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean2),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2500,9 +2501,9 @@ def test_compile_hourly_statistics_equivalent_units_2(
{
"start": process_timestamp(zero + timedelta(seconds=30 * 5)),
"end": process_timestamp(zero + timedelta(seconds=30 * 15)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2569,9 +2570,9 @@ def test_compile_hourly_statistics_changing_device_class_1(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean1),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean1),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2614,9 +2615,9 @@ def test_compile_hourly_statistics_changing_device_class_1(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean1),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean1),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2624,9 +2625,9 @@ def test_compile_hourly_statistics_changing_device_class_1(
{
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"mean": approx(mean2),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean2),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2669,9 +2670,9 @@ def test_compile_hourly_statistics_changing_device_class_1(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean1),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean1),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2679,9 +2680,9 @@ def test_compile_hourly_statistics_changing_device_class_1(
{
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"mean": approx(mean2),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean2),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2689,9 +2690,9 @@ def test_compile_hourly_statistics_changing_device_class_1(
{
"start": process_timestamp(zero + timedelta(minutes=20)),
"end": process_timestamp(zero + timedelta(minutes=25)),
"mean": approx(mean2),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean2),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2759,9 +2760,9 @@ def test_compile_hourly_statistics_changing_device_class_2(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2804,9 +2805,9 @@ def test_compile_hourly_statistics_changing_device_class_2(
{
"start": process_timestamp(zero),
"end": process_timestamp(zero + timedelta(minutes=5)),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2814,9 +2815,9 @@ def test_compile_hourly_statistics_changing_device_class_2(
{
"start": process_timestamp(zero + timedelta(minutes=10)),
"end": process_timestamp(zero + timedelta(minutes=15)),
"mean": approx(mean2),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean2),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2933,9 +2934,9 @@ def test_compile_hourly_statistics_changing_state_class(
{
"start": process_timestamp(period0),
"end": process_timestamp(period0_end),
"mean": approx(mean),
"min": approx(min),
"max": approx(max),
"mean": pytest.approx(mean),
"min": pytest.approx(min),
"max": pytest.approx(max),
"last_reset": None,
"state": None,
"sum": None,
@@ -2947,8 +2948,8 @@ def test_compile_hourly_statistics_changing_state_class(
"min": None,
"max": None,
"last_reset": None,
"state": approx(30.0),
"sum": approx(30.0),
"state": pytest.approx(30.0),
"sum": pytest.approx(30.0),
},
]
}
@@ -3183,9 +3184,9 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
{
"start": process_timestamp(start),
"end": process_timestamp(end),
"mean": approx(expected_average),
"min": approx(expected_minimum),
"max": approx(expected_maximum),
"mean": pytest.approx(expected_average),
"min": pytest.approx(expected_minimum),
"max": pytest.approx(expected_maximum),
"last_reset": None,
"state": expected_state,
"sum": expected_sum,
@@ -3240,9 +3241,9 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
{
"start": process_timestamp(start),
"end": process_timestamp(end),
"mean": approx(expected_average),
"min": approx(expected_minimum),
"max": approx(expected_maximum),
"mean": pytest.approx(expected_average),
"min": pytest.approx(expected_minimum),
"max": pytest.approx(expected_maximum),
"last_reset": None,
"state": expected_state,
"sum": expected_sum,
@@ -3297,9 +3298,9 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
{
"start": process_timestamp(start),
"end": process_timestamp(end),
"mean": approx(expected_average),
"min": approx(expected_minimum),
"max": approx(expected_maximum),
"mean": pytest.approx(expected_average),
"min": pytest.approx(expected_minimum),
"max": pytest.approx(expected_maximum),
"last_reset": None,
"state": expected_state,
"sum": expected_sum,
@@ -3354,9 +3355,9 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
{
"start": process_timestamp(start),
"end": process_timestamp(end),
"mean": approx(expected_average),
"min": approx(expected_minimum),
"max": approx(expected_maximum),
"mean": pytest.approx(expected_average),
"min": pytest.approx(expected_minimum),
"max": pytest.approx(expected_maximum),
"last_reset": None,
"state": expected_state,
"sum": expected_sum,

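The bulk edits above swap the bare `approx` name for `pytest.approx`, which compares floats within a tolerance (relative 1e-6 by default, or an explicit `rel=`). A standalone sketch, not taken from the PR:

    import pytest


    def test_mean_is_approximate() -> None:
        values = [13.0, 13.1]
        # Exact equality on floats is fragile; pytest.approx absorbs rounding error.
        assert sum(values) / len(values) == pytest.approx(13.05)
        assert 13.1 == pytest.approx(13.0, rel=0.01)  # within 1% tolerance
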
@@ -3,12 +3,12 @@
from unittest.mock import AsyncMock, patch

from aioskybell import Skybell, SkybellDevice
from pytest import fixture
import pytest

from . import USER_ID


@fixture(autouse=True)
@pytest.fixture(autouse=True)
def skybell_mock():
"""Fixture for our skybell tests."""
mocked_skybell_device = AsyncMock(spec=SkybellDevice)

@@ -2,7 +2,7 @@
from unittest.mock import patch

from aioskybell import exceptions
from pytest import fixture
import pytest

from homeassistant import config_entries
from homeassistant.components.skybell.const import DOMAIN
@@ -16,7 +16,7 @@ from . import CONF_CONFIG_FLOW, PASSWORD, USER_ID
from tests.common import MockConfigEntry


@fixture(autouse=True)
@pytest.fixture(autouse=True)
def setup_entry() -> None:
"""Make sure component doesn't initialize."""
with patch(

@@ -1,7 +1,7 @@
"""Test Subaru locks."""
from unittest.mock import patch

from pytest import raises
import pytest
from voluptuous.error import MultipleInvalid

from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN
@@ -51,7 +51,7 @@ async def test_unlock_cmd(hass, ev_entry):

async def test_lock_cmd_fails(hass, ev_entry):
"""Test subaru lock request that initiates but fails."""
with patch(MOCK_API_LOCK, return_value=False) as mock_lock, raises(
with patch(MOCK_API_LOCK, return_value=False) as mock_lock, pytest.raises(
HomeAssistantError
):
await hass.services.async_call(
@@ -76,7 +76,7 @@ async def test_unlock_specific_door(hass, ev_entry):

async def test_unlock_specific_door_invalid(hass, ev_entry):
"""Test subaru unlock specific door function."""
with patch(MOCK_API_UNLOCK) as mock_unlock, raises(MultipleInvalid):
with patch(MOCK_API_UNLOCK) as mock_unlock, pytest.raises(MultipleInvalid):
await hass.services.async_call(
SUBARU_DOMAIN,
SERVICE_UNLOCK_SPECIFIC_DOOR,

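The same rule applies to the context manager: `pytest.raises` is referenced through the module. An illustrative standalone sketch:

    import pytest


    def divide(a: float, b: float) -> float:
        return a / b


    def test_divide_by_zero() -> None:
        # The with-block passes only if the expected exception is raised inside it.
        with pytest.raises(ZeroDivisionError):
            divide(1.0, 0.0)
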
@@ -3,7 +3,7 @@ from datetime import datetime, timedelta
from unittest.mock import patch

from freezegun import freeze_time
from pytest import mark
import pytest

import homeassistant.components.sun as sun
from homeassistant.const import EVENT_STATE_CHANGED
@@ -178,7 +178,7 @@ async def test_norway_in_june(hass):
assert state.state == sun.STATE_ABOVE_HORIZON


@mark.skip
@pytest.mark.skip
async def test_state_change_count(hass):
"""Count the number of state change events in a location."""
# Skipped because it's a bit slow. Has been validated with

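Marks follow the same convention: `@pytest.mark.skip` reads unambiguously where a bare `@mark.skip` does not. Illustrative only:

    import pytest


    @pytest.mark.skip(reason="illustrative placeholder; deliberately skipped")
    def test_expensive_scenario() -> None:
        ...
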
@@ -4,7 +4,7 @@ from __future__ import annotations
from datetime import timedelta
from unittest.mock import patch

from pytest import MonkeyPatch
import pytest
from pytrafikverket.trafikverket_ferry import FerryStop

from homeassistant.config_entries import ConfigEntry
@@ -17,7 +17,7 @@ from tests.common import async_fire_time_changed
async def test_sensor(
hass: HomeAssistant,
load_int: ConfigEntry,
monkeypatch: MonkeyPatch,
monkeypatch: pytest.MonkeyPatch,
get_ferries: list[FerryStop],
) -> None:
"""Test the Trafikverket Ferry sensor."""

@@ -2,7 +2,6 @@
from unittest.mock import patch

import pytest
from pytest import LogCaptureFixture
from pyuptimerobot import UptimeRobotAuthenticationException, UptimeRobotException

from homeassistant import config_entries
@@ -103,7 +102,9 @@ async def test_form_exception_thrown(hass: HomeAssistant, exception, error_key)
assert result2["errors"]["base"] == error_key


async def test_form_api_error(hass: HomeAssistant, caplog: LogCaptureFixture) -> None:
async def test_form_api_error(
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
"""Test we handle unexpected error."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}

@@ -1,7 +1,7 @@
"""Test the UptimeRobot init."""
from unittest.mock import patch

from pytest import LogCaptureFixture
import pytest
from pyuptimerobot import UptimeRobotAuthenticationException, UptimeRobotException

from homeassistant import config_entries
@@ -28,7 +28,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed


async def test_reauthentication_trigger_in_setup(
hass: HomeAssistant, caplog: LogCaptureFixture
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
):
"""Test reauthentication trigger."""
mock_config_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA)
@@ -61,7 +61,7 @@ async def test_reauthentication_trigger_in_setup(


async def test_reauthentication_trigger_key_read_only(
hass: HomeAssistant, caplog: LogCaptureFixture
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
):
"""Test reauthentication trigger."""
mock_config_entry = MockConfigEntry(
@@ -94,7 +94,7 @@ async def test_reauthentication_trigger_key_read_only(


async def test_reauthentication_trigger_after_setup(
hass: HomeAssistant, caplog: LogCaptureFixture
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
):
"""Test reauthentication trigger."""
mock_config_entry = await setup_uptimerobot_integration(hass)
@@ -144,7 +144,7 @@ async def test_integration_reload(hass: HomeAssistant):
assert hass.states.get(UPTIMEROBOT_BINARY_SENSOR_TEST_ENTITY).state == STATE_ON


async def test_update_errors(hass: HomeAssistant, caplog: LogCaptureFixture):
async def test_update_errors(hass: HomeAssistant, caplog: pytest.LogCaptureFixture):
"""Test errors during updates."""
await setup_uptimerobot_integration(hass)

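`pytest.LogCaptureFixture` is likewise available as a module attribute for annotating the built-in `caplog` fixture. A minimal sketch with an invented logger name and message:

    import logging

    import pytest


    def test_warning_is_captured(caplog: pytest.LogCaptureFixture) -> None:
        logging.getLogger("demo").warning("auth failed")  # names are illustrative
        # caplog collects records emitted during the test.
        assert "auth failed" in caplog.text
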
@@ -7,7 +7,6 @@ from typing import Any
from unittest.mock import call, patch

import pytest
from pytest import raises
from pyvizio.api.apps import AppConfig
from pyvizio.const import (
APPS,
@@ -658,10 +657,10 @@ async def test_setup_with_apps_additional_apps_config(

def test_invalid_apps_config(hass: HomeAssistant):
"""Test that schema validation fails on certain conditions."""
with raises(vol.Invalid):
with pytest.raises(vol.Invalid):
vol.Schema(vol.All(VIZIO_SCHEMA, validate_apps))(MOCK_TV_APPS_FAILURE)

with raises(vol.Invalid):
with pytest.raises(vol.Invalid):
vol.Schema(vol.All(VIZIO_SCHEMA, validate_apps))(MOCK_SPEAKER_APPS_FAILURE)

@@ -2,7 +2,6 @@
from datetime import datetime

import pytest
from pytest import approx

from homeassistant.components.weather import (
ATTR_CONDITION_SUNNY,
@@ -164,12 +163,12 @@ async def test_temperature(
forecast = state.attributes[ATTR_FORECAST][0]

expected = state_value
assert float(state.attributes[ATTR_WEATHER_TEMPERATURE]) == approx(
assert float(state.attributes[ATTR_WEATHER_TEMPERATURE]) == pytest.approx(
expected, rel=0.1
)
assert state.attributes[ATTR_WEATHER_TEMPERATURE_UNIT] == state_unit
assert float(forecast[ATTR_FORECAST_TEMP]) == approx(expected, rel=0.1)
assert float(forecast[ATTR_FORECAST_TEMP_LOW]) == approx(expected, rel=0.1)
assert float(forecast[ATTR_FORECAST_TEMP]) == pytest.approx(expected, rel=0.1)
assert float(forecast[ATTR_FORECAST_TEMP_LOW]) == pytest.approx(expected, rel=0.1)


@pytest.mark.parametrize("native_unit", (None,))
@@ -200,12 +199,12 @@ async def test_temperature_no_unit(
forecast = state.attributes[ATTR_FORECAST][0]

expected = state_value
assert float(state.attributes[ATTR_WEATHER_TEMPERATURE]) == approx(
assert float(state.attributes[ATTR_WEATHER_TEMPERATURE]) == pytest.approx(
expected, rel=0.1
)
assert state.attributes[ATTR_WEATHER_TEMPERATURE_UNIT] == state_unit
assert float(forecast[ATTR_FORECAST_TEMP]) == approx(expected, rel=0.1)
assert float(forecast[ATTR_FORECAST_TEMP_LOW]) == approx(expected, rel=0.1)
assert float(forecast[ATTR_FORECAST_TEMP]) == pytest.approx(expected, rel=0.1)
assert float(forecast[ATTR_FORECAST_TEMP_LOW]) == pytest.approx(expected, rel=0.1)


@pytest.mark.parametrize("native_unit", (UnitOfPressure.INHG, UnitOfPressure.INHG))
@@ -232,8 +231,10 @@ async def test_pressure(
forecast = state.attributes[ATTR_FORECAST][0]

expected = state_value
assert float(state.attributes[ATTR_WEATHER_PRESSURE]) == approx(expected, rel=1e-2)
assert float(forecast[ATTR_FORECAST_PRESSURE]) == approx(expected, rel=1e-2)
assert float(state.attributes[ATTR_WEATHER_PRESSURE]) == pytest.approx(
expected, rel=1e-2
)
assert float(forecast[ATTR_FORECAST_PRESSURE]) == pytest.approx(expected, rel=1e-2)


@pytest.mark.parametrize("native_unit", (None,))
@@ -260,8 +261,10 @@ async def test_pressure_no_unit(
forecast = state.attributes[ATTR_FORECAST][0]

expected = state_value
assert float(state.attributes[ATTR_WEATHER_PRESSURE]) == approx(expected, rel=1e-2)
assert float(forecast[ATTR_FORECAST_PRESSURE]) == approx(expected, rel=1e-2)
assert float(state.attributes[ATTR_WEATHER_PRESSURE]) == pytest.approx(
expected, rel=1e-2
)
assert float(forecast[ATTR_FORECAST_PRESSURE]) == pytest.approx(expected, rel=1e-2)


@pytest.mark.parametrize(
@@ -299,10 +302,12 @@ async def test_wind_speed(
forecast = state.attributes[ATTR_FORECAST][0]

expected = state_value
assert float(state.attributes[ATTR_WEATHER_WIND_SPEED]) == approx(
assert float(state.attributes[ATTR_WEATHER_WIND_SPEED]) == pytest.approx(
expected, rel=1e-2
)
assert float(forecast[ATTR_FORECAST_WIND_SPEED]) == pytest.approx(
expected, rel=1e-2
)
assert float(forecast[ATTR_FORECAST_WIND_SPEED]) == approx(expected, rel=1e-2)


@pytest.mark.parametrize("native_unit", (None,))
@@ -333,10 +338,12 @@ async def test_wind_speed_no_unit(
forecast = state.attributes[ATTR_FORECAST][0]

expected = state_value
assert float(state.attributes[ATTR_WEATHER_WIND_SPEED]) == approx(
assert float(state.attributes[ATTR_WEATHER_WIND_SPEED]) == pytest.approx(
expected, rel=1e-2
)
assert float(forecast[ATTR_FORECAST_WIND_SPEED]) == pytest.approx(
expected, rel=1e-2
)
assert float(forecast[ATTR_FORECAST_WIND_SPEED]) == approx(expected, rel=1e-2)


@pytest.mark.parametrize("native_unit", (UnitOfLength.MILES, UnitOfLength.KILOMETERS))
@@ -365,7 +372,7 @@ async def test_visibility(

state = hass.states.get(entity0.entity_id)
expected = state_value
assert float(state.attributes[ATTR_WEATHER_VISIBILITY]) == approx(
assert float(state.attributes[ATTR_WEATHER_VISIBILITY]) == pytest.approx(
expected, rel=1e-2
)

@@ -396,7 +403,7 @@ async def test_visibility_no_unit(

state = hass.states.get(entity0.entity_id)
expected = state_value
assert float(state.attributes[ATTR_WEATHER_VISIBILITY]) == approx(
assert float(state.attributes[ATTR_WEATHER_VISIBILITY]) == pytest.approx(
expected, rel=1e-2
)

@@ -429,7 +436,9 @@ async def test_precipitation(
forecast = state.attributes[ATTR_FORECAST][0]

expected = state_value
assert float(forecast[ATTR_FORECAST_PRECIPITATION]) == approx(expected, rel=1e-2)
assert float(forecast[ATTR_FORECAST_PRECIPITATION]) == pytest.approx(
expected, rel=1e-2
)


@pytest.mark.parametrize("native_unit", (None,))
@@ -460,7 +469,9 @@ async def test_precipitation_no_unit(
forecast = state.attributes[ATTR_FORECAST][0]

expected = state_value
assert float(forecast[ATTR_FORECAST_PRECIPITATION]) == approx(expected, rel=1e-2)
assert float(forecast[ATTR_FORECAST_PRECIPITATION]) == pytest.approx(
expected, rel=1e-2
)


async def test_wind_bearing_and_ozone(
@@ -585,17 +596,19 @@ async def test_custom_units(hass: HomeAssistant, enable_custom_integrations) ->
ROUNDING_PRECISION,
)

assert float(state.attributes[ATTR_WEATHER_WIND_SPEED]) == approx(
assert float(state.attributes[ATTR_WEATHER_WIND_SPEED]) == pytest.approx(
expected_wind_speed
)
assert float(state.attributes[ATTR_WEATHER_TEMPERATURE]) == approx(
assert float(state.attributes[ATTR_WEATHER_TEMPERATURE]) == pytest.approx(
expected_temperature, rel=0.1
)
assert float(state.attributes[ATTR_WEATHER_PRESSURE]) == approx(expected_pressure)
assert float(state.attributes[ATTR_WEATHER_VISIBILITY]) == approx(
assert float(state.attributes[ATTR_WEATHER_PRESSURE]) == pytest.approx(
expected_pressure
)
assert float(state.attributes[ATTR_WEATHER_VISIBILITY]) == pytest.approx(
expected_visibility
)
assert float(forecast[ATTR_FORECAST_PRECIPITATION]) == approx(
assert float(forecast[ATTR_FORECAST_PRECIPITATION]) == pytest.approx(
expected_precipitation, rel=1e-2
)

@@ -681,42 +694,50 @@ async def test_backwards_compatibility(
state1 = hass.states.get(entity1.entity_id)
forecast1 = state1.attributes[ATTR_FORECAST][0]

assert float(state.attributes[ATTR_WEATHER_WIND_SPEED]) == approx(
assert float(state.attributes[ATTR_WEATHER_WIND_SPEED]) == pytest.approx(
wind_speed_value * 3.6
)
assert (
state.attributes[ATTR_WEATHER_WIND_SPEED_UNIT]
== UnitOfSpeed.KILOMETERS_PER_HOUR
)
assert float(state.attributes[ATTR_WEATHER_TEMPERATURE]) == approx(
assert float(state.attributes[ATTR_WEATHER_TEMPERATURE]) == pytest.approx(
temperature_value, rel=0.1
)
assert state.attributes[ATTR_WEATHER_TEMPERATURE_UNIT] == UnitOfTemperature.CELSIUS
assert float(state.attributes[ATTR_WEATHER_PRESSURE]) == approx(
assert float(state.attributes[ATTR_WEATHER_PRESSURE]) == pytest.approx(
pressure_value / 100
)
assert state.attributes[ATTR_WEATHER_PRESSURE_UNIT] == UnitOfPressure.HPA
assert float(state.attributes[ATTR_WEATHER_VISIBILITY]) == approx(visibility_value)
assert float(state.attributes[ATTR_WEATHER_VISIBILITY]) == pytest.approx(
visibility_value
)
assert state.attributes[ATTR_WEATHER_VISIBILITY_UNIT] == UnitOfLength.KILOMETERS
assert float(forecast[ATTR_FORECAST_PRECIPITATION]) == approx(
assert float(forecast[ATTR_FORECAST_PRECIPITATION]) == pytest.approx(
precipitation_value, rel=1e-2
)
assert state.attributes[ATTR_WEATHER_PRECIPITATION_UNIT] == UnitOfLength.MILLIMETERS

assert float(state1.attributes[ATTR_WEATHER_WIND_SPEED]) == approx(wind_speed_value)
assert float(state1.attributes[ATTR_WEATHER_WIND_SPEED]) == pytest.approx(
wind_speed_value
)
assert (
state1.attributes[ATTR_WEATHER_WIND_SPEED_UNIT]
== UnitOfSpeed.KILOMETERS_PER_HOUR
)
assert float(state1.attributes[ATTR_WEATHER_TEMPERATURE]) == approx(
assert float(state1.attributes[ATTR_WEATHER_TEMPERATURE]) == pytest.approx(
temperature_value, rel=0.1
)
assert state1.attributes[ATTR_WEATHER_TEMPERATURE_UNIT] == UnitOfTemperature.CELSIUS
assert float(state1.attributes[ATTR_WEATHER_PRESSURE]) == approx(pressure_value)
assert float(state1.attributes[ATTR_WEATHER_PRESSURE]) == pytest.approx(
pressure_value
)
assert state1.attributes[ATTR_WEATHER_PRESSURE_UNIT] == UnitOfPressure.HPA
assert float(state1.attributes[ATTR_WEATHER_VISIBILITY]) == approx(visibility_value)
assert float(state1.attributes[ATTR_WEATHER_VISIBILITY]) == pytest.approx(
visibility_value
)
assert state1.attributes[ATTR_WEATHER_VISIBILITY_UNIT] == UnitOfLength.KILOMETERS
assert float(forecast1[ATTR_FORECAST_PRECIPITATION]) == approx(
assert float(forecast1[ATTR_FORECAST_PRECIPITATION]) == pytest.approx(
precipitation_value, rel=1e-2
)
assert (
@@ -798,23 +819,25 @@ async def test_backwards_compatibility_convert_values(
assert state.attributes == {
ATTR_FORECAST: [
{
ATTR_FORECAST_PRECIPITATION: approx(expected_precipitation, rel=0.1),
ATTR_FORECAST_PRESSURE: approx(expected_pressure, rel=0.1),
ATTR_FORECAST_TEMP: approx(expected_temperature, rel=0.1),
ATTR_FORECAST_TEMP_LOW: approx(expected_temperature, rel=0.1),
ATTR_FORECAST_PRECIPITATION: pytest.approx(
expected_precipitation, rel=0.1
),
ATTR_FORECAST_PRESSURE: pytest.approx(expected_pressure, rel=0.1),
ATTR_FORECAST_TEMP: pytest.approx(expected_temperature, rel=0.1),
ATTR_FORECAST_TEMP_LOW: pytest.approx(expected_temperature, rel=0.1),
ATTR_FORECAST_WIND_BEARING: None,
ATTR_FORECAST_WIND_SPEED: approx(expected_wind_speed, rel=0.1),
ATTR_FORECAST_WIND_SPEED: pytest.approx(expected_wind_speed, rel=0.1),
}
],
ATTR_FRIENDLY_NAME: "Test",
ATTR_WEATHER_PRECIPITATION_UNIT: UnitOfLength.INCHES,
ATTR_WEATHER_PRESSURE: approx(expected_pressure, rel=0.1),
ATTR_WEATHER_PRESSURE: pytest.approx(expected_pressure, rel=0.1),
ATTR_WEATHER_PRESSURE_UNIT: UnitOfPressure.INHG,
ATTR_WEATHER_TEMPERATURE: approx(expected_temperature, rel=0.1),
ATTR_WEATHER_TEMPERATURE: pytest.approx(expected_temperature, rel=0.1),
ATTR_WEATHER_TEMPERATURE_UNIT: UnitOfTemperature.FAHRENHEIT,
ATTR_WEATHER_VISIBILITY: approx(expected_visibility, rel=0.1),
ATTR_WEATHER_VISIBILITY: pytest.approx(expected_visibility, rel=0.1),
ATTR_WEATHER_VISIBILITY_UNIT: UnitOfLength.MILES,
ATTR_WEATHER_WIND_SPEED: approx(expected_wind_speed, rel=0.1),
ATTR_WEATHER_WIND_SPEED: pytest.approx(expected_wind_speed, rel=0.1),
ATTR_WEATHER_WIND_SPEED_UNIT: UnitOfSpeed.MILES_PER_HOUR,
}