diff --git a/homeassistant/components/google_generative_ai_conversation/config_flow.py b/homeassistant/components/google_generative_ai_conversation/config_flow.py
index ef700d289c7..b373239665d 100644
--- a/homeassistant/components/google_generative_ai_conversation/config_flow.py
+++ b/homeassistant/components/google_generative_ai_conversation/config_flow.py
@@ -43,7 +43,6 @@ from .const import (
     CONF_TEMPERATURE,
     CONF_TOP_K,
     CONF_TOP_P,
-    DEFAULT_PROMPT,
     DOMAIN,
     RECOMMENDED_CHAT_MODEL,
     RECOMMENDED_HARM_BLOCK_THRESHOLD,
@@ -64,7 +63,7 @@ STEP_API_DATA_SCHEMA = vol.Schema(
 RECOMMENDED_OPTIONS = {
     CONF_RECOMMENDED: True,
     CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
-    CONF_PROMPT: DEFAULT_PROMPT,
+    CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
 }
 
 
@@ -224,7 +223,11 @@ async def google_generative_ai_config_option_schema(
     schema = {
         vol.Optional(
             CONF_PROMPT,
-            description={"suggested_value": options.get(CONF_PROMPT, DEFAULT_PROMPT)},
+            description={
+                "suggested_value": options.get(
+                    CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
+                )
+            },
         ): TemplateSelector(),
         vol.Optional(
             CONF_LLM_HASS_API,
diff --git a/homeassistant/components/google_generative_ai_conversation/const.py b/homeassistant/components/google_generative_ai_conversation/const.py
index a83ffed2d88..bd60e8d94c1 100644
--- a/homeassistant/components/google_generative_ai_conversation/const.py
+++ b/homeassistant/components/google_generative_ai_conversation/const.py
@@ -5,7 +5,6 @@ import logging
 DOMAIN = "google_generative_ai_conversation"
 LOGGER = logging.getLogger(__package__)
 CONF_PROMPT = "prompt"
-DEFAULT_PROMPT = "Answer in plain text. Keep it simple and to the point."
 
 CONF_RECOMMENDED = "recommended"
 CONF_CHAT_MODEL = "chat_model"
diff --git a/homeassistant/components/google_generative_ai_conversation/conversation.py b/homeassistant/components/google_generative_ai_conversation/conversation.py
index ed50ed69a02..d6f7981fc8c 100644
--- a/homeassistant/components/google_generative_ai_conversation/conversation.py
+++ b/homeassistant/components/google_generative_ai_conversation/conversation.py
@@ -32,7 +32,6 @@ from .const import (
     CONF_TEMPERATURE,
     CONF_TOP_K,
     CONF_TOP_P,
-    DEFAULT_PROMPT,
     DOMAIN,
     LOGGER,
     RECOMMENDED_CHAT_MODEL,
@@ -226,7 +225,10 @@ class GoogleGenerativeAIConversationEntity(
             prompt = "\n".join(
                 (
                     template.Template(
-                        self.entry.options.get(CONF_PROMPT, DEFAULT_PROMPT), self.hass
+                        self.entry.options.get(
+                            CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
+                        ),
+                        self.hass,
                     ).async_render(
                         {
                             "ha_name": self.hass.config.location_name,
diff --git a/homeassistant/components/openai_conversation/config_flow.py b/homeassistant/components/openai_conversation/config_flow.py
index 09b909b3d5e..9a2b1b6fa79 100644
--- a/homeassistant/components/openai_conversation/config_flow.py
+++ b/homeassistant/components/openai_conversation/config_flow.py
@@ -34,7 +34,6 @@ from .const import (
     CONF_RECOMMENDED,
     CONF_TEMPERATURE,
     CONF_TOP_P,
-    DEFAULT_PROMPT,
     DOMAIN,
     RECOMMENDED_CHAT_MODEL,
     RECOMMENDED_MAX_TOKENS,
@@ -53,7 +52,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
 RECOMMENDED_OPTIONS = {
     CONF_RECOMMENDED: True,
     CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
-    CONF_PROMPT: DEFAULT_PROMPT,
+    CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
 }
 
 
@@ -170,7 +169,11 @@ def openai_config_option_schema(
     schema = {
         vol.Optional(
             CONF_PROMPT,
-            description={"suggested_value": options.get(CONF_PROMPT, DEFAULT_PROMPT)},
+            description={
+                "suggested_value": options.get(
+                    CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
+                )
+            },
         ): TemplateSelector(),
         vol.Optional(
             CONF_LLM_HASS_API,
diff --git a/homeassistant/components/openai_conversation/const.py b/homeassistant/components/openai_conversation/const.py
index 995d80e02f1..f362f4278a1 100644
--- a/homeassistant/components/openai_conversation/const.py
+++ b/homeassistant/components/openai_conversation/const.py
@@ -7,7 +7,6 @@ LOGGER = logging.getLogger(__package__)
 
 CONF_RECOMMENDED = "recommended"
 CONF_PROMPT = "prompt"
-DEFAULT_PROMPT = """Answer in plain text. Keep it simple and to the point."""
 CONF_CHAT_MODEL = "chat_model"
 RECOMMENDED_CHAT_MODEL = "gpt-4o"
 CONF_MAX_TOKENS = "max_tokens"
diff --git a/homeassistant/components/openai_conversation/conversation.py b/homeassistant/components/openai_conversation/conversation.py
index eb2f0911a20..ab76d9cfb56 100644
--- a/homeassistant/components/openai_conversation/conversation.py
+++ b/homeassistant/components/openai_conversation/conversation.py
@@ -23,7 +23,6 @@ from .const import (
     CONF_PROMPT,
     CONF_TEMPERATURE,
     CONF_TOP_P,
-    DEFAULT_PROMPT,
     DOMAIN,
     LOGGER,
     RECOMMENDED_CHAT_MODEL,
@@ -143,7 +142,8 @@ class OpenAIConversationEntity(
             prompt = "\n".join(
                 (
                     template.Template(
-                        options.get(CONF_PROMPT, DEFAULT_PROMPT), self.hass
+                        options.get(CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT),
+                        self.hass,
                     ).async_render(
                         {
                             "ha_name": self.hass.config.location_name,
diff --git a/homeassistant/helpers/llm.py b/homeassistant/helpers/llm.py
index e09af97620c..e81c62ae25c 100644
--- a/homeassistant/helpers/llm.py
+++ b/homeassistant/helpers/llm.py
@@ -23,6 +23,12 @@ from .singleton import singleton
 
 LLM_API_ASSIST = "assist"
 
+DEFAULT_INSTRUCTIONS_PROMPT = """You are a voice assistant for Home Assistant.
+Answer in plain text. Keep it simple and to the point.
+The current time is {{ now().strftime("%X") }}.
+Today's date is {{ now().strftime("%x") }}.
+"""
+
 
 @callback
 def async_render_no_api_prompt(hass: HomeAssistant) -> str:
diff --git a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr
index 6d37c1d1823..6ffe3d747d3 100644
--- a/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr
+++ b/tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr
@@ -30,7 +30,10 @@
         'history': list([
           dict({
             'parts': '''
+              You are a voice assistant for Home Assistant.
               Answer in plain text. Keep it simple and to the point.
+              The current time is 05:00:00.
+              Today's date is 05/24/24.
               Only if the user wants to control a device, tell them to edit the AI configuration and allow access to Home Assistant.
             ''',
             'role': 'user',
@@ -79,7 +82,10 @@
         'history': list([
           dict({
             'parts': '''
+              You are a voice assistant for Home Assistant.
               Answer in plain text. Keep it simple and to the point.
+              The current time is 05:00:00.
+              Today's date is 05/24/24.
               Only if the user wants to control a device, tell them to edit the AI configuration and allow access to Home Assistant.
             ''',
             'role': 'user',
@@ -140,7 +146,10 @@
         'history': list([
           dict({
             'parts': '''
+              You are a voice assistant for Home Assistant.
               Answer in plain text. Keep it simple and to the point.
+              The current time is 05:00:00.
+              Today's date is 05/24/24.
               Only if the user wants to control a device, tell them to edit the AI configuration and allow access to Home Assistant.
             ''',
             'role': 'user',
@@ -193,7 +202,10 @@
         'history': list([
          dict({
            'parts': '''
+              You are a voice assistant for Home Assistant.
               Answer in plain text. Keep it simple and to the point.
+              The current time is 05:00:00.
+              Today's date is 05/24/24.
               Only if the user wants to control a device, tell them to edit the AI configuration and allow access to Home Assistant.
             ''',
             'role': 'user',
@@ -246,7 +258,10 @@
         'history': list([
           dict({
             'parts': '''
+              You are a voice assistant for Home Assistant.
               Answer in plain text. Keep it simple and to the point.
+              The current time is 05:00:00.
+              Today's date is 05/24/24.
               Call the intent tools to control Home Assistant. Just pass the name to the intent.
             ''',
             'role': 'user',
@@ -299,7 +314,10 @@
         'history': list([
           dict({
             'parts': '''
+              You are a voice assistant for Home Assistant.
               Answer in plain text. Keep it simple and to the point.
+              The current time is 05:00:00.
+              Today's date is 05/24/24.
               Call the intent tools to control Home Assistant. Just pass the name to the intent.
             ''',
             'role': 'user',
diff --git a/tests/components/google_generative_ai_conversation/test_config_flow.py b/tests/components/google_generative_ai_conversation/test_config_flow.py
index 805fb9c3c74..77da95506fa 100644
--- a/tests/components/google_generative_ai_conversation/test_config_flow.py
+++ b/tests/components/google_generative_ai_conversation/test_config_flow.py
@@ -7,6 +7,9 @@ from google.rpc.error_details_pb2 import ErrorInfo
 import pytest
 
 from homeassistant import config_entries
+from homeassistant.components.google_generative_ai_conversation.config_flow import (
+    RECOMMENDED_OPTIONS,
+)
 from homeassistant.components.google_generative_ai_conversation.const import (
     CONF_CHAT_MODEL,
     CONF_DANGEROUS_BLOCK_THRESHOLD,
@@ -19,7 +22,6 @@ from homeassistant.components.google_generative_ai_conversation.const import (
     CONF_TEMPERATURE,
     CONF_TOP_K,
     CONF_TOP_P,
-    DEFAULT_PROMPT,
     DOMAIN,
     RECOMMENDED_CHAT_MODEL,
     RECOMMENDED_HARM_BLOCK_THRESHOLD,
@@ -30,7 +32,6 @@ from homeassistant.components.google_generative_ai_conversation.const import (
 from homeassistant.const import CONF_LLM_HASS_API
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import FlowResultType
-from homeassistant.helpers import llm
 
 from tests.common import MockConfigEntry
 
@@ -92,11 +93,7 @@ async def test_form(hass: HomeAssistant) -> None:
     assert result2["data"] == {
         "api_key": "bla",
     }
-    assert result2["options"] == {
-        CONF_RECOMMENDED: True,
-        CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
-        CONF_PROMPT: DEFAULT_PROMPT,
-    }
+    assert result2["options"] == RECOMMENDED_OPTIONS
     assert len(mock_setup_entry.mock_calls) == 1
 
 
diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py
index 4c208c240b8..1f11cc58705 100644
--- a/tests/components/google_generative_ai_conversation/test_conversation.py
+++ b/tests/components/google_generative_ai_conversation/test_conversation.py
@@ -2,6 +2,7 @@
 
 from unittest.mock import AsyncMock, MagicMock, patch
 
+from freezegun import freeze_time
 from google.api_core.exceptions import GoogleAPICallError
 import google.generativeai.types as genai_types
 import pytest
@@ -23,6 +24,13 @@ from homeassistant.helpers import (
 from tests.common import MockConfigEntry
 
 
+@pytest.fixture(autouse=True)
+def freeze_the_time():
+    """Freeze the time."""
+    with freeze_time("2024-05-24 12:00:00", tz_offset=0):
+        yield
+
+
 @pytest.mark.parametrize(
     "agent_id", [None, "conversation.google_generative_ai_conversation"]
 )
diff --git a/tests/components/openai_conversation/test_config_flow.py b/tests/components/openai_conversation/test_config_flow.py
index 234e518b3c5..f5017c124b1 100644
--- a/tests/components/openai_conversation/test_config_flow.py
+++ b/tests/components/openai_conversation/test_config_flow.py
@@ -7,6 +7,7 @@ from openai import APIConnectionError, AuthenticationError, BadRequestError
 import pytest
 
 from homeassistant import config_entries
+from homeassistant.components.openai_conversation.config_flow import RECOMMENDED_OPTIONS
 from homeassistant.components.openai_conversation.const import (
     CONF_CHAT_MODEL,
     CONF_MAX_TOKENS,
@@ -62,6 +63,7 @@ async def test_form(hass: HomeAssistant) -> None:
     assert result2["data"] == {
         "api_key": "bla",
     }
+    assert result2["options"] == RECOMMENDED_OPTIONS
     assert len(mock_setup_entry.mock_calls) == 1