Standardize LLM instructions prompt (#118195)
* Standardize instructions prompt
* Add time/date to default instructions

pull/118277/head
parent 98d7821f47
commit 1602c8063c
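In practice the change collapses the per-integration `DEFAULT_PROMPT` constants into one shared helper constant, and both conversation entities fall back to it when the user has not set a custom prompt. A minimal sketch of that lookup pattern, using the names from the diff below (`options` stands in for a config entry's options mapping):

# Sketch of the shared fallback used by both integrations after this change.
# Names are taken from the diff below; `options` stands in for entry.options.
from homeassistant.helpers import llm

CONF_PROMPT = "prompt"  # same options key in both integrations
options: dict[str, str] = {}  # empty: the user kept the default prompt

prompt_template = options.get(CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT)
print(prompt_template)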
homeassistant/components/google_generative_ai_conversation/config_flow.py

@@ -43,7 +43,6 @@ from .const import (
     CONF_TEMPERATURE,
     CONF_TOP_K,
     CONF_TOP_P,
-    DEFAULT_PROMPT,
     DOMAIN,
     RECOMMENDED_CHAT_MODEL,
     RECOMMENDED_HARM_BLOCK_THRESHOLD,
@@ -64,7 +63,7 @@ STEP_API_DATA_SCHEMA = vol.Schema(
 RECOMMENDED_OPTIONS = {
     CONF_RECOMMENDED: True,
     CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
-    CONF_PROMPT: DEFAULT_PROMPT,
+    CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
 }


@@ -224,7 +223,11 @@ async def google_generative_ai_config_option_schema(
     schema = {
         vol.Optional(
             CONF_PROMPT,
-            description={"suggested_value": options.get(CONF_PROMPT, DEFAULT_PROMPT)},
+            description={
+                "suggested_value": options.get(
+                    CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
+                )
+            },
         ): TemplateSelector(),
         vol.Optional(
             CONF_LLM_HASS_API,

homeassistant/components/google_generative_ai_conversation/const.py

@@ -5,7 +5,6 @@ import logging
 DOMAIN = "google_generative_ai_conversation"
 LOGGER = logging.getLogger(__package__)
 CONF_PROMPT = "prompt"
-DEFAULT_PROMPT = "Answer in plain text. Keep it simple and to the point."

 CONF_RECOMMENDED = "recommended"
 CONF_CHAT_MODEL = "chat_model"

homeassistant/components/google_generative_ai_conversation/conversation.py

@@ -32,7 +32,6 @@ from .const import (
     CONF_TEMPERATURE,
     CONF_TOP_K,
     CONF_TOP_P,
-    DEFAULT_PROMPT,
     DOMAIN,
     LOGGER,
     RECOMMENDED_CHAT_MODEL,
@@ -226,7 +225,10 @@ class GoogleGenerativeAIConversationEntity(
             prompt = "\n".join(
                 (
                     template.Template(
-                        self.entry.options.get(CONF_PROMPT, DEFAULT_PROMPT), self.hass
+                        self.entry.options.get(
+                            CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
+                        ),
+                        self.hass,
                     ).async_render(
                         {
                             "ha_name": self.hass.config.location_name,

homeassistant/components/openai_conversation/config_flow.py

@@ -34,7 +34,6 @@ from .const import (
     CONF_RECOMMENDED,
     CONF_TEMPERATURE,
     CONF_TOP_P,
-    DEFAULT_PROMPT,
     DOMAIN,
     RECOMMENDED_CHAT_MODEL,
     RECOMMENDED_MAX_TOKENS,
@@ -53,7 +52,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
 RECOMMENDED_OPTIONS = {
     CONF_RECOMMENDED: True,
     CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
-    CONF_PROMPT: DEFAULT_PROMPT,
+    CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
 }


@@ -170,7 +169,11 @@ def openai_config_option_schema(
     schema = {
         vol.Optional(
             CONF_PROMPT,
-            description={"suggested_value": options.get(CONF_PROMPT, DEFAULT_PROMPT)},
+            description={
+                "suggested_value": options.get(
+                    CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT
+                )
+            },
         ): TemplateSelector(),
         vol.Optional(
             CONF_LLM_HASS_API,

homeassistant/components/openai_conversation/const.py

@@ -7,7 +7,6 @@ LOGGER = logging.getLogger(__package__)

 CONF_RECOMMENDED = "recommended"
 CONF_PROMPT = "prompt"
-DEFAULT_PROMPT = """Answer in plain text. Keep it simple and to the point."""
 CONF_CHAT_MODEL = "chat_model"
 RECOMMENDED_CHAT_MODEL = "gpt-4o"
 CONF_MAX_TOKENS = "max_tokens"

homeassistant/components/openai_conversation/conversation.py

@@ -23,7 +23,6 @@ from .const import (
     CONF_PROMPT,
     CONF_TEMPERATURE,
     CONF_TOP_P,
-    DEFAULT_PROMPT,
     DOMAIN,
     LOGGER,
     RECOMMENDED_CHAT_MODEL,
@@ -143,7 +142,8 @@ class OpenAIConversationEntity(
             prompt = "\n".join(
                 (
                     template.Template(
-                        options.get(CONF_PROMPT, DEFAULT_PROMPT), self.hass
+                        options.get(CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT),
+                        self.hass,
                     ).async_render(
                         {
                             "ha_name": self.hass.config.location_name,

homeassistant/helpers/llm.py

@@ -23,6 +23,12 @@ from .singleton import singleton

 LLM_API_ASSIST = "assist"

+DEFAULT_INSTRUCTIONS_PROMPT = """You are a voice assistant for Home Assistant.
+Answer in plain text. Keep it simple and to the point.
+The current time is {{ now().strftime("%X") }}.
+Today's date is {{ now().strftime("%x") }}.
+"""
+

 @callback
 def async_render_no_api_prompt(hass: HomeAssistant) -> str:

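The new default is a Home Assistant template, so `now()` is evaluated when the prompt is rendered. A rough standalone approximation with plain `jinja2` (Home Assistant's template engine supplies `now()` itself; here it is passed in explicitly so the snippet runs outside Home Assistant, and the `%X`/`%x` output is locale dependent):

# Rough approximation of how DEFAULT_INSTRUCTIONS_PROMPT renders.
from datetime import datetime

import jinja2

DEFAULT_INSTRUCTIONS_PROMPT = """You are a voice assistant for Home Assistant.
Answer in plain text. Keep it simple and to the point.
The current time is {{ now().strftime("%X") }}.
Today's date is {{ now().strftime("%x") }}.
"""

rendered = (
    jinja2.Environment()
    .from_string(DEFAULT_INSTRUCTIONS_PROMPT)
    .render(now=datetime.now)  # Home Assistant provides now() natively in templates
)
print(rendered)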
tests/components/google_generative_ai_conversation/snapshots/test_conversation.ambr

@@ -30,7 +30,10 @@
     'history': list([
       dict({
         'parts': '''
+          You are a voice assistant for Home Assistant.
           Answer in plain text. Keep it simple and to the point.
+          The current time is 05:00:00.
+          Today's date is 05/24/24.
           Only if the user wants to control a device, tell them to edit the AI configuration and allow access to Home Assistant.
           ''',
         'role': 'user',
@@ -79,7 +82,10 @@
     'history': list([
       dict({
         'parts': '''
+          You are a voice assistant for Home Assistant.
           Answer in plain text. Keep it simple and to the point.
+          The current time is 05:00:00.
+          Today's date is 05/24/24.
           Only if the user wants to control a device, tell them to edit the AI configuration and allow access to Home Assistant.
           ''',
         'role': 'user',
@@ -140,7 +146,10 @@
     'history': list([
       dict({
         'parts': '''
+          You are a voice assistant for Home Assistant.
           Answer in plain text. Keep it simple and to the point.
+          The current time is 05:00:00.
+          Today's date is 05/24/24.
           Only if the user wants to control a device, tell them to edit the AI configuration and allow access to Home Assistant.
           ''',
         'role': 'user',
@@ -193,7 +202,10 @@
     'history': list([
       dict({
         'parts': '''
+          You are a voice assistant for Home Assistant.
           Answer in plain text. Keep it simple and to the point.
+          The current time is 05:00:00.
+          Today's date is 05/24/24.
           Only if the user wants to control a device, tell them to edit the AI configuration and allow access to Home Assistant.
           ''',
         'role': 'user',
@@ -246,7 +258,10 @@
     'history': list([
       dict({
         'parts': '''
+          You are a voice assistant for Home Assistant.
           Answer in plain text. Keep it simple and to the point.
+          The current time is 05:00:00.
+          Today's date is 05/24/24.
           Call the intent tools to control Home Assistant. Just pass the name to the intent.
           ''',
         'role': 'user',
@@ -299,7 +314,10 @@
     'history': list([
      dict({
         'parts': '''
+          You are a voice assistant for Home Assistant.
           Answer in plain text. Keep it simple and to the point.
+          The current time is 05:00:00.
+          Today's date is 05/24/24.
           Call the intent tools to control Home Assistant. Just pass the name to the intent.
           ''',
         'role': 'user',

tests/components/google_generative_ai_conversation/test_config_flow.py

@@ -7,6 +7,9 @@ from google.rpc.error_details_pb2 import ErrorInfo
 import pytest

 from homeassistant import config_entries
+from homeassistant.components.google_generative_ai_conversation.config_flow import (
+    RECOMMENDED_OPTIONS,
+)
 from homeassistant.components.google_generative_ai_conversation.const import (
     CONF_CHAT_MODEL,
     CONF_DANGEROUS_BLOCK_THRESHOLD,
@@ -19,7 +22,6 @@ from homeassistant.components.google_generative_ai_conversation.const import (
     CONF_TEMPERATURE,
     CONF_TOP_K,
     CONF_TOP_P,
-    DEFAULT_PROMPT,
     DOMAIN,
     RECOMMENDED_CHAT_MODEL,
     RECOMMENDED_HARM_BLOCK_THRESHOLD,
@@ -30,7 +32,6 @@ from homeassistant.components.google_generative_ai_conversation.const import (
 from homeassistant.const import CONF_LLM_HASS_API
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import FlowResultType
-from homeassistant.helpers import llm

 from tests.common import MockConfigEntry

@@ -92,11 +93,7 @@ async def test_form(hass: HomeAssistant) -> None:
     assert result2["data"] == {
         "api_key": "bla",
     }
-    assert result2["options"] == {
-        CONF_RECOMMENDED: True,
-        CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
-        CONF_PROMPT: DEFAULT_PROMPT,
-    }
+    assert result2["options"] == RECOMMENDED_OPTIONS
     assert len(mock_setup_entry.mock_calls) == 1


tests/components/google_generative_ai_conversation/test_conversation.py

@@ -2,6 +2,7 @@

 from unittest.mock import AsyncMock, MagicMock, patch

+from freezegun import freeze_time
 from google.api_core.exceptions import GoogleAPICallError
 import google.generativeai.types as genai_types
 import pytest
@@ -23,6 +24,13 @@ from homeassistant.helpers import (
 from tests.common import MockConfigEntry


+@pytest.fixture(autouse=True)
+def freeze_the_time():
+    """Freeze the time."""
+    with freeze_time("2024-05-24 12:00:00", tz_offset=0):
+        yield
+
+
 @pytest.mark.parametrize(
     "agent_id", [None, "conversation.google_generative_ai_conversation"]
 )

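The autouse fixture pins the clock with freezegun, which is why the snapshot prompts above contain the fixed 05:00:00 / 05/24/24 values (the frozen 2024-05-24 12:00:00 UTC instant rendered in the test's local timezone). A quick illustration of the mechanism, independent of Home Assistant:

# Minimal freezegun illustration: inside the context manager, "now" is fixed,
# so anything rendered from it (like the default instructions prompt) is stable.
from datetime import datetime

from freezegun import freeze_time

with freeze_time("2024-05-24 12:00:00"):
    print(datetime.now().strftime("%X"))  # always 12:00:00 while frozen
    print(datetime.now().strftime("%x"))  # always 05/24/24 (locale dependent)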
tests/components/openai_conversation/test_config_flow.py

@@ -7,6 +7,7 @@ from openai import APIConnectionError, AuthenticationError, BadRequestError
 import pytest

 from homeassistant import config_entries
+from homeassistant.components.openai_conversation.config_flow import RECOMMENDED_OPTIONS
 from homeassistant.components.openai_conversation.const import (
     CONF_CHAT_MODEL,
     CONF_MAX_TOKENS,
@@ -62,6 +63,7 @@ async def test_form(hass: HomeAssistant) -> None:
     assert result2["data"] == {
         "api_key": "bla",
     }
+    assert result2["options"] == RECOMMENDED_OPTIONS
     assert len(mock_setup_entry.mock_calls) == 1
