Add Google Generative AI Conversation system prompt `user_name` and `llm_context` variables (#118510)
* Google Generative AI Conversation: Add variables to the system prompt
* User name and llm_context
* test for template variables

---------

Co-authored-by: Paulus Schoutsen <balloob@gmail.com>
parent 2e45d678b8
commit 395e1ae31e
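With this change, the integration's configured prompt template can reference the new user_name and llm_context variables alongside the existing ha_name. A minimal, illustrative options payload (the wording of the prompt is invented for the example; the "prompt" option key matches the test further down):

# Hypothetical prompt option for this integration. user_name and llm_context
# are the variables added by this commit; ha_name was already available.
options = {
    "prompt": (
        "You are a voice assistant for {{ ha_name }}. "
        "You are speaking with {{ user_name or 'an unknown user' }} "
        "(language {{ llm_context.language }}, device {{ llm_context.device_id }})."
    ),
}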
@@ -163,20 +163,22 @@ class GoogleGenerativeAIConversationEntity(
         intent_response = intent.IntentResponse(language=user_input.language)
         llm_api: llm.APIInstance | None = None
         tools: list[dict[str, Any]] | None = None
+        user_name: str | None = None
+        llm_context = llm.LLMContext(
+            platform=DOMAIN,
+            context=user_input.context,
+            user_prompt=user_input.text,
+            language=user_input.language,
+            assistant=conversation.DOMAIN,
+            device_id=user_input.device_id,
+        )

         if self.entry.options.get(CONF_LLM_HASS_API):
             try:
                 llm_api = await llm.async_get_api(
                     self.hass,
                     self.entry.options[CONF_LLM_HASS_API],
-                    llm.LLMContext(
-                        platform=DOMAIN,
-                        context=user_input.context,
-                        user_prompt=user_input.text,
-                        language=user_input.language,
-                        assistant=conversation.DOMAIN,
-                        device_id=user_input.device_id,
-                    ),
+                    llm_context,
                 )
             except HomeAssistantError as err:
                 LOGGER.error("Error getting LLM API: %s", err)
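The hunk above builds the LLMContext once and reuses the same instance for both the LLM API lookup and, later, the prompt render. A rough standalone sketch of that "build once, pass twice" pattern, using stand-in names (RequestContext, get_api, render_prompt are illustrative, not Home Assistant's real helpers):

from dataclasses import dataclass

@dataclass
class RequestContext:
    # Stand-in for llm.LLMContext: metadata about who is asking and from where.
    platform: str
    user_prompt: str
    language: str
    device_id: str | None = None

def get_api(api_id: str, ctx: RequestContext) -> str:
    # Stand-in for llm.async_get_api(): the context identifies the caller.
    return f"api:{api_id} for {ctx.platform}"

def render_prompt(template: str, ctx: RequestContext, user_name: str | None) -> str:
    # Stand-in for the Jinja render: the same context object is exposed as a variable.
    return template.format(user_name=user_name or "unknown", language=ctx.language)

ctx = RequestContext(
    platform="google_generative_ai_conversation",
    user_prompt="turn on the lights",
    language="en",
)
print(get_api("assist", ctx))
print(render_prompt("Talking to {user_name} in {language}.", ctx, "Test User"))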
@@ -225,6 +227,15 @@ class GoogleGenerativeAIConversationEntity(
             conversation_id = ulid.ulid_now()
             messages = [{}, {}]

+        if (
+            user_input.context
+            and user_input.context.user_id
+            and (
+                user := await self.hass.auth.async_get_user(user_input.context.user_id)
+            )
+        ):
+            user_name = user.name
+
         try:
             if llm_api:
                 api_prompt = llm_api.api_prompt
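The guard chain above leaves user_name as None unless the conversation input carries a context with a user_id that resolves to a known user; the walrus operator captures the looked-up user inside the condition. A small runnable sketch of the same pattern, with fake_get_user standing in for hass.auth.async_get_user():

import asyncio

USERS = {"12345": "Test User"}  # fake user store for the sketch

async def fake_get_user(user_id: str) -> str | None:
    # Stand-in for hass.auth.async_get_user(); returns None for unknown ids.
    return USERS.get(user_id)

async def resolve_user_name(user_id: str | None) -> str | None:
    user_name: str | None = None
    if user_id and (name := await fake_get_user(user_id)):
        user_name = name
    return user_name

print(asyncio.run(resolve_user_name("12345")))  # Test User
print(asyncio.run(resolve_user_name(None)))     # None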
@@ -241,6 +252,8 @@ class GoogleGenerativeAIConversationEntity(
                     ).async_render(
                         {
                             "ha_name": self.hass.config.location_name,
+                            "user_name": user_name,
+                            "llm_context": llm_context,
                         },
                         parse_result=False,
                     ),
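The two new keys are what make {{ user_name }} and {{ llm_context.* }} resolvable in the prompt template. A standalone illustration of that render step, using plain jinja2 instead of Home Assistant's template.Template helper; SimpleNamespace stands in for the LLMContext and Context objects so the attribute lookups match the test below:

from types import SimpleNamespace

import jinja2

llm_context = SimpleNamespace(context=SimpleNamespace(user_id="12345"), language="en")
prompt = jinja2.Template(
    "The user name is {{ user_name }}. "
    "The user id is {{ llm_context.context.user_id }}."
).render(user_name="Test User", llm_context=llm_context)
print(prompt)  # The user name is Test User. The user id is 12345.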
@@ -449,6 +449,51 @@ async def test_template_error(
     assert result.response.error_code == "unknown", result


+async def test_template_variables(
+    hass: HomeAssistant, mock_config_entry: MockConfigEntry
+) -> None:
+    """Test that template variables work."""
+    context = Context(user_id="12345")
+    mock_user = MagicMock()
+    mock_user.id = "12345"
+    mock_user.name = "Test User"
+
+    hass.config_entries.async_update_entry(
+        mock_config_entry,
+        options={
+            "prompt": (
+                "The user name is {{ user_name }}. "
+                "The user id is {{ llm_context.context.user_id }}."
+            ),
+        },
+    )
+    with (
+        patch("google.generativeai.GenerativeModel") as mock_model,
+        patch("homeassistant.auth.AuthManager.async_get_user", return_value=mock_user),
+    ):
+        await hass.config_entries.async_setup(mock_config_entry.entry_id)
+        await hass.async_block_till_done()
+        mock_chat = AsyncMock()
+        mock_model.return_value.start_chat.return_value = mock_chat
+        chat_response = MagicMock()
+        mock_chat.send_message_async.return_value = chat_response
+        mock_part = MagicMock()
+        mock_part.text = "Model response"
+        chat_response.parts = [mock_part]
+        result = await conversation.async_converse(
+            hass, "hello", None, context, agent_id=mock_config_entry.entry_id
+        )
+
+    assert (
+        result.response.response_type == intent.IntentResponseType.ACTION_DONE
+    ), result
+    assert (
+        "The user name is Test User."
+        in mock_model.mock_calls[1][2]["history"][0]["parts"]
+    )
+    assert "The user id is 12345." in mock_model.mock_calls[1][2]["history"][0]["parts"]
+
+
 async def test_conversation_agent(
     hass: HomeAssistant,
     mock_config_entry: MockConfigEntry,