Update OpenAI conversation agent to allow multiple LLM APIs (#143189)
parent f11f4510a2
commit 61e4be4456
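In short, the options flow now stores CONF_LLM_HASS_API as a list of API ids instead of a single string, and the config option becomes a multi-select. A minimal standalone sketch of the normalization pattern used in openai_config_option_schema below (the variable name stored_api is illustrative only, not part of the change):

    # A previously stored single API id (str) is wrapped in a list so it can be
    # offered as the suggested value of the new multi-select selector.
    stored_api = "assist"  # illustrative; existing entries may hold a plain string
    if stored_api and isinstance(stored_api, str):
        stored_api = [stored_api]
    assert stored_api == ["assist"]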
@@ -154,9 +154,8 @@ class OpenAIOptionsFlow(OptionsFlow):
         if user_input is not None:
             if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
-                if user_input[CONF_LLM_HASS_API] == "none":
-                    user_input.pop(CONF_LLM_HASS_API)
-
+                if not user_input.get(CONF_LLM_HASS_API):
+                    user_input.pop(CONF_LLM_HASS_API, None)
                 if user_input.get(CONF_CHAT_MODEL) in UNSUPPORTED_MODELS:
                     errors[CONF_CHAT_MODEL] = "model_not_supported"
@@ -178,7 +177,7 @@ class OpenAIOptionsFlow(OptionsFlow):
                 options = {
                     CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
                     CONF_PROMPT: user_input[CONF_PROMPT],
-                    CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API],
+                    CONF_LLM_HASS_API: user_input.get(CONF_LLM_HASS_API),
                 }

         schema = openai_config_option_schema(self.hass, options)
@@ -248,19 +247,16 @@ def openai_config_option_schema(
 ) -> VolDictType:
     """Return a schema for OpenAI completion options."""
     hass_apis: list[SelectOptionDict] = [
         SelectOptionDict(
-            label="No control",
-            value="none",
-        )
-    ]
-    hass_apis.extend(
-        SelectOptionDict(
             label=api.name,
             value=api.id,
         )
         for api in llm.async_get_apis(hass)
-    )
-
+    ]
+    if (suggested_llm_apis := options.get(CONF_LLM_HASS_API)) and isinstance(
+        suggested_llm_apis, str
+    ):
+        suggested_llm_apis = [suggested_llm_apis]
     schema: VolDictType = {
         vol.Optional(
             CONF_PROMPT,
@@ -272,9 +268,8 @@ def openai_config_option_schema(
         ): TemplateSelector(),
         vol.Optional(
             CONF_LLM_HASS_API,
-            description={"suggested_value": options.get(CONF_LLM_HASS_API)},
-            default="none",
-        ): SelectSelector(SelectSelectorConfig(options=hass_apis)),
+            description={"suggested_value": suggested_llm_apis},
+        ): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
         vol.Required(
             CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
         ): bool,
@@ -111,7 +111,7 @@ async def test_options_unsupported_model(
             CONF_RECOMMENDED: False,
             CONF_PROMPT: "Speak like a pirate",
             CONF_CHAT_MODEL: "o1-mini",
-            CONF_LLM_HASS_API: "assist",
+            CONF_LLM_HASS_API: ["assist"],
         },
     )
     await hass.async_block_till_done()
@@ -168,7 +168,6 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non
         (
             {
                 CONF_RECOMMENDED: True,
-                CONF_LLM_HASS_API: "none",
                 CONF_PROMPT: "bla",
             },
             {
@@ -202,6 +201,18 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non
                 CONF_WEB_SEARCH_CONTEXT_SIZE: "medium",
                 CONF_WEB_SEARCH_USER_LOCATION: False,
             },
+            {
+                CONF_RECOMMENDED: True,
+                CONF_LLM_HASS_API: ["assist"],
+                CONF_PROMPT: "",
+            },
+            {
+                CONF_RECOMMENDED: True,
+                CONF_LLM_HASS_API: ["assist"],
+                CONF_PROMPT: "",
+            },
+        ),
+        (
             {
                 CONF_RECOMMENDED: True,
                 CONF_LLM_HASS_API: "assist",
@@ -209,7 +220,12 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non
             },
             {
                 CONF_RECOMMENDED: True,
-                CONF_LLM_HASS_API: "assist",
+                CONF_LLM_HASS_API: ["assist"],
                 CONF_PROMPT: "",
             },
+            {
+                CONF_RECOMMENDED: True,
+                CONF_LLM_HASS_API: ["assist"],
+                CONF_PROMPT: "",
+            },
         ),
@@ -338,7 +354,7 @@ async def test_options_web_search_unsupported_model(
             CONF_RECOMMENDED: False,
             CONF_PROMPT: "Speak like a pirate",
             CONF_CHAT_MODEL: "o1-pro",
-            CONF_LLM_HASS_API: "assist",
+            CONF_LLM_HASS_API: ["assist"],
             CONF_WEB_SEARCH: True,
         },
     )