{
  "config": {
    "step": {
      "user": {
        "data": {
          "url": "[%key:common::config_flow::data::url%]",
          "model": "Model"
        }
      },
      "download": {
        "title": "Downloading model"
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "download_failed": "Model downloading failed",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "progress": {
      "download": "Please wait while the model is downloaded, which may take a very long time. Check your Ollama server logs for more details."
    }
  },
  "options": {
    "step": {
      "init": {
        "data": {
          "prompt": "Instructions",
          "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
          "max_history": "Max history messages",
          "keep_alive": "Keep alive"
        },
        "data_description": {
          "prompt": "Instruct how the LLM should respond. This can be a template.",
          "keep_alive": "Duration in seconds for Ollama to keep model in memory. -1 = indefinite, 0 = never."
        }
      }
    }
  }
}