Use smart LLM for automatic config generation
parent 03a2882915
commit 08e6254232
@@ -216,7 +216,7 @@ def generate_aiconfig_automatic(user_prompt: str, config: Config) -> AIConfig:
     # Call LLM with the string as user input
     output = create_chat_completion(
         ChatSequence.for_model(
-            config.fast_llm,
+            config.smart_llm,
             [
                 Message("system", system_prompt),
                 Message("user", prompt_ai_config_automatic),
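For context, the change only swaps which configured model name is handed to the chat-completion call: automatic AIConfig generation now uses the model set as smart_llm instead of fast_llm. The sketch below is a minimal, self-contained illustration of that selection pattern; the Config dataclass, its default model names, and the build_config_generation_request helper are illustrative stand-ins, not AutoGPT's actual classes or API.

from dataclasses import dataclass

# Illustrative stand-in for the real Config object; only the two model
# fields relevant to this commit are modeled here (defaults are assumptions).
@dataclass
class Config:
    fast_llm: str = "gpt-3.5-turbo"  # cheaper model for routine calls
    smart_llm: str = "gpt-4"         # stronger model for harder tasks


def build_config_generation_request(
    config: Config, system_prompt: str, prompt_ai_config_automatic: str
) -> dict:
    # Mirrors the structure shown in the diff: a system message plus the
    # user's task description, sent to the *smart* model after this commit.
    return {
        "model": config.smart_llm,  # was config.fast_llm before the change
        "messages": [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": prompt_ai_config_automatic},
        ],
    }


if __name__ == "__main__":
    request = build_config_generation_request(
        Config(),
        system_prompt="You write AI agent configs.",
        prompt_ai_config_automatic="Task: build a website for me.",
    )
    print(request["model"])  # -> "gpt-4"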