Runs agent as default command
parent b5378174f3
commit 6f87fb63c1
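The commit folds the old `start` subcommand into the Click group itself. It relies on two Click features that appear in the hunks below: `@click.group(invoke_without_command=True)`, which lets the group callback run even when no subcommand is named, and `@click.pass_context`, whose `ctx.invoked_subcommand` lets the callback decide whether to fall through to the default behaviour. A minimal sketch of the pattern (the `version` subcommand and the echoed strings are illustrative, not from this commit):

# Minimal sketch of the "run by default" Click pattern this commit adopts.
import click


@click.group(invoke_without_command=True)
@click.option("-c", "--continuous", is_flag=True, help="Enable Continuous Mode")
@click.pass_context
def main(ctx: click.Context, continuous: bool) -> None:
    if ctx.invoked_subcommand is None:
        # No subcommand was named on the command line: do the default work here.
        click.echo(f"starting agent (continuous={continuous})")


@main.command()
def version() -> None:
    # Named subcommands still dispatch as usual, e.g. `prog version`.
    click.echo("0.0.0")


if __name__ == "__main__":
    main()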
@@ -12,15 +12,7 @@ from autogpt.memory import get_memory
 from autogpt.prompt import construct_prompt
 
 
-@click.group()
-def main() -> None:
-    """
-    Welcome to AutoGPT an experimental open-source application showcasing the capabilities of the GPT-4 pushing the boundaries of AI.
-    """
-    pass
-
-
-@main.command()
+@click.group(invoke_without_command=True)
 @click.option("-c", "--continuous", is_flag=True, help="Enable Continuous Mode")
 @click.option(
     "--skip-reprompt",
@@ -60,7 +52,9 @@ def main() -> None:
     is_flag=True,
     help="Dangerous: Allows Auto-GPT to download files natively.",
 )
-def start(
+@click.pass_context
+def main(
+    ctx: click.Context,
     continuous: bool,
     continuous_limit: int,
     ai_settings: str,
@@ -73,51 +67,56 @@ def start(
     browser_name: str,
     allow_downloads: bool,
 ) -> None:
-    """Start an Auto-GPT assistant"""
-    cfg = Config()
-    # TODO: fill in llm values here
-    check_openai_api_key()
-    create_config(
-        continuous,
-        continuous_limit,
-        ai_settings,
-        skip_reprompt,
-        speak,
-        debug,
-        gpt3only,
-        gpt4only,
-        memory_type,
-        browser_name,
-        allow_downloads,
-    )
-    logger.set_level(logging.DEBUG if cfg.debug_mode else logging.INFO)
-    ai_name = ""
-    system_prompt = construct_prompt()
-    # print(prompt)
-    # Initialize variables
-    full_message_history = []
-    next_action_count = 0
-    # Make a constant:
-    triggering_prompt = (
-        "Determine which next command to use, and respond using the"
-        " format specified above:"
-    )
-    # Initialize memory and make sure it is empty.
-    # this is particularly important for indexing and referencing pinecone memory
-    memory = get_memory(cfg, init=True)
-    logger.typewriter_log(
-        "Using memory of type:", Fore.GREEN, f"{memory.__class__.__name__}"
-    )
-    logger.typewriter_log("Using Browser:", Fore.GREEN, cfg.selenium_web_browser)
-    agent = Agent(
-        ai_name=ai_name,
-        memory=memory,
-        full_message_history=full_message_history,
-        next_action_count=next_action_count,
-        system_prompt=system_prompt,
-        triggering_prompt=triggering_prompt,
-    )
-    agent.start_interaction_loop()
+    """
+    Welcome to AutoGPT an experimental open-source application showcasing the capabilities of the GPT-4 pushing the boundaries of AI.
+
+    Start an Auto-GPT assistant.
+    """
+    if ctx.invoked_subcommand is None:
+        cfg = Config()
+        # TODO: fill in llm values here
+        check_openai_api_key()
+        create_config(
+            continuous,
+            continuous_limit,
+            ai_settings,
+            skip_reprompt,
+            speak,
+            debug,
+            gpt3only,
+            gpt4only,
+            memory_type,
+            browser_name,
+            allow_downloads,
+        )
+        logger.set_level(logging.DEBUG if cfg.debug_mode else logging.INFO)
+        ai_name = ""
+        system_prompt = construct_prompt()
+        # print(prompt)
+        # Initialize variables
+        full_message_history = []
+        next_action_count = 0
+        # Make a constant:
+        triggering_prompt = (
+            "Determine which next command to use, and respond using the"
+            " format specified above:"
+        )
+        # Initialize memory and make sure it is empty.
+        # this is particularly important for indexing and referencing pinecone memory
+        memory = get_memory(cfg, init=True)
+        logger.typewriter_log(
+            "Using memory of type:", Fore.GREEN, f"{memory.__class__.__name__}"
+        )
+        logger.typewriter_log("Using Browser:", Fore.GREEN, cfg.selenium_web_browser)
+        agent = Agent(
+            ai_name=ai_name,
+            memory=memory,
+            full_message_history=full_message_history,
+            next_action_count=next_action_count,
+            system_prompt=system_prompt,
+            triggering_prompt=triggering_prompt,
+        )
+        agent.start_interaction_loop()
 
 
 if __name__ == "__main__":
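With the subcommand removed, running the CLI with no arguments now drops straight into the agent loop guarded by `ctx.invoked_subcommand is None`; assuming the usual module entry point (not shown in this diff), an invocation like `python -m autogpt --continuous` replaces the earlier `... start --continuous`, and any explicitly named subcommand still bypasses the default path.

The remaining hunk is from the module defining `prompt_user()`: the single welcome banner is split into a general greeting that points users at `--help`, followed by a separate "Create an AI-Assistant:" prompt.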
@@ -17,6 +17,13 @@ def prompt_user() -> AIConfig:
     logger.typewriter_log(
         "Welcome to Auto-GPT! ",
         Fore.GREEN,
+        "run with '--help' for more information.",
+        speak_text=True,
+    )
+
+    logger.typewriter_log(
+        "Create an AI-Assistant:",
+        Fore.GREEN,
         "Enter the name of your AI and its role below. Entering nothing will load"
         " defaults.",
         speak_text=True,