removed un necessary changes

parent 13467259b4
commit 156739788a
@@ -5,17 +5,21 @@ from llm_utils import create_chat_completion
 
 cfg = Config()
 
-def get_website_content(url):
-    response = requests.get(url, headers=cfg.user_agent_header)
+def scrape_text(url):
+    # Most basic check if the URL is valid:
+    if not url.startswith('http'):
+        return "Error: Invalid URL"
+
+    try:
+        response = requests.get(url, headers=cfg.user_agent_header)
+    except requests.exceptions.RequestException as e:
+        return "Error: " + str(e)
 
     # Check if the response contains an HTTP error
     if response.status_code >= 400:
         return "Error: HTTP " + str(response.status_code) + " error"
-    return response
-
-
-def scrape_text(website_content):
-    soup = BeautifulSoup(website_content.text, "html.parser")
+
+    soup = BeautifulSoup(response.text, "html.parser")
 
     for script in soup(["script", "style"]):
         script.extract()
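With this hunk, scrape_text() takes the URL itself, performs its own request, and reports failures by returning a string that begins with "Error" rather than raising. A minimal caller-side sketch under that reading follows; the URL is illustrative, and the import assumes the file is importable as the browse module, as the browse. prefix later in this diff suggests.

    from browse import scrape_text  # assumes the module above is named browse

    text = scrape_text("https://example.com")  # illustrative URL
    if text.startswith("Error"):
        # invalid URL, request exception, or HTTP status >= 400
        print("Browse failed:", text)
    else:
        print(text[:200])  # first 200 characters of the scraped text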
@@ -42,8 +46,14 @@ def format_hyperlinks(hyperlinks):
     return formatted_links
 
 
-def scrape_links(website_content):
-    soup = BeautifulSoup(website_content.text, "html.parser")
+def scrape_links(url):
+    response = requests.get(url, headers=cfg.user_agent_header)
+
+    # Check if the response contains an HTTP error
+    if response.status_code >= 400:
+        return "error"
+
+    soup = BeautifulSoup(response.text, "html.parser")
 
     for script in soup(["script", "style"]):
         script.extract()
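Note that the two scrapers signal failure differently: scrape_text() returns strings that begin with "Error: ...", while scrape_links() returns the bare string "error" on an HTTP error. Below is a small caller-side sketch that tolerates both conventions; check_scrape_result() is a hypothetical helper, not something introduced by this commit.

    def check_scrape_result(value):
        # scrape_links() normally returns a list of formatted links; both
        # scrapers fall back to an error string, so treat any string that
        # starts with "error"/"Error" as a failure.
        if isinstance(value, str) and value.lower().startswith("error"):
            return None
        return value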
@@ -106,6 +106,8 @@ def execute_command(command_name, arguments):
             return execute_python_file(arguments["file"])
         elif command_name == "generate_image":
             return generate_image(arguments["prompt"])
+        elif command_name == "do_nothing":
+            return "No action performed."
         elif command_name == "task_complete":
             shutdown()
         else:
@@ -163,9 +165,8 @@ def google_official_search(query, num_results=8):
     return search_results_links
 
 def browse_website(url, question):
-    website_content = browse.get_website_content(url)
-    summary = get_text_summary(website_content, question)
-    links = get_hyperlinks(website_content)
+    summary = get_text_summary(url, question)
+    links = get_hyperlinks(url)
 
     # Limit links to 5
     if len(links) > 5:
@@ -176,14 +177,14 @@ def browse_website(url, question):
     return result
 
 
-def get_text_summary(website_content, question):
-    text = browse.scrape_text(website_content)
+def get_text_summary(url, question):
+    text = browse.scrape_text(url)
     summary = browse.summarize_text(text, question)
     return """ "Result" : """ + summary
 
 
-def get_hyperlinks(website_content):
-    link_list = browse.scrape_links(website_content)
+def get_hyperlinks(url):
+    link_list = browse.scrape_links(url)
     return link_list
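After this change the helpers receive the URL directly, so each one triggers its own HTTP request through the browse module (scrape_text and scrape_links respectively). A rough sketch of what one browse_website() call now amounts to, with an illustrative URL and question:

    url = "https://example.com"            # illustrative
    question = "What is this page about?"  # illustrative

    summary = get_text_summary(url, question)  # fetches the page via browse.scrape_text(url)
    links = get_hyperlinks(url)                # fetches it again via browse.scrape_links(url)

    # browse_website() then limits links to 5; the slice is a guess at the
    # elided line that follows the "# Limit links to 5" comment above.
    if len(links) > 5:
        links = links[:5]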