Merge branch 'master' into zamilmajdy/security-update-upgrade-next

zamilmajdy/security-update-upgrade-next
Zamil Majdy 2024-09-26 20:56:33 -05:00 committed by GitHub
commit 02b6ebcd9d
5 changed files with 26 additions and 43 deletions


@@ -362,6 +362,13 @@ class AITextGeneratorBlock(Block):
             yield "error", str(e)
 
 
+class SummaryStyle(Enum):
+    CONCISE = "concise"
+    DETAILED = "detailed"
+    BULLET_POINTS = "bullet points"
+    NUMBERED_LIST = "numbered list"
+
+
 class AITextSummarizerBlock(Block):
     class Input(BlockSchema):
         text: str
@@ -370,6 +377,8 @@ class AITextSummarizerBlock(Block):
             default=LlmModel.GPT4_TURBO,
             description="The language model to use for summarizing the text.",
         )
+        focus: str = "general information"
+        style: SummaryStyle = SummaryStyle.CONCISE
         api_key: BlockSecret = SecretField(value="")
         # TODO: Make this dynamic
         max_tokens: int = 4000  # Adjust based on the model's context window
@@ -440,7 +449,7 @@ class AITextSummarizerBlock(Block):
         raise ValueError("Failed to get a response from the LLM.")
 
     def _summarize_chunk(self, chunk: str, input_data: Input) -> str:
-        prompt = f"Summarize the following text concisely:\n\n{chunk}"
+        prompt = f"Summarize the following text in a {input_data.style} form. Focus your summary on the topic of `{input_data.focus}` if present, otherwise just provide a general summary:\n\n```{chunk}```"
 
         llm_response = self.llm_call(
             AIStructuredResponseGeneratorBlock.Input(
@@ -454,13 +463,10 @@ class AITextSummarizerBlock(Block):
         return llm_response["summary"]
 
     def _combine_summaries(self, summaries: list[str], input_data: Input) -> str:
-        combined_text = " ".join(summaries)
+        combined_text = "\n\n".join(summaries)
 
         if len(combined_text.split()) <= input_data.max_tokens:
-            prompt = (
-                "Provide a final, concise summary of the following summaries:\n\n"
-                + combined_text
-            )
+            prompt = f"Provide a final summary of the following section summaries in a {input_data.style} form, focus your summary on the topic of `{input_data.focus}` if present:\n\n ```{combined_text}```\n\n Just respond with the final_summary in the format specified."
 
         llm_response = self.llm_call(
             AIStructuredResponseGeneratorBlock.Input(
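For context on the summarizer changes above: the new `style` and `focus` inputs feed directly into the chunk prompt. The sketch below is illustrative only, not code from the block; the `build_chunk_prompt` helper and the sample values are hypothetical, and it interpolates the enum's `.value` to get the human-readable wording ("bullet points") into the prompt text.

```python
from enum import Enum


class SummaryStyle(Enum):
    CONCISE = "concise"
    DETAILED = "detailed"
    BULLET_POINTS = "bullet points"
    NUMBERED_LIST = "numbered list"


def build_chunk_prompt(chunk: str, style: SummaryStyle, focus: str) -> str:
    """Hypothetical helper mirroring the prompt template in the diff above."""
    return (
        f"Summarize the following text in a {style.value} form. "
        f"Focus your summary on the topic of `{focus}` if present, "
        f"otherwise just provide a general summary:\n\n```{chunk}```"
    )


if __name__ == "__main__":
    # Example: ask for bullet points focused on pricing.
    print(build_chunk_prompt(
        "AutoGPT is an open-source platform ...",
        SummaryStyle.BULLET_POINTS,
        "pricing",
    ))
```

Note that `_combine_summaries` now joins the per-chunk summaries with blank lines and reuses the same `style`/`focus` wording for the final pass, so the intermediate and final summaries stay consistent.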


@@ -96,36 +96,6 @@ services:
       file: ./supabase/docker/docker-compose.yml
       service: rest
-  realtime:
-    <<: *supabase-services
-    extends:
-      file: ./supabase/docker/docker-compose.yml
-      service: realtime
-  storage:
-    <<: *supabase-services
-    extends:
-      file: ./supabase/docker/docker-compose.yml
-      service: storage
-  imgproxy:
-    <<: *supabase-services
-    extends:
-      file: ./supabase/docker/docker-compose.yml
-      service: imgproxy
-  meta:
-    <<: *supabase-services
-    extends:
-      file: ./supabase/docker/docker-compose.yml
-      service: meta
-  functions:
-    <<: *supabase-services
-    extends:
-      file: ./supabase/docker/docker-compose.yml
-      service: functions
-  analytics:
-    <<: *supabase-services
-    extends:


@@ -77,11 +77,7 @@ export const BlocksControl: React.FC<BlocksControlProps> = ({
   return (
     <Popover
       open={pinBlocksPopover ? true : undefined}
-      onOpenChange={(open) => {
-        if (!open) {
-          resetFilters();
-        }
-      }}
+      onOpenChange={(open) => open || resetFilters()}
     >
       <Tooltip delayDuration={500}>
         <TooltipTrigger asChild>
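The `onOpenChange` refactor above relies on short-circuit evaluation: `open || resetFilters()` only evaluates the right-hand side when `open` is falsy, i.e. when the popover closes, which matches the old `if (!open)` branch. A minimal sketch of the same idiom in Python (the function names here are made up for illustration):

```python
def reset_filters() -> None:
    print("filters reset")


def on_open_change(is_open: bool) -> None:
    # Same short-circuit trick as `(open) => open || resetFilters()`:
    # the right-hand side runs only when `is_open` is falsy.
    is_open or reset_filters()


on_open_change(True)   # popover opened: nothing happens
on_open_change(False)  # popover closed: prints "filters reset"
```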


@@ -85,7 +85,7 @@ env:
   NUM_NODE_WORKERS: 5
   REDIS_HOST: "redis-dev-master.redis-dev.svc.cluster.local"
   REDIS_PORT: "6379"
-  BACKEND_CORS_ALLOW_ORIGINS: ["https://dev-builder.agpt.co"]
+  BACKEND_CORS_ALLOW_ORIGINS: '["https://dev-builder.agpt.co"]'
   SUPABASE_SERVICE_ROLE_KEY: ""
   GITHUB_CLIENT_ID: ""
   GITHUB_CLIENT_SECRET: ""
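The quoting change above matters because an unquoted `["https://dev-builder.agpt.co"]` is parsed by Helm/YAML as a list, while an environment variable must ultimately be a plain string. Quoting the value ships it as a literal JSON string, which the backend can then decode into a list of origins. A minimal sketch, assuming the backend JSON-decodes list-valued settings from the environment (as pydantic-style settings loaders commonly do):

```python
import json
import os

# Simulate what the container sees with the quoted Helm value.
os.environ["BACKEND_CORS_ALLOW_ORIGINS"] = '["https://dev-builder.agpt.co"]'

# Hypothetical parsing step on the backend side.
origins = json.loads(os.environ["BACKEND_CORS_ALLOW_ORIGINS"])
print(origins)  # ['https://dev-builder.agpt.co']
```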


@@ -9,7 +9,7 @@ This guide will help you setup the server and builder for the project.
 
 <!-- The video is listed in the root Readme.md of the repo -->
-We also offer this in video format. You can check it out [here](https://github.com/Significant-Gravitas/AutoGPT#how-to-get-started).
+We also offer this in video format. You can check it out [here](https://github.com/Significant-Gravitas/AutoGPT?tab=readme-ov-file#how-to-setup-for-self-hosting).
 
 !!! warning
     **DO NOT FOLLOW ANY OUTSIDE TUTORIALS AS THEY WILL LIKELY BE OUT OF DATE**
@@ -20,6 +20,7 @@ To setup the server, you need to have the following installed:
 - [Node.js](https://nodejs.org/en/)
 - [Docker](https://docs.docker.com/get-docker/)
+- [Git](https://git-scm.com/downloads)
 
 ### Checking if you have Node.js & NPM installed
@@ -59,6 +60,16 @@ docker-compose -v
 Once you have Docker and Docker Compose installed, you can proceed to the next step.
 
+## Cloning the Repository
+
+The first step is cloning the AutoGPT repository to your computer.
+To do this, open a terminal window in a folder on your computer and run:
+```
+git clone https://github.com/Significant-Gravitas/AutoGPT.git
+```
+If you get stuck, follow [this guide](https://docs.github.com/en/repositories/creating-and-managing-repositories/cloning-a-repository).
+Once that's complete you can close this terminal window.
 
 ## Running the backend services
 To run the backend services, follow these steps: