forked from Significant-Gravitas/AutoGPT
Compare commits
29 Commits
Author | SHA1 | Date |
---|---|---|
|
e419a1ec6e | |
|
80f31a4f72 | |
|
5933adef3e | |
|
e3634c9f24 | |
|
0d0a9b0884 | |
|
103248667a | |
|
907c9b35d8 | |
|
71f3ee1b88 | |
|
7f1a04a320 | |
|
800625c952 | |
|
56612f16cf | |
|
0d2bb46786 | |
|
c61317e448 | |
|
a137afc1b6 | |
|
c572f7cc9f | |
|
3c30783b14 | |
|
56b33327ab | |
|
c36c239dd5 | |
|
3f971444f9 | |
|
7a5fa7f8b3 | |
|
fe5b4b2451 | |
|
78238998ad | |
|
e53f1eaf80 | |
|
ffee52a7f6 | |
|
6727dc299a | |
|
02e0b37521 | |
|
04915f2db0 | |
|
9d79bfadea | |
|
5f50c4863d |
|
@ -15,6 +15,9 @@ REDIS_PORT=6379
|
|||
REDIS_PASSWORD=password
|
||||
|
||||
ENABLE_CREDIT=false
|
||||
STRIPE_API_KEY=
|
||||
STRIPE_WEBHOOK_SECRET=
|
||||
|
||||
# What environment things should be logged under: local dev or prod
|
||||
APP_ENV=local
|
||||
# What environment to behave as: "local" or "cloud"
|
||||
|
@ -36,7 +39,7 @@ SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
|
|||
## to use the platform's webhook-related functionality.
|
||||
## If you are developing locally, you can use something like ngrok to get a publc URL
|
||||
## and tunnel it to your locally running backend.
|
||||
PLATFORM_BASE_URL=https://your-public-url-here
|
||||
PLATFORM_BASE_URL=http://localhost:3000
|
||||
|
||||
## == INTEGRATION CREDENTIALS == ##
|
||||
# Each set of server side credentials is required for the corresponding 3rd party
|
||||
|
@ -72,6 +75,12 @@ GOOGLE_CLIENT_SECRET=
|
|||
TWITTER_CLIENT_ID=
|
||||
TWITTER_CLIENT_SECRET=
|
||||
|
||||
# Linear App
|
||||
# Make a new workspace for your OAuth APP -- trust me
|
||||
# https://linear.app/settings/api/applications/new
|
||||
# Callback URL: http://localhost:3000/auth/integrations/oauth_callback
|
||||
LINEAR_CLIENT_ID=
|
||||
LINEAR_CLIENT_SECRET=
|
||||
|
||||
## ===== OPTIONAL API KEYS ===== ##
|
||||
|
||||
|
@ -130,6 +139,9 @@ EXA_API_KEY=
|
|||
# E2B
|
||||
E2B_API_KEY=
|
||||
|
||||
# Mem0
|
||||
MEM0_API_KEY=
|
||||
|
||||
# Nvidia
|
||||
NVIDIA_API_KEY=
|
||||
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
import enum
|
||||
from typing import Any, List
|
||||
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockType
|
||||
from backend.data.model import SchemaField
|
||||
from backend.util.mock import MockObject
|
||||
from backend.util.text import TextFormatter
|
||||
from backend.util.type import convert
|
||||
|
||||
formatter = TextFormatter()
|
||||
|
||||
|
@ -590,3 +592,78 @@ class CreateListBlock(Block):
|
|||
yield "list", input_data.values
|
||||
except Exception as e:
|
||||
yield "error", f"Failed to create list: {str(e)}"
|
||||
|
||||
|
||||
class TypeOptions(enum.Enum):
|
||||
STRING = "string"
|
||||
NUMBER = "number"
|
||||
BOOLEAN = "boolean"
|
||||
LIST = "list"
|
||||
DICTIONARY = "dictionary"
|
||||
|
||||
|
||||
class UniversalTypeConverterBlock(Block):
|
||||
class Input(BlockSchema):
|
||||
value: Any = SchemaField(
|
||||
description="The value to convert to a universal type."
|
||||
)
|
||||
type: TypeOptions = SchemaField(description="The type to convert the value to.")
|
||||
|
||||
class Output(BlockSchema):
|
||||
value: Any = SchemaField(description="The converted value.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="95d1b990-ce13-4d88-9737-ba5c2070c97b",
|
||||
description="This block is used to convert a value to a universal type.",
|
||||
categories={BlockCategory.BASIC},
|
||||
input_schema=UniversalTypeConverterBlock.Input,
|
||||
output_schema=UniversalTypeConverterBlock.Output,
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
||||
try:
|
||||
converted_value = convert(
|
||||
input_data.value,
|
||||
{
|
||||
TypeOptions.STRING: str,
|
||||
TypeOptions.NUMBER: float,
|
||||
TypeOptions.BOOLEAN: bool,
|
||||
TypeOptions.LIST: list,
|
||||
TypeOptions.DICTIONARY: dict,
|
||||
}[input_data.type],
|
||||
)
|
||||
yield "value", converted_value
|
||||
except Exception as e:
|
||||
yield "error", f"Failed to convert value: {str(e)}"
|
||||
|
||||
|
||||
class TextSplitBlock(Block):
|
||||
class Input(BlockSchema):
|
||||
text: str = SchemaField(description="The text to split.")
|
||||
delimiter: str = SchemaField(description="The delimiter to split the text by.")
|
||||
strip: bool = SchemaField(
|
||||
description="Whether to strip the text.", default=True
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
texts: list[str] = SchemaField(
|
||||
description="The text split into a list of strings."
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="d5ea33c8-a575-477a-b42f-2fe3be5055ec",
|
||||
description="This block is used to split a text into a list of strings.",
|
||||
categories={BlockCategory.TEXT},
|
||||
input_schema=TextSplitBlock.Input,
|
||||
output_schema=TextSplitBlock.Output,
|
||||
test_input={"text": "Hello, World!", "delimiter": ","},
|
||||
test_output=[("texts", ["Hello", " World!"])],
|
||||
)
|
||||
|
||||
def run(self, input_data: Input, **kwargs) -> BlockOutput:
|
||||
texts = input_data.text.split(input_data.delimiter)
|
||||
if input_data.strip:
|
||||
texts = [text.strip() for text in texts]
|
||||
yield "texts", texts
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
from typing import List, Optional
|
||||
from typing import List
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
|
|
@ -0,0 +1,272 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from backend.blocks.linear._auth import LinearCredentials
|
||||
from backend.blocks.linear.models import (
|
||||
CreateCommentResponse,
|
||||
CreateIssueResponse,
|
||||
Issue,
|
||||
Project,
|
||||
)
|
||||
from backend.util.request import Requests
|
||||
|
||||
|
||||
class LinearAPIException(Exception):
|
||||
def __init__(self, message: str, status_code: int):
|
||||
super().__init__(message)
|
||||
self.status_code = status_code
|
||||
|
||||
|
||||
class LinearClient:
|
||||
"""Client for the Linear API
|
||||
|
||||
If you're looking for the schema: https://studio.apollographql.com/public/Linear-API/variant/current/schema
|
||||
"""
|
||||
|
||||
API_URL = "https://api.linear.app/graphql"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
credentials: LinearCredentials | None = None,
|
||||
custom_requests: Optional[Requests] = None,
|
||||
):
|
||||
if custom_requests:
|
||||
self._requests = custom_requests
|
||||
else:
|
||||
|
||||
headers: Dict[str, str] = {
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
if credentials:
|
||||
headers["Authorization"] = credentials.bearer()
|
||||
|
||||
self._requests = Requests(
|
||||
extra_headers=headers,
|
||||
trusted_origins=["https://api.linear.app"],
|
||||
raise_for_status=False,
|
||||
)
|
||||
|
||||
def _execute_graphql_request(
|
||||
self, query: str, variables: dict | None = None
|
||||
) -> Any:
|
||||
"""
|
||||
Executes a GraphQL request against the Linear API and returns the response data.
|
||||
|
||||
Args:
|
||||
query: The GraphQL query string.
|
||||
variables (optional): Any GraphQL query variables
|
||||
|
||||
Returns:
|
||||
The parsed JSON response data, or raises a LinearAPIException on error.
|
||||
"""
|
||||
payload: Dict[str, Any] = {"query": query}
|
||||
if variables:
|
||||
payload["variables"] = variables
|
||||
|
||||
response = self._requests.post(self.API_URL, json=payload)
|
||||
|
||||
if not response.ok:
|
||||
|
||||
try:
|
||||
error_data = response.json()
|
||||
error_message = error_data.get("errors", [{}])[0].get("message", "")
|
||||
except json.JSONDecodeError:
|
||||
error_message = response.text
|
||||
|
||||
raise LinearAPIException(
|
||||
f"Linear API request failed ({response.status_code}): {error_message}",
|
||||
response.status_code,
|
||||
)
|
||||
|
||||
response_data = response.json()
|
||||
if "errors" in response_data:
|
||||
|
||||
error_messages = [
|
||||
error.get("message", "") for error in response_data["errors"]
|
||||
]
|
||||
raise LinearAPIException(
|
||||
f"Linear API returned errors: {', '.join(error_messages)}",
|
||||
response.status_code,
|
||||
)
|
||||
|
||||
return response_data["data"]
|
||||
|
||||
def query(self, query: str, variables: Optional[dict] = None) -> dict:
|
||||
"""Executes a GraphQL query.
|
||||
|
||||
Args:
|
||||
query: The GraphQL query string.
|
||||
variables: Query variables, if any.
|
||||
|
||||
Returns:
|
||||
The response data.
|
||||
"""
|
||||
return self._execute_graphql_request(query, variables)
|
||||
|
||||
def mutate(self, mutation: str, variables: Optional[dict] = None) -> dict:
|
||||
"""Executes a GraphQL mutation.
|
||||
|
||||
Args:
|
||||
mutation: The GraphQL mutation string.
|
||||
variables: Query variables, if any.
|
||||
|
||||
Returns:
|
||||
The response data.
|
||||
"""
|
||||
return self._execute_graphql_request(mutation, variables)
|
||||
|
||||
def try_create_comment(self, issue_id: str, comment: str) -> CreateCommentResponse:
|
||||
try:
|
||||
mutation = """
|
||||
mutation CommentCreate($input: CommentCreateInput!) {
|
||||
commentCreate(input: $input) {
|
||||
success
|
||||
comment {
|
||||
id
|
||||
body
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
variables = {
|
||||
"input": {
|
||||
"body": comment,
|
||||
"issueId": issue_id,
|
||||
}
|
||||
}
|
||||
|
||||
added_comment = self.mutate(mutation, variables)
|
||||
# Select the commentCreate field from the mutation response
|
||||
return CreateCommentResponse(**added_comment["commentCreate"])
|
||||
except LinearAPIException as e:
|
||||
raise e
|
||||
|
||||
def try_get_team_by_name(self, team_name: str) -> str:
|
||||
try:
|
||||
query = """
|
||||
query GetTeamId($searchTerm: String!) {
|
||||
teams(filter: {
|
||||
or: [
|
||||
{ name: { eqIgnoreCase: $searchTerm } },
|
||||
{ key: { eqIgnoreCase: $searchTerm } }
|
||||
]
|
||||
}) {
|
||||
nodes {
|
||||
id
|
||||
name
|
||||
key
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
variables: dict[str, Any] = {
|
||||
"searchTerm": team_name,
|
||||
}
|
||||
|
||||
team_id = self.query(query, variables)
|
||||
return team_id["teams"]["nodes"][0]["id"]
|
||||
except LinearAPIException as e:
|
||||
raise e
|
||||
|
||||
def try_create_issue(
|
||||
self,
|
||||
team_id: str,
|
||||
title: str,
|
||||
description: str | None = None,
|
||||
priority: int | None = None,
|
||||
project_id: str | None = None,
|
||||
) -> CreateIssueResponse:
|
||||
try:
|
||||
mutation = """
|
||||
mutation IssueCreate($input: IssueCreateInput!) {
|
||||
issueCreate(input: $input) {
|
||||
issue {
|
||||
title
|
||||
description
|
||||
id
|
||||
identifier
|
||||
priority
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
variables: dict[str, Any] = {
|
||||
"input": {
|
||||
"teamId": team_id,
|
||||
"title": title,
|
||||
}
|
||||
}
|
||||
|
||||
if project_id:
|
||||
variables["input"]["projectId"] = project_id
|
||||
|
||||
if description:
|
||||
variables["input"]["description"] = description
|
||||
|
||||
if priority:
|
||||
variables["input"]["priority"] = priority
|
||||
|
||||
added_issue = self.mutate(mutation, variables)
|
||||
return CreateIssueResponse(**added_issue["issueCreate"])
|
||||
except LinearAPIException as e:
|
||||
raise e
|
||||
|
||||
def try_search_projects(self, term: str) -> list[Project]:
|
||||
try:
|
||||
query = """
|
||||
query SearchProjects($term: String!, $includeComments: Boolean!) {
|
||||
searchProjects(term: $term, includeComments: $includeComments) {
|
||||
nodes {
|
||||
id
|
||||
name
|
||||
description
|
||||
priority
|
||||
progress
|
||||
content
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
variables: dict[str, Any] = {
|
||||
"term": term,
|
||||
"includeComments": True,
|
||||
}
|
||||
|
||||
projects = self.query(query, variables)
|
||||
return [
|
||||
Project(**project) for project in projects["searchProjects"]["nodes"]
|
||||
]
|
||||
except LinearAPIException as e:
|
||||
raise e
|
||||
|
||||
def try_search_issues(self, term: str) -> list[Issue]:
|
||||
try:
|
||||
query = """
|
||||
query SearchIssues($term: String!, $includeComments: Boolean!) {
|
||||
searchIssues(term: $term, includeComments: $includeComments) {
|
||||
nodes {
|
||||
id
|
||||
identifier
|
||||
title
|
||||
description
|
||||
priority
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
variables: dict[str, Any] = {
|
||||
"term": term,
|
||||
"includeComments": True,
|
||||
}
|
||||
|
||||
issues = self.query(query, variables)
|
||||
return [Issue(**issue) for issue in issues["searchIssues"]["nodes"]]
|
||||
except LinearAPIException as e:
|
||||
raise e
|
|
@ -0,0 +1,101 @@
|
|||
from enum import Enum
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import SecretStr
|
||||
|
||||
from backend.data.model import (
|
||||
APIKeyCredentials,
|
||||
CredentialsField,
|
||||
CredentialsMetaInput,
|
||||
OAuth2Credentials,
|
||||
)
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.util.settings import Secrets
|
||||
|
||||
secrets = Secrets()
|
||||
LINEAR_OAUTH_IS_CONFIGURED = bool(
|
||||
secrets.linear_client_id and secrets.linear_client_secret
|
||||
)
|
||||
|
||||
LinearCredentials = OAuth2Credentials | APIKeyCredentials
|
||||
# LinearCredentialsInput = CredentialsMetaInput[
|
||||
# Literal[ProviderName.LINEAR],
|
||||
# Literal["oauth2", "api_key"] if LINEAR_OAUTH_IS_CONFIGURED else Literal["oauth2"],
|
||||
# ]
|
||||
LinearCredentialsInput = CredentialsMetaInput[
|
||||
Literal[ProviderName.LINEAR], Literal["oauth2"]
|
||||
]
|
||||
|
||||
|
||||
# (required) Comma separated list of scopes:
|
||||
|
||||
# read - (Default) Read access for the user's account. This scope will always be present.
|
||||
|
||||
# write - Write access for the user's account. If your application only needs to create comments, use a more targeted scope
|
||||
|
||||
# issues:create - Allows creating new issues and their attachments
|
||||
|
||||
# comments:create - Allows creating new issue comments
|
||||
|
||||
# timeSchedule:write - Allows creating and modifying time schedules
|
||||
|
||||
|
||||
# admin - Full access to admin level endpoints. You should never ask for this permission unless it's absolutely needed
|
||||
class LinearScope(str, Enum):
|
||||
READ = "read"
|
||||
WRITE = "write"
|
||||
ISSUES_CREATE = "issues:create"
|
||||
COMMENTS_CREATE = "comments:create"
|
||||
TIME_SCHEDULE_WRITE = "timeSchedule:write"
|
||||
ADMIN = "admin"
|
||||
|
||||
|
||||
def LinearCredentialsField(scopes: list[LinearScope]) -> LinearCredentialsInput:
|
||||
"""
|
||||
Creates a Linear credentials input on a block.
|
||||
|
||||
Params:
|
||||
scope: The authorization scope needed for the block to work. ([list of available scopes](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/scopes-for-oauth-apps#available-scopes))
|
||||
""" # noqa
|
||||
return CredentialsField(
|
||||
required_scopes=set([LinearScope.READ.value]).union(
|
||||
set([scope.value for scope in scopes])
|
||||
),
|
||||
description="The Linear integration can be used with OAuth, "
|
||||
"or any API key with sufficient permissions for the blocks it is used on.",
|
||||
)
|
||||
|
||||
|
||||
TEST_CREDENTIALS_OAUTH = OAuth2Credentials(
|
||||
id="01234567-89ab-cdef-0123-456789abcdef",
|
||||
provider="linear",
|
||||
title="Mock Linear API key",
|
||||
username="mock-linear-username",
|
||||
access_token=SecretStr("mock-linear-access-token"),
|
||||
access_token_expires_at=None,
|
||||
refresh_token=SecretStr("mock-linear-refresh-token"),
|
||||
refresh_token_expires_at=None,
|
||||
scopes=["mock-linear-scopes"],
|
||||
)
|
||||
|
||||
TEST_CREDENTIALS_API_KEY = APIKeyCredentials(
|
||||
id="01234567-89ab-cdef-0123-456789abcdef",
|
||||
provider="linear",
|
||||
title="Mock Linear API key",
|
||||
api_key=SecretStr("mock-linear-api-key"),
|
||||
expires_at=None,
|
||||
)
|
||||
|
||||
TEST_CREDENTIALS_INPUT_OAUTH = {
|
||||
"provider": TEST_CREDENTIALS_OAUTH.provider,
|
||||
"id": TEST_CREDENTIALS_OAUTH.id,
|
||||
"type": TEST_CREDENTIALS_OAUTH.type,
|
||||
"title": TEST_CREDENTIALS_OAUTH.type,
|
||||
}
|
||||
|
||||
TEST_CREDENTIALS_INPUT_API_KEY = {
|
||||
"provider": TEST_CREDENTIALS_API_KEY.provider,
|
||||
"id": TEST_CREDENTIALS_API_KEY.id,
|
||||
"type": TEST_CREDENTIALS_API_KEY.type,
|
||||
"title": TEST_CREDENTIALS_API_KEY.type,
|
||||
}
|
|
@ -0,0 +1,81 @@
|
|||
from backend.blocks.linear._api import LinearAPIException, LinearClient
|
||||
from backend.blocks.linear._auth import (
|
||||
TEST_CREDENTIALS_INPUT_OAUTH,
|
||||
TEST_CREDENTIALS_OAUTH,
|
||||
LinearCredentials,
|
||||
LinearCredentialsField,
|
||||
LinearCredentialsInput,
|
||||
LinearScope,
|
||||
)
|
||||
from backend.blocks.linear.models import CreateCommentResponse
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class LinearCreateCommentBlock(Block):
|
||||
"""Block for creating comments on Linear issues"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: LinearCredentialsInput = LinearCredentialsField(
|
||||
scopes=[LinearScope.COMMENTS_CREATE],
|
||||
)
|
||||
issue_id: str = SchemaField(description="ID of the issue to comment on")
|
||||
comment: str = SchemaField(description="Comment text to add to the issue")
|
||||
|
||||
class Output(BlockSchema):
|
||||
comment_id: str = SchemaField(description="ID of the created comment")
|
||||
comment_body: str = SchemaField(
|
||||
description="Text content of the created comment"
|
||||
)
|
||||
error: str = SchemaField(description="Error message if comment creation failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="8f7d3a2e-9b5c-4c6a-8f1d-7c8b3e4a5d6c",
|
||||
description="Creates a new comment on a Linear issue",
|
||||
input_schema=self.Input,
|
||||
output_schema=self.Output,
|
||||
categories={BlockCategory.PRODUCTIVITY, BlockCategory.ISSUE_TRACKING},
|
||||
test_input={
|
||||
"issue_id": "TEST-123",
|
||||
"comment": "Test comment",
|
||||
"credentials": TEST_CREDENTIALS_INPUT_OAUTH,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS_OAUTH,
|
||||
test_output=[("comment_id", "abc123"), ("comment_body", "Test comment")],
|
||||
test_mock={
|
||||
"create_comment": lambda *args, **kwargs: (
|
||||
"abc123",
|
||||
"Test comment",
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def create_comment(
|
||||
credentials: LinearCredentials, issue_id: str, comment: str
|
||||
) -> tuple[str, str]:
|
||||
client = LinearClient(credentials=credentials)
|
||||
response: CreateCommentResponse = client.try_create_comment(
|
||||
issue_id=issue_id, comment=comment
|
||||
)
|
||||
return response.comment.id, response.comment.body
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: LinearCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
"""Execute the comment creation"""
|
||||
try:
|
||||
comment_id, comment_body = self.create_comment(
|
||||
credentials=credentials,
|
||||
issue_id=input_data.issue_id,
|
||||
comment=input_data.comment,
|
||||
)
|
||||
|
||||
yield "comment_id", comment_id
|
||||
yield "comment_body", comment_body
|
||||
|
||||
except LinearAPIException as e:
|
||||
yield "error", str(e)
|
||||
except Exception as e:
|
||||
yield "error", f"Unexpected error: {str(e)}"
|
|
@ -0,0 +1,186 @@
|
|||
from backend.blocks.linear._api import LinearAPIException, LinearClient
|
||||
from backend.blocks.linear._auth import (
|
||||
TEST_CREDENTIALS_INPUT_OAUTH,
|
||||
TEST_CREDENTIALS_OAUTH,
|
||||
LinearCredentials,
|
||||
LinearCredentialsField,
|
||||
LinearCredentialsInput,
|
||||
LinearScope,
|
||||
)
|
||||
from backend.blocks.linear.models import CreateIssueResponse, Issue
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class LinearCreateIssueBlock(Block):
|
||||
"""Block for creating issues on Linear"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: LinearCredentialsInput = LinearCredentialsField(
|
||||
scopes=[LinearScope.ISSUES_CREATE],
|
||||
)
|
||||
title: str = SchemaField(description="Title of the issue")
|
||||
description: str | None = SchemaField(description="Description of the issue")
|
||||
team_name: str = SchemaField(
|
||||
description="Name of the team to create the issue on"
|
||||
)
|
||||
priority: int | None = SchemaField(
|
||||
description="Priority of the issue",
|
||||
default=None,
|
||||
minimum=0,
|
||||
maximum=4,
|
||||
)
|
||||
project_name: str | None = SchemaField(
|
||||
description="Name of the project to create the issue on",
|
||||
default=None,
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
issue_id: str = SchemaField(description="ID of the created issue")
|
||||
issue_title: str = SchemaField(description="Title of the created issue")
|
||||
error: str = SchemaField(description="Error message if issue creation failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="f9c68f55-dcca-40a8-8771-abf9601680aa",
|
||||
description="Creates a new issue on Linear",
|
||||
input_schema=self.Input,
|
||||
output_schema=self.Output,
|
||||
categories={BlockCategory.PRODUCTIVITY, BlockCategory.ISSUE_TRACKING},
|
||||
test_input={
|
||||
"title": "Test issue",
|
||||
"description": "Test description",
|
||||
"team_name": "Test team",
|
||||
"project_name": "Test project",
|
||||
"credentials": TEST_CREDENTIALS_INPUT_OAUTH,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS_OAUTH,
|
||||
test_output=[("issue_id", "abc123"), ("issue_title", "Test issue")],
|
||||
test_mock={
|
||||
"create_issue": lambda *args, **kwargs: (
|
||||
"abc123",
|
||||
"Test issue",
|
||||
)
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def create_issue(
|
||||
credentials: LinearCredentials,
|
||||
team_name: str,
|
||||
title: str,
|
||||
description: str | None = None,
|
||||
priority: int | None = None,
|
||||
project_name: str | None = None,
|
||||
) -> tuple[str, str]:
|
||||
client = LinearClient(credentials=credentials)
|
||||
team_id = client.try_get_team_by_name(team_name=team_name)
|
||||
project_id: str | None = None
|
||||
if project_name:
|
||||
projects = client.try_search_projects(term=project_name)
|
||||
if projects:
|
||||
project_id = projects[0].id
|
||||
else:
|
||||
raise LinearAPIException("Project not found", status_code=404)
|
||||
response: CreateIssueResponse = client.try_create_issue(
|
||||
team_id=team_id,
|
||||
title=title,
|
||||
description=description,
|
||||
priority=priority,
|
||||
project_id=project_id,
|
||||
)
|
||||
return response.issue.identifier, response.issue.title
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: LinearCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
"""Execute the issue creation"""
|
||||
try:
|
||||
issue_id, issue_title = self.create_issue(
|
||||
credentials=credentials,
|
||||
team_name=input_data.team_name,
|
||||
title=input_data.title,
|
||||
description=input_data.description,
|
||||
priority=input_data.priority,
|
||||
project_name=input_data.project_name,
|
||||
)
|
||||
|
||||
yield "issue_id", issue_id
|
||||
yield "issue_title", issue_title
|
||||
|
||||
except LinearAPIException as e:
|
||||
yield "error", str(e)
|
||||
except Exception as e:
|
||||
yield "error", f"Unexpected error: {str(e)}"
|
||||
|
||||
|
||||
class LinearSearchIssuesBlock(Block):
|
||||
"""Block for searching issues on Linear"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
term: str = SchemaField(description="Term to search for issues")
|
||||
credentials: LinearCredentialsInput = LinearCredentialsField(
|
||||
scopes=[LinearScope.READ],
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
issues: list[Issue] = SchemaField(description="List of issues")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="b5a2a0e6-26b4-4c5b-8a42-bc79e9cb65c2",
|
||||
description="Searches for issues on Linear",
|
||||
input_schema=self.Input,
|
||||
output_schema=self.Output,
|
||||
test_input={
|
||||
"term": "Test issue",
|
||||
"credentials": TEST_CREDENTIALS_INPUT_OAUTH,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS_OAUTH,
|
||||
test_output=[
|
||||
(
|
||||
"issues",
|
||||
[
|
||||
Issue(
|
||||
id="abc123",
|
||||
identifier="abc123",
|
||||
title="Test issue",
|
||||
description="Test description",
|
||||
priority=1,
|
||||
)
|
||||
],
|
||||
)
|
||||
],
|
||||
test_mock={
|
||||
"search_issues": lambda *args, **kwargs: [
|
||||
Issue(
|
||||
id="abc123",
|
||||
identifier="abc123",
|
||||
title="Test issue",
|
||||
description="Test description",
|
||||
priority=1,
|
||||
)
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def search_issues(
|
||||
credentials: LinearCredentials,
|
||||
term: str,
|
||||
) -> list[Issue]:
|
||||
client = LinearClient(credentials=credentials)
|
||||
response: list[Issue] = client.try_search_issues(term=term)
|
||||
return response
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: LinearCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
"""Execute the issue search"""
|
||||
try:
|
||||
issues = self.search_issues(credentials=credentials, term=input_data.term)
|
||||
yield "issues", issues
|
||||
except LinearAPIException as e:
|
||||
yield "error", str(e)
|
||||
except Exception as e:
|
||||
yield "error", f"Unexpected error: {str(e)}"
|
|
@ -0,0 +1,41 @@
|
|||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class Comment(BaseModel):
|
||||
id: str
|
||||
body: str
|
||||
|
||||
|
||||
class CreateCommentInput(BaseModel):
|
||||
body: str
|
||||
issueId: str
|
||||
|
||||
|
||||
class CreateCommentResponse(BaseModel):
|
||||
success: bool
|
||||
comment: Comment
|
||||
|
||||
|
||||
class CreateCommentResponseWrapper(BaseModel):
|
||||
commentCreate: CreateCommentResponse
|
||||
|
||||
|
||||
class Issue(BaseModel):
|
||||
id: str
|
||||
identifier: str
|
||||
title: str
|
||||
description: str | None
|
||||
priority: int
|
||||
|
||||
|
||||
class CreateIssueResponse(BaseModel):
|
||||
issue: Issue
|
||||
|
||||
|
||||
class Project(BaseModel):
|
||||
id: str
|
||||
name: str
|
||||
description: str
|
||||
priority: int
|
||||
progress: int
|
||||
content: str
|
|
@ -0,0 +1,93 @@
|
|||
from backend.blocks.linear._api import LinearAPIException, LinearClient
|
||||
from backend.blocks.linear._auth import (
|
||||
TEST_CREDENTIALS_INPUT_OAUTH,
|
||||
TEST_CREDENTIALS_OAUTH,
|
||||
LinearCredentials,
|
||||
LinearCredentialsField,
|
||||
LinearCredentialsInput,
|
||||
LinearScope,
|
||||
)
|
||||
from backend.blocks.linear.models import Project
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class LinearSearchProjectsBlock(Block):
|
||||
"""Block for searching projects on Linear"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: LinearCredentialsInput = LinearCredentialsField(
|
||||
scopes=[LinearScope.READ],
|
||||
)
|
||||
term: str = SchemaField(description="Term to search for projects")
|
||||
|
||||
class Output(BlockSchema):
|
||||
projects: list[Project] = SchemaField(description="List of projects")
|
||||
error: str = SchemaField(description="Error message if issue creation failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="446a1d35-9d8f-4ac5-83ea-7684ec50e6af",
|
||||
description="Searches for projects on Linear",
|
||||
input_schema=self.Input,
|
||||
output_schema=self.Output,
|
||||
categories={BlockCategory.PRODUCTIVITY, BlockCategory.ISSUE_TRACKING},
|
||||
test_input={
|
||||
"term": "Test project",
|
||||
"credentials": TEST_CREDENTIALS_INPUT_OAUTH,
|
||||
},
|
||||
test_credentials=TEST_CREDENTIALS_OAUTH,
|
||||
test_output=[
|
||||
(
|
||||
"projects",
|
||||
[
|
||||
Project(
|
||||
id="abc123",
|
||||
name="Test project",
|
||||
description="Test description",
|
||||
priority=1,
|
||||
progress=1,
|
||||
content="Test content",
|
||||
)
|
||||
],
|
||||
)
|
||||
],
|
||||
test_mock={
|
||||
"search_projects": lambda *args, **kwargs: [
|
||||
Project(
|
||||
id="abc123",
|
||||
name="Test project",
|
||||
description="Test description",
|
||||
priority=1,
|
||||
progress=1,
|
||||
content="Test content",
|
||||
)
|
||||
]
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def search_projects(
|
||||
credentials: LinearCredentials,
|
||||
term: str,
|
||||
) -> list[Project]:
|
||||
client = LinearClient(credentials=credentials)
|
||||
response: list[Project] = client.try_search_projects(term=term)
|
||||
return response
|
||||
|
||||
def run(
|
||||
self, input_data: Input, *, credentials: LinearCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
"""Execute the project search"""
|
||||
try:
|
||||
projects = self.search_projects(
|
||||
credentials=credentials,
|
||||
term=input_data.term,
|
||||
)
|
||||
|
||||
yield "projects", projects
|
||||
|
||||
except LinearAPIException as e:
|
||||
yield "error", str(e)
|
||||
except Exception as e:
|
||||
yield "error", f"Unexpected error: {str(e)}"
|
|
@ -0,0 +1,310 @@
|
|||
from typing import Any, Literal, Optional, Union
|
||||
|
||||
from mem0 import MemoryClient
|
||||
from pydantic import SecretStr
|
||||
|
||||
from backend.data.block import Block, BlockOutput, BlockSchema
|
||||
from backend.data.model import (
|
||||
APIKeyCredentials,
|
||||
CredentialsField,
|
||||
CredentialsMetaInput,
|
||||
SchemaField,
|
||||
)
|
||||
from backend.integrations.providers import ProviderName
|
||||
|
||||
TEST_CREDENTIALS = APIKeyCredentials(
|
||||
id="ed55ac19-356e-4243-a6cb-bc599e9b716f",
|
||||
provider="mem0",
|
||||
api_key=SecretStr("mock-mem0-api-key"),
|
||||
title="Mock Mem0 API key",
|
||||
expires_at=None,
|
||||
)
|
||||
|
||||
TEST_CREDENTIALS_INPUT = {
|
||||
"provider": TEST_CREDENTIALS.provider,
|
||||
"id": TEST_CREDENTIALS.id,
|
||||
"type": TEST_CREDENTIALS.type,
|
||||
"title": TEST_CREDENTIALS.title,
|
||||
}
|
||||
|
||||
|
||||
class Mem0Base:
|
||||
"""Base class with shared utilities for Mem0 blocks"""
|
||||
|
||||
@staticmethod
|
||||
def _get_client(credentials: APIKeyCredentials) -> MemoryClient:
|
||||
"""Get initialized Mem0 client"""
|
||||
return MemoryClient(api_key=credentials.api_key.get_secret_value())
|
||||
|
||||
|
||||
class AddMemoryBlock(Block, Mem0Base):
|
||||
"""Block for adding memories to Mem0
|
||||
|
||||
Always limited by user_id and optional graph_id, run_id, agent_id"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: CredentialsMetaInput[
|
||||
Literal[ProviderName.MEM0], Literal["api_key"]
|
||||
] = CredentialsField(description="Mem0 API key credentials")
|
||||
content: Union[str, list[dict[str, str]]] = SchemaField(
|
||||
description="Content to add - either a string or list of message objects"
|
||||
)
|
||||
metadata: dict[str, Any] = SchemaField(
|
||||
description="Optional metadata for the memory", default={}
|
||||
)
|
||||
|
||||
limit_memory_to_run: bool = SchemaField(
|
||||
description="Limit the memory to the run", default=False
|
||||
)
|
||||
limit_memory_to_agent: bool = SchemaField(
|
||||
description="Limit the memory to the agent", default=False
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
action: str = SchemaField(description="Action of the operation")
|
||||
memory: str = SchemaField(description="Memory created")
|
||||
error: str = SchemaField(description="Error message if operation fails")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="dce97578-86be-45a4-ae50-f6de33fc935a",
|
||||
description="Add new memories to Mem0 with user segmentation",
|
||||
input_schema=AddMemoryBlock.Input,
|
||||
output_schema=AddMemoryBlock.Output,
|
||||
test_input={
|
||||
"content": [{"role": "user", "content": "I'm a vegetarian"}],
|
||||
"metadata": {"food": "vegetarian"},
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_output=[("action", "NO_CHANGE")],
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_mock={"_get_client": lambda credentials: MockMemoryClient()},
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: APIKeyCredentials,
|
||||
user_id: str,
|
||||
graph_id: str,
|
||||
run_id: str,
|
||||
**kwargs
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
client = self._get_client(credentials)
|
||||
|
||||
# Convert input to messages format if needed
|
||||
messages = (
|
||||
input_data.content
|
||||
if isinstance(input_data.content, list)
|
||||
else [{"role": "user", "content": input_data.content}]
|
||||
)
|
||||
|
||||
params = {
|
||||
"user_id": user_id,
|
||||
"output_format": "v1.1",
|
||||
"metadata": input_data.metadata,
|
||||
}
|
||||
|
||||
if input_data.limit_memory_to_run:
|
||||
params["run_id"] = run_id
|
||||
if input_data.limit_memory_to_agent:
|
||||
params["agent_id"] = graph_id
|
||||
|
||||
# Use the client to add memory
|
||||
result = client.add(
|
||||
messages,
|
||||
**params,
|
||||
)
|
||||
|
||||
if len(result.get("results", [])) > 0:
|
||||
for result in result.get("results", []):
|
||||
yield "action", result["event"]
|
||||
yield "memory", result["memory"]
|
||||
else:
|
||||
yield "action", "NO_CHANGE"
|
||||
|
||||
except Exception as e:
|
||||
yield "error", str(object=e)
|
||||
|
||||
|
||||
class SearchMemoryBlock(Block, Mem0Base):
|
||||
"""Block for searching memories in Mem0"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: CredentialsMetaInput[
|
||||
Literal[ProviderName.MEM0], Literal["api_key"]
|
||||
] = CredentialsField(description="Mem0 API key credentials")
|
||||
query: str = SchemaField(
|
||||
description="Search query",
|
||||
advanced=False,
|
||||
)
|
||||
trigger: bool = SchemaField(
|
||||
description="An unused field that is used to (re-)trigger the block when you have no other inputs",
|
||||
default=False,
|
||||
advanced=False,
|
||||
)
|
||||
categories_filter: list[str] | None = SchemaField(
|
||||
description="Categories to filter by", default=None
|
||||
)
|
||||
limit_memory_to_run: bool = SchemaField(
|
||||
description="Limit the memory to the run", default=False
|
||||
)
|
||||
limit_memory_to_agent: bool = SchemaField(
|
||||
description="Limit the memory to the agent", default=True
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
memories: Any = SchemaField(description="List of matching memories")
|
||||
error: str = SchemaField(description="Error message if operation fails")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="bd7c84e3-e073-4b75-810c-600886ec8a5b",
|
||||
description="Search memories in Mem0 by user",
|
||||
input_schema=SearchMemoryBlock.Input,
|
||||
output_schema=SearchMemoryBlock.Output,
|
||||
test_input={
|
||||
"query": "vegetarian preferences",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
"top_k": 10,
|
||||
"rerank": True,
|
||||
},
|
||||
test_output=[
|
||||
("memories", [{"id": "test-memory", "content": "test content"}])
|
||||
],
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_mock={"_get_client": lambda credentials: MockMemoryClient()},
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: APIKeyCredentials,
|
||||
user_id: str,
|
||||
graph_id: str,
|
||||
run_id: str,
|
||||
**kwargs
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
client = self._get_client(credentials)
|
||||
|
||||
filters: dict[str, list[dict[str, str | dict[str, list[str]]]]] = {
|
||||
# This works with only one filter, so we can allow others to add on later
|
||||
"AND": [
|
||||
{"user_id": user_id},
|
||||
]
|
||||
}
|
||||
if input_data.categories_filter:
|
||||
filters["AND"].append(
|
||||
{"categories": {"contains": input_data.categories_filter}}
|
||||
)
|
||||
if input_data.limit_memory_to_run:
|
||||
filters["AND"].append({"run_id": run_id})
|
||||
if input_data.limit_memory_to_agent:
|
||||
filters["AND"].append({"agent_id": graph_id})
|
||||
|
||||
result: list[dict[str, Any]] = client.search(
|
||||
input_data.query, version="v2", filters=filters
|
||||
)
|
||||
yield "memories", result
|
||||
|
||||
except Exception as e:
|
||||
yield "error", str(e)
|
||||
|
||||
|
||||
class GetAllMemoriesBlock(Block, Mem0Base):
|
||||
"""Block for retrieving all memories from Mem0"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: CredentialsMetaInput[
|
||||
Literal[ProviderName.MEM0], Literal["api_key"]
|
||||
] = CredentialsField(description="Mem0 API key credentials")
|
||||
trigger: bool = SchemaField(
|
||||
description="An unused field that is used to trigger the block when you have no other inputs",
|
||||
default=False,
|
||||
advanced=False,
|
||||
)
|
||||
categories: Optional[list[str]] = SchemaField(
|
||||
description="Filter by categories", default=None
|
||||
)
|
||||
limit_memory_to_run: bool = SchemaField(
|
||||
description="Limit the memory to the run", default=False
|
||||
)
|
||||
limit_memory_to_agent: bool = SchemaField(
|
||||
description="Limit the memory to the agent", default=False
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
memories: Any = SchemaField(description="List of memories")
|
||||
error: str = SchemaField(description="Error message if operation fails")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="45aee5bf-4767-45d1-a28b-e01c5aae9fc1",
|
||||
description="Retrieve all memories from Mem0 with pagination",
|
||||
input_schema=GetAllMemoriesBlock.Input,
|
||||
output_schema=GetAllMemoriesBlock.Output,
|
||||
test_input={
|
||||
"user_id": "test_user",
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_output=[
|
||||
("memories", [{"id": "test-memory", "content": "test content"}]),
|
||||
],
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_mock={"_get_client": lambda credentials: MockMemoryClient()},
|
||||
)
|
||||
|
||||
def run(
|
||||
self,
|
||||
input_data: Input,
|
||||
*,
|
||||
credentials: APIKeyCredentials,
|
||||
user_id: str,
|
||||
graph_id: str,
|
||||
run_id: str,
|
||||
**kwargs
|
||||
) -> BlockOutput:
|
||||
try:
|
||||
client = self._get_client(credentials)
|
||||
|
||||
filters: dict[str, list[dict[str, str | dict[str, list[str]]]]] = {
|
||||
"AND": [
|
||||
{"user_id": user_id},
|
||||
]
|
||||
}
|
||||
if input_data.limit_memory_to_run:
|
||||
filters["AND"].append({"run_id": run_id})
|
||||
if input_data.limit_memory_to_agent:
|
||||
filters["AND"].append({"agent_id": graph_id})
|
||||
if input_data.categories:
|
||||
filters["AND"].append(
|
||||
{"categories": {"contains": input_data.categories}}
|
||||
)
|
||||
|
||||
memories: list[dict[str, Any]] = client.get_all(
|
||||
filters=filters,
|
||||
version="v2",
|
||||
)
|
||||
|
||||
yield "memories", memories
|
||||
|
||||
except Exception as e:
|
||||
yield "error", str(e)
|
||||
|
||||
|
||||
# Mock client for testing
|
||||
class MockMemoryClient:
|
||||
"""Mock Mem0 client for testing"""
|
||||
|
||||
def add(self, *args, **kwargs):
|
||||
return {"memory_id": "test-memory-id", "status": "success"}
|
||||
|
||||
def search(self, *args, **kwargs) -> list[dict[str, str]]:
|
||||
return [{"id": "test-memory", "content": "test content"}]
|
||||
|
||||
def get_all(self, *args, **kwargs) -> list[dict[str, str]]:
|
||||
return [{"id": "test-memory", "content": "test content"}]
|
|
@ -64,6 +64,8 @@ class BlockCategory(Enum):
|
|||
SAFETY = (
|
||||
"Block that provides AI safety mechanisms such as detecting harmful content"
|
||||
)
|
||||
PRODUCTIVITY = "Block that helps with productivity"
|
||||
ISSUE_TRACKING = "Block that helps with issue tracking"
|
||||
|
||||
def dict(self) -> dict[str, str]:
|
||||
return {"category": self.name, "description": self.value}
|
||||
|
@ -395,6 +397,7 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
|
|||
}
|
||||
|
||||
def execute(self, input_data: BlockInput, **kwargs) -> BlockOutput:
|
||||
# Merge the input data with the extra execution arguments, preferring the args for security
|
||||
if error := self.input_schema.validate_data(input_data):
|
||||
raise ValueError(
|
||||
f"Unable to execute block with invalid input data: {error}"
|
||||
|
|
|
@ -1,40 +1,40 @@
|
|||
from abc import ABC, abstractmethod
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import stripe
|
||||
from prisma import Json
|
||||
from prisma.enums import CreditTransactionType
|
||||
from prisma.errors import UniqueViolationError
|
||||
from prisma.models import CreditTransaction
|
||||
from prisma.models import CreditTransaction, User
|
||||
from prisma.types import CreditTransactionCreateInput, CreditTransactionWhereInput
|
||||
|
||||
from backend.data import db
|
||||
from backend.data.block import Block, BlockInput, get_block
|
||||
from backend.data.block_cost_config import BLOCK_COSTS
|
||||
from backend.data.cost import BlockCost, BlockCostType
|
||||
from backend.util.settings import Config
|
||||
from backend.data.execution import NodeExecutionEntry
|
||||
from backend.data.user import get_user_by_id
|
||||
from backend.util.settings import Settings
|
||||
|
||||
config = Config()
|
||||
settings = Settings()
|
||||
stripe.api_key = settings.secrets.stripe_api_key
|
||||
|
||||
|
||||
class UserCreditBase(ABC):
|
||||
def __init__(self, num_user_credits_refill: int):
|
||||
self.num_user_credits_refill = num_user_credits_refill
|
||||
|
||||
@abstractmethod
|
||||
async def get_or_refill_credit(self, user_id: str) -> int:
|
||||
async def get_credits(self, user_id: str) -> int:
|
||||
"""
|
||||
Get the current credit for the user and refill if no transaction has been made in the current cycle.
|
||||
Get the current credits for the user.
|
||||
|
||||
Returns:
|
||||
int: The current credit for the user.
|
||||
int: The current credits for the user.
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def spend_credits(
|
||||
self,
|
||||
user_id: str,
|
||||
user_credit: int,
|
||||
block_id: str,
|
||||
input_data: BlockInput,
|
||||
entry: NodeExecutionEntry,
|
||||
data_size: float,
|
||||
run_time: float,
|
||||
) -> int:
|
||||
|
@ -42,10 +42,7 @@ class UserCreditBase(ABC):
|
|||
Spend the credits for the user based on the block usage.
|
||||
|
||||
Args:
|
||||
user_id (str): The user ID.
|
||||
user_credit (int): The current credit for the user.
|
||||
block_id (str): The block ID.
|
||||
input_data (BlockInput): The input data for the block.
|
||||
entry (NodeExecutionEntry): The node execution identifiers & data.
|
||||
data_size (float): The size of the data being processed.
|
||||
run_time (float): The time taken to run the block.
|
||||
|
||||
|
@ -57,7 +54,7 @@ class UserCreditBase(ABC):
|
|||
@abstractmethod
|
||||
async def top_up_credits(self, user_id: str, amount: int):
|
||||
"""
|
||||
Top up the credits for the user.
|
||||
Top up the credits for the user immediately.
|
||||
|
||||
Args:
|
||||
user_id (str): The user ID.
|
||||
|
@ -65,51 +62,137 @@ class UserCreditBase(ABC):
|
|||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def top_up_intent(self, user_id: str, amount: int) -> str:
|
||||
"""
|
||||
Create a payment intent to top up the credits for the user.
|
||||
|
||||
class UserCredit(UserCreditBase):
|
||||
async def get_or_refill_credit(self, user_id: str) -> int:
|
||||
cur_time = self.time_now()
|
||||
cur_month = cur_time.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
|
||||
nxt_month = (
|
||||
cur_month.replace(month=cur_month.month + 1)
|
||||
if cur_month.month < 12
|
||||
else cur_month.replace(year=cur_month.year + 1, month=1)
|
||||
Args:
|
||||
user_id (str): The user ID.
|
||||
amount (int): The amount of credits to top up.
|
||||
|
||||
Returns:
|
||||
str: The redirect url to the payment page.
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def fulfill_checkout(
|
||||
self, *, session_id: str | None = None, user_id: str | None = None
|
||||
):
|
||||
"""
|
||||
Fulfill the Stripe checkout session.
|
||||
|
||||
Args:
|
||||
session_id (str | None): The checkout session ID. Will try to fulfill most recent if None.
|
||||
user_id (str | None): The user ID must be provided if session_id is None.
|
||||
"""
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def time_now() -> datetime:
|
||||
return datetime.now(timezone.utc)
|
||||
|
||||
# ====== Transaction Helper Methods ====== #
|
||||
# Any modifications to the transaction table should only be done through these methods #
|
||||
|
||||
async def _get_credits(self, user_id: str) -> tuple[int, datetime]:
|
||||
"""
|
||||
Returns the current balance of the user & the latest balance snapshot time.
|
||||
"""
|
||||
top_time = self.time_now()
|
||||
snapshot = await CreditTransaction.prisma().find_first(
|
||||
where={
|
||||
"userId": user_id,
|
||||
"createdAt": {"lte": top_time},
|
||||
"isActive": True,
|
||||
"runningBalance": {"not": None}, # type: ignore
|
||||
},
|
||||
order={"createdAt": "desc"},
|
||||
)
|
||||
if snapshot:
|
||||
return snapshot.runningBalance or 0, snapshot.createdAt
|
||||
|
||||
user_credit = await CreditTransaction.prisma().group_by(
|
||||
# No snapshot: Manually calculate balance using current month's transactions.
|
||||
low_time = top_time.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
|
||||
transactions = await CreditTransaction.prisma().group_by(
|
||||
by=["userId"],
|
||||
sum={"amount": True},
|
||||
where={
|
||||
"userId": user_id,
|
||||
"createdAt": {"gte": cur_month, "lt": nxt_month},
|
||||
"createdAt": {"gte": low_time, "lte": top_time},
|
||||
"isActive": True,
|
||||
},
|
||||
)
|
||||
transaction_balance = (
|
||||
transactions[0].get("_sum", {}).get("amount", 0) if transactions else 0
|
||||
)
|
||||
return transaction_balance, datetime.min
|
||||
|
||||
if user_credit:
|
||||
credit_sum = user_credit[0].get("_sum") or {}
|
||||
return credit_sum.get("amount", 0)
|
||||
async def _enable_transaction(
|
||||
self, transaction_key: str, user_id: str, metadata: Json
|
||||
):
|
||||
|
||||
key = f"MONTHLY-CREDIT-TOP-UP-{cur_month}"
|
||||
transaction = await CreditTransaction.prisma().find_first_or_raise(
|
||||
where={"transactionKey": transaction_key, "userId": user_id}
|
||||
)
|
||||
|
||||
try:
|
||||
await CreditTransaction.prisma().create(
|
||||
data={
|
||||
"amount": self.num_user_credits_refill,
|
||||
"type": CreditTransactionType.TOP_UP,
|
||||
if transaction.isActive:
|
||||
return
|
||||
|
||||
async with db.locked_transaction(f"usr_trx_{user_id}"):
|
||||
user_balance, _ = await self._get_credits(user_id)
|
||||
|
||||
await CreditTransaction.prisma().update(
|
||||
where={
|
||||
"creditTransactionIdentifier": {
|
||||
"transactionKey": transaction_key,
|
||||
"userId": user_id,
|
||||
"transactionKey": key,
|
||||
}
|
||||
},
|
||||
data={
|
||||
"isActive": True,
|
||||
"runningBalance": user_balance + transaction.amount,
|
||||
"createdAt": self.time_now(),
|
||||
"metadata": metadata,
|
||||
},
|
||||
)
|
||||
|
||||
async def _add_transaction(
|
||||
self,
|
||||
user_id: str,
|
||||
amount: int,
|
||||
transaction_type: CreditTransactionType,
|
||||
is_active: bool = True,
|
||||
transaction_key: str | None = None,
|
||||
metadata: Json = Json({}),
|
||||
):
|
||||
async with db.locked_transaction(f"usr_trx_{user_id}"):
|
||||
# Get latest balance snapshot
|
||||
user_balance, _ = await self._get_credits(user_id)
|
||||
if amount < 0 and user_balance < abs(amount):
|
||||
raise ValueError(
|
||||
f"Insufficient balance for user {user_id}, balance: {user_balance}, amount: {amount}"
|
||||
)
|
||||
|
||||
# Create the transaction
|
||||
transaction_data: CreditTransactionCreateInput = {
|
||||
"userId": user_id,
|
||||
"amount": amount,
|
||||
"runningBalance": user_balance + amount,
|
||||
"type": transaction_type,
|
||||
"metadata": metadata,
|
||||
"isActive": is_active,
|
||||
"createdAt": self.time_now(),
|
||||
}
|
||||
)
|
||||
except UniqueViolationError:
|
||||
pass # Already refilled this month
|
||||
if transaction_key:
|
||||
transaction_data["transactionKey"] = transaction_key
|
||||
await CreditTransaction.prisma().create(data=transaction_data)
|
||||
|
||||
return self.num_user_credits_refill
|
||||
return user_balance + amount
|
||||
|
||||
@staticmethod
|
||||
def time_now():
|
||||
return datetime.now(timezone.utc)
|
||||
|
||||
class UserCredit(UserCreditBase):
|
||||
|
||||
def _block_usage_cost(
|
||||
self,
|
||||
|
@ -148,8 +231,8 @@ class UserCredit(UserCreditBase):
|
|||
) -> bool:
|
||||
"""
|
||||
Filter rules:
|
||||
- If costFilter is an object, then check if costFilter is the subset of inputValues
|
||||
- Otherwise, check if costFilter is equal to inputValues.
|
||||
- If cost_filter is an object, then check if cost_filter is the subset of input_data
|
||||
- Otherwise, check if cost_filter is equal to input_data.
|
||||
- Undefined, null, and empty string are considered as equal.
|
||||
"""
|
||||
if not isinstance(cost_filter, dict) or not isinstance(input_data, dict):
|
||||
|
@ -163,57 +246,169 @@ class UserCredit(UserCreditBase):
|
|||
|
||||
async def spend_credits(
|
||||
self,
|
||||
user_id: str,
|
||||
user_credit: int,
|
||||
block_id: str,
|
||||
input_data: BlockInput,
|
||||
entry: NodeExecutionEntry,
|
||||
data_size: float,
|
||||
run_time: float,
|
||||
validate_balance: bool = True,
|
||||
) -> int:
|
||||
block = get_block(block_id)
|
||||
block = get_block(entry.block_id)
|
||||
if not block:
|
||||
raise ValueError(f"Block not found: {block_id}")
|
||||
raise ValueError(f"Block not found: {entry.block_id}")
|
||||
|
||||
cost, matching_filter = self._block_usage_cost(
|
||||
block=block, input_data=input_data, data_size=data_size, run_time=run_time
|
||||
block=block, input_data=entry.data, data_size=data_size, run_time=run_time
|
||||
)
|
||||
if cost <= 0:
|
||||
if cost == 0:
|
||||
return 0
|
||||
|
||||
if validate_balance and user_credit < cost:
|
||||
raise ValueError(f"Insufficient credit: {user_credit} < {cost}")
|
||||
|
||||
await CreditTransaction.prisma().create(
|
||||
data={
|
||||
"userId": user_id,
|
||||
"amount": -cost,
|
||||
"type": CreditTransactionType.USAGE,
|
||||
"blockId": block.id,
|
||||
"metadata": Json(
|
||||
await self._add_transaction(
|
||||
user_id=entry.user_id,
|
||||
amount=-cost,
|
||||
transaction_type=CreditTransactionType.USAGE,
|
||||
metadata=Json(
|
||||
{
|
||||
"graph_exec_id": entry.graph_exec_id,
|
||||
"graph_id": entry.graph_id,
|
||||
"node_id": entry.node_id,
|
||||
"node_exec_id": entry.node_exec_id,
|
||||
"block_id": entry.block_id,
|
||||
"block": block.name,
|
||||
"input": matching_filter,
|
||||
}
|
||||
),
|
||||
"createdAt": self.time_now(),
|
||||
}
|
||||
)
|
||||
|
||||
return cost
|
||||
|
||||
async def top_up_credits(self, user_id: str, amount: int):
|
||||
if amount < 0:
|
||||
raise ValueError(f"Top up amount must not be negative: {amount}")
|
||||
|
||||
await self._add_transaction(
|
||||
user_id=user_id,
|
||||
amount=amount,
|
||||
transaction_type=CreditTransactionType.TOP_UP,
|
||||
)
|
||||
|
||||
async def top_up_intent(self, user_id: str, amount: int) -> str:
|
||||
# Create checkout session
|
||||
# https://docs.stripe.com/checkout/quickstart?client=react
|
||||
# unit_amount param is always in the smallest currency unit (so cents for usd)
|
||||
# which is equal to amount of credits
|
||||
checkout_session = stripe.checkout.Session.create(
|
||||
customer=await get_stripe_customer_id(user_id),
|
||||
line_items=[
|
||||
{
|
||||
"price_data": {
|
||||
"currency": "usd",
|
||||
"product_data": {
|
||||
"name": "AutoGPT Platform Credits",
|
||||
},
|
||||
"unit_amount": amount,
|
||||
},
|
||||
"quantity": 1,
|
||||
}
|
||||
],
|
||||
mode="payment",
|
||||
success_url=settings.config.platform_base_url
|
||||
+ "/marketplace/credits?topup=success",
|
||||
cancel_url=settings.config.platform_base_url
|
||||
+ "/marketplace/credits?topup=cancel",
|
||||
)
|
||||
|
||||
# Create pending transaction
|
||||
await self._add_transaction(
|
||||
user_id=user_id,
|
||||
amount=amount,
|
||||
transaction_type=CreditTransactionType.TOP_UP,
|
||||
transaction_key=checkout_session.id,
|
||||
is_active=False,
|
||||
metadata=Json({"checkout_session": checkout_session}),
|
||||
)
|
||||
|
||||
return checkout_session.url or ""
|
||||
|
||||
# https://docs.stripe.com/checkout/fulfillment
|
||||
async def fulfill_checkout(
|
||||
self, *, session_id: str | None = None, user_id: str | None = None
|
||||
):
|
||||
if (not session_id and not user_id) or (session_id and user_id):
|
||||
raise ValueError("Either session_id or user_id must be provided")
|
||||
|
||||
# Retrieve CreditTransaction
|
||||
find_filter: CreditTransactionWhereInput = {
|
||||
"type": CreditTransactionType.TOP_UP,
|
||||
"isActive": False,
|
||||
}
|
||||
if session_id:
|
||||
find_filter["transactionKey"] = session_id
|
||||
if user_id:
|
||||
find_filter["userId"] = user_id
|
||||
|
||||
# Find the most recent inactive top-up transaction
|
||||
credit_transaction = await CreditTransaction.prisma().find_first_or_raise(
|
||||
where=find_filter,
|
||||
order={"createdAt": "desc"},
|
||||
)
|
||||
|
||||
# This can be called multiple times for one id, so ignore if already fulfilled
|
||||
if not credit_transaction:
|
||||
return
|
||||
|
||||
# Retrieve the Checkout Session from the API
|
||||
checkout_session = stripe.checkout.Session.retrieve(
|
||||
credit_transaction.transactionKey
|
||||
)
|
||||
|
||||
# Check the Checkout Session's payment_status property
|
||||
# to determine if fulfillment should be performed
|
||||
if checkout_session.payment_status in ["paid", "no_payment_required"]:
|
||||
await self._enable_transaction(
|
||||
transaction_key=credit_transaction.transactionKey,
|
||||
user_id=credit_transaction.userId,
|
||||
metadata=Json({"checkout_session": checkout_session}),
|
||||
)
|
||||
|
||||
async def get_credits(self, user_id: str) -> int:
|
||||
balance, _ = await self._get_credits(user_id)
|
||||
return balance
|
||||
|
||||
|
||||
class BetaUserCredit(UserCredit):
|
||||
"""
|
||||
This is a temporary class to handle the test user utilizing monthly credit refill.
|
||||
TODO: Remove this class & its feature toggle.
|
||||
"""
|
||||
|
||||
def __init__(self, num_user_credits_refill: int):
|
||||
self.num_user_credits_refill = num_user_credits_refill
|
||||
|
||||
async def get_credits(self, user_id: str) -> int:
|
||||
cur_time = self.time_now().date()
|
||||
balance, snapshot_time = await self._get_credits(user_id)
|
||||
if (snapshot_time.year, snapshot_time.month) == (cur_time.year, cur_time.month):
|
||||
return balance
|
||||
|
||||
try:
|
||||
await CreditTransaction.prisma().create(
|
||||
data={
|
||||
"transactionKey": f"MONTHLY-CREDIT-TOP-UP-{cur_time}",
|
||||
"userId": user_id,
|
||||
"amount": amount,
|
||||
"amount": self.num_user_credits_refill,
|
||||
"runningBalance": self.num_user_credits_refill,
|
||||
"type": CreditTransactionType.TOP_UP,
|
||||
"metadata": Json({}),
|
||||
"isActive": True,
|
||||
"createdAt": self.time_now(),
|
||||
}
|
||||
)
|
||||
except UniqueViolationError:
|
||||
pass # Already refilled this month
|
||||
|
||||
return self.num_user_credits_refill
|
||||
|
||||
|
||||
class DisabledUserCredit(UserCreditBase):
|
||||
async def get_or_refill_credit(self, *args, **kwargs) -> int:
|
||||
async def get_credits(self, *args, **kwargs) -> int:
|
||||
return 0
|
||||
|
||||
async def spend_credits(self, *args, **kwargs) -> int:
|
||||
|
@ -222,13 +417,37 @@ class DisabledUserCredit(UserCreditBase):
|
|||
async def top_up_credits(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
async def top_up_intent(self, *args, **kwargs) -> str:
|
||||
return ""
|
||||
|
||||
async def fulfill_checkout(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
def get_user_credit_model() -> UserCreditBase:
|
||||
if config.enable_credit.lower() == "true":
|
||||
return UserCredit(config.num_user_credits_refill)
|
||||
else:
|
||||
return DisabledUserCredit(0)
|
||||
if not settings.config.enable_credit:
|
||||
return DisabledUserCredit()
|
||||
|
||||
if settings.config.enable_beta_monthly_credit:
|
||||
return BetaUserCredit(settings.config.num_user_credits_refill)
|
||||
|
||||
return UserCredit()
|
||||
|
||||
|
||||
def get_block_costs() -> dict[str, list[BlockCost]]:
|
||||
return {block().id: costs for block, costs in BLOCK_COSTS.items()}
|
||||
|
||||
|
||||
async def get_stripe_customer_id(user_id: str) -> str:
|
||||
user = await get_user_by_id(user_id)
|
||||
if not user:
|
||||
raise ValueError(f"User not found: {user_id}")
|
||||
|
||||
if user.stripeCustomerId:
|
||||
return user.stripeCustomerId
|
||||
|
||||
customer = stripe.Customer.create(name=user.name or "", email=user.email)
|
||||
await User.prisma().update(
|
||||
where={"id": user_id}, data={"stripeCustomerId": customer.id}
|
||||
)
|
||||
return customer.id
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import logging
|
||||
import os
|
||||
import zlib
|
||||
from contextlib import asynccontextmanager
|
||||
from uuid import uuid4
|
||||
|
||||
|
@ -54,6 +55,14 @@ async def transaction():
|
|||
yield tx
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def locked_transaction(key: str):
|
||||
lock_key = zlib.crc32(key.encode("utf-8"))
|
||||
async with transaction() as tx:
|
||||
await tx.execute_raw(f"SELECT pg_advisory_xact_lock({lock_key})")
|
||||
yield tx
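The helper above serializes transactions that share a key: the string key is reduced to a 32-bit CRC32 checksum and passed to Postgres' `pg_advisory_xact_lock`, which blocks until any other transaction holding the same lock commits or rolls back. A stdlib-only sketch of the key derivation (the example key is hypothetical):

```python
# Minimal sketch of how locked_transaction derives its advisory-lock key.
# pg_advisory_xact_lock expects an integer, so the string key is hashed with
# CRC32; any two transactions hashing to the same value run one at a time.
import zlib

def advisory_lock_sql(key: str) -> str:
    lock_key = zlib.crc32(key.encode("utf-8"))
    return f"SELECT pg_advisory_xact_lock({lock_key})"

if __name__ == "__main__":
    print(advisory_lock_sql("user-credit:1234"))  # hypothetical lock key
```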
|
||||
|
||||
|
||||
class BaseDbModel(BaseModel):
|
||||
id: str = Field(default_factory=lambda: str(uuid4()))
|
||||
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
from collections import defaultdict
|
||||
from datetime import datetime, timezone
|
||||
from multiprocessing import Manager
|
||||
from typing import Any, AsyncGenerator, Generator, Generic, TypeVar
|
||||
from typing import Any, AsyncGenerator, Generator, Generic, Optional, TypeVar
|
||||
|
||||
from prisma.enums import AgentExecutionStatus
|
||||
from prisma.errors import PrismaError
|
||||
from prisma.models import (
|
||||
AgentGraphExecution,
|
||||
AgentNodeExecution,
|
||||
|
@ -31,6 +32,7 @@ class NodeExecutionEntry(BaseModel):
|
|||
graph_id: str
|
||||
node_exec_id: str
|
||||
node_id: str
|
||||
block_id: str
|
||||
data: BlockInput
|
||||
|
||||
|
||||
|
@ -324,6 +326,30 @@ async def update_execution_status(
|
|||
return ExecutionResult.from_db(res)
|
||||
|
||||
|
||||
async def get_execution(
|
||||
execution_id: str, user_id: str
|
||||
) -> Optional[AgentNodeExecution]:
|
||||
"""
|
||||
Get an execution by ID. Returns None if not found.
|
||||
|
||||
Args:
|
||||
execution_id: The ID of the execution to retrieve
|
||||
|
||||
Returns:
|
||||
The execution if found, None otherwise
|
||||
"""
|
||||
try:
|
||||
execution = await AgentNodeExecution.prisma().find_unique(
|
||||
where={
|
||||
"id": execution_id,
|
||||
"userId": user_id,
|
||||
}
|
||||
)
|
||||
return execution
|
||||
except PrismaError:
|
||||
return None
|
||||
|
||||
|
||||
async def get_execution_results(graph_exec_id: str) -> list[ExecutionResult]:
|
||||
executions = await AgentNodeExecution.prisma().find_many(
|
||||
where={"agentGraphExecutionId": graph_exec_id},
|
||||
|
|
|
@ -4,6 +4,7 @@ from typing import Any, Callable, Concatenate, Coroutine, ParamSpec, TypeVar, ca
|
|||
from backend.data.credit import get_user_credit_model
|
||||
from backend.data.execution import (
|
||||
ExecutionResult,
|
||||
NodeExecutionEntry,
|
||||
RedisExecutionEventBus,
|
||||
create_graph_execution,
|
||||
get_execution_results,
|
||||
|
@ -78,12 +79,8 @@ class DatabaseManager(AppService):
|
|||
|
||||
# Credits
|
||||
user_credit_model = get_user_credit_model()
|
||||
get_or_refill_credit = cast(
|
||||
Callable[[Any, str], int],
|
||||
exposed_run_and_wait(user_credit_model.get_or_refill_credit),
|
||||
)
|
||||
spend_credits = cast(
|
||||
Callable[[Any, str, int, str, dict[str, str], float, float], int],
|
||||
Callable[[Any, NodeExecutionEntry, float, float], int],
|
||||
exposed_run_and_wait(user_credit_model.spend_credits),
|
||||
)
|
||||
|
||||
|
|
|
@ -181,11 +181,13 @@ def execute_node(
|
|||
credentials, creds_lock = creds_manager.acquire(user_id, credentials_meta.id)
|
||||
extra_exec_kwargs[field_name] = credentials
|
||||
|
||||
# Inject extra execution arguments for the blocks via kwargs
|
||||
extra_exec_kwargs["user_id"] = user_id
|
||||
extra_exec_kwargs["run_id"] = graph_exec_id
|
||||
extra_exec_kwargs["graph_id"] = graph_id
|
||||
|
||||
output_size = 0
|
||||
end_status = ExecutionStatus.COMPLETED
|
||||
credit = db_client.get_or_refill_credit(user_id)
|
||||
if credit < 0:
|
||||
raise ValueError(f"Insufficient credit: {credit}")
|
||||
|
||||
try:
|
||||
for output_name, output_data in node_block.execute(
|
||||
|
@ -241,7 +243,8 @@ def execute_node(
|
|||
if res.end_time and res.start_time
|
||||
else 0
|
||||
)
|
||||
db_client.spend_credits(user_id, credit, node_block.id, input_data, s, t)
|
||||
data.data = input_data
|
||||
db_client.spend_credits(data, s, t)
|
||||
|
||||
# Update execution stats
|
||||
if execution_stats is not None:
|
||||
|
@ -260,7 +263,7 @@ def _enqueue_next_nodes(
|
|||
log_metadata: LogMetadata,
|
||||
) -> list[NodeExecutionEntry]:
|
||||
def add_enqueued_execution(
|
||||
node_exec_id: str, node_id: str, data: BlockInput
|
||||
node_exec_id: str, node_id: str, block_id: str, data: BlockInput
|
||||
) -> NodeExecutionEntry:
|
||||
exec_update = db_client.update_execution_status(
|
||||
node_exec_id, ExecutionStatus.QUEUED, data
|
||||
|
@ -272,6 +275,7 @@ def _enqueue_next_nodes(
|
|||
graph_id=graph_id,
|
||||
node_exec_id=node_exec_id,
|
||||
node_id=node_id,
|
||||
block_id=block_id,
|
||||
data=data,
|
||||
)
|
||||
|
||||
|
@ -325,7 +329,12 @@ def _enqueue_next_nodes(
|
|||
# Input is complete, enqueue the execution.
|
||||
log_metadata.info(f"Enqueued {suffix}")
|
||||
enqueued_executions.append(
|
||||
add_enqueued_execution(next_node_exec_id, next_node_id, next_node_input)
|
||||
add_enqueued_execution(
|
||||
node_exec_id=next_node_exec_id,
|
||||
node_id=next_node_id,
|
||||
block_id=next_node.block_id,
|
||||
data=next_node_input,
|
||||
)
|
||||
)
|
||||
|
||||
# Next execution stops here if the link is not static.
|
||||
|
@ -355,7 +364,12 @@ def _enqueue_next_nodes(
|
|||
continue
|
||||
log_metadata.info(f"Enqueueing static-link execution {suffix}")
|
||||
enqueued_executions.append(
|
||||
add_enqueued_execution(iexec.node_exec_id, next_node_id, idata)
|
||||
add_enqueued_execution(
|
||||
node_exec_id=iexec.node_exec_id,
|
||||
node_id=next_node_id,
|
||||
block_id=next_node.block_id,
|
||||
data=idata,
|
||||
)
|
||||
)
|
||||
return enqueued_executions
|
||||
|
||||
|
@ -803,8 +817,8 @@ class ExecutionManager(AppService):
|
|||
# Extract request input data, and assign it to the input pin.
|
||||
if block.block_type == BlockType.INPUT:
|
||||
name = node.input_default.get("name")
|
||||
if name and name in data:
|
||||
input_data = {"value": data[name]}
|
||||
if name in data.get("node_input", {}):
|
||||
input_data = {"value": data["node_input"][name]}
|
||||
|
||||
# Extract webhook payload, and assign it to the input pin
|
||||
webhook_payload_key = f"webhook_{node.webhook_id}_payload"
|
||||
|
@ -840,6 +854,7 @@ class ExecutionManager(AppService):
|
|||
graph_id=node_exec.graph_id,
|
||||
node_exec_id=node_exec.node_exec_id,
|
||||
node_id=node_exec.node_id,
|
||||
block_id=node_exec.block_id,
|
||||
data=node_exec.input_data,
|
||||
)
|
||||
)
|
||||
|
|
|
@ -23,6 +23,15 @@ from backend.util.settings import Settings
|
|||
|
||||
settings = Settings()
|
||||
|
||||
# This is an override since Ollama doesn't actually require an API key, but the credential system enforces that one is attached
|
||||
ollama_credentials = APIKeyCredentials(
|
||||
id="744fdc56-071a-4761-b5a5-0af0ce10a2b5",
|
||||
provider="ollama",
|
||||
api_key=SecretStr("FAKE_API_KEY"),
|
||||
title="Use Credits for Ollama",
|
||||
expires_at=None,
|
||||
)
|
||||
|
||||
revid_credentials = APIKeyCredentials(
|
||||
id="fdb7f412-f519-48d1-9b5f-d2f73d0e01fe",
|
||||
provider="revid",
|
||||
|
@ -121,9 +130,17 @@ nvidia_credentials = APIKeyCredentials(
|
|||
title="Use Credits for Nvidia",
|
||||
expires_at=None,
|
||||
)
|
||||
mem0_credentials = APIKeyCredentials(
|
||||
id="ed55ac19-356e-4243-a6cb-bc599e9b716f",
|
||||
provider="mem0",
|
||||
api_key=SecretStr(settings.secrets.mem0_api_key),
|
||||
title="Use Credits for Mem0",
|
||||
expires_at=None,
|
||||
)
|
||||
|
||||
|
||||
DEFAULT_CREDENTIALS = [
|
||||
ollama_credentials,
|
||||
revid_credentials,
|
||||
ideogram_credentials,
|
||||
replicate_credentials,
|
||||
|
@ -138,6 +155,7 @@ DEFAULT_CREDENTIALS = [
|
|||
exa_credentials,
|
||||
e2b_credentials,
|
||||
nvidia_credentials,
|
||||
mem0_credentials,
|
||||
]
|
||||
|
||||
|
||||
|
@ -169,6 +187,10 @@ class IntegrationCredentialsStore:
|
|||
def get_all_creds(self, user_id: str) -> list[Credentials]:
|
||||
users_credentials = self._get_user_integrations(user_id).credentials
|
||||
all_credentials = users_credentials
|
||||
# These will always be added
|
||||
all_credentials.append(ollama_credentials)
|
||||
|
||||
# These will only be added if the API key is set
|
||||
if settings.secrets.revid_api_key:
|
||||
all_credentials.append(revid_credentials)
|
||||
if settings.secrets.ideogram_api_key:
|
||||
|
@ -197,6 +219,8 @@ class IntegrationCredentialsStore:
|
|||
all_credentials.append(e2b_credentials)
|
||||
if settings.secrets.nvidia_api_key:
|
||||
all_credentials.append(nvidia_credentials)
|
||||
if settings.secrets.mem0_api_key:
|
||||
all_credentials.append(mem0_credentials)
|
||||
return all_credentials
|
||||
|
||||
def get_creds_by_id(self, user_id: str, credentials_id: str) -> Credentials | None:
|
||||
|
|
|
@ -2,6 +2,7 @@ from typing import TYPE_CHECKING
|
|||
|
||||
from .github import GitHubOAuthHandler
|
||||
from .google import GoogleOAuthHandler
|
||||
from .linear import LinearOAuthHandler
|
||||
from .notion import NotionOAuthHandler
|
||||
from .twitter import TwitterOAuthHandler
|
||||
|
||||
|
@ -17,6 +18,7 @@ HANDLERS_BY_NAME: dict["ProviderName", type["BaseOAuthHandler"]] = {
|
|||
GoogleOAuthHandler,
|
||||
NotionOAuthHandler,
|
||||
TwitterOAuthHandler,
|
||||
LinearOAuthHandler,
|
||||
]
|
||||
}
|
||||
# --8<-- [end:HANDLERS_BY_NAMEExample]
|
||||
|
|
|
@ -0,0 +1,165 @@
|
|||
import json
|
||||
from typing import Optional
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from pydantic import SecretStr
|
||||
|
||||
from backend.blocks.linear._api import LinearAPIException
|
||||
from backend.data.model import APIKeyCredentials, OAuth2Credentials
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.util.request import requests
|
||||
|
||||
from .base import BaseOAuthHandler
|
||||
|
||||
|
||||
class LinearOAuthHandler(BaseOAuthHandler):
|
||||
"""
|
||||
OAuth2 handler for Linear.
|
||||
"""
|
||||
|
||||
PROVIDER_NAME = ProviderName.LINEAR
|
||||
|
||||
def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
|
||||
self.client_id = client_id
|
||||
self.client_secret = client_secret
|
||||
self.redirect_uri = redirect_uri
|
||||
self.auth_base_url = "https://linear.app/oauth/authorize"
|
||||
self.token_url = "https://api.linear.app/oauth/token" # Correct token URL
|
||||
self.revoke_url = "https://api.linear.app/oauth/revoke"
|
||||
|
||||
def get_login_url(
|
||||
self, scopes: list[str], state: str, code_challenge: Optional[str]
|
||||
) -> str:
|
||||
|
||||
params = {
|
||||
"client_id": self.client_id,
|
||||
"redirect_uri": self.redirect_uri,
|
||||
"response_type": "code", # Important: include "response_type"
|
||||
"scope": ",".join(scopes), # Comma-separated, not space-separated
|
||||
"state": state,
|
||||
}
|
||||
return f"{self.auth_base_url}?{urlencode(params)}"
|
||||
|
||||
def exchange_code_for_tokens(
|
||||
self, code: str, scopes: list[str], code_verifier: Optional[str]
|
||||
) -> OAuth2Credentials:
|
||||
return self._request_tokens({"code": code, "redirect_uri": self.redirect_uri})
|
||||
|
||||
def revoke_tokens(self, credentials: OAuth2Credentials) -> bool:
|
||||
if not credentials.access_token:
|
||||
raise ValueError("No access token to revoke")
|
||||
|
||||
headers = {
|
||||
"Authorization": f"Bearer {credentials.access_token.get_secret_value()}"
|
||||
}
|
||||
|
||||
response = requests.post(self.revoke_url, headers=headers)
|
||||
if not response.ok:
|
||||
try:
|
||||
error_data = response.json()
|
||||
error_message = error_data.get("error", "Unknown error")
|
||||
except json.JSONDecodeError:
|
||||
error_message = response.text
|
||||
raise LinearAPIException(
|
||||
f"Failed to revoke Linear tokens ({response.status_code}): {error_message}",
|
||||
response.status_code,
|
||||
)
|
||||
|
||||
return True # Linear doesn't return JSON on successful revoke
|
||||
|
||||
def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
|
||||
if not credentials.refresh_token:
|
||||
raise ValueError(
|
||||
"No refresh token available."
|
||||
) # Linear uses non-expiring tokens
|
||||
|
||||
return self._request_tokens(
|
||||
{
|
||||
"refresh_token": credentials.refresh_token.get_secret_value(),
|
||||
"grant_type": "refresh_token",
|
||||
}
|
||||
)
|
||||
|
||||
def _request_tokens(
|
||||
self,
|
||||
params: dict[str, str],
|
||||
current_credentials: Optional[OAuth2Credentials] = None,
|
||||
) -> OAuth2Credentials:
|
||||
request_body = {
|
||||
"client_id": self.client_id,
|
||||
"client_secret": self.client_secret,
|
||||
"grant_type": "authorization_code", # Ensure grant_type is correct
|
||||
**params,
|
||||
}
|
||||
|
||||
headers = {
|
||||
"Content-Type": "application/x-www-form-urlencoded"
|
||||
} # Correct header for token request
|
||||
response = requests.post(self.token_url, data=request_body, headers=headers)
|
||||
|
||||
if not response.ok:
|
||||
try:
|
||||
error_data = response.json()
|
||||
error_message = error_data.get("error", "Unknown error")
|
||||
|
||||
except json.JSONDecodeError:
|
||||
error_message = response.text
|
||||
raise LinearAPIException(
|
||||
f"Failed to fetch Linear tokens ({response.status_code}): {error_message}",
|
||||
response.status_code,
|
||||
)
|
||||
|
||||
token_data = response.json()
|
||||
|
||||
# Note: Linear access tokens do not expire, so we set expires_at to None
|
||||
new_credentials = OAuth2Credentials(
|
||||
provider=self.PROVIDER_NAME,
|
||||
title=current_credentials.title if current_credentials else None,
|
||||
username=token_data.get("user", {}).get(
|
||||
"name", "Unknown User"
|
||||
), # extract the user's name from the token response, defaulting to "Unknown User"
|
||||
access_token=token_data["access_token"],
|
||||
scopes=token_data["scope"].split(
|
||||
","
|
||||
), # Linear returns comma-separated scopes
|
||||
refresh_token=token_data.get(
|
||||
"refresh_token"
|
||||
), # Linear uses non-expiring tokens so this might be null
|
||||
access_token_expires_at=None,
|
||||
refresh_token_expires_at=None,
|
||||
)
|
||||
if current_credentials:
|
||||
new_credentials.id = current_credentials.id
|
||||
return new_credentials
|
||||
|
||||
def _request_username(self, access_token: str) -> Optional[str]:
|
||||
|
||||
# Use the LinearClient to fetch user details using GraphQL
|
||||
from backend.blocks.linear._api import LinearClient
|
||||
|
||||
try:
|
||||
|
||||
linear_client = LinearClient(
|
||||
APIKeyCredentials(
|
||||
api_key=SecretStr(access_token),
|
||||
title="temp",
|
||||
provider=self.PROVIDER_NAME,
|
||||
expires_at=None,
|
||||
)
|
||||
) # Temporary credentials for this request
|
||||
|
||||
query = """
|
||||
query Viewer {
|
||||
viewer {
|
||||
name
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
response = linear_client.query(query)
|
||||
return response["viewer"]["name"]
|
||||
|
||||
except Exception as e: # Handle any errors
|
||||
|
||||
print(f"Error fetching username: {e}")
|
||||
return None
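For reference, a hypothetical instantiation of the handler above. The client id and secret map to the `LINEAR_CLIENT_ID` / `LINEAR_CLIENT_SECRET` settings added in this change; the module path, redirect URI, and scope list are assumptions for illustration:

```python
# Hypothetical usage sketch of LinearOAuthHandler; all values are placeholders.
from backend.integrations.oauth.linear import LinearOAuthHandler  # assumed path

handler = LinearOAuthHandler(
    client_id="your-linear-client-id",
    client_secret="your-linear-client-secret",
    redirect_uri="http://localhost:3000/auth/integrations/oauth_callback",
)
# Linear expects comma-separated scopes; get_login_url joins them accordingly.
login_url = handler.get_login_url(
    scopes=["read"], state="opaque-state-value", code_challenge=None
)
print(login_url)
```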
|
|
@ -17,7 +17,9 @@ class ProviderName(str, Enum):
|
|||
HUBSPOT = "hubspot"
|
||||
IDEOGRAM = "ideogram"
|
||||
JINA = "jina"
|
||||
LINEAR = "linear"
|
||||
MEDIUM = "medium"
|
||||
MEM0 = "mem0"
|
||||
NOTION = "notion"
|
||||
NVIDIA = "nvidia"
|
||||
OLLAMA = "ollama"
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
from fastapi import FastAPI
|
||||
|
||||
from .routes.v1 import v1_router
|
||||
|
||||
external_app = FastAPI(
|
||||
title="AutoGPT External API",
|
||||
description="External API for AutoGPT integrations",
|
||||
docs_url="/docs",
|
||||
version="1.0",
|
||||
)
|
||||
external_app.include_router(v1_router, prefix="/v1")
|
|
@ -0,0 +1,37 @@
|
|||
from fastapi import Depends, HTTPException, Request
|
||||
from fastapi.security import APIKeyHeader
|
||||
from prisma.enums import APIKeyPermission
|
||||
|
||||
from backend.data.api_key import has_permission, validate_api_key
|
||||
|
||||
api_key_header = APIKeyHeader(name="X-API-Key")
|
||||
|
||||
|
||||
async def require_api_key(request: Request):
|
||||
"""Base middleware for API key authentication"""
|
||||
api_key = await api_key_header(request)
|
||||
|
||||
if api_key is None:
|
||||
raise HTTPException(status_code=401, detail="Missing API key")
|
||||
|
||||
api_key_obj = await validate_api_key(api_key)
|
||||
|
||||
if not api_key_obj:
|
||||
raise HTTPException(status_code=401, detail="Invalid API key")
|
||||
|
||||
request.state.api_key = api_key_obj
|
||||
return api_key_obj
|
||||
|
||||
|
||||
def require_permission(permission: APIKeyPermission):
|
||||
"""Dependency function for checking specific permissions"""
|
||||
|
||||
async def check_permission(api_key=Depends(require_api_key)):
|
||||
if not has_permission(api_key, permission):
|
||||
raise HTTPException(
|
||||
status_code=403,
|
||||
detail=f"API key missing required permission: {permission}",
|
||||
)
|
||||
return api_key
|
||||
|
||||
return check_permission
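A hypothetical route guarded by the dependency above; the router, path, and chosen permission are illustrative only (`READ_BLOCK` is one of the permissions used by the routes that follow):

```python
# Hypothetical example of protecting an endpoint with require_permission.
from fastapi import APIRouter, Depends
from prisma.enums import APIKeyPermission

from backend.server.external.middleware import require_permission

example_router = APIRouter()

@example_router.get(
    "/blocks-summary",
    dependencies=[Depends(require_permission(APIKeyPermission.READ_BLOCK))],
)
async def blocks_summary():
    # Reached only when X-API-Key maps to a key holding READ_BLOCK.
    return {"ok": True}
```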
|
|
@ -0,0 +1,111 @@
|
|||
import logging
|
||||
from collections import defaultdict
|
||||
from typing import Any, Sequence
|
||||
|
||||
from autogpt_libs.utils.cache import thread_cached
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from prisma.enums import APIKeyPermission
|
||||
|
||||
import backend.data.block
|
||||
from backend.data import execution as execution_db
|
||||
from backend.data import graph as graph_db
|
||||
from backend.data.api_key import APIKey
|
||||
from backend.data.block import BlockInput, CompletedBlockOutput
|
||||
from backend.executor import ExecutionManager
|
||||
from backend.server.external.middleware import require_permission
|
||||
from backend.util.service import get_service_client
|
||||
from backend.util.settings import Settings
|
||||
|
||||
|
||||
@thread_cached
|
||||
def execution_manager_client() -> ExecutionManager:
|
||||
return get_service_client(ExecutionManager)
|
||||
|
||||
|
||||
settings = Settings()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
v1_router = APIRouter()
|
||||
|
||||
|
||||
@v1_router.get(
|
||||
path="/blocks",
|
||||
tags=["blocks"],
|
||||
dependencies=[Depends(require_permission(APIKeyPermission.READ_BLOCK))],
|
||||
)
|
||||
def get_graph_blocks() -> Sequence[dict[Any, Any]]:
|
||||
blocks = [block() for block in backend.data.block.get_blocks().values()]
|
||||
return [b.to_dict() for b in blocks]
|
||||
|
||||
|
||||
@v1_router.post(
|
||||
path="/blocks/{block_id}/execute",
|
||||
tags=["blocks"],
|
||||
dependencies=[Depends(require_permission(APIKeyPermission.EXECUTE_BLOCK))],
|
||||
)
|
||||
def execute_graph_block(
|
||||
block_id: str,
|
||||
data: BlockInput,
|
||||
api_key: APIKey = Depends(require_permission(APIKeyPermission.EXECUTE_BLOCK)),
|
||||
) -> CompletedBlockOutput:
|
||||
obj = backend.data.block.get_block(block_id)
|
||||
if not obj:
|
||||
raise HTTPException(status_code=404, detail=f"Block #{block_id} not found.")
|
||||
|
||||
output = defaultdict(list)
|
||||
for name, data in obj.execute(data):
|
||||
output[name].append(data)
|
||||
return output
|
||||
|
||||
|
||||
@v1_router.post(
|
||||
path="/graphs/{graph_id}/execute",
|
||||
tags=["graphs"],
|
||||
)
|
||||
def execute_graph(
|
||||
graph_id: str,
|
||||
node_input: dict[Any, Any],
|
||||
api_key: APIKey = Depends(require_permission(APIKeyPermission.EXECUTE_GRAPH)),
|
||||
) -> dict[str, Any]:
|
||||
try:
|
||||
graph_exec = execution_manager_client().add_execution(
|
||||
graph_id, node_input, user_id=api_key.user_id
|
||||
)
|
||||
return {"id": graph_exec.graph_exec_id}
|
||||
except Exception as e:
|
||||
msg = e.__str__().encode().decode("unicode_escape")
|
||||
raise HTTPException(status_code=400, detail=msg)
|
||||
|
||||
|
||||
@v1_router.get(
|
||||
path="/graphs/{graph_id}/executions/{graph_exec_id}/results",
|
||||
tags=["graphs"],
|
||||
)
|
||||
async def get_graph_execution_results(
|
||||
graph_id: str,
|
||||
graph_exec_id: str,
|
||||
api_key: APIKey = Depends(require_permission(APIKeyPermission.READ_GRAPH)),
|
||||
) -> dict:
|
||||
graph = await graph_db.get_graph(graph_id, user_id=api_key.user_id)
|
||||
if not graph:
|
||||
raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
|
||||
|
||||
results = await execution_db.get_execution_results(graph_exec_id)
|
||||
|
||||
return {
|
||||
"execution_id": graph_exec_id,
|
||||
"nodes": [
|
||||
{
|
||||
"node_id": result.node_id,
|
||||
"input": (
|
||||
result.input_data.get("value")
|
||||
if "value" in result.input_data
|
||||
else result.input_data
|
||||
),
|
||||
"output": result.output_data.get(
|
||||
"response", result.output_data.get("result", [])
|
||||
),
|
||||
}
|
||||
for result in results
|
||||
],
|
||||
}
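A hypothetical client-side call sequence against the external API above. The `/external-api/v1` prefix follows from the mount and router prefix added in this change; the host, port, graph id, and API key are placeholders:

```python
# Hypothetical client sketch; host, graph id, and API key are placeholders.
# The key must carry EXECUTE_GRAPH / READ_GRAPH permissions and is sent in
# the X-API-Key header checked by the middleware above.
import requests

BASE = "http://localhost:8006/external-api/v1"  # assumed host and mount point
HEADERS = {"X-API-Key": "your-api-key"}

run = requests.post(
    f"{BASE}/graphs/your-graph-id/execute", json={}, headers=HEADERS
).json()
results = requests.get(
    f"{BASE}/graphs/your-graph-id/executions/{run['id']}/results",
    headers=HEADERS,
).json()
print(results["nodes"])
```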
|
|
@ -110,6 +110,11 @@ def callback(
|
|||
|
||||
logger.debug(f"Received credentials with final scopes: {credentials.scopes}")
|
||||
|
||||
# Linear returns scopes as a single string with spaces, so we need to split them
|
||||
# TODO: make a bypass of this part of the OAuth handler
|
||||
if len(credentials.scopes) == 1 and " " in credentials.scopes[0]:
|
||||
credentials.scopes = credentials.scopes[0].split(" ")
|
||||
|
||||
# Check if the granted scopes are sufficient for the requested scopes
|
||||
if not set(scopes).issubset(set(credentials.scopes)):
|
||||
# For now, we'll just log the warning and continue
|
||||
|
|
|
@ -56,3 +56,8 @@ class SetGraphActiveVersion(pydantic.BaseModel):
|
|||
|
||||
class UpdatePermissionsRequest(pydantic.BaseModel):
|
||||
permissions: List[APIKeyPermission]
|
||||
|
||||
|
||||
class RequestTopUp(pydantic.BaseModel):
|
||||
amount: int
|
||||
"""Amount of credits to top up."""
|
||||
|
|
|
@ -20,6 +20,7 @@ import backend.server.v2.library.routes
|
|||
import backend.server.v2.store.routes
|
||||
import backend.util.service
|
||||
import backend.util.settings
|
||||
from backend.server.external.api import external_app
|
||||
|
||||
settings = backend.util.settings.Settings()
|
||||
logger = logging.getLogger(__name__)
|
||||
|
@ -94,6 +95,8 @@ app.include_router(
|
|||
backend.server.v2.library.routes.router, tags=["v2"], prefix="/api/library"
|
||||
)
|
||||
|
||||
app.mount("/external-api", external_app)
|
||||
|
||||
|
||||
@app.get(path="/health", tags=["health"], dependencies=[])
|
||||
async def health():
|
||||
|
|
|
@ -4,10 +4,11 @@ from collections import defaultdict
|
|||
from typing import TYPE_CHECKING, Annotated, Any, Sequence
|
||||
|
||||
import pydantic
|
||||
import stripe
|
||||
from autogpt_libs.auth.middleware import auth_middleware
|
||||
from autogpt_libs.feature_flag.client import feature_flag
|
||||
from autogpt_libs.utils.cache import thread_cached
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, Response
|
||||
from typing_extensions import Optional, TypedDict
|
||||
|
||||
import backend.data.block
|
||||
|
@ -28,7 +29,11 @@ from backend.data.api_key import (
|
|||
update_api_key_permissions,
|
||||
)
|
||||
from backend.data.block import BlockInput, CompletedBlockOutput
|
||||
from backend.data.credit import get_block_costs, get_user_credit_model
|
||||
from backend.data.credit import (
|
||||
get_block_costs,
|
||||
get_stripe_customer_id,
|
||||
get_user_credit_model,
|
||||
)
|
||||
from backend.data.user import get_or_create_user
|
||||
from backend.executor import ExecutionManager, ExecutionScheduler, scheduler
|
||||
from backend.integrations.creds_manager import IntegrationCredentialsManager
|
||||
|
@ -40,6 +45,7 @@ from backend.server.model import (
|
|||
CreateAPIKeyRequest,
|
||||
CreateAPIKeyResponse,
|
||||
CreateGraph,
|
||||
RequestTopUp,
|
||||
SetGraphActiveVersion,
|
||||
UpdatePermissionsRequest,
|
||||
)
|
||||
|
@ -134,7 +140,69 @@ async def get_user_credits(
|
|||
user_id: Annotated[str, Depends(get_user_id)],
|
||||
) -> dict[str, int]:
|
||||
# Credits can go negative, so ensure it's at least 0 for user to see.
|
||||
return {"credits": max(await _user_credit_model.get_or_refill_credit(user_id), 0)}
|
||||
return {"credits": max(await _user_credit_model.get_credits(user_id), 0)}
|
||||
|
||||
|
||||
@v1_router.post(
|
||||
path="/credits", tags=["credits"], dependencies=[Depends(auth_middleware)]
|
||||
)
|
||||
async def request_top_up(
|
||||
request: RequestTopUp, user_id: Annotated[str, Depends(get_user_id)]
|
||||
):
|
||||
checkout_url = await _user_credit_model.top_up_intent(user_id, request.amount)
|
||||
return {"checkout_url": checkout_url}
|
||||
|
||||
|
||||
@v1_router.patch(
|
||||
path="/credits", tags=["credits"], dependencies=[Depends(auth_middleware)]
|
||||
)
|
||||
async def fulfill_checkout(user_id: Annotated[str, Depends(get_user_id)]):
|
||||
await _user_credit_model.fulfill_checkout(user_id=user_id)
|
||||
return Response(status_code=200)
|
||||
|
||||
|
||||
@v1_router.post(path="/credits/stripe_webhook", tags=["credits"])
|
||||
async def stripe_webhook(request: Request):
|
||||
# Get the raw request body
|
||||
payload = await request.body()
|
||||
# Get the signature header
|
||||
sig_header = request.headers.get("stripe-signature")
|
||||
|
||||
try:
|
||||
event = stripe.Webhook.construct_event(
|
||||
payload, sig_header, settings.secrets.stripe_webhook_secret
|
||||
)
|
||||
except ValueError:
|
||||
# Invalid payload
|
||||
raise HTTPException(status_code=400)
|
||||
except stripe.SignatureVerificationError:
|
||||
# Invalid signature
|
||||
raise HTTPException(status_code=400)
|
||||
|
||||
if (
|
||||
event["type"] == "checkout.session.completed"
|
||||
or event["type"] == "checkout.session.async_payment_succeeded"
|
||||
):
|
||||
await _user_credit_model.fulfill_checkout(
|
||||
session_id=event["data"]["object"]["id"]
|
||||
)
|
||||
|
||||
return Response(status_code=200)
|
||||
|
||||
|
||||
@v1_router.get(path="/credits/manage", dependencies=[Depends(auth_middleware)])
|
||||
async def manage_payment_method(
|
||||
user_id: Annotated[str, Depends(get_user_id)],
|
||||
) -> dict[str, str]:
|
||||
session = stripe.billing_portal.Session.create(
|
||||
customer=await get_stripe_customer_id(user_id),
|
||||
return_url=settings.config.platform_base_url + "/marketplace/credits",
|
||||
)
|
||||
if not session:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Failed to create billing portal session"
|
||||
)
|
||||
return {"url": session.url}
|
||||
|
||||
|
||||
########################################################
|
||||
|
@ -545,7 +613,6 @@ def get_execution_schedules(
|
|||
tags=["api-keys"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
@feature_flag("api-keys-enabled")
|
||||
async def create_api_key(
|
||||
request: CreateAPIKeyRequest, user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> CreateAPIKeyResponse:
|
||||
|
@ -569,7 +636,6 @@ async def create_api_key(
|
|||
tags=["api-keys"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
@feature_flag("api-keys-enabled")
|
||||
async def get_api_keys(
|
||||
user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> list[APIKeyWithoutHash]:
|
||||
|
@ -587,7 +653,6 @@ async def get_api_keys(
|
|||
tags=["api-keys"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
@feature_flag("api-keys-enabled")
|
||||
async def get_api_key(
|
||||
key_id: str, user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> APIKeyWithoutHash:
|
||||
|
|
|
@ -81,10 +81,14 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
|
|||
default=True,
|
||||
description="If authentication is enabled or not",
|
||||
)
|
||||
enable_credit: str = Field(
|
||||
default="false",
|
||||
enable_credit: bool = Field(
|
||||
default=False,
|
||||
description="If user credit system is enabled or not",
|
||||
)
|
||||
enable_beta_monthly_credit: bool = Field(
|
||||
default=True,
|
||||
description="If beta monthly credits accounting is enabled or not",
|
||||
)
|
||||
num_user_credits_refill: int = Field(
|
||||
default=1500,
|
||||
description="Number of credits to refill for each user",
|
||||
|
@ -308,6 +312,13 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
|
|||
exa_api_key: str = Field(default="", description="Exa API key")
|
||||
e2b_api_key: str = Field(default="", description="E2B API key")
|
||||
nvidia_api_key: str = Field(default="", description="Nvidia API key")
|
||||
mem0_api_key: str = Field(default="", description="Mem0 API key")
|
||||
|
||||
linear_client_id: str = Field(default="", description="Linear client ID")
|
||||
linear_client_secret: str = Field(default="", description="Linear client secret")
|
||||
|
||||
stripe_api_key: str = Field(default="", description="Stripe API Key")
|
||||
stripe_webhook_secret: str = Field(default="", description="Stripe Webhook Secret")
|
||||
|
||||
# Add more secret fields as needed
|
||||
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import logging
|
||||
import time
|
||||
import uuid
|
||||
from typing import Sequence, cast
|
||||
|
||||
from backend.data import db
|
||||
|
@ -120,6 +121,11 @@ def execute_block_test(block: Block):
|
|||
if field_name in block.test_credentials:
|
||||
extra_exec_kwargs[field_name] = block.test_credentials[field_name]
|
||||
|
||||
# inject fake user_id, run_id, graph_id
|
||||
extra_exec_kwargs["user_id"] = uuid.uuid4()
|
||||
extra_exec_kwargs["run_id"] = uuid.uuid4()
|
||||
extra_exec_kwargs["graph_id"] = uuid.uuid4()
|
||||
|
||||
for input_data in block.test_input:
|
||||
log.info(f"{prefix} in: {input_data}")
|
||||
|
||||
|
|
|
@ -0,0 +1,2 @@
|
|||
-- AlterTable
|
||||
ALTER TABLE "CreditTransaction" ADD COLUMN "runningBalance" INTEGER;
|
|
@ -0,0 +1,35 @@
|
|||
/*
|
||||
Warnings:
|
||||
|
||||
- You are about to drop the column `blockId` on the `CreditTransaction` table. All the data in the column will be moved to metadata->block_id.
|
||||
|
||||
*/
|
||||
BEGIN;
|
||||
|
||||
-- DropForeignKey blockId
|
||||
ALTER TABLE "CreditTransaction" DROP CONSTRAINT "CreditTransaction_blockId_fkey";
|
||||
|
||||
-- Update migrate blockId into metadata->"block_id"
|
||||
UPDATE "CreditTransaction"
|
||||
SET "metadata" = jsonb_set(
|
||||
COALESCE("metadata"::jsonb, '{}'),
|
||||
'{block_id}',
|
||||
to_jsonb("blockId")
|
||||
)
|
||||
WHERE "blockId" IS NOT NULL;
|
||||
|
||||
-- AlterTable drop blockId
|
||||
ALTER TABLE "CreditTransaction" DROP COLUMN "blockId";
|
||||
|
||||
COMMIT;
|
||||
|
||||
/*
|
||||
The indexes dropped below are part of the cleanup for the schema change applied above.
They were not in use, so removing them has no impact.
|
||||
*/
|
||||
|
||||
-- DropIndex
|
||||
DROP INDEX "StoreListingReview_storeListingVersionId_idx";
|
||||
|
||||
-- DropIndex
|
||||
DROP INDEX "StoreListingSubmission_Status_idx";
|
|
@ -306,6 +306,18 @@ supabase = "^2.10.0"
|
|||
type = "directory"
|
||||
url = "../autogpt_libs"
|
||||
|
||||
[[package]]
|
||||
name = "backoff"
|
||||
version = "2.2.1"
|
||||
description = "Function decoration for backoff and retry"
|
||||
optional = false
|
||||
python-versions = ">=3.7,<4.0"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"},
|
||||
{file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "black"
|
||||
version = "24.10.0"
|
||||
|
@ -1472,6 +1484,76 @@ googleapis-common-protos = ">=1.5.5"
|
|||
grpcio = ">=1.68.0"
|
||||
protobuf = ">=5.26.1,<6.0dev"
|
||||
|
||||
[[package]]
|
||||
name = "grpcio-tools"
|
||||
version = "1.68.0"
|
||||
description = "Protobuf code generator for gRPC"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "grpcio_tools-1.68.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:9509a5c3ed3d54fa7ac20748d501cb86668f764605a0a68f275339ee0f1dc1a6"},
|
||||
{file = "grpcio_tools-1.68.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:59a885091bf29700ba0e14a954d156a18714caaa2006a7f328b18e1ac4b1e721"},
|
||||
{file = "grpcio_tools-1.68.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:d3e678162e1d7a8720dc05fdd537fc8df082a50831791f7bb1c6f90095f8368b"},
|
||||
{file = "grpcio_tools-1.68.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10d03e3ad4af6284fd27cb14f5a3d52045913c1253e3e24a384ed91bc8adbfcd"},
|
||||
{file = "grpcio_tools-1.68.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1769d7f529de1cc102f7fb900611e3c0b69bdb244fca1075b24d6e5b49024586"},
|
||||
{file = "grpcio_tools-1.68.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:88640d95ee41921ac7352fa5fadca52a06d7e21fbe53e6a706a9a494f756be7d"},
|
||||
{file = "grpcio_tools-1.68.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e903d07bc65232aa9e7704c829aec263e1e139442608e473d7912417a9908e29"},
|
||||
{file = "grpcio_tools-1.68.0-cp310-cp310-win32.whl", hash = "sha256:66b70b37184d40806844f51c2757c6b852511d4ea46a3bf2c7e931a47b455bc6"},
|
||||
{file = "grpcio_tools-1.68.0-cp310-cp310-win_amd64.whl", hash = "sha256:b47ae076ffb29a68e517bc03552bef0d9c973f8e18adadff180b123e973a26ea"},
|
||||
{file = "grpcio_tools-1.68.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:f65942fab440e99113ce14436deace7554d5aa554ea18358e3a5f3fc47efe322"},
|
||||
{file = "grpcio_tools-1.68.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8fefc6d000e169a97336feded23ce614df3fb9926fc48c7a9ff8ea459d93b5b0"},
|
||||
{file = "grpcio_tools-1.68.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:6dd69c9f3ff85eee8d1f71adf7023c638ca8d465633244ac1b7f19bc3668612d"},
|
||||
{file = "grpcio_tools-1.68.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7dc5195dc02057668cc22da1ff1aea1811f6fa0deb801b3194dec1fe0bab1cf0"},
|
||||
{file = "grpcio_tools-1.68.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:849b12bec2320e49e988df104c92217d533e01febac172a4495caab36d9f0edc"},
|
||||
{file = "grpcio_tools-1.68.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:766c2cd2e365e0fc0e559af56f2c2d144d95fd7cb8668a34d533e66d6435eb34"},
|
||||
{file = "grpcio_tools-1.68.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2ec3a2e0afa4866ccc5ba33c071aebaa619245dfdd840cbb74f2b0591868d085"},
|
||||
{file = "grpcio_tools-1.68.0-cp311-cp311-win32.whl", hash = "sha256:80b733014eb40d920d836d782e5cdea0dcc90d251a2ffb35ab378ef4f8a42c14"},
|
||||
{file = "grpcio_tools-1.68.0-cp311-cp311-win_amd64.whl", hash = "sha256:f95103e3e4e7fee7c6123bc9e4e925e07ad24d8d09d7c1c916fb6c8d1cb9e726"},
|
||||
{file = "grpcio_tools-1.68.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:dd9a654af8536b3de8525bff72a245fef62d572eabf96ac946fe850e707cb27d"},
|
||||
{file = "grpcio_tools-1.68.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0f77957e3a0916a0dd18d57ce6b49d95fc9a5cfed92310f226339c0fda5394f6"},
|
||||
{file = "grpcio_tools-1.68.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:92a09afe64fe26696595de2036e10967876d26b12c894cc9160f00152cacebe7"},
|
||||
{file = "grpcio_tools-1.68.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28ebdbad2ef16699d07400b65260240851049a75502eff69a59b127d3ab960f1"},
|
||||
{file = "grpcio_tools-1.68.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d3150d784d8050b10dcf5eb06e04fb90747a1547fed3a062a608d940fe57066"},
|
||||
{file = "grpcio_tools-1.68.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:261d98fd635595de42aadee848f9af46da6654d63791c888891e94f66c5d0682"},
|
||||
{file = "grpcio_tools-1.68.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:061345c0079b9471f32230186ab01acb908ea0e577bc1699a8cf47acef8be4af"},
|
||||
{file = "grpcio_tools-1.68.0-cp312-cp312-win32.whl", hash = "sha256:533ce6791a5ba21e35d74c6c25caf4776f5692785a170c01ea1153783ad5af31"},
|
||||
{file = "grpcio_tools-1.68.0-cp312-cp312-win_amd64.whl", hash = "sha256:56842a0ce74b4b92eb62cd5ee00181b2d3acc58ba0c4fd20d15a5db51f891ba6"},
|
||||
{file = "grpcio_tools-1.68.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:1117a81592542f0c36575082daa6413c57ca39188b18a4c50ec7332616f4b97e"},
|
||||
{file = "grpcio_tools-1.68.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:51e5a090849b30c99a2396d42140b8a3e558eff6cdfa12603f9582e2cd07724e"},
|
||||
{file = "grpcio_tools-1.68.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:4fe611d89a1836df8936f066d39c7eb03d4241806449ec45d4b8e1c843ae8011"},
|
||||
{file = "grpcio_tools-1.68.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c10f3faa0cc4d89eb546f53b623837af23e86dc495d3b89510bcc0e0a6c0b8b2"},
|
||||
{file = "grpcio_tools-1.68.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46b537480b8fd2195d988120a28467601a2a3de2e504043b89fb90318e1eb754"},
|
||||
{file = "grpcio_tools-1.68.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:17d0c9004ea82b4213955a585401e80c30d4b37a1d4ace32ccdea8db4d3b7d43"},
|
||||
{file = "grpcio_tools-1.68.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:2919faae04fe47bad57fc9b578aeaab527da260e851f321a253b6b11862254a8"},
|
||||
{file = "grpcio_tools-1.68.0-cp313-cp313-win32.whl", hash = "sha256:ee86157ef899f58ba2fe1055cce0d33bd703e99aa6d5a0895581ac3969f06bfa"},
|
||||
{file = "grpcio_tools-1.68.0-cp313-cp313-win_amd64.whl", hash = "sha256:d0470ffc6a93c86cdda48edd428d22e2fef17d854788d60d0d5f291038873157"},
|
||||
{file = "grpcio_tools-1.68.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:795f2cd76f68a12b0b5541b98187ba367dd69b49d359cf98b781ead742961370"},
|
||||
{file = "grpcio_tools-1.68.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:57e29e78c33fb1b1d557fbe7650d722d1f2b0a9f53ea73beb8ea47e627b6000b"},
|
||||
{file = "grpcio_tools-1.68.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:700f171cd3293ee8d50cd43171562ff07b14fa8e49ee471cd91c6924c7da8644"},
|
||||
{file = "grpcio_tools-1.68.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:196cd8a3a5963a4c9e424314df9eb573b305e6f958fe6508d26580ce01e7aa56"},
|
||||
{file = "grpcio_tools-1.68.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cad40c3164ee9cef62524dea509449ea581b17ea493178beef051bf79b5103ca"},
|
||||
{file = "grpcio_tools-1.68.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ab93fab49fa1e699e577ff5fbb99aba660164d710d4c33cfe0aa9d06f585539f"},
|
||||
{file = "grpcio_tools-1.68.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:511224a99726eb84db9ddb84dc8a75377c3eae797d835f99e80128ec618376d5"},
|
||||
{file = "grpcio_tools-1.68.0-cp38-cp38-win32.whl", hash = "sha256:b4ca81770cd729a9ea536d871aacedbde2b732bb9bb83c9d993d63f58502153d"},
|
||||
{file = "grpcio_tools-1.68.0-cp38-cp38-win_amd64.whl", hash = "sha256:6950725bf7a496f81d3ec3324334ffc9dbec743b510dd0e897f51f8627eeb6ac"},
|
||||
{file = "grpcio_tools-1.68.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:01ace351a51d7ee120963a4612b1f00e964462ec548db20d17f8902e238592c8"},
|
||||
{file = "grpcio_tools-1.68.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5afd2f3f7257b52228a7808a2b4a765893d4d802d7a2377d9284853e67d045c6"},
|
||||
{file = "grpcio_tools-1.68.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:453ee3193d59c974c678d91f08786f43c25ef753651b0825dc3d008c31baf68d"},
|
||||
{file = "grpcio_tools-1.68.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b094b22919b786ad73c20372ef5e546330e7cd2c6dc12293b7ed586975f35d38"},
|
||||
{file = "grpcio_tools-1.68.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26335eea976dfc1ff5d90b19c309a9425bd53868112a0507ad20f297f2c21d3e"},
|
||||
{file = "grpcio_tools-1.68.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c77ecc5164bb413a613bdac9091dcc29d26834a2ac42fcd1afdfcda9e3003e68"},
|
||||
{file = "grpcio_tools-1.68.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e31be6dc61496a59c1079b0a669f93dfcc2cdc4b1dbdc4374247cd09cee1329b"},
|
||||
{file = "grpcio_tools-1.68.0-cp39-cp39-win32.whl", hash = "sha256:3aa40958355920ae2846c6fb5cadac4f2c8e33234a2982fef8101da0990e3968"},
|
||||
{file = "grpcio_tools-1.68.0-cp39-cp39-win_amd64.whl", hash = "sha256:19bafb80948eda979b1b3a63c1567162d06249f43068a0e46a028a448e6f72d4"},
|
||||
{file = "grpcio_tools-1.68.0.tar.gz", hash = "sha256:737804ec2225dd4cc27e633b4ca0e963b0795161bf678285fab6586e917fd867"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
grpcio = ">=1.68.0"
|
||||
protobuf = ">=5.26.1,<6.0dev"
|
||||
setuptools = "*"
|
||||
|
||||
[[package]]
|
||||
name = "h11"
|
||||
version = "0.14.0"
|
||||
|
@ -1986,6 +2068,41 @@ files = [
|
|||
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mem0ai"
|
||||
version = "0.1.44"
|
||||
description = "Long-term memory for AI Agents"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "mem0ai-0.1.44-py3-none-any.whl", hash = "sha256:32260a2cd935035a1b16ce04ad2e4510a5bd97618709466e2d06303e0eb8d9d4"},
|
||||
{file = "mem0ai-0.1.44.tar.gz", hash = "sha256:93214272915d94f673d370bb8fe7a8bfc21806267e65700b471bec454dcdfa5c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
openai = ">=1.33.0,<2.0.0"
|
||||
posthog = ">=3.5.0,<4.0.0"
|
||||
pydantic = ">=2.7.3,<3.0.0"
|
||||
pytz = ">=2024.1,<2025.0"
|
||||
qdrant-client = ">=1.9.1,<2.0.0"
|
||||
sqlalchemy = ">=2.0.31,<3.0.0"
|
||||
|
||||
[package.extras]
|
||||
graph = ["langchain-community (>=0.3.1,<0.4.0)", "neo4j (>=5.23.1,<6.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "monotonic"
|
||||
version = "1.6"
|
||||
description = "An implementation of time.monotonic() for Python 2 & < 3.3"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"},
|
||||
{file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "multidict"
|
||||
version = "6.1.0"
|
||||
|
@ -2115,6 +2232,71 @@ files = [
|
|||
{file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "numpy"
|
||||
version = "2.2.1"
|
||||
description = "Fundamental package for array computing in Python"
|
||||
optional = false
|
||||
python-versions = ">=3.10"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "numpy-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5edb4e4caf751c1518e6a26a83501fda79bff41cc59dac48d70e6d65d4ec4440"},
|
||||
{file = "numpy-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa3017c40d513ccac9621a2364f939d39e550c542eb2a894b4c8da92b38896ab"},
|
||||
{file = "numpy-2.2.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:61048b4a49b1c93fe13426e04e04fdf5a03f456616f6e98c7576144677598675"},
|
||||
{file = "numpy-2.2.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7671dc19c7019103ca44e8d94917eba8534c76133523ca8406822efdd19c9308"},
|
||||
{file = "numpy-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4250888bcb96617e00bfa28ac24850a83c9f3a16db471eca2ee1f1714df0f957"},
|
||||
{file = "numpy-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7746f235c47abc72b102d3bce9977714c2444bdfaea7888d241b4c4bb6a78bf"},
|
||||
{file = "numpy-2.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:059e6a747ae84fce488c3ee397cee7e5f905fd1bda5fb18c66bc41807ff119b2"},
|
||||
{file = "numpy-2.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f62aa6ee4eb43b024b0e5a01cf65a0bb078ef8c395e8713c6e8a12a697144528"},
|
||||
{file = "numpy-2.2.1-cp310-cp310-win32.whl", hash = "sha256:48fd472630715e1c1c89bf1feab55c29098cb403cc184b4859f9c86d4fcb6a95"},
|
||||
{file = "numpy-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:b541032178a718c165a49638d28272b771053f628382d5e9d1c93df23ff58dbf"},
|
||||
{file = "numpy-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40f9e544c1c56ba8f1cf7686a8c9b5bb249e665d40d626a23899ba6d5d9e1484"},
|
||||
{file = "numpy-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9b57eaa3b0cd8db52049ed0330747b0364e899e8a606a624813452b8203d5f7"},
|
||||
{file = "numpy-2.2.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bc8a37ad5b22c08e2dbd27df2b3ef7e5c0864235805b1e718a235bcb200cf1cb"},
|
||||
{file = "numpy-2.2.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9036d6365d13b6cbe8f27a0eaf73ddcc070cae584e5ff94bb45e3e9d729feab5"},
|
||||
{file = "numpy-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51faf345324db860b515d3f364eaa93d0e0551a88d6218a7d61286554d190d73"},
|
||||
{file = "numpy-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38efc1e56b73cc9b182fe55e56e63b044dd26a72128fd2fbd502f75555d92591"},
|
||||
{file = "numpy-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:31b89fa67a8042e96715c68e071a1200c4e172f93b0fbe01a14c0ff3ff820fc8"},
|
||||
{file = "numpy-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4c86e2a209199ead7ee0af65e1d9992d1dce7e1f63c4b9a616500f93820658d0"},
|
||||
{file = "numpy-2.2.1-cp311-cp311-win32.whl", hash = "sha256:b34d87e8a3090ea626003f87f9392b3929a7bbf4104a05b6667348b6bd4bf1cd"},
|
||||
{file = "numpy-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:360137f8fb1b753c5cde3ac388597ad680eccbbbb3865ab65efea062c4a1fd16"},
|
||||
{file = "numpy-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:694f9e921a0c8f252980e85bce61ebbd07ed2b7d4fa72d0e4246f2f8aa6642ab"},
|
||||
{file = "numpy-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3683a8d166f2692664262fd4900f207791d005fb088d7fdb973cc8d663626faa"},
|
||||
{file = "numpy-2.2.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:780077d95eafc2ccc3ced969db22377b3864e5b9a0ea5eb347cc93b3ea900315"},
|
||||
{file = "numpy-2.2.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:55ba24ebe208344aa7a00e4482f65742969a039c2acfcb910bc6fcd776eb4355"},
|
||||
{file = "numpy-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b1d07b53b78bf84a96898c1bc139ad7f10fda7423f5fd158fd0f47ec5e01ac7"},
|
||||
{file = "numpy-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5062dc1a4e32a10dc2b8b13cedd58988261416e811c1dc4dbdea4f57eea61b0d"},
|
||||
{file = "numpy-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fce4f615f8ca31b2e61aa0eb5865a21e14f5629515c9151850aa936c02a1ee51"},
|
||||
{file = "numpy-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:67d4cda6fa6ffa073b08c8372aa5fa767ceb10c9a0587c707505a6d426f4e046"},
|
||||
{file = "numpy-2.2.1-cp312-cp312-win32.whl", hash = "sha256:32cb94448be47c500d2c7a95f93e2f21a01f1fd05dd2beea1ccd049bb6001cd2"},
|
||||
{file = "numpy-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:ba5511d8f31c033a5fcbda22dd5c813630af98c70b2661f2d2c654ae3cdfcfc8"},
|
||||
{file = "numpy-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f1d09e520217618e76396377c81fba6f290d5f926f50c35f3a5f72b01a0da780"},
|
||||
{file = "numpy-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3ecc47cd7f6ea0336042be87d9e7da378e5c7e9b3c8ad0f7c966f714fc10d821"},
|
||||
{file = "numpy-2.2.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f419290bc8968a46c4933158c91a0012b7a99bb2e465d5ef5293879742f8797e"},
|
||||
{file = "numpy-2.2.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5b6c390bfaef8c45a260554888966618328d30e72173697e5cabe6b285fb2348"},
|
||||
{file = "numpy-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:526fc406ab991a340744aad7e25251dd47a6720a685fa3331e5c59fef5282a59"},
|
||||
{file = "numpy-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f74e6fdeb9a265624ec3a3918430205dff1df7e95a230779746a6af78bc615af"},
|
||||
{file = "numpy-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:53c09385ff0b72ba79d8715683c1168c12e0b6e84fb0372e97553d1ea91efe51"},
|
||||
{file = "numpy-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f3eac17d9ec51be534685ba877b6ab5edc3ab7ec95c8f163e5d7b39859524716"},
|
||||
{file = "numpy-2.2.1-cp313-cp313-win32.whl", hash = "sha256:9ad014faa93dbb52c80d8f4d3dcf855865c876c9660cb9bd7553843dd03a4b1e"},
|
||||
{file = "numpy-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:164a829b6aacf79ca47ba4814b130c4020b202522a93d7bff2202bfb33b61c60"},
|
||||
{file = "numpy-2.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4dfda918a13cc4f81e9118dea249e192ab167a0bb1966272d5503e39234d694e"},
|
||||
{file = "numpy-2.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:733585f9f4b62e9b3528dd1070ec4f52b8acf64215b60a845fa13ebd73cd0712"},
|
||||
{file = "numpy-2.2.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:89b16a18e7bba224ce5114db863e7029803c179979e1af6ad6a6b11f70545008"},
|
||||
{file = "numpy-2.2.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:676f4eebf6b2d430300f1f4f4c2461685f8269f94c89698d832cdf9277f30b84"},
|
||||
{file = "numpy-2.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f5cdf9f493b35f7e41e8368e7d7b4bbafaf9660cba53fb21d2cd174ec09631"},
|
||||
{file = "numpy-2.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1ad395cf254c4fbb5b2132fee391f361a6e8c1adbd28f2cd8e79308a615fe9d"},
|
||||
{file = "numpy-2.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:08ef779aed40dbc52729d6ffe7dd51df85796a702afbf68a4f4e41fafdc8bda5"},
|
||||
{file = "numpy-2.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:26c9c4382b19fcfbbed3238a14abf7ff223890ea1936b8890f058e7ba35e8d71"},
|
||||
{file = "numpy-2.2.1-cp313-cp313t-win32.whl", hash = "sha256:93cf4e045bae74c90ca833cba583c14b62cb4ba2cba0abd2b141ab52548247e2"},
|
||||
{file = "numpy-2.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:bff7d8ec20f5f42607599f9994770fa65d76edca264a87b5e4ea5629bce12268"},
|
||||
{file = "numpy-2.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7ba9cc93a91d86365a5d270dee221fdc04fb68d7478e6bf6af650de78a8339e3"},
|
||||
{file = "numpy-2.2.1-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:3d03883435a19794e41f147612a77a8f56d4e52822337844fff3d4040a142964"},
|
||||
{file = "numpy-2.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4511d9e6071452b944207c8ce46ad2f897307910b402ea5fa975da32e0102800"},
|
||||
{file = "numpy-2.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5c5cc0cbabe9452038ed984d05ac87910f89370b9242371bd9079cb4af61811e"},
|
||||
{file = "numpy-2.2.1.tar.gz", hash = "sha256:45681fd7128c8ad1c379f0ca0776a8b0c6583d2f69889ddac01559dfe4390918"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "oauthlib"
|
||||
version = "3.2.2"
|
||||
|
@ -2348,6 +2530,26 @@ tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""}
|
|||
[package.extras]
|
||||
poetry-plugin = ["poetry (>=1.0,<2.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "portalocker"
|
||||
version = "2.10.1"
|
||||
description = "Wraps the portalocker recipe for easy usage"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"},
|
||||
{file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""}
|
||||
|
||||
[package.extras]
|
||||
docs = ["sphinx (>=1.7.1)"]
|
||||
redis = ["redis"]
|
||||
tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"]
|
||||
|
||||
[[package]]
|
||||
name = "postgrest"
|
||||
version = "0.19.1"
|
||||
|
@ -2366,6 +2568,31 @@ httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
|
|||
pydantic = ">=1.9,<3.0"
|
||||
strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}
|
||||
|
||||
[[package]]
|
||||
name = "posthog"
|
||||
version = "3.8.3"
|
||||
description = "Integrate PostHog into any python application."
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "posthog-3.8.3-py2.py3-none-any.whl", hash = "sha256:7215c4d7649b0c87905b42f460403311564996d776ab48d39852f46539a50f22"},
|
||||
{file = "posthog-3.8.3.tar.gz", hash = "sha256:263df03ea312d4b47a3d5ea393fdb22ff2ed78140d5ce9af9dd0618ae245a44b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
backoff = ">=1.10.0"
|
||||
monotonic = ">=1.5"
|
||||
python-dateutil = ">2.1"
|
||||
requests = ">=2.7,<3.0"
|
||||
six = ">=1.5"
|
||||
|
||||
[package.extras]
|
||||
dev = ["black", "flake8", "flake8-print", "isort", "pre-commit"]
|
||||
langchain = ["langchain (>=0.2.0)"]
|
||||
sentry = ["django", "sentry-sdk"]
|
||||
test = ["coverage", "django", "flake8", "freezegun (==0.3.15)", "langchain-community (>=0.2.0)", "langchain-openai (>=0.2.0)", "mock (>=2.0.0)", "pylint", "pytest", "pytest-asyncio", "pytest-timeout"]
|
||||
|
||||
[[package]]
|
||||
name = "praw"
|
||||
version = "7.8.1"
|
||||
|
@ -3096,6 +3323,47 @@ files = [
|
|||
{file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytz"
|
||||
version = "2024.2"
|
||||
description = "World timezone definitions, modern and historical"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
|
||||
{file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pywin32"
|
||||
version = "308"
|
||||
description = "Python for Window Extensions"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
groups = ["main"]
|
||||
markers = "platform_system == \"Windows\""
|
||||
files = [
|
||||
{file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"},
|
||||
{file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"},
|
||||
{file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"},
|
||||
{file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"},
|
||||
{file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"},
|
||||
{file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"},
|
||||
{file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"},
|
||||
{file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"},
|
||||
{file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"},
|
||||
{file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"},
|
||||
{file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"},
|
||||
{file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"},
|
||||
{file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"},
|
||||
{file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"},
|
||||
{file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"},
|
||||
{file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"},
|
||||
{file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"},
|
||||
{file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml"
|
||||
version = "6.0.2"
|
||||
|
@ -3159,6 +3427,34 @@ files = [
|
|||
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "qdrant-client"
|
||||
version = "1.12.2"
|
||||
description = "Client library for the Qdrant vector search engine"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "qdrant_client-1.12.2-py3-none-any.whl", hash = "sha256:a0ae500a46a679ff3521ba3f1f1cf3d72b57090a768cec65fc317066bcbac1e6"},
|
||||
{file = "qdrant_client-1.12.2.tar.gz", hash = "sha256:2777e09b3e89bb22bb490384d8b1fa8140f3915287884f18984f7031a346aba5"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
grpcio = ">=1.41.0"
|
||||
grpcio-tools = ">=1.41.0"
|
||||
httpx = {version = ">=0.20.0", extras = ["http2"]}
|
||||
numpy = [
|
||||
{version = ">=1.21", markers = "python_version >= \"3.10\" and python_version < \"3.12\""},
|
||||
{version = ">=1.26", markers = "python_version >= \"3.12\" and python_version < \"3.13\""},
|
||||
]
|
||||
portalocker = ">=2.7.0,<3.0.0"
|
||||
pydantic = ">=1.10.8"
|
||||
urllib3 = ">=1.26.14,<3"
|
||||
|
||||
[package.extras]
|
||||
fastembed = ["fastembed (==0.5.0)"]
|
||||
fastembed-gpu = ["fastembed-gpu (==0.5.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "realtime"
|
||||
version = "2.0.5"
|
||||
|
@ -3506,6 +3802,27 @@ files = [
|
|||
{file = "serpent-1.41.tar.gz", hash = "sha256:0407035fe3c6644387d48cff1467d5aa9feff814d07372b78677ed0ee3ed7095"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "setuptools"
|
||||
version = "75.8.0"
|
||||
description = "Easily download, build, install, upgrade, and uninstall Python packages"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"},
|
||||
{file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"]
|
||||
core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
|
||||
cover = ["pytest-cov"]
|
||||
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
|
||||
enabler = ["pytest-enabler (>=2.2)"]
|
||||
test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
|
||||
type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"]
|
||||
|
||||
[[package]]
|
||||
name = "sgmllib3k"
|
||||
version = "1.0.0"
|
||||
|
@ -3688,6 +4005,22 @@ docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"]
release = ["twine"]
test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]

[[package]]
name = "stripe"
version = "11.4.1"
description = "Python bindings for the Stripe API"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "stripe-11.4.1-py2.py3-none-any.whl", hash = "sha256:8aa47a241de0355c383c916c4ef7273ab666f096a44ee7081e357db4a36f0cce"},
{file = "stripe-11.4.1.tar.gz", hash = "sha256:7ddd251b622d490fe57d78487855dc9f4d95b1bb113607e81fd377037a133d5a"},
]

[package.dependencies]
requests = {version = ">=2.20", markers = "python_version >= \"3.0\""}
typing-extensions = {version = ">=4.5.0", markers = "python_version >= \"3.7\""}

[[package]]
name = "supabase"
version = "2.11.0"
@ -4432,4 +4765,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<3.13"
content-hash = "711669de9e6d5b81f19286bd41d52f57bc0177ba8ff5f2b477313a5b2d012ae5"
content-hash = "40985d7cb6a19cb0f0f4948f8aa72adeca6271834179a1fed557ce48b97422ce"

@ -39,6 +39,7 @@ python-dotenv = "^1.0.1"
redis = "^5.2.0"
sentry-sdk = "2.19.2"
strenum = "^0.4.9"
stripe = "^11.3.0"
supabase = "2.11.0"
tenacity = "^9.0.0"
tweepy = "^4.14.0"

@ -54,6 +55,7 @@ sqlalchemy = "^2.0.36"
psycopg2-binary = "^2.9.10"
google-cloud-storage = "^2.18.2"
launchdarkly-server-sdk = "^9.8.0"
mem0ai = "^0.1.44"

[tool.poetry.group.dev.dependencies]
poethepoet = "^0.31.0"
@ -207,7 +207,6 @@ model AgentBlock {

// Prisma requires explicit back-references.
ReferencedByAgentNode AgentNode[]
CreditTransaction CreditTransaction[]
}

// This model describes the status of an AgentGraphExecution or AgentNodeExecution.

@ -387,12 +386,11 @@ model CreditTransaction {
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)

blockId String?
block AgentBlock? @relation(fields: [blockId], references: [id])

amount Int
type CreditTransactionType

runningBalance Int?

isActive Boolean @default(true)
metadata Json?

@ -526,7 +524,7 @@ model StoreListingVersion {
agentVersion Int
Agent AgentGraph @relation(fields: [agentId, agentVersion], references: [id, version])

// The detials for this version of the agent, this allows the author to update the details of the agent,
// The details for this version of the agent, this allows the author to update the details of the agent,
// But still allow using old versions of the agent with there original details.
// TODO: Create a database view that shows only the latest version of each store listing.
slug String

@ -571,7 +569,6 @@ model StoreListingReview {
comments String?

@@unique([storeListingVersionId, reviewByUserId])
@@index([storeListingVersionId])
}

enum SubmissionStatus {

@ -599,7 +596,6 @@ model StoreListingSubmission {
reviewComments String?

@@index([storeListingId])
@@index([Status])
}

enum APIKeyPermission {
@ -4,24 +4,35 @@ import pytest
|
|||
from prisma.models import CreditTransaction
|
||||
|
||||
from backend.blocks.llm import AITextGeneratorBlock
|
||||
from backend.data.credit import UserCredit
|
||||
from backend.data.credit import BetaUserCredit
|
||||
from backend.data.execution import NodeExecutionEntry
|
||||
from backend.data.user import DEFAULT_USER_ID
|
||||
from backend.integrations.credentials_store import openai_credentials
|
||||
from backend.util.test import SpinTestServer
|
||||
|
||||
REFILL_VALUE = 1000
|
||||
user_credit = UserCredit(REFILL_VALUE)
|
||||
user_credit = BetaUserCredit(REFILL_VALUE)
|
||||
|
||||
|
||||
async def disable_test_user_transactions():
|
||||
await CreditTransaction.prisma().delete_many(where={"userId": DEFAULT_USER_ID})
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="session")
|
||||
async def test_block_credit_usage(server: SpinTestServer):
|
||||
current_credit = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)
|
||||
await disable_test_user_transactions()
|
||||
await user_credit.top_up_credits(DEFAULT_USER_ID, 100)
|
||||
current_credit = await user_credit.get_credits(DEFAULT_USER_ID)
|
||||
|
||||
spending_amount_1 = await user_credit.spend_credits(
|
||||
DEFAULT_USER_ID,
|
||||
current_credit,
|
||||
AITextGeneratorBlock().id,
|
||||
{
|
||||
NodeExecutionEntry(
|
||||
user_id=DEFAULT_USER_ID,
|
||||
graph_id="test_graph",
|
||||
node_id="test_node",
|
||||
graph_exec_id="test_graph_exec",
|
||||
node_exec_id="test_node_exec",
|
||||
block_id=AITextGeneratorBlock().id,
|
||||
data={
|
||||
"model": "gpt-4-turbo",
|
||||
"credentials": {
|
||||
"id": openai_credentials.id,
|
||||
|
@ -29,70 +40,65 @@ async def test_block_credit_usage(server: SpinTestServer):
|
|||
"type": openai_credentials.type,
|
||||
},
|
||||
},
|
||||
),
|
||||
0.0,
|
||||
0.0,
|
||||
validate_balance=False,
|
||||
)
|
||||
assert spending_amount_1 > 0
|
||||
|
||||
spending_amount_2 = await user_credit.spend_credits(
|
||||
DEFAULT_USER_ID,
|
||||
current_credit,
|
||||
AITextGeneratorBlock().id,
|
||||
{"model": "gpt-4-turbo", "api_key": "owned_api_key"},
|
||||
NodeExecutionEntry(
|
||||
user_id=DEFAULT_USER_ID,
|
||||
graph_id="test_graph",
|
||||
node_id="test_node",
|
||||
graph_exec_id="test_graph_exec",
|
||||
node_exec_id="test_node_exec",
|
||||
block_id=AITextGeneratorBlock().id,
|
||||
data={"model": "gpt-4-turbo", "api_key": "owned_api_key"},
|
||||
),
|
||||
0.0,
|
||||
0.0,
|
||||
validate_balance=False,
|
||||
)
|
||||
assert spending_amount_2 == 0
|
||||
|
||||
new_credit = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)
|
||||
new_credit = await user_credit.get_credits(DEFAULT_USER_ID)
|
||||
assert new_credit == current_credit - spending_amount_1 - spending_amount_2
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="session")
|
||||
async def test_block_credit_top_up(server: SpinTestServer):
|
||||
current_credit = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)
|
||||
await disable_test_user_transactions()
|
||||
current_credit = await user_credit.get_credits(DEFAULT_USER_ID)
|
||||
|
||||
await user_credit.top_up_credits(DEFAULT_USER_ID, 100)
|
||||
|
||||
new_credit = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)
|
||||
new_credit = await user_credit.get_credits(DEFAULT_USER_ID)
|
||||
assert new_credit == current_credit + 100
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="session")
|
||||
async def test_block_credit_reset(server: SpinTestServer):
|
||||
month1 = datetime(2022, 1, 15)
|
||||
month2 = datetime(2022, 2, 15)
|
||||
await disable_test_user_transactions()
|
||||
month1 = 1
|
||||
month2 = 2
|
||||
|
||||
user_credit.time_now = lambda: month2
|
||||
month2credit = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)
|
||||
# set the calendar to month 2 but use current time from now
|
||||
user_credit.time_now = lambda: datetime.now().replace(month=month2)
|
||||
month2credit = await user_credit.get_credits(DEFAULT_USER_ID)
|
||||
|
||||
# Month 1 result should only affect month 1
|
||||
user_credit.time_now = lambda: month1
|
||||
month1credit = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)
|
||||
user_credit.time_now = lambda: datetime.now().replace(month=month1)
|
||||
month1credit = await user_credit.get_credits(DEFAULT_USER_ID)
|
||||
await user_credit.top_up_credits(DEFAULT_USER_ID, 100)
|
||||
assert await user_credit.get_or_refill_credit(DEFAULT_USER_ID) == month1credit + 100
|
||||
assert await user_credit.get_credits(DEFAULT_USER_ID) == month1credit + 100
|
||||
|
||||
# Month 2 balance is unaffected
|
||||
user_credit.time_now = lambda: month2
|
||||
assert await user_credit.get_or_refill_credit(DEFAULT_USER_ID) == month2credit
|
||||
user_credit.time_now = lambda: datetime.now().replace(month=month2)
|
||||
assert await user_credit.get_credits(DEFAULT_USER_ID) == month2credit
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="session")
|
||||
async def test_credit_refill(server: SpinTestServer):
|
||||
# Clear all transactions within the month
|
||||
await CreditTransaction.prisma().update_many(
|
||||
where={
|
||||
"userId": DEFAULT_USER_ID,
|
||||
"createdAt": {
|
||||
"gte": datetime(2022, 2, 1),
|
||||
"lt": datetime(2022, 3, 1),
|
||||
},
|
||||
},
|
||||
data={"isActive": False},
|
||||
)
|
||||
user_credit.time_now = lambda: datetime(2022, 2, 15)
|
||||
|
||||
balance = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)
|
||||
await disable_test_user_transactions()
|
||||
balance = await user_credit.get_credits(DEFAULT_USER_ID)
|
||||
assert balance == REFILL_VALUE
@ -38,7 +38,7 @@ async def execute_graph(
|
|||
|
||||
# Execution queue should be empty
|
||||
logger.info("Waiting for execution to complete...")
|
||||
result = await wait_execution(test_user.id, test_graph.id, graph_exec_id)
|
||||
result = await wait_execution(test_user.id, test_graph.id, graph_exec_id, 30)
|
||||
logger.info(f"Execution completed with {len(result)} results")
|
||||
assert result and len(result) == num_execs
|
||||
return graph_exec_id
|
||||
|
@ -125,7 +125,7 @@ async def test_agent_execution(server: SpinTestServer):
|
|||
logger.info("Starting test_agent_execution")
|
||||
test_user = await create_test_user()
|
||||
test_graph = await create_graph(server, create_test_graph(), test_user)
|
||||
data = {"input_1": "Hello", "input_2": "World"}
|
||||
data = {"node_input": {"input_1": "Hello", "input_2": "World"}}
|
||||
graph_exec_id = await execute_graph(
|
||||
server.agent_server,
|
||||
test_graph,
|
||||
|
|
|
@ -298,7 +298,6 @@ async def main():
|
|||
data={
|
||||
"transactionKey": str(faker.uuid4()),
|
||||
"userId": user.id,
|
||||
"blockId": block.id,
|
||||
"amount": random.randint(1, 100),
|
||||
"type": (
|
||||
prisma.enums.CreditTransactionType.TOP_UP
@ -5,6 +5,7 @@ NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8015/api/v1/market
NEXT_PUBLIC_LAUNCHDARKLY_ENABLED=false
NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID=
NEXT_PUBLIC_APP_ENV=dev
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=

## Locale settings

@ -45,6 +45,7 @@
"@radix-ui/react-toast": "^1.2.4",
"@radix-ui/react-tooltip": "^1.1.6",
"@sentry/nextjs": "^8",
"@stripe/stripe-js": "^5.3.0",
"@supabase/ssr": "^0.5.2",
"@supabase/supabase-js": "^2.47.8",
"@tanstack/react-table": "^8.20.6",

@ -64,7 +65,7 @@
"launchdarkly-react-client-sdk": "^3.6.0",
"lucide-react": "^0.469.0",
"moment": "^2.30.1",
"next": "^14.2.13",
"next": "^14.2.21",
"next-themes": "^0.4.4",
"react": "^18",
"react-day-picker": "^9.5.0",
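Note: the new NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY variable is what gates the billing UI on the client; when it is empty, credit-related links and pages stay hidden. A minimal sketch of that check (the helper name isStripeEnabled is illustrative, not part of this change):

// Sketch only: billing features render only when a publishable key was
// provided at build time. The Sidebar changes later in this diff perform
// the same Boolean(process.env...) check inline.
export function isStripeEnabled(): boolean {
  return Boolean(process.env.NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY);
}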
@ -49,7 +49,7 @@ export default async function RootLayout({
links={[
{
name: "Marketplace",
href: "/store",
href: "/marketplace",
},
{
name: "Library",

@ -66,7 +66,7 @@ export default async function RootLayout({
{
icon: IconType.Edit,
text: "Edit profile",
href: "/store/profile",
href: "/marketplace/profile",
},
],
},

@ -75,7 +75,7 @@ export default async function RootLayout({
{
icon: IconType.LayoutDashboard,
text: "Creator Dashboard",
href: "/store/dashboard",
href: "/marketplace/dashboard",
},
{
icon: IconType.UploadCloud,

@ -88,7 +88,7 @@ export default async function RootLayout({
{
icon: IconType.Settings,
text: "Settings",
href: "/store/settings",
href: "/marketplace/settings",
},
],
},
@ -0,0 +1,104 @@
|
|||
"use client";
|
||||
import { Button } from "@/components/agptui/Button";
|
||||
import useCredits from "@/hooks/useCredits";
|
||||
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
|
||||
import { useSearchParams, useRouter } from "next/navigation";
|
||||
import { useEffect, useState } from "react";
|
||||
|
||||
export default function CreditsPage() {
|
||||
const { requestTopUp } = useCredits();
|
||||
const [amount, setAmount] = useState(5);
|
||||
const [patched, setPatched] = useState(false);
|
||||
const searchParams = useSearchParams();
|
||||
const router = useRouter();
|
||||
const topupStatus = searchParams.get("topup");
|
||||
const api = useBackendAPI();
|
||||
|
||||
useEffect(() => {
|
||||
if (!patched && topupStatus === "success") {
|
||||
api.fulfillCheckout();
|
||||
setPatched(true);
|
||||
}
|
||||
}, [api, patched, topupStatus]);
|
||||
|
||||
const openBillingPortal = async () => {
|
||||
const portal = await api.getUserPaymentPortalLink();
|
||||
router.push(portal.url);
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="w-full min-w-[800px] px-4 sm:px-8">
|
||||
<h1 className="font-circular mb-6 text-[28px] font-normal text-neutral-900 dark:text-neutral-100 sm:mb-8 sm:text-[35px]">
|
||||
Credits
|
||||
</h1>
|
||||
|
||||
<div className="grid grid-cols-1 gap-8 lg:grid-cols-2">
|
||||
{/* Left Column */}
|
||||
<div>
|
||||
<h2 className="text-lg">Top-up Credits</h2>
|
||||
|
||||
<p className="mb-6 text-neutral-600 dark:text-neutral-400">
|
||||
{topupStatus === "success" && (
|
||||
<span className="text-green-500">
|
||||
Your payment was successful. Your credits will be updated
|
||||
shortly.
|
||||
</span>
|
||||
)}
|
||||
{topupStatus === "cancel" && (
|
||||
<span className="text-red-500">
|
||||
Payment failed. Your payment method has not been charged.
|
||||
</span>
|
||||
)}
|
||||
</p>
|
||||
|
||||
<div className="mb-4 w-full">
|
||||
<label className="text-neutral-700">
|
||||
1 USD = 100 credits, 5 USD is a minimum top-up
|
||||
</label>
|
||||
<div className="rounded-[55px] border border-slate-200 px-4 py-2.5 dark:border-slate-700 dark:bg-slate-800">
|
||||
<input
|
||||
type="number"
|
||||
name="displayName"
|
||||
value={amount}
|
||||
placeholder="Top-up amount in USD"
|
||||
min="5"
|
||||
step="1"
|
||||
className="w-full"
|
||||
onChange={(e) => setAmount(parseInt(e.target.value))}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
type="submit"
|
||||
variant="default"
|
||||
className="font-circular ml-auto"
|
||||
onClick={() => requestTopUp(amount)}
|
||||
>
|
||||
Top-up
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{/* Right Column */}
|
||||
<div>
|
||||
<h2 className="text-lg">Manage Your Payment Methods</h2>
|
||||
<br />
|
||||
<p className="text-neutral-600">
|
||||
You can manage your cards and see your payment history in the
|
||||
billing portal.
|
||||
</p>
|
||||
<br />
|
||||
|
||||
<Button
|
||||
type="submit"
|
||||
variant="default"
|
||||
className="font-circular ml-auto"
|
||||
onClick={() => openBillingPortal()}
|
||||
>
|
||||
Open Portal
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
|
@ -33,14 +33,14 @@ export default function Page({}: {}) {
} catch (error) {
console.error("Error fetching submissions:", error);
}
}, [api, supabase]);
}, [api]);

useEffect(() => {
if (!supabase) {
return;
}
fetchData();
}, [supabase]);
}, [supabase, fetchData]);

const onEditSubmission = useCallback((submission: StoreSubmissionRequest) => {
setSubmissionData(submission);

@ -56,7 +56,7 @@ export default function Page({}: {}) {
api.deleteStoreSubmission(submission_id);
fetchData();
},
[supabase],
[api, supabase, fetchData],
);

const onOpenPopout = useCallback(() => {
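The dependency-array changes above follow the usual React pattern: the fetcher is memoized on the API client it uses, and the effect depends on that memoized fetcher (plus the supabase guard). A self-contained sketch of the same pattern, with illustrative names:

import { useCallback, useEffect, useState } from "react";

// Illustrative only: `api` stands in for the backend client used above.
function useSubmissions(api: { listSubmissions(): Promise<string[]> }) {
  const [submissions, setSubmissions] = useState<string[]>([]);

  // Memoize the fetcher on the client it closes over...
  const fetchData = useCallback(async () => {
    setSubmissions(await api.listSubmissions());
  }, [api]);

  // ...and re-run the effect whenever that memoized fetcher changes.
  useEffect(() => {
    fetchData();
  }, [fetchData]);

  return { submissions, refresh: fetchData };
}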
@ -98,6 +98,7 @@ export default function PrivatePage() {
|
|||
// This contains ids for built-in "Use Credits for X" credentials
|
||||
const hiddenCredentials = useMemo(
|
||||
() => [
|
||||
"744fdc56-071a-4761-b5a5-0af0ce10a2b5", // Ollama
|
||||
"fdb7f412-f519-48d1-9b5f-d2f73d0e01fe", // Revid
|
||||
"760f84fc-b270-42de-91f6-08efe1b512d0", // Ideogram
|
||||
"6b9fc200-4726-4973-86c9-cd526f5ce5db", // Replicate
|
||||
|
@ -108,6 +109,11 @@ export default function PrivatePage() {
|
|||
"7f26de70-ba0d-494e-ba76-238e65e7b45f", // Jina
|
||||
"66f20754-1b81-48e4-91d0-f4f0dd82145f", // Unreal Speech
|
||||
"b5a0e27d-0c98-4df3-a4b9-10193e1f3c40", // Open Router
|
||||
"6c0f5bd0-9008-4638-9d79-4b40b631803e", // FAL
|
||||
"96153e04-9c6c-4486-895f-5bb683b1ecec", // Exa
|
||||
"78d19fd7-4d59-4a16-8277-3ce310acf2b7", // E2B
|
||||
"96b83908-2789-4dec-9968-18f0ece4ceb3", // Nvidia
|
||||
"ed55ac19-356e-4243-a6cb-bc599e9b716f", // Mem0
|
||||
],
|
||||
[],
|
||||
);
|
|
@ -5,12 +5,13 @@ export default function Layout({ children }: { children: React.ReactNode }) {
|
|||
const sidebarLinkGroups = [
|
||||
{
|
||||
links: [
|
||||
{ text: "Creator Dashboard", href: "/store/dashboard" },
|
||||
{ text: "Agent dashboard", href: "/store/agent-dashboard" },
|
||||
{ text: "Integrations", href: "/store/integrations" },
|
||||
{ text: "API Keys", href: "/store/api_keys" },
|
||||
{ text: "Profile", href: "/store/profile" },
|
||||
{ text: "Settings", href: "/store/settings" },
|
||||
{ text: "Creator Dashboard", href: "/marketplace/dashboard" },
|
||||
{ text: "Agent dashboard", href: "/marketplace/agent-dashboard" },
|
||||
{ text: "Credits", href: "/marketplace/credits" },
|
||||
{ text: "Integrations", href: "/marketplace/integrations" },
|
||||
{ text: "API Keys", href: "/marketplace/api_keys" },
|
||||
{ text: "Profile", href: "/marketplace/profile" },
|
||||
{ text: "Settings", href: "/marketplace/settings" },
|
||||
],
|
||||
},
|
||||
];
|
|
@ -45,10 +45,10 @@ export default async function Page({
|
|||
});
|
||||
|
||||
const breadcrumbs = [
|
||||
{ name: "Store", link: "/store" },
|
||||
{ name: "Store", link: "/marketplace" },
|
||||
{
|
||||
name: agent.creator,
|
||||
link: `/store/creator/${encodeURIComponent(agent.creator)}`,
|
||||
link: `/marketplace/creator/${encodeURIComponent(agent.creator)}`,
|
||||
},
|
||||
{ name: agent.agent_name, link: "#" },
|
||||
];
|
|
@ -47,7 +47,7 @@ export default async function Page({
|
|||
<main className="mt-5 px-4">
|
||||
<BreadCrumbs
|
||||
items={[
|
||||
{ name: "Store", link: "/store" },
|
||||
{ name: "Store", link: "/marketplace" },
|
||||
{ name: creator.name, link: "#" },
|
||||
]}
|
||||
/>
|
|
@ -1,7 +1,179 @@
|
|||
"use client";
|
||||
import * as React from "react";
|
||||
import { HeroSection } from "@/components/agptui/composite/HeroSection";
|
||||
import {
|
||||
FeaturedSection,
|
||||
FeaturedAgent,
|
||||
} from "@/components/agptui/composite/FeaturedSection";
|
||||
import {
|
||||
AgentsSection,
|
||||
Agent,
|
||||
} from "@/components/agptui/composite/AgentsSection";
|
||||
import { BecomeACreator } from "@/components/agptui/BecomeACreator";
|
||||
import {
|
||||
FeaturedCreators,
|
||||
FeaturedCreator,
|
||||
} from "@/components/agptui/composite/FeaturedCreators";
|
||||
import { Separator } from "@/components/ui/separator";
|
||||
import { Metadata } from "next";
|
||||
import {
|
||||
StoreAgentsResponse,
|
||||
CreatorsResponse,
|
||||
} from "@/lib/autogpt-server-api/types";
|
||||
import BackendAPI from "@/lib/autogpt-server-api";
|
||||
|
||||
import { redirect } from "next/navigation";
|
||||
async function getStoreData() {
|
||||
try {
|
||||
const api = new BackendAPI();
|
||||
|
||||
export default function Page() {
|
||||
redirect("/store");
|
||||
// Add error handling and default values
|
||||
let featuredAgents: StoreAgentsResponse = {
|
||||
agents: [],
|
||||
pagination: {
|
||||
total_items: 0,
|
||||
total_pages: 0,
|
||||
current_page: 0,
|
||||
page_size: 0,
|
||||
},
|
||||
};
|
||||
let topAgents: StoreAgentsResponse = {
|
||||
agents: [],
|
||||
pagination: {
|
||||
total_items: 0,
|
||||
total_pages: 0,
|
||||
current_page: 0,
|
||||
page_size: 0,
|
||||
},
|
||||
};
|
||||
let featuredCreators: CreatorsResponse = {
|
||||
creators: [],
|
||||
pagination: {
|
||||
total_items: 0,
|
||||
total_pages: 0,
|
||||
current_page: 0,
|
||||
page_size: 0,
|
||||
},
|
||||
};
|
||||
|
||||
try {
|
||||
[featuredAgents, topAgents, featuredCreators] = await Promise.all([
|
||||
api.getStoreAgents({ featured: true }),
|
||||
api.getStoreAgents({ sorted_by: "runs" }),
|
||||
api.getStoreCreators({ featured: true, sorted_by: "num_agents" }),
|
||||
]);
|
||||
} catch (error) {
|
||||
console.error("Error fetching store data:", error);
|
||||
}
|
||||
|
||||
return {
|
||||
featuredAgents,
|
||||
topAgents,
|
||||
featuredCreators,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("Error in getStoreData:", error);
|
||||
return {
|
||||
featuredAgents: {
|
||||
agents: [],
|
||||
pagination: {
|
||||
total_items: 0,
|
||||
total_pages: 0,
|
||||
current_page: 0,
|
||||
page_size: 0,
|
||||
},
|
||||
},
|
||||
topAgents: {
|
||||
agents: [],
|
||||
pagination: {
|
||||
total_items: 0,
|
||||
total_pages: 0,
|
||||
current_page: 0,
|
||||
page_size: 0,
|
||||
},
|
||||
},
|
||||
featuredCreators: {
|
||||
creators: [],
|
||||
pagination: {
|
||||
total_items: 0,
|
||||
total_pages: 0,
|
||||
current_page: 0,
|
||||
page_size: 0,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// FIX: Correct metadata
|
||||
export const metadata: Metadata = {
|
||||
title: "Marketplace - NextGen AutoGPT",
|
||||
description: "Find and use AI Agents created by our community",
|
||||
applicationName: "NextGen AutoGPT Store",
|
||||
authors: [{ name: "AutoGPT Team" }],
|
||||
keywords: [
|
||||
"AI agents",
|
||||
"automation",
|
||||
"artificial intelligence",
|
||||
"AutoGPT",
|
||||
"marketplace",
|
||||
],
|
||||
robots: {
|
||||
index: true,
|
||||
follow: true,
|
||||
},
|
||||
openGraph: {
|
||||
title: "Marketplace - NextGen AutoGPT",
|
||||
description: "Find and use AI Agents created by our community",
|
||||
type: "website",
|
||||
siteName: "NextGen AutoGPT Store",
|
||||
images: [
|
||||
{
|
||||
url: "/images/store-og.png",
|
||||
width: 1200,
|
||||
height: 630,
|
||||
alt: "NextGen AutoGPT Store",
|
||||
},
|
||||
],
|
||||
},
|
||||
twitter: {
|
||||
card: "summary_large_image",
|
||||
title: "Marketplace - NextGen AutoGPT",
|
||||
description: "Find and use AI Agents created by our community",
|
||||
images: ["/images/store-twitter.png"],
|
||||
},
|
||||
icons: {
|
||||
icon: "/favicon.ico",
|
||||
shortcut: "/favicon-16x16.png",
|
||||
apple: "/apple-touch-icon.png",
|
||||
},
|
||||
};
|
||||
|
||||
export default async function Page({}: {}) {
|
||||
// Get data server-side
|
||||
const { featuredAgents, topAgents, featuredCreators } = await getStoreData();
|
||||
|
||||
return (
|
||||
<div className="mx-auto w-screen max-w-[1360px]">
|
||||
<main className="px-4">
|
||||
<HeroSection />
|
||||
<FeaturedSection
|
||||
featuredAgents={featuredAgents.agents as FeaturedAgent[]}
|
||||
/>
|
||||
<Separator />
|
||||
<AgentsSection
|
||||
sectionTitle="Top Agents"
|
||||
agents={topAgents.agents as Agent[]}
|
||||
/>
|
||||
<Separator />
|
||||
<FeaturedCreators
|
||||
featuredCreators={featuredCreators.creators as FeaturedCreator[]}
|
||||
/>
|
||||
<Separator />
|
||||
<BecomeACreator
|
||||
title="Become a Creator"
|
||||
description="Join our ever-growing community of hackers and tinkerers"
|
||||
buttonText="Become a Creator"
|
||||
/>
|
||||
</main>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
|
|
@ -61,7 +61,7 @@ function SearchResults({
|
|||
};
|
||||
|
||||
fetchData();
|
||||
}, [searchTerm, sort]);
|
||||
}, [api, searchTerm, sort]);
|
||||
|
||||
const agentsCount = agents.length;
|
||||
const creatorsCount = creators.length;
|
|
@ -3,5 +3,5 @@
|
|||
import { redirect } from "next/navigation";
|
||||
|
||||
export default function Page() {
|
||||
redirect("/store");
|
||||
redirect("/marketplace");
|
||||
}
|
||||
|
|
|
@ -98,6 +98,7 @@ export default function PrivatePage() {
|
|||
// This contains ids for built-in "Use Credits for X" credentials
|
||||
const hiddenCredentials = useMemo(
|
||||
() => [
|
||||
"744fdc56-071a-4761-b5a5-0af0ce10a2b5", // Ollama
|
||||
"fdb7f412-f519-48d1-9b5f-d2f73d0e01fe", // Revid
|
||||
"760f84fc-b270-42de-91f6-08efe1b512d0", // Ideogram
|
||||
"6b9fc200-4726-4973-86c9-cd526f5ce5db", // Replicate
|
||||
|
@ -108,6 +109,11 @@ export default function PrivatePage() {
|
|||
"7f26de70-ba0d-494e-ba76-238e65e7b45f", // Jina
|
||||
"66f20754-1b81-48e4-91d0-f4f0dd82145f", // Unreal Speech
|
||||
"b5a0e27d-0c98-4df3-a4b9-10193e1f3c40", // Open Router
|
||||
"6c0f5bd0-9008-4638-9d79-4b40b631803e", // FAL
|
||||
"96153e04-9c6c-4486-895f-5bb683b1ecec", // Exa
|
||||
"78d19fd7-4d59-4a16-8277-3ce310acf2b7", // E2B
|
||||
"96b83908-2789-4dec-9968-18f0ece4ceb3", // Nvidia
|
||||
"ed55ac19-356e-4243-a6cb-bc599e9b716f", // Mem0
|
||||
],
|
||||
[],
|
||||
);
|
||||
|
|
|
@ -38,7 +38,7 @@ export async function signup(values: z.infer<typeof signupFormSchema>) {
|
|||
}
|
||||
console.log("Signed up");
|
||||
revalidatePath("/", "layout");
|
||||
redirect("/store/profile");
|
||||
redirect("/marketplace/profile");
|
||||
},
|
||||
);
|
||||
}
|
||||
|
|
|
@ -1,181 +0,0 @@
|
|||
import * as React from "react";
|
||||
import { HeroSection } from "@/components/agptui/composite/HeroSection";
|
||||
import {
|
||||
FeaturedSection,
|
||||
FeaturedAgent,
|
||||
} from "@/components/agptui/composite/FeaturedSection";
|
||||
import {
|
||||
AgentsSection,
|
||||
Agent,
|
||||
} from "@/components/agptui/composite/AgentsSection";
|
||||
import { BecomeACreator } from "@/components/agptui/BecomeACreator";
|
||||
import {
|
||||
FeaturedCreators,
|
||||
FeaturedCreator,
|
||||
} from "@/components/agptui/composite/FeaturedCreators";
|
||||
import { Separator } from "@/components/ui/separator";
|
||||
import { Metadata } from "next";
|
||||
import {
|
||||
StoreAgentsResponse,
|
||||
CreatorsResponse,
|
||||
} from "@/lib/autogpt-server-api/types";
|
||||
import BackendAPI from "@/lib/autogpt-server-api";
|
||||
|
||||
export const dynamic = "force-dynamic";
|
||||
|
||||
async function getStoreData() {
|
||||
try {
|
||||
const api = new BackendAPI();
|
||||
|
||||
// Add error handling and default values
|
||||
let featuredAgents: StoreAgentsResponse = {
|
||||
agents: [],
|
||||
pagination: {
|
||||
total_items: 0,
|
||||
total_pages: 0,
|
||||
current_page: 0,
|
||||
page_size: 0,
|
||||
},
|
||||
};
|
||||
let topAgents: StoreAgentsResponse = {
|
||||
agents: [],
|
||||
pagination: {
|
||||
total_items: 0,
|
||||
total_pages: 0,
|
||||
current_page: 0,
|
||||
page_size: 0,
|
||||
},
|
||||
};
|
||||
let featuredCreators: CreatorsResponse = {
|
||||
creators: [],
|
||||
pagination: {
|
||||
total_items: 0,
|
||||
total_pages: 0,
|
||||
current_page: 0,
|
||||
page_size: 0,
|
||||
},
|
||||
};
|
||||
|
||||
try {
|
||||
[featuredAgents, topAgents, featuredCreators] = await Promise.all([
|
||||
api.getStoreAgents({ featured: true }),
|
||||
api.getStoreAgents({ sorted_by: "runs" }),
|
||||
api.getStoreCreators({ featured: true, sorted_by: "num_agents" }),
|
||||
]);
|
||||
} catch (error) {
|
||||
console.error("Error fetching store data:", error);
|
||||
}
|
||||
|
||||
return {
|
||||
featuredAgents,
|
||||
topAgents,
|
||||
featuredCreators,
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("Error in getStoreData:", error);
|
||||
return {
|
||||
featuredAgents: {
|
||||
agents: [],
|
||||
pagination: {
|
||||
total_items: 0,
|
||||
total_pages: 0,
|
||||
current_page: 0,
|
||||
page_size: 0,
|
||||
},
|
||||
},
|
||||
topAgents: {
|
||||
agents: [],
|
||||
pagination: {
|
||||
total_items: 0,
|
||||
total_pages: 0,
|
||||
current_page: 0,
|
||||
page_size: 0,
|
||||
},
|
||||
},
|
||||
featuredCreators: {
|
||||
creators: [],
|
||||
pagination: {
|
||||
total_items: 0,
|
||||
total_pages: 0,
|
||||
current_page: 0,
|
||||
page_size: 0,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// FIX: Correct metadata
|
||||
export const metadata: Metadata = {
|
||||
title: "Marketplace - NextGen AutoGPT",
|
||||
description: "Find and use AI Agents created by our community",
|
||||
applicationName: "NextGen AutoGPT Store",
|
||||
authors: [{ name: "AutoGPT Team" }],
|
||||
keywords: [
|
||||
"AI agents",
|
||||
"automation",
|
||||
"artificial intelligence",
|
||||
"AutoGPT",
|
||||
"marketplace",
|
||||
],
|
||||
robots: {
|
||||
index: true,
|
||||
follow: true,
|
||||
},
|
||||
openGraph: {
|
||||
title: "Marketplace - NextGen AutoGPT",
|
||||
description: "Find and use AI Agents created by our community",
|
||||
type: "website",
|
||||
siteName: "NextGen AutoGPT Store",
|
||||
images: [
|
||||
{
|
||||
url: "/images/store-og.png",
|
||||
width: 1200,
|
||||
height: 630,
|
||||
alt: "NextGen AutoGPT Store",
|
||||
},
|
||||
],
|
||||
},
|
||||
twitter: {
|
||||
card: "summary_large_image",
|
||||
title: "Marketplace - NextGen AutoGPT",
|
||||
description: "Find and use AI Agents created by our community",
|
||||
images: ["/images/store-twitter.png"],
|
||||
},
|
||||
icons: {
|
||||
icon: "/favicon.ico",
|
||||
shortcut: "/favicon-16x16.png",
|
||||
apple: "/apple-touch-icon.png",
|
||||
},
|
||||
};
|
||||
|
||||
export default async function Page({}: {}) {
|
||||
// Get data server-side
|
||||
const { featuredAgents, topAgents, featuredCreators } = await getStoreData();
|
||||
|
||||
return (
|
||||
<div className="mx-auto w-screen max-w-[1360px]">
|
||||
<main className="px-4">
|
||||
<HeroSection />
|
||||
<FeaturedSection
|
||||
featuredAgents={featuredAgents.agents as FeaturedAgent[]}
|
||||
/>
|
||||
<Separator />
|
||||
<AgentsSection
|
||||
sectionTitle="Top Agents"
|
||||
agents={topAgents.agents as Agent[]}
|
||||
/>
|
||||
<Separator />
|
||||
<FeaturedCreators
|
||||
featuredCreators={featuredCreators.creators as FeaturedCreator[]}
|
||||
/>
|
||||
<Separator />
|
||||
<BecomeACreator
|
||||
title="Become a Creator"
|
||||
description="Join our ever-growing community of hackers and tinkerers"
|
||||
buttonText="Become a Creator"
|
||||
/>
|
||||
</main>
|
||||
</div>
|
||||
);
|
||||
}
|
|
@ -105,7 +105,7 @@ export const AgentInfo: React.FC<AgentInfoProps> = ({
|
|||
by
|
||||
</div>
|
||||
<Link
|
||||
href={`/store/creator/${encodeURIComponent(creator)}`}
|
||||
href={`/marketplace/creator/${encodeURIComponent(creator)}`}
|
||||
className="font-geist text-base font-medium text-neutral-800 hover:underline dark:text-neutral-200 sm:text-lg lg:text-xl"
|
||||
>
|
||||
{creator}
|
||||
|
|
|
@ -28,7 +28,7 @@ export const NavbarLink = ({ name, href }: NavbarLinkProps) => {
|
|||
: ""
|
||||
} flex items-center justify-start gap-3`}
|
||||
>
|
||||
{href === "/store" && (
|
||||
{href === "/marketplace" && (
|
||||
<IconShoppingCart
|
||||
className={`h-6 w-6 ${activeLink === href ? "text-white dark:text-black" : ""}`}
|
||||
/>
|
||||
|
|
|
@ -36,7 +36,7 @@ export const SearchBar: React.FC<SearchBarProps> = ({
|
|||
if (searchQuery.trim()) {
|
||||
// Encode the search term and navigate to the desired path
|
||||
const encodedTerm = encodeURIComponent(searchQuery);
|
||||
router.push(`/store/search?searchTerm=${encodedTerm}`);
|
||||
router.push(`/marketplace/search?searchTerm=${encodedTerm}`);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@ import {
|
|||
IconIntegrations,
|
||||
IconProfile,
|
||||
IconSliders,
|
||||
IconCoin,
|
||||
} from "../ui/icons";
|
||||
|
||||
interface SidebarLinkGroup {
|
||||
|
@ -22,6 +23,10 @@ interface SidebarProps {
|
|||
}
|
||||
|
||||
export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
|
||||
const stripeAvailable = Boolean(
|
||||
process.env.NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY,
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<Sheet>
|
||||
|
@ -41,7 +46,7 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
|
|||
<div className="h-full w-full rounded-2xl bg-zinc-200 dark:bg-zinc-800">
|
||||
<div className="inline-flex h-[264px] flex-col items-start justify-start gap-6 p-3">
|
||||
<Link
|
||||
href="/store/dashboard"
|
||||
href="/marketplace/dashboard"
|
||||
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
|
||||
>
|
||||
<IconDashboardLayout className="h-6 w-6" />
|
||||
|
@ -49,8 +54,19 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
|
|||
Creator dashboard
|
||||
</div>
|
||||
</Link>
|
||||
{stripeAvailable && (
|
||||
<Link
|
||||
href="/store/integrations"
|
||||
href="/marketplace/credits"
|
||||
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
|
||||
>
|
||||
<IconCoin className="h-6 w-6" />
|
||||
<div className="p-ui-medium text-base font-medium leading-normal">
|
||||
Credits
|
||||
</div>
|
||||
</Link>
|
||||
)}
|
||||
<Link
|
||||
href="/marketplace/integrations"
|
||||
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
|
||||
>
|
||||
<IconIntegrations className="h-6 w-6" />
|
||||
|
@ -59,7 +75,7 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
|
|||
</div>
|
||||
</Link>
|
||||
<Link
|
||||
href="/store/api_keys"
|
||||
href="/marketplace/api_keys"
|
||||
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
|
||||
>
|
||||
<KeyIcon className="h-6 w-6" />
|
||||
|
@ -68,7 +84,7 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
|
|||
</div>
|
||||
</Link>
|
||||
<Link
|
||||
href="/store/profile"
|
||||
href="/marketplace/profile"
|
||||
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
|
||||
>
|
||||
<IconProfile className="h-6 w-6" />
|
||||
|
@ -77,7 +93,7 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
|
|||
</div>
|
||||
</Link>
|
||||
<Link
|
||||
href="/store/settings"
|
||||
href="/marketplace/settings"
|
||||
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
|
||||
>
|
||||
<IconSliders className="h-6 w-6" />
|
||||
|
@ -94,7 +110,7 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
|
|||
<div className="h-full w-full rounded-2xl bg-zinc-200 dark:bg-zinc-800">
|
||||
<div className="inline-flex h-[264px] flex-col items-start justify-start gap-6 p-3">
|
||||
<Link
|
||||
href="/store/dashboard"
|
||||
href="/marketplace/dashboard"
|
||||
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
|
||||
>
|
||||
<IconDashboardLayout className="h-6 w-6" />
|
||||
|
@ -102,8 +118,19 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
|
|||
Agent dashboard
|
||||
</div>
|
||||
</Link>
|
||||
{stripeAvailable && (
|
||||
<Link
|
||||
href="/store/integrations"
|
||||
href="/marketplace/credits"
|
||||
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
|
||||
>
|
||||
<IconCoin className="h-6 w-6" />
|
||||
<div className="p-ui-medium text-base font-medium leading-normal">
|
||||
Credits
|
||||
</div>
|
||||
</Link>
|
||||
)}
|
||||
<Link
|
||||
href="/marketplace/integrations"
|
||||
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
|
||||
>
|
||||
<IconIntegrations className="h-6 w-6" />
|
||||
|
@ -112,7 +139,7 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
|
|||
</div>
|
||||
</Link>
|
||||
<Link
|
||||
href="/store/api_keys"
|
||||
href="/marketplace/api_keys"
|
||||
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
|
||||
>
|
||||
<KeyIcon className="h-6 w-6" strokeWidth={1} />
|
||||
|
@ -121,7 +148,7 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
|
|||
</div>
|
||||
</Link>
|
||||
<Link
|
||||
href="/store/profile"
|
||||
href="/marketplace/profile"
|
||||
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
|
||||
>
|
||||
<IconProfile className="h-6 w-6" />
|
||||
|
@ -130,7 +157,7 @@ export const Sidebar: React.FC<SidebarProps> = ({ linkGroups }) => {
|
|||
</div>
|
||||
</Link>
|
||||
<Link
|
||||
href="/store/settings"
|
||||
href="/marketplace/settings"
|
||||
className="inline-flex w-full items-center gap-2.5 rounded-xl px-3 py-3 text-neutral-800 hover:bg-neutral-800 hover:text-white dark:text-neutral-200 dark:hover:bg-neutral-700 dark:hover:text-white"
|
||||
>
|
||||
<IconSliders className="h-6 w-6" />
|
||||
|
|
|
@ -39,7 +39,7 @@ export const AgentsSection: React.FC<AgentsSectionProps> = ({
|
|||
|
||||
const handleCardClick = (creator: string, slug: string) => {
|
||||
router.push(
|
||||
`/store/agent/${encodeURIComponent(creator)}/${encodeURIComponent(slug)}`,
|
||||
`/marketplace/agent/${encodeURIComponent(creator)}/${encodeURIComponent(slug)}`,
|
||||
);
|
||||
};
|
||||
|
||||
|
|
|
@ -24,7 +24,7 @@ export const FeaturedCreators: React.FC<FeaturedCreatorsProps> = ({
|
|||
const router = useRouter();
|
||||
|
||||
const handleCardClick = (creator: string) => {
|
||||
router.push(`/store/creator/${encodeURIComponent(creator)}`);
|
||||
router.push(`/marketplace/creator/${encodeURIComponent(creator)}`);
|
||||
};
|
||||
|
||||
// Only show first 4 creators
|
||||
|
|
|
@ -43,7 +43,7 @@ export const FeaturedSection: React.FC<FeaturedSectionProps> = ({
|
|||
|
||||
const handleCardClick = (creator: string, slug: string) => {
|
||||
router.push(
|
||||
`/store/agent/${encodeURIComponent(creator)}/${encodeURIComponent(slug)}`,
|
||||
`/marketplace/agent/${encodeURIComponent(creator)}/${encodeURIComponent(slug)}`,
|
||||
);
|
||||
};
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@ export const HeroSection: React.FC = () => {
|
|||
|
||||
function onFilterChange(selectedFilters: string[]) {
|
||||
const encodedTerm = encodeURIComponent(selectedFilters.join(", "));
|
||||
router.push(`/store/search?searchTerm=${encodedTerm}`);
|
||||
router.push(`/marketplace/search?searchTerm=${encodedTerm}`);
|
||||
}
|
||||
|
||||
return (
|
||||
|
|
|
@ -260,7 +260,7 @@ export const PublishAgentPopout: React.FC<PublishAgentPopoutProps> = ({
|
|||
onClose={handleClose}
|
||||
onDone={handleClose}
|
||||
onViewProgress={() => {
|
||||
router.push("/store/dashboard");
|
||||
router.push("/marketplace/dashboard");
|
||||
handleClose();
|
||||
}}
|
||||
/>
|
||||
|
|
|
@ -14,6 +14,7 @@ import {
|
|||
FaGoogle,
|
||||
FaMedium,
|
||||
FaKey,
|
||||
FaHubspot,
|
||||
} from "react-icons/fa";
|
||||
import { FC, useMemo, useState } from "react";
|
||||
import {
|
||||
|
@ -66,7 +67,9 @@ export const providerIcons: Record<
|
|||
google_maps: FaGoogle,
|
||||
jina: fallbackIcon,
|
||||
ideogram: fallbackIcon,
|
||||
linear: fallbackIcon,
|
||||
medium: FaMedium,
|
||||
mem0: fallbackIcon,
|
||||
ollama: fallbackIcon,
|
||||
openai: fallbackIcon,
|
||||
openweathermap: fallbackIcon,
|
||||
|
@ -79,7 +82,7 @@ export const providerIcons: Record<
|
|||
twitter: FaTwitter,
|
||||
unreal_speech: fallbackIcon,
|
||||
exa: fallbackIcon,
|
||||
hubspot: fallbackIcon,
|
||||
hubspot: FaHubspot,
|
||||
};
|
||||
// --8<-- [end:ProviderIconsEmbed]
|
||||
|
||||
|
|
|
@ -26,7 +26,9 @@ const providerDisplayNames: Record<CredentialsProviderName, string> = {
|
|||
groq: "Groq",
|
||||
ideogram: "Ideogram",
|
||||
jina: "Jina",
|
||||
linear: "Linear",
|
||||
medium: "Medium",
|
||||
mem0: "Mem0",
|
||||
notion: "Notion",
|
||||
nvidia: "Nvidia",
|
||||
ollama: "Ollama",
|
||||
|
|
|
@ -1,37 +1,21 @@
|
|||
"use client";
|
||||
|
||||
import { useState, useEffect, useCallback } from "react";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { IconRefresh } from "@/components/ui/icons";
|
||||
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
|
||||
import useCredits from "@/hooks/useCredits";
|
||||
|
||||
export default function CreditButton() {
|
||||
const [credit, setCredit] = useState<number | null>(null);
|
||||
const api = useBackendAPI();
|
||||
|
||||
const fetchCredit = useCallback(async () => {
|
||||
try {
|
||||
const response = await api.getUserCredit();
|
||||
setCredit(response.credits);
|
||||
} catch (error) {
|
||||
console.error("Error fetching credit:", error);
|
||||
setCredit(null);
|
||||
}
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
fetchCredit();
|
||||
}, [fetchCredit]);
|
||||
const { credits, fetchCredits } = useCredits();
|
||||
|
||||
return (
|
||||
credit !== null && (
|
||||
credits !== null && (
|
||||
<Button
|
||||
onClick={fetchCredit}
|
||||
onClick={fetchCredits}
|
||||
variant="outline"
|
||||
className="flex items-center space-x-2 rounded-xl bg-gray-200"
|
||||
>
|
||||
<span className="mr-2 flex items-center text-foreground">
|
||||
{credit} <span className="ml-2 text-muted-foreground"> credits</span>
|
||||
{credits} <span className="ml-2 text-muted-foreground"> credits</span>
|
||||
</span>
|
||||
<IconRefresh />
|
||||
</Button>
|
||||
|
|
|
@ -24,7 +24,7 @@ export function NavBarButtons({ className }: { className?: string }) {
|
|||
icon: <BsBoxes />,
|
||||
},
|
||||
{
|
||||
href: "/store",
|
||||
href: "/marketplace",
|
||||
text: "Marketplace",
|
||||
icon: <IconMarketplace />,
|
||||
},
|
||||
|
|
|
@ -313,8 +313,6 @@ export const NodeGenericInputField: FC<{
|
|||
);
|
||||
}
|
||||
|
||||
console.log("propSchema", propSchema);
|
||||
|
||||
if ("properties" in propSchema) {
|
||||
// Render a multi-select for all-boolean sub-schemas with more than 3 properties
|
||||
if (
|
||||
|
|
|
@ -323,7 +323,7 @@ export const IconCoin = createIcon((props) => (
|
|||
viewBox="0 0 24 24"
|
||||
fill="none"
|
||||
stroke="currentColor"
|
||||
strokeWidth="2"
|
||||
strokeWidth="1.25"
|
||||
strokeLinecap="round"
|
||||
strokeLinejoin="round"
|
||||
aria-label="Coin Icon"
|
||||
|
|
|
@ -862,6 +862,7 @@ export default function useAgentGraph(
|
|||
title: "Error saving agent",
|
||||
description: errorMessage,
|
||||
});
|
||||
setSaveRunRequest({ request: "save", state: "error" });
|
||||
}
|
||||
}, [_saveAgent, toast]);
|
||||
|
||||
|
@ -874,7 +875,7 @@ export default function useAgentGraph(
|
|||
request: "save",
|
||||
state: "saving",
|
||||
});
|
||||
}, [saveAgent]);
|
||||
}, [saveAgent, saveRunRequest.state]);
|
||||
|
||||
const requestSaveAndRun = useCallback(() => {
|
||||
saveAgent();
|
||||
|
|
|
@ -0,0 +1,48 @@
import AutoGPTServerAPI from "@/lib/autogpt-server-api";
import { useCallback, useEffect, useMemo, useState } from "react";
import { loadStripe } from "@stripe/stripe-js";
import { useRouter } from "next/navigation";

const stripePromise = loadStripe(
process.env.NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY!,
);

export default function useCredits(): {
credits: number | null;
fetchCredits: () => void;
requestTopUp: (usd_amount: number) => Promise<void>;
} {
const [credits, setCredits] = useState<number | null>(null);
const api = useMemo(() => new AutoGPTServerAPI(), []);
const router = useRouter();

const fetchCredits = useCallback(async () => {
const response = await api.getUserCredit();
setCredits(response.credits);
}, [api]);

useEffect(() => {
fetchCredits();
}, [fetchCredits]);

const requestTopUp = useCallback(
async (usd_amount: number) => {
const stripe = await stripePromise;

if (!stripe) {
return;
}

// Convert dollar amount to credit count
const response = await api.requestTopUp(usd_amount * 100);
router.push(response.checkout_url);
},
[api, router],
);

return {
credits,
fetchCredits,
requestTopUp,
};
}
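A minimal, illustrative consumer of the hook added above; requestTopUp takes a dollar amount and converts it to credits internally (1 USD = 100 credits), so requestTopUp(5) starts a 500-credit checkout:

"use client";
import useCredits from "@/hooks/useCredits";

// Sketch only, not part of the diff: shows the balance and starts a $5 top-up.
export function TopUpExample() {
  const { credits, fetchCredits, requestTopUp } = useCredits();
  return (
    <div>
      <span>{credits ?? 0} credits</span>
      <button onClick={fetchCredits}>Refresh</button>
      <button onClick={() => requestTopUp(5)}>Top up $5</button>
    </div>
  );
}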
@ -88,6 +88,18 @@ export default class BackendAPI {
}
}

requestTopUp(amount: number): Promise<{ checkout_url: string }> {
return this._request("POST", "/credits", { amount });
}

getUserPaymentPortalLink(): Promise<{ url: string }> {
return this._get("/credits/manage");
}

fulfillCheckout(): Promise<void> {
return this._request("PATCH", "/credits");
}

getBlocks(): Promise<Block[]> {
return this._get("/blocks");
}
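Taken together, the three new client methods cover the whole top-up round trip: create a Checkout session, let Stripe redirect back to the credits page with ?topup=success, confirm fulfilment, and optionally open the billing portal. A condensed sketch under those assumptions (the wrapper function names are illustrative):

import BackendAPI from "@/lib/autogpt-server-api";

const api = new BackendAPI();

// Step 1 (credits page): create a Checkout session and hand off to Stripe.
// usdAmount * 100 matches the 1 USD = 100 credits conversion used by useCredits.
async function startTopUp(usdAmount: number) {
  const { checkout_url } = await api.requestTopUp(usdAmount * 100);
  window.location.assign(checkout_url);
}

// Step 2 (after Stripe redirects back with ?topup=success): confirm the purchase.
async function confirmTopUp() {
  await api.fulfillCheckout();
}

// Step 3 (any time): open the Stripe billing portal for card management.
async function openBillingPortal() {
  const { url } = await api.getUserPaymentPortalLink();
  window.location.assign(url);
}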
@ -111,7 +111,9 @@ export const PROVIDER_NAMES = {
GROQ: "groq",
IDEOGRAM: "ideogram",
JINA: "jina",
LINEAR: "linear",
MEDIUM: "medium",
MEM0: "mem0",
NOTION: "notion",
NVIDIA: "nvidia",
OLLAMA: "ollama",
@ -5,9 +5,9 @@ import { NextResponse, type NextRequest } from "next/server";
const PROTECTED_PAGES = [
"/monitor",
"/build",
"/store/profile",
"/store/settings",
"/store/dashboard",
"/marketplace/profile",
"/marketplace/settings",
"/marketplace/dashboard",
];
const ADMIN_PAGES = ["/admin"];

@ -87,7 +87,7 @@ export async function updateSession(request: NextRequest) {
ADMIN_PAGES.some((page) => request.nextUrl.pathname.startsWith(`${page}`))
) {
// no user, potentially respond by redirecting the user to the login page
url.pathname = `/store`;
url.pathname = `/marketplace`;
return NextResponse.redirect(url);
}
@ -5,7 +5,7 @@ test.describe("Authentication", () => {
|
|||
test("user can login successfully", async ({ page, loginPage, testUser }) => {
|
||||
await page.goto("/login");
|
||||
await loginPage.login(testUser.email, testUser.password);
|
||||
await test.expect(page).toHaveURL("/store");
|
||||
await test.expect(page).toHaveURL("/marketplace");
|
||||
await test
|
||||
.expect(page.getByTestId("profile-popout-menu-trigger"))
|
||||
.toBeVisible();
|
||||
|
@ -19,7 +19,7 @@ test.describe("Authentication", () => {
|
|||
await page.goto("/login");
|
||||
await loginPage.login(testUser.email, testUser.password);
|
||||
|
||||
await test.expect(page).toHaveURL("/store");
|
||||
await test.expect(page).toHaveURL("/marketplace");
|
||||
|
||||
// Click on the profile menu trigger to open popout
|
||||
await page.getByTestId("profile-popout-menu-trigger").click();
|
||||
|
@ -43,7 +43,7 @@ test.describe("Authentication", () => {
|
|||
}) => {
|
||||
await page.goto("/login");
|
||||
await loginPage.login(testUser.email, testUser.password);
|
||||
await test.expect(page).toHaveURL("/store");
|
||||
await test.expect(page).toHaveURL("/marketplace");
|
||||
// Click on the profile menu trigger to open popout
|
||||
await page.getByTestId("profile-popout-menu-trigger").click();
|
||||
|
||||
|
@ -52,7 +52,7 @@ test.describe("Authentication", () => {
|
|||
|
||||
await test.expect(page).toHaveURL("/login");
|
||||
await loginPage.login(testUser.email, testUser.password);
|
||||
await test.expect(page).toHaveURL("/store");
|
||||
await test.expect(page).toHaveURL("/marketplace");
|
||||
await test
|
||||
.expect(page.getByTestId("profile-popout-menu-trigger"))
|
||||
.toBeVisible();
|
||||
|
|
|
@ -42,39 +42,75 @@ test.describe("Build", () => { //(1)!
|
|||
});
|
||||
// --8<-- [end:BuildPageExample]
|
||||
|
||||
test("user can add all blocks", async ({ page }, testInfo) => {
|
||||
test("user can add all blocks a-l", async ({ page }, testInfo) => {
|
||||
// this test is slow af so we 10x the timeout (sorry future me)
|
||||
await test.setTimeout(testInfo.timeout * 10);
|
||||
await test.setTimeout(testInfo.timeout * 100);
|
||||
await test.expect(buildPage.isLoaded()).resolves.toBeTruthy();
|
||||
await test.expect(page).toHaveURL(new RegExp("/.*build"));
|
||||
await buildPage.closeTutorial();
|
||||
await buildPage.openBlocksPanel();
|
||||
const blocks = await buildPage.getBlocks();
|
||||
|
||||
// add all the blocks in order
|
||||
const blocksToSkip = await buildPage.getBlocksToSkip();
|
||||
|
||||
// add all the blocks in order except for the agent executor block
|
||||
for (const block of blocks) {
|
||||
if (block.id !== "e189baac-8c20-45a1-94a7-55177ea42565") {
|
||||
if (block.name[0].toLowerCase() >= "m") {
|
||||
continue;
|
||||
}
|
||||
if (!blocksToSkip.some((b) => b === block.id)) {
|
||||
await buildPage.addBlock(block);
|
||||
}
|
||||
}
|
||||
await buildPage.closeBlocksPanel();
|
||||
// check that all the blocks are visible
|
||||
for (const block of blocks) {
|
||||
if (block.id !== "e189baac-8c20-45a1-94a7-55177ea42565") {
|
||||
if (block.name[0].toLowerCase() >= "m") {
|
||||
continue;
|
||||
}
|
||||
if (!blocksToSkip.some((b) => b === block.id)) {
|
||||
console.log("Checking block:", block.name);
|
||||
await test.expect(buildPage.hasBlock(block)).resolves.toBeTruthy();
|
||||
}
|
||||
}
|
||||
// fill in the input for the agent input block
|
||||
await buildPage.fillBlockInputByPlaceholder(
|
||||
blocks.find((b) => b.name === "Agent Input")?.id ?? "",
|
||||
"Enter Name",
|
||||
"Agent Input Field",
|
||||
);
|
||||
await buildPage.fillBlockInputByPlaceholder(
|
||||
blocks.find((b) => b.name === "Agent Output")?.id ?? "",
|
||||
"Enter Name",
|
||||
"Agent Output Field",
|
||||
);
|
||||
|
||||
// check that we can save the agent with all the blocks
|
||||
await buildPage.saveAgent("all blocks test", "all blocks test");
|
||||
// page should have a url like http://localhost:3000/build?flowID=f4f3a1da-cfb3-430f-a074-a455b047e340
|
||||
await test.expect(page).toHaveURL(new RegExp("/.*build\\?flowID=.+"));
|
||||
});
|
||||
|
||||
test("user can add all blocks m-z", async ({ page }, testInfo) => {
|
||||
// this test is slow af so we 10x the timeout (sorry future me)
|
||||
await test.setTimeout(testInfo.timeout * 100);
|
||||
await test.expect(buildPage.isLoaded()).resolves.toBeTruthy();
|
||||
await test.expect(page).toHaveURL(new RegExp("/.*build"));
|
||||
await buildPage.closeTutorial();
|
||||
await buildPage.openBlocksPanel();
|
||||
const blocks = await buildPage.getBlocks();
|
||||
|
||||
const blocksToSkip = await buildPage.getBlocksToSkip();
|
||||
|
||||
// add all the blocks in order except for the agent executor block
|
||||
for (const block of blocks) {
|
||||
if (block.name[0].toLowerCase() < "m") {
|
||||
continue;
|
||||
}
|
||||
if (!blocksToSkip.some((b) => b === block.id)) {
|
||||
await buildPage.addBlock(block);
|
||||
}
|
||||
}
|
||||
await buildPage.closeBlocksPanel();
|
||||
// check that all the blocks are visible
|
||||
for (const block of blocks) {
|
||||
if (block.name[0].toLowerCase() < "m") {
|
||||
continue;
|
||||
}
|
||||
if (!blocksToSkip.some((b) => b === block.id)) {
|
||||
await test.expect(buildPage.hasBlock(block)).resolves.toBeTruthy();
|
||||
}
|
||||
}
|
||||
|
||||
// check that we can save the agent with all the blocks
|
||||
await buildPage.saveAgent("all blocks test", "all blocks test");
|
||||
// page should have a url like http://localhost:3000/build?flowID=f4f3a1da-cfb3-430f-a074-a455b047e340
|
||||
|
|
|
@ -6,8 +6,7 @@ import { v4 as uuidv4 } from "uuid";
import * as fs from "fs/promises";
import path from "path";
// --8<-- [start:AttachAgentId]

test.describe.skip("Monitor", () => {
test.describe("Monitor", () => {
  let buildPage: BuildPage;
  let monitorPage: MonitorPage;

@ -54,21 +53,25 @@ test.describe.skip("Monitor", () => {
    await test.expect(agents.length).toBeGreaterThan(0);
  });

  test("user can export and import agents", async ({
  test.skip("user can export and import agents", async ({
    page,
  }, testInfo: TestInfo) => {
    // --8<-- [start:ReadAgentId]
    if (testInfo.attachments.length === 0 || !testInfo.attachments[0].body) {
      throw new Error("No agent id attached to the test");
    }
    const id = testInfo.attachments[0].body.toString();
    const testAttachName = testInfo.attachments[0].body.toString();
    // --8<-- [end:ReadAgentId]
    const agents = await monitorPage.listAgents();

    const downloadPromise = page.waitForEvent("download");
    await monitorPage.exportToFile(
      agents.find((a: any) => a.id === id) || agents[0],
    const agent = agents.find(
      (a: any) => a.name === `test-agent-${testAttachName}`,
    );
    if (!agent) {
      throw new Error(`Agent ${testAttachName} not found`);
    }
    await monitorPage.exportToFile(agent);
    const download = await downloadPromise;

    // Wait for the download process to complete and save the downloaded file somewhere.

@ -78,9 +81,6 @@ test.describe.skip("Monitor", () => {
    console.log(`downloaded file to ${download.suggestedFilename()}`);
    await test.expect(download.suggestedFilename()).toBeDefined();
    // test-agent-uuid-v1.json
    if (id) {
      await test.expect(download.suggestedFilename()).toContain(id);
    }
    await test.expect(download.suggestedFilename()).toContain("test-agent-");
    await test.expect(download.suggestedFilename()).toContain("v1.json");

@ -89,9 +89,9 @@ test.describe.skip("Monitor", () => {
    const filesInFolder = await fs.readdir(
      `${monitorPage.downloadsFolder}/monitor`,
    );
    const importFile = filesInFolder.find((f) => f.includes(id));
    const importFile = filesInFolder.find((f) => f.includes(testAttachName));
    if (!importFile) {
      throw new Error(`No import file found for agent ${id}`);
      throw new Error(`No import file found for agent ${testAttachName}`);
    }
    const baseName = importFile.split(".")[0];
    await monitorPage.importFromFile(
@ -1,7 +1,7 @@
import { ElementHandle, Locator, Page } from "@playwright/test";
import { BasePage } from "./base.page";

interface Block {
export interface Block {
  id: string;
  name: string;
  description: string;

@ -378,6 +378,39 @@ export class BuildPage extends BasePage {
    };
  }

  async getAgentExecutorBlockDetails(): Promise<Block> {
    return {
      id: "e189baac-8c20-45a1-94a7-55177ea42565",
      name: "Agent Executor",
      description: "Executes an existing agent inside your agent",
    };
  }

  async getAgentOutputBlockDetails(): Promise<Block> {
    return {
      id: "363ae599-353e-4804-937e-b2ee3cef3da4",
      name: "Agent Output",
      description: "This block is used to output the result of an agent.",
    };
  }

  async getAgentInputBlockDetails(): Promise<Block> {
    return {
      id: "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
      name: "Agent Input",
      description: "This block is used to provide input to the graph.",
    };
  }

  async getGithubTriggerBlockDetails(): Promise<Block> {
    return {
      id: "6c60ec01-8128-419e-988f-96a063ee2fea",
      name: "Github Trigger",
      description:
        "This block triggers on pull request events and outputs the event type and payload.",
    };
  }

  async nextTutorialStep(): Promise<void> {
    console.log(`clicking next tutorial step`);
    await this.page.getByRole("button", { name: "Next" }).click();

@ -448,6 +481,15 @@ export class BuildPage extends BasePage {
    );
  }

  async getBlocksToSkip(): Promise<string[]> {
    return [
      (await this.getAgentExecutorBlockDetails()).id,
      (await this.getAgentInputBlockDetails()).id,
      (await this.getAgentOutputBlockDetails()).id,
      (await this.getGithubTriggerBlockDetails()).id,
    ];
  }

  async waitForRunTutorialButton(): Promise<void> {
    console.log(`waiting for run tutorial button`);
    await this.page.waitForSelector('[id="press-run-label"]');
@ -43,9 +43,6 @@ export class MonitorPage extends BasePage {
  async isLoaded(): Promise<boolean> {
    console.log(`checking if monitor page is loaded`);
    try {
      // Wait for network to settle first
      await this.page.waitForLoadState("networkidle", { timeout: 10_000 });

      // Wait for the monitor page
      await this.page.getByTestId("monitor-page").waitFor({
        state: "visible",

@ -55,7 +52,7 @@ export class MonitorPage extends BasePage {
      // Wait for table headers to be visible (indicates table structure is ready)
      await this.page.locator("thead th").first().waitFor({
        state: "visible",
        timeout: 5_000,
        timeout: 15_000,
      });

      // Wait for either a table row or an empty tbody to be present

@ -63,14 +60,14 @@ export class MonitorPage extends BasePage {
        // Wait for at least one row
        this.page.locator("tbody tr[data-testid]").first().waitFor({
          state: "visible",
          timeout: 5_000,
          timeout: 15_000,
        }),
        // OR wait for an empty tbody (indicating no agents but table is loaded)
        this.page
          .locator("tbody[data-testid='agent-flow-list-body']:empty")
          .waitFor({
            state: "visible",
            timeout: 5_000,
            timeout: 15_000,
          }),
      ]);

@ -114,6 +111,13 @@ export class MonitorPage extends BasePage {
      });
    }
    const filteredAgents = agents.reduce((acc, agent) => {
      if (!agent.id.includes("flow-run")) {
        acc.push(agent);
      }
      return acc;
    }, [] as Agent[]);

    return filteredAgents;
  }
@ -219,7 +223,7 @@ export class MonitorPage extends BasePage {
  async exportToFile(agent: Agent) {
    await this.clickAgent(agent.id);

    console.log(`exporting agent ${agent.id} ${agent.name} to file`);
    console.log(`exporting agent id: ${agent.id} name: ${agent.name} to file`);
    await this.page.getByTestId("export-button").click();
  }
@ -10,7 +10,7 @@ test.describe("Profile", () => {
    // Start each test with login using worker auth
    await page.goto("/login");
    await loginPage.login(testUser.email, testUser.password);
    await test.expect(page).toHaveURL("/store");
    await test.expect(page).toHaveURL("/marketplace");
  });

  test("user can view their profile information", async ({
@ -1693,10 +1693,10 @@
    outvariant "^1.4.3"
    strict-event-emitter "^0.5.1"

"@next/env@14.2.20":
  version "14.2.20"
  resolved "https://registry.yarnpkg.com/@next/env/-/env-14.2.20.tgz#0be2cc955f4eb837516e7d7382284cd5bc1d5a02"
  integrity sha512-JfDpuOCB0UBKlEgEy/H6qcBSzHimn/YWjUHzKl1jMeUO+QVRdzmTTl8gFJaNO87c8DXmVKhFCtwxQ9acqB3+Pw==
"@next/env@14.2.23":
  version "14.2.23"
  resolved "https://registry.yarnpkg.com/@next/env/-/env-14.2.23.tgz#3003b53693cbc476710b856f83e623c8231a6be9"
  integrity sha512-CysUC9IO+2Bh0omJ3qrb47S8DtsTKbFidGm6ow4gXIG6reZybqxbkH2nhdEm1tC8SmgzDdpq3BIML0PWsmyUYA==

"@next/eslint-plugin-next@15.1.3":
  version "15.1.3"

@ -1705,50 +1705,50 @@
  dependencies:
    fast-glob "3.3.1"

"@next/swc-darwin-arm64@14.2.20":
  version "14.2.20"
  resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.20.tgz#3c99d318c08362aedde5d2778eec3a50b8085d99"
  integrity sha512-WDfq7bmROa5cIlk6ZNonNdVhKmbCv38XteVFYsxea1vDJt3SnYGgxLGMTXQNfs5OkFvAhmfKKrwe7Y0Hs+rWOg==
"@next/swc-darwin-arm64@14.2.23":
  version "14.2.23"
  resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-14.2.23.tgz#6d83f03e35e163e8bbeaf5aeaa6bf55eed23d7a1"
  integrity sha512-WhtEntt6NcbABA8ypEoFd3uzq5iAnrl9AnZt9dXdO+PZLACE32z3a3qA5OoV20JrbJfSJ6Sd6EqGZTrlRnGxQQ==

"@next/swc-darwin-x64@14.2.20":
  version "14.2.20"
  resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.20.tgz#fd547fad1446a677f29c1160006fdd482bba4052"
  integrity sha512-XIQlC+NAmJPfa2hruLvr1H1QJJeqOTDV+v7tl/jIdoFvqhoihvSNykLU/G6NMgoeo+e/H7p/VeWSOvMUHKtTIg==
"@next/swc-darwin-x64@14.2.23":
  version "14.2.23"
  resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-14.2.23.tgz#e02abc35d5e36ce1550f674f8676999f293ba54f"
  integrity sha512-vwLw0HN2gVclT/ikO6EcE+LcIN+0mddJ53yG4eZd0rXkuEr/RnOaMH8wg/sYl5iz5AYYRo/l6XX7FIo6kwbw1Q==

"@next/swc-linux-arm64-gnu@14.2.20":
  version "14.2.20"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.20.tgz#1d6ba1929d3a11b74c0185cdeca1e38b824222ca"
  integrity sha512-pnzBrHTPXIMm5QX3QC8XeMkpVuoAYOmyfsO4VlPn+0NrHraNuWjdhe+3xLq01xR++iCvX+uoeZmJDKcOxI201Q==
"@next/swc-linux-arm64-gnu@14.2.23":
  version "14.2.23"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-14.2.23.tgz#f13516ad2d665950951b59e7c239574bb8504d63"
  integrity sha512-uuAYwD3At2fu5CH1wD7FpP87mnjAv4+DNvLaR9kiIi8DLStWSW304kF09p1EQfhcbUI1Py2vZlBO2VaVqMRtpg==

"@next/swc-linux-arm64-musl@14.2.20":
  version "14.2.20"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.20.tgz#0fe0c67b5d916f99ca76b39416557af609768f17"
  integrity sha512-WhJJAFpi6yqmUx1momewSdcm/iRXFQS0HU2qlUGlGE/+98eu7JWLD5AAaP/tkK1mudS/rH2f9E3WCEF2iYDydQ==
"@next/swc-linux-arm64-musl@14.2.23":
  version "14.2.23"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-14.2.23.tgz#10d05a1c161dc8426d54ccf6d9bbed6953a3252a"
  integrity sha512-Mm5KHd7nGgeJ4EETvVgFuqKOyDh+UMXHXxye6wRRFDr4FdVRI6YTxajoV2aHE8jqC14xeAMVZvLqYqS7isHL+g==

"@next/swc-linux-x64-gnu@14.2.20":
  version "14.2.20"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.20.tgz#6d29fa8cdb6a9f8250c2048aaa24538f0cd0b02d"
  integrity sha512-ao5HCbw9+iG1Kxm8XsGa3X174Ahn17mSYBQlY6VGsdsYDAbz/ZP13wSLfvlYoIDn1Ger6uYA+yt/3Y9KTIupRg==
"@next/swc-linux-x64-gnu@14.2.23":
  version "14.2.23"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-14.2.23.tgz#7f5856df080f58ba058268b30429a2ab52500536"
  integrity sha512-Ybfqlyzm4sMSEQO6lDksggAIxnvWSG2cDWnG2jgd+MLbHYn2pvFA8DQ4pT2Vjk3Cwrv+HIg7vXJ8lCiLz79qoQ==

"@next/swc-linux-x64-musl@14.2.20":
  version "14.2.20"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.20.tgz#bfc57482bc033fda8455e8aab1c3cbc44f0c4690"
  integrity sha512-CXm/kpnltKTT7945np6Td3w7shj/92TMRPyI/VvveFe8+YE+/YOJ5hyAWK5rpx711XO1jBCgXl211TWaxOtkaA==
"@next/swc-linux-x64-musl@14.2.23":
  version "14.2.23"
  resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-14.2.23.tgz#d494ebdf26421c91be65f9b1d095df0191c956d8"
  integrity sha512-OSQX94sxd1gOUz3jhhdocnKsy4/peG8zV1HVaW6DLEbEmRRtUCUQZcKxUD9atLYa3RZA+YJx+WZdOnTkDuNDNA==

"@next/swc-win32-arm64-msvc@14.2.20":
  version "14.2.20"
  resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.20.tgz#6f7783e643310510240a981776532ffe0e02af95"
  integrity sha512-upJn2HGQgKNDbXVfIgmqT2BN8f3z/mX8ddoyi1I565FHbfowVK5pnMEwauvLvaJf4iijvuKq3kw/b6E9oIVRWA==
"@next/swc-win32-arm64-msvc@14.2.23":
  version "14.2.23"
  resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-14.2.23.tgz#62786e7ba4822a20b6666e3e03e5a389b0e7eb3b"
  integrity sha512-ezmbgZy++XpIMTcTNd0L4k7+cNI4ET5vMv/oqNfTuSXkZtSA9BURElPFyarjjGtRgZ9/zuKDHoMdZwDZIY3ehQ==

"@next/swc-win32-ia32-msvc@14.2.20":
  version "14.2.20"
  resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.20.tgz#58c7720687e80a13795e22c29d5860fa142e44fc"
  integrity sha512-igQW/JWciTGJwj3G1ipalD2V20Xfx3ywQy17IV0ciOUBbFhNfyU1DILWsTi32c8KmqgIDviUEulW/yPb2FF90w==
"@next/swc-win32-ia32-msvc@14.2.23":
  version "14.2.23"
  resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-14.2.23.tgz#ef028af91e1c40a4ebba0d2c47b23c1eeb299594"
  integrity sha512-zfHZOGguFCqAJ7zldTKg4tJHPJyJCOFhpoJcVxKL9BSUHScVDnMdDuOU1zPPGdOzr/GWxbhYTjyiEgLEpAoFPA==

"@next/swc-win32-x64-msvc@14.2.20":
  version "14.2.20"
  resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.20.tgz#689bc7beb8005b73c95d926e7edfb7f73efc78f2"
  integrity sha512-AFmqeLW6LtxeFTuoB+MXFeM5fm5052i3MU6xD0WzJDOwku6SkZaxb1bxjBaRC8uNqTRTSPl0yMFtjNowIVI67w==
"@next/swc-win32-x64-msvc@14.2.23":
  version "14.2.23"
  resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-14.2.23.tgz#c81838f02f2f16a321b7533890fb63c1edec68e1"
  integrity sha512-xCtq5BD553SzOgSZ7UH5LH+OATQihydObTrCTvVzOro8QiWYKdBVwcB2Mn2MLMo6DGW9yH1LSPw7jS7HhgJgjw==

"@next/third-parties@^15.1.3":
  version "15.1.3"

@ -3257,6 +3257,11 @@
  resolved "https://registry.yarnpkg.com/@storybook/theming/-/theming-8.4.7.tgz#c308f6a883999bd35e87826738ab8a76515932b5"
  integrity sha512-99rgLEjf7iwfSEmdqlHkSG3AyLcK0sfExcr0jnc6rLiAkBhzuIsvcHjjUwkR210SOCgXqBPW0ZA6uhnuyppHLw==

"@stripe/stripe-js@^5.3.0":
  version "5.4.0"
  resolved "https://registry.yarnpkg.com/@stripe/stripe-js/-/stripe-js-5.4.0.tgz#847e870ddfe9283432526867857a4c1fba9b11ed"
  integrity sha512-3tfMbSvLGB+OsJ2MsjWjWo+7sp29dwx+3+9kG/TEnZQJt+EwbF/Nomm43cSK+6oXZA9uhspgyrB+BbrPRumx4g==

"@supabase/auth-js@2.67.3":
  version "2.67.3"
  resolved "https://registry.yarnpkg.com/@supabase/auth-js/-/auth-js-2.67.3.tgz#a1f5eb22440b0cdbf87fe2ecae662a8dd8bb2028"

@ -8976,12 +8981,12 @@ next-themes@^0.4.4:
  resolved "https://registry.yarnpkg.com/next-themes/-/next-themes-0.4.4.tgz#ce6f68a4af543821bbc4755b59c0d3ced55c2d13"
  integrity sha512-LDQ2qIOJF0VnuVrrMSMLrWGjRMkq+0mpgl6e0juCLqdJ+oo8Q84JRWT6Wh11VDQKkMMe+dVzDKLWs5n87T+PkQ==

next@^14.2.13:
  version "14.2.20"
  resolved "https://registry.yarnpkg.com/next/-/next-14.2.20.tgz#99b551d87ca6505ce63074904cb31a35e21dac9b"
  integrity sha512-yPvIiWsiyVYqJlSQxwmzMIReXn5HxFNq4+tlVQ812N1FbvhmE+fDpIAD7bcS2mGYQwPJ5vAsQouyme2eKsxaug==
next@^14.2.21:
  version "14.2.23"
  resolved "https://registry.yarnpkg.com/next/-/next-14.2.23.tgz#37edc9a4d42c135fd97a4092f829e291e2e7c943"
  integrity sha512-mjN3fE6u/tynneLiEg56XnthzuYw+kD7mCujgVqioxyPqbmiotUCGJpIZGS/VaPg3ZDT1tvWxiVyRzeqJFm/kw==
  dependencies:
    "@next/env" "14.2.20"
    "@next/env" "14.2.23"
    "@swc/helpers" "0.5.5"
    busboy "1.6.0"
    caniuse-lite "^1.0.30001579"

@ -8989,15 +8994,15 @@ next@^14.2.13:
    postcss "8.4.31"
    styled-jsx "5.1.1"
  optionalDependencies:
    "@next/swc-darwin-arm64" "14.2.20"
    "@next/swc-darwin-x64" "14.2.20"
    "@next/swc-linux-arm64-gnu" "14.2.20"
    "@next/swc-linux-arm64-musl" "14.2.20"
    "@next/swc-linux-x64-gnu" "14.2.20"
    "@next/swc-linux-x64-musl" "14.2.20"
    "@next/swc-win32-arm64-msvc" "14.2.20"
    "@next/swc-win32-ia32-msvc" "14.2.20"
    "@next/swc-win32-x64-msvc" "14.2.20"
    "@next/swc-darwin-arm64" "14.2.23"
    "@next/swc-darwin-x64" "14.2.23"
    "@next/swc-linux-arm64-gnu" "14.2.23"
    "@next/swc-linux-arm64-musl" "14.2.23"
    "@next/swc-linux-x64-gnu" "14.2.23"
    "@next/swc-linux-x64-musl" "14.2.23"
    "@next/swc-win32-arm64-msvc" "14.2.23"
    "@next/swc-win32-ia32-msvc" "14.2.23"
    "@next/swc-win32-x64-msvc" "14.2.23"

no-case@^3.0.4:
  version "3.0.4"
Binary file not shown (image replaced; size before: 105 KiB, after: 81 KiB).
@ -102,6 +102,11 @@ Follow these steps to create and test a new block:
- **API request**: Send a GET request to the Wikipedia API.
- **Error handling**: Handle the exceptions that might occur during the API request and data processing. We don't need to catch every exception, only the ones we expect and can handle. Uncaught exceptions are automatically yielded as `error` in the output, and any block that raises an exception (or yields an `error` output) is marked as failed. Prefer raising exceptions over yielding `error`, since raising stops the execution immediately.
- **Yield**: Use `yield` to output the results. Prefer to output one result object at a time. If you are calling a function that returns a list, you can yield each item separately; you can also yield the whole list, but do both rather than yielding only the list. For example, if you were writing a block that outputs emails, you would yield each email as a separate result object and could additionally yield the whole list as a single result object. Yielding an output named `error` stops the execution right away and marks the block execution as failed. (See the short sketch after this list.)
- **kwargs**: The `kwargs` parameter passes additional arguments to the block. It is not used in the example above, but it is available to the block. You can also declare specific kwargs as keyword-only parameters in the `run` signature, e.g. `def run(self, input_data: Input, *, user_id: str, **kwargs) -> BlockOutput:`.
  Available kwargs are:
  - `user_id`: The ID of the user running the block.
  - `run_id`: The ID of the run that is executing the block. This changes every time the agent has a new "run".
  - `graph_id`: The ID of the agent that is executing the block. This is the same for every version of the agent.
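
To make the yield and error-handling guidance concrete, here is a minimal sketch of a `run` method. It is illustrative only: `fetch_emails` and the `email`/`emails` output names are hypothetical placeholders, while the keyword-only `user_id` follows the signature shown above.

```python
def run(self, input_data: Input, *, user_id: str, **kwargs) -> BlockOutput:
    if not input_data.query:
        # Prefer raising for problems you cannot recover from: execution stops
        # immediately and the block is marked as failed.
        raise ValueError("query must not be empty")

    # `fetch_emails` is a hypothetical helper that returns a list of strings.
    emails = fetch_emails(input_data.query)

    # Prefer yielding one result object at a time...
    for email in emails:
        yield "email", email

    # ...and optionally also yield the whole list as a single additional output.
    yield "emails", emails
```

Any exception raised inside `run` that you do not catch is surfaced as an `error` output and fails the block, so a blanket `try`/`except` is unnecessary.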

### Field Types

@ -45,13 +45,7 @@ Now that both Ollama and the AutoGPT platform are running we can move onto using
2. In the "LLM Model" dropdown, select "llama3.2" (this is the model we downloaded earlier).

3. You will see it ask for "Ollama Credentials"; simply press "Enter API key".

   You will then see "Add new API key for Ollama". In the API key field you can enter anything you want, as Ollama does not require an API key (I usually just enter a space); for the Name, call it "Ollama", then press "Save & use this API key".

4. After that you will see the block again; add your prompts, then save and run the graph:
3. Now we need to add some prompts, then save and run the graph:

That's it! You've successfully set up the AutoGPT platform and made an LLM call to Ollama.