feat(autogpt_server): Add `GET /graphs/{graph_id}/executions` endpoint (#7330)
* feat(autogpt_builder): Add `AutoGPTServerAPI` client * migrate API calls in Flow.tsx to new API client * feat(autogpt_server): Add `/graphs/{graph_id}/executions` endpoint In `data/execution.py`: - Add `list_executions` function - Rename `get_executions` to `get_execution_results` In `server/server.py`: - Add route - Add `AgentServer.list_graph_runs` - Rename `AgentServer.get_executions` to `get_run_execution_results` * feat(autogpt_builder): Add `listFlowRunIDs` endpoint to `AutoGPTServerAPI` client * Move `Schema` to `types.ts` and rename to `ObjectSchema`
parent
200800312a
commit
0df2199c42
|
@ -96,6 +96,21 @@ export default class AutoGPTServerAPI {
|
|||
}
|
||||
}
|
||||
|
||||
async listFlowRunIDs(flowId: string): Promise<string[]> {
|
||||
const path = `/graphs/${flowId}/executions`
|
||||
try {
|
||||
const response = await fetch(this.baseUrl + path);
|
||||
if (!response.ok) {
|
||||
console.warn(`GET ${path} returned non-OK response:`, response);
|
||||
throw new Error(`HTTP error ${response.status}!`);
|
||||
}
|
||||
return await response.json();
|
||||
} catch (error) {
|
||||
console.error('Error fetching flow runs:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async getFlowExecutionInfo(flowId: string, runId: string): Promise<NodeExecutionResult[]> {
|
||||
const path = `/graphs/${flowId}/executions/${runId}`;
|
||||
try {
|
||||
|
|
|
@ -205,7 +205,14 @@ async def update_execution_status(node_exec_id: str, status: ExecutionStatus) ->
|
|||
raise ValueError(f"Execution {node_exec_id} not found.")
|
||||
|
||||
|
||||
async def get_executions(graph_exec_id: str) -> list[ExecutionResult]:
|
||||
async def list_executions(graph_id: str) -> list[str]:
    """Return the IDs of all graph executions recorded for *graph_id*.

    Only the execution IDs are returned; the rest of each
    ``AgentGraphExecution`` record is discarded.
    """
    records = await AgentGraphExecution.prisma().find_many(
        where={"agentGraphId": graph_id},
    )
    return [record.id for record in records]
|
||||
|
||||
|
||||
async def get_execution_results(graph_exec_id: str) -> list[ExecutionResult]:
|
||||
executions = await AgentNodeExecution.prisma().find_many(
|
||||
where={"agentGraphExecutionId": graph_exec_id},
|
||||
include={"Input": True, "Output": True},
|
||||
|
|
|
@ -79,9 +79,14 @@ class AgentServer(AppProcess):
|
|||
endpoint=self.execute_graph,
|
||||
methods=["POST"],
|
||||
)
|
||||
router.add_api_route(
|
||||
path="/graphs/{graph_id}/executions",
|
||||
endpoint=self.list_graph_runs,
|
||||
methods=["GET"],
|
||||
)
|
||||
router.add_api_route(
|
||||
path="/graphs/{graph_id}/executions/{run_id}",
|
||||
endpoint=self.get_executions,
|
||||
endpoint=self.get_run_execution_results,
|
||||
methods=["GET"],
|
||||
)
|
||||
router.add_api_route(
|
||||
|
@ -155,14 +160,21 @@ class AgentServer(AppProcess):
|
|||
msg = e.__str__().encode().decode("unicode_escape")
|
||||
raise HTTPException(status_code=400, detail=msg)
|
||||
|
||||
async def get_executions(
|
||||
async def list_graph_runs(self, graph_id: str) -> list[str]:
    """List the IDs of all runs (executions) of graph *graph_id*.

    Raises:
        HTTPException: 404 if no graph with the given ID exists.
    """
    # Validate the graph exists before querying its executions.
    if not await get_graph(graph_id):
        raise HTTPException(status_code=404, detail=f"Agent #{graph_id} not found.")

    return await execution.list_executions(graph_id)
|
||||
|
||||
async def get_run_execution_results(
    self, graph_id: str, run_id: str
) -> list[execution.ExecutionResult]:
    """Return the node-execution results of run *run_id* of graph *graph_id*.

    Raises:
        HTTPException: 404 if no graph with the given ID exists.
    """
    # Validate the graph exists before fetching the run's results.
    if not await get_graph(graph_id):
        raise HTTPException(status_code=404, detail=f"Agent #{graph_id} not found.")

    return await execution.get_execution_results(run_id)
|
||||
|
||||
async def create_schedule(self, graph_id: str, cron: str, input_data: dict) -> dict:
|
||||
graph = await get_graph(graph_id)
|
||||
|
|
|
@ -61,7 +61,9 @@ async def execute_graph(test_manager: ExecutionManager, test_graph: graph.Graph)
|
|||
assert len(executions) == 2
|
||||
|
||||
async def is_execution_completed():
|
||||
execs = await agent_server.get_executions(test_graph.id, graph_exec_id)
|
||||
execs = await agent_server.get_run_execution_results(
|
||||
test_graph.id, graph_exec_id
|
||||
)
|
||||
return test_manager.queue.empty() and len(execs) == 4
|
||||
|
||||
# Wait for the executions to complete
|
||||
|
@ -78,7 +80,9 @@ async def execute_graph(test_manager: ExecutionManager, test_graph: graph.Graph)
|
|||
async def assert_executions(test_graph: graph.Graph, graph_exec_id: str):
|
||||
text = "Hello, World!"
|
||||
agent_server = AgentServer()
|
||||
executions = await agent_server.get_executions(test_graph.id, graph_exec_id)
|
||||
executions = await agent_server.get_run_execution_results(
|
||||
test_graph.id, graph_exec_id
|
||||
)
|
||||
|
||||
# Executing ParrotBlock1
|
||||
exec = executions[0]
|
||||
|
|
|
@ -117,7 +117,7 @@ Make sure to only comment on a relevant post.
|
|||
|
||||
async def wait_execution(test_manager, graph_id, graph_exec_id) -> list:
|
||||
async def is_execution_completed():
|
||||
execs = await AgentServer().get_executions(graph_id, graph_exec_id)
|
||||
execs = await AgentServer().get_run_execution_results(graph_id, graph_exec_id)
|
||||
"""
|
||||
List of execution:
|
||||
reddit_get_post_node 1 (produced 3 posts)
|
||||
|
@ -136,7 +136,9 @@ async def wait_execution(test_manager, graph_id, graph_exec_id) -> list:
|
|||
# Wait for the executions to complete
|
||||
for i in range(120):
|
||||
if await is_execution_completed():
|
||||
return await AgentServer().get_executions(graph_id, graph_exec_id)
|
||||
return await AgentServer().get_run_execution_results(
|
||||
graph_id, graph_exec_id
|
||||
)
|
||||
time.sleep(1)
|
||||
|
||||
assert False, "Execution did not complete in time."
|
||||
|
|
Loading…
Reference in New Issue