feat(platform): Include all agent versions in Runs in Monitor (#8752)
The graph version is bumped on each save, so once an agent's version changes, its past execution history disappears: the Monitor page only shows the latest version's executions.

### Changes 🏗️

- Add `get_executions` on the backend that returns all executions of all graphs for a user
- Display all executions (for all versions) of each graph in Monitor
- Rename the TS mirror type `ExecutionMeta` to `GraphExecution` for consistency with the backend
- Remove the redundant `FlowRun` type on the frontend and use `GraphExecution` instead
- Round execution duration text in Monitor to one decimal place

### Checklist 📋

#### For code changes:

- [ ] I have clearly listed my changes in the PR description
- [ ] I have made a test plan
- [ ] I have tested my changes according to the test plan:
  <!-- Put your test plan here: -->
  - [ ] ...

<details>
<summary>Example test plan</summary>

- [ ] Create from scratch and execute an agent with at least 3 blocks
- [ ] Import an agent from file upload, and confirm it executes correctly
- [ ] Upload agent to marketplace
- [ ] Import an agent from marketplace and confirm it executes correctly
- [ ] Edit an agent from monitor, and confirm it executes correctly
</details>

#### For configuration changes:

- [ ] `.env.example` is updated or already compatible with my changes
- [ ] `docker-compose.yml` is updated or already compatible with my changes
- [ ] I have included a list of my configuration changes in the PR description (under **Changes**)

<details>
<summary>Examples of configuration changes</summary>

- Changing ports
- Adding new services that need to communicate with each other
- Secrets or environment variable changes
- New or infrastructure changes such as databases
</details>

---------

Co-authored-by: Zamil Majdy <zamil.majdy@agpt.co>
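As a rough sketch of how the new endpoint is consumed from the frontend, using only the `AutoGPTServerAPI.getExecutions()` client method and `GraphExecution` type added in this diff (the helper below is illustrative and not part of the PR):

```ts
import AutoGPTServerAPI, { GraphExecution } from "@/lib/autogpt-server-api";

// Illustrative helper (not part of this PR): list a user's runs for one agent,
// across every saved version of that agent, newest first.
async function listRunsForAgent(graphId: string): Promise<GraphExecution[]> {
  const api = new AutoGPTServerAPI();
  // getExecutions() returns executions of all graphs and all versions
  // belonging to the authenticated user.
  const executions = await api.getExecutions();
  return executions
    .filter((e) => e.graph_id === graphId) // keep runs of this agent, any version
    .sort((a, b) => Number(b.started_at) - Number(a.started_at));
}
```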
parent d827d4f9e4
commit 6307ca1841
@@ -7,7 +7,7 @@ from typing import Any, Literal, Optional, Type
 import prisma
 from prisma.models import AgentGraph, AgentGraphExecution, AgentNode, AgentNodeLink
-from prisma.types import AgentGraphWhereInput
+from prisma.types import AgentGraphExecutionWhereInput, AgentGraphWhereInput
 from pydantic.fields import computed_field
 
 from backend.blocks.agent import AgentExecutorBlock

@@ -105,6 +105,8 @@ class GraphExecution(BaseDbModel):
     duration: float
     total_run_time: float
     status: ExecutionStatus
+    graph_id: str
+    graph_version: int
 
     @staticmethod
     def from_db(execution: AgentGraphExecution):

@@ -130,6 +132,8 @@ class GraphExecution(BaseDbModel):
             duration=duration,
             total_run_time=total_run_time,
             status=ExecutionStatus(execution.executionStatus),
+            graph_id=execution.agentGraphId,
+            graph_version=execution.agentGraphVersion,
         )
 
 
@@ -423,15 +427,13 @@ async def get_graphs(
     Returns:
         list[GraphModel]: A list of objects representing the retrieved graphs.
     """
-    where_clause: AgentGraphWhereInput = {}
+    where_clause: AgentGraphWhereInput = {"userId": user_id}
 
     if filter_by == "active":
         where_clause["isActive"] = True
     elif filter_by == "template":
         where_clause["isTemplate"] = True
 
-    where_clause["userId"] = user_id
-
     graph_include = AGENT_GRAPH_INCLUDE
     graph_include["AgentGraphExecution"] = include_executions
 
@@ -445,6 +447,17 @@ async def get_graphs(
     return [GraphModel.from_db(graph) for graph in graphs]
 
 
+async def get_executions(user_id: str) -> list[GraphExecution]:
+    where_clause: AgentGraphExecutionWhereInput = {"userId": user_id}
+
+    executions = await AgentGraphExecution.prisma().find_many(
+        where=where_clause,
+        order={"createdAt": "desc"},
+    )
+
+    return [GraphExecution.from_db(execution) for execution in executions]
+
+
 async def get_graph(
     graph_id: str,
     version: int | None = None,
@@ -397,6 +397,17 @@ async def stop_graph_run(
     return await execution_db.get_execution_results(graph_exec_id)
 
 
+@v1_router.get(
+    path="/executions",
+    tags=["graphs"],
+    dependencies=[Depends(auth_middleware)],
+)
+async def get_executions(
+    user_id: Annotated[str, Depends(get_user_id)],
+) -> list[graph_db.GraphExecution]:
+    return await graph_db.get_executions(user_id=user_id)
+
+
 @v1_router.get(
     path="/graphs/{graph_id}/executions",
     tags=["graphs"],
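For reference, an item returned by the new `GET /executions` route above would look roughly like the following. This is a sketch assuming FastAPI's default JSON serialization of the `GraphExecution` model shown earlier; the exact timestamp formatting may differ, and the IDs are placeholders:

```ts
// Hypothetical example of one element of the GET /executions response body.
const exampleExecution = {
  execution_id: "3f6f2c9a-0000-0000-0000-000000000000", // placeholder ID
  started_at: "2024-12-10T12:34:56Z",
  ended_at: "2024-12-10T12:35:08Z",
  duration: 12.3, // wall-clock seconds
  total_run_time: 4.6, // summed node run time in seconds
  status: "COMPLETED", // INCOMPLETE | QUEUED | RUNNING | COMPLETED | FAILED
  graph_id: "b1946ac9-0000-0000-0000-000000000000", // placeholder ID
  graph_version: 3,
};
```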
@@ -2,13 +2,12 @@
 import React, { useCallback, useEffect, useMemo, useState } from "react";
 
 import AutoGPTServerAPI, {
   GraphMetaWithRuns,
-  ExecutionMeta,
+  GraphExecution,
   Schedule,
   GraphMeta,
 } from "@/lib/autogpt-server-api";
 
 import { Card } from "@/components/ui/card";
-import { FlowRun } from "@/lib/types";
 import {
   AgentFlowList,
   FlowInfo,

@@ -19,13 +18,11 @@ import {
 import { SchedulesTable } from "@/components/monitor/scheduleTable";
 
 const Monitor = () => {
-  const [flows, setFlows] = useState<GraphMetaWithRuns[]>([]);
-  const [flowRuns, setFlowRuns] = useState<FlowRun[]>([]);
+  const [flows, setFlows] = useState<GraphMeta[]>([]);
+  const [executions, setExecutions] = useState<GraphExecution[]>([]);
   const [schedules, setSchedules] = useState<Schedule[]>([]);
-  const [selectedFlow, setSelectedFlow] = useState<GraphMetaWithRuns | null>(
-    null,
-  );
-  const [selectedRun, setSelectedRun] = useState<FlowRun | null>(null);
+  const [selectedFlow, setSelectedFlow] = useState<GraphMeta | null>(null);
+  const [selectedRun, setSelectedRun] = useState<GraphExecution | null>(null);
   const [sortColumn, setSortColumn] = useState<keyof Schedule>("id");
   const [sortDirection, setSortDirection] = useState<"asc" | "desc">("asc");

@@ -44,16 +41,11 @@ const Monitor = () => {
   );
 
   const fetchAgents = useCallback(() => {
-    api.listGraphsWithRuns().then((agent) => {
+    api.listGraphs().then((agent) => {
       setFlows(agent);
-      const flowRuns = agent.flatMap((graph) =>
-        graph.executions != null
-          ? graph.executions.map((execution) =>
-              flowRunFromExecutionMeta(graph, execution),
-            )
-          : [],
-      );
-      setFlowRuns(flowRuns);
     });
+    api.getExecutions().then((executions) => {
+      setExecutions(executions);
+    });
   }, [api]);

@@ -91,7 +83,7 @@ const Monitor = () => {
       <AgentFlowList
         className={column1}
         flows={flows}
-        flowRuns={flowRuns}
+        executions={executions}
         selectedFlow={selectedFlow}
         onSelectFlow={(f) => {
           setSelectedRun(null);

@@ -103,25 +95,29 @@ const Monitor = () => {
       <FlowRunsList
         className={column2}
         flows={flows}
-        runs={[
+        executions={[
           ...(selectedFlow
-            ? flowRuns.filter((v) => v.graphID == selectedFlow.id)
-            : flowRuns),
-        ].sort((a, b) => Number(a.startTime) - Number(b.startTime))}
+            ? executions.filter((v) => v.graph_id == selectedFlow.id)
+            : executions),
+        ].sort((a, b) => Number(b.started_at) - Number(a.started_at))}
         selectedRun={selectedRun}
-        onSelectRun={(r) => setSelectedRun(r.id == selectedRun?.id ? null : r)}
+        onSelectRun={(r) =>
+          setSelectedRun(r.execution_id == selectedRun?.execution_id ? null : r)
+        }
       />
       {(selectedRun && (
         <FlowRunInfo
-          flow={selectedFlow || flows.find((f) => f.id == selectedRun.graphID)!}
-          flowRun={selectedRun}
+          flow={
+            selectedFlow || flows.find((f) => f.id == selectedRun.graph_id)!
+          }
+          execution={selectedRun}
           className={column3}
         />
       )) ||
         (selectedFlow && (
          <FlowInfo
            flow={selectedFlow}
-            flowRuns={flowRuns.filter((r) => r.graphID == selectedFlow.id)}
+            executions={executions.filter((e) => e.graph_id == selectedFlow.id)}
            className={column3}
            refresh={() => {
              fetchAgents();

@@ -131,7 +127,7 @@ const Monitor = () => {
          />
        )) || (
          <Card className={`p-6 ${column3}`}>
-            <FlowRunsStats flows={flows} flowRuns={flowRuns} />
+            <FlowRunsStats flows={flows} executions={executions} />
          </Card>
        )}
      <div className="col-span-full xl:col-span-6">

@@ -148,20 +144,4 @@ const Monitor = () => {
   );
 };
 
-function flowRunFromExecutionMeta(
-  graphMeta: GraphMetaWithRuns,
-  executionMeta: ExecutionMeta,
-): FlowRun {
-  return {
-    id: executionMeta.execution_id,
-    graphID: graphMeta.id,
-    graphVersion: graphMeta.version,
-    status: executionMeta.status,
-    startTime: executionMeta.started_at,
-    endTime: executionMeta.ended_at,
-    duration: executionMeta.duration,
-    totalRunTime: executionMeta.total_run_time,
-  } as FlowRun;
-}
-
 export default Monitor;
@@ -1,4 +1,7 @@
-import AutoGPTServerAPI, { GraphMeta } from "@/lib/autogpt-server-api";
+import AutoGPTServerAPI, {
+  GraphExecution,
+  GraphMeta,
+} from "@/lib/autogpt-server-api";
 import React, { useEffect, useMemo, useState } from "react";
 import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
 import { Button } from "@/components/ui/button";

@@ -29,18 +32,17 @@ import {
   TableRow,
 } from "@/components/ui/table";
 import moment from "moment/moment";
-import { FlowRun } from "@/lib/types";
 import { DialogTitle } from "@/components/ui/dialog";
 
 export const AgentFlowList = ({
   flows,
-  flowRuns,
+  executions,
   selectedFlow,
   onSelectFlow,
   className,
 }: {
   flows: GraphMeta[];
-  flowRuns?: FlowRun[];
+  executions?: GraphExecution[];
   selectedFlow: GraphMeta | null;
   onSelectFlow: (f: GraphMeta) => void;
   className?: string;

@@ -127,29 +129,29 @@ export const AgentFlowList = ({
           <TableHead>Name</TableHead>
           {/* <TableHead>Status</TableHead> */}
           {/* <TableHead>Last updated</TableHead> */}
-          {flowRuns && (
+          {executions && (
             <TableHead className="md:hidden lg:table-cell">
               # of runs
             </TableHead>
           )}
-          {flowRuns && <TableHead>Last run</TableHead>}
+          {executions && <TableHead>Last run</TableHead>}
         </TableRow>
       </TableHeader>
       <TableBody data-testid="agent-flow-list-body">
         {flows
           .map((flow) => {
             let runCount = 0,
-              lastRun: FlowRun | null = null;
-            if (flowRuns) {
-              const _flowRuns = flowRuns.filter(
-                (r) => r.graphID == flow.id,
+              lastRun: GraphExecution | null = null;
+            if (executions) {
+              const _flowRuns = executions.filter(
+                (r) => r.graph_id == flow.id,
               );
               runCount = _flowRuns.length;
               lastRun =
                 runCount == 0
                   ? null
                   : _flowRuns.reduce((a, c) =>
-                      a.startTime > c.startTime ? a : c,
+                      a.started_at > c.started_at ? a : c,
                     );
             }
             return { flow, runCount, lastRun };

@@ -158,7 +160,7 @@ export const AgentFlowList = ({
             if (!a.lastRun && !b.lastRun) return 0;
             if (!a.lastRun) return 1;
             if (!b.lastRun) return -1;
-            return b.lastRun.startTime - a.lastRun.startTime;
+            return b.lastRun.started_at - a.lastRun.started_at;
           })
           .map(({ flow, runCount, lastRun }) => (
             <TableRow

@@ -176,17 +178,17 @@ export const AgentFlowList = ({
               {/* <TableCell>
                 {flow.updatedAt ?? "???"}
               </TableCell> */}
-              {flowRuns && (
+              {executions && (
                 <TableCell className="md:hidden lg:table-cell">
                   {runCount}
                 </TableCell>
               )}
-              {flowRuns &&
+              {executions &&
                 (!lastRun ? (
                   <TableCell />
                 ) : (
-                  <TableCell title={moment(lastRun.startTime).toString()}>
-                    {moment(lastRun.startTime).fromNow()}
+                  <TableCell title={moment(lastRun.started_at).toString()}>
+                    {moment(lastRun.started_at).fromNow()}
                   </TableCell>
                 ))}
             </TableRow>
@@ -1,10 +1,10 @@
 import React, { useEffect, useMemo, useState } from "react";
 import AutoGPTServerAPI, {
+  GraphExecution,
   Graph,
   GraphMeta,
   safeCopyGraph,
 } from "@/lib/autogpt-server-api";
-import { FlowRun } from "@/lib/types";
 import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
 import {
   DropdownMenu,

@@ -33,11 +33,11 @@ import {
 export const FlowInfo: React.FC<
   React.HTMLAttributes<HTMLDivElement> & {
     flow: GraphMeta;
-    flowRuns: FlowRun[];
+    executions: GraphExecution[];
     flowVersion?: number | "all";
     refresh: () => void;
   }
-> = ({ flow, flowRuns, flowVersion, refresh, ...props }) => {
+> = ({ flow, executions, flowVersion, refresh, ...props }) => {
   const api = useMemo(() => new AutoGPTServerAPI(), []);
 
   const [flowVersions, setFlowVersions] = useState<Graph[] | null>(null);

@@ -142,10 +142,11 @@ export const FlowInfo: React.FC<
       <CardContent>
         <FlowRunsStats
           flows={[selectedFlowVersion ?? flow]}
-          flowRuns={flowRuns.filter(
-            (r) =>
-              r.graphID == flow.id &&
-              (selectedVersion == "all" || r.graphVersion == selectedVersion),
+          executions={executions.filter(
+            (execution) =>
+              execution.graph_id == flow.id &&
+              (selectedVersion == "all" ||
+                execution.graph_version == selectedVersion),
           )}
         />
       </CardContent>
@@ -1,12 +1,10 @@
 import React, { useCallback, useEffect, useMemo, useState } from "react";
 import AutoGPTServerAPI, {
   BlockIORootSchema,
   Graph,
+  GraphExecution,
   GraphMeta,
   NodeExecutionResult,
   SpecialBlockID,
 } from "@/lib/autogpt-server-api";
-import { FlowRun } from "@/lib/types";
 import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
 import Link from "next/link";
 import { Button, buttonVariants } from "@/components/ui/button";

@@ -19,9 +17,9 @@ import RunnerOutputUI, { BlockOutput } from "../runner-ui/RunnerOutputUI";
 export const FlowRunInfo: React.FC<
   React.HTMLAttributes<HTMLDivElement> & {
     flow: GraphMeta;
-    flowRun: FlowRun;
+    execution: GraphExecution;
   }
-> = ({ flow, flowRun, ...props }) => {
+> = ({ flow, execution, ...props }) => {
   const [isOutputOpen, setIsOutputOpen] = useState(false);
   const [blockOutputs, setBlockOutputs] = useState<BlockOutput[]>([]);
   const api = useMemo(() => new AutoGPTServerAPI(), []);

@@ -29,7 +27,7 @@ export const FlowRunInfo: React.FC<
   const fetchBlockResults = useCallback(async () => {
     const executionResults = await api.getGraphExecutionInfo(
       flow.id,
-      flowRun.id,
+      execution.execution_id,
     );
 
     // Create a map of the latest COMPLETED execution results of output nodes by node_id

@@ -71,7 +69,7 @@ export const FlowRunInfo: React.FC<
         result: result.output_data?.output || undefined,
       })),
     );
-  }, [api, flow.id, flowRun.id]);
+  }, [api, flow.id, execution.execution_id]);
 
   // Fetch graph and execution data
   useEffect(() => {

@@ -79,15 +77,15 @@ export const FlowRunInfo: React.FC<
     fetchBlockResults();
   }, [isOutputOpen, fetchBlockResults]);
 
-  if (flowRun.graphID != flow.id) {
+  if (execution.graph_id != flow.id) {
     throw new Error(
-      `FlowRunInfo can't be used with non-matching flowRun.flowID and flow.id`,
+      `FlowRunInfo can't be used with non-matching execution.graph_id and flow.id`,
     );
   }
 
   const handleStopRun = useCallback(() => {
-    api.stopGraphExecution(flow.id, flowRun.id);
-  }, [api, flow.id, flowRun.id]);
+    api.stopGraphExecution(flow.id, execution.execution_id);
+  }, [api, flow.id, execution.execution_id]);
 
   return (
     <>

@@ -95,17 +93,18 @@ export const FlowRunInfo: React.FC<
       <CardHeader className="flex-row items-center justify-between space-x-3 space-y-0">
         <div>
           <CardTitle>
-            {flow.name} <span className="font-light">v{flow.version}</span>
+            {flow.name}{" "}
+            <span className="font-light">v{execution.graph_version}</span>
           </CardTitle>
           <p className="mt-2">
             Agent ID: <code>{flow.id}</code>
           </p>
           <p className="mt-1">
-            Run ID: <code>{flowRun.id}</code>
+            Run ID: <code>{execution.execution_id}</code>
           </p>
         </div>
         <div className="flex space-x-2">
-          {flowRun.status === "running" && (
+          {execution.status === "RUNNING" && (
             <Button onClick={handleStopRun} variant="destructive">
               <IconSquare className="mr-2" /> Stop Run
             </Button>

@@ -124,19 +123,20 @@ export const FlowRunInfo: React.FC<
       <CardContent>
         <div>
           <strong>Status:</strong>{" "}
-          <FlowRunStatusBadge status={flowRun.status} />
+          <FlowRunStatusBadge status={execution.status} />
         </div>
         <p>
           <strong>Started:</strong>{" "}
-          {moment(flowRun.startTime).format("YYYY-MM-DD HH:mm:ss")}
+          {moment(execution.started_at).format("YYYY-MM-DD HH:mm:ss")}
         </p>
         <p>
           <strong>Finished:</strong>{" "}
-          {moment(flowRun.endTime).format("YYYY-MM-DD HH:mm:ss")}
+          {moment(execution.ended_at).format("YYYY-MM-DD HH:mm:ss")}
         </p>
         <p>
-          <strong>Duration (run time):</strong> {flowRun.duration} (
-          {flowRun.totalRunTime}) seconds
+          <strong>Duration (run time):</strong>{" "}
+          {execution.duration.toFixed(1)} (
+          {execution.total_run_time.toFixed(1)}) seconds
         </p>
       </CardContent>
     </Card>
@@ -1,20 +1,20 @@
 import React from "react";
-import { FlowRun } from "@/lib/types";
 import { Badge } from "@/components/ui/badge";
 import { cn } from "@/lib/utils";
+import { GraphExecution } from "@/lib/autogpt-server-api";
 
 export const FlowRunStatusBadge: React.FC<{
-  status: FlowRun["status"];
+  status: GraphExecution["status"];
   className?: string;
 }> = ({ status, className }) => (
   <Badge
     variant="default"
     className={cn(
-      status === "running"
+      status === "RUNNING"
        ? "bg-blue-500 dark:bg-blue-700"
-        : status === "waiting"
+        : status === "QUEUED"
          ? "bg-yellow-500 dark:bg-yellow-600"
-          : status === "success"
+          : status === "COMPLETED"
            ? "bg-green-500 dark:bg-green-600"
            : "bg-red-500 dark:bg-red-700",
      className,
@@ -1,6 +1,5 @@
 import React from "react";
-import { GraphMeta } from "@/lib/autogpt-server-api";
-import { FlowRun } from "@/lib/types";
+import { GraphExecution, GraphMeta } from "@/lib/autogpt-server-api";
 import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
 import {
   Table,

@@ -16,11 +15,11 @@ import { TextRenderer } from "../ui/render";
 
 export const FlowRunsList: React.FC<{
   flows: GraphMeta[];
-  runs: FlowRun[];
+  executions: GraphExecution[];
   className?: string;
-  selectedRun?: FlowRun | null;
-  onSelectRun: (r: FlowRun) => void;
-}> = ({ flows, runs, selectedRun, onSelectRun, className }) => (
+  selectedRun?: GraphExecution | null;
+  onSelectRun: (r: GraphExecution) => void;
+}> = ({ flows, executions, selectedRun, onSelectRun, className }) => (
   <Card className={className}>
     <CardHeader>
       <CardTitle>Runs</CardTitle>

@@ -36,27 +35,33 @@ export const FlowRunsList: React.FC<{
         </TableRow>
       </TableHeader>
       <TableBody data-testid="flow-runs-list-body">
-        {runs.map((run) => (
+        {executions.map((execution) => (
           <TableRow
-            key={run.id}
-            data-testid={`flow-run-${run.id}-graph-${run.graphID}`}
-            data-runid={run.id}
-            data-graphid={run.graphID}
+            key={execution.execution_id}
+            data-testid={`flow-run-${execution.execution_id}-graph-${execution.graph_id}`}
+            data-runid={execution.execution_id}
+            data-graphid={execution.graph_id}
             className="cursor-pointer"
-            onClick={() => onSelectRun(run)}
-            data-state={selectedRun?.id == run.id ? "selected" : null}
+            onClick={() => onSelectRun(execution)}
+            data-state={
+              selectedRun?.execution_id == execution.execution_id
+                ? "selected"
+                : null
+            }
           >
             <TableCell>
               <TextRenderer
-                value={flows.find((f) => f.id == run.graphID)!.name}
+                value={flows.find((f) => f.id == execution.graph_id)?.name}
                 truncateLengthLimit={30}
               />
             </TableCell>
-            <TableCell>{moment(run.startTime).format("HH:mm")}</TableCell>
             <TableCell>
-              <FlowRunStatusBadge status={run.status} />
+              {moment(execution.started_at).format("HH:mm")}
             </TableCell>
-            <TableCell>{formatDuration(run.duration)}</TableCell>
+            <TableCell>
+              <FlowRunStatusBadge status={execution.status} />
+            </TableCell>
+            <TableCell>{formatDuration(execution.duration)}</TableCell>
           </TableRow>
         ))}
       </TableBody>
@@ -1,6 +1,5 @@
 import React, { useState } from "react";
-import { GraphMeta } from "@/lib/autogpt-server-api";
-import { FlowRun } from "@/lib/types";
+import { GraphExecution, GraphMeta } from "@/lib/autogpt-server-api";
 import { CardTitle } from "@/components/ui/card";
 import { Button } from "@/components/ui/button";
 import {

@@ -13,10 +12,10 @@ import { FlowRunsTimeline } from "@/components/monitor/FlowRunsTimeline";
 
 export const FlowRunsStatus: React.FC<{
   flows: GraphMeta[];
-  flowRuns: FlowRun[];
+  executions: GraphExecution[];
   title?: string;
   className?: string;
-}> = ({ flows, flowRuns, title, className }) => {
+}> = ({ flows, executions: executions, title, className }) => {
   /* "dateMin": since the first flow in the dataset
    * number > 0: custom date (unix timestamp)
    * number < 0: offset relative to Date.now() (in seconds) */

@@ -30,8 +29,8 @@ export const FlowRunsStatus: React.FC<{
       : statsSince;
   const filteredFlowRuns =
     statsSinceTimestamp != null
-      ? flowRuns.filter((fr) => fr.startTime > statsSinceTimestamp)
-      : flowRuns;
+      ? executions.filter((fr) => fr.started_at > statsSinceTimestamp)
+      : executions;
 
   return (
     <div className={className}>

@@ -94,7 +93,7 @@ export const FlowRunsStatus: React.FC<{
       </div>
       <FlowRunsTimeline
         flows={flows}
-        flowRuns={flowRuns}
+        executions={executions}
         dataMin={statsSince}
         className="mt-3"
       />

@@ -105,7 +104,10 @@ export const FlowRunsStatus: React.FC<{
       </p>
       <p>
         <strong>Total run time:</strong>{" "}
-        {filteredFlowRuns.reduce((total, run) => total + run.totalRunTime, 0)}{" "}
+        {filteredFlowRuns.reduce(
+          (total, run) => total + run.total_run_time,
+          0,
+        )}{" "}
         seconds
       </p>
       {/* <p><strong>Total cost:</strong> €1,23</p> */}
@@ -1,4 +1,4 @@
-import { GraphMeta } from "@/lib/autogpt-server-api";
+import { GraphExecution, GraphMeta } from "@/lib/autogpt-server-api";
 import {
   ComposedChart,
   DefaultLegendContentProps,

@@ -14,17 +14,16 @@ import moment from "moment/moment";
 import { Card } from "@/components/ui/card";
 import { cn, hashString } from "@/lib/utils";
 import React from "react";
-import { FlowRun } from "@/lib/types";
 import { FlowRunStatusBadge } from "@/components/monitor/FlowRunStatusBadge";
 
 export const FlowRunsTimeline = ({
   flows,
-  flowRuns,
+  executions,
   dataMin,
   className,
 }: {
   flows: GraphMeta[];
-  flowRuns: FlowRun[];
+  executions: GraphExecution[];
   dataMin: "dataMin" | number;
   className?: string;
 }) => (

@@ -61,9 +60,9 @@ export const FlowRunsTimeline = ({
     <Tooltip
       content={({ payload, label }) => {
         if (payload && payload.length) {
-          const data: FlowRun & { time: number; _duration: number } =
+          const data: GraphExecution & { time: number; _duration: number } =
             payload[0].payload;
-          const flow = flows.find((f) => f.id === data.graphID);
+          const flow = flows.find((f) => f.id === data.graph_id);
           return (
             <Card className="p-2 text-xs leading-normal">
               <p>

@@ -78,12 +77,12 @@ export const FlowRunsTimeline = ({
               </div>
               <p>
                 <strong>Started:</strong>{" "}
-                {moment(data.startTime).format("YYYY-MM-DD HH:mm:ss")}
+                {moment(data.started_at).format("YYYY-MM-DD HH:mm:ss")}
               </p>
               <p>
                 <strong>Duration / run time:</strong>{" "}
                 {formatDuration(data.duration)} /{" "}
-                {formatDuration(data.totalRunTime)}
+                {formatDuration(data.total_run_time)}
               </p>
             </Card>
           );

@@ -94,27 +93,31 @@ export const FlowRunsTimeline = ({
     {flows.map((flow) => (
       <Scatter
         key={flow.id}
-        data={flowRuns
-          .filter((fr) => fr.graphID == flow.id)
-          .map((fr) => ({
-            ...fr,
-            time: fr.startTime + fr.totalRunTime * 1000,
-            _duration: fr.totalRunTime,
+        data={executions
+          .filter((e) => e.graph_id == flow.id)
+          .map((e) => ({
+            ...e,
+            time: e.started_at + e.total_run_time * 1000,
+            _duration: e.total_run_time,
           }))}
         name={flow.name}
         fill={`hsl(${(hashString(flow.id) * 137.5) % 360}, 70%, 50%)`}
       />
     ))}
-    {flowRuns.map((run) => (
+    {executions.map((execution) => (
       <Line
-        key={run.id}
+        key={execution.execution_id}
         type="linear"
         dataKey="_duration"
         data={[
-          { ...run, time: run.startTime, _duration: 0 },
-          { ...run, time: run.endTime, _duration: run.totalRunTime },
+          { ...execution, time: execution.started_at, _duration: 0 },
+          {
+            ...execution,
+            time: execution.ended_at,
+            _duration: execution.total_run_time,
+          },
         ]}
-        stroke={`hsl(${(hashString(run.graphID) * 137.5) % 360}, 70%, 50%)`}
+        stroke={`hsl(${(hashString(execution.graph_id) * 137.5) % 360}, 70%, 50%)`}
         strokeWidth={2}
         dot={false}
         legendType="none"
@@ -7,7 +7,7 @@ import {
   CredentialsDeleteNeedConfirmationResponse,
   CredentialsDeleteResponse,
   CredentialsMetaResponse,
-  ExecutionMeta,
+  GraphExecution,
   Graph,
   GraphCreatable,
   GraphExecuteResponse,

@@ -74,6 +74,10 @@ export default class BaseAutoGPTServerAPI {
     return graphs.map(parseGraphMetaWithRuns);
   }
 
+  getExecutions(): Promise<GraphExecution[]> {
+    return this._get(`/executions`);
+  }
+
   listTemplates(): Promise<GraphMeta[]> {
     return this._get("/templates");
   }

@@ -527,19 +531,9 @@ function parseGraphMetaWithRuns(result: any): GraphMetaWithRuns {
   };
 }
 
-function parseExecutionMetaTimestamps(result: any): ExecutionMeta {
-  let status: "running" | "waiting" | "success" | "failed" = "success";
-  if (result.status === "FAILED") {
-    status = "failed";
-  } else if (["QUEUED", "RUNNING"].includes(result.status)) {
-    status = "running";
-  } else if (result.status === "INCOMPLETE") {
-    status = "waiting";
-  }
-
+function parseExecutionMetaTimestamps(result: any): GraphExecution {
   return {
     ...result,
-    status,
     started_at: new Date(result.started_at).getTime(),
     ended_at: result.ended_at ? new Date(result.ended_at).getTime() : undefined,
   };
@@ -188,17 +188,19 @@ export type LinkCreatable = Omit<Link, "id" | "is_static"> & {
   id?: string;
 };
 
-/* Mirror of autogpt_server/data/graph.py:ExecutionMeta */
-export type ExecutionMeta = {
+/* Mirror of backend/data/graph.py:GraphExecution */
+export type GraphExecution = {
   execution_id: string;
   started_at: number;
   ended_at: number;
   duration: number;
   total_run_time: number;
-  status: "running" | "waiting" | "success" | "failed";
+  status: "INCOMPLETE" | "QUEUED" | "RUNNING" | "COMPLETED" | "FAILED";
+  graph_id: string;
+  graph_version: number;
 };
 
-/* Mirror of backend/data/graph.py:GraphMeta */
+/* backend/data/graph.py:Graph = GraphMeta & GraphMetaWithRuns & Graph */
 export type GraphMeta = {
   id: string;
   version: number;

@@ -211,10 +213,9 @@ export type GraphMeta = {
 };
 
 export type GraphMetaWithRuns = GraphMeta & {
-  executions: ExecutionMeta[];
+  executions: GraphExecution[];
 };
 
-/* Mirror of backend/data/graph.py:Graph */
 export type Graph = GraphMeta & {
   nodes: Array<Node>;
   links: Array<Link>;
@@ -1,13 +0,0 @@
-import { NodeExecutionResult } from "@/lib/autogpt-server-api";
-
-export type FlowRun = {
-  id: string;
-  graphID: string;
-  graphVersion: number;
-  status: "running" | "waiting" | "success" | "failed";
-  startTime: number; // unix timestamp (ms)
-  endTime: number; // unix timestamp (ms)
-  duration: number; // seconds
-  totalRunTime: number; // seconds
-  nodeExecutionResults: NodeExecutionResult[];
-};
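The removed `FlowRun` type above maps field-for-field onto the new `GraphExecution` mirror type. The shim below is purely illustrative (not part of the PR) and only documents the rename; the old lowercase `status` values have no one-to-one equivalent, and `nodeExecutionResults` is simply dropped:

```ts
import { GraphExecution } from "@/lib/autogpt-server-api";

// Hypothetical mapping from the new GraphExecution shape back to the removed
// FlowRun field names, shown only to document the rename.
type LegacyFlowRun = {
  id: string;
  graphID: string;
  graphVersion: number;
  startTime: number; // unix timestamp (ms)
  endTime: number; // unix timestamp (ms)
  duration: number; // seconds
  totalRunTime: number; // seconds
};

function toLegacyFlowRun(execution: GraphExecution): LegacyFlowRun {
  return {
    id: execution.execution_id,
    graphID: execution.graph_id,
    graphVersion: execution.graph_version,
    startTime: execution.started_at,
    endTime: execution.ended_at,
    duration: execution.duration,
    totalRunTime: execution.total_run_time,
  };
}
```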