refactor(agent, forge): Move tests from `autogpt` to `forge` (#7247)

- Move `autogpt/tests/vcr_cassettes` submodule to `forge/tests/vcr_cassettes`
- Remove unneeded markers from `pyproject.toml`: `requires_openai_api_key`, `requires_huggingface_api_key`
- Update relevant GitHub workflows

Moved relevant tests from `autogpt/tests` to the appropriate directories (a brief illustrative sketch follows the lists below):
- Component tests to their respective component dirs
- `autogpt/tests/unit/test_web_search.py` → `forge/components/web/test_search.py`
- `autogpt/tests/unit/test_git_commands.py` → `forge/components/git_operations/test_git_operations.py`
- `autogpt/tests/unit/test_file_operations.py` → `forge/components/file_manager/test_file_manager.py`
- `autogpt/tests/integration/test_image_gen.py` → `forge/components/image_gen/test_image_gen.py`
- `autogpt/tests/integration/test_web_selenium.py` → `forge/components/web/test_selenium.py`
- `autogpt/tests/integration/test_execute_code.py` → `forge/components/code_executor/test_code_executor.py`
- `autogpt/tests/unit/test_s3_file_storage.py` → `forge/file_storage/test_s3_file_storage.py`
- `autogpt/tests/unit/test_gcs_file_storage.py` → `forge/file_storage/test_gcs_file_storage.py`
- `autogpt/tests/unit/test_local_file_storage.py` → `forge/file_storage/test_local_file_storage.py`
- `autogpt/tests/unit/test_json.py` → `forge/json/test_parsing.py`
- `autogpt/tests/unit/test_logs.py` → `forge/logging/test_utils.py`
- `autogpt/tests/unit/test_url_validation.py` → `forge/utils/test_url_validator.py`
- `autogpt/tests/unit/test_text_file_parsers.py` → `forge/utils/test_file_operations.py`

- (Re)moved dependencies from `autogpt/pyproject.toml` that were only used in these test files.

Also:
- Added `load_env_vars` fixture to `forge/conftest.py`
- Fixed a type error in `forge/components/web/test_search.py`
- Merged `autogpt/.gitattributes` into root `.gitattributes`
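
To illustrate the resulting layout, here is a minimal, hypothetical component test in the style the moved files now follow (the fixture mirrors the `git_operations` diff further down; the final test is an example only, not code from this PR):

```python
import pytest

# Tests now sit next to the component they cover, so the component is
# imported relatively instead of being pulled off an Agent instance.
from . import GitOperationsComponent


@pytest.fixture
def git_ops_component() -> GitOperationsComponent:
    # Components are constructed directly; shared fixtures such as `storage`
    # come from the new forge/conftest.py when a test needs them.
    return GitOperationsComponent()


def test_component_constructs(git_ops_component: GitOperationsComponent):
    # Illustrative placeholder; the real tests exercise the component's git commands.
    assert isinstance(git_ops_component, GitOperationsComponent)
```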

---------

Co-authored-by: Reinier van der Leer <pwuts@agpt.co>
Krzysztof Czerwinski 2024-07-04 01:09:01 +01:00 committed by GitHub
parent 7415e24fc3
commit 08612cc3bf
36 changed files with 315 additions and 247 deletions

.gitattributes (vendored)

@ -3,3 +3,6 @@ frontend/build/** linguist-generated
**/poetry.lock linguist-generated
docs/_javascript/** linguist-vendored
# Exclude VCR cassettes from stats
forge/tests/vcr_cassettes/**/**.y*ml linguist-generated

.github/workflows/autogpt-ci.yml

@ -6,13 +6,11 @@ on:
paths:
- '.github/workflows/autogpt-ci.yml'
- 'autogpt/**'
- '!autogpt/tests/vcr_cassettes'
pull_request:
branches: [ master, development, release-* ]
paths:
- '.github/workflows/autogpt-ci.yml'
- 'autogpt/**'
- '!autogpt/tests/vcr_cassettes'
concurrency:
group: ${{ format('autogpt-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
@ -73,37 +71,6 @@ jobs:
git config --global user.name "Auto-GPT-Bot"
git config --global user.email "github-bot@agpt.co"
- name: Checkout cassettes
if: ${{ startsWith(github.event_name, 'pull_request') }}
env:
PR_BASE: ${{ github.event.pull_request.base.ref }}
PR_BRANCH: ${{ github.event.pull_request.head.ref }}
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
run: |
cassette_branch="${PR_AUTHOR}-${PR_BRANCH}"
cassette_base_branch="${PR_BASE}"
cd tests/vcr_cassettes
if ! git ls-remote --exit-code --heads origin $cassette_base_branch ; then
cassette_base_branch="master"
fi
if git ls-remote --exit-code --heads origin $cassette_branch ; then
git fetch origin $cassette_branch
git fetch origin $cassette_base_branch
git checkout $cassette_branch
# Pick non-conflicting cassette updates from the base branch
git merge --no-commit --strategy-option=ours origin/$cassette_base_branch
echo "Using cassettes from mirror branch '$cassette_branch'," \
"synced to upstream branch '$cassette_base_branch'."
else
git checkout -b $cassette_branch
echo "Branch '$cassette_branch' does not exist in cassette submodule." \
"Using cassettes from '$cassette_base_branch'."
fi
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
@ -163,80 +130,6 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
flags: autogpt-agent,${{ runner.os }}
- id: setup_git_auth
name: Set up git token authentication
# Cassettes may be pushed even when tests fail
if: success() || failure()
run: |
config_key="http.${{ github.server_url }}/.extraheader"
if [ "${{ runner.os }}" = 'macOS' ]; then
base64_pat=$(echo -n "pat:${{ secrets.PAT_REVIEW }}" | base64)
else
base64_pat=$(echo -n "pat:${{ secrets.PAT_REVIEW }}" | base64 -w0)
fi
git config "$config_key" \
"Authorization: Basic $base64_pat"
cd tests/vcr_cassettes
git config "$config_key" \
"Authorization: Basic $base64_pat"
echo "config_key=$config_key" >> $GITHUB_OUTPUT
- id: push_cassettes
name: Push updated cassettes
# For pull requests, push updated cassettes even when tests fail
if: github.event_name == 'push' || (! github.event.pull_request.head.repo.fork && (success() || failure()))
env:
PR_BRANCH: ${{ github.event.pull_request.head.ref }}
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
run: |
if [ "${{ startsWith(github.event_name, 'pull_request') }}" = "true" ]; then
is_pull_request=true
cassette_branch="${PR_AUTHOR}-${PR_BRANCH}"
else
cassette_branch="${{ github.ref_name }}"
fi
cd tests/vcr_cassettes
# Commit & push changes to cassettes if any
if ! git diff --quiet; then
git add .
git commit -m "Auto-update cassettes"
git push origin HEAD:$cassette_branch
if [ ! $is_pull_request ]; then
cd ../..
git add tests/vcr_cassettes
git commit -m "Update cassette submodule"
git push origin HEAD:$cassette_branch
fi
echo "updated=true" >> $GITHUB_OUTPUT
else
echo "updated=false" >> $GITHUB_OUTPUT
echo "No cassette changes to commit"
fi
- name: Post Set up git token auth
if: steps.setup_git_auth.outcome == 'success'
run: |
git config --unset-all '${{ steps.setup_git_auth.outputs.config_key }}'
git submodule foreach git config --unset-all '${{ steps.setup_git_auth.outputs.config_key }}'
- name: Apply "behaviour change" label and comment on PR
if: ${{ startsWith(github.event_name, 'pull_request') }}
run: |
PR_NUMBER="${{ github.event.pull_request.number }}"
TOKEN="${{ secrets.PAT_REVIEW }}"
REPO="${{ github.repository }}"
if [[ "${{ steps.push_cassettes.outputs.updated }}" == "true" ]]; then
echo "Adding label and comment..."
echo $TOKEN | gh auth login --with-token
gh issue edit $PR_NUMBER --add-label "behaviour change"
gh issue comment $PR_NUMBER --body "You changed AutoGPT's behaviour on ${{ runner.os }}. The cassettes have been updated and will be merged to the submodule when this Pull Request gets merged."
fi
- name: Upload logs to artifact
if: always()
uses: actions/upload-artifact@v4

.github/workflows/autogpt-docker-ci.yml

@ -6,13 +6,11 @@ on:
paths:
- '.github/workflows/autogpt-docker-ci.yml'
- 'autogpt/**'
- '!autogpt/tests/vcr_cassettes'
pull_request:
branches: [ master, development, release-* ]
paths:
- '.github/workflows/autogpt-docker-ci.yml'
- 'autogpt/**'
- '!autogpt/tests/vcr_cassettes'
concurrency:
group: ${{ format('autogpt-docker-ci-{0}', github.head_ref && format('pr-{0}', github.event.pull_request.number) || github.sha) }}

.github/workflows/autogpt-server-ci.yml

@ -6,13 +6,11 @@ on:
paths:
- ".github/workflows/autogpt-server-ci.yml"
- "rnd/autogpt_server/**"
- "!autogpt/tests/vcr_cassettes"
pull_request:
branches: [master, development, release-*]
paths:
- ".github/workflows/autogpt-server-ci.yml"
- "rnd/autogpt_server/**"
- "!autogpt/tests/vcr_cassettes"
concurrency:
group: ${{ format('autogpt-server-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}

.github/workflows/forge-ci.yml

@ -6,11 +6,13 @@ on:
paths:
- '.github/workflows/forge-ci.yml'
- 'forge/**'
- '!forge/tests/vcr_cassettes'
pull_request:
branches: [ master, development, release-* ]
paths:
- '.github/workflows/forge-ci.yml'
- 'forge/**'
- '!forge/tests/vcr_cassettes'
concurrency:
group: ${{ format('forge-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
@ -66,6 +68,37 @@ jobs:
fetch-depth: 0
submodules: true
- name: Checkout cassettes
if: ${{ startsWith(github.event_name, 'pull_request') }}
env:
PR_BASE: ${{ github.event.pull_request.base.ref }}
PR_BRANCH: ${{ github.event.pull_request.head.ref }}
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
run: |
cassette_branch="${PR_AUTHOR}-${PR_BRANCH}"
cassette_base_branch="${PR_BASE}"
cd tests/vcr_cassettes
if ! git ls-remote --exit-code --heads origin $cassette_base_branch ; then
cassette_base_branch="master"
fi
if git ls-remote --exit-code --heads origin $cassette_branch ; then
git fetch origin $cassette_branch
git fetch origin $cassette_base_branch
git checkout $cassette_branch
# Pick non-conflicting cassette updates from the base branch
git merge --no-commit --strategy-option=ours origin/$cassette_base_branch
echo "Using cassettes from mirror branch '$cassette_branch'," \
"synced to upstream branch '$cassette_base_branch'."
else
git checkout -b $cassette_branch
echo "Branch '$cassette_branch' does not exist in cassette submodule." \
"Using cassettes from '$cassette_base_branch'."
fi
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
@ -121,6 +154,80 @@ jobs:
token: ${{ secrets.CODECOV_TOKEN }}
flags: forge,${{ runner.os }}
- id: setup_git_auth
name: Set up git token authentication
# Cassettes may be pushed even when tests fail
if: success() || failure()
run: |
config_key="http.${{ github.server_url }}/.extraheader"
if [ "${{ runner.os }}" = 'macOS' ]; then
base64_pat=$(echo -n "pat:${{ secrets.PAT_REVIEW }}" | base64)
else
base64_pat=$(echo -n "pat:${{ secrets.PAT_REVIEW }}" | base64 -w0)
fi
git config "$config_key" \
"Authorization: Basic $base64_pat"
cd tests/vcr_cassettes
git config "$config_key" \
"Authorization: Basic $base64_pat"
echo "config_key=$config_key" >> $GITHUB_OUTPUT
- id: push_cassettes
name: Push updated cassettes
# For pull requests, push updated cassettes even when tests fail
if: github.event_name == 'push' || (! github.event.pull_request.head.repo.fork && (success() || failure()))
env:
PR_BRANCH: ${{ github.event.pull_request.head.ref }}
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
run: |
if [ "${{ startsWith(github.event_name, 'pull_request') }}" = "true" ]; then
is_pull_request=true
cassette_branch="${PR_AUTHOR}-${PR_BRANCH}"
else
cassette_branch="${{ github.ref_name }}"
fi
cd tests/vcr_cassettes
# Commit & push changes to cassettes if any
if ! git diff --quiet; then
git add .
git commit -m "Auto-update cassettes"
git push origin HEAD:$cassette_branch
if [ ! $is_pull_request ]; then
cd ../..
git add tests/vcr_cassettes
git commit -m "Update cassette submodule"
git push origin HEAD:$cassette_branch
fi
echo "updated=true" >> $GITHUB_OUTPUT
else
echo "updated=false" >> $GITHUB_OUTPUT
echo "No cassette changes to commit"
fi
- name: Post Set up git token auth
if: steps.setup_git_auth.outcome == 'success'
run: |
git config --unset-all '${{ steps.setup_git_auth.outputs.config_key }}'
git submodule foreach git config --unset-all '${{ steps.setup_git_auth.outputs.config_key }}'
- name: Apply "behaviour change" label and comment on PR
if: ${{ startsWith(github.event_name, 'pull_request') }}
run: |
PR_NUMBER="${{ github.event.pull_request.number }}"
TOKEN="${{ secrets.PAT_REVIEW }}"
REPO="${{ github.repository }}"
if [[ "${{ steps.push_cassettes.outputs.updated }}" == "true" ]]; then
echo "Adding label and comment..."
echo $TOKEN | gh auth login --with-token
gh issue edit $PR_NUMBER --add-label "behaviour change"
gh issue comment $PR_NUMBER --body "You changed AutoGPT's behaviour on ${{ runner.os }}. The cassettes have been updated and will be merged to the submodule when this Pull Request gets merged."
fi
- name: Upload logs to artifact
if: always()
uses: actions/upload-artifact@v4


@ -5,7 +5,7 @@ on:
push:
branches: [ master, development, release-* ]
paths-ignore:
- 'autogpt/tests/vcr_cassettes'
- 'forge/tests/vcr_cassettes'
- 'benchmark/reports/**'
# So that the `dirtyLabel` is removed if conflicts are resolve
# We recommend `pull_request_target` so that github secrets are available.


@ -9,7 +9,7 @@ on:
- 'forge/**'
- 'benchmark/**'
- '**.py'
- '!autogpt/tests/vcr_cassettes'
- '!forge/tests/vcr_cassettes'
pull_request:
branches: [ master, development, release-* ]
paths:
@ -18,7 +18,7 @@ on:
- 'forge/**'
- 'benchmark/**'
- '**.py'
- '!autogpt/tests/vcr_cassettes'
- '!forge/tests/vcr_cassettes'
concurrency:
group: ${{ format('lint-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}

.gitmodules (vendored)

@ -1,3 +1,3 @@
[submodule "autogpt/tests/vcr_cassettes"]
path = autogpt/tests/vcr_cassettes
[submodule "forge/tests/vcr_cassettes"]
path = forge/tests/vcr_cassettes
url = https://github.com/Significant-Gravitas/Auto-GPT-test-cassettes

autogpt/.gitattributes (deleted)

@ -1,5 +0,0 @@
# Exclude VCR cassettes from stats
tests/vcr_cassettes/**/**.y*ml linguist-generated
# Mark documentation as such
docs/**.md linguist-documentation

autogpt/poetry.lock (generated)

@ -6065,20 +6065,6 @@ dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2
doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
[[package]]
name = "types-beautifulsoup4"
version = "4.12.0.20240106"
description = "Typing stubs for beautifulsoup4"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-beautifulsoup4-4.12.0.20240106.tar.gz", hash = "sha256:98d628985b71b140bd3bc22a8cb0ab603c2f2d08f20d37925965eb4a21739be8"},
{file = "types_beautifulsoup4-4.12.0.20240106-py3-none-any.whl", hash = "sha256:cbdd60ab8aeac737ac014431b6e921b43e84279c0405fdd25a6900bb0e71da5b"},
]
[package.dependencies]
types-html5lib = "*"
[[package]]
name = "types-colorama"
version = "0.4.15.20240106"
@ -6090,28 +6076,6 @@ files = [
{file = "types_colorama-0.4.15.20240106-py3-none-any.whl", hash = "sha256:18294bc18f60dc0b4895de8119964a5d895f5e180c2d1308fdd33009c0fa0f38"},
]
[[package]]
name = "types-html5lib"
version = "1.1.11.20240106"
description = "Typing stubs for html5lib"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-html5lib-1.1.11.20240106.tar.gz", hash = "sha256:fc3a1b18eb601b3eeaf92c900bd67675c0a4fa1dd1d2a2893ebdb46923547ee9"},
{file = "types_html5lib-1.1.11.20240106-py3-none-any.whl", hash = "sha256:61993cb89220107481e0f1da65c388ff8cf3d8c5f6e8483c97559639a596b697"},
]
[[package]]
name = "types-pillow"
version = "10.2.0.20240111"
description = "Typing stubs for Pillow"
optional = false
python-versions = ">=3.8"
files = [
{file = "types-Pillow-10.2.0.20240111.tar.gz", hash = "sha256:e8d359bfdc5a149a3c90a7e153cb2d0750ddf7fc3508a20dfadabd8a9435e354"},
{file = "types_Pillow-10.2.0.20240111-py3-none-any.whl", hash = "sha256:1f4243b30c143b56b0646626f052e4269123e550f9096cdfb5fbd999daee7dbb"},
]
[[package]]
name = "typing-extensions"
version = "4.9.0"
@ -6793,4 +6757,4 @@ benchmark = ["agbenchmark"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "38a549db0d7726a14e3f990206928b6187e009aee31755b7286ede1c568d359b"
content-hash = "b3d4efee5861b32152024dada1ec61f4241122419cb538012c00a6ed55ac8a4b"

autogpt/pyproject.toml

@ -22,21 +22,16 @@ serve = "autogpt.app.cli:serve"
python = "^3.10"
autogpt-forge = { path = "../forge", develop = true }
# autogpt-forge = {git = "https://github.com/Significant-Gravitas/AutoGPT.git", subdirectory = "forge"}
beautifulsoup4 = "^4.12.2"
click = "*"
colorama = "^0.4.6"
distro = "^1.8.0"
fastapi = "^0.109.1"
gitpython = "^3.1.32"
google-api-python-client = "*"
hypercorn = "^0.14.4"
openai = "^1.7.2"
orjson = "^3.8.10"
Pillow = "*"
pydantic = "^2.7.2"
python-docx = "*"
python-dotenv = "^1.0.0"
pyyaml = "^6.0"
requests = "*"
sentry-sdk = "^1.40.4"
@ -55,9 +50,7 @@ pre-commit = "*"
pyright = "^1.1.364"
# Type stubs
types-beautifulsoup4 = "*"
types-colorama = "*"
types-Pillow = "*"
# Testing
pytest = "*"
@ -66,7 +59,6 @@ pytest-cov = "*"
pytest-mock = "*"
pytest-recording = "*"
pytest-xdist = "*"
vcrpy = { git = "https://github.com/Significant-Gravitas/vcrpy.git", rev = "master" }
[tool.poetry.group.build]
optional = true
@ -97,7 +89,3 @@ skip_glob = ["data"]
pythonVersion = "3.10"
exclude = ["data/**", "**/node_modules", "**/__pycache__", "**/.*"]
ignore = ["../forge/**"]
[tool.pytest.ini_options]
markers = ["slow", "requires_openai_api_key", "requires_huggingface_api_key"]

autogpt/tests/conftest.py

@ -20,7 +20,6 @@ from autogpt.app.main import _configure_llm_provider
pytest_plugins = [
"tests.integration.agent_factory",
"tests.vcr",
]

forge/conftest.py (new file)

@ -0,0 +1,41 @@
import uuid
from pathlib import Path
import pytest
from forge.file_storage.base import FileStorage, FileStorageConfiguration
from forge.file_storage.local import LocalFileStorage
pytest_plugins = [
"tests.vcr",
]
@pytest.fixture(scope="session", autouse=True)
def load_env_vars():
from dotenv import load_dotenv
load_dotenv()
@pytest.fixture()
def tmp_project_root(tmp_path: Path) -> Path:
return tmp_path
@pytest.fixture()
def app_data_dir(tmp_project_root: Path) -> Path:
dir = tmp_project_root / "data"
dir.mkdir(parents=True, exist_ok=True)
return dir
@pytest.fixture()
def storage(app_data_dir: Path) -> FileStorage:
storage = LocalFileStorage(
FileStorageConfiguration(
root=Path(f"{app_data_dir}/{str(uuid.uuid4())}"), restrict_to_root=False
)
)
storage.initialize()
return storage
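
For reference, a hypothetical test consuming these shared fixtures could look like the sketch below (the test name and assertion are illustrative and not part of this PR):

```python
import pytest
from forge.file_storage.base import FileStorage


@pytest.mark.asyncio
async def test_storage_fixture_roundtrip(storage: FileStorage):
    # `storage` is the isolated LocalFileStorage defined above; write_file is
    # a coroutine, and get_path resolves inside the per-test temp root.
    await storage.write_file("example.txt", "hello")
    assert storage.get_path("example.txt").read_text() == "hello"
```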


@ -12,9 +12,9 @@ from .models.task import StepRequestBody, Task, TaskListResponse, TaskRequestBod
@pytest.fixture
def agent(test_workspace: Path):
def agent(tmp_project_root: Path):
db = AgentDB("sqlite:///test.db")
config = FileStorageConfiguration(root=test_workspace)
config = FileStorageConfiguration(root=tmp_project_root)
workspace = LocalFileStorage(config)
return ProtocolAgent(db, workspace)


forge/components/code_executor/__init__.py

@ -1,8 +1,6 @@
from .code_executor import CodeExecutionError, CodeExecutorComponent
__all__ = [
"ALLOWLIST_CONTROL",
"DENYLIST_CONTROL",
"CodeExecutionError",
"CodeExecutorComponent",
]

forge/components/code_executor/test_code_executor.py

@ -4,13 +4,15 @@ import tempfile
from pathlib import Path
import pytest
from forge.components.code_executor.code_executor import (
from forge.file_storage.base import FileStorage
from forge.utils.exceptions import InvalidArgumentError, OperationNotAllowedError
from .code_executor import (
CodeExecutorComponent,
is_docker_available,
we_are_running_in_a_docker_container,
)
from forge.file_storage.base import FileStorage
from forge.utils.exceptions import InvalidArgumentError, OperationNotAllowedError
@pytest.fixture
@ -103,6 +105,22 @@ def test_execute_python_file_not_found(code_executor_component: CodeExecutorComp
code_executor_component.execute_python_file(Path("notexist.py"))
def test_execute_shell(
code_executor_component: CodeExecutorComponent, random_string: str
):
code_executor_component.config.shell_command_control = "allowlist"
code_executor_component.config.shell_allowlist = ["echo"]
result = code_executor_component.execute_shell(f"echo 'Hello {random_string}!'")
assert f"Hello {random_string}!" in result
def test_execute_shell_local_commands_not_allowed(
code_executor_component: CodeExecutorComponent, random_string: str
):
with pytest.raises(OperationNotAllowedError, match="not allowed"):
code_executor_component.execute_shell(f"echo 'Hello {random_string}!'")
def test_execute_shell_denylist_should_deny(
code_executor_component: CodeExecutorComponent, random_string: str
):

forge/components/file_manager/test_file_manager.py

@ -2,9 +2,11 @@ import os
from pathlib import Path
import pytest
from forge.agent.base import BaseAgentSettings
from forge.file_storage import FileStorage
from autogpt.agents.agent import Agent
from . import FileManagerComponent
@pytest.fixture()
@ -13,8 +15,13 @@ def file_content():
@pytest.fixture
def file_manager_component(agent: Agent):
return agent.file_manager
def file_manager_component(storage: FileStorage):
return FileManagerComponent(
storage,
BaseAgentSettings(
agent_id="TestAgent", name="TestAgent", description="Test Agent description"
),
)
@pytest.fixture()
@ -41,15 +48,14 @@ def test_nested_file(storage: FileStorage):
async def test_read_file(
test_file_path: Path,
file_content,
file_manager_component,
agent: Agent,
file_manager_component: FileManagerComponent,
):
await agent.file_manager.workspace.write_file(test_file_path.name, file_content)
await file_manager_component.workspace.write_file(test_file_path.name, file_content)
content = file_manager_component.read_file(test_file_path.name)
assert content.replace("\r", "") == file_content
def test_read_file_not_found(file_manager_component):
def test_read_file_not_found(file_manager_component: FileManagerComponent):
filename = "does_not_exist.txt"
with pytest.raises(FileNotFoundError):
file_manager_component.read_file(filename)
@ -57,12 +63,12 @@ def test_read_file_not_found(file_manager_component):
@pytest.mark.asyncio
async def test_write_to_file_relative_path(
test_file_name: Path, file_manager_component, agent: Agent
test_file_name: Path, file_manager_component: FileManagerComponent
):
new_content = "This is new content.\n"
await file_manager_component.write_to_file(test_file_name, new_content)
with open(
agent.file_manager.workspace.get_path(test_file_name), "r", encoding="utf-8"
file_manager_component.workspace.get_path(test_file_name), "r", encoding="utf-8"
) as f:
content = f.read()
assert content == new_content
@ -70,7 +76,7 @@ async def test_write_to_file_relative_path(
@pytest.mark.asyncio
async def test_write_to_file_absolute_path(
test_file_path: Path, file_manager_component
test_file_path: Path, file_manager_component: FileManagerComponent
):
new_content = "This is new content.\n"
await file_manager_component.write_to_file(test_file_path, new_content)
@ -80,18 +86,18 @@ async def test_write_to_file_absolute_path(
@pytest.mark.asyncio
async def test_list_files(file_manager_component, agent: Agent):
async def test_list_files(file_manager_component: FileManagerComponent):
# Create files A and B
file_a_name = "file_a.txt"
file_b_name = "file_b.txt"
test_directory = Path("test_directory")
await agent.file_manager.workspace.write_file(file_a_name, "This is file A.")
await agent.file_manager.workspace.write_file(file_b_name, "This is file B.")
await file_manager_component.workspace.write_file(file_a_name, "This is file A.")
await file_manager_component.workspace.write_file(file_b_name, "This is file B.")
# Create a subdirectory and place a copy of file_a in it
agent.file_manager.workspace.make_dir(test_directory)
await agent.file_manager.workspace.write_file(
file_manager_component.workspace.make_dir(test_directory)
await file_manager_component.workspace.write_file(
test_directory / file_a_name, "This is file A in the subdirectory."
)
@ -101,10 +107,10 @@ async def test_list_files(file_manager_component, agent: Agent):
assert os.path.join(test_directory, file_a_name) in files
# Clean up
agent.file_manager.workspace.delete_file(file_a_name)
agent.file_manager.workspace.delete_file(file_b_name)
agent.file_manager.workspace.delete_file(test_directory / file_a_name)
agent.file_manager.workspace.delete_dir(test_directory)
file_manager_component.workspace.delete_file(file_a_name)
file_manager_component.workspace.delete_file(file_b_name)
file_manager_component.workspace.delete_file(test_directory / file_a_name)
file_manager_component.workspace.delete_dir(test_directory)
# Case 2: Search for a file that does not exist and make sure we don't throw
non_existent_file = "non_existent_file.txt"

forge/components/git_operations/test_git_operations.py

@ -1,11 +1,11 @@
import pytest
from forge.components.git_operations import GitOperationsComponent
from forge.file_storage.base import FileStorage
from forge.utils.exceptions import CommandExecutionError
from git.exc import GitCommandError
from git.repo.base import Repo
from autogpt.agents.agent import Agent
from forge.file_storage.base import FileStorage
from forge.utils.exceptions import CommandExecutionError
from . import GitOperationsComponent
@pytest.fixture
@ -14,15 +14,14 @@ def mock_clone_from(mocker):
@pytest.fixture
def git_ops_component(agent: Agent):
return agent.git_ops
def git_ops_component():
return GitOperationsComponent()
def test_clone_auto_gpt_repository(
git_ops_component: GitOperationsComponent,
storage: FileStorage,
mock_clone_from,
agent: Agent,
):
mock_clone_from.return_value = None
@ -46,7 +45,6 @@ def test_clone_repository_error(
git_ops_component: GitOperationsComponent,
storage: FileStorage,
mock_clone_from,
agent: Agent,
):
url = "https://github.com/this-repository/does-not-exist.git"
clone_path = storage.get_path("does-not-exist")

forge/components/image_gen/test_image_gen.py

@ -4,17 +4,22 @@ from pathlib import Path
from unittest.mock import patch
import pytest
from PIL import Image
from pydantic import SecretStr, ValidationError
from forge.components.image_gen import ImageGeneratorComponent
from forge.components.image_gen.image_gen import ImageGeneratorConfiguration
from forge.file_storage.base import FileStorage
from forge.llm.providers.openai import OpenAICredentials
from PIL import Image
from pydantic import SecretStr
@pytest.fixture
def image_gen_component(storage: FileStorage):
cred = OpenAICredentials.from_env()
try:
cred = OpenAICredentials.from_env()
except ValidationError:
cred = OpenAICredentials(api_key=SecretStr("test"))
return ImageGeneratorComponent(storage, openai_credentials=cred)
@ -34,7 +39,6 @@ def image_size(request):
return request.param
@pytest.mark.requires_openai_api_key
@pytest.mark.vcr
def test_dalle(
image_gen_component: ImageGeneratorComponent,
@ -52,7 +56,6 @@ def test_dalle(
reason="The image is too big to be put in a cassette for a CI pipeline. "
"We're looking into a solution."
)
@pytest.mark.requires_huggingface_api_key
@pytest.mark.parametrize(
"image_model",
["CompVis/stable-diffusion-v1-4", "stabilityai/stable-diffusion-2-1"],

forge/components/web/test_search.py

@ -1,19 +1,23 @@
import json
import pytest
from forge.components.web.search import WebSearchComponent
from forge.utils.exceptions import ConfigurationError
from googleapiclient.errors import HttpError
from httplib2 import Response
from pydantic import SecretStr
from autogpt.agents.agent import Agent
from forge.utils.exceptions import ConfigurationError
from . import WebSearchComponent
@pytest.fixture
def web_search_component(agent: Agent):
agent.web_search.config.google_api_key = SecretStr("test")
agent.web_search.config.google_custom_search_engine_id = SecretStr("test")
return agent.web_search
def web_search_component():
component = WebSearchComponent()
if component.config.google_api_key is None:
component.config.google_api_key = SecretStr("test")
if component.config.google_custom_search_engine_id is None:
component.config.google_custom_search_engine_id = SecretStr("test")
return component
@pytest.mark.parametrize(
@ -134,16 +138,11 @@ def test_google_official_search_errors(
error_msg,
web_search_component: WebSearchComponent,
):
class resp:
def __init__(self, _status, _reason):
self.status = _status
self.reason = _reason
response_content = {
"error": {"code": http_code, "message": error_msg, "reason": "backendError"}
}
error = HttpError(
resp=resp(http_code, error_msg),
resp=Response({"status": http_code, "reason": error_msg}),
content=str.encode(json.dumps(response_content)),
uri="https://www.googleapis.com/customsearch/v1?q=invalid+query&cx",
)

forge/components/web/test_selenium.py

@ -1,19 +1,20 @@
import pytest
from forge.components.web.selenium import BrowsingError, WebSeleniumComponent
from pathlib import Path
from autogpt.agents.agent import Agent
import pytest
from forge.llm.providers.multi import MultiProvider
from . import BrowsingError, WebSeleniumComponent
@pytest.fixture
def web_selenium_component(agent: Agent):
return agent.web_selenium
def web_selenium_component(app_data_dir: Path):
return WebSeleniumComponent(MultiProvider(), app_data_dir)
@pytest.mark.vcr
@pytest.mark.requires_openai_api_key
@pytest.mark.asyncio
async def test_browse_website_nonexistent_url(
web_selenium_component: WebSeleniumComponent, cached_openai_client: None
web_selenium_component: WebSeleniumComponent,
):
url = "https://auto-gpt-thinks-this-website-does-not-exist.com"
question = "How to execute a barrel roll"


@ -1,8 +0,0 @@
from pathlib import Path
import pytest
@pytest.fixture()
def test_workspace(tmp_path: Path) -> Path:
return tmp_path

forge/file_storage/test_gcs_file_storage.py

@ -4,11 +4,12 @@ from pathlib import Path
import pytest
import pytest_asyncio
from forge.file_storage.gcs import GCSFileStorage, GCSFileStorageConfiguration
from google.auth.exceptions import GoogleAuthError
from google.cloud import storage
from google.cloud.exceptions import NotFound
from .gcs import GCSFileStorage, GCSFileStorageConfiguration
try:
storage.Client()
except GoogleAuthError:

forge/file_storage/test_local_file_storage.py

@ -1,7 +1,8 @@
from pathlib import Path
import pytest
from forge.file_storage.local import FileStorageConfiguration, LocalFileStorage
from .local import FileStorageConfiguration, LocalFileStorage
_ACCESSIBLE_PATHS = [
Path("."),

forge/file_storage/test_s3_file_storage.py

@ -5,7 +5,8 @@ from pathlib import Path
import pytest
import pytest_asyncio
from botocore.exceptions import ClientError
from forge.file_storage.s3 import S3FileStorage, S3FileStorageConfiguration
from .s3 import S3FileStorage, S3FileStorageConfiguration
if not (os.getenv("S3_ENDPOINT_URL") and os.getenv("AWS_ACCESS_KEY_ID")):
pytest.skip("S3 environment variables are not set", allow_module_level=True)

forge/json/test_parsing.py

@ -1,7 +1,8 @@
import json
import pytest
from forge.json.parsing import json_loads
from .parsing import json_loads
_JSON_FIXABLE: list[tuple[str, str]] = [
# Missing comma

forge/logging/test_utils.py

@ -1,5 +1,6 @@
import pytest
from forge.logging.utils import remove_color_codes
from .utils import remove_color_codes
@pytest.mark.parametrize(


forge/utils/test_file_operations.py

@ -9,7 +9,8 @@ import docx
import pytest
import yaml
from bs4 import BeautifulSoup
from forge.utils.file_operations import decode_textual_file, is_file_binary_fn
from .file_operations import decode_textual_file, is_file_binary_fn
logger = logging.getLogger(__name__)

forge/utils/test_url_validator.py

@ -1,7 +1,8 @@
import pytest
from forge.utils.url_validator import validate_url
from pytest import raises
from .url_validator import validate_url
@validate_url
def dummy_method(url):

forge/poetry.lock (generated)

@ -4986,6 +4986,42 @@ pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
[[package]]
name = "pytest-mock"
version = "3.14.0"
description = "Thin-wrapper around the mock package for easier use with pytest"
optional = false
python-versions = ">=3.8"
files = [
{file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
{file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
]
[package.dependencies]
pytest = ">=6.2.5"
[package.extras]
dev = ["pre-commit", "pytest-asyncio", "tox"]
[[package]]
name = "pytest-recording"
version = "0.13.1"
description = "A pytest plugin that allows you recording of network interactions via VCR.py"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest_recording-0.13.1-py3-none-any.whl", hash = "sha256:e5c75feb2593eb4ed9362182c6640bfe19004204bf9a6082d62c91b5fdb50a3e"},
{file = "pytest_recording-0.13.1.tar.gz", hash = "sha256:1265d679f39263f115968ec01c2a3bfed250170fd1b0d9e288970b2e4a13737a"},
]
[package.dependencies]
pytest = ">=3.5.0"
vcrpy = ">=2.0.1"
[package.extras]
dev = ["pytest-recording[tests]"]
tests = ["pytest-httpbin", "pytest-mock", "requests", "werkzeug (==3.0.1)"]
[[package]]
name = "python-dateutil"
version = "2.8.2"
@ -6504,6 +6540,26 @@ files = [
docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"]
[[package]]
name = "vcrpy"
version = "5.1.0"
description = "Automatically mock your HTTP interactions to simplify and speed up testing"
optional = false
python-versions = ">=3.8"
files = []
develop = false
[package.dependencies]
PyYAML = "*"
wrapt = "*"
yarl = "*"
[package.source]
type = "git"
url = "https://github.com/Significant-Gravitas/vcrpy.git"
reference = "master"
resolved_reference = "bfd15f9d06a516138b673cb481547f3352d9cc43"
[[package]]
name = "virtualenv"
version = "20.25.0"
@ -7029,4 +7085,4 @@ benchmark = ["agbenchmark"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "5b8cca9caced2687d88fc61dc263054f15c49f2daa1560fa4d94fb5b38d461aa"
content-hash = "7523abd672967cbe924f045a00bf519ee08c8537fdf2f2191d2928201497d7b7"

forge/pyproject.toml

@ -76,7 +76,10 @@ types-requests = "^2.31.0.2"
pytest = "^7.4.0"
pytest-asyncio = "^0.23.3"
pytest-cov = "^5.0.0"
pytest-mock = "*"
pytest-recording = "*"
mock = "^5.1.0"
vcrpy = { git = "https://github.com/Significant-Gravitas/vcrpy.git", rev = "master" }
[build-system]
@ -101,3 +104,4 @@ pythonVersion = "3.10"
[tool.pytest.ini_options]
pythonpath = ["forge"]
testpaths = ["forge", "tests"]
markers = ["slow"]