Skip to content

Commit

Permalink
feat(rnd): AutoGPT Agent wrapper for Agent Server (#7365)
Browse files Browse the repository at this point in the history
- Add `autogpt` and `forge` dependency to the `autogpt_server`
- Add `AutoGPTAgentBlock` that initializes and runs a single agent loop on execution
- Add `BlockAgent` that inherits from `autogpt` `Agent` and is a thin extension of the agent that allows disabling components
- Add `OutputComponent` that adds `output` command for the agent
  • Loading branch information
kcze committed Jul 13, 2024
1 parent 0b9f3be commit 6550bdc
Show file tree
Hide file tree
Showing 10 changed files with 4,566 additions and 123 deletions.
9 changes: 8 additions & 1 deletion .github/workflows/autogpt-server-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,7 @@ jobs:
env:
CI: true
PLAIN_OUTPUT: True
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

# - name: Upload coverage reports to Codecov
# uses: codecov/codecov-action@v4
Expand Down Expand Up @@ -210,12 +211,18 @@ jobs:
WINDOWS_COMMAND: "poetry run poe dist_msi"
LINUX_COMMAND: "poetry run poe dist_appimage"

- name: Zip the .app directory
if: runner.os == 'macOS'
working-directory: ${{ runner.temp }}
run: |
zip -r autogptserver-app-${{ matrix.platform-os }}.app.zip /Users/runner/work/AutoGPT/AutoGPT/rnd/autogpt_server/build/*.app
# break this into separate steps, each with its own name that matches the file
- name: Upload App artifact
uses: actions/upload-artifact@v4
with:
name: autogptserver-app-${{ matrix.platform-os }}
path: /Users/runner/work/AutoGPT/AutoGPT/rnd/autogpt_server/build/*.app
path: /Users/runner/work/_temp/autogptserver-app-${{ matrix.platform-os }}.app.zip

- name: Upload dmg artifact
uses: actions/upload-artifact@v4
Expand Down
8 changes: 7 additions & 1 deletion autogpt/autogpt/agents/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,13 +174,19 @@ async def propose_action(self) -> OneShotAgentActionProposal:
# Get messages
messages = await self.run_pipeline(MessageProvider.get_messages)

include_os_info = (
self.code_executor.config.execute_local_commands
if hasattr(self, "code_executor")
else False
)

prompt: ChatPrompt = self.prompt_strategy.build_prompt(
messages=messages,
task=self.state.task,
ai_profile=self.state.ai_profile,
ai_directives=directives,
commands=function_specs_from_commands(self.commands),
include_os_info=self.code_executor.config.execute_local_commands,
include_os_info=include_os_info,
)

logger.debug(f"Executing prompt:\n{dump_prompt(prompt)}")
Expand Down
5 changes: 3 additions & 2 deletions autogpt/poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

69 changes: 34 additions & 35 deletions forge/poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion forge/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ gTTS = "^2.3.1"
jinja2 = "^3.1.2"
jsonschema = "*"
litellm = "^1.17.9"
numpy = ">=1.26.0,<2.0.0"
openai = "^1.7.2"
Pillow = "*"
playsound = "~1.2.2"
Expand All @@ -51,7 +52,7 @@ spacy = "^3.0.0"
tenacity = "^8.2.2"
tiktoken = ">=0.7.0,<1.0.0"
toml = "^0.10.2"
uvicorn = ">=0.23.2,<1"
uvicorn = { extras = ["standard"], version = ">=0.23.2,<1" }
watchdog = "4.0.0"
webdriver-manager = "^4.0.1"

Expand Down
4 changes: 2 additions & 2 deletions rnd/autogpt_server/autogpt_server/blocks/__init__.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
from autogpt_server.blocks import sample, reddit, text, ai, wikipedia, discord
from autogpt_server.blocks import agent, sample, reddit, text, ai, wikipedia, discord
from autogpt_server.data.block import Block

AVAILABLE_BLOCKS = {
block.id: block
for block in [v() for v in Block.__subclasses__()]
}

__all__ = ["ai", "sample", "reddit", "text", "AVAILABLE_BLOCKS", "wikipedia", "discord"]
__all__ = ["agent", "ai", "sample", "reddit", "text", "AVAILABLE_BLOCKS", "wikipedia", "discord"]
172 changes: 172 additions & 0 deletions rnd/autogpt_server/autogpt_server/blocks/agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,172 @@
from __future__ import annotations

import asyncio
import logging
from pathlib import Path
from typing import TYPE_CHECKING, Iterator

from autogpt.agents.agent import Agent, AgentSettings
from autogpt.app.config import ConfigBuilder
from autogpt_server.data.block import Block, BlockFieldSecret, BlockOutput, BlockSchema
from forge.agent.components import AgentComponent
from forge.agent.protocols import (
CommandProvider,
)
from forge.command import command
from forge.command.command import Command
from forge.file_storage import FileStorageBackendName, get_storage
from forge.file_storage.base import FileStorage
from forge.llm.providers import (
MultiProvider,
)
from forge.llm.providers.openai import OpenAICredentials, OpenAIProvider
from forge.llm.providers.schema import ModelProviderName
from forge.models.json_schema import JSONSchema
from pydantic import Field, SecretStr

if TYPE_CHECKING:
from autogpt.app.config import AppConfig

logger = logging.getLogger(__name__)


class BlockAgentSettings(AgentSettings):
    """Agent settings extended with an opt-in list of component class names.

    Components whose class name is not listed here (except ``SystemComponent``)
    are stripped from the agent by ``BlockAgent.__init__``.
    """

    # Class names (e.g. "OutputComponent") of agent components to keep enabled.
    enabled_components: list[str] = Field(default_factory=list)


class OutputComponent(CommandProvider):
    """Agent component that contributes a single `output` command so the agent
    can return its final result to the wrapping block."""

    def get_commands(self) -> Iterator[Command]:
        # Expose exactly one command: `output`.
        yield self.output

    @command(
        parameters={
            "output": JSONSchema(
                type=JSONSchema.Type.STRING,
                description="Output data to be returned.",
                required=True,
            ),
        },
    )
    def output(self, output: str) -> str:
        # NOTE: the docstring below is runtime behavior — forge's @command uses
        # it as the command description presented to the LLM; do not edit it
        # casually.
        """Use this to output the result."""
        return output


class BlockAgent(Agent):
    """Thin `Agent` subclass that attaches an `OutputComponent` and removes
    every component not explicitly enabled in the settings.

    `SystemComponent` is always kept, since the agent cannot function
    without it.
    """

    def __init__(
        self,
        settings: BlockAgentSettings,
        llm_provider: MultiProvider,
        file_storage: FileStorage,
        app_config: AppConfig,
    ):
        super().__init__(settings, llm_provider, file_storage, app_config)

        self.output = OutputComponent()

        # Components to keep: whatever the settings enable, plus the
        # always-required SystemComponent.
        keep = set(settings.enabled_components) | {"SystemComponent"}

        # Snapshot the attribute names first — we mutate __dict__ while
        # iterating over the copy.
        for name in list(self.__dict__):
            component = getattr(self, name)
            if isinstance(component, AgentComponent) and type(component).__name__ not in keep:
                delattr(self, name)


class AutoGPTAgentBlock(Block):
    """Block wrapping a single AutoGPT agent cycle.

    On execution it builds a `BlockAgent` from the given task and input,
    runs one propose/execute loop, and yields the stringified result.
    """

    class Input(BlockSchema):
        # Natural-language task for the agent to perform.
        task: str
        # Input data handed to the agent alongside the task.
        input: str
        openai_api_key: BlockFieldSecret = BlockFieldSecret(key="openai_api_key")
        # Class names of agent components to enable; only OutputComponent by default.
        enabled_components: list[str] = Field(
            default_factory=lambda: [OutputComponent.__name__]
        )
        # Command names to remove from the agent (e.g. "finish").
        disabled_commands: list[str] = Field(default_factory=list)
        # When True, disables "big brain" mode (uses the faster/cheaper model).
        fast_mode: bool = False

    class Output(BlockSchema):
        # Whatever the agent's `output` command (or final execution) produced.
        result: str

    def __init__(self):
        super().__init__(
            id="d2e2ecd2-9ae6-422d-8dfe-ceca500ce6a6",
            input_schema=AutoGPTAgentBlock.Input,
            output_schema=AutoGPTAgentBlock.Output,
            test_input={
                "task": "Make calculations and use output command to output the result.",
                "input": "5 + 3",
                "openai_api_key": "openai_api_key",
                "enabled_components": [OutputComponent.__name__],
                "disabled_commands": ["finish"],
                "fast_mode": True,
            },
            test_output=[
                ("result", "8"),
            ],
            test_mock={
                "get_provider": lambda _: MultiProvider(),
                "get_result": lambda _: "8",
            },
        )

    @staticmethod
    def get_provider(openai_api_key: str) -> MultiProvider:
        """Build a `MultiProvider` preloaded with an OpenAI provider using the
        given API key.

        Args:
            openai_api_key: Plaintext OpenAI API key.

        Returns:
            A `MultiProvider` whose OpenAI slot is pre-initialized.
        """
        settings = OpenAIProvider.default_settings.model_copy()
        settings.credentials = OpenAICredentials(api_key=SecretStr(openai_api_key))
        openai_provider = OpenAIProvider(settings=settings)

        multi_provider = MultiProvider()
        # HACK: Add OpenAI provider to the multi provider with api key
        multi_provider._provider_instances[ModelProviderName.OPENAI] = openai_provider

        return multi_provider

    @staticmethod
    def get_result(agent: BlockAgent) -> str:
        """Run one propose/execute cycle on the agent and return the result.

        `propose_action` is retried up to 3 times (LLM calls can fail
        transiently); the last failure is re-raised.

        Raises:
            Exception: whatever `propose_action` raised on the final attempt.
        """
        max_attempts = 3
        proposal = None
        for attempt in range(1, max_attempts + 1):
            try:
                proposal = asyncio.run(agent.propose_action())
                break
            except Exception:
                if attempt == max_attempts:
                    # Out of retries: propagate with the original traceback.
                    raise
                # Log instead of silently swallowing the failure.
                logger.exception(
                    "propose_action failed (attempt %d/%d); retrying",
                    attempt,
                    max_attempts,
                )

        result = asyncio.run(agent.execute(proposal))

        return str(result)

    def run(self, input_data: Input) -> BlockOutput:
        """Execute the wrapped agent for one cycle and yield ("result", str)."""
        # Set up configuration from the environment (.env / env vars).
        config = ConfigBuilder.build_config_from_env()
        # Disable commands requested by the caller (e.g. "finish").
        config.disabled_commands.extend(input_data.disabled_commands)

        # File storage: only restrict to the workspace root when using local
        # storage and the config allows escaping it.
        local = config.file_storage_backend == FileStorageBackendName.LOCAL
        restrict_to_root = not local or config.restrict_to_workspace
        file_storage = get_storage(
            config.file_storage_backend,
            root_path=Path("data"),
            restrict_to_root=restrict_to_root,
        )
        file_storage.initialize()

        # Agent state: task and input are combined into one task prompt.
        state = BlockAgentSettings(
            agent_id="TemporaryAgentID",
            name="WrappedAgent",
            description="Wrapped agent for the Agent Server.",
            task=f"Your task: {input_data.task}\n"
            f"Input data: {input_data.input}",
            enabled_components=input_data.enabled_components,
        )
        # fast_mode turns OFF big-brain mode (smarter but slower model).
        state.config.big_brain = not input_data.fast_mode
        provider = self.get_provider(input_data.openai_api_key.get())

        agent = BlockAgent(state, provider, file_storage, config)

        result = self.get_result(agent)

        yield "result", result
Empty file modified rnd/autogpt_server/autogpt_server/cli.py
100644 → 100755
Empty file.
Loading

0 comments on commit 6550bdc

Please sign in to comment.