Skip to content

Commit

Permalink
Merge branch 'master' into reinier/open-591-add-claude-3-support
Browse files Browse the repository at this point in the history
  • Loading branch information
Pwuts committed Apr 22, 2024
2 parents 2aa4ca5 + 6ff0267 commit a60854e
Show file tree
Hide file tree
Showing 10 changed files with 70 additions and 121 deletions.
11 changes: 6 additions & 5 deletions autogpts/autogpt/agbenchmark_config/benchmarks.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,13 @@ def run_specific_agent(task: str, continuous_mode: bool = False) -> None:


def bootstrap_agent(task: str, continuous_mode: bool) -> Agent:
config = ConfigBuilder.build_config_from_env()
config.logging.level = logging.DEBUG
config.logging.log_dir = LOG_DIR
config.logging.plain_console_output = True
configure_logging(**config.logging.dict())
configure_logging(
level=logging.DEBUG,
log_dir=LOG_DIR,
plain_console_output=True,
)

config = ConfigBuilder.build_config_from_env()
config.continuous_mode = continuous_mode
config.continuous_limit = 20
config.noninteractive_mode = True
Expand Down
22 changes: 0 additions & 22 deletions autogpts/autogpt/autogpt/app/configurator.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
from autogpt.config import Config
from autogpt.config.config import GPT_3_MODEL, GPT_4_MODEL
from autogpt.core.resource.model_providers import ModelName, MultiProvider
from autogpt.logs.config import LogFormatName
from autogpt.logs.helpers import request_user_double_check
from autogpt.memory.vector import get_supported_memory_backends

Expand All @@ -26,11 +25,6 @@ async def apply_overrides_to_config(
ai_settings_file: Optional[Path] = None,
prompt_settings_file: Optional[Path] = None,
skip_reprompt: bool = False,
speak: bool = False,
debug: bool = False,
log_level: Optional[str] = None,
log_format: Optional[str] = None,
log_file_format: Optional[str] = None,
gpt3only: bool = False,
gpt4only: bool = False,
memory_type: Optional[str] = None,
Expand Down Expand Up @@ -60,19 +54,6 @@ async def apply_overrides_to_config(
skips_news (bool): Whether to suppress the output of latest news on startup.
"""
config.continuous_mode = False
config.tts_config.speak_mode = False

# Set log level
if debug:
config.logging.level = logging.DEBUG
elif log_level and type(_level := logging.getLevelName(log_level.upper())) is int:
config.logging.level = _level

# Set log format
if log_format and log_format in LogFormatName._value2member_map_:
config.logging.log_format = LogFormatName(log_format)
if log_file_format and log_file_format in LogFormatName._value2member_map_:
config.logging.log_file_format = LogFormatName(log_file_format)

if continuous:
logger.warning(
Expand All @@ -89,9 +70,6 @@ async def apply_overrides_to_config(
if continuous_limit and not continuous:
raise click.UsageError("--continuous-limit can only be used with --continuous")

if speak:
config.tts_config.speak_mode = True

# Set the default LLM models
if gpt3only:
# --gpt3only should always use gpt-3.5-turbo, despite user's FAST_LLM config
Expand Down
21 changes: 10 additions & 11 deletions autogpts/autogpt/autogpt/app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,8 +95,13 @@ async def run_auto_gpt(
file_storage.initialize()

# Set up logging module
if speak:
config.tts_config.speak_mode = True
configure_logging(
**config.logging.dict(),
debug=debug,
level=log_level,
log_format=log_format,
log_file_format=log_file_format,
tts_config=config.tts_config,
)

Expand All @@ -110,11 +115,6 @@ async def run_auto_gpt(
ai_settings_file=ai_settings,
prompt_settings_file=prompt_settings,
skip_reprompt=skip_reprompt,
speak=speak,
debug=debug,
log_level=log_level,
log_format=log_format,
log_file_format=log_file_format,
gpt3only=gpt3only,
gpt4only=gpt4only,
browser_name=browser_name,
Expand Down Expand Up @@ -380,7 +380,10 @@ async def run_auto_gpt_server(

# Set up logging module
configure_logging(
**config.logging.dict(),
debug=debug,
level=log_level,
log_format=log_format,
log_file_format=log_file_format,
tts_config=config.tts_config,
)

Expand All @@ -390,10 +393,6 @@ async def run_auto_gpt_server(
await apply_overrides_to_config(
config=config,
prompt_settings_file=prompt_settings,
debug=debug,
log_level=log_level,
log_format=log_format,
log_file_format=log_file_format,
gpt3only=gpt3only,
gpt4only=gpt4only,
browser_name=browser_name,
Expand Down
4 changes: 1 addition & 3 deletions autogpts/autogpt/autogpt/config/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
OpenAIModelName,
)
from autogpt.file_storage import FileStorageBackendName
from autogpt.logs.config import LoggingConfig
from autogpt.plugins.plugins_config import PluginsConfig
from autogpt.speech import TTSConfig

Expand Down Expand Up @@ -60,7 +59,6 @@ class Config(SystemSettings, arbitrary_types_allowed=True):

# TTS configuration
tts_config: TTSConfig = TTSConfig()
logging: LoggingConfig = LoggingConfig()

# File storage
file_storage_backend: FileStorageBackendName = UserConfigurable(
Expand All @@ -81,7 +79,7 @@ class Config(SystemSettings, arbitrary_types_allowed=True):

# Model configuration
fast_llm: ModelName = UserConfigurable(
default=OpenAIModelName.GPT3_v4,
default=OpenAIModelName.GPT3,
from_env="FAST_LLM",
)
smart_llm: ModelName = UserConfigurable(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
ModelProviderConfiguration,
ModelProviderCredentials,
ModelProviderName,
ModelProviderService,
ModelProviderSettings,
ModelTokenizer,
ToolResultMessage,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,6 @@
ModelProviderConfiguration,
ModelProviderCredentials,
ModelProviderName,
ModelProviderService,
ModelProviderSettings,
ModelTokenizer,
)
Expand Down
1 change: 0 additions & 1 deletion autogpts/autogpt/autogpt/file_storage/s3.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
import contextlib
import inspect
import logging
import os
from io import IOBase, TextIOWrapper
from pathlib import Path
from typing import TYPE_CHECKING, Literal, Optional
Expand Down
58 changes: 44 additions & 14 deletions autogpts/autogpt/autogpt/logs/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,28 +75,58 @@ class LoggingConfig(SystemConfiguration):


def configure_logging(
level: int = logging.INFO,
log_dir: Path = LOG_DIR,
log_format: Optional[LogFormatName] = None,
log_file_format: Optional[LogFormatName] = None,
plain_console_output: bool = False,
debug: bool = False,
level: Optional[int | str] = None,
log_dir: Optional[Path] = None,
log_format: Optional[LogFormatName | str] = None,
log_file_format: Optional[LogFormatName | str] = None,
plain_console_output: Optional[bool] = None,
tts_config: Optional[TTSConfig] = None,
) -> None:
"""Configure the native logging module.
"""Configure the native logging module, based on the environment config and any
specified overrides.
Arguments override values specified in the environment.
Should be usable as `configure_logging(**config.logging.dict())`, where
`config.logging` is a `LoggingConfig` object.
"""

# Auto-adjust default log format based on log level
log_format = log_format or (
LogFormatName.SIMPLE if level != logging.DEBUG else LogFormatName.DEBUG
if debug and level:
raise ValueError("Only one of either 'debug' and 'level' arguments may be set")

# Parse arguments
if isinstance(level, str):
if type(_level := logging.getLevelName(level.upper())) is int:
level = _level
else:
raise ValueError(f"Unknown log level '{level}'")
if isinstance(log_format, str):
if log_format in LogFormatName._value2member_map_:
log_format = LogFormatName(log_format)
elif not isinstance(log_format, LogFormatName):
raise ValueError(f"Unknown log format '{log_format}'")
if isinstance(log_file_format, str):
if log_file_format in LogFormatName._value2member_map_:
log_file_format = LogFormatName(log_file_format)
elif not isinstance(log_file_format, LogFormatName):
raise ValueError(f"Unknown log format '{log_format}'")

config = LoggingConfig.from_env()

# Aggregate arguments + env config
level = logging.DEBUG if debug else level or config.level
log_dir = log_dir or config.log_dir
log_format = log_format or (LogFormatName.DEBUG if debug else config.log_format)
log_file_format = log_file_format or log_format or config.log_file_format
plain_console_output = (
plain_console_output
if plain_console_output is not None
else config.plain_console_output
)
log_file_format = log_file_format or log_format

structured_logging = log_format == LogFormatName.STRUCTURED

if structured_logging:
# Structured logging is used for cloud environments,
# where logging to a file makes no sense.
if log_format == LogFormatName.STRUCTURED:
plain_console_output = True
log_file_format = None

Expand Down
8 changes: 5 additions & 3 deletions autogpts/autogpt/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,8 +79,6 @@ def config(
config.plugins_dir = "tests/unit/data/test_plugins"
config.plugins_config_file = temp_plugins_config_file

config.logging.log_dir = Path(__file__).parent / "logs"
config.logging.plain_console_output = True
config.noninteractive_mode = True

# avoid circular dependency
Expand All @@ -96,7 +94,11 @@ def config(

@pytest.fixture(scope="session")
def setup_logger(config: Config):
    """Session-wide fixture: configure the logging module once for the test run.

    Forces debug-level output, routes log files into a ``logs`` directory next
    to this conftest, and disables fancy console formatting.
    """
    log_directory = Path(__file__).parent / "logs"
    configure_logging(
        debug=True,
        log_dir=log_directory,
        plain_console_output=True,
    )


@pytest.fixture
Expand Down
64 changes: 4 additions & 60 deletions autogpts/autogpt/tests/unit/test_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,62 +30,6 @@ def test_initial_values(config: Config) -> None:
assert config.smart_llm.startswith("gpt-4")


def test_set_continuous_mode(config: Config) -> None:
    """Verify that assigning ``continuous_mode`` is reflected on the config."""
    # Remember the incoming value so the shared fixture can be restored.
    saved_mode = config.continuous_mode

    config.continuous_mode = True
    assert config.continuous_mode is True

    # Put the original value back for subsequent tests.
    config.continuous_mode = saved_mode


def test_set_speak_mode(config: Config) -> None:
    """Verify that ``tts_config.speak_mode`` can be toggled via assignment."""
    # Remember the incoming value so the shared fixture can be restored.
    saved_mode = config.tts_config.speak_mode

    config.tts_config.speak_mode = True
    assert config.tts_config.speak_mode is True

    # Put the original value back for subsequent tests.
    config.tts_config.speak_mode = saved_mode


def test_set_fast_llm(config: Config) -> None:
    """Verify that assigning ``fast_llm`` updates the config attribute."""
    # Remember the incoming value so the shared fixture can be restored.
    saved_model = config.fast_llm

    config.fast_llm = "gpt-3.5-turbo-test"
    assert config.fast_llm == "gpt-3.5-turbo-test"

    # Put the original value back for subsequent tests.
    config.fast_llm = saved_model


def test_set_smart_llm(config: Config) -> None:
    """Verify that assigning ``smart_llm`` updates the config attribute."""
    # Remember the incoming value so the shared fixture can be restored.
    saved_model = config.smart_llm

    config.smart_llm = "gpt-4-test"
    assert config.smart_llm == "gpt-4-test"

    # Put the original value back for subsequent tests.
    config.smart_llm = saved_model


@pytest.mark.asyncio
@mock.patch("openai.resources.models.AsyncModels.list")
async def test_fallback_to_gpt3_if_gpt4_not_available(
Expand All @@ -94,8 +38,8 @@ async def test_fallback_to_gpt3_if_gpt4_not_available(
"""
Test if models update to gpt-3.5-turbo if gpt-4 is not available.
"""
config.fast_llm = "gpt-4"
config.smart_llm = "gpt-4"
config.fast_llm = GPT_4_MODEL
config.smart_llm = GPT_4_MODEL

mock_list_models.return_value = asyncio.Future()
mock_list_models.return_value.set_result(
Expand All @@ -111,8 +55,8 @@ async def test_fallback_to_gpt3_if_gpt4_not_available(
gpt4only=False,
)

assert config.fast_llm == "gpt-3.5-turbo"
assert config.smart_llm == "gpt-3.5-turbo"
assert config.fast_llm == GPT_3_MODEL
assert config.smart_llm == GPT_3_MODEL


def test_missing_azure_config(config: Config) -> None:
Expand Down

0 comments on commit a60854e

Please sign in to comment.