Describe the bug
Open Interpreter supports multiple local model providers.
[?] Select a provider:
[?] Select a model:
Loading llama3.1...
Traceback (most recent call last):
File "/usr/local/bin/interpreter", line 8, in
sys.exit(main())
^^^^^^
File "/usr/local/lib/python3.12/dist-packages/interpreter/terminal_interface/start_terminal_interface.py", line 612, in main
start_terminal_interface(interpreter)
File "/usr/local/lib/python3.12/dist-packages/interpreter/terminal_interface/start_terminal_interface.py", line 471, in start_terminal_interface
interpreter = profile(
^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/interpreter/terminal_interface/profiles/profiles.py", line 64, in profile
return apply_profile(interpreter, profile, profile_path)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/interpreter/terminal_interface/profiles/profiles.py", line 148, in apply_profile
exec(profile["start_script"], scope, scope)
File "", line 1, in
File "/usr/local/lib/python3.12/dist-packages/interpreter/core/core.py", line 145, in local_setup
self = local_setup(self)
^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/interpreter/terminal_interface/local_setup.py", line 314, in local_setup
interpreter.computer.ai.chat("ping")
File "/usr/local/lib/python3.12/dist-packages/interpreter/core/computer/ai/ai.py", line 134, in chat
for chunk in self.computer.interpreter.llm.run(messages):
File "/usr/local/lib/python3.12/dist-packages/interpreter/core/llm/llm.py", line 86, in run
self.load()
File "/usr/local/lib/python3.12/dist-packages/interpreter/core/llm/llm.py", line 397, in load
self.interpreter.computer.ai.chat("ping")
File "/usr/local/lib/python3.12/dist-packages/interpreter/core/computer/ai/ai.py", line 134, in chat
for chunk in self.computer.interpreter.llm.run(messages):
File "/usr/local/lib/python3.12/dist-packages/interpreter/core/llm/llm.py", line 322, in run
yield from run_tool_calling_llm(self, params)
File "/usr/local/lib/python3.12/dist-packages/interpreter/core/llm/run_tool_calling_llm.py", line 178, in run_tool_calling_llm
for chunk in llm.completions(**request_params):
File "/usr/local/lib/python3.12/dist-packages/interpreter/core/llm/llm.py", line 466, in fixed_litellm_completions
raise first_error # If all attempts fail, raise the first error
^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/interpreter/core/llm/llm.py", line 443, in fixed_litellm_completions
yield from litellm.completion(**params)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/litellm/utils.py", line 1213, in wrapper
raise e
File "/usr/local/lib/python3.12/dist-packages/litellm/utils.py", line 1091, in wrapper
result = original_function(*args, **kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/litellm/main.py", line 3093, in completion
raise exception_type(
File "/usr/local/lib/python3.12/dist-packages/litellm/main.py", line 2815, in completion
response = base_llm_http_handler.completion(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/litellm/llms/custom_httpx/llm_http_handler.py", line 239, in completion
data = provider_config.transform_request(
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/litellm/llms/ollama/completion/transformation.py", line 315, in transform_request
modified_prompt = ollama_pt(model=model, messages=messages)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "/usr/local/lib/python3.12/dist-packages/litellm/litellm_core_utils/prompt_templates/factory.py", line 265, in ollama_pt
raise litellm.BadRequestError(
litellm.exceptions.BadRequestError: litellm.BadRequestError: Invalid Message passed in {'role': 'system', 'content': 'You are a helpful AI assistant. Produce JSON OUTPUT ONLY! Adhere to this format {"name": "function_name", "arguments":{"argument_name": "argument_value"}} The following functions are available to you:\n{'type': 'function', 'function': {'name': 'execute', 'description': "Executes code on the user's machine in the users local environment and returns the output", 'parameters': {'type': 'object', 'properties': {'language': {'type': 'string', 'description': 'The programming language (required parameter to the execute function)', 'enum': ['ruby', 'python', 'shell', 'javascript', 'html', 'applescript', 'r', 'powershell', 'react', 'java']}, 'code': {'type': 'string', 'description': 'The code to execute (required)'}}, 'required': ['language', 'code']}}}\n'}
root@Jose:/home/jose/Escritorio#
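The failure is raised inside litellm's Ollama prompt templating (ollama_pt in litellm_core_utils/prompt_templates/factory.py), which rejects the system message Open Interpreter builds when it inlines the execute tool spec. To check whether plain litellm calls to the same local model go through at all, something like the following sketch can be run (it assumes Ollama is serving llama3.1 on the default http://localhost:11434; the messages are placeholders, not the ones Open Interpreter builds):

import litellm

# Call litellm directly against the local Ollama model, bypassing Open
# Interpreter, to see whether the Ollama prompt templating fails on any
# request or only on the message Open Interpreter builds.
messages = [
    {"role": "system", "content": "You are a helpful AI assistant."},
    {"role": "user", "content": "ping"},
]

try:
    response = litellm.completion(model="ollama/llama3.1", messages=messages)
    print(response.choices[0].message.content)
except litellm.exceptions.BadRequestError as err:
    # The same BadRequestError here would point at litellm's Ollama handling;
    # success would point at Open Interpreter's tool-calling message path.
    print("litellm rejected the request:", err)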
Reproduce
Run interpreter --local, pick Ollama as the provider and llama3.1 as the model; the crash happens right away, while local setup sends its initial "ping" message.
Expected behavior
The local model finishes loading and the interactive chat starts instead of crashing during setup.
Screenshots
No response
Open Interpreter version
Version: 0.4.3
Python version
Python 3.12.3
Operating System name and version
Ubuntu 24.04 LTS
Additional context
Help, please.
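A possible workaround to try in the meantime, from the Python API: turn off function/tool calling so Open Interpreter does not inline the execute tool spec into the system message that litellm's Ollama template rejects. A minimal sketch, assuming the llm.supports_functions setting is honored in 0.4.3 and Ollama is running locally:

from interpreter import interpreter

# Point Open Interpreter at the local Ollama model and disable tool calling,
# which should keep the run_tool_calling_llm path (and the inlined tool spec
# in the system message) out of the request sent to Ollama.
interpreter.llm.model = "ollama/llama3.1"
interpreter.llm.api_base = "http://localhost:11434"  # default Ollama endpoint
interpreter.llm.supports_functions = False

interpreter.chat("ping")

Whether the same setting can be applied from the local profile or a CLI flag is not something I have verified.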