diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000..457f44d9ba --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "python.analysis.typeCheckingMode": "basic" +} \ No newline at end of file diff --git a/interpreter/__init__.py b/interpreter/__init__.py index 6ca64ffaa0..efd4748c54 100644 --- a/interpreter/__init__.py +++ b/interpreter/__init__.py @@ -1,15 +1,41 @@ -from .core.core import Interpreter import sys +from .core.core import Interpreter +from .cli.cli import cli + -# This is done so when users `import interpreter`, -# they get an instance of interpreter: -sys.modules["interpreter"] = Interpreter() +def create_interpreter(**kwargs): + """ + Factory function to create an instance of Interpreter with the provided keyword arguments. + + Parameters: + **kwargs: Keyword arguments to be set as attributes in the Interpreter instance. + + Returns: + An instance of Interpreter initialized with the provided arguments. + """ + # Create a new interpreter instance + new_interpreter = Interpreter() + + # Iterate through the provided keyword arguments + for key, value in kwargs.items(): + # Check if the attribute exists in the interpreter + if hasattr(new_interpreter, key): + # Check if the provided value is of the correct type + if isinstance(value, type(getattr(new_interpreter, key))): + setattr(new_interpreter, key, value) + else: + print( + f"Type mismatch: '{key}' should be of type {type(getattr(new_interpreter, key))}. Using the default value instead.") + + else: + print( + f"Unknown attribute: '{key}'. Ignoring.") + + + return new_interpreter -# **This is a controversial thing to do,** -# because perhaps modules ought to behave like modules. -# But I think it saves a step, removes friction, and looks good. 
# ____ ____ __ __ # / __ \____ ___ ____ / _/___ / /____ _________ ________ / /____ _____ diff --git a/interpreter/cli/cli.py b/interpreter/cli/cli.py index b3c285aa80..a4a0909719 100644 --- a/interpreter/cli/cli.py +++ b/interpreter/cli/cli.py @@ -8,6 +8,16 @@ from ..utils.get_config import get_config_path from ..terminal_interface.conversation_navigator import conversation_navigator +import sys +import pysqlite3 + +# Alias pysqlite3 as sqlite3 in sys.modules. this fixes a chromadb error where it whines about the wrong version being installed, but we cant change the containers sqlite. +# 'pysqlite3' is a drop in replacement for default python sqlite3 lib. ( identical apis ) +sys.modules['sqlite3'] = pysqlite3 + + + + arguments = [ { "name": "system_message", @@ -71,6 +81,13 @@ "type": str, }, { + "name": "use_containers", + "nickname": "uc", + "help_text": "optionally use a Docker Container for the interpreters code execution. this will seperate execution from your main computer. this also allows execution on a remote server via the 'DOCKER_HOST' environment variable and the dockerengine api.", + "type": bool + }, + { + "name": "safe_mode", "nickname": "safe", "help_text": "optionally enable safety mechanisms like code scanning; valid options are off, ask, and auto", @@ -91,10 +108,11 @@ }, ] - -def cli(interpreter): +def cli(): parser = argparse.ArgumentParser(description="Open Interpreter") + from ..core.core import Interpreter + # Add arguments for arg in arguments: if arg["type"] == bool: @@ -158,6 +176,8 @@ def cli(interpreter): args = parser.parse_args() + interpreter = Interpreter() + # This should be pushed into an open_config.py util # If --config is used, open the config.yaml file in the Open Interpreter folder of the user's config dir if args.config: diff --git a/interpreter/code_interpreters/__init__.py b/interpreter/code_interpreters/__init__.py index e69de29bb2..8b13789179 100644 --- a/interpreter/code_interpreters/__init__.py +++ 
b/interpreter/code_interpreters/__init__.py @@ -0,0 +1 @@ + diff --git a/interpreter/code_interpreters/container_utils/__init__.py b/interpreter/code_interpreters/container_utils/__init__.py new file mode 100644 index 0000000000..04e19b0576 --- /dev/null +++ b/interpreter/code_interpreters/container_utils/__init__.py @@ -0,0 +1,38 @@ +import appdirs +import shutil +import atexit +import os +import re + +import docker +from docker.tls import TLSConfig +from docker.utils import kwargs_from_env + + +def destroy(): # this fn is called when the entire program exits. registered with atexit in the __init__.py + # Prepare the Docker client + client_kwargs = kwargs_from_env() + if client_kwargs.get('tls'): + client_kwargs['tls'] = TLSConfig(**client_kwargs['tls']) + client = docker.APIClient(**client_kwargs) + + # Get all containers + all_containers = client.containers(all=True) + + # Filter containers based on the label + for container in all_containers: + labels = container['Labels'] + if labels: + session_id = labels.get('session_id') + if session_id and re.match(r'^ses-', session_id): + # Stop the container if it's running + if container['State'] == 'running': + client.stop(container=container['Id']) + # Remove the container + client.remove_container(container=container['Id']) + session_path = os.path.join(appdirs.user_data_dir("Open Interpreter"), "sessions", session_id) + if os.path.exists(session_path): + shutil.rmtree(session_path) + +atexit.register(destroy) + diff --git a/interpreter/code_interpreters/container_utils/auto_remove.py b/interpreter/code_interpreters/container_utils/auto_remove.py new file mode 100644 index 0000000000..a717151574 --- /dev/null +++ b/interpreter/code_interpreters/container_utils/auto_remove.py @@ -0,0 +1,68 @@ +import threading +import time +from functools import wraps + +def access_aware(cls): + class AccessAwareWrapper: + def __init__(self, wrapped, auto_remove_timeout, close_callback=None): + self._wrapped = wrapped + 
self._last_accessed = time.time() + self._auto_remove = auto_remove_timeout is not None + self._timeout = auto_remove_timeout + self.close_callback = close_callback # Store the callback + if self._auto_remove: + self._monitor_thread = threading.Thread(target=self._monitor_object, daemon=True) + self._monitor_thread.start() + + def _monitor_object(self): + while True: + time.sleep(1) # Check every second + if self._auto_remove and self.check_timeout(): + # If a close_callback is defined, call it + if self.close_callback: + try: + self.close_callback() # Call the callback + except Exception as e: + # Log or handle the exception as required + return f"An error occurred during callback: {e}" + + try: + self._wrapped.stop() + except Exception: + continue # why care? we are removing it anyway + + # If the wrapped object has a __del__ method, call it + if self._wrapped and hasattr(self._wrapped, '__del__'): + try: + self._wrapped.__del__() + except Exception as e: + # Log or handle the exception as required + return f"An error occurred during deletion: {e}" + + # Remove the strong reference to the wrapped object. this makes it go bye bye. 
+ self._wrapped = None + break + + def touch(self): + self._last_accessed = time.time() + + def check_timeout(self): + return time.time() - self._last_accessed > self._timeout + + def __getattr__(self, attr): + if self._wrapped is None: + raise ValueError("Object has been removed due to inactivity.") + self.touch() # Update last accessed time + return getattr(self._wrapped, attr) # Use the actual object here + + def __del__(self): + if self._auto_remove: + self._monitor_thread.join() # Ensure the monitoring thread is cleaned up + + @wraps(cls) + def wrapper(*args, **kwargs): + auto_remove_timeout = kwargs.pop('auto_remove_timeout', None) # Extract the auto_remove_timeout argument + close_callback = kwargs.pop('close_callback', None) # Extract the close_callback argument + obj = cls(*args, **kwargs) # Create an instance of the original class + return AccessAwareWrapper(obj, auto_remove_timeout, close_callback) # Wrap it + return wrapper diff --git a/interpreter/code_interpreters/container_utils/build_image.py b/interpreter/code_interpreters/container_utils/build_image.py new file mode 100644 index 0000000000..12d95286d6 --- /dev/null +++ b/interpreter/code_interpreters/container_utils/build_image.py @@ -0,0 +1,108 @@ +import os +import json +import hashlib +import subprocess +from docker import DockerClient +from docker.errors import DockerException +from rich import print as Print + +def get_files_hash(*file_paths): + """Return the SHA256 hash of multiple files.""" + hasher = hashlib.sha256() + for file_path in file_paths: + with open(file_path, "rb") as f: + while chunk := f.read(4096): + hasher.update(chunk) + return hasher.hexdigest() + + +def build_docker_images( + dockerfile_dir = os.path.join(os.path.abspath(os.path.dirname(os.path.dirname(__file__))), "dockerfiles") +, +): + """ + Builds a Docker image for the Open Interpreter runtime container if needed. + + Args: + dockerfile_dir (str): The directory containing the Dockerfile and requirements.txt files. 
+ + Returns: + None + """ + try: + client = DockerClient.from_env() + except DockerException: + Print("[bold red]ERROR[/bold red]: Could not connect to Docker daemon. Is Docker Engine installed and running?") + Print( + "\nFor information on Docker installation, visit: https://docs.docker.com/engine/install/ and follow the instructions for your system." + ) + return + + image_name = "openinterpreter-runtime-container" + hash_file_path = os.path.join(dockerfile_dir, "hash.json") + + dockerfile_name = "Dockerfile" + requirements_name = "requirements.txt" + dockerfile_path = os.path.join(dockerfile_dir, dockerfile_name) + requirements_path = os.path.join(dockerfile_dir, requirements_name) + + if not os.path.exists(dockerfile_path) or not os.path.exists(requirements_path): + Print("ERROR: Dockerfile or requirements.txt not found. Did you delete or rename them?") + raise RuntimeError( + "No container Dockerfiles or requirements.txt found. Make sure they are in the dockerfiles/ subdir of the module." + ) + + current_hash = get_files_hash(dockerfile_path, requirements_path) + + stored_hashes = {} + if os.path.exists(hash_file_path): + with open(hash_file_path, "rb") as f: + stored_hashes = json.load(f) + + original_hash = stored_hashes.get("original_hash") + previous_hash = stored_hashes.get("last_hash") + + if current_hash == original_hash: + images = client.images.list(name=image_name, all=True) + if not images: + Print("Downloading default image from Docker Hub, please wait...") + + subprocess.run(["docker", "pull", "unaidedelf/openinterpreter-runtime-container:latest"]) + subprocess.run(["docker", "tag", "unaidedelf/openinterpreter-runtime-container:latest", image_name ], + check=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + elif current_hash != previous_hash: + Print("Dockerfile or requirements.txt has changed. 
Building container...") + + try: + # Run the subprocess without capturing stdout and stderr + # This will allow Docker's output to be printed to the console in real-time + subprocess.run( + [ + "docker", + "build", + "-t", + f"{image_name}:latest", + dockerfile_dir, + ], + check=True, # This will raise a CalledProcessError if the command returns a non-zero exit code + text=True, + ) + + # Update the stored current hash + stored_hashes["last_hash"] = current_hash + with open(hash_file_path, "w") as f: + json.dump(stored_hashes, f) + + except subprocess.CalledProcessError: + # Suppress Docker's error messages and display your own error message + Print("Docker Build Error: Building Docker image failed. Please review the error message above and resolve the issue.") + + except FileNotFoundError: + Print("ERROR: The 'docker' command was not found on your system.") + Print( + "Please ensure Docker Engine is installed and the 'docker' command is available in your PATH." + ) + Print( + "For information on Docker installation, visit: https://docs.docker.com/engine/install/" + ) + Print("If Docker is installed, try starting a new terminal session.") diff --git a/interpreter/code_interpreters/container_utils/container_utils.py b/interpreter/code_interpreters/container_utils/container_utils.py new file mode 100644 index 0000000000..da9c23a9f5 --- /dev/null +++ b/interpreter/code_interpreters/container_utils/container_utils.py @@ -0,0 +1,255 @@ +"""wrapper classes of the Docker python sdk which allows interaction like its a subprocess object.""" +import os +import re +import select +import struct +import threading +import time + + +# Third-party imports +import appdirs +import docker +from docker.utils import kwargs_from_env +from docker.tls import TLSConfig +from rich import print as Print + +# Modules +from .auto_remove import access_aware + +class DockerStreamWrapper: + def __init__(self, exec_id, sock): + self.exec_id = exec_id + self._sock = sock + self._stdout_r, 
self._stdout_w = os.pipe() + self._stderr_r, self._stderr_w = os.pipe() + self.stdout = self.Stream(self, self._stdout_r) + self.stderr = self.Stream(self, self._stderr_r) + + ## stdin pipe and fd. doesn't need a pipe, but it's easier and thread safe and less mem intensive than a queue.Queue() + self._stdin_r, self._stdin_w = os.pipe() # Pipe for stdin + self.stdin = os.fdopen(self._stdin_w, 'w') + self._stdin_buffer = b"" # Buffer for stdin data. more complex = better fr + + ## start receiving thread to watch socket, and send data from stdin pipe. + self._stop_event = threading.Event() + self._thread = threading.Thread(target=self._listen, daemon=True) + self._thread.start() + + class Stream: + def __init__(self, parent, read_fd): + self.parent = parent + self._read_fd = read_fd + self._buffer = "" + + def readline(self): + while '\n' not in self._buffer: + ready_to_read, _, _ = select.select([self._read_fd], [], [], None) + if not ready_to_read: + return '' + chunk = os.read(self._read_fd, 1024).decode('utf-8') + self._buffer += chunk + + newline_pos = self._buffer.find('\n') + line = self._buffer[:newline_pos] + self._buffer = self._buffer[newline_pos + 1:] + return line + + def _listen(self): + while not self._stop_event.is_set(): + ready_to_read, _, _ = select.select([self._sock, self._stdin_r], [], [], None) + + for s in ready_to_read: + if s == self._sock: + raw_data = self._sock.recv(2048) + stdout, stderr = self.demux_docker_stream(raw_data) + os.write(self._stdout_w, stdout.encode()) + os.write(self._stderr_w, stderr.encode()) + elif s == self._stdin_r: + # Read from the read end of the stdin pipe and add to the buffer + data_to_write = os.read(self._stdin_r, 2048).decode('utf-8') + + # Remove escape characters for quotes but leave other backslashes untouched + data_to_write = re.sub(r'\\([\'"])', r'\1', data_to_write) + + data_to_write = data_to_write.replace('\\n', '\n') + + self._stdin_buffer += data_to_write.encode() + + # Check for newline and send
line by line + while b'\n' in self._stdin_buffer: + newline_pos = self._stdin_buffer.find(b'\n') + line = self._stdin_buffer[:newline_pos + 1] # Include the newline + self._stdin_buffer = self._stdin_buffer[newline_pos + 1:] + + + # Send the line to the Docker container + self._sock.sendall(line) + + + def demux_docker_stream(self, data): + stdout = "" + stderr = "" + offset = 0 + while offset + 8 <= len(data): + header = data[offset:offset + 8] + stream_type, length = struct.unpack('>BxxxL', header) + offset += 8 + chunk = data[offset:offset + length].decode('utf-8') + offset += length + if stream_type == 1: + stdout += chunk + elif stream_type == 2: + stderr += chunk + + return stdout, stderr + + def flush(self): + pass + + def terminate(self): + self._stop_event.set() + self._thread.join() + os.close(self._stdout_r) + os.close(self._stdout_w) + os.close(self._stderr_r) + os.close(self._stderr_w) + + +# The `@access_aware` decorator enables automatic container cleanup based on activity monitoring. +# It functions under the following conditions: +# 1. The container is subject to removal when it remains unaccessed beyond the duration specified by `auto_remove_timeout`. +# 2. This feature necessitates a non-None argument; absence of a valid argument renders this functionality inactive. +# 3. During interactive sessions, the auto-removal feature is disabled to prevent unintended interruptions. +# 4. The "INTERPRETER_CONTAINER_TIMEOUT" environment variable allows customization of the timeout period. +# It accepts an integer value representing the desired timeout in seconds. +# 5. In the event of an unexpected program termination, the container is still ensured to be removed, +# courtesy of the integration with the `atexit` module, safeguarding system resources from being unnecessarily occupied. 
+@access_aware +class DockerProcWrapper: + def __init__(self, command, session_id, auto_remove_timeout=None, close_callback=None, mount=False): ## Mounting isnt implemented in main code, but i did it here prior so we just hide it behind a flag for now. + + # Docker stuff + client_kwargs = kwargs_from_env() + if client_kwargs.get('tls'): + client_kwargs['tls'] = TLSConfig(**client_kwargs['tls']) + self.client = docker.APIClient(**client_kwargs) + self.image_name = "openinterpreter-runtime-container:latest" + self.exec_id = None + self.exec_socket = None + + # close callback + self.close_callback = close_callback + + # session info + self.session_id = session_id + self.session_path = os.path.join(appdirs.user_data_dir("Open Interpreter"), "sessions", session_id) + self.mount = mount + + + # Initialize container + self.init_container() + + self.init_exec_instance() + + + self.wrapper = DockerStreamWrapper(self.exec_id, self.exec_socket) + self.stdout = self.wrapper.stdout + self.stderr = self.wrapper.stderr + self.stdin = self.wrapper.stdin + + self.stdin.write(command + "\n") + + def init_container(self): + self.container = None + try: + containers = self.client.containers( + filters={"label": f"session_id={self.session_id}"}, all=True) + if containers: + self.container = containers[0] + container_id = self.container.get('Id') + container_info = self.client.inspect_container(container_id) + if container_info.get('State', {}).get('Running') is False: + self.client.start(container=container_id) + self.wait_for_container_start(container_id) + else: + if self.mount: + + os.makedirs(self.session_path, exist_ok=True) + + host_config = self.client.create_host_config( + binds={self.session_path: {'bind': '/mnt/data', 'mode': 'rw'}} + ) + else: + host_config = None + + self.container = self.client.create_container( + image=self.image_name, + detach=True, + labels={'session_id': os.path.basename(self.session_path)}, + host_config=host_config, + user="docker", + 
stdin_open=True, + tty=False + ) + + self.client.start(container=self.container.get('Id')) + self.wait_for_container_start(self.container.get('Id')) + + + except Exception as e: + print(f"An error occurred: {e}") + + def init_exec_instance(self): + if self.container: + container_info = self.client.inspect_container(self.container.get('Id')) + + if container_info.get("State").get('Running') is False: # Not sure of the cause of this, but this works for now. + self.client.start(self.container.get("Id")) + + self.exec_id = self.client.exec_create( + self.container.get("Id"), + cmd="/bin/bash", + stdin=True, + stdout=True, + stderr=True, + workdir="/mnt/data", + user="docker", + tty=False + + )['Id'] + # when socket=True, this returns a socketIO socket, so we just kinda hijack the underlying socket + # since docker sets up the socketio wierd and tries to make it hard to mess with and write to. + # We make the socket "Cooperative" + self.exec_socket = self.client.exec_start( + self.exec_id, socket=True, tty=False, demux=False)._sock # type: ignore + + + def wait_for_container_start(self, container_id, timeout=30): + start_time = time.time() + while True: + container_info = self.client.inspect_container(container_id) + if container_info.get('State', {}).get('Running') is True: + return True + elif time.time() - start_time > timeout: + raise TimeoutError( + "Container did not start within the specified timeout.") + time.sleep(1) + + def terminate(self): + self.wrapper.terminate() + self.client.stop(self.container.get("Id")) + self.client.remove_container(self.container.get("Id")) + + def stop(self): + self.wrapper.terminate() + self.client.stop(self.container.get("Id"), 30) + + + def __del__(self): + self.terminate() + + + + + diff --git a/interpreter/code_interpreters/container_utils/download_file.py b/interpreter/code_interpreters/container_utils/download_file.py new file mode 100644 index 0000000000..dc3047e5d7 --- /dev/null +++ 
b/interpreter/code_interpreters/container_utils/download_file.py @@ -0,0 +1,52 @@ +import docker +import tarfile +import os +import tempfile +import appdirs +from tqdm import tqdm + + +def download_file_from_container(container_id, file_path_in_container, local_dir): + # Check if the specified local directory exists + if not os.path.isdir(local_dir): + # If not, use a "Downloads" folder in the user's data directory as the default + local_dir = os.path.join(appdirs.user_data_dir(), "Open Interpreter", "downloads") + print(f"file is being downloaded to {local_dir}") + # Create the Downloads directory if it doesn't exist + os.makedirs(local_dir, exist_ok=True) + + # Create a Docker client + client = docker.from_env() + + # Get the container + container = client.containers.get(container_id) + + # Use get_archive to get a file from the container + stream, stat = container.get_archive(os.path.join("/mnt/data/", file_path_in_container)) + + # Get the file name from the stat info + file_name = os.path.basename(stat['name']) + # Get the size of the file from the stat object for the progress bar + total_size = stat['size'] + # Initialize the progress bar + pbar = tqdm(total=total_size, unit="B", unit_scale=True, desc="Downloading") + + # Update the progress bar within the loop where chunks are being written + with tempfile.NamedTemporaryFile(delete=False) as temp_file: + for chunk in stream: + temp_file.write(chunk) + pbar.update(len(chunk)) + temp_file.flush() + pbar.close() + + # Open the temporary tar file for reading + with tarfile.open(temp_file.name, 'r') as tar: + # Extract the file to the local directory + tar.extractall(path=local_dir) + + # Delete the temporary tar file + os.remove(temp_file.name) + + # Return the path to the extracted file + return os.path.join(local_dir, file_name) + diff --git a/interpreter/code_interpreters/container_utils/upload_file.py b/interpreter/code_interpreters/container_utils/upload_file.py new file mode 100644 index 
0000000000..ecf79fd1de --- /dev/null +++ b/interpreter/code_interpreters/container_utils/upload_file.py @@ -0,0 +1,62 @@ +"""Short function to upload a file to a docker container via the docker module. yes its hacky, but its easy and I didnt want to over complicate.""" +import io +import tarfile +import os +import docker +from tqdm import tqdm + +def copy_file_to_container(container_id, local_path, path_in_container, pbar=True): + # Validate input + if not os.path.exists(local_path): + raise ValueError(f"The specified local path {local_path} does not exist.") + + # Create a Docker client + client = docker.APIClient() + + # Get the container + container = client.containers()[0] + + container_id = container.get("Id") + + # Get the directory path and name in the container + dir_path_in_container = os.path.dirname(path_in_container) + name = os.path.basename(path_in_container) + + # Calculate the total size of the content to be uploaded + total_size = 0 + for dirpath, dirnames, filenames in os.walk(local_path): + for filename in filenames: + filepath = os.path.join(dirpath, filename) + total_size += os.path.getsize(filepath) + + # Create a tarball in memory + file_data = io.BytesIO() + with tarfile.open(fileobj=file_data, mode='w') as tar: + # Check if the local path is a directory or a file + if os.path.isdir(local_path): + # Add the entire directory to the tar archive with the specified name + tar.add(local_path, arcname=name) + else: + # Add the local file to the tar archive with the specified file name + tar.add(local_path, arcname=name) + + # Seek to the beginning of the in-memory tarball + file_data.seek(0) + + # Create a tqdm progress bar + with tqdm(total=total_size, unit='B', unit_scale=True, desc='Uploading') as pbar: + # Define a generator to read the file data in chunks and update the progress bar + def file_data_with_progress(): + chunk_size = 1024 # Define an appropriate chunk size + while True: + chunk = file_data.read(chunk_size) + if not chunk: + break 
+ pbar.update(len(chunk)) + yield chunk + file_data.close() + + # Use put_archive to copy the file or directory into the container + client.put_archive(container=container_id, path=dir_path_in_container, data=file_data_with_progress()) + + return path_in_container diff --git a/interpreter/code_interpreters/create_code_interpreter.py b/interpreter/code_interpreters/create_code_interpreter.py index e18db43efd..247d5f916f 100644 --- a/interpreter/code_interpreters/create_code_interpreter.py +++ b/interpreter/code_interpreters/create_code_interpreter.py @@ -1,11 +1,49 @@ +import os +import uuid +from functools import partial + +import appdirs from .language_map import language_map -def create_code_interpreter(language): + +def create_code_interpreter(interpreter, language, use_containers=False): + """ + Creates and returns a CodeInterpreter instance for the specified language. + + Parameters: + - interpreter (Interpreter): The calling Interpreter object. + - language (str): The programming language for which the CodeInterpreter is to be created. + - use_containers (bool): A flag indicating whether to use containers. If True, a session ID is + generated and associated with the calling Interpreter object. + + Returns: + - CodeInterpreter: An instance of the CodeInterpreter class for the specified language, + configured with the session ID if use_containers is True. + + Raises: + - ValueError: If the specified language is unknown or unsupported. 
+ """ + # Case in-sensitive language = language.lower() - try: - CodeInterpreter = language_map[language] - return CodeInterpreter() - except KeyError: + if language not in language_map: raise ValueError(f"Unknown or unsupported language: {language}") + + CodeInterpreter = language_map[language] + + if not use_containers: + return CodeInterpreter() + + if interpreter.session_id: + session_id = interpreter.session_id + else: + session_id = f"ses-{str(uuid.uuid4())}" + interpreter.session_id = session_id + + timeout = os.getenv("OI_CONTAINER_TIMEOUT", None) + + if timeout is not None: + timeout = int(timeout) + + return CodeInterpreter(session_id=session_id, use_containers=use_containers, close_callback=partial(interpreter.container_callback, language=language), auto_remove_timeout=timeout) diff --git a/interpreter/code_interpreters/dockerfiles/Dockerfile b/interpreter/code_interpreters/dockerfiles/Dockerfile new file mode 100644 index 0000000000..761207e7c0 --- /dev/null +++ b/interpreter/code_interpreters/dockerfiles/Dockerfile @@ -0,0 +1,78 @@ +# Base image +FROM debian:bullseye + +# Set environment variables to disable prompts +ENV DEBIAN_FRONTEND=noninteractive + +# Create user with sudo only in the container for security. +RUN useradd docker && echo "docker:docker" | chpasswd && mkdir -p /home/docker && chown docker:docker /home/docker +RUN mkdir -p /mnt/data && chown -R docker:docker /mnt/data + +RUN apt-get update && apt-get install -y sudo + +RUN usermod -aG sudo docker + +RUN echo "docker ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers +# add write perms for user 'docker' in the correct dirs ( where it cant mess things up much. it does have passwordless sudo, so it can override this if it figures out it has sudo.) 
+RUN chown -R docker:docker /mnt/data + +# Update package list and install essential utilities and runtimes +RUN sudo apt-get update && apt-get install -y \ + jq \ + build-essential \ + python3 \ + python3-pip \ + python3-dev \ + curl \ + libssl-dev \ + libexpat1 \ + r-base \ + r-base-dev + +# install nodejs stuff +RUN sudo apt-get update +RUN sudo apt-get install -y ca-certificates curl gnupg +RUN sudo mkdir -p /etc/apt/keyrings +RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | sudo gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg +RUN NODE_MAJOR=20 +RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_18.x nodistro main" | sudo tee /etc/apt/sources.list.d/nodesource.list +RUN sudo apt-get update +RUN sudo apt-get install nodejs -y + + +# Copy unified requirements.txt into the image +COPY ./requirements.txt /tmp/ + +# Install Python packages +RUN awk '/\[python\]/, /^\s*$/' /tmp/requirements.txt | grep -v '^\[python\]' > /tmp/python_requirements.txt && \ + pip3 install --no-cache-dir -r /tmp/python_requirements.txt --no-input + +# Create an initial empty package.json +RUN echo '{}' > /tmp/package.json + +RUN awk '/\[node\]/, /^\s*$/' /tmp/requirements.txt | grep -v '^\[node\]' > /tmp/node_requirements.txt && \ + while read -r package; do \ + name=$(echo $package | grep -Eo "^[a-zA-Z0-9_-]+") && \ + version=$(echo $package | grep -Eo "[>=<]+[0-9.]+"); \ + if [ -z "$version" ]; then version="*"; fi && \ + jq ".dependencies.\"$name\" = \"$version\"" /tmp/package.json > /tmp/package_temp.json && mv /tmp/package_temp.json /tmp/package.json; \ + done < /tmp/node_requirements.txt + + + +# Install NodeJS packages +RUN npm install --save --yes --no-package-lock --prefix /tmp + +# Install R packages +RUN awk '/\[r\]/,/^\s*$/' /tmp/requirements.txt | grep -v '^\[r\]' > /tmp/r_requirements.txt && \ + while read -r package; do \ + R -e "install.packages('$package', repos='http://cran.rstudio.com/', 
dependencies=TRUE, INSTALL_opts = c('--no-tests'))"; \ + done < /tmp/r_requirements.txt + +# Install NodeJS packages + +# Remove temporary files +RUN rm /tmp/package.json /tmp/node_requirements.txt /tmp/python_requirements.txt /tmp/r_requirements.txt + +# workdir set. pray it doesn't try to go exploring. +WORKDIR /mnt/data diff --git a/interpreter/code_interpreters/dockerfiles/dev_note.md b/interpreter/code_interpreters/dockerfiles/dev_note.md new file mode 100644 index 0000000000..25635278c3 --- /dev/null +++ b/interpreter/code_interpreters/dockerfiles/dev_note.md @@ -0,0 +1,22 @@ +======================================================================= + NOTE TO DEVELOPERS +======================================================================= + +Please avoid manually editing the following files: +- Dockerfile +- requirements.txt +- hash.json + +These files are key components for Open-Interpreter's containerized execution features. Manually editing them can disrupt the program's ability to: + +1. Know when to rebuild the Docker image. +2. Perform other related functionalities efficiently. + +If you need to make adjustments, kindly use the 'DockerManager' class. It offers convenient methods like: +- add_dependency +- remove_dependency +- add_language + +Your cooperation helps maintain a smooth and reliable development workflow.
+ +======================================================================= diff --git a/interpreter/code_interpreters/dockerfiles/docker_manager.py b/interpreter/code_interpreters/dockerfiles/docker_manager.py new file mode 100644 index 0000000000..6446fd106b --- /dev/null +++ b/interpreter/code_interpreters/dockerfiles/docker_manager.py @@ -0,0 +1,59 @@ +import os + +class DockerManager: + + here = os.path.abspath(__file__) + requirements_file = os.path.normpath(os.path.join(here, "..", "requirements.txt")) + docker_file = os.path.normpath(os.path.join(here,"..", "Dockerfile")) + + def add_dependency(language, dependency): + lines = [] + language_section_found = False + dependency_name = dependency.split('==')[0] + + with open(DockerManager.requirements_file, 'r') as f: + lines = f.readlines() + + for i, line in enumerate(lines): + if line.strip() == f'[{language}]': + language_section_found = True + elif language_section_found: + if line.strip() == '' or line.strip().startswith('['): + break + existing_dependency_name = line.strip().split('==')[0] + if existing_dependency_name == dependency_name: + print(f"Dependency {dependency} already exists under [{language}].") + return + + if not language_section_found: + print(f"Error: Language section [{language}] not found. 
Please add it first.") + return + + lines.insert(i, f"{dependency}\n") + + with open(DockerManager.requirements_file, 'w') as f: + f.writelines(lines) + + def remove_dependency(language, dependency_name): + lines = [] + language_section_found = False + + with open(DockerManager.requirements_file, 'r') as f: + lines = f.readlines() + + for i, line in enumerate(lines): + if line.strip() == f'[{language}]': + language_section_found = True + elif language_section_found: + if line.strip() == '' or line.strip().startswith('['): + break + existing_dependency_name = line.strip().split('==')[0] + if existing_dependency_name == dependency_name: + del lines[i] + break + else: + raise ValueError(f"Error: Language section [{language}] or dependency {dependency_name} not found.") + + with open(DockerManager.requirements_file, 'w') as f: + f.writelines(lines) + diff --git a/interpreter/code_interpreters/dockerfiles/hash.json b/interpreter/code_interpreters/dockerfiles/hash.json new file mode 100644 index 0000000000..633d7ba48f --- /dev/null +++ b/interpreter/code_interpreters/dockerfiles/hash.json @@ -0,0 +1,4 @@ +{ + "original_hash": "fa63fa21b690fe37b4e3580221440008bbb9a31a3d58a5d77dbf76c776dd7c80", + "last_hash": "fa63fa21b690fe37b4e3580221440008bbb9a31a3d58a5d77dbf76c776dd7c80" +} \ No newline at end of file diff --git a/interpreter/code_interpreters/dockerfiles/requirements.txt b/interpreter/code_interpreters/dockerfiles/requirements.txt new file mode 100644 index 0000000000..0da60e08a9 --- /dev/null +++ b/interpreter/code_interpreters/dockerfiles/requirements.txt @@ -0,0 +1,11 @@ +[python] +numpy==1.21.2 +pandas==1.3.3 +matplotlib==3.4.3 +beautifulsoup4==4.10.0 + +[r] + + +[node] +chalk==2.0 \ No newline at end of file diff --git a/interpreter/code_interpreters/languages/applescript.py b/interpreter/code_interpreters/languages/applescript.py index 403c3e0fc3..78168e4577 100644 --- a/interpreter/code_interpreters/languages/applescript.py +++ 
b/interpreter/code_interpreters/languages/applescript.py @@ -2,11 +2,13 @@ from ..subprocess_code_interpreter import SubprocessCodeInterpreter class AppleScript(SubprocessCodeInterpreter): + file_extension = "applescript" proper_name = "AppleScript" - def __init__(self): - super().__init__() + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.start_cmd = os.environ.get('SHELL', '/bin/zsh') def preprocess_code(self, code): diff --git a/interpreter/code_interpreters/languages/html.py b/interpreter/code_interpreters/languages/html.py index 965b38717b..20a7baa268 100644 --- a/interpreter/code_interpreters/languages/html.py +++ b/interpreter/code_interpreters/languages/html.py @@ -2,20 +2,31 @@ import tempfile import os from ..base_code_interpreter import BaseCodeInterpreter +from ..container_utils.upload_file import copy_file_to_container class HTML(BaseCodeInterpreter): file_extension = "html" proper_name = "HTML" - def __init__(self): - super().__init__() - + def __init__(self, **kwargs): ## accept the kwargs though we don't use them, since it's easier this way. 
+ super().__init__() + self.kwargs = kwargs def run(self, code): # Create a temporary HTML file with the content with tempfile.NamedTemporaryFile(delete=False, suffix=".html") as f: f.write(code.encode()) + save_dir = os.path.realpath(f.name) + + if self.kwargs.get("use_containers"): + save_dir = copy_file_to_container( + local_path=os.path.realpath(f.name), + path_in_container=os.path.join("/mnt/data", f.name), + container_id=self.kwargs.get("session_id"), + pbar=False + ) + # Open the HTML file with the default web browser webbrowser.open('file://' + os.path.realpath(f.name)) - yield {"output": f"Saved to {os.path.realpath(f.name)} and opened with the user's default web browser."} \ No newline at end of file + yield {"output": f"Saved to {save_dir} and opened with the user's default web browser."} \ No newline at end of file diff --git a/interpreter/code_interpreters/languages/javascript.py b/interpreter/code_interpreters/languages/javascript.py index d5e74ff824..4a078e7df7 100644 --- a/interpreter/code_interpreters/languages/javascript.py +++ b/interpreter/code_interpreters/languages/javascript.py @@ -2,11 +2,13 @@ import re class JavaScript(SubprocessCodeInterpreter): + file_extension = "js" proper_name = "JavaScript" + + def __init__(self, **kwargs): + super().__init__(**kwargs) - def __init__(self): - super().__init__() self.start_cmd = "node -i" def preprocess_code(self, code): @@ -53,12 +55,12 @@ def preprocess_javascript(code): # Wrap in a try-catch and add end of execution marker processed_code = f""" -try {{ -{processed_code} -}} catch (e) {{ - console.log(e); -}} -console.log("## end_of_execution ##"); -""" + try {{ + {processed_code} + }} catch (e) {{ + console.log(e); + }} + console.log("## end_of_execution ##"); + """ return processed_code \ No newline at end of file diff --git a/interpreter/code_interpreters/languages/python.py b/interpreter/code_interpreters/languages/python.py index 8747a42661..804f0b6fdd 100644 --- 
a/interpreter/code_interpreters/languages/python.py +++ b/interpreter/code_interpreters/languages/python.py @@ -6,15 +6,19 @@ import shlex class Python(SubprocessCodeInterpreter): + file_extension = "py" proper_name = "Python" - def __init__(self): - super().__init__() - executable = sys.executable - if os.name != 'nt': # not Windows - executable = shlex.quote(executable) - self.start_cmd = executable + " -i -q -u" + def __init__(self, **kwargs): + super().__init__(**kwargs) + if 'use_containers' in kwargs and kwargs['use_containers']: + self.start_cmd = "python3 -i -q -u" + else: + executable = sys.executable + if os.name != 'nt': # not Windows + executable = shlex.quote(executable) + self.start_cmd = executable + " -i -q -u" def preprocess_code(self, code): return preprocess_python(code) @@ -53,7 +57,7 @@ def preprocess_python(code): code = "\n".join(code_lines) # Add end command (we'll be listening for this so we know when it ends) - code += '\n\nprint("## end_of_execution ##")' + code += '\n\nprint("## end_of_execution ##")\n' return code diff --git a/interpreter/code_interpreters/languages/r.py b/interpreter/code_interpreters/languages/r.py index 16f51f93cf..13e54d6848 100644 --- a/interpreter/code_interpreters/languages/r.py +++ b/interpreter/code_interpreters/languages/r.py @@ -2,11 +2,13 @@ import re class R(SubprocessCodeInterpreter): + + file_extension = "r" proper_name = "R" - def __init__(self): - super().__init__() + def __init__(self, **kwargs): + super().__init__(**kwargs) self.start_cmd = "R -q --vanilla" # Start R in quiet and vanilla mode def preprocess_code(self, code): @@ -28,13 +30,13 @@ def preprocess_code(self, code): # Wrap in a tryCatch for error handling and add end of execution marker processed_code = f""" -tryCatch({{ -{processed_code} -}}, error=function(e){{ - cat("## execution_error ##\\n", conditionMessage(e), "\\n"); -}}) -cat("## end_of_execution ##\\n"); -""" + tryCatch({{ + {processed_code} + }}, error=function(e){{ + cat("## 
execution_error ##\\n", conditionMessage(e), "\\n"); + }}) + cat("## end_of_execution ##\\n"); + """ # Count the number of lines of processed_code # (R echoes all code back for some reason, but we can skip it if we track this!) self.code_line_count = len(processed_code.split("\n")) - 1 diff --git a/interpreter/code_interpreters/languages/shell.py b/interpreter/code_interpreters/languages/shell.py index 136160dbd0..17f594d82e 100644 --- a/interpreter/code_interpreters/languages/shell.py +++ b/interpreter/code_interpreters/languages/shell.py @@ -3,12 +3,12 @@ import os class Shell(SubprocessCodeInterpreter): + file_extension = "sh" proper_name = "Shell" - def __init__(self): - super().__init__() - + def __init__(self, **kwargs): + super().__init__(**kwargs) # Determine the start command based on the platform if platform.system() == 'Windows': self.start_cmd = 'cmd.exe' @@ -54,4 +54,15 @@ def add_active_line_prints(code): for index, line in enumerate(lines): # Insert the echo command before the actual line lines[index] = f'echo "## active_line {index + 1} ##"\n{line}' - return '\n'.join(lines) \ No newline at end of file + return '\n'.join(lines) + + +def wrap_in_trap(code): + """ + Wrap Bash code with a trap to catch errors and display them. 
+ """ + trap_code = """ + trap 'echo "An error occurred on line $LINENO"; exit' ERR + set -E + """ + return trap_code + code diff --git a/interpreter/code_interpreters/subprocess_code_interpreter.py b/interpreter/code_interpreters/subprocess_code_interpreter.py index 04428cece0..7435e9097d 100644 --- a/interpreter/code_interpreters/subprocess_code_interpreter.py +++ b/interpreter/code_interpreters/subprocess_code_interpreter.py @@ -1,30 +1,54 @@ - - +import os +import queue import subprocess import threading -import queue import time import traceback +import appdirs + from .base_code_interpreter import BaseCodeInterpreter +from .container_utils.container_utils import DockerProcWrapper + class SubprocessCodeInterpreter(BaseCodeInterpreter): - def __init__(self): + """ + A code interpreter that uses subprocess to execute code in a separate process. + + Attributes: + - start_cmd (str): The command to start the interpreter process. + - process (subprocess.Popen): The interpreter process. + - debug_mode (bool): Whether to print debug information. + - output_queue (queue.Queue): A queue to store the output of the interpreter process. + - done (threading.Event): An event to signal when the interpreter process has finished executing. + - contain (bool): Whether to run the interpreter process inside a Docker container. + - session_id (str): The ID of the Docker container session, if `contain` is True. 
+ """ + + def __init__(self, use_containers=False, **container_args): + self.container_args = container_args self.start_cmd = "" self.process = None self.debug_mode = False self.output_queue = queue.Queue() self.done = threading.Event() + self.use_containers = use_containers + if self.use_containers: + self.session_id = container_args.get("session_id") - def detect_active_line(self, line): + @staticmethod + def detect_active_line(line): return None - - def detect_end_of_execution(self, line): + + @staticmethod + def detect_end_of_execution(line): return None - - def line_postprocessor(self, line): + + @staticmethod + def line_postprocessor(line): return line - - def preprocess_code(self, code): + + @staticmethod + def preprocess_code(code): """ This needs to insert an end_of_execution marker of some kind, which can be detected by detect_end_of_execution. @@ -32,27 +56,44 @@ def preprocess_code(self, code): Optionally, add active line markers for detect_active_line. """ return code - + def terminate(self): self.process.terminate() def start_process(self): - if self.process: - self.terminate() - - self.process = subprocess.Popen(self.start_cmd.split(), - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - bufsize=0, - universal_newlines=True) - threading.Thread(target=self.handle_stream_output, - args=(self.process.stdout, False), - daemon=True).start() - threading.Thread(target=self.handle_stream_output, - args=(self.process.stderr, True), - daemon=True).start() + """ + Starts the process for the code interpreter. If the interpreter is running in a Docker container, + it uses the DockerProcWrapper class to start the process. Otherwise, it uses subprocess.Popen + to start the process. + + The method also starts two threads to handle the stdout and stderr streams of the process. 
+ + Returns: + None + """ + + if self.use_containers: + self.process = DockerProcWrapper( + command=self.start_cmd, + **self.container_args + ) + else: + self.process = subprocess.Popen( + self.start_cmd.split(), + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + bufsize=0, + universal_newlines=True, + ) + + threading.Thread( + target=self.handle_stream_output, args=(self.process.stdout, False), daemon=True + ).start() + threading.Thread( + target=self.handle_stream_output, args=(self.process.stderr, True), daemon=True + ).start() def run(self, code): retry_count = 0 @@ -63,10 +104,9 @@ def run(self, code): code = self.preprocess_code(code) if not self.process: self.start_process() - except: + except subprocess.SubprocessError: yield {"output": traceback.format_exc()} return - while retry_count <= max_retries: if self.debug_mode: @@ -78,7 +118,8 @@ def run(self, code): self.process.stdin.write(code + "\n") self.process.stdin.flush() break - except: + except subprocess.SubprocessError: + yield {"output": traceback.format_exc()} if retry_count != 0: # For UX, I like to hide this if it happens once. Obviously feels better to not see errors # Most of the time it doesn't matter, but we should figure out why it happens frequently with: @@ -100,8 +141,8 @@ def run(self, code): else: time.sleep(0.1) try: - output = self.output_queue.get(timeout=0.3) # Waits for 0.3 seconds - yield output + # Waits for 0.3 seconds + yield self.output_queue.get(timeout=0.3) except queue.Empty: if self.done.is_set(): # Try to yank 3 more times from it... maybe there's something in there... 
@@ -113,14 +154,14 @@ def run(self, code): break def handle_stream_output(self, stream, is_error_stream): - for line in iter(stream.readline, ''): + for line in iter(stream.readline, ""): if self.debug_mode: print(f"Received output line:\n{line}\n---") line = self.line_postprocessor(line) if line is None: - continue # `line = None` is the postprocessor's signal to discard completely + continue # `line = None` is the postprocessor's signal to discard completely if self.detect_active_line(line): active_line = self.detect_active_line(line) @@ -135,4 +176,4 @@ def handle_stream_output(self, stream, is_error_stream): self.done.set() else: self.output_queue.put({"output": line}) - + \ No newline at end of file diff --git a/interpreter/core/core.py b/interpreter/core/core.py index c744e33bea..5e59bdd7d6 100644 --- a/interpreter/core/core.py +++ b/interpreter/core/core.py @@ -1,8 +1,21 @@ """ This file defines the Interpreter class. -It's the main file. `import interpreter` will import an instance of this class. +running ```import interpreter``` followed by ```interpreter.create_interpreter(**kwargs)``` will create an instance of this class. 
""" -from interpreter.utils import display_markdown_message + +import json +import appdirs +import os +from datetime import datetime +from typing import (Optional, + Union, + Iterator, + Any, + Callable, + List, + Dict + ) + from ..cli.cli import cli from ..utils.get_config import get_config, user_config_path from ..utils.local_storage_path import get_storage_path @@ -11,19 +24,16 @@ from ..terminal_interface.terminal_interface import terminal_interface from ..terminal_interface.validate_llm_settings import validate_llm_settings from .generate_system_message import generate_system_message -import appdirs -import os -from datetime import datetime from ..rag.get_relevant_procedures_string import get_relevant_procedures_string -import json from ..utils.check_for_update import check_for_update from ..utils.display_markdown_message import display_markdown_message +from ..code_interpreters.container_utils.build_image import build_docker_images from ..utils.embed import embed_function + + class Interpreter: - def cli(self): - cli(self) def __init__(self): # State @@ -64,23 +74,36 @@ def __init__(self): # Number of procedures to add to the system message self.num_procedures = 2 + # Container options + self.use_containers = False + self.session_id = None + # Load config defaults self.extend_config(self.config_file) + + + # Check for update if not self.local: # This should actually be pushed into the utility if check_for_update(): display_markdown_message("> **A new version of Open Interpreter is available.**\n>Please run: `pip install --upgrade open-interpreter`\n\n---") + + - def extend_config(self, config_path): + def extend_config(self, config_path: str) -> None: if self.debug_mode: print(f'Extending configuration from `{config_path}`') config = get_config(config_path) self.__dict__.update(config) - def chat(self, message=None, display=True, stream=False): + def chat(self, message: Optional[str] = None, display: bool = True, stream: bool = False) -> Union[List[Dict[str, 
Any]], None]: + + if self.use_containers: + build_docker_images() # Build images if needed. does nothing if already built + if stream: return self._streaming_chat(message=message, display=display) @@ -90,7 +113,7 @@ def chat(self, message=None, display=True, stream=False): return self.messages - def _streaming_chat(self, message=None, display=True): + def _streaming_chat(self, message: Optional[str] = None, display: bool = True) -> Iterator: # If we have a display, # we can validate our LLM settings w/ the user first @@ -137,12 +160,12 @@ def _streaming_chat(self, message=None, display=True): json.dump(self.messages, f) return - raise Exception("`interpreter.chat()` requires a display. Set `display=True` or pass a message into `interpreter.chat(message)`.") - - def _respond(self): + raise ValueError("`interpreter.chat()` requires a display. Set `interpreter.display=True` or pass a message into `interpreter.chat(message)`.") + + def _respond(self) -> Iterator: yield from respond(self) - def reset(self): + def reset(self) -> None: for code_interpreter in self._code_interpreters.values(): code_interpreter.terminate() self._code_interpreters = {} @@ -153,10 +176,13 @@ def reset(self): self.__init__() - # These functions are worth exposing to developers # I wish we could just dynamically expose all of our functions to devs... 
- def generate_system_message(self): + def generate_system_message(self) -> str: return generate_system_message(self) - def get_relevant_procedures_string(self): + + def get_relevant_procedures_string(self) -> str: return get_relevant_procedures_string(self) + + def container_callback(self, language: str) -> None: + self._code_interpreters.pop(language) diff --git a/interpreter/core/respond.py b/interpreter/core/respond.py index f5150f9916..68bb517626 100644 --- a/interpreter/core/respond.py +++ b/interpreter/core/respond.py @@ -114,7 +114,11 @@ def respond(interpreter): # Get a code interpreter to run it language = interpreter.messages[-1]["language"] if language not in interpreter._code_interpreters: - interpreter._code_interpreters[language] = create_code_interpreter(language) + if interpreter.use_containers: + interpreter._code_interpreters[language] = create_code_interpreter(interpreter, language, use_containers=True) + else: + interpreter._code_interpreters[language] = create_code_interpreter(interpreter, language, use_containers=False) + code_interpreter = interpreter._code_interpreters[language] # Yield a message, such that the user can stop code execution if they want to @@ -127,7 +131,13 @@ def respond(interpreter): # Yield each line, also append it to last messages' output interpreter.messages[-1]["output"] = "" - for line in code_interpreter.run(code): + + code_to_run = code + + if not code_to_run.endswith("\n"): + code_to_run += "\n" + + for line in code_interpreter.run(code_to_run): yield line if "output" in line: output = interpreter.messages[-1]["output"] @@ -149,4 +159,4 @@ def respond(interpreter): # Doesn't want to run code. 
We're done break - return \ No newline at end of file + return diff --git a/interpreter/llm/get_schema.py b/interpreter/llm/get_schema.py new file mode 100644 index 0000000000..ff77fd5ca0 --- /dev/null +++ b/interpreter/llm/get_schema.py @@ -0,0 +1,45 @@ +"""Function to enable / disable different lang based on operating system. """ +import platform +import copy + +BASE_FUNCTION_SCHEMA = { + "name": "execute", + "description": + "Executes code on the user's machine, **in the users local environment**, and returns the output", + "parameters": { + "type": "object", + "properties": { + "language": { + "type": "string", + "description": + "The programming language (required parameter to the `execute` function)", + "enum": ["python", "R", "shell", "javascript", "html",] + }, + "code": { + "type": "string", + "description": "The code to execute (required)" + } + }, + "required": ["language", "code"] + }, +} + +def get_schema(): + # Detect the operating system + os_type = platform.system().lower() + + # Define the base languages that are common to all supported operating systems + base_languages = ["python", "R", "shell", "javascript", "html"] + + # Copy the schema to avoid modifying the original + corrected_schema = copy.deepcopy(BASE_FUNCTION_SCHEMA) + + # Add 'powershell' if the OS is Windows, 'applescript' if macOS, or none if it's another OS + if os_type == 'windows': + base_languages.append('powershell') + elif os_type == 'darwin': # Darwin is the system name for macOS + base_languages.append('applescript') + + corrected_schema['parameters']['properties']['language']['enum'] = base_languages + + return corrected_schema diff --git a/interpreter/llm/setup_openai_coding_llm.py b/interpreter/llm/setup_openai_coding_llm.py index 763dc4f181..1f6aa9cb57 100644 --- a/interpreter/llm/setup_openai_coding_llm.py +++ b/interpreter/llm/setup_openai_coding_llm.py @@ -3,31 +3,10 @@ from ..utils.parse_partial_json import parse_partial_json from ..utils.convert_to_openai_messages 
import convert_to_openai_messages from ..utils.display_markdown_message import display_markdown_message +from .get_schema import get_schema import tokentrim as tt -function_schema = { - "name": "execute", - "description": - "Executes code on the user's machine, **in the users local environment**, and returns the output", - "parameters": { - "type": "object", - "properties": { - "language": { - "type": "string", - "description": - "The programming language (required parameter to the `execute` function)", - "enum": ["python", "R", "shell", "applescript", "javascript", "html", "powershell"] - }, - "code": { - "type": "string", - "description": "The code to execute (required)" - } - }, - "required": ["language", "code"] - }, -} - def setup_openai_coding_llm(interpreter): """ Takes an Interpreter (which includes a ton of LLM settings), @@ -68,7 +47,7 @@ def coding_llm(messages): 'model': interpreter.model, 'messages': messages, 'stream': True, - 'functions': [function_schema] + 'functions': [get_schema()] } # Optional inputs @@ -135,4 +114,4 @@ def coding_llm(messages): if code_delta: yield {"code": code_delta} - return coding_llm \ No newline at end of file + return coding_llm diff --git a/interpreter/terminal_interface/components/file_dialog.py b/interpreter/terminal_interface/components/file_dialog.py new file mode 100644 index 0000000000..4c0bdbb377 --- /dev/null +++ b/interpreter/terminal_interface/components/file_dialog.py @@ -0,0 +1,47 @@ +"""Simple class and method to launch the users system filedialog """ + +from PyQt5.QtWidgets import QApplication, QFileDialog, QMessageBox + +class FileDialog: + def get_path(self, type=None): + """ + Open a file dialog and return the selected file or folder path. + + :param type: str, optional (default=None) + Specifies the type of path to select ("file", "folder", or None). + If None, the user will be asked whether to select a file or a folder. + :return: str + The path selected by the user. 
If the user cancels the operation, it returns None. + """ + app = QApplication.instance() + if app is None: + app = QApplication([]) + + options = QFileDialog.Options() + options |= QFileDialog.ReadOnly + + if type is None: + msg_box = QMessageBox() + msg_box.setWindowTitle("Choose Action") + msg_box.setText("Do you want to select a file or a folder?") + btn_file = msg_box.addButton("File", QMessageBox.YesRole) + btn_folder = msg_box.addButton("Folder", QMessageBox.NoRole) + msg_box.addButton(QMessageBox.Cancel) + user_choice = msg_box.exec_() + + if user_choice == QMessageBox.Cancel: + return None + type = "file" if msg_box.clickedButton() == btn_file else "folder" + + if type == "file": + path, _ = QFileDialog.getOpenFileName(None, "Open File", "", + "All Files (*)", options=options) + elif type == "folder": + path = QFileDialog.getExistingDirectory(None, "Open Folder", + "", options=options) + else: + path = self.get_path(type=None) # this or a ValueError; may as well explicitly pass None. 
+ + return path + + diff --git a/interpreter/terminal_interface/magic_commands.py b/interpreter/terminal_interface/magic_commands.py index fddfbc9335..5973659f7a 100644 --- a/interpreter/terminal_interface/magic_commands.py +++ b/interpreter/terminal_interface/magic_commands.py @@ -1,7 +1,16 @@ -from ..utils.display_markdown_message import display_markdown_message -from ..utils.count_tokens import count_messages_tokens import json import os +import appdirs +import docker + +from ..utils.display_markdown_message import display_markdown_message +from ..utils.count_tokens import count_messages_tokens +from ..utils.display_markdown_message import display_markdown_message +from ..code_interpreters.container_utils.download_file import download_file_from_container +from ..code_interpreters.container_utils.upload_file import copy_file_to_container + +from rich import print as Print + def handle_undo(self, arguments): # Removes all messages after the most recent user entry (and the entry itself). @@ -9,7 +18,7 @@ def handle_undo(self, arguments): # Also gives a visual representation of the messages removed. if len(self.messages) == 0: - return + return # Find the index of the last 'role': 'user' entry last_user_index = None for i, message in enumerate(self.messages): @@ -23,38 +32,45 @@ def handle_undo(self, arguments): removed_messages = self.messages[last_user_index:] self.messages = self.messages[:last_user_index] - print("") # Aesthetics. + print("") # Aesthetics. # Print out a preview of what messages were removed. for message in removed_messages: - if 'content' in message and message['content'] != None: - display_markdown_message(f"**Removed message:** `\"{message['content'][:30]}...\"`") - elif 'function_call' in message: - display_markdown_message(f"**Removed codeblock**") # TODO: Could add preview of code removed here. - - print("") # Aesthetics. 
+ if 'content' in message and message['content'] != None: + display_markdown_message( + f"**Removed message:** `\"{message['content'][:30]}...\"`") + elif 'function_call' in message: + # TODO: Could add preview of code removed here. + display_markdown_message(f"**Removed codeblock**") + + print("") # Aesthetics. + def handle_help(self, arguments): commands_description = { - "%debug [true/false]": "Toggle debug mode. Without arguments or with 'true', it enters debug mode. With 'false', it exits debug mode.", - "%reset": "Resets the current session.", - "%undo": "Remove previous messages and its response from the message history.", - "%save_message [path]": "Saves messages to a specified JSON path. If no path is provided, it defaults to 'messages.json'.", - "%load_message [path]": "Loads messages from a specified JSON path. If no path is provided, it defaults to 'messages.json'.", - "%tokens [prompt]": "Calculate the tokens used by the current conversation's messages and estimate their cost and optionally calculate the tokens and estimated cost of a `prompt` if one is provided.", - "%help": "Show this help message.", + "%debug [true/false]": "Toggle debug mode. Without arguments or with 'true', it enters debug mode. With 'false', it exits debug mode.", + "%reset": "Resets the current session.", + "%undo": "Remove previous messages and its response from the message history.", + "%save_message [path]": "Saves messages to a specified JSON path. If no path is provided, it defaults to 'messages.json'.", + "%load_message [path]": "Loads messages from a specified JSON path. If no path is provided, it defaults to 'messages.json'.", + "%tokens [prompt]": "Calculate the tokens used by the current conversation's messages and estimate their cost and optionally calculate the tokens and estimated cost of a `prompt` if one is provided.", + "%help": "Show this help message.", + "%upload": "open a File Dialog, and select a file to upload to the container. 
only used when using containerized code execution", + "%upload folder": "same as upload command, except you can upload a folder instead of just a file.", + "%upload file": "same as upload command, except you can upload a file.", + "%download" : "Download a file or directory given the file or folder name in the container." } base_message = [ - "> **Available Commands:**\n\n" + "> **Available Commands:**\n\n" ] # Add each command and its description to the message for cmd, desc in commands_description.items(): - base_message.append(f"- `{cmd}`: {desc}\n") + base_message.append(f"- `{cmd}`: {desc}\n") additional_info = [ - "\n\nFor further assistance, please join our community Discord or consider contributing to the project's development." + "\n\nFor further assistance, please join our community Discord or consider contributing to the project's development." ] # Combine the base message with the additional info @@ -74,33 +90,153 @@ def handle_debug(self, arguments=None): else: display_markdown_message("> Unknown argument to debug command.") + def handle_reset(self, arguments): self.reset() display_markdown_message("> Reset Done") + def default_handle(self, arguments): display_markdown_message("> Unknown command") handle_help(self,arguments) def handle_save_message(self, json_path): if json_path == "": - json_path = "messages.json" + json_path = "messages.json" if not json_path.endswith(".json"): - json_path += ".json" + json_path += ".json" with open(json_path, 'w') as f: - json.dump(self.messages, f, indent=2) + json.dump(self.messages, f, indent=2) + + display_markdown_message( + f"> messages json export to {os.path.abspath(json_path)}") - display_markdown_message(f"> messages json export to {os.path.abspath(json_path)}") def handle_load_message(self, json_path): if json_path == "": - json_path = "messages.json" + json_path = "messages.json" if not json_path.endswith(".json"): - json_path += ".json" + json_path += ".json" with open(json_path, 'r') as f: - 
self.messages = json.load(f) + self.messages = json.load(f) + + display_markdown_message( + f"> messages json loaded from {os.path.abspath(json_path)}") + +def handle_container_upload(self,type=None, *args): + def is_gui_available(): + try: + from PyQt5.QtWidgets import QApplication + app = QApplication([]) + del app + return True + except Exception as e: + print(f"An error occurred: {str(e)}") + return False + + args = list(args) + if self.use_containers: + try: + client = docker.APIClient() + except: + error_message = ( + "We were not able to connect to the Docker Container daemon. " + "Please ensure Docker is installed and running. If you have not run any code yet, " + "you will need to in order to start a container." + ) + display_markdown_message(f"{error_message}") + return + if len(args) == 0: + if is_gui_available(): + try: + from .components.file_dialog import FileDialog + + fd = FileDialog() + if type is not None: + path = fd.get_path(type=type) + else: + path = fd.get_path(type=None) + if path is not None: # if none, they exited + + args.append(path) + else: # We shall now exit on them out of spite + return + except ImportError as e: + Print(f"Internal import error {e}") + return + else: + Print(f"No GUI available for your system.\n please provide a filepath manually. use the command %upload ") + return + + for filepath in args: + if os.path.exists(filepath): + session_id = self.session_id + if session_id is None: + Print("[BOLD] [RED] No session found. Please run any code to start one. [/RED] [/BOLD]") + return + containers = client.containers(filters={"label": f"session_id={session_id}"}) + if containers: + container_id = containers[0]['Id'] + # /mnt/data is default workdir for container + copy_file_to_container( + container_id=container_id, local_path=filepath, path_in_container=f"/mnt/data/{os.path.basename(filepath)}" + ) + success_message = f"[{filepath}](#) successfully uploaded to container in dir `/mnt/data`." 
+ display_markdown_message(success_message) + else: + no_container_message = ( + "No container found to upload to. Please run any code to start one. " + "This will be fixed in a later update." + ) + display_markdown_message(f"**'{no_container_message}'**") + else: + file_not_found_message = f"File `{filepath}` does not exist." + display_markdown_message(file_not_found_message) + else: + ignore_command_message = "File uploads are only used when using containerized code execution. Ignoring command." + display_markdown_message(f"**{ignore_command_message}**") + +def handle_container_download(self, *args): + if self.use_containers: + try: + client = docker.APIClient() + except Exception as e: + print("[BOLD][RED]Unable to connect to the Docker Container daemon. Please ensure Docker is installed and running. ignoring command[/RED][/BOLD]") + return + + session_id = self.session_id + if session_id is None: + print("No session found. Please run any code to start one.") + return + + containers = client.containers(filters={"label": f"session_id={session_id}"}) + if not containers: + print("No container found to download from. Please run any code to start one.") + return + + container_id = containers[0]['Id'] + + # Define the local directory where the files will be downloaded. + # Using 'Open Interpreter' as the appname and no author. 
+ local_dir = appdirs.user_data_dir(appname="Open Interpreter") + + for file_path_in_container in args: + + if not file_path_in_container.startswith("/mnt/data"): + file_path_in_container = os.path.join("/mnt/data", file_path_in_container) + + # Construct the local file path + local_file_path = os.path.join(local_dir, os.path.basename(file_path_in_container)) + + # Attempt to download the file and handle exceptions + try: + download_file_from_container(container_id, file_path_in_container, local_file_path) + print(f"File downloaded to {local_file_path}") + except docker.errors.NotFound: + print(f"File {file_path_in_container} not found in the container.") + else: + print("File downloads are only used when using containerized code execution. Ignoring command.") - display_markdown_message(f"> messages json loaded from {os.path.abspath(json_path)}") def handle_count_tokens(self, prompt): messages = [{"role": "system", "message": self.system_message}] + self.messages @@ -124,17 +260,19 @@ def handle_count_tokens(self, prompt): def handle_magic_command(self, user_input): # split the command into the command and the arguments, by the first whitespace switch = { - "help": handle_help, - "debug": handle_debug, - "reset": handle_reset, - "save_message": handle_save_message, - "load_message": handle_load_message, - "undo": handle_undo, - "tokens": handle_count_tokens, + "help": handle_help, + "debug": handle_debug, + "reset": handle_reset, + "save_message": handle_save_message, + "load_message": handle_load_message, + "tokens": handle_count_tokens, + "undo": handle_undo, + "upload": handle_container_upload, + "download": handle_container_download, } user_input = user_input[1:].strip() # Capture the part after the `%` command = user_input.split(" ")[0] arguments = user_input[len(command):].strip() action = switch.get(command, default_handle) # Get the function from the dictionary, or default_handle if not found - action(self, arguments) # Execute the function + action(self, 
arguments) # Execute the function. diff --git a/interpreter/terminal_interface/terminal_interface.py b/interpreter/terminal_interface/terminal_interface.py index a65b3d63cc..0ba904c38c 100644 --- a/interpreter/terminal_interface/terminal_interface.py +++ b/interpreter/terminal_interface/terminal_interface.py @@ -10,7 +10,6 @@ from ..utils.truncate_output import truncate_output from ..utils.scan_code import scan_code - def terminal_interface(interpreter, message): if not interpreter.auto_run: interpreter_intro_message = [ @@ -172,4 +171,6 @@ def terminal_interface(interpreter, message): # (this cancels LLM, returns to the interactive "> " input) continue else: - break \ No newline at end of file + break + + \ No newline at end of file diff --git a/interpreter/utils/scan_code.py b/interpreter/utils/scan_code.py index fa5db98431..4148823e99 100644 --- a/interpreter/utils/scan_code.py +++ b/interpreter/utils/scan_code.py @@ -63,7 +63,7 @@ def scan_code(code, language, interpreter): if scan.returncode == 0: language_name = get_language_proper_name(language) print( - f" {'Code Scaner: ' if interpreter.safe_mode == 'auto' else ''}No issues were found in this {language_name} code." + f" {'Code Scaner: ' if interpreter.safe_mode == 'auto' else ''} No issues were found in this {language_name} code." ) print("") diff --git a/poetry.lock b/poetry.lock index b7b908b9f8..d4d50a06d3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -277,6 +277,28 @@ jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} six = ">=1.9.0" wcwidth = ">=0.1.4" +[[package]] +name = "boltons" +version = "21.0.0" +description = "When they're not builtins, they're boltons." 
+optional = false +python-versions = "*" +files = [ + {file = "boltons-21.0.0-py2.py3-none-any.whl", hash = "sha256:b9bb7b58b2b420bbe11a6025fdef6d3e5edc9f76a42fb467afe7ca212ef9948b"}, + {file = "boltons-21.0.0.tar.gz", hash = "sha256:65e70a79a731a7fe6e98592ecfb5ccf2115873d01dbc576079874629e5c90f13"}, +] + +[[package]] +name = "bracex" +version = "2.4" +description = "Bash style brace expander." +optional = false +python-versions = ">=3.8" +files = [ + {file = "bracex-2.4-py3-none-any.whl", hash = "sha256:efdc71eff95eaff5e0f8cfebe7d01adf2c8637c8c92edaf63ef348c241a82418"}, + {file = "bracex-2.4.tar.gz", hash = "sha256:a27eaf1df42cf561fed58b7a8f3fdf129d1ea16a81e1fadd1d17989bc6384beb"}, +] + [[package]] name = "certifi" version = "2023.7.22" @@ -479,6 +501,25 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "click-option-group" +version = "0.5.6" +description = "Option groups missing in Click" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "click-option-group-0.5.6.tar.gz", hash = "sha256:97d06703873518cc5038509443742b25069a3c7562d1ea72ff08bfadde1ce777"}, + {file = "click_option_group-0.5.6-py3-none-any.whl", hash = "sha256:38a26d963ee3ad93332ddf782f9259c5bdfe405e73408d943ef5e7d0c3767ec7"}, +] + +[package.dependencies] +Click = ">=7.0,<9" + +[package.extras] +docs = ["Pallets-Sphinx-Themes", "m2r2", "sphinx"] +tests = ["pytest"] +tests-cov = ["coverage", "coveralls", "pytest", "pytest-cov"] + [[package]] name = "colorama" version = "0.4.6" @@ -507,6 +548,38 @@ humanfriendly = ">=9.1" [package.extras] cron = ["capturer (>=2.4)"] +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = 
"sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] + +[[package]] +name = "docker" +version = "6.1.3" +description = "A Python library for the Docker Engine API." +optional = false +python-versions = ">=3.7" +files = [ + {file = "docker-6.1.3-py3-none-any.whl", hash = "sha256:aecd2277b8bf8e506e484f6ab7aec39abe0038e29fa4a6d3ba86c3fe01844ed9"}, + {file = "docker-6.1.3.tar.gz", hash = "sha256:aa6d17830045ba5ef0168d5eaa34d37beeb113948c413affe1d5991fc11f9a20"}, +] + +[package.dependencies] +packaging = ">=14.0" +pywin32 = {version = ">=304", markers = "sys_platform == \"win32\""} +requests = ">=2.26.0" +urllib3 = ">=1.26.0" +websocket-client = ">=0.32.0" + +[package.extras] +ssh = ["paramiko (>=2.4.3)"] + [[package]] name = "exceptiongroup" version = "1.1.3" @@ -521,6 +594,20 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "face" +version = "22.0.0" +description = "A command-line application framework (and CLI parser). Friendly for users, full-featured for developers." 
+optional = false +python-versions = "*" +files = [ + {file = "face-22.0.0-py3-none-any.whl", hash = "sha256:344fe31562d0f6f444a45982418f3793d4b14f9abb98ccca1509d22e0a3e7e35"}, + {file = "face-22.0.0.tar.gz", hash = "sha256:d5d692f90bc8f5987b636e47e36384b9bbda499aaf0a77aa0b0bbe834c76923d"}, +] + +[package.dependencies] +boltons = ">=20.0.0" + [[package]] name = "fastapi" version = "0.104.0" @@ -689,13 +776,13 @@ gitpython = "*" [[package]] name = "gitdb" -version = "4.0.10" +version = "4.0.11" description = "Git Object Database" optional = false python-versions = ">=3.7" files = [ - {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, - {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, + {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, ] [package.dependencies] @@ -718,6 +805,25 @@ gitdb = ">=4.0.1,<5" [package.extras] test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-sugar"] +[[package]] +name = "glom" +version = "22.1.0" +description = "A declarative object transformer and formatter, for conglomerating nested data." 
+optional = false +python-versions = "*" +files = [ + {file = "glom-22.1.0-py2.py3-none-any.whl", hash = "sha256:5339da206bf3532e01a83a35aca202960ea885156986d190574b779598e9e772"}, + {file = "glom-22.1.0.tar.gz", hash = "sha256:1510c6587a8f9c64a246641b70033cbc5ebde99f02ad245693678038e821aeb5"}, +] + +[package.dependencies] +attrs = "*" +boltons = ">=19.3.0" +face = ">=20.1.0" + +[package.extras] +yaml = ["PyYAML"] + [[package]] name = "grpcio" version = "1.59.0" @@ -993,6 +1099,41 @@ files = [ [package.dependencies] ansicon = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "jsonschema" +version = "4.19.1" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, + {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.7.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, + {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, +] + 
+[package.dependencies] +referencing = ">=0.28.0" + [[package]] name = "litellm" version = "0.8.6" @@ -1365,6 +1506,16 @@ files = [ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] +[[package]] +name = "peewee" +version = "3.17.0" +description = "a little orm" +optional = false +python-versions = "*" +files = [ + {file = "peewee-3.17.0.tar.gz", hash = "sha256:3a56967f28a43ca7a4287f4803752aeeb1a57a08dee2e839b99868181dfb5df8"}, +] + [[package]] name = "pluggy" version = "1.3.0" @@ -1633,6 +1784,67 @@ files = [ {file = "PyPika-0.48.9.tar.gz", hash = "sha256:838836a61747e7c8380cd1b7ff638694b7a7335345d0f559b04b2cd832ad5378"}, ] +[[package]] +name = "pyqt5" +version = "5.15.10" +description = "Python bindings for the Qt cross platform application toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyQt5-5.15.10-cp37-abi3-macosx_10_13_x86_64.whl", hash = "sha256:93288d62ebd47b1933d80c27f5d43c7c435307b84d480af689cef2474e87e4c8"}, + {file = "PyQt5-5.15.10-cp37-abi3-manylinux_2_17_x86_64.whl", hash = "sha256:b89478d16d4118664ff58ed609e0a804d002703c9420118de7e4e70fa1cb5486"}, + {file = "PyQt5-5.15.10-cp37-abi3-win32.whl", hash = "sha256:ff99b4f91aa8eb60510d5889faad07116d3340041916e46c07d519f7cad344e1"}, + {file = "PyQt5-5.15.10-cp37-abi3-win_amd64.whl", hash = "sha256:501355f327e9a2c38db0428e1a236d25ebcb99304cd6e668c05d1188d514adec"}, + {file = "PyQt5-5.15.10.tar.gz", hash = "sha256:d46b7804b1b10a4ff91753f8113e5b5580d2b4462f3226288e2d84497334898a"}, +] + +[package.dependencies] +PyQt5-Qt5 = ">=5.15.2" +PyQt5-sip = ">=12.13,<13" + +[[package]] +name = "pyqt5-qt5" +version = "5.15.2" +description = "The subset of a Qt installation needed by PyQt5." 
+optional = false +python-versions = "*" +files = [ + {file = "PyQt5_Qt5-5.15.2-py3-none-macosx_10_13_intel.whl", hash = "sha256:76980cd3d7ae87e3c7a33bfebfaee84448fd650bad6840471d6cae199b56e154"}, + {file = "PyQt5_Qt5-5.15.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:1988f364ec8caf87a6ee5d5a3a5210d57539988bf8e84714c7d60972692e2f4a"}, + {file = "PyQt5_Qt5-5.15.2-py3-none-win32.whl", hash = "sha256:9cc7a768b1921f4b982ebc00a318ccb38578e44e45316c7a4a850e953e1dd327"}, + {file = "PyQt5_Qt5-5.15.2-py3-none-win_amd64.whl", hash = "sha256:750b78e4dba6bdf1607febedc08738e318ea09e9b10aea9ff0d73073f11f6962"}, +] + +[[package]] +name = "pyqt5-sip" +version = "12.13.0" +description = "The sip module support for PyQt5" +optional = false +python-versions = ">=3.7" +files = [ + {file = "PyQt5_sip-12.13.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a7e3623b2c743753625c4650ec7696362a37fb36433b61824cf257f6d3d43cca"}, + {file = "PyQt5_sip-12.13.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6e4ac714252370ca037c7d609da92388057165edd4f94e63354f6d65c3ed9d53"}, + {file = "PyQt5_sip-12.13.0-cp310-cp310-win32.whl", hash = "sha256:d5032da3fff62da055104926ffe76fd6044c1221f8ad35bb60804bcb422fe866"}, + {file = "PyQt5_sip-12.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:9a8cdd6cb66adcbe5c941723ed1544eba05cf19b6c961851b58ccdae1c894afb"}, + {file = "PyQt5_sip-12.13.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f85fb633a522f04e48008de49dce1ff1d947011b48885b8428838973fbca412"}, + {file = "PyQt5_sip-12.13.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ec60162e034c42fb99859206d62b83b74f987d58937b3a82bdc07b5c3d190dec"}, + {file = "PyQt5_sip-12.13.0-cp311-cp311-win32.whl", hash = "sha256:205cd449d08a2b024a468fb6100cd7ed03e946b4f49706f508944006f955ae1a"}, + {file = "PyQt5_sip-12.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:1c8371682f77852256f1f2d38c41e2e684029f43330f0635870895ab01c02f6c"}, + {file = 
"PyQt5_sip-12.13.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7fe3375b508c5bc657d73b9896bba8a768791f1f426c68053311b046bcebdddf"}, + {file = "PyQt5_sip-12.13.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:773731b1b5ab1a7cf5621249f2379c95e3d2905e9bd96ff3611b119586daa876"}, + {file = "PyQt5_sip-12.13.0-cp312-cp312-win32.whl", hash = "sha256:fb4a5271fa3f6bc2feb303269a837a95a6d8dd16be553aa40e530de7fb81bfdf"}, + {file = "PyQt5_sip-12.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:3a4498f3b1b15f43f5d12963accdce0fd652b0bcaae6baf8008663365827444c"}, + {file = "PyQt5_sip-12.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b984c2620a7a7eaf049221b09ae50a345317add2624c706c7d2e9e6632a9587"}, + {file = "PyQt5_sip-12.13.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3188a06956aef86f604fb0d14421a110fad70d2a9e943dbacbfc3303f651dade"}, + {file = "PyQt5_sip-12.13.0-cp38-cp38-win32.whl", hash = "sha256:108a15f603e1886988c4b0d9d41cb74c9f9815bf05cefc843d559e8c298a10ce"}, + {file = "PyQt5_sip-12.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:db228cd737f5cbfc66a3c3e50042140cb80b30b52edc5756dbbaa2346ec73137"}, + {file = "PyQt5_sip-12.13.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5338773bbaedaa4f16a73c142fb23cc18c327be6c338813af70260b756c7bc92"}, + {file = "PyQt5_sip-12.13.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:29fa9cc964517c9fc3f94f072b9a2aeef4e7a2eda1879cb835d9e06971161cdf"}, + {file = "PyQt5_sip-12.13.0-cp39-cp39-win32.whl", hash = "sha256:96414c93f3d33963887cf562d50d88b955121fbfd73f937c8eca46643e77bf61"}, + {file = "PyQt5_sip-12.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:bbc7cd498bf19e0862097be1ad2243e824dea56726f00c11cff1b547c2d31d01"}, + {file = "PyQt5_sip-12.13.0.tar.gz", hash = "sha256:7f321daf84b9c9dbca61b80e1ef37bdaffc0e93312edae2cd7da25b953971d91"}, +] + [[package]] name = "pyreadline3" version = "3.4.1" @@ -1644,6 +1856,22 @@ files = [ {file = 
"pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, ] +[[package]] +name = "pysqlite3-binary" +version = "0.5.2.post1" +description = "DB-API 2.0 interface for Sqlite 3.x" +optional = false +python-versions = "*" +files = [ + {file = "pysqlite3_binary-0.5.2.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:668e7853b9e3db5c23b32a57634f658db5008fa1781121d2554a103c34775fe8"}, + {file = "pysqlite3_binary-0.5.2.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3748b00d927b2153a6c5f5d5cdefef11ca9e3ef1e7a87122e3b93c38aced68a9"}, + {file = "pysqlite3_binary-0.5.2.post1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00221940d874917e95ef0385b4c09c30d6b63fbe89d742ab0ef01229e76f834"}, + {file = "pysqlite3_binary-0.5.2.post1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbecaaf34bdbc04b98dfbca8ea85509b7d0b1e8302c150544065c268b6cf220c"}, + {file = "pysqlite3_binary-0.5.2.post1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0686824294a0a00b9c0d4def0572c7eb7d2334088f127d26c9f73191ddf75c"}, + {file = "pysqlite3_binary-0.5.2.post1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d83eefb3c20a51d1c36ce49d5fecc84e3f40c729f5f1a76c9e2cbd39f0420ff1"}, + {file = "pysqlite3_binary-0.5.2.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26f297d5e3c48a01483b215f485ac82e074e0716ef0a82aeb0491cba038af819"}, +] + [[package]] name = "pytest" version = "7.4.2" @@ -1706,6 +1934,46 @@ files = [ {file = "python_editor-1.0.4-py3-none-any.whl", hash = "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d"}, ] +[[package]] +name = "python-lsp-jsonrpc" +version = "1.0.0" +description = "JSON RPC 2.0 server library" +optional = false +python-versions = "*" +files = [ + {file = "python-lsp-jsonrpc-1.0.0.tar.gz", hash = 
"sha256:7bec170733db628d3506ea3a5288ff76aa33c70215ed223abdb0d95e957660bd"}, + {file = "python_lsp_jsonrpc-1.0.0-py3-none-any.whl", hash = "sha256:079b143be64b0a378bdb21dff5e28a8c1393fe7e8a654ef068322d754e545fc7"}, +] + +[package.dependencies] +ujson = ">=3.0.0" + +[package.extras] +test = ["coverage", "pycodestyle", "pyflakes", "pylint", "pytest", "pytest-cov"] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = 
"pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -1779,6 +2047,21 @@ files = [ [package.dependencies] setuptools = ">=41.0" +[[package]] +name = "referencing" +version = "0.30.2" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, + {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + [[package]] name = "regex" version = "2023.10.3" @@ -1915,6 +2198,214 @@ pygments = ">=2.13.0,<3.0.0" [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] +[[package]] +name = "rpds-py" +version = "0.10.6" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.10.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:6bdc11f9623870d75692cc33c59804b5a18d7b8a4b79ef0b00b773a27397d1f6"}, + {file = "rpds_py-0.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:26857f0f44f0e791f4a266595a7a09d21f6b589580ee0585f330aaccccb836e3"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7f5e15c953ace2e8dde9824bdab4bec50adb91a5663df08d7d994240ae6fa31"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61fa268da6e2e1cd350739bb61011121fa550aa2545762e3dc02ea177ee4de35"}, + {file = 
"rpds_py-0.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c48f3fbc3e92c7dd6681a258d22f23adc2eb183c8cb1557d2fcc5a024e80b094"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0503c5b681566e8b722fe8c4c47cce5c7a51f6935d5c7012c4aefe952a35eed"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:734c41f9f57cc28658d98270d3436dba65bed0cfc730d115b290e970150c540d"}, + {file = "rpds_py-0.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a5d7ed104d158c0042a6a73799cf0eb576dfd5fc1ace9c47996e52320c37cb7c"}, + {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e3df0bc35e746cce42579826b89579d13fd27c3d5319a6afca9893a9b784ff1b"}, + {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:73e0a78a9b843b8c2128028864901f55190401ba38aae685350cf69b98d9f7c9"}, + {file = "rpds_py-0.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5ed505ec6305abd2c2c9586a7b04fbd4baf42d4d684a9c12ec6110deefe2a063"}, + {file = "rpds_py-0.10.6-cp310-none-win32.whl", hash = "sha256:d97dd44683802000277bbf142fd9f6b271746b4846d0acaf0cefa6b2eaf2a7ad"}, + {file = "rpds_py-0.10.6-cp310-none-win_amd64.whl", hash = "sha256:b455492cab07107bfe8711e20cd920cc96003e0da3c1f91297235b1603d2aca7"}, + {file = "rpds_py-0.10.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:e8cdd52744f680346ff8c1ecdad5f4d11117e1724d4f4e1874f3a67598821069"}, + {file = "rpds_py-0.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66414dafe4326bca200e165c2e789976cab2587ec71beb80f59f4796b786a238"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc435d059f926fdc5b05822b1be4ff2a3a040f3ae0a7bbbe672babb468944722"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:8e7f2219cb72474571974d29a191714d822e58be1eb171f229732bc6fdedf0ac"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3953c6926a63f8ea5514644b7afb42659b505ece4183fdaaa8f61d978754349e"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2bb2e4826be25e72013916eecd3d30f66fd076110de09f0e750163b416500721"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bf347b495b197992efc81a7408e9a83b931b2f056728529956a4d0858608b80"}, + {file = "rpds_py-0.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:102eac53bb0bf0f9a275b438e6cf6904904908562a1463a6fc3323cf47d7a532"}, + {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40f93086eef235623aa14dbddef1b9fb4b22b99454cb39a8d2e04c994fb9868c"}, + {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e22260a4741a0e7a206e175232867b48a16e0401ef5bce3c67ca5b9705879066"}, + {file = "rpds_py-0.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f4e56860a5af16a0fcfa070a0a20c42fbb2012eed1eb5ceeddcc7f8079214281"}, + {file = "rpds_py-0.10.6-cp311-none-win32.whl", hash = "sha256:0774a46b38e70fdde0c6ded8d6d73115a7c39d7839a164cc833f170bbf539116"}, + {file = "rpds_py-0.10.6-cp311-none-win_amd64.whl", hash = "sha256:4a5ee600477b918ab345209eddafde9f91c0acd931f3776369585a1c55b04c57"}, + {file = "rpds_py-0.10.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:5ee97c683eaface61d38ec9a489e353d36444cdebb128a27fe486a291647aff6"}, + {file = "rpds_py-0.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0713631d6e2d6c316c2f7b9320a34f44abb644fc487b77161d1724d883662e31"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5a53f5998b4bbff1cb2e967e66ab2addc67326a274567697379dd1e326bded7"}, + {file = 
"rpds_py-0.10.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6a555ae3d2e61118a9d3e549737bb4a56ff0cec88a22bd1dfcad5b4e04759175"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:945eb4b6bb8144909b203a88a35e0a03d22b57aefb06c9b26c6e16d72e5eb0f0"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52c215eb46307c25f9fd2771cac8135d14b11a92ae48d17968eda5aa9aaf5071"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1b3cd23d905589cb205710b3988fc8f46d4a198cf12862887b09d7aaa6bf9b9"}, + {file = "rpds_py-0.10.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64ccc28683666672d7c166ed465c09cee36e306c156e787acef3c0c62f90da5a"}, + {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:516a611a2de12fbea70c78271e558f725c660ce38e0006f75139ba337d56b1f6"}, + {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:9ff93d3aedef11f9c4540cf347f8bb135dd9323a2fc705633d83210d464c579d"}, + {file = "rpds_py-0.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d858532212f0650be12b6042ff4378dc2efbb7792a286bee4489eaa7ba010586"}, + {file = "rpds_py-0.10.6-cp312-none-win32.whl", hash = "sha256:3c4eff26eddac49d52697a98ea01b0246e44ca82ab09354e94aae8823e8bda02"}, + {file = "rpds_py-0.10.6-cp312-none-win_amd64.whl", hash = "sha256:150eec465dbc9cbca943c8e557a21afdcf9bab8aaabf386c44b794c2f94143d2"}, + {file = "rpds_py-0.10.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:cf693eb4a08eccc1a1b636e4392322582db2a47470d52e824b25eca7a3977b53"}, + {file = "rpds_py-0.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4134aa2342f9b2ab6c33d5c172e40f9ef802c61bb9ca30d21782f6e035ed0043"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e782379c2028a3611285a795b89b99a52722946d19fc06f002f8b53e3ea26ea9"}, 
+ {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f6da6d842195fddc1cd34c3da8a40f6e99e4a113918faa5e60bf132f917c247"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b4a9fe992887ac68256c930a2011255bae0bf5ec837475bc6f7edd7c8dfa254e"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b788276a3c114e9f51e257f2a6f544c32c02dab4aa7a5816b96444e3f9ffc336"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:caa1afc70a02645809c744eefb7d6ee8fef7e2fad170ffdeacca267fd2674f13"}, + {file = "rpds_py-0.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bddd4f91eede9ca5275e70479ed3656e76c8cdaaa1b354e544cbcf94c6fc8ac4"}, + {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:775049dfa63fb58293990fc59473e659fcafd953bba1d00fc5f0631a8fd61977"}, + {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c6c45a2d2b68c51fe3d9352733fe048291e483376c94f7723458cfd7b473136b"}, + {file = "rpds_py-0.10.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0699ab6b8c98df998c3eacf51a3b25864ca93dab157abe358af46dc95ecd9801"}, + {file = "rpds_py-0.10.6-cp38-none-win32.whl", hash = "sha256:ebdab79f42c5961682654b851f3f0fc68e6cc7cd8727c2ac4ffff955154123c1"}, + {file = "rpds_py-0.10.6-cp38-none-win_amd64.whl", hash = "sha256:24656dc36f866c33856baa3ab309da0b6a60f37d25d14be916bd3e79d9f3afcf"}, + {file = "rpds_py-0.10.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:0898173249141ee99ffcd45e3829abe7bcee47d941af7434ccbf97717df020e5"}, + {file = "rpds_py-0.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e9184fa6c52a74a5521e3e87badbf9692549c0fcced47443585876fcc47e469"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5752b761902cd15073a527b51de76bbae63d938dc7c5c4ad1e7d8df10e765138"}, + {file 
= "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99a57006b4ec39dbfb3ed67e5b27192792ffb0553206a107e4aadb39c5004cd5"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09586f51a215d17efdb3a5f090d7cbf1633b7f3708f60a044757a5d48a83b393"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e225a6a14ecf44499aadea165299092ab0cba918bb9ccd9304eab1138844490b"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2039f8d545f20c4e52713eea51a275e62153ee96c8035a32b2abb772b6fc9e5"}, + {file = "rpds_py-0.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:34ad87a831940521d462ac11f1774edf867c34172010f5390b2f06b85dcc6014"}, + {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dcdc88b6b01015da066da3fb76545e8bb9a6880a5ebf89e0f0b2e3ca557b3ab7"}, + {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:25860ed5c4e7f5e10c496ea78af46ae8d8468e0be745bd233bab9ca99bfd2647"}, + {file = "rpds_py-0.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7854a207ef77319ec457c1eb79c361b48807d252d94348305db4f4b62f40f7f3"}, + {file = "rpds_py-0.10.6-cp39-none-win32.whl", hash = "sha256:e6fcc026a3f27c1282c7ed24b7fcac82cdd70a0e84cc848c0841a3ab1e3dea2d"}, + {file = "rpds_py-0.10.6-cp39-none-win_amd64.whl", hash = "sha256:e98c4c07ee4c4b3acf787e91b27688409d918212dfd34c872201273fdd5a0e18"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:68fe9199184c18d997d2e4293b34327c0009a78599ce703e15cd9a0f47349bba"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3339eca941568ed52d9ad0f1b8eb9fe0958fa245381747cecf2e9a78a5539c42"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a360cfd0881d36c6dc271992ce1eda65dba5e9368575663de993eeb4523d895f"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:031f76fc87644a234883b51145e43985aa2d0c19b063e91d44379cd2786144f8"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f36a9d751f86455dc5278517e8b65580eeee37d61606183897f122c9e51cef3"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:052a832078943d2b2627aea0d19381f607fe331cc0eb5df01991268253af8417"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023574366002bf1bd751ebaf3e580aef4a468b3d3c216d2f3f7e16fdabd885ed"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:defa2c0c68734f4a82028c26bcc85e6b92cced99866af118cd6a89b734ad8e0d"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:879fb24304ead6b62dbe5034e7b644b71def53c70e19363f3c3be2705c17a3b4"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:53c43e10d398e365da2d4cc0bcaf0854b79b4c50ee9689652cdc72948e86f487"}, + {file = "rpds_py-0.10.6-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:3777cc9dea0e6c464e4b24760664bd8831738cc582c1d8aacf1c3f546bef3f65"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:40578a6469e5d1df71b006936ce95804edb5df47b520c69cf5af264d462f2cbb"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:cf71343646756a072b85f228d35b1d7407da1669a3de3cf47f8bbafe0c8183a4"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10f32b53f424fc75ff7b713b2edb286fdbfc94bf16317890260a81c2c00385dc"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:81de24a1c51cfb32e1fbf018ab0bdbc79c04c035986526f76c33e3f9e0f3356c"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac17044876e64a8ea20ab132080ddc73b895b4abe9976e263b0e30ee5be7b9c2"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e8a78bd4879bff82daef48c14d5d4057f6856149094848c3ed0ecaf49f5aec2"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78ca33811e1d95cac8c2e49cb86c0fb71f4d8409d8cbea0cb495b6dbddb30a55"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c63c3ef43f0b3fb00571cff6c3967cc261c0ebd14a0a134a12e83bdb8f49f21f"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:7fde6d0e00b2fd0dbbb40c0eeec463ef147819f23725eda58105ba9ca48744f4"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:79edd779cfc46b2e15b0830eecd8b4b93f1a96649bcb502453df471a54ce7977"}, + {file = "rpds_py-0.10.6-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9164ec8010327ab9af931d7ccd12ab8d8b5dc2f4c6a16cbdd9d087861eaaefa1"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d29ddefeab1791e3c751e0189d5f4b3dbc0bbe033b06e9c333dca1f99e1d523e"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:30adb75ecd7c2a52f5e76af50644b3e0b5ba036321c390b8e7ec1bb2a16dd43c"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd609fafdcdde6e67a139898196698af37438b035b25ad63704fd9097d9a3482"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6eef672de005736a6efd565577101277db6057f65640a813de6c2707dc69f396"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:6cf4393c7b41abbf07c88eb83e8af5013606b1cdb7f6bc96b1b3536b53a574b8"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad857f42831e5b8d41a32437f88d86ead6c191455a3499c4b6d15e007936d4cf"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d7360573f1e046cb3b0dceeb8864025aa78d98be4bb69f067ec1c40a9e2d9df"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d08f63561c8a695afec4975fae445245386d645e3e446e6f260e81663bfd2e38"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:f0f17f2ce0f3529177a5fff5525204fad7b43dd437d017dd0317f2746773443d"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:442626328600bde1d09dc3bb00434f5374948838ce75c41a52152615689f9403"}, + {file = "rpds_py-0.10.6-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e9616f5bd2595f7f4a04b67039d890348ab826e943a9bfdbe4938d0eba606971"}, + {file = "rpds_py-0.10.6.tar.gz", hash = "sha256:4ce5a708d65a8dbf3748d2474b580d606b1b9f91b5c6ab2a316e0b0cf7a4ba50"}, +] + +[[package]] +name = "ruamel-yaml" +version = "0.17.40" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +optional = false +python-versions = ">=3" +files = [ + {file = "ruamel.yaml-0.17.40-py3-none-any.whl", hash = "sha256:b16b6c3816dff0a93dca12acf5e70afd089fa5acb80604afd1ffa8b465b7722c"}, + {file = "ruamel.yaml-0.17.40.tar.gz", hash = "sha256:6024b986f06765d482b5b07e086cc4b4cd05dd22ddcbc758fa23d54873cf313d"}, +] + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} + +[package.extras] +docs = ["mercurial (>5.7)", "ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.8" 
+description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" +optional = false +python-versions = ">=3.6" +files = [ + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = 
"sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, + {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, + {file = 
"ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, + {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +] + +[[package]] +name = "semgrep" +version = "1.45.0" +description = "Lightweight static analysis for many languages. Find bug variants with patterns that look like source code." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "semgrep-1.45.0-cp37.cp38.cp39.cp310.cp311.py37.py38.py39.py310.py311-none-any.whl", hash = "sha256:b466501971f9491ab089d01e29dec6fab404b5f99e1279c888d4a8e6aac3b443"}, + {file = "semgrep-1.45.0-cp37.cp38.cp39.cp310.cp311.py37.py38.py39.py310.py311-none-macosx_10_14_x86_64.whl", hash = "sha256:4f2bc7482746d3383d909d46a0d878184580eac1b2cafe65c4192f2d226d3df5"}, + {file = "semgrep-1.45.0-cp37.cp38.cp39.cp310.cp311.py37.py38.py39.py310.py311-none-macosx_11_0_arm64.whl", hash = "sha256:7bac4ac8c613ba9851cce28537117636f7356489324cb55503a6f90be51d6e91"}, + {file = "semgrep-1.45.0-cp37.cp38.cp39.cp310.cp311.py37.py38.py39.py310.py311-none-musllinux_1_0_aarch64.manylinux2014_aarch64.whl", hash = "sha256:519aa0752d206b2442895be6ba279d4826783f1db0b994f87a5855bdce310078"}, + {file = "semgrep-1.45.0.tar.gz", hash = "sha256:f2efad4236a0cf8b397e8f367b49d77a5ea0ec92de518f158247160041dbd980"}, +] + +[package.dependencies] +attrs = ">=21.3" +boltons = ">=21.0,<22.0" +click = ">=8.1,<9.0" +click-option-group = ">=0.5,<1.0" +colorama = ">=0.4.0,<0.5.0" +defusedxml = ">=0.7.1,<0.8.0" +glom = ">=22.1,<23.0" +jsonschema = ">=4.6,<5.0" +packaging = ">=21.0" +peewee = ">=3.14,<4.0" +python-lsp-jsonrpc = ">=1.0.0,<1.1.0" +requests = ">=2.22,<3.0" +rich = ">=12.6.0" +"ruamel.yaml" = ">=0.16.0,<0.18" +tomli = ">=2.0.1,<2.1.0" +typing-extensions = ">=4.2,<5.0" +urllib3 = ">=1.26,<2.0" +wcmatch = ">=8.3,<9.0" + +[package.extras] +experiments = ["jsonnet (>=0.18,<1.0)"] + [[package]] name = "setuptools" version = "68.2.2" @@ -2246,22 +2737,91 @@ files = [ {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] +[[package]] +name = "ujson" +version = "5.8.0" +description = "Ultra fast JSON encoder and decoder for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ujson-5.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:f4511560d75b15ecb367eef561554959b9d49b6ec3b8d5634212f9fed74a6df1"}, + {file = "ujson-5.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9399eaa5d1931a0ead49dce3ffacbea63f3177978588b956036bfe53cdf6af75"}, + {file = "ujson-5.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4e7bb7eba0e1963f8b768f9c458ecb193e5bf6977090182e2b4f4408f35ac76"}, + {file = "ujson-5.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40931d7c08c4ce99adc4b409ddb1bbb01635a950e81239c2382cfe24251b127a"}, + {file = "ujson-5.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d53039d39de65360e924b511c7ca1a67b0975c34c015dd468fca492b11caa8f7"}, + {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bdf04c6af3852161be9613e458a1fb67327910391de8ffedb8332e60800147a2"}, + {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a70f776bda2e5072a086c02792c7863ba5833d565189e09fabbd04c8b4c3abba"}, + {file = "ujson-5.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f26629ac531d712f93192c233a74888bc8b8212558bd7d04c349125f10199fcf"}, + {file = "ujson-5.8.0-cp310-cp310-win32.whl", hash = "sha256:7ecc33b107ae88405aebdb8d82c13d6944be2331ebb04399134c03171509371a"}, + {file = "ujson-5.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:3b27a8da7a080add559a3b73ec9ebd52e82cc4419f7c6fb7266e62439a055ed0"}, + {file = "ujson-5.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:193349a998cd821483a25f5df30b44e8f495423840ee11b3b28df092ddfd0f7f"}, + {file = "ujson-5.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ddeabbc78b2aed531f167d1e70387b151900bc856d61e9325fcdfefb2a51ad8"}, + {file = "ujson-5.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ce24909a9c25062e60653073dd6d5e6ec9d6ad7ed6e0069450d5b673c854405"}, + {file = "ujson-5.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:27a2a3c7620ebe43641e926a1062bc04e92dbe90d3501687957d71b4bdddaec4"}, + {file = "ujson-5.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b852bdf920fe9f84e2a2c210cc45f1b64f763b4f7d01468b33f7791698e455e"}, + {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:20768961a6a706170497129960762ded9c89fb1c10db2989c56956b162e2a8a3"}, + {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e0147d41e9fb5cd174207c4a2895c5e24813204499fd0839951d4c8784a23bf5"}, + {file = "ujson-5.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e3673053b036fd161ae7a5a33358ccae6793ee89fd499000204676baafd7b3aa"}, + {file = "ujson-5.8.0-cp311-cp311-win32.whl", hash = "sha256:a89cf3cd8bf33a37600431b7024a7ccf499db25f9f0b332947fbc79043aad879"}, + {file = "ujson-5.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3659deec9ab9eb19e8646932bfe6fe22730757c4addbe9d7d5544e879dc1b721"}, + {file = "ujson-5.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:102bf31c56f59538cccdfec45649780ae00657e86247c07edac434cb14d5388c"}, + {file = "ujson-5.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:299a312c3e85edee1178cb6453645217ba23b4e3186412677fa48e9a7f986de6"}, + {file = "ujson-5.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2e385a7679b9088d7bc43a64811a7713cc7c33d032d020f757c54e7d41931ae"}, + {file = "ujson-5.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad24ec130855d4430a682c7a60ca0bc158f8253ec81feed4073801f6b6cb681b"}, + {file = "ujson-5.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16fde596d5e45bdf0d7de615346a102510ac8c405098e5595625015b0d4b5296"}, + {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6d230d870d1ce03df915e694dcfa3f4e8714369cce2346686dbe0bc8e3f135e7"}, + {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_i686.whl", 
hash = "sha256:9571de0c53db5cbc265945e08f093f093af2c5a11e14772c72d8e37fceeedd08"}, + {file = "ujson-5.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7cba16b26efe774c096a5e822e4f27097b7c81ed6fb5264a2b3f5fd8784bab30"}, + {file = "ujson-5.8.0-cp312-cp312-win32.whl", hash = "sha256:48c7d373ff22366eecfa36a52b9b55b0ee5bd44c2b50e16084aa88b9de038916"}, + {file = "ujson-5.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:5ac97b1e182d81cf395ded620528c59f4177eee024b4b39a50cdd7b720fdeec6"}, + {file = "ujson-5.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2a64cc32bb4a436e5813b83f5aab0889927e5ea1788bf99b930fad853c5625cb"}, + {file = "ujson-5.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e54578fa8838ddc722539a752adfce9372474114f8c127bb316db5392d942f8b"}, + {file = "ujson-5.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9721cd112b5e4687cb4ade12a7b8af8b048d4991227ae8066d9c4b3a6642a582"}, + {file = "ujson-5.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d9707e5aacf63fb919f6237d6490c4e0244c7f8d3dc2a0f84d7dec5db7cb54c"}, + {file = "ujson-5.8.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0be81bae295f65a6896b0c9030b55a106fb2dec69ef877253a87bc7c9c5308f7"}, + {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ae7f4725c344bf437e9b881019c558416fe84ad9c6b67426416c131ad577df67"}, + {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9ab282d67ef3097105552bf151438b551cc4bedb3f24d80fada830f2e132aeb9"}, + {file = "ujson-5.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:94c7bd9880fa33fcf7f6d7f4cc032e2371adee3c5dba2922b918987141d1bf07"}, + {file = "ujson-5.8.0-cp38-cp38-win32.whl", hash = "sha256:bf5737dbcfe0fa0ac8fa599eceafae86b376492c8f1e4b84e3adf765f03fb564"}, + {file = "ujson-5.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:11da6bed916f9bfacf13f4fc6a9594abd62b2bb115acfb17a77b0f03bee4cfd5"}, + {file = 
"ujson-5.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:69b3104a2603bab510497ceabc186ba40fef38ec731c0ccaa662e01ff94a985c"}, + {file = "ujson-5.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9249fdefeb021e00b46025e77feed89cd91ffe9b3a49415239103fc1d5d9c29a"}, + {file = "ujson-5.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2873d196725a8193f56dde527b322c4bc79ed97cd60f1d087826ac3290cf9207"}, + {file = "ujson-5.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4dafa9010c366589f55afb0fd67084acd8added1a51251008f9ff2c3e44042"}, + {file = "ujson-5.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a42baa647a50fa8bed53d4e242be61023bd37b93577f27f90ffe521ac9dc7a3"}, + {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f3554eaadffe416c6f543af442066afa6549edbc34fe6a7719818c3e72ebfe95"}, + {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fb87decf38cc82bcdea1d7511e73629e651bdec3a43ab40985167ab8449b769c"}, + {file = "ujson-5.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:407d60eb942c318482bbfb1e66be093308bb11617d41c613e33b4ce5be789adc"}, + {file = "ujson-5.8.0-cp39-cp39-win32.whl", hash = "sha256:0fe1b7edaf560ca6ab023f81cbeaf9946a240876a993b8c5a21a1c539171d903"}, + {file = "ujson-5.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:3f9b63530a5392eb687baff3989d0fb5f45194ae5b1ca8276282fb647f8dcdb3"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:efeddf950fb15a832376c0c01d8d7713479fbeceaed1eaecb2665aa62c305aec"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d8283ac5d03e65f488530c43d6610134309085b71db4f675e9cf5dff96a8282"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb0142f6f10f57598655340a3b2c70ed4646cbe674191da195eb0985a9813b83"}, + {file = 
"ujson-5.8.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d459aca895eb17eb463b00441986b021b9312c6c8cc1d06880925c7f51009c"}, + {file = "ujson-5.8.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d524a8c15cfc863705991d70bbec998456a42c405c291d0f84a74ad7f35c5109"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d6f84a7a175c75beecde53a624881ff618e9433045a69fcfb5e154b73cdaa377"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b748797131ac7b29826d1524db1cc366d2722ab7afacc2ce1287cdafccddbf1f"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e72ba76313d48a1a3a42e7dc9d1db32ea93fac782ad8dde6f8b13e35c229130"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f504117a39cb98abba4153bf0b46b4954cc5d62f6351a14660201500ba31fe7f"}, + {file = "ujson-5.8.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8c91b6f4bf23f274af9002b128d133b735141e867109487d17e344d38b87d94"}, + {file = "ujson-5.8.0.tar.gz", hash = "sha256:78e318def4ade898a461b3d92a79f9441e7e0e4d2ad5419abed4336d702c7425"}, +] + [[package]] name = "urllib3" -version = "2.0.7" +version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.7" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-2.0.7-py3-none-any.whl", hash = "sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e"}, - {file = "urllib3-2.0.7.tar.gz", hash = "sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84"}, + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "uvicorn" @@ -2425,6 +2985,20 @@ files = [ [package.dependencies] anyio = ">=3.0.0" +[[package]] +name = "wcmatch" +version = "8.5" +description = "Wildcard/glob file name matcher." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "wcmatch-8.5-py3-none-any.whl", hash = "sha256:14554e409b142edeefab901dc68ad570b30a72a8ab9a79106c5d5e9a6d241bd5"}, + {file = "wcmatch-8.5.tar.gz", hash = "sha256:86c17572d0f75cbf3bcb1a18f3bf2f9e72b39a9c08c9b4a74e991e1882a8efb3"}, +] + +[package.dependencies] +bracex = ">=2.1.1" + [[package]] name = "wcwidth" version = "0.2.8" @@ -2436,6 +3010,22 @@ files = [ {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, ] +[[package]] +name = "websocket-client" +version = "1.6.4" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket-client-1.6.4.tar.gz", hash = "sha256:b3324019b3c28572086c4a319f91d1dcd44e6e11cd340232978c684a7650d0df"}, + {file = "websocket_client-1.6.4-py3-none-any.whl", hash = "sha256:084072e0a7f5f347ef2ac3d8698a5e0b4ffbfcab607628cadabc650fc9a83a24"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + [[package]] name = "websockets" version = "11.0.3" @@ -2644,4 +3234,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "2ea4e2d34f63c6d07bbf5adcc477ab74cb6991270c1f319a45ce2c7a209459a1" +content-hash = "0f23835864e7762730f6b215e086842606f6afeb1d78a0959a4eaee0bd4001fd" diff --git a/pyproject.toml b/pyproject.toml index 6123fe61c1..bd4ae4c0b9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,10 +25,18 @@ wget = "^3.2" huggingface-hub = "^0.17.3" litellm = "0.8.6" pyyaml = "^6.0.1" +docker = "^6.1.3" +semgrep = "^1.41.0" yaspin = "^3.0.1" +pyqt5-qt5 = "5.15.2" +pyqt5 = "5.15.10" ooba = "^0.0.21" chroma = "^0.2.0" chromadb = "^0.4.14" +pysqlite3-binary = "^0.5.2.post1" +[tool.poetry.dependencies.pyreadline3] +version = "^3.4.1" +markers = 
"sys_platform == 'win32'" # DISABLED # but perhaps we should re-enable soon. Windows + readline errors sometimes, need more testing # On non-windows systems, you can just `import readline`. diff --git a/tests/test_interpreter.py b/tests/test_interpreter.py index ad95506302..bd6eda06da 100644 --- a/tests/test_interpreter.py +++ b/tests/test_interpreter.py @@ -2,24 +2,25 @@ from random import randint import time import pytest -import interpreter +import interpreter as i from interpreter.utils.count_tokens import count_tokens, count_messages_tokens import time -interpreter.auto_run = True -interpreter.model = "gpt-4" -interpreter.temperature = 0 - # this function will run before each test # we're clearing out the messages Array so we can start fresh and reduce token usage -def setup_function(): + +@pytest.fixture(scope="function") # This will make the interpreter instance available to all test cases. +def interpreter(): + interpreter = i.create_interpreter() interpreter.reset() interpreter.temperature = 0 interpreter.auto_run = True interpreter.model = "gpt-4" interpreter.debug_mode = False + yield interpreter + # this function will run after each test # we're introducing some sleep to help avoid timeout issues with the OpenAI API @@ -27,7 +28,7 @@ def teardown_function(): time.sleep(5) -def test_config_loading(): +def test_config_loading(interpreter): # because our test is running from the root directory, we need to do some # path manipulation to get the actual path to the config file or our config # loader will try to load from the wrong directory and fail @@ -43,7 +44,7 @@ def test_config_loading(): assert temperature_ok and model_ok and debug_mode_ok -def test_system_message_appending(): +def test_system_message_appending(interpreter): ping_system_message = ( "Respond to a `ping` with a `pong`. No code. No explanations. Just `pong`." 
) @@ -61,12 +62,12 @@ def test_system_message_appending(): ] -def test_reset(): +def test_reset(interpreter): # make sure that interpreter.reset() clears out the messages Array assert interpreter.messages == [] -def test_token_counter(): +def test_token_counter(interpreter): system_tokens = count_tokens(text=interpreter.system_message, model=interpreter.model) prompt = "How many tokens is this?" @@ -88,20 +89,22 @@ def test_token_counter(): assert system_tokens_ok and prompt_tokens_ok -def test_hello_world(): +def test_hello_world(interpreter): hello_world_response = "Hello, World!" hello_world_message = f"Please reply with just the words {hello_world_response} and nothing else. Do not run code. No confirmation just the text." messages = interpreter.chat(hello_world_message) + print(messages) + assert messages == [ {"role": "user", "message": hello_world_message}, {"role": "assistant", "message": hello_world_response}, ] @pytest.mark.skip(reason="Math is hard") -def test_math(): +def test_math(interpreter): # we'll generate random integers between this min and max in our math tests min_number = randint(1, 99) max_number = randint(1001, 9999) @@ -122,19 +125,19 @@ def test_math(): assert str(round(test_result, 2)) in messages[-1]["message"] -def test_delayed_exec(): +def test_delayed_exec(interpreter): interpreter.chat( """Can you write a single block of code and run_code it that prints something, then delays 1 second, then prints something else? No talk just code. Thanks!""" ) @pytest.mark.skip(reason="This works fine when I run it but fails frequently in Github Actions... will look into it after the hackathon") -def test_nested_loops_and_multiple_newlines(): +def test_nested_loops_and_multiple_newlines(interpreter): interpreter.chat( """Can you write a nested for loop in python and shell and run them? Don't forget to properly format your shell script and use semicolons where necessary. Also put 1-3 newlines between each line in the code. 
Only generate and execute the code. No explanations. Thanks!""" ) -def test_markdown(): +def test_markdown(interpreter): interpreter.chat( """Hi, can you test out a bunch of markdown features? Try writing a fenced code block, a table, headers, everything. DO NOT write the markdown inside a markdown code block, just write it raw.""" )