Commit 70dc8ea

Merge pull request #118 from iKonoTelecomunicaciones/117-new-chatgpt-assistant-node

feat(node): ✨ Added new node GPT assistant
egalvis39 committed May 22, 2024
2 parents e2f1c51 + cba5384 commit 70dc8ea
Showing 11 changed files with 192 additions and 6 deletions.
12 changes: 12 additions & 0 deletions menuflow/flow.py
@@ -12,6 +12,7 @@
CheckTime,
Delay,
Email,
GPTAssistant,
HTTPRequest,
Input,
InteractiveInput,
@@ -45,6 +46,7 @@
Subroutine,
Switch,
Delay,
GPTAssistant,
),
)

@@ -215,6 +217,16 @@ def node(self, room: Room) -> Node | None:
            node_initialized = Delay(
                delay_node_data=node_data, room=room, default_variables=self.flow_variables
            )
        elif node_data.get("type") == "gpt_assistant":
            if GPTAssistant.assistant_cache.get((room.room_id, room.route.id)):
                node_initialized = GPTAssistant.assistant_cache.get((room.room_id, room.route.id))
            else:
                node_initialized = GPTAssistant(
                    gpt_assistant_node_data=node_data,
                    room=room,
                    default_variables=self.flow_variables,
                )
                GPTAssistant.assistant_cache[(room.room_id, room.route.id)] = node_initialized
        else:
            return

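The hunk above builds one GPTAssistant per (room, route) pair and memoizes it in `GPTAssistant.assistant_cache`, so the same OpenAI assistant and thread are reused across messages in a conversation. A minimal standalone sketch of that cache-or-create pattern (the `CacheKey` alias and `build_node` factory are illustrative names, not part of menuflow):

```python
from typing import Callable, Dict, Tuple

CacheKey = Tuple[str, int]  # (room_id, route_id), mirroring the cache key above
_cache: Dict[CacheKey, object] = {}


def get_or_create(key: CacheKey, build_node: Callable[[], object]) -> object:
    node = _cache.get(key)
    if node is None:
        # Built lazily: constructing the real node opens an OpenAI client and
        # thread, so the factory is only called on a cache miss.
        node = build_node()
        _cache[key] = node
    return node
```
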
6 changes: 5 additions & 1 deletion menuflow/matrix.py
@@ -18,7 +18,7 @@

from .config import Config
from .db.route import RouteState
from .nodes import Base, Input, InteractiveInput
from .nodes import Base, GPTAssistant, Input, InteractiveInput
from .room import Room
from .user import User
from .utils import Util
@@ -170,7 +170,11 @@ async def handle_join(self, evt: StrippedStateEvent):
        room.config = self.config
        room.matrix_client = self

        # Clean up the room state and drop any cached GPT assistant for this room/route
        await room.clean_up()
        if (room.room_id, room.route.id) in GPTAssistant.assistant_cache:
            del GPTAssistant.assistant_cache[(room.room_id, room.route.id)]

        await self.load_room_constants(evt.room_id)
        await self.algorithm(room=room)

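On join, the cached assistant for that (room, route) pair is discarded so the next gpt_assistant node starts a fresh OpenAI thread. A compact equivalent of the membership check plus `del` above (a sketch, not the committed code):

```python
# dict.pop with a default removes the entry if present and is a no-op otherwise.
GPTAssistant.assistant_cache.pop((room.room_id, room.route.id), None)
```
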
2 changes: 1 addition & 1 deletion menuflow/middlewares/llm.py
@@ -1,6 +1,6 @@
from typing import Dict, Tuple

from aiohttp import ClientTimeout, ContentTypeError, FormData
from aiohttp import ClientTimeout, ContentTypeError
from mautrix.util.config import RecursiveDict
from ruamel.yaml.comments import CommentedMap

1 change: 1 addition & 0 deletions menuflow/nodes/__init__.py
@@ -2,6 +2,7 @@
from .check_time import CheckTime
from .delay import Delay
from .email import Email
from .gpt_assistant import GPTAssistant
from .http_request import HTTPRequest
from .input import Input
from .interactive_input import InteractiveInput
123 changes: 123 additions & 0 deletions menuflow/nodes/gpt_assistant.py
@@ -0,0 +1,123 @@
import html
import json
import re
from asyncio import sleep
from typing import Dict, Optional, Tuple

import openai
from mautrix.types import RoomID

from ..db.route import RouteState
from ..repository import GPTAssistant as GPTAssistantModel
from ..room import Room
from .base import Base


class GPTAssistant(Base):
    # Live nodes keyed by (room_id, route_id), so the same assistant and thread
    # are reused for the whole conversation in a room.
    assistant_cache: Dict[Tuple[RoomID, int], "GPTAssistant"] = {}

    def __init__(
        self, gpt_assistant_node_data: GPTAssistantModel, room: Room, default_variables: Dict
    ) -> None:
        Base.__init__(self, room=room, default_variables=default_variables)
        self.log = self.log.getChild(gpt_assistant_node_data.get("id"))
        self.content: Dict = gpt_assistant_node_data
        self.client = openai.OpenAI(api_key=self.api_key)
        self.setup_assistant()

    @property
    def name(self) -> str:
        return self.render_data(data=self.content.get("name", ""))

    @property
    def instructions(self) -> str:
        return self.render_data(data=self.content.get("instructions", ""))

    @property
    def model(self) -> str:
        return self.render_data(data=self.content.get("model", ""))

    @property
    def assistant_id(self) -> str:
        return self.render_data(data=self.content.get("assistant_id", ""))

    @property
    def api_key(self) -> str:
        return self.render_data(data=self.content.get("api_key", ""))

    @property
    def user_input(self) -> str:
        return self.render_data(data=self.content.get("user_input", ""))

    @property
    def variable(self) -> str:
        return self.render_data(data=self.content.get("variable", ""))

    @property
    def o_connection(self) -> str:
        return self.render_data(data=self.content.get("o_connection", ""))

    def setup_assistant(self):
        # Reuse an existing assistant when an ID is provided; otherwise create one.
        if self.assistant_id:
            self.assistant = self.client.beta.assistants.retrieve(self.assistant_id)
        else:
            self.assistant = self.client.beta.assistants.create(
                name=self.name,
                instructions=self.instructions,
                tools=[{"type": "code_interpreter"}],
                model=self.model,
            )

        self.thread = self.client.beta.threads.create()

    def add_message(self, content: str):
        self.client.beta.threads.messages.create(
            thread_id=self.thread.id,
            role="user",
            content=content,
        )

    async def run_assistant(self, instructions: Optional[str] = None) -> str:
        # Runs the assistant with the given thread and assistant IDs.
        run = self.client.beta.threads.runs.create(
            thread_id=self.thread.id,
            assistant_id=self.assistant.id,
            instructions=instructions,
        )

        # Poll until the run leaves the queued/in_progress states.
        while run.status == "in_progress" or run.status == "queued":
            await sleep(1)
            run = self.client.beta.threads.runs.retrieve(thread_id=self.thread.id, run_id=run.id)

        if run.status == "completed":
            messages = self.client.beta.threads.messages.list(thread_id=self.thread.id)
            message_dict = json.loads(messages.model_dump_json())
            most_recent_message = message_dict["data"][0]
            return most_recent_message["content"][0]["text"]["value"]

    async def _update_node(self, o_connection: str):
        await self.room.update_menu(
            node_id=o_connection,
            state=RouteState.END if not o_connection else None,
        )

    def json_in_text(self, text: str) -> Optional[str]:
        # Extract the contents of a ```json ... ``` fence from the reply, if present.
        json_pattern = re.compile(r"```json(.*?)```", re.DOTALL)
        match = json_pattern.search(text)
        if match:
            json_str = match.group(1).strip()
            json_str = html.unescape(json_str)
            return json_str

    async def run(self):
        self.add_message(str(self.user_input))
        assistant_resp = await self.run_assistant()
        response = int(assistant_resp) if assistant_resp.isdigit() else assistant_resp
        # Search the raw reply string so a digit-only reply (already converted to
        # int above) cannot be passed to the regex search.
        if json_str := self.json_in_text(assistant_resp):
            response = json.loads(json_str)

        await self.room.set_variable(
            self.variable,
            value=response,
        )
        await self._update_node(self.o_connection)
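`run()` stores the assistant's reply in the configured flow variable; when the reply carries a ```json ... ``` fence, only the parsed JSON is stored. A standalone sketch of that extraction (illustrative only: `extract_json_block` is a hypothetical helper mirroring the `json_in_text` method, and the sample reply is made up):

```python
import html
import json
import re


def extract_json_block(text: str) -> str | None:
    # Same regex as json_in_text(): grab whatever sits between ```json and ```.
    match = re.search(r"```json(.*?)```", text, re.DOTALL)
    return html.unescape(match.group(1).strip()) if match else None


reply = 'Sure, here is the selection:\n```json\n{"option": 2, "label": "Billing"}\n```'
print(json.loads(extract_json_block(reply)))  # {'option': 2, 'label': 'Billing'}
```
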
3 changes: 2 additions & 1 deletion menuflow/nodes/invite_user.py
@@ -3,6 +3,7 @@
from typing import Dict

import mautrix.errors.request
from mautrix.types import UserID

from ..db.route import RouteState
from ..repository import InviteUser as InviteUserModel
@@ -18,7 +19,7 @@ def __init__(
        self.content = invite_node_data

    @property
    def invitee(self) -> list[str]:
    def invitee(self) -> UserID:
        return self.render_data(self.content.get("invitee"))

    @property
1 change: 1 addition & 0 deletions menuflow/repository/__init__.py
@@ -6,6 +6,7 @@
CheckTime,
Delay,
Email,
GPTAssistant,
HTTPRequest,
InactivityOptions,
Input,
1 change: 1 addition & 0 deletions menuflow/repository/nodes/__init__.py
@@ -1,6 +1,7 @@
from .check_time import CheckTime
from .delay import Delay
from .email import Email
from .gpt_assistant import GPTAssistant
from .http_request import HTTPRequest
from .input import InactivityOptions, Input
from .interactive_input import InteractiveInput, InteractiveMessage
41 changes: 41 additions & 0 deletions menuflow/repository/nodes/gpt_assistant.py
@@ -0,0 +1,41 @@
from __future__ import annotations

from attr import dataclass, ib

from ..flow_object import FlowObject


@dataclass
class GPTAssistant(FlowObject):
"""
## GptAssistant
This node allows to interact with the GPT Assistant API.
* If you want to create a new assistant, you need to provide name, instructions, model parameters.
* If you want to use an existing assistant, you need to provide assistant_id.
content:
```yaml
- id: g1
type: gpt_assistant
name: "GPT Assistant"
instructions: "Please select an option"
model: "gpt-3.5-turbo"
assistant_id: "123456"
api_key: "123456"
variable: "gpt_response"
user_input: "user_input"
o_connection: "m1"
```
"""

name: str = ib(default=None)
instructions: str = ib(default=None)
model: str = ib(default=None)
assistant_id: str = ib(default=None)
api_key: str = ib(factory=str)
variable: str = ib(factory=str)
user_input: str = ib(factory=str)
o_connection: str = ib(factory=str)
7 changes: 4 additions & 3 deletions menuflow/repository/nodes/invite_user.py
@@ -1,8 +1,9 @@
from __future__ import annotations

from attr import dataclass, ib
from mautrix.types import UserID

from .switch import Case, Switch
from .switch import Switch


@dataclass
@@ -14,7 +15,7 @@ class InviteUser(Switch):
    - id: 'invite_user'
      type: 'invite_user'
      timeout: 5
      invitees: '{{ main_menu }}'
      invitee: '{{ main_menu }}'
      cases:
        - id: 'join'
          o_connection: 'next_node'
@@ -24,5 +25,5 @@
          o_connection: 'timeout_invite_user'
    """

    invitees: list[str] = ib(default=None)
    invitee: UserID = ib(default=None)
    timeout: int = ib(default=5)
1 change: 1 addition & 0 deletions requirements.txt
@@ -15,3 +15,4 @@ fuzzywuzzy==0.18.0
python-Levenshtein==0.21.1
nats-py==2.6.0
aiohttp-cors==0.7.0
openai==1.30.1
