Skip to content

Commit

Permalink
Merge pull request #381 from LlmKira/dev
Browse files Browse the repository at this point in the history
Hook
  • Loading branch information
sudoskys committed Apr 17, 2024
2 parents c1b621d + cd7a09d commit c0b4510
Show file tree
Hide file tree
Showing 33 changed files with 674 additions and 213 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -148,3 +148,4 @@ config_dir/*.secret/
/.pdm-python
/.montydb/
/.snapshot/
/.tutorial.db
23 changes: 20 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,14 +48,18 @@ The model adheres to the Openai Schema, other models are not supported. Please a
|-----------------------------------|
| ![sticker](./docs/chain_chat.gif) |

## 🍔 Roadmap
## 🔨 Roadmap

- [x] Removal of legacy code
- [x] Deletion of metric system
- [x] Deletion of model selection system, unified to OpenAI Schema
- [x] Implementation of a more robust plugin system
- [x] Project structure simplification
- [x] Elimination of the Provider system
- [x] Hook support.
- [x] Access to TTS.
- [ ] Add LLM reference support to the plugin environment. (extract && search in text)
- [ ] Add standalone support for Openai's new Schema. (vision)

## 📦 Features

Expand All @@ -71,9 +75,9 @@ The model adheres to the Openai Schema, other models are not supported. Please a

### 🍔 Login Modes

- `Login via url`: Use `/login token#https://provider.com` to Login. The program posts the token to the interface to
- `Login via url`: Use `/login token$https://provider.com` to Login. The program posts the token to the interface to
retrieve configuration information
- `Login`: Use `/login https://api.com/v1#key#model` to login
- `Login`: Use `/login https://api.com/v1$key$model` to login

### 🧀 Plugin Previews

Expand Down Expand Up @@ -168,6 +172,19 @@ env - Environment variables of the function
Refer to the example plugins in the `plugins` directory and
the [🧀 Plugin Development Document](https://llmkira.github.io/Docs/dev/basic) for plugin development documentation.

### Hooks

Hooks control the EventMessage in the sender and receiver. For example, the built-in hooks include `voice_hook`.

You can enable it by setting `VOICE_REPLY_ME=true` in `.env`.

```shell
/env VOICE_REPLY_ME=true
/env REECHO_VOICE_KEY=<key in dev.reecho.ai>
```

Check the source code in `llmkira/extra/voice_hook.py` to learn how to write your own hooks.

## 🧀 Sponsor

[![sponsor](./.github/sponsor_ohmygpt.png)](https://www.ohmygpt.com)
Expand Down
41 changes: 37 additions & 4 deletions app/middleware/llm_task.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from app.components.credential import Credential
from app.components.user_manager import record_cost
from llmkira.kv_manager.instruction import InstructionManager
from llmkira.kv_manager.time import TimeFeelManager
from llmkira.kv_manager.tool_call import GLOBAL_TOOLCALL_CACHE_HANDLER
from llmkira.memory import global_message_runtime
from llmkira.openai.cell import (
Expand Down Expand Up @@ -125,10 +126,9 @@ async def remember(self, *, message: Optional[Message] = None):
if message:
await self.message_history.append(messages=[message])

async def build_message(self, remember=True):
async def build_history_messages(self):
"""
从任务会话和历史消息中构建消息
:param remember: 是否写入历史消息
:return: None
"""
system_prompt = await InstructionManager(
Expand All @@ -147,6 +147,15 @@ async def build_message(self, remember=True):
continue
else:
message_run.append(msg)
return message_run

async def build_task_messages(self, remember=True):
"""
从任务会话和历史消息中构建消息
:param remember: 是否写入历史消息
:return: None
"""
message_run = []
# 处理 人类 发送的消息
task_message = self.task.message
task_message: List[EventMessage]
Expand All @@ -171,7 +180,14 @@ async def request_openai(
:param disable_tool: 禁用函数
:param credential: 凭证
:return: OpenaiResult 返回结果
:raise RuntimeError: 无法处理消息
:raise RuntimeError: # Feel time leave
time_feel = await TimeFeelManager(self.session_uid).get_leave()
if time_feel:
await self.remember(
message=SystemMessage(
content=f"statu:[After {time_feel} leave, user is back]"
)
) 无法处理消息
:raise AssertionError: 无法处理消息
:raise OpenaiError: Openai错误
"""
Expand All @@ -182,7 +198,24 @@ async def request_openai(
logger.warning(f"llm_task:Tool is not unique {self.tools}")
if isinstance(self.task.task_sign.instruction, str):
messages.append(SystemMessage(content=self.task.task_sign.instruction))
messages.extend(await self.build_message(remember=remember))
# 先读取历史消息才能操作
message_head = await self.build_history_messages()
messages.extend(message_head) # 历史消息
# 操作先写入一个状态
time_feel = await TimeFeelManager(self.session_uid).get_leave()
if time_feel:
messages.append(
SystemMessage(content=f"statu:[After {time_feel} leave, user is back]")
) # 插入消息
await self.remember(
message=SystemMessage(
content=f"statu:[After {time_feel} leave, user is back]"
)
)
# 同步状态到历史消息
message_body = await self.build_task_messages(remember=remember)
messages.extend(message_body)

# TODO:实现消息时序切片
# 日志
logger.info(
Expand Down
1 change: 1 addition & 0 deletions app/receiver/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ async def _main(_func):
# 导入插件
load_plugins("llmkira/extra/plugins")
load_from_entrypoint("llmkira.extra.plugin")
import llmkira.extra.voice_hook # noqa

loaded_message = "\n >>".join(get_entrypoint_plugins())
logger.success(
Expand Down
21 changes: 13 additions & 8 deletions app/receiver/discord/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,13 +109,18 @@ async def reply(
"""
模型直转发,Message是Openai的类型
"""
for item in messages:
# raw_message = await self.loop_turn_from_openai(platform_name=__receiver__, message=item, locate=receiver)
event_message = EventMessage.from_openai_message(
message=item, locate=receiver
)
await self.file_forward(receiver=receiver, file_list=event_message.files)
if not event_message.text:
event_message = [
EventMessage.from_openai_message(message=item, locate=receiver)
for item in messages
]
# 转析器
_, event_message, receiver = await self.hook(
platform_name=__receiver__, messages=event_message, locate=receiver
)
event_message: list
for event in event_message:
await self.file_forward(receiver=receiver, file_list=event.files)
if not event.text:
continue
async with self.bot as client:
client: hikari.impl.RESTClientImpl
Expand All @@ -129,7 +134,7 @@ async def reply(
)
await client.create_message(
channel=int(receiver.thread_id),
content=event_message.text,
content=event.text,
reply=_reply,
)
return logger.trace("reply message")
Expand Down
21 changes: 13 additions & 8 deletions app/receiver/kook/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,20 +118,25 @@ async def reply(
"""
模型直转发,Message是Openai的类型
"""
for item in messages:
# raw_message = await self.loop_turn_from_openai(platform_name=__receiver__, message=item, locate=receiver)
event_message = EventMessage.from_openai_message(
message=item, locate=receiver
)
await self.file_forward(receiver=receiver, file_list=event_message.files)
if not event_message.text:
event_message = [
EventMessage.from_openai_message(message=item, locate=receiver)
for item in messages
]
# 转析器
_, event_message, receiver = await self.hook(
platform_name=__receiver__, messages=event_message, locate=receiver
)
event_message: list
for event in event_message:
await self.file_forward(receiver=receiver, file_list=event.files)
if not event.text:
continue
await self.send_message(
channel_id=receiver.thread_id,
user_id=receiver.user_id,
dm=receiver.thread_id == receiver.chat_id,
message_type=MessageTypes.KMD,
content=event_message.text,
content=event.text,
)
return logger.trace("reply message")

Expand Down
43 changes: 19 additions & 24 deletions app/receiver/receiver_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@

import shortuuid
from aio_pika.abc import AbstractIncomingMessage
from deprecated import deprecated
from loguru import logger
from telebot import formatting

Expand All @@ -26,7 +25,7 @@
from llmkira.openai.cell import ToolCall, Message, Tool
from llmkira.openai.request import OpenAIResult
from llmkira.openapi.fuse import get_error_plugin
from llmkira.openapi.transducer import LoopRunner
from llmkira.openapi.hook import run_hook, Trigger
from llmkira.sdk.tools import ToolRegister
from llmkira.task import Task, TaskHeader
from llmkira.task.schema import Location, EventMessage, Router
Expand Down Expand Up @@ -116,30 +115,23 @@ async def reorganize_tools(task: TaskHeader, error_times_limit: int = 10) -> Lis

class BaseSender(object, metaclass=ABCMeta):
@staticmethod
@deprecated(reason="use another function")
async def loop_turn_from_openai(platform_name, message, locate):
async def hook(platform_name, messages: List[EventMessage], locate: Location):
"""
# FIXME 删除这个函数
将 Openai 消息传入 Receiver Loop 进行修饰
此过程将忽略掉其他属性。只留下 content
:param platform_name: 平台名称
:param messages: 消息
:param locate: 位置
:return: 平台名称,消息,位置
"""
loop_runner = LoopRunner()
trans_loop = loop_runner.get_receiver_loop(platform_name=platform_name)
_raw_message = EventMessage.from_openai_message(message=message, locate=locate)
await loop_runner.exec_loop(
pipe=trans_loop,
pipe_arg={
"message": _raw_message,
},
arg, kwarg = await run_hook(
Trigger.RECEIVER,
platform=platform_name,
messages=messages,
locate=locate,
)
arg: dict = loop_runner.result_pipe_arg
if not arg.get("message"):
logger.error("Message Loop Lose Message")
raw_message: EventMessage = arg.get("message", _raw_message)
assert isinstance(
raw_message, EventMessage
), f"message type error {type(raw_message)}"
return raw_message
platform_name = kwarg.get("platform", platform_name)
messages = kwarg.get("messages", messages)
locate = kwarg.get("locate", locate)
return platform_name, messages, locate

@abstractmethod
async def file_forward(self, receiver: Location, file_list: list):
Expand Down Expand Up @@ -265,7 +257,10 @@ async def _flash(
)
return exc
except AssertionError as exc:
await self.sender.error(receiver=task.receiver, text=str(exc))
logger.exception(exc)
await self.sender.error(
receiver=task.receiver, text=f"Assert {str(exc)}"
)
return exc
except Exception as exc:
logger.exception(exc)
Expand Down
25 changes: 15 additions & 10 deletions app/receiver/slack/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,25 +91,30 @@ async def reply(
"""
模型直转发,Message是Openai的类型
"""
for item in messages:
# raw_message = await self.loop_turn_from_openai(platform_name=__receiver__, message=item, locate=receiver)
event_message = EventMessage.from_openai_message(
message=item, locate=receiver
)
await self.file_forward(receiver=receiver, file_list=event_message.files)
if not event_message.text:
event_message = [
EventMessage.from_openai_message(message=item, locate=receiver)
for item in messages
]
# 转析器
_, event_message, receiver = await self.hook(
platform_name=__receiver__, messages=event_message, locate=receiver
)
event_message: list
for event in event_message:
await self.file_forward(receiver=receiver, file_list=event.files)
if not event.text:
continue
_message = (
ChatMessageCreator(
channel=receiver.chat_id, thread_ts=receiver.message_id
)
.update_content(message_text=event_message.text)
.get_message_payload(message_text=event_message.text)
.update_content(message_text=event.text)
.get_message_payload(message_text=event.text)
)
await self.bot.chat_postMessage(
channel=receiver.chat_id,
thread_ts=receiver.message_id,
text=event_message.text,
text=event.text,
)
return logger.trace("reply message")

Expand Down
22 changes: 14 additions & 8 deletions app/receiver/telegram/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,17 +118,23 @@ async def reply(
:param messages: OPENAI Format Message
:param reply_to_message: 是否回复消息
"""
for message in messages:
event_message = EventMessage.from_openai_message(
message=message, locate=receiver
)
await self.file_forward(receiver=receiver, file_list=event_message.files)
if not event_message.text:
event_message = [
EventMessage.from_openai_message(message=item, locate=receiver)
for item in messages
]
# 转析器
_, event_message, receiver = await self.hook(
platform_name=__receiver__, messages=event_message, locate=receiver
)
event_message: list
for event in event_message:
await self.file_forward(receiver=receiver, file_list=event.files)
if not event.text:
continue
try:
await self.bot.send_message(
chat_id=receiver.chat_id,
text=convert(event_message.text),
text=convert(event.text),
reply_to_message_id=receiver.message_id
if reply_to_message
else None,
Expand All @@ -137,7 +143,7 @@ async def reply(
except telebot.apihelper.ApiTelegramException as e:
if "message to reply not found" in str(e):
await self.bot.send_message(
chat_id=receiver.chat_id, text=convert(event_message.text)
chat_id=receiver.chat_id, text=convert(event.text)
)
else:
logger.error(f"User {receiver.user_id} send message error")
Expand Down

0 comments on commit c0b4510

Please sign in to comment.