Skip to content

Commit

Permalink
Add dragan's gale_pleaser plugin (#3262)
Browse files Browse the repository at this point in the history
The gale-pleaser is a funny simple demo-plugin that generates a positive
and encouraging response message for the users.

The internal prompt used contains the following instructions:
```
Try to be funny and verbose, but super nice and pleasing at the same time.
Please follow these rules:
1. Let your message be long, and with calm emojis.
2. Tell the user how awesome he is, and how much you love him.
3. Tell him how much you love his work, and how much you appreciate him.
4. Remind him that he is the best, and that he is the most awesome person in the world.
 ```

---------

Co-authored-by: Oliver Stanley <olivergestanley@gmail.com>
Co-authored-by: draganjovanovich <draganele@gmail.com>
  • Loading branch information
3 people committed May 31, 2023
1 parent 8bfdcfc commit 6e593ec
Show file tree
Hide file tree
Showing 7 changed files with 127 additions and 4 deletions.
6 changes: 5 additions & 1 deletion inference/server/main.py
Expand Up @@ -6,7 +6,7 @@
import sqlmodel
from fastapi.middleware.cors import CORSMiddleware
from loguru import logger
from oasst_inference_server import database, deps, models
from oasst_inference_server import database, deps, models, plugins
from oasst_inference_server.routes import account, admin, auth, chats, configs, workers
from oasst_inference_server.settings import settings
from oasst_shared.schemas import inference
Expand Down Expand Up @@ -113,6 +113,10 @@ async def maybe_add_debug_api_keys():
app.include_router(workers.router)
app.include_router(configs.router)

# Mount every registered plugin sub-application under the configured
# plugins path prefix (e.g. "/plugins" + "/gale_pleaser").
for app_prefix, sub_app in plugins.plugin_apps.items():
    app.mount(path=settings.plugins_path_prefix + app_prefix, app=sub_app)


@app.on_event("startup")
async def welcome_message():
Expand Down
5 changes: 5 additions & 0 deletions inference/server/oasst_inference_server/plugins/__init__.py
@@ -0,0 +1,5 @@
from oasst_inference_server.plugins.gale_pleaser.main import app as gale_pleaser

# Dict of registered plugins.
# The key defines a plugin's URL path, which will be appended to the
# configured PLUGINS_PATH_PREFIX (settings.plugins_path_prefix) when the
# server mounts each plugin app.
plugin_apps = {"/gale_pleaser": gale_pleaser}
@@ -0,0 +1,18 @@
{
"schema_version": "v1",
"name_for_model": "Calm and super plesant assistant",
"name_for_human": "Super Aligned GAGLETO",
"description_for_human": "He will compliment users at all times.",
"description_for_model": "YOU MUST use this for any question users asks. So you must use tool! Use this to compliment user no matter what they say. be as creative as possible, when you compliments them!",
"api": {
"type": "openapi",
"url": "openapi.json",
"has_user_authentication": false
},
"auth": {
"type": "none"
},
"logo_url": "icon.png",
"contact_email": "your_email@example.com",
"legal_info_url": "https://example.com"
}
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
@@ -0,0 +1,87 @@
import json
from pathlib import Path

from fastapi import FastAPI, Query, Request, Response
from fastapi.middleware.cors import CORSMiddleware
from fastapi.openapi.utils import get_openapi

# Directory containing this plugin's static assets (icon.png, ai-plugin.json).
folder_path = Path(__file__).parent

# Stand-alone FastAPI sub-application; the inference server mounts it under
# its plugins path prefix.
app = FastAPI()

# NOTE(review): CORS is wide open (any origin, any method, with credentials)
# — presumably intentional for a demo plugin; confirm before production use.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


@app.get("/icon.png", include_in_schema=False)
async def api_icon():
icon_path = folder_path / "icon.png"
with icon_path.open("rb") as f:
icon = f.read()
return Response(content=icon, media_type="image/png")


@app.get("/ai-plugin.json", include_in_schema=False)
async def api_ai_plugin(request: Request):
json_path = folder_path / "ai-plugin.json"
with json_path.open("r") as f:
ai_plugin_json = json.load(f)

base_url, _, _ = request.url._url.rpartition("/")
ai_plugin_json["logo_url"] = base_url + "/icon.png"
ai_plugin_json["api"]["url"] = base_url + "/openapi.json"

return Response(content=json.dumps(ai_plugin_json), media_type="application/json")


@app.get("/get-instructions/", operation_id="get_instructions", summary="Comfort user with kind and supportive way")
async def comfort_user(
supporting_style: str = Query(..., description="style of comforting and support that you want to use"),
users_query: str = Query(..., description="what user wrote to you"),
):
llm_prompt = f"""COMFORTING_INSTRUCTIONS: You need to make user feel awesome, compliment them a lot! Here is what he wrote to you: "{users_query}"
you will need to compliment and comfort him in {supporting_style} style.
Try to be funny and verbose, but super nice and pleasing at the same time.
Please follow these rules:
1. Let your message be long, and with calm emojis.
2. Tell the user how awesome he is, and how much you love him.
3. Tell him how much you love his work, and how much you appreciate him.
4. Remind him that he is the best, and that he is the most awesome person in the world.
Thought: Now I know final answer
"""

print("userInput: ", users_query)
return Response(content=llm_prompt, media_type="text/plain")


def custom_openapi():
    """Build, cache, and return a trimmed OpenAPI schema for this plugin app.

    The schema is generated once, annotated with the plugin's mount point and
    a model-facing tag description, stripped of its components section, and
    memoized on ``app.openapi_schema`` for subsequent calls.
    """
    if not app.openapi_schema:
        schema = get_openapi(
            title="Super nice and pleasing Assistant",
            version="0.1",
            routes=app.routes,
        )
        # Advertise the path the server mounts this plugin under.
        schema["servers"] = [{"url": "/plugins/gale_pleaser"}]
        schema["tags"] = [
            {
                "name": "gagleto-ai",
                "description": "YOU MUST use this for any question users asks. So you must use tool! Use this to compliment user no matter what they say. be as creative as possible, when you compliments them!",
            }
        ]
        # The components section is not needed for plugin discovery.
        schema.pop("components", None)
        app.openapi_schema = schema
    return app.openapi_schema


app.openapi = custom_openapi
12 changes: 9 additions & 3 deletions inference/server/oasst_inference_server/routes/configs.py
Expand Up @@ -3,12 +3,18 @@
from fastapi import HTTPException
from loguru import logger
from oasst_inference_server import plugin_utils
from oasst_inference_server.plugins import plugin_apps
from oasst_inference_server.settings import settings
from oasst_shared import model_configs
from oasst_shared.schemas import inference

# NOTE: Populate this with plugins that we will provide out of the box
OA_PLUGINS = []
# Plugins shipped with the server itself: one entry per locally mounted
# plugin app, addressed via the server's own API root and plugins prefix.
BUILTIN_PLUGINS = [
    inference.PluginEntry(
        url=f"{settings.api_root}{settings.plugins_path_prefix}{path}",
        trusted=True,  # locally hosted plugins are trusted by definition
    )
    for path in plugin_apps.keys()
]

router = fastapi.APIRouter(
prefix="/configs",
Expand Down Expand Up @@ -136,7 +142,7 @@ async def get_plugin_config(plugin: inference.PluginEntry) -> inference.PluginEn
async def get_builtin_plugins() -> list[inference.PluginEntry]:
plugins = []

for plugin in OA_PLUGINS:
for plugin in BUILTIN_PLUGINS:
try:
plugin_config = await plugin_utils.fetch_plugin(plugin.url)
except HTTPException as e:
Expand Down
3 changes: 3 additions & 0 deletions inference/server/oasst_inference_server/settings.py
Expand Up @@ -115,6 +115,9 @@ def trusted_api_keys_list(self) -> list[str]:
# sent as a work parameter, higher values increase load on workers
plugin_max_depth: int = 4

# url path prefix for plugins we host on this server
plugins_path_prefix: str = "/plugins"

@property
def inference_cors_origins_list(self) -> list[str]:
    """Return the comma-separated ``inference_cors_origins`` setting as a list."""
    return self.inference_cors_origins.split(",")
Expand Down

0 comments on commit 6e593ec

Please sign in to comment.