Skip to content

Commit

Permalink
Merge branch 'main' into machow-feat-data-grid-polars
Browse files Browse the repository at this point in the history
* main:
  test(controllers): Refactor column sort and filter methods for Dataframe class (posit-dev#1496)
  Follow up to posit-dev#1453: allow user roles when normalizing a dictionary (posit-dev#1495)
  fix(layout_columns): Fix coercion of scalar row height to list for python <= 3.9 (posit-dev#1494)
  Add `shiny.ui.Chat` (posit-dev#1453)
  docs(Theme): Fix example and clarify usage (posit-dev#1491)
  chore(pyright): Pin pyright version to `1.1.369` to avoid CI failures (posit-dev#1493)
  tests(dataframe): Add additional tests for dataframe (posit-dev#1487)
  bug(data frame): Export `render.StyleInfo` (posit-dev#1488)
  • Loading branch information
schloerke committed Jul 5, 2024
2 parents 4fe7cf2 + a00a350 commit a99948d
Show file tree
Hide file tree
Showing 85 changed files with 4,757 additions and 138 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

### New features

* Added a new `shiny.ui.Chat` class for building conversational interfaces with fully customizable and performant response generation. (#1453)

* Expose `shiny.playwright`, `shiny.run`, and `shiny.pytest` modules that allow users to test their Shiny apps. (#1448, #1456, #1481)
* `shiny.playwright` contains `controller` and `expect` submodules. `controller` will contain many classes to interact with (and verify!) your Shiny app using Playwright. `expect` contains expectation functions that enhance standard Playwright expectation methods.
* `shiny.run` contains the `run_shiny_app` command and the return type `ShinyAppProc`. `ShinyAppProc` can be used to type the Shiny app pytest fixtures.
Expand Down
5 changes: 5 additions & 0 deletions docs/_quartodoc-core.yml
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,11 @@ quartodoc:
- ui.input_file
- ui.download_button
- ui.download_link
- title: Chat interface
desc: Build a chatbot interface
contents:
- ui.Chat
- ui.chat_ui
- title: Custom UI
desc: Lower-level UI functions for creating custom HTML/CSS/JS
contents:
Expand Down
4 changes: 4 additions & 0 deletions docs/_quartodoc-express.yml
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,10 @@ quartodoc:
- express.ui.navset_underline
- express.ui.navset_pill_list
- express.ui.navset_hidden
- title: Chat interface
desc: Build a chatbot interface
contents:
- express.ui.Chat
- title: Reactive programming
desc: Create reactive functions and dependencies.
contents:
Expand Down
1 change: 1 addition & 0 deletions examples/chat/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
.env
57 changes: 57 additions & 0 deletions examples/chat/RAG/recipes/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
# ------------------------------------------------------------------------------------
# A recipe-extraction chatbot: paste a URL and it pulls the recipe out via the
# OpenAI API. An OpenAI API key is required; see
# https://platform.openai.com/docs/quickstart for how to obtain one.
# ------------------------------------------------------------------------------------
import os

from openai import AsyncOpenAI
from utils import recipe_prompt, scrape_page_with_url

from shiny.express import ui

# OpenAI client; reads the API key from the environment (or paste it in here).
llm = AsyncOpenAI(api_key=os.environ.get("OPENAI_API_KEY"))

# Page-level Shiny options.
ui.page_opts(
    title="Recipe Extractor Chat",
    fillable=True,
    fillable_mobile=True,
)

# A system prompt plus an opening assistant message seed the conversation.
chat = ui.Chat(
    id="chat",
    messages=[
        {"role": "system", "content": recipe_prompt},
        {
            "role": "assistant",
            "content": "Hello! I'm a recipe extractor. Please enter a URL to a recipe page. For example, <https://www.thechunkychef.com/epic-dry-rubbed-baked-chicken-wings/>",
        },
    ],
)

chat.ui(placeholder="Enter a recipe URL...")


# Replace the raw user input (a URL) with the scraped page text before it reaches
# the model. Returning None "short-circuits" the turn: the user is asked to try
# again and the model is never called on a failed scrape.
@chat.transform_user_input
async def try_scrape_page(input: str) -> str | None:
    try:
        return await scrape_page_with_url(input)
    except Exception:
        await chat.append_message(
            "I'm sorry, I couldn't extract content from that URL. Please try again. "
        )
        return None


# On each user submission, stream a model response back into the chat.
@chat.on_user_submit
async def _():
    stream = await llm.chat.completions.create(
        model="gpt-4o", messages=chat.messages(), temperature=0, stream=True
    )
    await chat.append_message_stream(stream)
106 changes: 106 additions & 0 deletions examples/chat/RAG/recipes/utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
import aiohttp
from bs4 import BeautifulSoup

recipe_prompt = """
You are RecipeExtractorGPT.
Your goal is to extract recipe content from text and return a JSON representation of the useful information.
The JSON should be structured like this:
```
{
"title": "Scrambled eggs",
"ingredients": {
"eggs": "2",
"butter": "1 tbsp",
"milk": "1 tbsp",
"salt": "1 pinch"
},
"directions": [
"Beat eggs, milk, and salt together in a bowl until thoroughly combined.",
"Heat butter in a large skillet over medium-high heat. Pour egg mixture into the hot skillet; cook and stir until eggs are set, 3 to 5 minutes."
],
"servings": 2,
"prep_time": 5,
"cook_time": 5,
"total_time": 10,
"tags": [
"breakfast",
"eggs",
"scrambled"
],
"source": "https://recipes.com/scrambled-eggs/",
}
```
The user will provide text content from a web page.
It is not very well structured, but the recipe is in there.
Please look carefully for the useful information about the recipe.
IMPORTANT: Return the result as JSON in a Markdown code block surrounded with three backticks!
"""


async def scrape_page_with_url(url: str, max_length: int = 14000) -> str:
    """
    Given a URL, scrape the web page and return its contents. This also adds the
    URL to the beginning of the text so the model knows the source.

    Parameters
    ----------
    url:
        The URL to scrape
    max_length:
        Max length of recipe text to process. This is to prevent the model from running
        out of tokens. 14000 bytes translates to approximately 3200 tokens.

    Returns
    -------
    str
        "From: <url>" followed by the (possibly truncated) page text.
    """
    contents = await scrape_page(url)
    # Trim the string so that the prompt and reply will fit in the token limit. It
    # would be better to trim by tokens, but that requires using the tiktoken package,
    # which can be very slow to load when running on containerized servers, because it
    # needs to download the model from the internet each time the container starts.
    contents = contents[:max_length]
    return f"From: {url}\n\n" + contents


async def scrape_page(url: str) -> str:
    """
    Fetch *url* and return its visible text content, with boilerplate removed.

    Raises an aiohttp.ClientError if the server responds with a non-200 status.
    """
    # Fetch the page asynchronously; fail fast on any non-OK status.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            if response.status != 200:
                raise aiohttp.ClientError(f"An error occurred: {response.status}")
            html = await response.text()

    soup = BeautifulSoup(html, "html.parser")

    # Drop non-content tags outright.
    for tag in soup(["script", "style"]):
        tag.decompose()

    # Boilerplate sections, identified by element id or class name.
    boilerplate = [
        "header",
        "footer",
        "sidebar",
        "nav",
        "menu",
        "ad",
        "advertisement",
        "cookie-banner",
        "popup",
        "social",
        "breadcrumb",
        "pagination",
        "comment",
        "comments",
    ]
    for name in boilerplate:
        for node in soup.find_all(id=name) + soup.find_all(class_=name):
            node.decompose()

    # Whatever text remains, collapsed to single-space separation.
    return " ".join(soup.stripped_strings)
5 changes: 5 additions & 0 deletions examples/chat/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
# Shiny `Chat` examples

This folder contains a collection of examples illustrating `shiny.ui.Chat` usage. Many of them require API keys from providers such as OpenAI, Anthropic, etc. In those cases, the example should have commentary explaining how to obtain keys as well as how to provide them to the app.

To get started with an app that doesn't require an API key, see the `hello-world` example. This example has both a Shiny Core and Express app to illustrate how it's used in either mode.
47 changes: 47 additions & 0 deletions examples/chat/enterprise/aws-bedrock-anthropic/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
# ------------------------------------------------------------------------------------
# A minimal Shiny Chat app backed by Anthropic's Claude model on AWS Bedrock.
# Requires an AWS Bedrock configuration; to get set up, see
# https://aws.amazon.com/bedrock/claude/ and
# https://github.com/anthropics/anthropic-sdk-python#aws-bedrock
# ------------------------------------------------------------------------------------
from anthropic import AnthropicBedrock

from shiny.express import ui

# Credentials could be hard-coded below, but keeping them in an .env file (loaded
# via dotenv) avoids shipping keys with the code.
# from dotenv import load_dotenv
# _ = load_dotenv()
llm = AnthropicBedrock(
    # aws_secret_key="..."
    # aws_access_key="..."
    # aws_region="..."
    # aws_account_id="..."
)

# Page-level Shiny options.
ui.page_opts(
    title="Hello Anthropic Claude Chat",
    fillable=True,
    fillable_mobile=True,
)

# An empty chat component, rendered immediately.
chat = ui.Chat(id="chat")
chat.ui()


# Stream a Claude response into the chat whenever the user submits a message.
@chat.on_user_submit
async def _():
    stream = await llm.messages.create(
        model="anthropic.claude-3-sonnet-20240229-v1:0",
        messages=chat.messages(),  # full conversation so far
        stream=True,
        max_tokens=1000,
    )
    await chat.append_message_stream(stream)
55 changes: 55 additions & 0 deletions examples/chat/enterprise/azure-openai/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
# ------------------------------------------------------------------------------------
# A basic Shiny Chat example powered by OpenAI running on Azure.
# Requires an Azure OpenAI API key; to get set up, follow
# https://learn.microsoft.com/en-us/azure/ai-services/openai/quickstart?tabs=command-line%2Cpython-new&pivots=programming-language-python#create-a-new-python-application
# ------------------------------------------------------------------------------------
import os

from openai import AzureOpenAI

from shiny.express import ui

# Keys could be hard-coded below, but keeping them in an .env file (loaded via
# dotenv) avoids shipping keys with the code.
# from dotenv import load_dotenv
# _ = load_dotenv()
llm = AzureOpenAI(
    api_key=os.getenv("AZURE_OPENAI_API_KEY"),
    api_version="2024-02-01",
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
)

# The name of your Azure model deployment (not the underlying model name).
deployment_name = "REPLACE_WITH_YOUR_DEPLOYMENT_NAME"

# Page-level Shiny options.
ui.page_opts(
    title="Hello OpenAI Chat",
    fillable=True,
    fillable_mobile=True,
)

# A chat component seeded with a single greeting from the assistant.
chat = ui.Chat(
    id="chat",
    messages=[
        {"content": "Hello! How can I help you today?", "role": "assistant"},
    ],
)

chat.ui()


# Stream a model response into the chat whenever the user submits a message.
@chat.on_user_submit
async def _():
    stream = await llm.chat.completions.create(
        model=deployment_name,
        messages=chat.messages(),  # full conversation so far
        stream=True,
    )
    await chat.append_message_stream(stream)
43 changes: 43 additions & 0 deletions examples/chat/hello-providers/anthropic/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# ------------------------------------------------------------------------------------
# A basic Shiny Chat example powered by Anthropic's Claude model.
# Requires an Anthropic API key; to get one, follow
# https://docs.anthropic.com/en/api/getting-started
# ------------------------------------------------------------------------------------
import os

from anthropic import AsyncAnthropic

from shiny.express import ui

# The key could be hard-coded below, but keeping it in an .env file (loaded via
# dotenv) avoids shipping it with the code.
# from dotenv import load_dotenv
# _ = load_dotenv()
llm = AsyncAnthropic(api_key=os.environ.get("ANTHROPIC_API_KEY"))

# Page-level Shiny options.
ui.page_opts(
    title="Hello Anthropic Claude Chat",
    fillable=True,
    fillable_mobile=True,
)

# An empty chat component, rendered immediately.
chat = ui.Chat(id="chat")
chat.ui()


# Stream a Claude response into the chat whenever the user submits a message.
@chat.on_user_submit
async def _():
    stream = await llm.messages.create(
        model="claude-3-opus-20240229",
        messages=chat.messages(),  # full conversation so far
        stream=True,
        max_tokens=1000,
    )
    await chat.append_message_stream(stream)
Loading

0 comments on commit a99948d

Please sign in to comment.