24 changes: 24 additions & 0 deletions pkg-py/CHANGELOG.md
@@ -0,0 +1,24 @@
# Changelog

All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [UNRELEASED]

* `querychat.init()` now accepts a `client` argument, replacing the previous `create_chat_callback` argument. (#60)

The `client` can be:

* a `chatlas.Chat` object,
* a function that returns a `chatlas.Chat` object,
* or a provider-model string, e.g. `"openai/gpt-4.1"`, to be passed to `chatlas.ChatAuto()`.

If `client` is not provided, querychat will use the `QUERYCHAT_CLIENT` environment variable, which should be a provider-model string. If the envvar is not set, querychat uses OpenAI with the default model from `chatlas.ChatOpenAI()`.
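
  A minimal usage sketch of the new argument (the data frame, table name, and model string below are illustrative, not part of this PR, and an OpenAI API key is assumed to be configured):

  ```python
  import chatlas
  import pandas as pd

  import querychat

  df = pd.DataFrame({"x": [1, 2, 3]})

  # Provider-model string, resolved via chatlas.ChatAuto()
  config = querychat.init(df, "my_table", client="openai/gpt-4.1")

  # An existing chatlas.Chat object
  config = querychat.init(df, "my_table", client=chatlas.ChatOpenAI())

  # A callback that returns a chatlas.Chat (the old create_chat_callback style)
  config = querychat.init(
      df,
      "my_table",
      client=lambda system_prompt: chatlas.ChatOpenAI(system_prompt=system_prompt),
  )
  ```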


## [0.1.0] - 2025-05-24

This is the first release of the `querychat` package.

48 changes: 48 additions & 0 deletions pkg-py/src/querychat/_utils.py
@@ -0,0 +1,48 @@
from __future__ import annotations

import os
from contextlib import contextmanager
from typing import Optional


@contextmanager
def temp_env_vars(env_vars: dict[str, Optional[str]]):
"""
Temporarily set environment variables and restore them when exiting.

Parameters
----------
    env_vars : dict[str, Optional[str]]
        Dictionary of environment variable names to values to set temporarily.
        A value of ``None`` removes (unsets) the variable for the duration of the block.

Example
-------
with temp_env_vars({"FOO": "bar", "BAZ": "qux"}):
# FOO and BAZ are set to "bar" and "qux"
do_something()
# FOO and BAZ are restored to their original values (or unset if they weren't set)

"""
original_values: dict[str, Optional[str]] = {}
for key in env_vars:
original_values[key] = os.environ.get(key)

for key, value in env_vars.items():
if value is None:
# If value is None, remove the variable
os.environ.pop(key, None)
else:
# Otherwise set the variable to the specified value
os.environ[key] = value

try:
yield
finally:
# Restore original values
for key, original_value in original_values.items():
if original_value is None:
# Variable wasn't set originally, so remove it
os.environ.pop(key, None)
else:
# Restore original value
os.environ[key] = original_value
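
A self-contained sketch of how this helper round-trips the environment (the variable names are arbitrary; the import path assumes the package's internal `_utils` module):

```python
import os

from querychat._utils import temp_env_vars

os.environ["DEMO_VAR"] = "original"
os.environ.pop("DEMO_UNSET", None)  # ensure it starts unset

with temp_env_vars({"DEMO_VAR": "temporary", "DEMO_UNSET": None}):
    # Inside the block, DEMO_VAR is overridden and DEMO_UNSET stays unset
    assert os.environ["DEMO_VAR"] == "temporary"
    assert "DEMO_UNSET" not in os.environ

# On exit, the original environment is restored
assert os.environ["DEMO_VAR"] == "original"
assert "DEMO_UNSET" not in os.environ
```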
127 changes: 108 additions & 19 deletions pkg-py/src/querychat/querychat.py
@@ -1,9 +1,11 @@
from __future__ import annotations

import copy
import os
import re
import sys
import warnings
from dataclasses import dataclass
from functools import partial
from pathlib import Path
from typing import TYPE_CHECKING, Any, Callable, Optional, Protocol, Union

@@ -13,6 +15,8 @@
import sqlalchemy
from shiny import Inputs, Outputs, Session, module, reactive, ui

from ._utils import temp_env_vars

if TYPE_CHECKING:
import pandas as pd
from narwhals.typing import IntoFrame
Expand All @@ -33,7 +37,7 @@ class QueryChatConfig:
data_source: DataSource
system_prompt: str
greeting: Optional[str]
create_chat_callback: CreateChatCallback
client: chatlas.Chat


class QueryChat:
@@ -233,6 +237,74 @@ def df_to_html(df: IntoFrame, maxrows: int = 5) -> str:
return table_html + rows_notice


def _get_client_from_env() -> Optional[str]:
"""Get client configuration from environment variable."""
env_client = os.getenv("QUERYCHAT_CLIENT", "")
if not env_client:
return None
return env_client


def _create_client_from_string(client_str: str) -> chatlas.Chat:
"""Create a chatlas.Chat client from a provider-model string."""
provider, model = (
client_str.split("/", 1) if "/" in client_str else (client_str, None)
)
    # Temporarily override chatlas's envvars with values derived from querychat's
    # configuration so that chatlas.ChatAuto() picks up the requested provider/model
with temp_env_vars(
{
"CHATLAS_CHAT_PROVIDER": provider,
"CHATLAS_CHAT_MODEL": model,
"CHATLAS_CHAT_ARGS": os.environ["QUERYCHAT_CLIENT_ARGS"],
},
):
return chatlas.ChatAuto(provider="openai")


def _resolve_querychat_client(
client: Optional[Union[chatlas.Chat, CreateChatCallback, str]] = None,
) -> chatlas.Chat:
"""
Resolve the client argument into a chatlas.Chat object.

Parameters
----------
client : chatlas.Chat, CreateChatCallback, str, or None
The client to resolve. Can be:
- A chatlas.Chat object (returned as-is)
- A function that returns a chatlas.Chat object
- A provider-model string (e.g., "openai/gpt-4.1")
- None (fall back to environment variable or default)

Returns
-------
chatlas.Chat
A resolved chatlas.Chat object

"""
if client is None:
client = _get_client_from_env()

if client is None:
        # Default to OpenAI, using chatlas's default model
return chatlas.ChatOpenAI()

if callable(client) and not isinstance(client, chatlas.Chat):
# Backcompat: support the old create_chat_callback style, using an empty
# system prompt as a placeholder.
client = client(system_prompt="")

if isinstance(client, str):
client = _create_client_from_string(client)

if not isinstance(client, chatlas.Chat):
raise TypeError(
"client must be a chatlas.Chat object or function that returns one",
)

return client


def init(
data_source: IntoFrame | sqlalchemy.Engine,
table_name: str,
@@ -242,6 +314,7 @@ def init(
extra_instructions: Optional[str | Path] = None,
prompt_template: Optional[str | Path] = None,
system_prompt_override: Optional[str] = None,
client: Optional[Union[chatlas.Chat, CreateChatCallback, str]] = None,
create_chat_callback: Optional[CreateChatCallback] = None,
) -> QueryChatConfig:
"""
@@ -283,15 +356,41 @@
A custom system prompt to use instead of the default. If provided,
`data_description`, `extra_instructions`, and `prompt_template` will be
silently ignored.
client : chatlas.Chat, CreateChatCallback, str, optional
A `chatlas.Chat` object, a string to be passed to `chatlas.ChatAuto()`
describing the model to use (e.g. `"openai/gpt-4.1"`), or a function
that creates a chat client. If using a function, the function should
accept a `system_prompt` argument and return a `chatlas.Chat` object.

If `client` is not provided, querychat consults the `QUERYCHAT_CLIENT`
environment variable, which can be set to a provider-model string. If no
        option is provided, querychat falls back to `chatlas.ChatOpenAI()` with
        its default model.
create_chat_callback : CreateChatCallback, optional
A function that creates a chat object
**Deprecated.** Use the `client` argument instead.

Returns
-------
QueryChatConfig
A QueryChatConfig object that can be passed to server()

"""
# Handle deprecated create_chat_callback argument
if create_chat_callback is not None:
warnings.warn(
"The 'create_chat_callback' parameter is deprecated. Use 'client' instead.",
DeprecationWarning,
stacklevel=2,
)
if client is not None:
raise ValueError(
"You cannot pass both `create_chat_callback` and `client` to `init()`.",
)
client = create_chat_callback

# Resolve the client
resolved_client = _resolve_querychat_client(client)

# Validate table name (must begin with letter, contain only letters, numbers, underscores)
if not re.match(r"^[a-zA-Z][a-zA-Z0-9_]*$", table_name):
raise ValueError(
@@ -330,17 +429,11 @@ def init(
prompt_template=prompt_template,
)

# Default chat function if none provided
create_chat_callback = create_chat_callback or partial(
chatlas.ChatOpenAI,
model="gpt-4.1",
)

return QueryChatConfig(
data_source=data_source_obj,
system_prompt=system_prompt_,
greeting=greeting_str,
create_chat_callback=create_chat_callback,
client=resolved_client,
)


@@ -441,7 +534,7 @@ def _():
data_source = querychat_config.data_source
system_prompt = querychat_config.system_prompt
greeting = querychat_config.greeting
create_chat_callback = querychat_config.create_chat_callback
client = querychat_config.client

# Reactive values to store state
current_title = reactive.value[Union[str, None]](None)
@@ -517,17 +610,13 @@ async def query(query: str):

chat_ui = ui.Chat("chat")

# Initialize the chat with the system prompt
# This is a placeholder - actual implementation would depend on chatlas
chat = create_chat_callback(system_prompt=system_prompt)
# Set up the chat object for this session
chat = copy.deepcopy(client)
chat.set_turns([])
chat.system_prompt = system_prompt
chat.register_tool(update_dashboard)
chat.register_tool(query)

# Register tools with the chat
# This is a placeholder - actual implementation would depend on chatlas
# chat.register_tool("update_dashboard", update_dashboard)
# chat.register_tool("query", query)

# Add greeting if provided
if greeting and any(len(g) > 0 for g in greeting.split("\n")):
# Display greeting in chat UI
11 changes: 7 additions & 4 deletions pkg-r/DESCRIPTION
@@ -1,6 +1,6 @@
Package: querychat
Title: Filter and Query Data Frames in 'shiny' Using an LLM Chat Interface
Version: 0.0.0.9000
Version: 0.0.1.9000
Authors@R: c(
person("Joe", "Cheng", , "joe@posit.co", role = c("aut", "cre")),
person("Posit Software, PBC", role = c("cph", "fnd"))
@@ -18,19 +18,22 @@ Imports:
bslib,
DBI,
duckdb,
ellmer,
ellmer (>= 0.3.0),
htmltools,
lifecycle,
purrr,
rlang,
shiny,
shinychat (>= 0.2.0),
whisker,
xtable
Suggests:
Suggests:
DT,
R6,
RSQLite,
shinytest2,
testthat (>= 3.0.0)
testthat (>= 3.0.0),
withr
Config/testthat/edition: 3
Encoding: UTF-8
Roxygen: list(markdown = TRUE)
1 change: 1 addition & 0 deletions pkg-r/NAMESPACE
@@ -20,3 +20,4 @@ export(querychat_server)
export(querychat_sidebar)
export(querychat_ui)
export(test_query)
importFrom(lifecycle,deprecated)
14 changes: 14 additions & 0 deletions pkg-r/NEWS.md
@@ -3,3 +3,17 @@
* Initial CRAN submission.

* Added `prompt_template` support for `querychat_system_prompt()`. (Thank you, @oacar! #37, #45)

* `querychat_init()` now accepts a `client` argument, replacing the previous `create_chat_func` argument. (#60)

The `client` can be:

* an `ellmer::Chat` object,
* a function that returns an `ellmer::Chat` object,
* or a provider-model string, e.g. `"openai/gpt-4.1"`, to be passed to `ellmer::chat()`.

If `client` is not provided, querychat will use, in order:

* the `querychat.client` R option, which can be any of the above options,
* the `QUERYCHAT_CLIENT` environment variable, which should be a provider-model string,
* or the default model from `ellmer::chat_openai()`.
7 changes: 7 additions & 0 deletions pkg-r/R/querychat-package.R
@@ -0,0 +1,7 @@
#' @keywords internal
"_PACKAGE"

## usethis namespace: start
#' @importFrom lifecycle deprecated
## usethis namespace: end
NULL