Init PR #1

Merged · 3 commits · Sep 29, 2023
1 change: 1 addition & 0 deletions langserve/.python-version
@@ -0,0 +1 @@
langserve_3_9
54 changes: 54 additions & 0 deletions langserve/Makefile
@@ -0,0 +1,54 @@
.PHONY: all lint format test help

# Default target executed when no arguments are given to make.
all: help

######################
# TESTING AND COVERAGE
######################

# Define a variable for the test file path.
TEST_FILE ?= tests/unit_tests/

test:
	poetry run pytest --disable-socket --allow-unix-socket $(TEST_FILE)

######################
# LINTING AND FORMATTING
######################

# Define a variable for Python and notebook files.
PYTHON_FILES=.
lint format: PYTHON_FILES=.
lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/langserve --name-only --diff-filter=d master | grep -E '\.py$$|\.ipynb$$')

lint lint_diff:
	poetry run ruff .
	poetry run black $(PYTHON_FILES) --check

format format_diff:
	poetry run black $(PYTHON_FILES)
	poetry run ruff --select I --fix $(PYTHON_FILES)

spell_check:
	poetry run codespell --toml pyproject.toml

spell_fix:
	poetry run codespell --toml pyproject.toml -w

######################
# HELP
######################

help:
	@echo '===================='
	@echo '-- LINTING --'
	@echo 'format - run code formatters'
	@echo 'lint - run linters'
	@echo 'spell_check - run codespell on the project'
	@echo 'spell_fix - run codespell on the project and fix the errors'
	@echo '-- TESTS --'
	@echo 'coverage - run unit tests and generate coverage report'
	@echo 'test - run unit tests'
	@echo 'test TEST_FILE=<test_file> - run all tests in file'
	@echo '-- DOCUMENTATION tasks are from the top-level Makefile --'
124 changes: 124 additions & 0 deletions langserve/README.md
@@ -0,0 +1,124 @@
# LangServe 🦜️🔗

## Overview

`LangServe` is a library that lets developers host their LangChain runnables and
call into them remotely through a runnable interface.

## Examples

For more examples, see the [examples](./examples) directory.

### Server

```python
#!/usr/bin/env python
from typing import List, Union

from fastapi import FastAPI
from langchain.chat_models import ChatAnthropic, ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.schema import HumanMessage, SystemMessage
from langserve import add_routes
from typing_extensions import TypedDict


app = FastAPI(
    title="LangChain Server",
    version="1.0",
    description="A simple API server using LangChain's Runnable interfaces",
)


# Serve OpenAI and Anthropic models
LLMInput = Union[List[Union[SystemMessage, HumanMessage, str]], str]

add_routes(
    app,
    ChatOpenAI(),
    path="/openai",
    input_type=LLMInput,
    config_keys=[],
)
add_routes(
    app,
    ChatAnthropic(),
    path="/anthropic",
    input_type=LLMInput,
    config_keys=[],
)


# Serve a joke chain
class ChainInput(TypedDict):
    """The input to the chain."""

    topic: str
    """The topic of the joke."""


model = ChatAnthropic()
prompt = ChatPromptTemplate.from_template("tell me a joke about {topic}")
add_routes(app, prompt | model, path="/chain", input_type=ChainInput)

if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="localhost", port=8000)
```
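
Each route is served over plain HTTP, so the endpoints can also be called directly. The snippet below is an illustrative sketch rather than part of the documented API: it assumes each path exposes an `invoke` endpoint accepting a JSON body of the form `{"input": ...}`, which is what `RemoteRunnable` sends under the hood.

```python
import requests

# Hypothetical direct HTTP call to the joke chain mounted at /chain;
# the RemoteRunnable client shown below wraps calls like this for you.
response = requests.post(
    "http://localhost:8000/chain/invoke",
    json={"input": {"topic": "parrots"}},
)
print(response.json())
```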


### Client

```python
from langchain.schema import SystemMessage, HumanMessage
from langchain.prompts import ChatPromptTemplate
from langchain.schema.runnable import RunnableMap
from langserve import RemoteRunnable

openai = RemoteRunnable("http://localhost:8000/openai/")
anthropic = RemoteRunnable("http://localhost:8000/anthropic/")
joke_chain = RemoteRunnable("http://localhost:8000/chain/")

joke_chain.invoke({"topic": "parrots"})

# or async (top-level await works in notebooks and other async contexts)
await joke_chain.ainvoke({"topic": "parrots"})

prompt = [
    SystemMessage(content="Act like either a cat or a parrot."),
    HumanMessage(content="Hello!"),
]

# Supports astream
async for msg in anthropic.astream(prompt):
    print(msg, end="", flush=True)

prompt = ChatPromptTemplate.from_messages(
    [("system", "Tell me a long story about {topic}")]
)

# Can define custom chains
chain = prompt | RunnableMap({
    "openai": openai,
    "anthropic": anthropic,
})

chain.batch([{"topic": "parrots"}, {"topic": "cats"}])
```

## Installation

```bash
# pip install langserve[all]  # has not been published to PyPI yet
```

Alternatively, use the `client` extra for client-only code, or the `server` extra for server-only code (e.g. `pip install langserve[client]`).

## Features

- Deploy runnables with FastAPI
- Client can use remote runnables almost as if they were local
- Supports async
- Supports batch
- Supports stream

### Limitations

- Chain callbacks cannot be passed from the client to the server
163 changes: 163 additions & 0 deletions langserve/examples/chain/client.ipynb
@@ -0,0 +1,163 @@
{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Client\n",
    "\n",
    "Demo of a client interacting with the simple chain server, which deploys a chain that tells jokes about a particular topic."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "from langchain.prompts.chat import (\n",
    "    HumanMessagePromptTemplate,\n",
    "    SystemMessagePromptTemplate,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "from langserve import RemoteRunnable\n",
    "\n",
    "remote_runnable = RemoteRunnable(\"http://localhost:8000/\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A remote runnable has the same interface as a local runnable."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "response = await remote_runnable.ainvoke({\"topic\": \"sports\"})"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The client can also execute LangChain code synchronously and pass in configs."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[AIMessage(content='Why did the football coach go to the bank?\\n\\nBecause he wanted to get his quarterback!', additional_kwargs={}, example=False),\n",
       " AIMessage(content='Why did the car bring a sweater to the race?\\n\\nBecause it wanted to have a \"car-digan\" finish!', additional_kwargs={}, example=False)]"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from langchain.schema.runnable.config import RunnableConfig\n",
    "\n",
    "remote_runnable.batch([{\"topic\": \"sports\"}, {\"topic\": \"cars\"}])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The server supports streaming (using HTTP server-sent events), which helps when interacting with long responses in real time."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Ah, indulge me in this lighthearted endeavor, dear interlocutor! Allow me to regale you with a rather verbose jest concerning our hirsute friends of the wilderness, the bears!\n",
      "\n",
      "Once upon a time, in the vast expanse of a verdant forest, there existed a most erudite and sagacious bear, renowned for his prodigious intellect and unabated curiosity. This bear, with his inquisitive disposition, embarked on a quest to uncover the secrets of humor, for he believed that laughter possessed the power to unite and uplift the spirits of all creatures, great and small.\n",
      "\n",
      "Upon his journey, our erudite bear encountered a group of mischievous woodland creatures, who, captivated by his exalted intelligence, dared to challenge him to create a jest that would truly encompass the majestic essence of the bear. Our sagacious bear, never one to back down from a challenge, took a moment to ponder, his profound thoughts swirling amidst the verdant canopy above.\n",
      "\n",
      "After much contemplation, the bear delivered his jest, thusly: \"Pray, dear friends, envision a most estimable gathering of bears, replete with their formidable bulk and majestic presence. In this symposium of ursine brilliance, one bear, with a prodigious appetite, sauntered forth to procure his daily sustenance. Alas, upon reaching his intended destination, he encountered a dapper gentleman, clad in a most resplendent suit, hitherto unseen in the realm of the forest.\n",
      "\n",
      "The gentleman, possessing an air of sophistication, addressed the bear with an air of candor, remarking, 'Good sir, I must confess that your corporeal form inspires awe and admiration in equal measure. However, I beseech you, kindly abstain from consuming the berries that grow in this territory, for they possess a most deleterious effect upon the digestive systems of bears.'\n",
      "\n",
      "In response, the bear, known for his indomitable spirit, replied in a most eloquent manner, 'Dearest sir, I appreciate your concern and your eloquent admonition, yet I must humbly convey that the allure of these succulent berries is simply irresistible. The culinary satisfaction they bring far outweighs the potential discomfort they may inflict upon my digestive faculties. Therefore, I am compelled to disregard your sage counsel and indulge in their delectable essence.'\n",
      "\n",
      "And so, dear listener, the bear, driven by his insatiable hunger, proceeded to relish the berries with unmitigated gusto, heedless of the gentleman's cautions. After partaking in his feast, the bear, much to his chagrin, soon discovered the veracity of the gentleman's warning, as his digestive faculties embarked upon an unrestrained journey of turmoil and trepidation.\n",
      "\n",
      "In the aftermath of his ill-fated indulgence, the bear, with a countenance of utmost regret, turned to the gentleman and uttered, 'Verily, good sir, your counsel was indeed sagacious and prescient. I find myself ensnared in a maelstrom of gastrointestinal distress, beseeching the heavens for respite from this discomfort.'\n",
      "\n",
      "And thus, dear interlocutor, we find ourselves at the crux of this jest, whereupon the bear, in his most vulnerable state, beseeches the heavens for relief from his gastrointestinal plight. In this moment of levity, we are reminded that even the most erudite and sagacious among us can succumb to the allure of temptation, and the consequences that follow serve as a timeless lesson for all creatures within the realm of nature.\"\n",
      "\n",
      "Oh, the whimsy of the bear's gastronomic misadventure! May it serve as a reminder that, even amidst the grandeur of the natural world, we must exercise prudence and contemplate the ramifications of our actions."
     ]
    }
   ],
   "source": [
    "async for chunk in remote_runnable.astream({\"topic\": \"bears, but super verbose\"}):\n",
    "    print(chunk.content, end=\"\", flush=True)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
33 changes: 33 additions & 0 deletions langserve/examples/chain/server.py
@@ -0,0 +1,33 @@
#!/usr/bin/env python
"""Example LangChain server that exposes a chain composed of a prompt and an LLM."""
from fastapi import FastAPI
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from typing_extensions import TypedDict

from langserve import add_routes

model = ChatOpenAI()
prompt = ChatPromptTemplate.from_template("tell me a joke about {topic}")
chain = prompt | model

app = FastAPI(
    title="LangChain Server",
    version="1.0",
    description="Spin up a simple API server using LangChain's Runnable interfaces",
)


class ChainInput(TypedDict):
    """The input to the chain."""

    topic: str
    """The topic of the joke."""


add_routes(app, chain, input_type=ChainInput)

if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="localhost", port=8000)
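
For reference, a minimal client for this example server (mirroring the notebook above) could look like the sketch below; it assumes the server is running locally on port 8000 and that `add_routes` without an explicit `path` mounts the chain at the root route.

```python
from langserve import RemoteRunnable

# Connect to the example server started above (root route "/").
remote_chain = RemoteRunnable("http://localhost:8000/")
print(remote_chain.invoke({"topic": "cats"}))
```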