core[patch]: docstring update (#16813)
- added missing docstrings
- formatted docstrings to a consistent form
leo-gan committed Feb 9, 2024
1 parent e10030e commit ae66bcb
Showing 33 changed files with 162 additions and 131 deletions.
10 changes: 9 additions & 1 deletion libs/core/langchain_core/messages/__init__.py
@@ -22,7 +22,7 @@
def get_buffer_string(
messages: Sequence[BaseMessage], human_prefix: str = "Human", ai_prefix: str = "AI"
) -> str:
"""Convert sequence of Messages to strings and concatenate them into one string.
"""Convert a sequence of Messages to strings and concatenate them into one string.
Args:
messages: Messages to be converted to strings.
@@ -111,6 +111,14 @@ def messages_from_dict(messages: Sequence[dict]) -> List[BaseMessage]:


def message_chunk_to_message(chunk: BaseMessageChunk) -> BaseMessage:
"""Convert a message chunk to a message.
Args:
chunk: Message chunk to convert.
Returns:
Message.
"""
if not isinstance(chunk, BaseMessageChunk):
return chunk
# chunk classes always have the equivalent non-chunk class as their first parent
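A quick usage sketch, not part of this commit, of the two helpers whose docstrings are updated above; the conversation text is invented for illustration.

```python
from langchain_core.messages import (
    AIMessage,
    AIMessageChunk,
    HumanMessage,
    get_buffer_string,
    message_chunk_to_message,
)

# Join a sequence of messages into one prefixed transcript string.
transcript = get_buffer_string(
    [HumanMessage(content="What is LCEL?"), AIMessage(content="A composition syntax.")],
    human_prefix="Human",
    ai_prefix="AI",
)
# transcript == "Human: What is LCEL?\nAI: A composition syntax."

# Collapse a streamed chunk back into its non-chunk message class.
message = message_chunk_to_message(AIMessageChunk(content="Hello"))
assert isinstance(message, AIMessage)
```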
4 changes: 2 additions & 2 deletions libs/core/langchain_core/messages/ai.py
@@ -8,7 +8,7 @@


class AIMessage(BaseMessage):
"""A Message from an AI."""
"""Message from an AI."""

example: bool = False
"""Whether this Message is being passed in to the model as part of an example
@@ -27,7 +27,7 @@ def get_lc_namespace(cls) -> List[str]:


class AIMessageChunk(AIMessage, BaseMessageChunk):
"""A Message chunk from an AI."""
"""Message chunk from an AI."""

# Ignoring mypy re-assignment here since we're overriding the value
# to make sure that the chunk variant can be discriminated from the
13 changes: 11 additions & 2 deletions libs/core/langchain_core/messages/base.py
@@ -12,7 +12,7 @@


class BaseMessage(Serializable):
"""The base abstract Message class.
"""Base abstract Message class.
Messages are the inputs and outputs of ChatModels.
"""
@@ -96,7 +96,7 @@ def merge_content(


class BaseMessageChunk(BaseMessage):
"""A Message chunk, which can be concatenated with other Message chunks."""
"""Message chunk, which can be concatenated with other Message chunks."""

@classmethod
def get_lc_namespace(cls) -> List[str]:
@@ -195,6 +195,15 @@ def messages_to_dict(messages: Sequence[BaseMessage]) -> List[dict]:


def get_msg_title_repr(title: str, *, bold: bool = False) -> str:
"""Get a title representation for a message.
Args:
title: The title.
bold: Whether to bold the title.
Returns:
The title representation.
"""
padded = " " + title + " "
sep_len = (80 - len(padded)) // 2
sep = "=" * sep_len
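The base.py hunks above document two behaviours worth seeing in code: message chunks concatenate with `+`, and get_msg_title_repr renders a padded title line. A minimal sketch, not taken from the repository:

```python
from langchain_core.messages import AIMessageChunk
from langchain_core.messages.base import get_msg_title_repr

# Message chunks support `+`, merging their content into a single chunk.
chunk = AIMessageChunk(content="Hello, ") + AIMessageChunk(content="world")
assert chunk.content == "Hello, world"

# Render an "="-padded title line (bold=True adds terminal styling).
print(get_msg_title_repr("Ai Message", bold=False))
```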
4 changes: 2 additions & 2 deletions libs/core/langchain_core/messages/chat.py
@@ -8,7 +8,7 @@


class ChatMessage(BaseMessage):
"""A Message that can be assigned an arbitrary speaker (i.e. role)."""
"""Message that can be assigned an arbitrary speaker (i.e. role)."""

role: str
"""The speaker / role of the Message."""
@@ -25,7 +25,7 @@ def get_lc_namespace(cls) -> List[str]:


class ChatMessageChunk(ChatMessage, BaseMessageChunk):
"""A Chat Message chunk."""
"""Chat Message chunk."""

# Ignoring mypy re-assignment here since we're overriding the value
# to make sure that the chunk variant can be discriminated from the
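As a hedged illustration of the ChatMessage docstrings above (the role and text are invented): an arbitrary speaker can be attached to a message, and chunks with the same role concatenate.

```python
from langchain_core.messages import ChatMessage, ChatMessageChunk

review = ChatMessage(role="reviewer", content="Please tighten the abstract.")

merged = ChatMessageChunk(role="reviewer", content="Looks ") + ChatMessageChunk(
    role="reviewer", content="good."
)
assert merged.content == "Looks good."
```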
4 changes: 2 additions & 2 deletions libs/core/langchain_core/messages/function.py
@@ -8,7 +8,7 @@


class FunctionMessage(BaseMessage):
"""A Message for passing the result of executing a function back to a model."""
"""Message for passing the result of executing a function back to a model."""

name: str
"""The name of the function that was executed."""
@@ -25,7 +25,7 @@ def get_lc_namespace(cls) -> List[str]:


class FunctionMessageChunk(FunctionMessage, BaseMessageChunk):
"""A Function Message chunk."""
"""Function Message chunk."""

# Ignoring mypy re-assignment here since we're overriding the value
# to make sure that the chunk variant can be discriminated from the
4 changes: 2 additions & 2 deletions libs/core/langchain_core/messages/human.py
@@ -4,7 +4,7 @@


class HumanMessage(BaseMessage):
"""A Message from a human."""
"""Message from a human."""

example: bool = False
"""Whether this Message is being passed in to the model as part of an example
@@ -23,7 +23,7 @@ def get_lc_namespace(cls) -> List[str]:


class HumanMessageChunk(HumanMessage, BaseMessageChunk):
"""A Human Message chunk."""
"""Human Message chunk."""

# Ignoring mypy re-assignment here since we're overriding the value
# to make sure that the chunk variant can be discriminated from the
4 changes: 2 additions & 2 deletions libs/core/langchain_core/messages/system.py
@@ -4,7 +4,7 @@


class SystemMessage(BaseMessage):
"""A Message for priming AI behavior, usually passed in as the first of a sequence
"""Message for priming AI behavior, usually passed in as the first of a sequence
of input messages.
"""

@@ -20,7 +20,7 @@ def get_lc_namespace(cls) -> List[str]:


class SystemMessageChunk(SystemMessage, BaseMessageChunk):
"""A System Message chunk."""
"""System Message chunk."""

# Ignoring mypy re-assignment here since we're overriding the value
# to make sure that the chunk variant can be discriminated from the
4 changes: 2 additions & 2 deletions libs/core/langchain_core/messages/tool.py
@@ -8,7 +8,7 @@


class ToolMessage(BaseMessage):
"""A Message for passing the result of executing a tool back to a model."""
"""Message for passing the result of executing a tool back to a model."""

tool_call_id: str
"""Tool call that this message is responding to."""
@@ -25,7 +25,7 @@ def get_lc_namespace(cls) -> List[str]:


class ToolMessageChunk(ToolMessage, BaseMessageChunk):
"""A Tool Message chunk."""
"""Tool Message chunk."""

# Ignoring mypy re-assignment here since we're overriding the value
# to make sure that the chunk variant can be discriminated from the
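A small sketch of the two result-passing message types documented above; the tool name, call id, and content are invented:

```python
from langchain_core.messages import FunctionMessage, ToolMessage

# Tie a tool's output to the tool call that requested it.
tool_result = ToolMessage(content="72°F and sunny", tool_call_id="call_123")

# Older, name-keyed equivalent for function calling.
function_result = FunctionMessage(name="get_weather", content="72°F and sunny")
```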
2 changes: 1 addition & 1 deletion libs/core/langchain_core/outputs/chat_generation.py
@@ -35,7 +35,7 @@ def get_lc_namespace(cls) -> List[str]:


class ChatGenerationChunk(ChatGeneration):
"""A ChatGeneration chunk, which can be concatenated with other
"""ChatGeneration chunk, which can be concatenated with other
ChatGeneration chunks.
Attributes:
2 changes: 1 addition & 1 deletion libs/core/langchain_core/outputs/generation.py
@@ -32,7 +32,7 @@ def get_lc_namespace(cls) -> List[str]:


class GenerationChunk(Generation):
"""A Generation chunk, which can be concatenated with other Generation chunks."""
"""Generation chunk, which can be concatenated with other Generation chunks."""

@classmethod
def get_lc_namespace(cls) -> List[str]:
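Both chunk docstrings above describe the same concatenation behaviour; a minimal sketch with invented texts:

```python
from langchain_core.messages import AIMessageChunk
from langchain_core.outputs import ChatGenerationChunk, GenerationChunk

full = GenerationChunk(text="Stream") + GenerationChunk(text="ing works.")
assert full.text == "Streaming works."

chat_full = ChatGenerationChunk(message=AIMessageChunk(content="Hel")) + ChatGenerationChunk(
    message=AIMessageChunk(content="lo")
)
assert chat_full.message.content == "Hello"
```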
2 changes: 1 addition & 1 deletion libs/core/langchain_core/prompts/chat.py
@@ -556,7 +556,7 @@ def pretty_print(self) -> None:


class ChatPromptTemplate(BaseChatPromptTemplate):
"""A prompt template for chat models.
"""Prompt template for chat models.
Use to create flexible templated prompts for chat models.
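A short sketch of the flexible templated chat prompt the docstring describes; the system text and variable name are made up:

```python
from langchain_core.prompts import ChatPromptTemplate

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a concise assistant."),
        ("human", "Summarize: {text}"),
    ]
)
messages = prompt.format_messages(text="LangChain docstring cleanup.")
# -> [SystemMessage(...), HumanMessage(...)]
```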
2 changes: 1 addition & 1 deletion libs/core/langchain_core/prompts/pipeline.py
@@ -11,7 +11,7 @@ def _get_inputs(inputs: dict, input_variables: List[str]) -> dict:


class PipelinePromptTemplate(BasePromptTemplate):
"""A prompt template for composing multiple prompt templates together.
"""Prompt template for composing multiple prompt templates together.
This can be useful when you want to reuse parts of prompts.
A PipelinePrompt consists of two main parts:
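A hedged sketch of composing prompt templates with PipelinePromptTemplate; the templates and variable names are illustrative, not from the repository:

```python
from langchain_core.prompts import PipelinePromptTemplate, PromptTemplate

intro = PromptTemplate.from_template("You are impersonating {person}.")
question = PromptTemplate.from_template("Q: {query}\nA:")
final = PromptTemplate.from_template("{intro}\n\n{question}")

# Each named sub-prompt is formatted first, then fed into the final prompt.
pipeline = PipelinePromptTemplate(
    final_prompt=final,
    pipeline_prompts=[("intro", intro), ("question", question)],
)
print(pipeline.format(person="Ada Lovelace", query="What is a loop?"))
```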
22 changes: 11 additions & 11 deletions libs/core/langchain_core/runnables/base.py
@@ -1632,7 +1632,7 @@ async def _atransform_stream_with_config(


class RunnableSerializable(Serializable, Runnable[Input, Output]):
"""A Runnable that can be serialized to JSON."""
"""Runnable that can be serialized to JSON."""

name: Optional[str] = None
"""The name of the runnable. Used for debugging and tracing."""
@@ -1752,7 +1752,7 @@ def _seq_output_schema(


class RunnableSequence(RunnableSerializable[Input, Output]):
"""A sequence of runnables, where the output of each is the input of the next.
"""Sequence of Runnables, where the output of each is the input of the next.
RunnableSequence is the most important composition operator in LangChain as it is
used in virtually every chain.
@@ -1764,7 +1764,7 @@ class RunnableSequence(RunnableSerializable[Input, Output]):
The default implementations of `batch` and `abatch` utilize threadpools and
asyncio gather and will be faster than naive invocation of invoke or ainvoke
for IO bound runnables.
for IO bound Runnables.
Batching is implemented by invoking the batch method on each component of the
RunnableSequence in order.
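A minimal sketch of the sequencing and batching behaviour described above, using RunnableLambda steps invented for the example:

```python
from langchain_core.runnables import RunnableLambda

# `|` builds a RunnableSequence: each step's output is the next step's input.
chain = RunnableLambda(lambda x: x + 1) | RunnableLambda(lambda x: x * 2)

chain.invoke(3)         # -> 8
chain.batch([1, 2, 3])  # -> [4, 6, 8], each component batched in order
```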
@@ -2451,11 +2451,11 @@ async def input_aiter() -> AsyncIterator[Input]:


class RunnableParallel(RunnableSerializable[Input, Dict[str, Any]]):
"""A runnable that runs a mapping of runnables in parallel, and returns a mapping
"""Runnable that runs a mapping of Runnables in parallel, and returns a mapping
of their outputs.
RunnableParallel is one of the two main composition primitives for the LCEL,
alongside RunnableSequence. It invokes runnables concurrently, providing the same
alongside RunnableSequence. It invokes Runnables concurrently, providing the same
input to each.
A RunnableParallel can be instantiated directly or by using a dict literal within a
@@ -2882,7 +2882,7 @@ async def input_aiter() -> AsyncIterator[Input]:


class RunnableGenerator(Runnable[Input, Output]):
"""A runnable that runs a generator function.
"""Runnable that runs a generator function.
RunnableGenerators can be instantiated directly or by using a generator within
a sequence.
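A sketch covering the two classes documented above: a dict literal inside a sequence is coerced to a RunnableParallel, and a generator function becomes a streaming RunnableGenerator (the functions are invented for the example):

```python
from typing import Iterator

from langchain_core.runnables import RunnableGenerator, RunnableLambda

# Both branches of the dict receive the same input and run concurrently.
fan_out = RunnableLambda(lambda x: x) | {
    "doubled": RunnableLambda(lambda x: x * 2),
    "squared": RunnableLambda(lambda x: x**2),
}
fan_out.invoke(3)  # -> {"doubled": 6, "squared": 9}

def repeat(chunks: Iterator[str]) -> Iterator[str]:
    # Yield each incoming chunk, then an upper-cased copy of it.
    for text in chunks:
        yield text
        yield text.upper()

streamer = RunnableGenerator(repeat)
list(streamer.stream("hi"))  # -> ["hi", "HI"]
```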
@@ -3730,7 +3730,7 @@ async def input_aiter() -> AsyncIterator[Input]:


class RunnableEachBase(RunnableSerializable[List[Input], List[Output]]):
"""A runnable that delegates calls to another runnable
"""Runnable that delegates calls to another Runnable
with each element of the input sequence.
Use only if creating a new RunnableEach subclass with different __init__ args.
@@ -3838,13 +3838,13 @@ async def astream_events(


class RunnableEach(RunnableEachBase[Input, Output]):
"""A runnable that delegates calls to another runnable
"""Runnable that delegates calls to another Runnable
with each element of the input sequence.
It allows you to call multiple inputs with the bounded Runnable.
RunnableEach makes it easy to run multiple inputs for the runnable.
In the below example, we associate and run three three inputs
In the below example, we associate and run three inputs
with a Runnable:
.. code-block:: python
@@ -3910,7 +3910,7 @@ def with_listeners(


class RunnableBindingBase(RunnableSerializable[Input, Output]):
"""A runnable that delegates calls to another runnable with a set of kwargs.
"""Runnable that delegates calls to another Runnable with a set of kwargs.
Use only if creating a new RunnableBinding subclass with different __init__ args.
@@ -4189,7 +4189,7 @@ async def atransform(


class RunnableBinding(RunnableBindingBase[Input, Output]):
"""Wrap a runnable with additional functionality.
"""Wrap a Runnable with additional functionality.
A RunnableBinding can be thought of as a "runnable decorator" that
preserves the essential features of Runnable; i.e., batching, streaming,
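A hedged sketch of RunnableEach and of the RunnableBinding produced by `.bind()`; the callables and the bound keyword are invented, and forwarding of bound kwargs into a RunnableLambda's function is assumed for this sketch rather than stated by the diff:

```python
from langchain_core.runnables import RunnableEach, RunnableLambda

# RunnableEach maps the bound Runnable over each element of the input list.
shout = RunnableLambda(lambda text: text.upper())
RunnableEach(bound=shout).invoke(["alpha", "beta"])  # -> ["ALPHA", "BETA"]

def template(text: str, suffix: str = "") -> str:
    return text + suffix

# .bind() wraps the Runnable in a RunnableBinding that carries suffix="!"
# into every call (kwarg forwarding assumed, as noted above).
bound = RunnableLambda(template).bind(suffix="!")
bound.invoke("hello")  # -> "hello!"
```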
6 changes: 3 additions & 3 deletions libs/core/langchain_core/runnables/branch.py
@@ -38,13 +38,13 @@


class RunnableBranch(RunnableSerializable[Input, Output]):
"""A Runnable that selects which branch to run based on a condition.
"""Runnable that selects which branch to run based on a condition.
The runnable is initialized with a list of (condition, runnable) pairs and
The Runnable is initialized with a list of (condition, Runnable) pairs and
a default branch.
When operating on an input, the first condition that evaluates to True is
selected, and the corresponding runnable is run on the input.
selected, and the corresponding Runnable is run on the input.
If no condition evaluates to True, the default branch is run on the input.
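A minimal sketch of the (condition, Runnable) pairs plus default branch described above; the branching logic is invented:

```python
from langchain_core.runnables import RunnableBranch, RunnableLambda

branch = RunnableBranch(
    (lambda x: isinstance(x, int), RunnableLambda(lambda x: x + 1)),
    (lambda x: isinstance(x, str), RunnableLambda(lambda x: x.upper())),
    RunnableLambda(lambda x: "unsupported input"),  # default branch
)

branch.invoke(41)    # -> 42, first matching condition wins
branch.invoke("hi")  # -> "HI"
branch.invoke(None)  # -> "unsupported input"
```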
10 changes: 5 additions & 5 deletions libs/core/langchain_core/runnables/configurable.py
@@ -42,7 +42,7 @@


class DynamicRunnable(RunnableSerializable[Input, Output]):
"""A Serializable Runnable that can be dynamically configured."""
"""Serializable Runnable that can be dynamically configured."""

default: RunnableSerializable[Input, Output]

@@ -220,7 +220,7 @@ async def atransform(


class RunnableConfigurableFields(DynamicRunnable[Input, Output]):
"""A Runnable that can be dynamically configured."""
"""Runnable that can be dynamically configured."""

fields: Dict[str, AnyConfigurableField]

@@ -297,7 +297,7 @@ def _prepare(

# Before Python 3.11 native StrEnum is not available
class StrEnum(str, enum.Enum):
"""A string enum."""
"""String enum."""

pass

@@ -313,10 +313,10 @@ class StrEnum(str, enum.Enum):


class RunnableConfigurableAlternatives(DynamicRunnable[Input, Output]):
"""A Runnable that can be dynamically configured.
"""Runnable that can be dynamically configured.
A RunnableConfigurableAlternatives should be initiated using the
`configurable_alternatives` method of a runnable or can be
`configurable_alternatives` method of a Runnable or can be
initiated directly as well.
Here is an example of using a RunnableConfigurableAlternatives that uses
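A hedged sketch of `configurable_alternatives`, using prompt templates so it stays dependency-free; the config id "prompt_style" and the templates are invented:

```python
from langchain_core.prompts import PromptTemplate
from langchain_core.runnables import ConfigurableField

prompt = PromptTemplate.from_template(
    "Tell me a joke about {topic}"
).configurable_alternatives(
    ConfigurableField(id="prompt_style"),
    default_key="joke",
    poem=PromptTemplate.from_template("Write a short poem about {topic}"),
)

prompt.invoke({"topic": "otters"})  # default "joke" alternative
# Select an alternative at call time via the configurable field id.
prompt.with_config(configurable={"prompt_style": "poem"}).invoke({"topic": "otters"})
```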
12 changes: 6 additions & 6 deletions libs/core/langchain_core/runnables/fallbacks.py
@@ -39,20 +39,20 @@


class RunnableWithFallbacks(RunnableSerializable[Input, Output]):
"""A Runnable that can fallback to other Runnables if it fails.
"""Runnable that can fallback to other Runnables if it fails.
External APIs (e.g., APIs for a language model) may at times experience
degraded performance or even downtime.
In these cases, it can be useful to have a fallback runnable that can be
used in place of the original runnable (e.g., fallback to another LLM provider).
In these cases, it can be useful to have a fallback Runnable that can be
used in place of the original Runnable (e.g., fallback to another LLM provider).
Fallbacks can be defined at the level of a single runnable, or at the level
of a chain of runnables. Fallbacks are tried in order until one succeeds or
Fallbacks can be defined at the level of a single Runnable, or at the level
of a chain of Runnables. Fallbacks are tried in order until one succeeds or
all fail.
While you can instantiate a ``RunnableWithFallbacks`` directly, it is usually
more convenient to use the ``with_fallbacks`` method on a runnable.
more convenient to use the ``with_fallbacks`` method on a Runnable.
Example:
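A minimal sketch of the fallback behaviour described above; a Runnable that always raises stands in for a flaky external API (names invented):

```python
from langchain_core.runnables import RunnableLambda

def flaky_api(_: str) -> str:
    raise RuntimeError("provider is down")

primary = RunnableLambda(flaky_api)
backup = RunnableLambda(lambda q: f"cached answer for {q!r}")

resilient = primary.with_fallbacks([backup])
resilient.invoke("ping")  # the RuntimeError is caught; the backup runs instead
```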
