diff --git a/libs/community/langchain_community/adapters/openai.py b/libs/community/langchain_community/adapters/openai.py
index 4f03c3a9c452f..e6ff136f7a7c3 100644
--- a/libs/community/langchain_community/adapters/openai.py
+++ b/libs/community/langchain_community/adapters/openai.py
@@ -40,25 +40,33 @@ async def aenumerate(


 class IndexableBaseModel(BaseModel):
-    """Allows a BaseModel to return its fields by string variable indexing"""
+    """Allows a BaseModel to return its fields by string variable indexing."""

     def __getitem__(self, item: str) -> Any:
         return getattr(self, item)


 class Choice(IndexableBaseModel):
+    """Choice."""
+
     message: dict


 class ChatCompletions(IndexableBaseModel):
+    """Chat completions."""
+
     choices: List[Choice]


 class ChoiceChunk(IndexableBaseModel):
+    """Choice chunk."""
+
     delta: dict


 class ChatCompletionChunk(IndexableBaseModel):
+    """Chat completion chunk."""
+
     choices: List[ChoiceChunk]
@@ -301,7 +309,7 @@ def convert_messages_for_finetuning(


 class Completions:
-    """Completion."""
+    """Completions."""

     @overload
     @staticmethod
@@ -399,6 +407,8 @@ async def acreate(


 class Chat:
+    """Chat."""
+
     def __init__(self) -> None:
         self.completions = Completions()

diff --git a/libs/community/langchain_community/agent_toolkits/openapi/planner.py b/libs/community/langchain_community/agent_toolkits/openapi/planner.py
index e112dfc51506e..c04876514d277 100644
--- a/libs/community/langchain_community/agent_toolkits/openapi/planner.py
+++ b/libs/community/langchain_community/agent_toolkits/openapi/planner.py
@@ -191,7 +191,7 @@ async def _arun(self, text: str) -> str:


 class RequestsDeleteToolWithParsing(BaseRequestsTool, BaseTool):
-    """A tool that sends a DELETE request and parses the response."""
+    """Tool that sends a DELETE request and parses the response."""

     name: str = "requests_delete"
     """The name of the tool."""
diff --git a/libs/community/langchain_community/callbacks/mlflow_callback.py b/libs/community/langchain_community/callbacks/mlflow_callback.py
index f631a5c51c29b..b524bc19c2b0a 100644
--- a/libs/community/langchain_community/callbacks/mlflow_callback.py
+++ b/libs/community/langchain_community/callbacks/mlflow_callback.py
@@ -39,6 +39,7 @@ def import_mlflow() -> Any:


 def mlflow_callback_metrics() -> List[str]:
+    """Get the metrics to log to MLflow."""
     return [
         "step",
         "starts",
@@ -59,6 +60,7 @@


 def get_text_complexity_metrics() -> List[str]:
+    """Get the text complexity metrics from textstat."""
     return [
         "flesch_reading_ease",
         "flesch_kincaid_grade",
diff --git a/libs/community/langchain_community/callbacks/streamlit/streamlit_callback_handler.py b/libs/community/langchain_community/callbacks/streamlit/streamlit_callback_handler.py
index b336a09a3a6a3..725862c53b697 100644
--- a/libs/community/langchain_community/callbacks/streamlit/streamlit_callback_handler.py
+++ b/libs/community/langchain_community/callbacks/streamlit/streamlit_callback_handler.py
@@ -225,7 +225,7 @@ def clear(self) -> None:


 class StreamlitCallbackHandler(BaseCallbackHandler):
-    """A callback handler that writes to a Streamlit app."""
+    """Callback handler that writes to a Streamlit app."""

     def __init__(
         self,
diff --git a/libs/community/langchain_community/chat_message_histories/sql.py b/libs/community/langchain_community/chat_message_histories/sql.py
index e30a86a52d60f..f77deee61e762 100644
--- a/libs/community/langchain_community/chat_message_histories/sql.py
+++ b/libs/community/langchain_community/chat_message_histories/sql.py
@@ -21,7 +21,7 @@


 class BaseMessageConverter(ABC):
-    """The class responsible for converting BaseMessage to your SQLAlchemy model."""
+    """Class that converts BaseMessage to the SQLAlchemy model."""

     @abstractmethod
     def from_sql_model(self, sql_message: Any) -> BaseMessage:
diff --git a/libs/community/langchain_community/chat_models/azureml_endpoint.py b/libs/community/langchain_community/chat_models/azureml_endpoint.py
index 1684be6a681d2..dc1385411f996 100644
--- a/libs/community/langchain_community/chat_models/azureml_endpoint.py
+++ b/libs/community/langchain_community/chat_models/azureml_endpoint.py
@@ -20,6 +20,8 @@


 class LlamaContentFormatter(ContentFormatterBase):
+    """Content formatter for `LLaMA`."""
+
     def __init__(self) -> None:
         raise TypeError(
             "`LlamaContentFormatter` is deprecated for chat models. Use "
@@ -34,7 +36,7 @@ class LlamaChatContentFormatter(ContentFormatterBase):

     @staticmethod
     def _convert_message_to_dict(message: BaseMessage) -> Dict:
-        """Converts message to a dict according to role"""
+        """Converts a message to a dict according to its role."""
         content = cast(str, message.content)
         if isinstance(message, HumanMessage):
             return {
diff --git a/libs/community/langchain_community/chat_models/deepinfra.py b/libs/community/langchain_community/chat_models/deepinfra.py
index 2338967779f8c..349e6074fd782 100644
--- a/libs/community/langchain_community/chat_models/deepinfra.py
+++ b/libs/community/langchain_community/chat_models/deepinfra.py
@@ -58,6 +58,8 @@


 class ChatDeepInfraException(Exception):
+    """Exception raised when the DeepInfra API returns an error."""
+
     pass


@@ -67,7 +69,7 @@ def _create_retry_decorator(
         Union[AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun]
     ] = None,
 ) -> Callable[[Any], Any]:
-    """Returns a tenacity retry decorator, preconfigured to handle PaLM exceptions"""
+    """Returns a tenacity retry decorator, preconfigured to handle PaLM exceptions."""
     return create_base_retry_decorator(
         error_types=[requests.exceptions.ConnectTimeout, ChatDeepInfraException],
         max_retries=llm.max_retries,
diff --git a/libs/community/langchain_community/chat_models/gpt_router.py b/libs/community/langchain_community/chat_models/gpt_router.py
index 68860e90539de..fe919f4969336 100644
--- a/libs/community/langchain_community/chat_models/gpt_router.py
+++ b/libs/community/langchain_community/chat_models/gpt_router.py
@@ -53,6 +53,8 @@ class GPTRouterException(Exception):


 class GPTRouterModel(BaseModel):
+    """GPTRouter model."""
+
     name: str
     provider_name: str
diff --git a/libs/community/langchain_community/chat_models/volcengine_maas.py b/libs/community/langchain_community/chat_models/volcengine_maas.py
index 7d821713bff24..ddaf849c750c5 100644
--- a/libs/community/langchain_community/chat_models/volcengine_maas.py
+++ b/libs/community/langchain_community/chat_models/volcengine_maas.py
@@ -39,8 +39,8 @@ def convert_dict_to_message(_dict: Mapping[str, Any]) -> AIMessage:


 class VolcEngineMaasChat(BaseChatModel, VolcEngineMaasBase):
+    """Volc Engine Maas hosts a plethora of models.

-    """volc engine maas hosts a plethora of models.
     You can utilize these models through this class.

     To use, you should have the ``volcengine`` python package installed.
diff --git a/libs/community/langchain_community/chat_models/zhipuai.py b/libs/community/langchain_community/chat_models/zhipuai.py
index 3e2e2504b0eca..dd55c5ee12c85 100644
--- a/libs/community/langchain_community/chat_models/zhipuai.py
+++ b/libs/community/langchain_community/chat_models/zhipuai.py
@@ -20,11 +20,15 @@


 class ref(BaseModel):
+    """Reference used in CharacterGLM."""
+
     enable: bool = Field(True)
     search_query: str = Field("")


 class meta(BaseModel):
+    """Metadata used in CharacterGLM."""
+
     user_info: str = Field("")
     bot_info: str = Field("")
     bot_name: str = Field("")
diff --git a/libs/community/langchain_community/document_loaders/chm.py b/libs/community/langchain_community/document_loaders/chm.py
index 381087d9917e5..207d31bd4f872 100644
--- a/libs/community/langchain_community/document_loaders/chm.py
+++ b/libs/community/langchain_community/document_loaders/chm.py
@@ -9,7 +9,7 @@
 class UnstructuredCHMLoader(UnstructuredFileLoader):
     """Load `CHM` files using `Unstructured`.

-    CHM mean Microsoft Compiled HTML Help.
+    CHM means Microsoft Compiled HTML Help.

     Examples
     --------
@@ -35,6 +35,8 @@ def _get_elements(self) -> List:


 class CHMParser(object):
+    """Microsoft Compiled HTML Help (CHM) Parser."""
+
     path: str
     file: "chm.CHMFile"
diff --git a/libs/community/langchain_community/document_loaders/parsers/vsdx.py b/libs/community/langchain_community/document_loaders/parsers/vsdx.py
index d4dde56de0c99..b5077b17191ca 100644
--- a/libs/community/langchain_community/document_loaders/parsers/vsdx.py
+++ b/libs/community/langchain_community/document_loaders/parsers/vsdx.py
@@ -11,6 +11,8 @@


 class VsdxParser(BaseBlobParser, ABC):
+    """Parser for vsdx files."""
+
     def parse(self, blob: Blob) -> Iterator[Document]:  # type: ignore[override]
         """Parse a vsdx file."""
         return self.lazy_parse(blob)
diff --git a/libs/community/langchain_community/document_loaders/youtube.py b/libs/community/langchain_community/document_loaders/youtube.py
index ec6a5e15b570d..c1cea0b2dc320 100644
--- a/libs/community/langchain_community/document_loaders/youtube.py
+++ b/libs/community/langchain_community/document_loaders/youtube.py
@@ -141,6 +141,8 @@ def _parse_video_id(url: str) -> Optional[str]:


 class TranscriptFormat(Enum):
+    """Transcript format."""
+
     TEXT = "text"
     LINES = "lines"
diff --git a/libs/community/langchain_community/embeddings/mlflow.py b/libs/community/langchain_community/embeddings/mlflow.py
index 1b1abb4103237..e44c53d31c532 100644
--- a/libs/community/langchain_community/embeddings/mlflow.py
+++ b/libs/community/langchain_community/embeddings/mlflow.py
@@ -13,7 +13,7 @@ def _chunk(texts: List[str], size: int) -> Iterator[List[str]]:


 class MlflowEmbeddings(Embeddings, BaseModel):
-    """Wrapper around embeddings LLMs in MLflow.
+    """Embedding LLMs in MLflow.

     To use, you should have the `mlflow[genai]` python package installed.
     For more information, see https://mlflow.org/docs/latest/llms/deployments/server.html.
@@ -85,5 +85,7 @@ def embed_query(self, text: str) -> List[float]:


 class MlflowCohereEmbeddings(MlflowEmbeddings):
+    """Cohere embedding LLMs in MLflow."""
+
     query_params: Dict[str, str] = {"input_type": "search_query"}
     documents_params: Dict[str, str] = {"input_type": "search_document"}
diff --git a/libs/community/langchain_community/embeddings/oci_generative_ai.py b/libs/community/langchain_community/embeddings/oci_generative_ai.py
index afcbb62024a02..b9495260b9177 100644
--- a/libs/community/langchain_community/embeddings/oci_generative_ai.py
+++ b/libs/community/langchain_community/embeddings/oci_generative_ai.py
@@ -8,6 +8,8 @@


 class OCIAuthType(Enum):
+    """OCI authentication types as enumerator."""
+
     API_KEY = 1
     SECURITY_TOKEN = 2
     INSTANCE_PRINCIPAL = 3
diff --git a/libs/community/langchain_community/graphs/neo4j_graph.py b/libs/community/langchain_community/graphs/neo4j_graph.py
index 9df427e38c721..8d7da7fe91b41 100644
--- a/libs/community/langchain_community/graphs/neo4j_graph.py
+++ b/libs/community/langchain_community/graphs/neo4j_graph.py
@@ -32,7 +32,8 @@


 def value_sanitize(d: Dict[str, Any]) -> Dict[str, Any]:
-    """
+    """Sanitize the input dictionary.
+
     Sanitizes the input dictionary by removing embedding-like values,
     lists with more than 128 elements, that are mostly irrelevant for
     generating answers in a LLM context. These properties, if left in
@@ -63,7 +64,8 @@ def value_sanitize(d: Dict[str, Any]) -> Dict[str, Any]:


 class Neo4jGraph(GraphStore):
-    """Provides a connection to a Neo4j database for various graph operations.
+    """Neo4j database wrapper for various graph operations.
+
     Parameters:
     url (Optional[str]): The URL of the Neo4j database server.
     username (Optional[str]): The username for database authentication.
diff --git a/libs/community/langchain_community/graphs/neptune_graph.py b/libs/community/langchain_community/graphs/neptune_graph.py
index 6dc45d12e94fe..d1ee0db14e00f 100644
--- a/libs/community/langchain_community/graphs/neptune_graph.py
+++ b/libs/community/langchain_community/graphs/neptune_graph.py
@@ -2,7 +2,7 @@


 class NeptuneQueryException(Exception):
-    """A class to handle queries that fail to execute"""
+    """Exception for the Neptune queries."""

     def __init__(self, exception: Union[str, Dict]):
         if isinstance(exception, dict):
diff --git a/libs/community/langchain_community/llms/oci_generative_ai.py b/libs/community/langchain_community/llms/oci_generative_ai.py
index 0ed977e24b035..3369763c87656 100644
--- a/libs/community/langchain_community/llms/oci_generative_ai.py
+++ b/libs/community/langchain_community/llms/oci_generative_ai.py
@@ -15,6 +15,8 @@


 class OCIAuthType(Enum):
+    """OCI authentication types as enumerator."""
+
     API_KEY = 1
     SECURITY_TOKEN = 2
     INSTANCE_PRINCIPAL = 3
diff --git a/libs/community/langchain_community/llms/tongyi.py b/libs/community/langchain_community/llms/tongyi.py
index 69b09b7eb07de..a11cf9c5153b3 100644
--- a/libs/community/langchain_community/llms/tongyi.py
+++ b/libs/community/langchain_community/llms/tongyi.py
@@ -91,7 +91,9 @@ def _stream_generate_with_retry(**_kwargs: Any) -> Any:


 async def astream_generate_with_retry(llm: Tongyi, **kwargs: Any) -> Any:
-    """Because the dashscope SDK doesn't provide an async API,
+    """Async version of `stream_generate_with_retry`.
+
+    Because the dashscope SDK doesn't provide an async API,
     we wrap `stream_generate_with_retry` with an async generator."""

     class _AioTongyiGenerator:
diff --git a/libs/community/langchain_community/retrievers/arcee.py b/libs/community/langchain_community/retrievers/arcee.py
index 4e2116e58bc77..dbe4449fd9a0d 100644
--- a/libs/community/langchain_community/retrievers/arcee.py
+++ b/libs/community/langchain_community/retrievers/arcee.py
@@ -10,7 +10,7 @@


 class ArceeRetriever(BaseRetriever):
-    """Document retriever for Arcee's Domain Adapted Language Models (DALMs).
+    """Retriever for Arcee's Domain Adapted Language Models (DALMs).

     To use, set the ``ARCEE_API_KEY`` environment variable with your Arcee API key,
     or pass ``arcee_api_key`` as a named parameter.
diff --git a/libs/community/langchain_community/storage/astradb.py b/libs/community/langchain_community/storage/astradb.py
index 79d5bebcd37ff..f84ae1721c837 100644
--- a/libs/community/langchain_community/storage/astradb.py
+++ b/libs/community/langchain_community/storage/astradb.py
@@ -25,6 +25,8 @@


 class AstraDBBaseStore(Generic[V], BaseStore[str, V], ABC):
+    """Base class for the DataStax AstraDB data store."""
+
     def __init__(
         self,
         collection_name: str,
@@ -79,6 +81,7 @@ def yield_keys(self, *, prefix: Optional[str] = None) -> Iterator[str]:

 class AstraDBStore(AstraDBBaseStore[Any]):
     """BaseStore implementation using DataStax AstraDB as the underlying store.
+
     The value type can be any type serializable by json.dumps.
     Can be used to store embeddings with the CacheBackedEmbeddings.
     Documents in the AstraDB collection will have the format
@@ -97,6 +100,7 @@ def encode_value(self, value: Any) -> Any:

 class AstraDBByteStore(AstraDBBaseStore[bytes], ByteStore):
     """ByteStore implementation using DataStax AstraDB as the underlying store.
+
     The bytes values are converted to base64 encoded strings
     Documents in the AstraDB collection will have the format
     {
diff --git a/libs/community/langchain_community/tools/polygon/last_quote.py b/libs/community/langchain_community/tools/polygon/last_quote.py
index 55fe3d9301020..ce6ebe750ea57 100644
--- a/libs/community/langchain_community/tools/polygon/last_quote.py
+++ b/libs/community/langchain_community/tools/polygon/last_quote.py
@@ -8,6 +8,8 @@


 class Inputs(BaseModel):
+    """Inputs for Polygon's Last Quote API."""
+
     query: str
diff --git a/libs/community/langchain_community/utilities/vertexai.py b/libs/community/langchain_community/utilities/vertexai.py
index 88b9bd3394caf..1dc1f14b3f850 100644
--- a/libs/community/langchain_community/utilities/vertexai.py
+++ b/libs/community/langchain_community/utilities/vertexai.py
@@ -57,7 +57,7 @@ def init_vertexai(
     location: Optional[str] = None,
     credentials: Optional["Credentials"] = None,
 ) -> None:
-    """Init vertexai.
+    """Init Vertex AI.

     Args:
         project: The default GCP project to use when making Vertex API calls.
diff --git a/libs/community/langchain_community/vectorstores/kdbai.py b/libs/community/langchain_community/vectorstores/kdbai.py
index 9ac1a7d580a82..619f96269af21 100644
--- a/libs/community/langchain_community/vectorstores/kdbai.py
+++ b/libs/community/langchain_community/vectorstores/kdbai.py
@@ -16,6 +16,8 @@
 class KDBAI(VectorStore):
     """`KDB.AI` vector store.

+    See [https://kdb.ai](https://kdb.ai)
+
     To use, you should have the `kdbai_client` python package installed.

     Args:
@@ -25,7 +27,7 @@ class KDBAI(VectorStore):
         distance_strategy: One option from DistanceStrategy.EUCLIDEAN_DISTANCE,
             DistanceStrategy.DOT_PRODUCT or DistanceStrategy.COSINE.

-    See the example https://github.com/KxSystems/langchain/blob/KDB.AI/docs/docs/integrations/vectorstores/kdbai.ipynb.
+    See the example [notebook](https://github.com/KxSystems/langchain/blob/KDB.AI/docs/docs/integrations/vectorstores/kdbai.ipynb).
     """

     def __init__(
diff --git a/libs/community/langchain_community/vectorstores/lantern.py b/libs/community/langchain_community/vectorstores/lantern.py
index 9b16af9ce98cd..643993bffb4e2 100644
--- a/libs/community/langchain_community/vectorstores/lantern.py
+++ b/libs/community/langchain_community/vectorstores/lantern.py
@@ -47,12 +47,14 @@ def _results_to_docs(docs_and_scores: Any) -> List[Document]:


 class BaseEmbeddingStore:
-    """Embedding store."""
+    """Base class for the Lantern embedding store."""


 def get_embedding_store(
     distance_strategy: DistanceStrategy, collection_name: str
 ) -> Any:
+    """Get the embedding store class."""
+
     embedding_type = None

     if distance_strategy == DistanceStrategy.HAMMING:
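
For reference, a minimal usage sketch of the string-indexing behavior documented in the adapters/openai.py hunk above; it is not part of the patch and assumes only the class and field definitions shown there:

from langchain_community.adapters.openai import ChatCompletions, Choice

# IndexableBaseModel.__getitem__ delegates to getattr, so fields of these
# models can be read either as attributes or by string indexing.
completion = ChatCompletions(
    choices=[Choice(message={"role": "assistant", "content": "hi"})]
)
assert completion["choices"][0]["message"]["content"] == "hi"
assert completion.choices[0].message["content"] == "hi"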