From f726b428ad13300a3fd45b0814adc4416815d1c3 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 15:41:51 -0700 Subject: [PATCH 01/50] stash --- .../safety/amazon_comprehend_chain.ipynb | 10 +++--- .../evaluation/examples/comparisons.ipynb | 2 +- docs/extras/guides/local_llms.ipynb | 5 +-- docs/extras/guides/model_laboratory.ipynb | 4 +-- .../safety/amazon_comprehend_chain.ipynb | 10 +++--- .../hugging_face_prompt_injection.ipynb | 2 +- .../integrations/callbacks/context.ipynb | 2 +- docs/extras/integrations/chat/ollama.ipynb | 2 +- .../integrations/chat_loaders/discord.ipynb | 10 +++--- docs/extras/integrations/llms/ai21.ipynb | 2 +- .../integrations/llms/aleph_alpha.ipynb | 2 +- docs/extras/integrations/llms/anyscale.ipynb | 2 +- docs/extras/integrations/llms/azure_ml.ipynb | 2 +- docs/extras/integrations/llms/banana.ipynb | 2 +- docs/extras/integrations/llms/baseten.ipynb | 2 +- docs/extras/integrations/llms/bittensor.ipynb | 4 +-- .../integrations/llms/cerebriumai.ipynb | 2 +- docs/extras/integrations/llms/chatglm.ipynb | 2 +- docs/extras/integrations/llms/clarifai.ipynb | 2 +- docs/extras/integrations/llms/cohere.ipynb | 2 +- .../integrations/llms/ctransformers.ipynb | 2 +- .../integrations/llms/ctranslate2.ipynb | 2 +- docs/extras/integrations/llms/deepinfra.ipynb | 2 +- docs/extras/integrations/llms/edenai.ipynb | 2 +- docs/extras/integrations/llms/fireworks.ipynb | 2 +- .../integrations/llms/forefrontai.ipynb | 2 +- .../llms/google_vertex_ai_palm.ipynb | 2 +- docs/extras/integrations/llms/gooseai.ipynb | 2 +- docs/extras/integrations/llms/gpt4all.ipynb | 2 +- .../integrations/llms/huggingface_hub.ipynb | 4 +-- docs/extras/integrations/llms/llamacpp.ipynb | 2 +- docs/extras/integrations/llms/manifest.ipynb | 2 +- docs/extras/integrations/llms/minimax.ipynb | 2 +- docs/extras/integrations/llms/modal.ipynb | 2 +- docs/extras/integrations/llms/mosaicml.ipynb | 2 +- docs/extras/integrations/llms/nlpcloud.ipynb | 2 +- docs/extras/integrations/llms/octoai.ipynb | 2 +- docs/extras/integrations/llms/ollama.ipynb | 2 +- .../integrations/llms/opaqueprompts.ipynb | 2 +- docs/extras/integrations/llms/openai.ipynb | 2 +- docs/extras/integrations/llms/openllm.ipynb | 2 +- docs/extras/integrations/llms/openlm.ipynb | 2 +- docs/extras/integrations/llms/petals.ipynb | 2 +- .../extras/integrations/llms/pipelineai.ipynb | 2 +- .../integrations/llms/predictionguard.ipynb | 2 +- docs/extras/integrations/llms/replicate.ipynb | 2 +- docs/extras/integrations/llms/runhouse.ipynb | 2 +- docs/extras/integrations/llms/sagemaker.ipynb | 2 +- .../integrations/llms/stochasticai.ipynb | 2 +- .../integrations/llms/symblai_nebula.ipynb | 2 +- docs/extras/integrations/llms/textgen.ipynb | 4 +-- .../integrations/llms/titan_takeoff.ipynb | 2 +- docs/extras/integrations/llms/tongyi.ipynb | 2 +- docs/extras/integrations/llms/vllm.ipynb | 2 +- docs/extras/integrations/llms/writer.ipynb | 2 +- .../extras/integrations/llms/xinference.ipynb | 2 +- .../memory/motorhead_memory.ipynb | 2 +- .../memory/motorhead_memory_managed.ipynb | 2 +- .../integrations/memory/zep_memory.ipynb | 2 +- docs/extras/integrations/providers/cnosdb.mdx | 4 +-- .../integrations/providers/databricks.ipynb | 4 +-- docs/extras/integrations/providers/jina.mdx | 2 +- docs/extras/integrations/providers/log10.mdx | 4 +-- .../providers/mlflow_ai_gateway.mdx | 4 +-- .../integrations/providers/motherduck.mdx | 2 +- .../providers/predictionguard.mdx | 4 +-- .../integrations/providers/ray_serve.ipynb | 2 +- 
.../integrations/providers/shaleprotocol.md | 2 +- .../retrievers/google_drive.ipynb | 2 +- .../integrations/retrievers/re_phrase.ipynb | 2 +- .../text_embedding/clarifai.ipynb | 2 +- .../integrations/toolkits/amadeus.ipynb | 2 +- .../toolkits/azure_cognitive_services.ipynb | 2 +- docs/extras/integrations/toolkits/gmail.ipynb | 2 +- .../integrations/toolkits/google_drive.ipynb | 2 +- .../integrations/toolkits/multion.ipynb | 2 +- .../integrations/toolkits/office365.ipynb | 2 +- .../integrations/toolkits/vectorstore.ipynb | 2 +- .../extras/integrations/tools/awslambda.ipynb | 2 +- .../integrations/tools/eleven_labs_tts.ipynb | 2 +- .../integrations/tools/google_drive.ipynb | 2 +- docs/extras/integrations/tools/graphql.ipynb | 2 +- docs/extras/integrations/tools/lemonai.ipynb | 2 +- .../integrations/vectorstores/marqo.ipynb | 2 +- .../integrations/vectorstores/starrocks.ipynb | 2 +- .../integrations/vectorstores/vearch.ipynb | 2 +- .../integrations/vectorstores/weaviate.ipynb | 2 +- .../openai_multi_functions_agent.ipynb | 2 +- .../agents/agent_types/react_docstore.ipynb | 2 +- .../agent_types/self_ask_with_search.ipynb | 2 +- .../how_to/add_memory_openai_functions.ipynb | 12 +++---- .../modules/agents/how_to/agent_iter.ipynb | 2 +- .../agents/how_to/agent_vectorstore.ipynb | 2 +- .../modules/agents/how_to/chatgpt_clone.ipynb | 2 +- .../modules/agents/how_to/custom_agent.ipynb | 2 +- .../custom_agent_with_tool_retrieval.ipynb | 2 +- .../agents/how_to/custom_mrkl_agent.ipynb | 2 +- .../how_to/custom_multi_action_agent.ipynb | 2 +- .../agents/how_to/handle_parsing_errors.ipynb | 11 +++--- .../how_to/sharedmemory_for_tools.ipynb | 2 +- .../use_toolkits_with_openai_functions.ipynb | 12 +++---- .../modules/agents/tools/custom_tools.ipynb | 6 ++-- .../agents/tools/multi_input_tool.ipynb | 2 +- .../modules/chains/how_to/from_hub.ipynb | 2 +- .../modules/chains/how_to/serialization.ipynb | 2 +- .../retrievers/MultiQueryRetriever.ipynb | 2 +- .../modules/memory/agent_with_memory.ipynb | 2 +- .../memory/agent_with_memory_in_db.ipynb | 2 +- .../extras/modules/memory/custom_memory.ipynb | 2 +- .../connecting_to_a_feature_store.ipynb | 2 +- .../extras/use_cases/code_understanding.ipynb | 4 +-- .../multiagent_bidding.ipynb | 2 +- .../custom_agent_with_plugin_retrieval.ipynb | 2 +- ...ith_plugin_retrieval_using_plugnplai.ipynb | 2 +- .../agents/sales_agent_with_context.ipynb | 2 +- .../more/agents/agents/wikibase_agent.ipynb | 2 +- .../agents/autonomous_agents/baby_agi.ipynb | 2 +- .../baby_agi_with_agent.ipynb | 4 +-- .../autonomous_agents/meta_prompt.ipynb | 2 +- .../multi_modal_output_agent.ipynb | 2 +- .../use_cases/more/code_writing/cpal.ipynb | 2 +- .../more/code_writing/llm_math.ipynb | 2 +- .../use_cases/more/code_writing/pal.ipynb | 2 +- .../more/graph/graph_memgraph_qa.ipynb | 2 +- .../integrations/myscale_vector_sql.ipynb | 4 +-- .../how_to/local_retrieval_qa.ipynb | 2 +- .../agent_types/conversational_agent.mdx | 2 +- .../agents/agent_types/plan_and_execute.mdx | 4 +-- .../agents/how_to/custom_llm_agent.mdx | 2 +- .../agents/how_to/custom_llm_chat_agent.mdx | 3 +- docs/snippets/modules/agents/how_to/mrkl.mdx | 2 +- .../chains/additional/analyze_document.mdx | 2 +- .../modules/chains/foundational/llm_chain.mdx | 2 +- .../modules/chains/popular/sqlite.mdx | 4 +-- .../popular/vector_db_qa_with_sources.mdx | 2 +- .../model_io/models/chat/how_to/prompts.mdx | 2 +- .../prompts/prompt_templates/get_started.mdx | 6 ++-- .../hugginggpt/repsonse_generator.py | 2 +- .../hugginggpt/task_planner.py | 2 +- 
.../langchain_experimental/sql/base.py | 2 +- .../langchain_experimental/sql/vector_sql.py | 2 +- .../integration_tests/chains/test_cpal.py | 2 +- .../integration_tests/chains/test_pal.py | 2 +- libs/langchain/langchain/agents/mrkl/base.py | 35 +------------------ libs/langchain/langchain/agents/react/base.py | 9 +---- .../agents/self_ask_with_search/base.py | 10 +----- .../langchain/callbacks/context_callback.py | 4 +-- .../langchain/chains/conversation/base.py | 3 +- .../chains/elasticsearch_database/base.py | 3 +- libs/langchain/langchain/chains/llm.py | 4 ++- .../langchain/chains/llm_bash/base.py | 3 +- .../langchain/chains/llm_checker/base.py | 3 +- .../langchain/chains/llm_math/base.py | 3 +- .../chains/llm_summarization_checker/base.py | 3 +- .../chains/llm_symbolic_math/base.py | 3 +- .../langchain/langchain/chains/natbot/base.py | 2 +- .../chains/openai_functions/openapi.py | 2 +- .../chains/query_constructor/base.py | 3 +- .../chains/query_constructor/prompt.py | 2 +- libs/langchain/langchain/chains/transform.py | 2 +- .../langchain/chat_loaders/imessage.py | 6 ++-- .../langchain/langchain/chat_loaders/slack.py | 6 ++-- .../langchain/chat_loaders/telegram.py | 8 ++--- .../langchain/langchain/chat_loaders/utils.py | 5 ++- .../langchain/chat_loaders/whatsapp.py | 7 ++-- .../langchain/evaluation/qa/eval_chain.py | 2 +- libs/langchain/langchain/indexes/graph.py | 2 +- libs/langchain/langchain/llms/pipelineai.py | 2 +- .../langchain/llms/sagemaker_endpoint.py | 2 +- libs/langchain/langchain/llms/writer.py | 2 +- libs/langchain/langchain/prompts/prompt.py | 2 +- .../document_compressors/chain_extract.py | 3 +- .../document_compressors/chain_filter.py | 3 +- .../langchain/utilities/google_places_api.py | 2 +- .../langchain/utilities/google_serper.py | 2 +- .../langchain/langchain/vectorstores/annoy.py | 6 ++-- .../langchain/langchain/vectorstores/dingo.py | 2 +- .../vectorstores/elastic_vector_search.py | 6 ++-- .../langchain/langchain/vectorstores/faiss.py | 4 +-- .../langchain/vectorstores/hologres.py | 2 +- .../langchain/vectorstores/meilisearch.py | 2 +- .../langchain/vectorstores/milvus.py | 2 +- .../vectorstores/opensearch_vector_search.py | 4 +-- .../langchain/vectorstores/pgvector.py | 2 +- .../langchain/vectorstores/pinecone.py | 2 +- .../langchain/vectorstores/qdrant.py | 6 ++-- .../langchain/langchain/vectorstores/scann.py | 4 +-- .../langchain/vectorstores/vectara.py | 4 +-- .../langchain/vectorstores/zilliz.py | 2 +- .../integration_tests/llms/test_fireworks.py | 3 +- .../test_huggingface_text_gen_inference.py | 2 +- .../llms/test_opaqueprompts.py | 3 +- .../llms/test_symblai_nebula.py | 3 +- .../tests/unit_tests/chains/test_api.py | 2 +- .../chains/test_combine_documents.py | 2 +- .../unit_tests/chat_loaders/test_telegram.py | 12 +++---- .../tests/unit_tests/schema/__init__.py | 0 .../unit_tests/schema/runnable/test_locals.py | 2 +- .../schema/runnable/test_runnable.py | 2 +- 199 files changed, 284 insertions(+), 322 deletions(-) create mode 100644 libs/langchain/tests/unit_tests/schema/__init__.py diff --git a/docs/docs_skeleton/docs/guides/safety/amazon_comprehend_chain.ipynb b/docs/docs_skeleton/docs/guides/safety/amazon_comprehend_chain.ipynb index 614db1b885ef3c..49f3d6f41e2ff2 100644 --- a/docs/docs_skeleton/docs/guides/safety/amazon_comprehend_chain.ipynb +++ b/docs/docs_skeleton/docs/guides/safety/amazon_comprehend_chain.ipynb @@ -105,7 +105,7 @@ }, "outputs": [], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import 
PromptTemplate\nfrom langchain.chains import LLMChain\n", "from langchain.llms.fake import FakeListLLM\n", "from langchain_experimental.comprehend_moderation.base_moderation_exceptions import ModerationPiiError\n", "\n", @@ -412,7 +412,7 @@ }, "outputs": [], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "from langchain.llms.fake import FakeListLLM\n", "\n", "template = \"\"\"Question: {question}\n", @@ -572,8 +572,8 @@ }, "outputs": [], "source": [ - "from langchain import HuggingFaceHub\n", - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.llms import HuggingFaceHub\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "\n", "template = \"\"\"Question: {question}\"\"\"\n", "\n", @@ -697,7 +697,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import SagemakerEndpoint\n", + "from langchain.llms import SagemakerEndpoint\n", "from langchain.llms.sagemaker_endpoint import LLMContentHandler\n", "from langchain.chains import LLMChain\n", "from langchain.prompts import load_prompt, PromptTemplate\n", diff --git a/docs/extras/guides/evaluation/examples/comparisons.ipynb b/docs/extras/guides/evaluation/examples/comparisons.ipynb index 5c293d89841875..35d79c91809b99 100644 --- a/docs/extras/guides/evaluation/examples/comparisons.ipynb +++ b/docs/extras/guides/evaluation/examples/comparisons.ipynb @@ -97,7 +97,7 @@ }, "outputs": [], "source": [ - "from langchain import SerpAPIWrapper\n", + "from langchain.utilities import SerpAPIWrapper\n", "from langchain.agents import initialize_agent, Tool\n", "from langchain.agents import AgentType\n", "from langchain.chat_models import ChatOpenAI\n", diff --git a/docs/extras/guides/local_llms.ipynb b/docs/extras/guides/local_llms.ipynb index 90c3232a2bc97d..6268923d387c63 100644 --- a/docs/extras/guides/local_llms.ipynb +++ b/docs/extras/guides/local_llms.ipynb @@ -468,7 +468,8 @@ } ], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\n", + "from langchain.chains import LLMChain\n", "from langchain.chains.prompt_selector import ConditionalPromptSelector\n", "\n", "DEFAULT_LLAMA_SEARCH_PROMPT = PromptTemplate(\n", @@ -593,7 +594,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.1" + "version": "3.10.1" } }, "nbformat": 4, diff --git a/docs/extras/guides/model_laboratory.ipynb b/docs/extras/guides/model_laboratory.ipynb index 24fd5f7760754d..ec275e4a25fd84 100644 --- a/docs/extras/guides/model_laboratory.ipynb +++ b/docs/extras/guides/model_laboratory.ipynb @@ -19,7 +19,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import LLMChain, OpenAI, Cohere, HuggingFaceHub, PromptTemplate\n", + "from langchain.chains import LLMChain, OpenAI, Cohere, HuggingFaceHub, PromptTemplate\n", "from langchain.model_laboratory import ModelLaboratory" ] }, @@ -139,7 +139,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import SelfAskWithSearchChain, SerpAPIWrapper\n", + "from langchain.chains import SelfAskWithSearchChain\nfrom langchain.utilities import SerpAPIWrapper\n", "\n", "open_ai_llm = OpenAI(temperature=0)\n", "search = SerpAPIWrapper()\n", diff --git a/docs/extras/guides/safety/amazon_comprehend_chain.ipynb b/docs/extras/guides/safety/amazon_comprehend_chain.ipynb index 69117b8257c022..25eb4f2505df46 100644 --- 
a/docs/extras/guides/safety/amazon_comprehend_chain.ipynb +++ b/docs/extras/guides/safety/amazon_comprehend_chain.ipynb @@ -95,7 +95,7 @@ }, "outputs": [], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "from langchain.llms.fake import FakeListLLM\n", "from langchain_experimental.comprehend_moderation.base_moderation_exceptions import ModerationPiiError\n", "\n", @@ -399,7 +399,7 @@ }, "outputs": [], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "from langchain.llms.fake import FakeListLLM\n", "\n", "template = \"\"\"Question: {question}\n", @@ -564,8 +564,8 @@ }, "outputs": [], "source": [ - "from langchain import HuggingFaceHub\n", - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.llms import HuggingFaceHub\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "\n", "template = \"\"\"Question: {question}\n", "\n", @@ -679,7 +679,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import SagemakerEndpoint\n", + "from langchain.llms import SagemakerEndpoint\n", "from langchain.llms.sagemaker_endpoint import LLMContentHandler\n", "from langchain.chains import LLMChain\n", "from langchain.prompts import load_prompt, PromptTemplate\n", diff --git a/docs/extras/guides/safety/hugging_face_prompt_injection.ipynb b/docs/extras/guides/safety/hugging_face_prompt_injection.ipynb index 52d9d0fca41ca4..1277d48c904e7d 100644 --- a/docs/extras/guides/safety/hugging_face_prompt_injection.ipynb +++ b/docs/extras/guides/safety/hugging_face_prompt_injection.ipynb @@ -123,7 +123,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import initialize_agent, AgentType" ] }, diff --git a/docs/extras/integrations/callbacks/context.ipynb b/docs/extras/integrations/callbacks/context.ipynb index bf05268f6b78b0..9da1e73f290b54 100644 --- a/docs/extras/integrations/callbacks/context.ipynb +++ b/docs/extras/integrations/callbacks/context.ipynb @@ -167,7 +167,7 @@ "import os\n", "\n", "from langchain.chat_models import ChatOpenAI\n", - "from langchain import LLMChain\n", + "from langchain.chains import LLMChain\n", "from langchain.prompts import PromptTemplate\n", "from langchain.prompts.chat import (\n", " ChatPromptTemplate,\n", diff --git a/docs/extras/integrations/chat/ollama.ipynb b/docs/extras/integrations/chat/ollama.ipynb index d5569397bf2ced..d42827d4bda0c1 100644 --- a/docs/extras/integrations/chat/ollama.ipynb +++ b/docs/extras/integrations/chat/ollama.ipynb @@ -199,7 +199,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import PromptTemplate\n", + "from langchain.prompts import PromptTemplate\n", "\n", "# Prompt\n", "template = \"\"\"[INST] <> Use the following pieces of context to answer the question at the end. 
\n", diff --git a/docs/extras/integrations/chat_loaders/discord.ipynb b/docs/extras/integrations/chat_loaders/discord.ipynb index 973b59640c9ae3..f8748706c66954 100644 --- a/docs/extras/integrations/chat_loaders/discord.ipynb +++ b/docs/extras/integrations/chat_loaders/discord.ipynb @@ -81,7 +81,7 @@ "import re\n", "from typing import Iterator, List\n", "\n", - "from langchain import schema\n", + "from langchain.schema import BaseMessage, HumanMessage\n", "from langchain.chat_loaders import base as chat_loaders\n", "\n", "logger = logging.getLogger()\n", @@ -117,7 +117,7 @@ " with open(file_path, \"r\", encoding=\"utf-8\") as file:\n", " lines = file.readlines()\n", "\n", - " results: List[schema.BaseMessage] = []\n", + " results: List[BaseMessage] = []\n", " current_sender = None\n", " current_timestamp = None\n", " current_content = []\n", @@ -128,7 +128,7 @@ " ):\n", " if current_sender and current_content:\n", " results.append(\n", - " schema.HumanMessage(\n", + " HumanMessage(\n", " content=\"\".join(current_content).strip(),\n", " additional_kwargs={\n", " \"sender\": current_sender,\n", @@ -142,7 +142,7 @@ " ]\n", " elif re.match(r\"\\[\\d{1,2}:\\d{2} (?:AM|PM)\\]\", line.strip()):\n", " results.append(\n", - " schema.HumanMessage(\n", + " HumanMessage(\n", " content=\"\".join(current_content).strip(),\n", " additional_kwargs={\n", " \"sender\": current_sender,\n", @@ -157,7 +157,7 @@ "\n", " if current_sender and current_content:\n", " results.append(\n", - " schema.HumanMessage(\n", + " HumanMessage(\n", " content=\"\".join(current_content).strip(),\n", " additional_kwargs={\n", " \"sender\": current_sender,\n", diff --git a/docs/extras/integrations/llms/ai21.ipynb b/docs/extras/integrations/llms/ai21.ipynb index 26152170039e09..8cbbccfaa90073 100644 --- a/docs/extras/integrations/llms/ai21.ipynb +++ b/docs/extras/integrations/llms/ai21.ipynb @@ -59,7 +59,7 @@ "outputs": [], "source": [ "from langchain.llms import AI21\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/aleph_alpha.ipynb b/docs/extras/integrations/llms/aleph_alpha.ipynb index cbe6151750b7c5..ac3c36dc81482c 100644 --- a/docs/extras/integrations/llms/aleph_alpha.ipynb +++ b/docs/extras/integrations/llms/aleph_alpha.ipynb @@ -59,7 +59,7 @@ "outputs": [], "source": [ "from langchain.llms import AlephAlpha\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/anyscale.ipynb b/docs/extras/integrations/llms/anyscale.ipynb index 3f9e2cc0b2458e..b94df0654096b5 100644 --- a/docs/extras/integrations/llms/anyscale.ipynb +++ b/docs/extras/integrations/llms/anyscale.ipynb @@ -41,7 +41,7 @@ "outputs": [], "source": [ "from langchain.llms import Anyscale\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/azure_ml.ipynb b/docs/extras/integrations/llms/azure_ml.ipynb index 96fdf7494df8e7..38c1cd2aea5613 100644 --- a/docs/extras/integrations/llms/azure_ml.ipynb +++ b/docs/extras/integrations/llms/azure_ml.ipynb @@ -154,7 +154,7 @@ } ], "source": [ - "from langchain import PromptTemplate\n", + "from langchain.prompts import PromptTemplate\n", "from langchain.llms.azureml_endpoint import DollyContentFormatter\n", "from 
langchain.chains import LLMChain\n", "\n", diff --git a/docs/extras/integrations/llms/banana.ipynb b/docs/extras/integrations/llms/banana.ipynb index b92db8dabab60a..81453babd18403 100644 --- a/docs/extras/integrations/llms/banana.ipynb +++ b/docs/extras/integrations/llms/banana.ipynb @@ -53,7 +53,7 @@ "outputs": [], "source": [ "from langchain.llms import Banana\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/baseten.ipynb b/docs/extras/integrations/llms/baseten.ipynb index b8e3d46b0e894d..7786578a64f1d2 100644 --- a/docs/extras/integrations/llms/baseten.ipynb +++ b/docs/extras/integrations/llms/baseten.ipynb @@ -107,7 +107,7 @@ "outputs": [], "source": [ "from langchain.chains import SimpleSequentialChain\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/bittensor.ipynb b/docs/extras/integrations/llms/bittensor.ipynb index d6f97dc6cb6ce4..e585669ec204f3 100644 --- a/docs/extras/integrations/llms/bittensor.ipynb +++ b/docs/extras/integrations/llms/bittensor.ipynb @@ -80,7 +80,7 @@ "outputs": [], "source": [ "import langchain\n", - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "from langchain.llms import NIBittensorLLM\n", "\n", "langchain.debug = True\n", @@ -123,7 +123,7 @@ " AgentExecutor,\n", ")\n", "from langchain.memory import ConversationBufferMemory\n", - "from langchain import LLMChain, PromptTemplate\n", + "from langchain.chains import LLMChain, PromptTemplate\n", "from langchain.utilities import GoogleSearchAPIWrapper, SerpAPIWrapper\n", "from langchain.llms import NIBittensorLLM\n", "\n", diff --git a/docs/extras/integrations/llms/cerebriumai.ipynb b/docs/extras/integrations/llms/cerebriumai.ipynb index f7b32e92de0913..f9ea729c0937e9 100644 --- a/docs/extras/integrations/llms/cerebriumai.ipynb +++ b/docs/extras/integrations/llms/cerebriumai.ipynb @@ -44,7 +44,7 @@ "source": [ "import os\n", "from langchain.llms import CerebriumAI\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/chatglm.ipynb b/docs/extras/integrations/llms/chatglm.ipynb index 0601925a5f53eb..a1fbc3f95d8d11 100644 --- a/docs/extras/integrations/llms/chatglm.ipynb +++ b/docs/extras/integrations/llms/chatglm.ipynb @@ -22,7 +22,7 @@ "outputs": [], "source": [ "from langchain.llms import ChatGLM\n", - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "\n", "# import os" ] diff --git a/docs/extras/integrations/llms/clarifai.ipynb b/docs/extras/integrations/llms/clarifai.ipynb index f2fca728b71af1..7d58f35e166c84 100644 --- a/docs/extras/integrations/llms/clarifai.ipynb +++ b/docs/extras/integrations/llms/clarifai.ipynb @@ -82,7 +82,7 @@ "source": [ "# Import the required modules\n", "from langchain.llms import Clarifai\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/cohere.ipynb b/docs/extras/integrations/llms/cohere.ipynb index 05712924342c12..9795875ae6e73d 
100644 --- a/docs/extras/integrations/llms/cohere.ipynb +++ b/docs/extras/integrations/llms/cohere.ipynb @@ -59,7 +59,7 @@ "outputs": [], "source": [ "from langchain.llms import Cohere\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/ctransformers.ipynb b/docs/extras/integrations/llms/ctransformers.ipynb index 28ddfc6152f07f..0539ca6c510a78 100644 --- a/docs/extras/integrations/llms/ctransformers.ipynb +++ b/docs/extras/integrations/llms/ctransformers.ipynb @@ -102,7 +102,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "\n", "template = \"\"\"Question: {question}\n", "\n", diff --git a/docs/extras/integrations/llms/ctranslate2.ipynb b/docs/extras/integrations/llms/ctranslate2.ipynb index 1554e13c557afc..dd0bf0978e00e8 100644 --- a/docs/extras/integrations/llms/ctranslate2.ipynb +++ b/docs/extras/integrations/llms/ctranslate2.ipynb @@ -195,7 +195,7 @@ } ], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "\n", "template = \"\"\"{question}\n", "\n", diff --git a/docs/extras/integrations/llms/deepinfra.ipynb b/docs/extras/integrations/llms/deepinfra.ipynb index 45ba2ac8c50b28..ecb0fd63918879 100644 --- a/docs/extras/integrations/llms/deepinfra.ipynb +++ b/docs/extras/integrations/llms/deepinfra.ipynb @@ -28,7 +28,7 @@ "source": [ "import os\n", "from langchain.llms import DeepInfra\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/edenai.ipynb b/docs/extras/integrations/llms/edenai.ipynb index 4b5ab69bdd4272..031f6986d6aa14 100644 --- a/docs/extras/integrations/llms/edenai.ipynb +++ b/docs/extras/integrations/llms/edenai.ipynb @@ -103,7 +103,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "llm=EdenAI(feature=\"text\",provider=\"openai\",model=\"text-davinci-003\",temperature=0.2, max_tokens=250)\n", "\n", "prompt = \"\"\"\n", diff --git a/docs/extras/integrations/llms/fireworks.ipynb b/docs/extras/integrations/llms/fireworks.ipynb index 4da49230bfaca7..70834f06b5887f 100644 --- a/docs/extras/integrations/llms/fireworks.ipynb +++ b/docs/extras/integrations/llms/fireworks.ipynb @@ -20,7 +20,7 @@ "outputs": [], "source": [ "from langchain.llms.fireworks import Fireworks, FireworksChat\n", - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "from langchain.prompts.chat import (\n", " ChatPromptTemplate,\n", " HumanMessagePromptTemplate,\n", diff --git a/docs/extras/integrations/llms/forefrontai.ipynb b/docs/extras/integrations/llms/forefrontai.ipynb index 8aca6234d13580..5f988c25c9a3cf 100644 --- a/docs/extras/integrations/llms/forefrontai.ipynb +++ b/docs/extras/integrations/llms/forefrontai.ipynb @@ -27,7 +27,7 @@ "source": [ "import os\n", "from langchain.llms import ForefrontAI\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff 
--git a/docs/extras/integrations/llms/google_vertex_ai_palm.ipynb b/docs/extras/integrations/llms/google_vertex_ai_palm.ipynb index b55f2771f1dea5..0327465f343df6 100644 --- a/docs/extras/integrations/llms/google_vertex_ai_palm.ipynb +++ b/docs/extras/integrations/llms/google_vertex_ai_palm.ipynb @@ -66,7 +66,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/gooseai.ipynb b/docs/extras/integrations/llms/gooseai.ipynb index aaedce3a69e0df..fc0fad0bddfabf 100644 --- a/docs/extras/integrations/llms/gooseai.ipynb +++ b/docs/extras/integrations/llms/gooseai.ipynb @@ -43,7 +43,7 @@ "source": [ "import os\n", "from langchain.llms import GooseAI\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/gpt4all.ipynb b/docs/extras/integrations/llms/gpt4all.ipynb index a8760ceeab7381..604f60499956d4 100644 --- a/docs/extras/integrations/llms/gpt4all.ipynb +++ b/docs/extras/integrations/llms/gpt4all.ipynb @@ -47,7 +47,7 @@ }, "outputs": [], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "from langchain.llms import GPT4All\n", "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler" ] diff --git a/docs/extras/integrations/llms/huggingface_hub.ipynb b/docs/extras/integrations/llms/huggingface_hub.ipynb index f635e5c67925ae..8b0d33d9bb4d5d 100644 --- a/docs/extras/integrations/llms/huggingface_hub.ipynb +++ b/docs/extras/integrations/llms/huggingface_hub.ipynb @@ -91,7 +91,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import HuggingFaceHub" + "from langchain.llms import HuggingFaceHub" ] }, { @@ -101,7 +101,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/llamacpp.ipynb b/docs/extras/integrations/llms/llamacpp.ipynb index 71ba60db70c4b7..45df2ac8a8c427 100644 --- a/docs/extras/integrations/llms/llamacpp.ipynb +++ b/docs/extras/integrations/llms/llamacpp.ipynb @@ -189,7 +189,7 @@ "outputs": [], "source": [ "from langchain.llms import LlamaCpp\n", - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "from langchain.callbacks.manager import CallbackManager\n", "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler" ] diff --git a/docs/extras/integrations/llms/manifest.ipynb b/docs/extras/integrations/llms/manifest.ipynb index 3664b203962545..1238ae84f459c9 100644 --- a/docs/extras/integrations/llms/manifest.ipynb +++ b/docs/extras/integrations/llms/manifest.ipynb @@ -80,7 +80,7 @@ "outputs": [], "source": [ "# Map reduce example\n", - "from langchain import PromptTemplate\n", + "from langchain.prompts import PromptTemplate\n", "from langchain.text_splitter import CharacterTextSplitter\n", "from langchain.chains.mapreduce import MapReduceChain\n", "\n", diff --git a/docs/extras/integrations/llms/minimax.ipynb b/docs/extras/integrations/llms/minimax.ipynb index e889b99a9133fd..fecad66feac06e 100644 --- 
a/docs/extras/integrations/llms/minimax.ipynb +++ b/docs/extras/integrations/llms/minimax.ipynb @@ -94,7 +94,7 @@ "outputs": [], "source": [ "from langchain.llms import Minimax\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ], "metadata": { "collapsed": false diff --git a/docs/extras/integrations/llms/modal.ipynb b/docs/extras/integrations/llms/modal.ipynb index 719c7ce54cd24e..c2031e0a9da3b8 100644 --- a/docs/extras/integrations/llms/modal.ipynb +++ b/docs/extras/integrations/llms/modal.ipynb @@ -108,7 +108,7 @@ "outputs": [], "source": [ "from langchain.llms import Modal\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/mosaicml.ipynb b/docs/extras/integrations/llms/mosaicml.ipynb index cd9be156fcb731..08d378cdccfa9d 100644 --- a/docs/extras/integrations/llms/mosaicml.ipynb +++ b/docs/extras/integrations/llms/mosaicml.ipynb @@ -43,7 +43,7 @@ "outputs": [], "source": [ "from langchain.llms import MosaicML\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/nlpcloud.ipynb b/docs/extras/integrations/llms/nlpcloud.ipynb index 931a317c9de25d..d8d0e2daf3cbfa 100644 --- a/docs/extras/integrations/llms/nlpcloud.ipynb +++ b/docs/extras/integrations/llms/nlpcloud.ipynb @@ -73,7 +73,7 @@ "outputs": [], "source": [ "from langchain.llms import NLPCloud\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/octoai.ipynb b/docs/extras/integrations/llms/octoai.ipynb index d783c1e5115823..79324cd9ac28a2 100644 --- a/docs/extras/integrations/llms/octoai.ipynb +++ b/docs/extras/integrations/llms/octoai.ipynb @@ -43,7 +43,7 @@ "outputs": [], "source": [ "from langchain.llms.octoai_endpoint import OctoAIEndpoint\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/ollama.ipynb b/docs/extras/integrations/llms/ollama.ipynb index 49f8ae8f53bbf6..9e2a31c258076e 100644 --- a/docs/extras/integrations/llms/ollama.ipynb +++ b/docs/extras/integrations/llms/ollama.ipynb @@ -206,7 +206,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import PromptTemplate\n", + "from langchain.prompts import PromptTemplate\n", "\n", "# Prompt\n", "template = \"\"\"Use the following pieces of context to answer the question at the end. 
\n", diff --git a/docs/extras/integrations/llms/opaqueprompts.ipynb b/docs/extras/integrations/llms/opaqueprompts.ipynb index 132b37e8ce93f6..c598450cb72b31 100644 --- a/docs/extras/integrations/llms/opaqueprompts.ipynb +++ b/docs/extras/integrations/llms/opaqueprompts.ipynb @@ -59,7 +59,7 @@ "outputs": [], "source": [ "import langchain\n", - "from langchain import LLMChain, PromptTemplate\n", + "from langchain.chains import LLMChain, PromptTemplate\n", "from langchain.callbacks.stdout import StdOutCallbackHandler\n", "from langchain.llms import OpenAI\n", "from langchain.memory import ConversationBufferWindowMemory\n", diff --git a/docs/extras/integrations/llms/openai.ipynb b/docs/extras/integrations/llms/openai.ipynb index 9cd691e1047fc7..3bd1da7c82d94c 100644 --- a/docs/extras/integrations/llms/openai.ipynb +++ b/docs/extras/integrations/llms/openai.ipynb @@ -67,7 +67,7 @@ "outputs": [], "source": [ "from langchain.llms import OpenAI\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/openllm.ipynb b/docs/extras/integrations/llms/openllm.ipynb index 9038ef262a102f..6d85ea29b0d204 100644 --- a/docs/extras/integrations/llms/openllm.ipynb +++ b/docs/extras/integrations/llms/openllm.ipynb @@ -114,7 +114,7 @@ } ], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "\n", "template = \"What is a good name for a company that makes {product}?\"\n", "\n", diff --git a/docs/extras/integrations/llms/openlm.ipynb b/docs/extras/integrations/llms/openlm.ipynb index 997d321f12c7b9..ecb4f0ee0c553c 100644 --- a/docs/extras/integrations/llms/openlm.ipynb +++ b/docs/extras/integrations/llms/openlm.ipynb @@ -71,7 +71,7 @@ "outputs": [], "source": [ "from langchain.llms import OpenLM\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/petals.ipynb b/docs/extras/integrations/llms/petals.ipynb index d2d386af7fc1cd..1128429e788de9 100644 --- a/docs/extras/integrations/llms/petals.ipynb +++ b/docs/extras/integrations/llms/petals.ipynb @@ -45,7 +45,7 @@ "source": [ "import os\n", "from langchain.llms import Petals\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/pipelineai.ipynb b/docs/extras/integrations/llms/pipelineai.ipynb index 509a61f03a3e22..7e89832fd6ffbd 100644 --- a/docs/extras/integrations/llms/pipelineai.ipynb +++ b/docs/extras/integrations/llms/pipelineai.ipynb @@ -50,7 +50,7 @@ "source": [ "import os\n", "from langchain.llms import PipelineAI\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/predictionguard.ipynb b/docs/extras/integrations/llms/predictionguard.ipynb index ed0225b1578e3f..83950aefead484 100644 --- a/docs/extras/integrations/llms/predictionguard.ipynb +++ b/docs/extras/integrations/llms/predictionguard.ipynb @@ -32,7 +32,7 @@ "\n", "import predictionguard as pg\n", "from langchain.llms import PredictionGuard\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom 
langchain.chains import LLMChain" ], "id": "7191a5ce" }, diff --git a/docs/extras/integrations/llms/replicate.ipynb b/docs/extras/integrations/llms/replicate.ipynb index abc7d9e7dd4ed7..60bff90a40cb2c 100644 --- a/docs/extras/integrations/llms/replicate.ipynb +++ b/docs/extras/integrations/llms/replicate.ipynb @@ -104,7 +104,7 @@ "outputs": [], "source": [ "from langchain.llms import Replicate\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/runhouse.ipynb b/docs/extras/integrations/llms/runhouse.ipynb index 209975b3552fcd..27b3c5c919c240 100644 --- a/docs/extras/integrations/llms/runhouse.ipynb +++ b/docs/extras/integrations/llms/runhouse.ipynb @@ -44,7 +44,7 @@ ], "source": [ "from langchain.llms import SelfHostedPipeline, SelfHostedHuggingFaceLLM\n", - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "import runhouse as rh" ] }, diff --git a/docs/extras/integrations/llms/sagemaker.ipynb b/docs/extras/integrations/llms/sagemaker.ipynb index bbdbd5a6da554b..7b21a56e56e221 100644 --- a/docs/extras/integrations/llms/sagemaker.ipynb +++ b/docs/extras/integrations/llms/sagemaker.ipynb @@ -92,7 +92,7 @@ "source": [ "from typing import Dict\n", "\n", - "from langchain import PromptTemplate, SagemakerEndpoint\n", + "from langchain.prompts import PromptTemplate, SagemakerEndpoint\n", "from langchain.llms.sagemaker_endpoint import LLMContentHandler\n", "from langchain.chains.question_answering import load_qa_chain\n", "import json\n", diff --git a/docs/extras/integrations/llms/stochasticai.ipynb b/docs/extras/integrations/llms/stochasticai.ipynb index 26dcacc23609bb..efd4db43b99030 100644 --- a/docs/extras/integrations/llms/stochasticai.ipynb +++ b/docs/extras/integrations/llms/stochasticai.ipynb @@ -80,7 +80,7 @@ "outputs": [], "source": [ "from langchain.llms import StochasticAI\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/symblai_nebula.ipynb b/docs/extras/integrations/llms/symblai_nebula.ipynb index 304917a5cf4a11..3faab38216869d 100644 --- a/docs/extras/integrations/llms/symblai_nebula.ipynb +++ b/docs/extras/integrations/llms/symblai_nebula.ipynb @@ -54,7 +54,7 @@ "execution_count": null, "outputs": [], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "\n", "conversation = \"\"\"Sam: Good morning, team! Let's keep this standup concise. We'll go in the usual order: what you did yesterday, what you plan to do today, and any blockers. Alex, kick us off.\n", "Alex: Morning! Yesterday, I wrapped up the UI for the user dashboard. The new charts and widgets are now responsive. I also had a sync with the design team to ensure the final touchups are in line with the brand guidelines. Today, I'll start integrating the frontend with the new API endpoints Rhea was working on. 
The only blocker is waiting for some final API documentation, but I guess Rhea can update on that.\n", diff --git a/docs/extras/integrations/llms/textgen.ipynb b/docs/extras/integrations/llms/textgen.ipynb index 3ffd83e69f95ac..411f2e9c33b487 100644 --- a/docs/extras/integrations/llms/textgen.ipynb +++ b/docs/extras/integrations/llms/textgen.ipynb @@ -44,7 +44,7 @@ "outputs": [], "source": [ "import langchain\n", - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "from langchain.llms import TextGen\n", "\n", "langchain.debug = True\n", @@ -93,7 +93,7 @@ "outputs": [], "source": [ "import langchain\n", - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "from langchain.llms import TextGen\n", "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", "\n", diff --git a/docs/extras/integrations/llms/titan_takeoff.ipynb b/docs/extras/integrations/llms/titan_takeoff.ipynb index 5b8fddcb1e2d30..5ccff187647968 100644 --- a/docs/extras/integrations/llms/titan_takeoff.ipynb +++ b/docs/extras/integrations/llms/titan_takeoff.ipynb @@ -157,7 +157,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "\n", "llm = TitanTakeoff()\n", "\n", diff --git a/docs/extras/integrations/llms/tongyi.ipynb b/docs/extras/integrations/llms/tongyi.ipynb index c8e1b1a596819e..6fb8cb336c8c2d 100644 --- a/docs/extras/integrations/llms/tongyi.ipynb +++ b/docs/extras/integrations/llms/tongyi.ipynb @@ -76,7 +76,7 @@ "outputs": [], "source": [ "from langchain.llms import Tongyi\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/vllm.ipynb b/docs/extras/integrations/llms/vllm.ipynb index 8a9ff01df131d4..b4f491f47a4780 100644 --- a/docs/extras/integrations/llms/vllm.ipynb +++ b/docs/extras/integrations/llms/vllm.ipynb @@ -128,7 +128,7 @@ } ], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "\n", "template = \"\"\"Question: {question}\n", "\n", diff --git a/docs/extras/integrations/llms/writer.ipynb b/docs/extras/integrations/llms/writer.ipynb index 208155309f705d..dc7451048fb469 100644 --- a/docs/extras/integrations/llms/writer.ipynb +++ b/docs/extras/integrations/llms/writer.ipynb @@ -56,7 +56,7 @@ "outputs": [], "source": [ "from langchain.llms import Writer\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/llms/xinference.ipynb b/docs/extras/integrations/llms/xinference.ipynb index d4010cf34f4baf..037470e8a045e7 100644 --- a/docs/extras/integrations/llms/xinference.ipynb +++ b/docs/extras/integrations/llms/xinference.ipynb @@ -122,7 +122,7 @@ } ], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "\n", "template = \"Where can we visit in the capital of {country}?\"\n", "\n", diff --git a/docs/extras/integrations/memory/motorhead_memory.ipynb 
b/docs/extras/integrations/memory/motorhead_memory.ipynb index 7801e0f3c8f866..8402315a07ad8d 100644 --- a/docs/extras/integrations/memory/motorhead_memory.ipynb +++ b/docs/extras/integrations/memory/motorhead_memory.ipynb @@ -20,7 +20,7 @@ "outputs": [], "source": [ "from langchain.memory.motorhead_memory import MotorheadMemory\n", - "from langchain import OpenAI, LLMChain, PromptTemplate\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\n", "\n", "template = \"\"\"You are a chatbot having a conversation with a human.\n", "\n", diff --git a/docs/extras/integrations/memory/motorhead_memory_managed.ipynb b/docs/extras/integrations/memory/motorhead_memory_managed.ipynb index f577bef8d9ffa9..e3e3e3d3cb6a6c 100644 --- a/docs/extras/integrations/memory/motorhead_memory_managed.ipynb +++ b/docs/extras/integrations/memory/motorhead_memory_managed.ipynb @@ -21,7 +21,7 @@ "outputs": [], "source": [ "from langchain.memory.motorhead_memory import MotorheadMemory\n", - "from langchain import OpenAI, LLMChain, PromptTemplate\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\n", "\n", "template = \"\"\"You are a chatbot having a conversation with a human.\n", "\n", diff --git a/docs/extras/integrations/memory/zep_memory.ipynb b/docs/extras/integrations/memory/zep_memory.ipynb index aa4d6686659873..f30816acc4a29b 100644 --- a/docs/extras/integrations/memory/zep_memory.ipynb +++ b/docs/extras/integrations/memory/zep_memory.ipynb @@ -49,7 +49,7 @@ "source": [ "from langchain.memory import ZepMemory\n", "from langchain.retrievers import ZepRetriever\n", - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.schema import HumanMessage, AIMessage\n", "from langchain.utilities import WikipediaAPIWrapper\n", "from langchain.agents import initialize_agent, AgentType, Tool\n", diff --git a/docs/extras/integrations/providers/cnosdb.mdx b/docs/extras/integrations/providers/cnosdb.mdx index eab53c9bfc5a39..60cadd28204ca5 100644 --- a/docs/extras/integrations/providers/cnosdb.mdx +++ b/docs/extras/integrations/providers/cnosdb.mdx @@ -31,7 +31,7 @@ Args: ## Examples ```python # Connecting to CnosDB with SQLDatabase Wrapper -from langchain import SQLDatabase +from langchain.utilities import SQLDatabase db = SQLDatabase.from_cnosdb() ``` @@ -45,7 +45,7 @@ llm = ChatOpenAI(temperature=0, model_name="gpt-3.5-turbo") ### SQL Database Chain This example demonstrates the use of the SQL Chain for answering a question over a CnosDB.
```python -from langchain import SQLDatabaseChain +from langchain_experimental.sql import SQLDatabaseChain db_chain = SQLDatabaseChain.from_llm(llm, db, verbose=True) diff --git a/docs/extras/integrations/providers/databricks.ipynb b/docs/extras/integrations/providers/databricks.ipynb index 4064b1c2640555..c37794143b887d 100644 --- a/docs/extras/integrations/providers/databricks.ipynb +++ b/docs/extras/integrations/providers/databricks.ipynb @@ -80,7 +80,7 @@ "outputs": [], "source": [ "# Connecting to Databricks with SQLDatabase wrapper\n", - "from langchain import SQLDatabase\n", + "from langchain.utilities import SQLDatabase\n", "\n", "db = SQLDatabase.from_databricks(catalog=\"samples\", schema=\"nyctaxi\")" ] @@ -115,7 +115,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import SQLDatabaseChain\n", + "from langchain_experimental.sql import SQLDatabaseChain\n", "\n", "db_chain = SQLDatabaseChain.from_llm(llm, db, verbose=True)" ] diff --git a/docs/extras/integrations/providers/jina.mdx b/docs/extras/integrations/providers/jina.mdx index 560c2207405f38..3779babf95b6d1 100644 --- a/docs/extras/integrations/providers/jina.mdx +++ b/docs/extras/integrations/providers/jina.mdx @@ -37,7 +37,8 @@ from lcserve import serving @serving def ask(input: str) -> str: - from langchain import LLMChain, OpenAI + from langchain.chains import LLMChain + from langchain.llms import OpenAI from langchain.agents import AgentExecutor, ZeroShotAgent tools = [...] # list of tools diff --git a/docs/extras/integrations/providers/log10.mdx b/docs/extras/integrations/providers/log10.mdx index 6511cb8d43ce22..a4d634566d2ea8 100644 --- a/docs/extras/integrations/providers/log10.mdx +++ b/docs/extras/integrations/providers/log10.mdx @@ -40,7 +40,7 @@ llm = ChatOpenAI(model_name="gpt-3.5-turbo", callbacks=[log10_callback]) ## How to use tags with Log10 ```python -from langchain import OpenAI +from langchain.llms import OpenAI from langchain.chat_models import ChatAnthropic from langchain.chat_models import ChatOpenAI from langchain.schema import HumanMessage @@ -74,7 +74,7 @@ You can also intermix direct OpenAI calls and Langchain LLM calls: import os from log10.load import log10, log10_session import openai -from langchain import OpenAI +from langchain.llms import OpenAI log10(openai) diff --git a/docs/extras/integrations/providers/mlflow_ai_gateway.mdx b/docs/extras/integrations/providers/mlflow_ai_gateway.mdx index aaeb84f2f53b21..27d183317e579b 100644 --- a/docs/extras/integrations/providers/mlflow_ai_gateway.mdx +++ b/docs/extras/integrations/providers/mlflow_ai_gateway.mdx @@ -60,7 +60,8 @@ See the [API documentation and examples](https://www.mlflow.org/docs/latest/pyth ```python import mlflow -from langchain import LLMChain, PromptTemplate +from langchain.chains import LLMChain +from langchain.prompts import PromptTemplate from langchain.llms import MlflowAIGateway gateway = MlflowAIGateway( @@ -134,7 +134,8 @@ Databricks MLflow AI Gateway is in private preview. Please contact a Databricks representative to enroll in the preview.
```python -from langchain import LLMChain, PromptTemplate +from langchain.chains import LLMChain +from langchain.prompts import PromptTemplate from langchain.llms import MlflowAIGateway gateway = MlflowAIGateway( diff --git a/docs/extras/integrations/providers/motherduck.mdx b/docs/extras/integrations/providers/motherduck.mdx index a388bd96fca8b3..9f6d79146da782 100644 --- a/docs/extras/integrations/providers/motherduck.mdx +++ b/docs/extras/integrations/providers/motherduck.mdx @@ -26,7 +26,9 @@ conn_str = f"duckdb:///md:{token}@my_db" You can use the SQLChain to query data in your Motherduck instance in natural language. ``` -from langchain import OpenAI, SQLDatabase, SQLDatabaseChain +from langchain.llms import OpenAI +from langchain.utilities import SQLDatabase +from langchain_experimental.sql import SQLDatabaseChain db = SQLDatabase.from_uri(conn_str) db_chain = SQLDatabaseChain.from_llm(OpenAI(temperature=0), db, verbose=True) ``` diff --git a/docs/extras/integrations/providers/predictionguard.mdx b/docs/extras/integrations/providers/predictionguard.mdx index 06766504f74044..f953e37effe44a 100644 --- a/docs/extras/integrations/providers/predictionguard.mdx +++ b/docs/extras/integrations/providers/predictionguard.mdx @@ -37,7 +37,8 @@ import os import predictionguard as pg from langchain.llms import PredictionGuard -from langchain import PromptTemplate, LLMChain +from langchain.prompts import PromptTemplate +from langchain.chains import LLMChain # Your Prediction Guard API key. Get one at predictionguard.com os.environ["PREDICTIONGUARD_TOKEN"] = "" @@ -76,7 +76,8 @@ Basic LLM Chaining with the Prediction Guard wrapper: ```python import os -from langchain import PromptTemplate, LLMChain +from langchain.prompts import PromptTemplate +from langchain.chains import LLMChain from langchain.llms import PredictionGuard # Optional, add your OpenAI API Key. This is optional, as Prediction Guard allows diff --git a/docs/extras/integrations/providers/ray_serve.ipynb b/docs/extras/integrations/providers/ray_serve.ipynb index da26930ad27f43..0e56b3d37c1af7 100644 --- a/docs/extras/integrations/providers/ray_serve.ipynb +++ b/docs/extras/integrations/providers/ray_serve.ipynb @@ -108,7 +108,7 @@ "outputs": [], "source": [ "from langchain.llms import OpenAI\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/providers/shaleprotocol.md b/docs/extras/integrations/providers/shaleprotocol.md index 0ffa6294bd9928..332963050e3080 100644 --- a/docs/extras/integrations/providers/shaleprotocol.md +++ b/docs/extras/integrations/providers/shaleprotocol.md @@ -20,7 +20,8 @@ As of June 2023, the API supports Vicuna-13B by default.
We are going to support For example ```python from langchain.llms import OpenAI -from langchain import PromptTemplate, LLMChain +from langchain.prompts import PromptTemplate +from langchain.chains import LLMChain import os os.environ['OPENAI_API_BASE'] = "https://shale.live/v1" diff --git a/docs/extras/integrations/retrievers/google_drive.ipynb b/docs/extras/integrations/retrievers/google_drive.ipynb index caf0bf3092ffa8..77de80be30030d 100644 --- a/docs/extras/integrations/retrievers/google_drive.ipynb +++ b/docs/extras/integrations/retrievers/google_drive.ipynb @@ -204,7 +204,7 @@ }, "outputs": [], "source": [ - "from langchain import PromptTemplate\n", + "from langchain.prompts import PromptTemplate\n", "retriever = GoogleDriveRetriever(\n", " template=PromptTemplate(input_variables=['query'],\n", " # See https://developers.google.com/drive/api/guides/search-files\n", diff --git a/docs/extras/integrations/retrievers/re_phrase.ipynb b/docs/extras/integrations/retrievers/re_phrase.ipynb index 31221663e3305f..1432a8d20e0d19 100644 --- a/docs/extras/integrations/retrievers/re_phrase.ipynb +++ b/docs/extras/integrations/retrievers/re_phrase.ipynb @@ -150,7 +150,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import LLMChain\n", + "from langchain.chains import LLMChain\n", "from langchain.prompts import PromptTemplate\n", "\n", "QUERY_PROMPT = PromptTemplate(\n", diff --git a/docs/extras/integrations/text_embedding/clarifai.ipynb b/docs/extras/integrations/text_embedding/clarifai.ipynb index 17c7cf454a7da2..1fb53e49fc017f 100644 --- a/docs/extras/integrations/text_embedding/clarifai.ipynb +++ b/docs/extras/integrations/text_embedding/clarifai.ipynb @@ -82,7 +82,7 @@ "source": [ "# Import the required modules\n", "from langchain.embeddings import ClarifaiEmbeddings\n", - "from langchain import PromptTemplate, LLMChain" + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain" ] }, { diff --git a/docs/extras/integrations/toolkits/amadeus.ipynb b/docs/extras/integrations/toolkits/amadeus.ipynb index baa9288dcd84c9..91fc4bb9e1822b 100644 --- a/docs/extras/integrations/toolkits/amadeus.ipynb +++ b/docs/extras/integrations/toolkits/amadeus.ipynb @@ -81,7 +81,7 @@ }, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import initialize_agent, AgentType" ] }, diff --git a/docs/extras/integrations/toolkits/azure_cognitive_services.ipynb b/docs/extras/integrations/toolkits/azure_cognitive_services.ipynb index 609cc2e4e498e5..de9d16c6567fd8 100644 --- a/docs/extras/integrations/toolkits/azure_cognitive_services.ipynb +++ b/docs/extras/integrations/toolkits/azure_cognitive_services.ipynb @@ -105,7 +105,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import initialize_agent, AgentType" ] }, diff --git a/docs/extras/integrations/toolkits/gmail.ipynb b/docs/extras/integrations/toolkits/gmail.ipynb index d24ded1f3609d7..9ae8fc2b8d46a5 100644 --- a/docs/extras/integrations/toolkits/gmail.ipynb +++ b/docs/extras/integrations/toolkits/gmail.ipynb @@ -118,7 +118,7 @@ }, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import initialize_agent, AgentType" ] }, diff --git a/docs/extras/integrations/toolkits/google_drive.ipynb b/docs/extras/integrations/toolkits/google_drive.ipynb index 38ee843d43eae0..7e5bc413d23121
100644 --- a/docs/extras/integrations/toolkits/google_drive.ipynb +++ b/docs/extras/integrations/toolkits/google_drive.ipynb @@ -167,7 +167,7 @@ }, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import initialize_agent, AgentType\n", "llm = OpenAI(temperature=0)\n", "agent = initialize_agent(\n", diff --git a/docs/extras/integrations/toolkits/multion.ipynb b/docs/extras/integrations/toolkits/multion.ipynb index 5502d3e7044f64..7f168d001bc7e3 100644 --- a/docs/extras/integrations/toolkits/multion.ipynb +++ b/docs/extras/integrations/toolkits/multion.ipynb @@ -81,7 +81,7 @@ }, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import initialize_agent, AgentType\n", "llm = OpenAI(temperature=0)\n", "from langchain.agents.agent_toolkits import MultionToolkit\n", diff --git a/docs/extras/integrations/toolkits/office365.ipynb b/docs/extras/integrations/toolkits/office365.ipynb index 350bcc0495ff0d..c22f2a047e837f 100644 --- a/docs/extras/integrations/toolkits/office365.ipynb +++ b/docs/extras/integrations/toolkits/office365.ipynb @@ -93,7 +93,7 @@ }, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import initialize_agent, AgentType" ] }, diff --git a/docs/extras/integrations/toolkits/vectorstore.ipynb b/docs/extras/integrations/toolkits/vectorstore.ipynb index db388fdb077786..b9b5c42e8918ea 100644 --- a/docs/extras/integrations/toolkits/vectorstore.ipynb +++ b/docs/extras/integrations/toolkits/vectorstore.ipynb @@ -30,7 +30,7 @@ "from langchain.embeddings.openai import OpenAIEmbeddings\n", "from langchain.vectorstores import Chroma\n", "from langchain.text_splitter import CharacterTextSplitter\n", - "from langchain import OpenAI, VectorDBQA\n", + "from langchain.llms import OpenAI, VectorDBQA\n", "\n", "llm = OpenAI(temperature=0)" ] diff --git a/docs/extras/integrations/tools/awslambda.ipynb b/docs/extras/integrations/tools/awslambda.ipynb index be44befa2d4fb9..30a9fb968d3472 100644 --- a/docs/extras/integrations/tools/awslambda.ipynb +++ b/docs/extras/integrations/tools/awslambda.ipynb @@ -61,7 +61,7 @@ }, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import load_tools, initialize_agent, AgentType\n", "\n", "llm = OpenAI(temperature=0)\n", diff --git a/docs/extras/integrations/tools/eleven_labs_tts.ipynb b/docs/extras/integrations/tools/eleven_labs_tts.ipynb index 093679c8d18e75..55a9d3fd8080bd 100644 --- a/docs/extras/integrations/tools/eleven_labs_tts.ipynb +++ b/docs/extras/integrations/tools/eleven_labs_tts.ipynb @@ -126,7 +126,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import initialize_agent, AgentType, load_tools" ] }, diff --git a/docs/extras/integrations/tools/google_drive.ipynb b/docs/extras/integrations/tools/google_drive.ipynb index c8ad25c2cca623..6dbd337c3beae9 100644 --- a/docs/extras/integrations/tools/google_drive.ipynb +++ b/docs/extras/integrations/tools/google_drive.ipynb @@ -167,7 +167,7 @@ }, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import initialize_agent, AgentType\n", "llm = OpenAI(temperature=0)\n", "agent = initialize_agent(\n", diff --git 
a/docs/extras/integrations/tools/graphql.ipynb b/docs/extras/integrations/tools/graphql.ipynb index 6cdd3d3019bce1..76a3cf910fa4af 100644 --- a/docs/extras/integrations/tools/graphql.ipynb +++ b/docs/extras/integrations/tools/graphql.ipynb @@ -43,7 +43,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import load_tools, initialize_agent, AgentType\n", "from langchain.utilities import GraphQLAPIWrapper\n", "\n", diff --git a/docs/extras/integrations/tools/lemonai.ipynb b/docs/extras/integrations/tools/lemonai.ipynb index 454d3b93129d7a..79cf1d6e170cad 100644 --- a/docs/extras/integrations/tools/lemonai.ipynb +++ b/docs/extras/integrations/tools/lemonai.ipynb @@ -126,7 +126,7 @@ "source": [ "import os\n", "from lemonai import execute_workflow\n", - "from langchain import OpenAI" + "from langchain.llms import OpenAI" ] }, { diff --git a/docs/extras/integrations/vectorstores/marqo.ipynb b/docs/extras/integrations/vectorstores/marqo.ipynb index 13f0164e7fff9e..d208f7a6f740f1 100644 --- a/docs/extras/integrations/vectorstores/marqo.ipynb +++ b/docs/extras/integrations/vectorstores/marqo.ipynb @@ -474,7 +474,7 @@ ], "source": [ "from langchain.chains import RetrievalQAWithSourcesChain\n", - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "\n", "import os\n", "import getpass\n", diff --git a/docs/extras/integrations/vectorstores/starrocks.ipynb b/docs/extras/integrations/vectorstores/starrocks.ipynb index 6291d49f237e8b..6f51f25a13d6a7 100644 --- a/docs/extras/integrations/vectorstores/starrocks.ipynb +++ b/docs/extras/integrations/vectorstores/starrocks.ipynb @@ -62,7 +62,7 @@ "from langchain.vectorstores.starrocks import StarRocksSettings\n", "from langchain.vectorstores import Chroma\n", "from langchain.text_splitter import CharacterTextSplitter, TokenTextSplitter\n", - "from langchain import OpenAI, VectorDBQA\n", + "from langchain.llms import OpenAI, VectorDBQA\n", "from langchain.document_loaders import DirectoryLoader\n", "from langchain.chains import RetrievalQA\n", "from langchain.document_loaders import TextLoader, UnstructuredMarkdownLoader\n", diff --git a/docs/extras/integrations/vectorstores/vearch.ipynb b/docs/extras/integrations/vectorstores/vearch.ipynb index 8e14c12369c488..efddac3eb58935 100644 --- a/docs/extras/integrations/vectorstores/vearch.ipynb +++ b/docs/extras/integrations/vectorstores/vearch.ipynb @@ -39,7 +39,7 @@ "\n", "import os, sys, torch\n", "from transformers import AutoTokenizer, AutoModelForCausalLM, AutoModel\n", - "from langchain import HuggingFacePipeline, ConversationChain\n", + "from langchain.llms import HuggingFacePipeline\nfrom langchain.chains import ConversationChain\n", "from langchain.text_splitter import RecursiveCharacterTextSplitter\n", "from langchain.vectorstores.vearch import VearchDb\n", "from langchain.document_loaders import TextLoader\n", diff --git a/docs/extras/integrations/vectorstores/weaviate.ipynb b/docs/extras/integrations/vectorstores/weaviate.ipynb index 382baab9715065..7b25d80d621497 100644 --- a/docs/extras/integrations/vectorstores/weaviate.ipynb +++ b/docs/extras/integrations/vectorstores/weaviate.ipynb @@ -332,7 +332,7 @@ "outputs": [], "source": [ "from langchain.chains import RetrievalQAWithSourcesChain\n", - "from langchain import OpenAI" + "from langchain.llms import OpenAI" ] }, { diff --git a/docs/extras/modules/agents/agent_types/openai_multi_functions_agent.ipynb 
b/docs/extras/modules/agents/agent_types/openai_multi_functions_agent.ipynb index b2b18853884f23..5c8d1c10125f4f 100644 --- a/docs/extras/modules/agents/agent_types/openai_multi_functions_agent.ipynb +++ b/docs/extras/modules/agents/agent_types/openai_multi_functions_agent.ipynb @@ -23,7 +23,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import SerpAPIWrapper\n", + "from langchain.utilities import SerpAPIWrapper\n", "from langchain.agents import initialize_agent, Tool\n", "from langchain.agents import AgentType\n", "from langchain.chat_models import ChatOpenAI" diff --git a/docs/extras/modules/agents/agent_types/react_docstore.ipynb b/docs/extras/modules/agents/agent_types/react_docstore.ipynb index c18a4914488fa7..68aeaaf96f06b1 100644 --- a/docs/extras/modules/agents/agent_types/react_docstore.ipynb +++ b/docs/extras/modules/agents/agent_types/react_docstore.ipynb @@ -17,7 +17,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import OpenAI, Wikipedia\n", + "from langchain.llms import OpenAI, Wikipedia\n", "from langchain.agents import initialize_agent, Tool\n", "from langchain.agents import AgentType\n", "from langchain.agents.react.base import DocstoreExplorer\n", diff --git a/docs/extras/modules/agents/agent_types/self_ask_with_search.ipynb b/docs/extras/modules/agents/agent_types/self_ask_with_search.ipynb index 673a13eddbeac3..860f56a7290225 100644 --- a/docs/extras/modules/agents/agent_types/self_ask_with_search.ipynb +++ b/docs/extras/modules/agents/agent_types/self_ask_with_search.ipynb @@ -45,7 +45,7 @@ } ], "source": [ - "from langchain import OpenAI, SerpAPIWrapper\n", + "from langchain.llms import OpenAI, SerpAPIWrapper\n", "from langchain.agents import initialize_agent, Tool\n", "from langchain.agents import AgentType\n", "\n", diff --git a/docs/extras/modules/agents/how_to/add_memory_openai_functions.ipynb b/docs/extras/modules/agents/how_to/add_memory_openai_functions.ipynb index c69ce64976c914..6530f477e5cd25 100644 --- a/docs/extras/modules/agents/how_to/add_memory_openai_functions.ipynb +++ b/docs/extras/modules/agents/how_to/add_memory_openai_functions.ipynb @@ -17,13 +17,11 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import (\n", - " LLMMathChain,\n", - " OpenAI,\n", - " SerpAPIWrapper,\n", - " SQLDatabase,\n", - " SQLDatabaseChain,\n", - ")\n", + "from langchain.chains import LLMMathChain\n", + "from langchain.llms import OpenAI\n", + "from langchain.utilities import SerpAPIWrapper\n", + "from langchain.utilities import SQLDatabase\n", + "from langchain_experimental.sql import SQLDatabaseChain\n", "from langchain.agents import initialize_agent, Tool\n", "from langchain.agents import AgentType\n", "from langchain.chat_models import ChatOpenAI" diff --git a/docs/extras/modules/agents/how_to/agent_iter.ipynb b/docs/extras/modules/agents/how_to/agent_iter.ipynb index 35a32f83b5c67b..280e73eabcfed9 100644 --- a/docs/extras/modules/agents/how_to/agent_iter.ipynb +++ b/docs/extras/modules/agents/how_to/agent_iter.ipynb @@ -29,7 +29,7 @@ "from langchain.agents import AgentExecutor, initialize_agent, AgentType\n", "from langchain.schema import AgentFinish\n", "from langchain.agents.tools import Tool\n", - "from langchain import LLMMathChain\n", + "from langchain.chains import LLMMathChain\n", "from langchain.chat_models import ChatOpenAI" ] }, diff --git a/docs/extras/modules/agents/how_to/agent_vectorstore.ipynb b/docs/extras/modules/agents/how_to/agent_vectorstore.ipynb index 7259982e9491a1..2af0600ad70366 100644 --- 
a/docs/extras/modules/agents/how_to/agent_vectorstore.ipynb +++ b/docs/extras/modules/agents/how_to/agent_vectorstore.ipynb @@ -166,7 +166,7 @@ "from langchain.agents import AgentType\n", "from langchain.tools import BaseTool\n", "from langchain.llms import OpenAI\n", - "from langchain import LLMMathChain, SerpAPIWrapper" + "from langchain.chains import LLMMathChain, SerpAPIWrapper" ] }, { diff --git a/docs/extras/modules/agents/how_to/chatgpt_clone.ipynb b/docs/extras/modules/agents/how_to/chatgpt_clone.ipynb index 7b5bba41a7196c..8375fdd76b5503 100644 --- a/docs/extras/modules/agents/how_to/chatgpt_clone.ipynb +++ b/docs/extras/modules/agents/how_to/chatgpt_clone.ipynb @@ -47,7 +47,7 @@ } ], "source": [ - "from langchain import OpenAI, ConversationChain, LLMChain, PromptTemplate\n", + "from langchain.llms import OpenAI, ConversationChain, LLMChain, PromptTemplate\n", "from langchain.memory import ConversationBufferWindowMemory\n", "\n", "\n", diff --git a/docs/extras/modules/agents/how_to/custom_agent.ipynb b/docs/extras/modules/agents/how_to/custom_agent.ipynb index 19faa567e19bdb..598125cd9ab77a 100644 --- a/docs/extras/modules/agents/how_to/custom_agent.ipynb +++ b/docs/extras/modules/agents/how_to/custom_agent.ipynb @@ -26,7 +26,7 @@ "outputs": [], "source": [ "from langchain.agents import Tool, AgentExecutor, BaseSingleActionAgent\n", - "from langchain import OpenAI, SerpAPIWrapper" + "from langchain.llms import OpenAI, SerpAPIWrapper" ] }, { diff --git a/docs/extras/modules/agents/how_to/custom_agent_with_tool_retrieval.ipynb b/docs/extras/modules/agents/how_to/custom_agent_with_tool_retrieval.ipynb index 5d8bbd372d6cdc..9ec20626709298 100644 --- a/docs/extras/modules/agents/how_to/custom_agent_with_tool_retrieval.ipynb +++ b/docs/extras/modules/agents/how_to/custom_agent_with_tool_retrieval.ipynb @@ -38,7 +38,7 @@ " AgentOutputParser,\n", ")\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain import OpenAI, SerpAPIWrapper, LLMChain\n", + "from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain\n", "from typing import List, Union\n", "from langchain.schema import AgentAction, AgentFinish\n", "import re" diff --git a/docs/extras/modules/agents/how_to/custom_mrkl_agent.ipynb b/docs/extras/modules/agents/how_to/custom_mrkl_agent.ipynb index 9892f2450c3763..ec073b3b0ca30f 100644 --- a/docs/extras/modules/agents/how_to/custom_mrkl_agent.ipynb +++ b/docs/extras/modules/agents/how_to/custom_mrkl_agent.ipynb @@ -48,7 +48,7 @@ "outputs": [], "source": [ "from langchain.agents import ZeroShotAgent, Tool, AgentExecutor\n", - "from langchain import OpenAI, SerpAPIWrapper, LLMChain" + "from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain" ] }, { diff --git a/docs/extras/modules/agents/how_to/custom_multi_action_agent.ipynb b/docs/extras/modules/agents/how_to/custom_multi_action_agent.ipynb index 1dedebc194999e..615e71346185a9 100644 --- a/docs/extras/modules/agents/how_to/custom_multi_action_agent.ipynb +++ b/docs/extras/modules/agents/how_to/custom_multi_action_agent.ipynb @@ -26,7 +26,7 @@ "outputs": [], "source": [ "from langchain.agents import Tool, AgentExecutor, BaseMultiActionAgent\n", - "from langchain import OpenAI, SerpAPIWrapper" + "from langchain.llms import OpenAI, SerpAPIWrapper" ] }, { diff --git a/docs/extras/modules/agents/how_to/handle_parsing_errors.ipynb b/docs/extras/modules/agents/how_to/handle_parsing_errors.ipynb index c56c1b36ae4e8f..c2bcea22fd523b 100644 --- a/docs/extras/modules/agents/how_to/handle_parsing_errors.ipynb +++ 
b/docs/extras/modules/agents/how_to/handle_parsing_errors.ipynb @@ -25,12 +25,11 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import (\n", - " OpenAI,\n", - " LLMMathChain,\n", - " SerpAPIWrapper,\n", - " SQLDatabase,\n", - " SQLDatabaseChain,\n", + "from langchain.llms import OpenAI\n", + "from langchain.chains import LLMMathChain\n", + "from langchain.utilities import SerpAPIWrapper\n", + "from langchain.utilities import SQLDatabase\n", + "from langchain_experimental.sql import SQLDatabaseChain\n", ")\n", "from langchain.agents import initialize_agent, Tool\n", "from langchain.agents import AgentType\n", diff --git a/docs/extras/modules/agents/how_to/sharedmemory_for_tools.ipynb b/docs/extras/modules/agents/how_to/sharedmemory_for_tools.ipynb index 66bd13f7cf81b2..d76133f9a8d681 100644 --- a/docs/extras/modules/agents/how_to/sharedmemory_for_tools.ipynb +++ b/docs/extras/modules/agents/how_to/sharedmemory_for_tools.ipynb @@ -24,7 +24,7 @@ "source": [ "from langchain.agents import ZeroShotAgent, Tool, AgentExecutor\n", "from langchain.memory import ConversationBufferMemory, ReadOnlySharedMemory\n", - "from langchain import OpenAI, LLMChain, PromptTemplate\n", + "from langchain.llms import OpenAI, LLMChain, PromptTemplate\n", "from langchain.utilities import GoogleSearchAPIWrapper" ] }, diff --git a/docs/extras/modules/agents/how_to/use_toolkits_with_openai_functions.ipynb b/docs/extras/modules/agents/how_to/use_toolkits_with_openai_functions.ipynb index 8fba48771fafbd..60a0442fdefa01 100644 --- a/docs/extras/modules/agents/how_to/use_toolkits_with_openai_functions.ipynb +++ b/docs/extras/modules/agents/how_to/use_toolkits_with_openai_functions.ipynb @@ -17,13 +17,11 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import (\n", - " LLMMathChain,\n", - " OpenAI,\n", - " SerpAPIWrapper,\n", - " SQLDatabase,\n", - " SQLDatabaseChain,\n", - ")\n", + "from langchain.chains import LLMMathChain\n", + "from langchain.llms import OpenAI\n", + "from langchain.utilities import SerpAPIWrapper\n", + "from langchain.utilities import SQLDatabase\n", + "from langchain_experimental.sql import SQLDatabaseChain\n", "from langchain.agents import initialize_agent, Tool\n", "from langchain.agents import AgentType\n", "from langchain.chat_models import ChatOpenAI\n", diff --git a/docs/extras/modules/agents/tools/custom_tools.ipynb b/docs/extras/modules/agents/tools/custom_tools.ipynb index 11e629ffe53031..8bdbe2ac44f032 100644 --- a/docs/extras/modules/agents/tools/custom_tools.ipynb +++ b/docs/extras/modules/agents/tools/custom_tools.ipynb @@ -29,7 +29,7 @@ "outputs": [], "source": [ "# Import things that are needed generically\n", - "from langchain import LLMMathChain, SerpAPIWrapper\n", + "from langchain.chains import LLMMathChain, SerpAPIWrapper\n", "from langchain.agents import AgentType, initialize_agent\n", "from langchain.chat_models import ChatOpenAI\n", "from langchain.tools import BaseTool, StructuredTool, Tool, tool" @@ -774,7 +774,7 @@ "from langchain.agents import initialize_agent, Tool\n", "from langchain.agents import AgentType\n", "from langchain.llms import OpenAI\n", - "from langchain import LLMMathChain, SerpAPIWrapper\n", + "from langchain.chains import LLMMathChain, SerpAPIWrapper\n", "\n", "search = SerpAPIWrapper()\n", "tools = [\n", @@ -936,7 +936,7 @@ "source": [ "from langchain.tools.base import ToolException\n", "\n", - "from langchain import SerpAPIWrapper\n", + "from langchain.utilities import SerpAPIWrapper\n", "from langchain.agents import 
AgentType, initialize_agent\n", "from langchain.chat_models import ChatOpenAI\n", "from langchain.tools import Tool\n", diff --git a/docs/extras/modules/agents/tools/multi_input_tool.ipynb b/docs/extras/modules/agents/tools/multi_input_tool.ipynb index dad96af6cb4d55..8ea3bb639a0705 100644 --- a/docs/extras/modules/agents/tools/multi_input_tool.ipynb +++ b/docs/extras/modules/agents/tools/multi_input_tool.ipynb @@ -34,7 +34,7 @@ }, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import initialize_agent, AgentType\n", "\n", "llm = OpenAI(temperature=0)" diff --git a/docs/extras/modules/chains/how_to/from_hub.ipynb b/docs/extras/modules/chains/how_to/from_hub.ipynb index 99b1db8ae17618..0e92e9b664b0cb 100644 --- a/docs/extras/modules/chains/how_to/from_hub.ipynb +++ b/docs/extras/modules/chains/how_to/from_hub.ipynb @@ -73,7 +73,7 @@ "from langchain.embeddings.openai import OpenAIEmbeddings\n", "from langchain.vectorstores import Chroma\n", "from langchain.text_splitter import CharacterTextSplitter\n", - "from langchain import OpenAI, VectorDBQA" + "from langchain.llms import OpenAI, VectorDBQA" ] }, { diff --git a/docs/extras/modules/chains/how_to/serialization.ipynb b/docs/extras/modules/chains/how_to/serialization.ipynb index 409d495fa07b8e..2b6875c8c699f5 100644 --- a/docs/extras/modules/chains/how_to/serialization.ipynb +++ b/docs/extras/modules/chains/how_to/serialization.ipynb @@ -25,7 +25,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import PromptTemplate, OpenAI, LLMChain\n", + "from langchain.prompts import PromptTemplate, OpenAI, LLMChain\n", "\n", "template = \"\"\"Question: {question}\n", "\n", diff --git a/docs/extras/modules/data_connection/retrievers/MultiQueryRetriever.ipynb b/docs/extras/modules/data_connection/retrievers/MultiQueryRetriever.ipynb index 73747bd5c2c431..86aadd97c7e074 100644 --- a/docs/extras/modules/data_connection/retrievers/MultiQueryRetriever.ipynb +++ b/docs/extras/modules/data_connection/retrievers/MultiQueryRetriever.ipynb @@ -126,7 +126,7 @@ "outputs": [], "source": [ "from typing import List\n", - "from langchain import LLMChain\n", + "from langchain.chains import LLMChain\n", "from pydantic import BaseModel, Field\n", "from langchain.prompts import PromptTemplate\n", "from langchain.output_parsers import PydanticOutputParser\n", diff --git a/docs/extras/modules/memory/agent_with_memory.ipynb b/docs/extras/modules/memory/agent_with_memory.ipynb index acf4980f292dc5..11cf34dc288855 100644 --- a/docs/extras/modules/memory/agent_with_memory.ipynb +++ b/docs/extras/modules/memory/agent_with_memory.ipynb @@ -29,7 +29,7 @@ "source": [ "from langchain.agents import ZeroShotAgent, Tool, AgentExecutor\n", "from langchain.memory import ConversationBufferMemory\n", - "from langchain import OpenAI, LLMChain\n", + "from langchain.llms import OpenAI, LLMChain\n", "from langchain.utilities import GoogleSearchAPIWrapper" ] }, diff --git a/docs/extras/modules/memory/agent_with_memory_in_db.ipynb b/docs/extras/modules/memory/agent_with_memory_in_db.ipynb index d53a9d916e67c0..f8e527f45a073c 100644 --- a/docs/extras/modules/memory/agent_with_memory_in_db.ipynb +++ b/docs/extras/modules/memory/agent_with_memory_in_db.ipynb @@ -37,7 +37,7 @@ "from langchain.memory import ConversationBufferMemory\n", "from langchain.memory.chat_memory import ChatMessageHistory\n", "from langchain.memory.chat_message_histories import RedisChatMessageHistory\n", - "from langchain import 
OpenAI, LLMChain\n", + "from langchain.llms import OpenAI, LLMChain\n", "from langchain.utilities import GoogleSearchAPIWrapper" ] }, diff --git a/docs/extras/modules/memory/custom_memory.ipynb b/docs/extras/modules/memory/custom_memory.ipynb index 8de18c824d9f30..fd99520319426f 100644 --- a/docs/extras/modules/memory/custom_memory.ipynb +++ b/docs/extras/modules/memory/custom_memory.ipynb @@ -25,7 +25,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import OpenAI, ConversationChain\n", + "from langchain.llms import OpenAI, ConversationChain\n", "from langchain.schema import BaseMemory\n", "from pydantic import BaseModel\n", "from typing import List, Dict, Any" diff --git a/docs/extras/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store.ipynb b/docs/extras/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store.ipynb index 768c43e9291582..ec5e681f6c5203 100644 --- a/docs/extras/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store.ipynb +++ b/docs/extras/modules/model_io/prompts/prompt_templates/connecting_to_a_feature_store.ipynb @@ -781,7 +781,7 @@ "os.environ[\"OPENAI_API_KEY\"]=\"\" # Fill the open ai key here\n", "\n", "from langchain.chat_models import ChatOpenAI\n", - "from langchain import LLMChain\n", + "from langchain.chains import LLMChain\n", "\n", "chain = LLMChain(llm=ChatOpenAI(), prompt=prompt_template)" ] diff --git a/docs/extras/use_cases/code_understanding.ipynb b/docs/extras/use_cases/code_understanding.ipynb index df0cfbf9d1b862..73e02145e61f12 100644 --- a/docs/extras/use_cases/code_understanding.ipynb +++ b/docs/extras/use_cases/code_understanding.ipynb @@ -245,7 +245,7 @@ { "data": { "text/plain": [ - "'To initialize a ReAct agent, you need to follow these steps:\\n\\n1. Initialize a language model `llm` of type `BaseLanguageModel`.\\n\\n2. Initialize a document store `docstore` of type `Docstore`.\\n\\n3. Create a `DocstoreExplorer` with the initialized `docstore`. The `DocstoreExplorer` is used to search for and look up terms in the document store.\\n\\n4. Create an array of `Tool` objects. The `Tool` objects represent the actions that the agent can perform. In the case of `ReActDocstoreAgent`, the tools must be \"Search\" and \"Lookup\" with their corresponding functions from the `DocstoreExplorer`.\\n\\n5. Initialize the `ReActDocstoreAgent` using the `from_llm_and_tools` method with the `llm` (language model) and `tools` as parameters.\\n\\n6. Initialize the `ReActChain` (which is the `AgentExecutor`) using the `ReActDocstoreAgent` and `tools` as parameters.\\n\\nHere is an example of how to do this:\\n\\n```python\\nfrom langchain import ReActChain, OpenAI\\nfrom langchain.docstore.base import Docstore\\nfrom langchain.docstore.document import Document\\nfrom langchain.tools.base import BaseTool\\n\\n# Initialize the LLM and a docstore\\nllm = OpenAI()\\ndocstore = Docstore()\\n\\ndocstore_explorer = DocstoreExplorer(docstore)\\ntools = [\\n Tool(\\n name=\"Search\",\\n func=docstore_explorer.search,\\n description=\"Search for a term in the docstore.\",\\n ),\\n Tool(\\n name=\"Lookup\",\\n func=docstore_explorer.lookup,\\n description=\"Lookup a term in the docstore.\",\\n ),\\n]\\nagent = ReActDocstoreAgent.from_llm_and_tools(llm, tools)\\nreact = ReActChain(agent=agent, tools=tools)\\n```\\n\\nKeep in mind that this is a simplified example and you might need to adapt it to your specific needs.'" + "'To initialize a ReAct agent, you need to follow these steps:\\n\\n1. 
Initialize a language model `llm` of type `BaseLanguageModel`.\\n\\n2. Initialize a document store `docstore` of type `Docstore`.\\n\\n3. Create a `DocstoreExplorer` with the initialized `docstore`. The `DocstoreExplorer` is used to search for and look up terms in the document store.\\n\\n4. Create an array of `Tool` objects. The `Tool` objects represent the actions that the agent can perform. In the case of `ReActDocstoreAgent`, the tools must be \"Search\" and \"Lookup\" with their corresponding functions from the `DocstoreExplorer`.\\n\\n5. Initialize the `ReActDocstoreAgent` using the `from_llm_and_tools` method with the `llm` (language model) and `tools` as parameters.\\n\\n6. Initialize the `ReActChain` (which is the `AgentExecutor`) using the `ReActDocstoreAgent` and `tools` as parameters.\\n\\nHere is an example of how to do this:\\n\\n```python\\nfrom langchain.chains import ReActChain, OpenAI\\nfrom langchain.docstore.base import Docstore\\nfrom langchain.docstore.document import Document\\nfrom langchain.tools.base import BaseTool\\n\\n# Initialize the LLM and a docstore\\nllm = OpenAI()\\ndocstore = Docstore()\\n\\ndocstore_explorer = DocstoreExplorer(docstore)\\ntools = [\\n Tool(\\n name=\"Search\",\\n func=docstore_explorer.search,\\n description=\"Search for a term in the docstore.\",\\n ),\\n Tool(\\n name=\"Lookup\",\\n func=docstore_explorer.lookup,\\n description=\"Lookup a term in the docstore.\",\\n ),\\n]\\nagent = ReActDocstoreAgent.from_llm_and_tools(llm, tools)\\nreact = ReActChain(agent=agent, tools=tools)\\n```\\n\\nKeep in mind that this is a simplified example and you might need to adapt it to your specific needs.'" ] }, "execution_count": 43, @@ -361,7 +361,7 @@ "outputs": [], "source": [ "from langchain.llms import LlamaCpp\n", - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "from langchain.callbacks.manager import CallbackManager\n", "from langchain.memory import ConversationSummaryMemory\n", "from langchain.chains import ConversationalRetrievalChain \n", diff --git a/docs/extras/use_cases/more/agents/agent_simulations/multiagent_bidding.ipynb b/docs/extras/use_cases/more/agents/agent_simulations/multiagent_bidding.ipynb index ec0ed5b8844dd2..7ec494a9d7ec2c 100644 --- a/docs/extras/use_cases/more/agents/agent_simulations/multiagent_bidding.ipynb +++ b/docs/extras/use_cases/more/agents/agent_simulations/multiagent_bidding.ipynb @@ -24,7 +24,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import PromptTemplate\n", + "from langchain.prompts import PromptTemplate\n", "import re\n", "import tenacity\n", "from typing import List, Dict, Callable\n", diff --git a/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval.ipynb b/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval.ipynb index a10ebf7eb33a4e..782e70da72c09e 100644 --- a/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval.ipynb +++ b/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval.ipynb @@ -39,7 +39,7 @@ " AgentOutputParser,\n", ")\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain import OpenAI, SerpAPIWrapper, LLMChain\n", + "from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain\n", "from typing import List, Union\n", "from langchain.schema import AgentAction, AgentFinish\n", "from langchain.agents.agent_toolkits import NLAToolkit\n", diff --git 
a/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb b/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb index 4980924463482e..83ccbd7e680e84 100644 --- a/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb +++ b/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb @@ -63,7 +63,7 @@ " AgentOutputParser,\n", ")\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain import OpenAI, SerpAPIWrapper, LLMChain\n", + "from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain\n", "from typing import List, Union\n", "from langchain.schema import AgentAction, AgentFinish\n", "from langchain.agents.agent_toolkits import NLAToolkit\n", diff --git a/docs/extras/use_cases/more/agents/agents/sales_agent_with_context.ipynb b/docs/extras/use_cases/more/agents/agents/sales_agent_with_context.ipynb index b0179f3dbff96f..0a3cce8d335c18 100644 --- a/docs/extras/use_cases/more/agents/agents/sales_agent_with_context.ipynb +++ b/docs/extras/use_cases/more/agents/agents/sales_agent_with_context.ipynb @@ -44,7 +44,7 @@ "\n", "from typing import Dict, List, Any, Union, Callable\n", "from pydantic import BaseModel, Field\n", - "from langchain import LLMChain, PromptTemplate\n", + "from langchain.chains import LLMChain, PromptTemplate\n", "from langchain.llms import BaseLLM\n", "from langchain.chains.base import Chain\n", "from langchain.chat_models import ChatOpenAI\n", diff --git a/docs/extras/use_cases/more/agents/agents/wikibase_agent.ipynb b/docs/extras/use_cases/more/agents/agents/wikibase_agent.ipynb index 2b3fe21d9f4ca2..42f5dc76be9503 100644 --- a/docs/extras/use_cases/more/agents/agents/wikibase_agent.ipynb +++ b/docs/extras/use_cases/more/agents/agents/wikibase_agent.ipynb @@ -396,7 +396,7 @@ " AgentOutputParser,\n", ")\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain import OpenAI, LLMChain\n", + "from langchain.llms import OpenAI, LLMChain\n", "from typing import List, Union\n", "from langchain.schema import AgentAction, AgentFinish\n", "import re" diff --git a/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi.ipynb b/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi.ipynb index 88e53c9798b757..9d5da752636b23 100644 --- a/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi.ipynb +++ b/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi.ipynb @@ -33,7 +33,7 @@ "from collections import deque\n", "from typing import Dict, List, Optional, Any\n", "\n", - "from langchain import LLMChain, OpenAI, PromptTemplate\n", + "from langchain.chains import LLMChain, OpenAI, PromptTemplate\n", "from langchain.embeddings import OpenAIEmbeddings\n", "from langchain.llms import BaseLLM\n", "from langchain.vectorstores.base import VectorStore\n", diff --git a/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb b/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb index dc4c4f0974df19..910456b553c04d 100644 --- a/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb +++ b/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb @@ -29,7 +29,7 @@ "from collections import deque\n", "from typing import Dict, List, Optional, Any\n", "\n", - "from langchain import LLMChain, OpenAI, PromptTemplate\n", + "from langchain.chains import LLMChain, OpenAI, 
PromptTemplate\n", "from langchain.embeddings import OpenAIEmbeddings\n", "from langchain.llms import BaseLLM\n", "from langchain.vectorstores.base import VectorStore\n", @@ -111,7 +111,7 @@ "outputs": [], "source": [ "from langchain.agents import ZeroShotAgent, Tool, AgentExecutor\n", - "from langchain import OpenAI, SerpAPIWrapper, LLMChain\n", + "from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain\n", "\n", "todo_prompt = PromptTemplate.from_template(\n", " \"You are a planner who is an expert at coming up with a todo list for a given objective. Come up with a todo list for this objective: {objective}\"\n", diff --git a/docs/extras/use_cases/more/agents/autonomous_agents/meta_prompt.ipynb b/docs/extras/use_cases/more/agents/autonomous_agents/meta_prompt.ipynb index 1f746b15b717de..05441f17a0d45a 100644 --- a/docs/extras/use_cases/more/agents/autonomous_agents/meta_prompt.ipynb +++ b/docs/extras/use_cases/more/agents/autonomous_agents/meta_prompt.ipynb @@ -56,7 +56,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import OpenAI, LLMChain, PromptTemplate\n", + "from langchain.llms import OpenAI, LLMChain, PromptTemplate\n", "from langchain.memory import ConversationBufferWindowMemory" ] }, diff --git a/docs/extras/use_cases/more/agents/multi_modal/multi_modal_output_agent.ipynb b/docs/extras/use_cases/more/agents/multi_modal/multi_modal_output_agent.ipynb index cc32520acd15e1..632a41350c7846 100644 --- a/docs/extras/use_cases/more/agents/multi_modal/multi_modal_output_agent.ipynb +++ b/docs/extras/use_cases/more/agents/multi_modal/multi_modal_output_agent.ipynb @@ -41,7 +41,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "from langchain.agents import initialize_agent\n", "from langchain.agents import AgentType\n", "from langchain.tools import SteamshipImageGenerationTool" diff --git a/docs/extras/use_cases/more/code_writing/cpal.ipynb b/docs/extras/use_cases/more/code_writing/cpal.ipynb index 47e2decbfd3188..b6277e0ccc7f8f 100644 --- a/docs/extras/use_cases/more/code_writing/cpal.ipynb +++ b/docs/extras/use_cases/more/code_writing/cpal.ipynb @@ -50,7 +50,7 @@ "\n", "from langchain_experimental.cpal.base import CPALChain\n", "from langchain_experimental.pal_chain import PALChain\n", - "from langchain import OpenAI\n", + "from langchain.llms import OpenAI\n", "\n", "llm = OpenAI(temperature=0, max_tokens=512)\n", "cpal_chain = CPALChain.from_univariate_prompt(llm=llm, verbose=True)\n", diff --git a/docs/extras/use_cases/more/code_writing/llm_math.ipynb b/docs/extras/use_cases/more/code_writing/llm_math.ipynb index b8e824d9f67278..ae3354a35a283d 100644 --- a/docs/extras/use_cases/more/code_writing/llm_math.ipynb +++ b/docs/extras/use_cases/more/code_writing/llm_math.ipynb @@ -45,7 +45,7 @@ } ], "source": [ - "from langchain import OpenAI, LLMMathChain\n", + "from langchain.llms import OpenAI, LLMMathChain\n", "\n", "llm = OpenAI(temperature=0)\n", "llm_math = LLMMathChain.from_llm(llm, verbose=True)\n", diff --git a/docs/extras/use_cases/more/code_writing/pal.ipynb b/docs/extras/use_cases/more/code_writing/pal.ipynb index 4a5e9fc57ebb0c..3b8ab0dfefff48 100644 --- a/docs/extras/use_cases/more/code_writing/pal.ipynb +++ b/docs/extras/use_cases/more/code_writing/pal.ipynb @@ -18,7 +18,7 @@ "outputs": [], "source": [ "from langchain_experimental.pal_chain import PALChain\n", - "from langchain import OpenAI" + "from langchain.llms import OpenAI" ] }, { diff --git 
a/docs/extras/use_cases/more/graph/graph_memgraph_qa.ipynb b/docs/extras/use_cases/more/graph/graph_memgraph_qa.ipynb index 6a6f89c4f9e4fb..94df7393bf6717 100644 --- a/docs/extras/use_cases/more/graph/graph_memgraph_qa.ipynb +++ b/docs/extras/use_cases/more/graph/graph_memgraph_qa.ipynb @@ -67,7 +67,7 @@ "from langchain.chat_models import ChatOpenAI\n", "from langchain.chains import GraphCypherQAChain\n", "from langchain.graphs import MemgraphGraph\n", - "from langchain import PromptTemplate\n", + "from langchain.prompts import PromptTemplate\n", "\n", "from gqlalchemy import Memgraph\n", "\n", diff --git a/docs/extras/use_cases/qa_structured/integrations/myscale_vector_sql.ipynb b/docs/extras/use_cases/qa_structured/integrations/myscale_vector_sql.ipynb index 65bd8323ed068a..0ab31ba6deba2c 100644 --- a/docs/extras/use_cases/qa_structured/integrations/myscale_vector_sql.ipynb +++ b/docs/extras/use_cases/qa_structured/integrations/myscale_vector_sql.ipynb @@ -31,10 +31,10 @@ "from os import environ\n", "import getpass\n", "from typing import Dict, Any\n", - "from langchain import OpenAI, SQLDatabase, LLMChain\n", + "from langchain.llms import OpenAI, SQLDatabase, LLMChain\n", "from langchain_experimental.sql.vector_sql import VectorSQLDatabaseChain\n", "from sqlalchemy import create_engine, Column, MetaData\n", - "from langchain import PromptTemplate\n", + "from langchain.prompts import PromptTemplate\n", "\n", "\n", "from sqlalchemy import create_engine\n", diff --git a/docs/extras/use_cases/question_answering/how_to/local_retrieval_qa.ipynb b/docs/extras/use_cases/question_answering/how_to/local_retrieval_qa.ipynb index a1fe71e8b5027e..9a704ea252d2ac 100644 --- a/docs/extras/use_cases/question_answering/how_to/local_retrieval_qa.ipynb +++ b/docs/extras/use_cases/question_answering/how_to/local_retrieval_qa.ipynb @@ -413,7 +413,7 @@ } ], "source": [ - "from langchain import PromptTemplate, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.chains import LLMChain\n", "\n", "# Prompt\n", "prompt = PromptTemplate.from_template(\n", diff --git a/docs/snippets/modules/agents/agent_types/conversational_agent.mdx b/docs/snippets/modules/agents/agent_types/conversational_agent.mdx index 50f0129cad94a5..285014ae9cc105 100644 --- a/docs/snippets/modules/agents/agent_types/conversational_agent.mdx +++ b/docs/snippets/modules/agents/agent_types/conversational_agent.mdx @@ -4,7 +4,7 @@ This is accomplished with a specific type of agent (`conversational-react-descri from langchain.agents import Tool from langchain.agents import AgentType from langchain.memory import ConversationBufferMemory -from langchain import OpenAI +from langchain.llms import OpenAI from langchain.utilities import SerpAPIWrapper from langchain.agents import initialize_agent ``` diff --git a/docs/snippets/modules/agents/agent_types/plan_and_execute.mdx b/docs/snippets/modules/agents/agent_types/plan_and_execute.mdx index e5a529525f59ed..81960064a6d3fc 100644 --- a/docs/snippets/modules/agents/agent_types/plan_and_execute.mdx +++ b/docs/snippets/modules/agents/agent_types/plan_and_execute.mdx @@ -5,9 +5,9 @@ from langchain.chat_models import ChatOpenAI from langchain_experimental.plan_and_execute import PlanAndExecute, load_agent_executor, load_chat_planner from langchain.llms import OpenAI -from langchain import SerpAPIWrapper +from langchain.utilities import SerpAPIWrapper from langchain.agents.tools import Tool -from langchain import LLMMathChain +from langchain.chains import LLMMathChain ``` ## Tools 
diff --git a/docs/snippets/modules/agents/how_to/custom_llm_agent.mdx b/docs/snippets/modules/agents/how_to/custom_llm_agent.mdx index b8a03de62139c1..b52fb2aab2abeb 100644 --- a/docs/snippets/modules/agents/how_to/custom_llm_agent.mdx +++ b/docs/snippets/modules/agents/how_to/custom_llm_agent.mdx @@ -20,7 +20,7 @@ Do necessary imports, etc. ```python from langchain.agents import Tool, AgentExecutor, LLMSingleActionAgent, AgentOutputParser from langchain.prompts import StringPromptTemplate -from langchain import OpenAI, SerpAPIWrapper, LLMChain +from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain from typing import List, Union from langchain.schema import AgentAction, AgentFinish, OutputParserException import re diff --git a/docs/snippets/modules/agents/how_to/custom_llm_chat_agent.mdx b/docs/snippets/modules/agents/how_to/custom_llm_chat_agent.mdx index 87f1d584648fac..acacb63599650a 100644 --- a/docs/snippets/modules/agents/how_to/custom_llm_chat_agent.mdx +++ b/docs/snippets/modules/agents/how_to/custom_llm_chat_agent.mdx @@ -27,7 +27,8 @@ pip install openai ```python from langchain.agents import Tool, AgentExecutor, LLMSingleActionAgent, AgentOutputParser from langchain.prompts import BaseChatPromptTemplate -from langchain import SerpAPIWrapper, LLMChain +from langchain.utilities import SerpAPIWrapper +from langchain.chains.llm import LLMChain from langchain.chat_models import ChatOpenAI from typing import List, Union from langchain.schema import AgentAction, AgentFinish, HumanMessage diff --git a/docs/snippets/modules/agents/how_to/mrkl.mdx b/docs/snippets/modules/agents/how_to/mrkl.mdx index 4d46a31c64d8b0..b6d1e3d3c77175 100644 --- a/docs/snippets/modules/agents/how_to/mrkl.mdx +++ b/docs/snippets/modules/agents/how_to/mrkl.mdx @@ -1,5 +1,5 @@ ```python -from langchain import LLMMathChain, OpenAI, SerpAPIWrapper, SQLDatabase, SQLDatabaseChain +from langchain.chains import LLMMathChain, OpenAI, SerpAPIWrapper, SQLDatabase, SQLDatabaseChain from langchain.agents import initialize_agent, Tool from langchain.agents import AgentType ``` diff --git a/docs/snippets/modules/chains/additional/analyze_document.mdx b/docs/snippets/modules/chains/additional/analyze_document.mdx index b9d6bdeaa3d0f2..640ed4ea8d3cc7 100644 --- a/docs/snippets/modules/chains/additional/analyze_document.mdx +++ b/docs/snippets/modules/chains/additional/analyze_document.mdx @@ -8,7 +8,7 @@ Let's take a look at it in action below, using it to summarize a long document. ```python -from langchain import OpenAI +from langchain.llms import OpenAI from langchain.chains.summarize import load_summarize_chain llm = OpenAI(temperature=0) diff --git a/docs/snippets/modules/chains/foundational/llm_chain.mdx b/docs/snippets/modules/chains/foundational/llm_chain.mdx index d22e01c39a7e65..f84a225a2f845d 100644 --- a/docs/snippets/modules/chains/foundational/llm_chain.mdx +++ b/docs/snippets/modules/chains/foundational/llm_chain.mdx @@ -1,5 +1,5 @@ ```python -from langchain import PromptTemplate, OpenAI, LLMChain +from langchain.prompts import PromptTemplate, OpenAI, LLMChain prompt_template = "What is a good name for a company that makes {product}?" 
diff --git a/docs/snippets/modules/chains/popular/sqlite.mdx b/docs/snippets/modules/chains/popular/sqlite.mdx index a709328d1ec611..6319a0917cca86 100644 --- a/docs/snippets/modules/chains/popular/sqlite.mdx +++ b/docs/snippets/modules/chains/popular/sqlite.mdx @@ -537,7 +537,7 @@ Sometimes you may not have the luxury of using OpenAI or other service-hosted la import logging import torch from transformers import AutoTokenizer, GPT2TokenizerFast, pipeline, AutoModelForSeq2SeqLM, AutoModelForCausalLM -from langchain import HuggingFacePipeline +from langchain.llms import HuggingFacePipeline # Note: This model requires a large GPU, e.g. an 80GB A100. See documentation for other ways to run private non-OpenAI models. model_id = "google/flan-ul2" @@ -882,7 +882,7 @@ Now that you have some examples (with manually corrected output SQL), you can do ```python -from langchain import FewShotPromptTemplate, PromptTemplate +from langchain.prompts import FewShotPromptTemplate, PromptTemplate from langchain.chains.sql_database.prompt import _sqlite_prompt, PROMPT_SUFFIX from langchain.embeddings.huggingface import HuggingFaceEmbeddings from langchain.prompts.example_selector.semantic_similarity import SemanticSimilarityExampleSelector diff --git a/docs/snippets/modules/chains/popular/vector_db_qa_with_sources.mdx b/docs/snippets/modules/chains/popular/vector_db_qa_with_sources.mdx index 564406bb615bba..fbf77554db99cc 100644 --- a/docs/snippets/modules/chains/popular/vector_db_qa_with_sources.mdx +++ b/docs/snippets/modules/chains/popular/vector_db_qa_with_sources.mdx @@ -49,7 +49,7 @@ docsearch = Chroma.from_texts(texts, embeddings, metadatas=[{"source": f"{i}-pl" ```python from langchain.chains import RetrievalQAWithSourcesChain -from langchain import OpenAI +from langchain.llms import OpenAI chain = RetrievalQAWithSourcesChain.from_chain_type(OpenAI(temperature=0), chain_type="stuff", retriever=docsearch.as_retriever()) ``` diff --git a/docs/snippets/modules/model_io/models/chat/how_to/prompts.mdx b/docs/snippets/modules/model_io/models/chat/how_to/prompts.mdx index da0df2dbc9aa15..509851d7926e4e 100644 --- a/docs/snippets/modules/model_io/models/chat/how_to/prompts.mdx +++ b/docs/snippets/modules/model_io/models/chat/how_to/prompts.mdx @@ -4,7 +4,7 @@ For convenience, there is a `from_template` method defined on the template. If y ```python -from langchain import PromptTemplate +from langchain.prompts import PromptTemplate from langchain.prompts.chat import ( ChatPromptTemplate, SystemMessagePromptTemplate, diff --git a/docs/snippets/modules/model_io/prompts/prompt_templates/get_started.mdx b/docs/snippets/modules/model_io/prompts/prompt_templates/get_started.mdx index 25d62563ced2eb..f481177a3c8c98 100644 --- a/docs/snippets/modules/model_io/prompts/prompt_templates/get_started.mdx +++ b/docs/snippets/modules/model_io/prompts/prompt_templates/get_started.mdx @@ -8,7 +8,7 @@ By default, `PromptTemplate` uses [Python's str.format](https://docs.python.org/ syntax for templating; however other templating syntax is available (e.g., `jinja2`). ```python -from langchain import PromptTemplate +from langchain.prompts import PromptTemplate prompt_template = PromptTemplate.from_template( "Tell me a {adjective} joke about {content}." 
@@ -27,7 +27,7 @@ prompt_template.format(adjective="funny", content="chickens") The template supports any number of variables, including no variables: ```python -from langchain import PromptTemplate +from langchain.prompts import PromptTemplate prompt_template = PromptTemplate.from_template( "Tell me a joke" @@ -40,7 +40,7 @@ will be compared against the variables present in the template string during ins there is a mismatch; for example, ```python -from langchain import PromptTemplate +from langchain.prompts import PromptTemplate invalid_prompt = PromptTemplate( input_variables=["adjective"], diff --git a/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py b/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py index a624710f0854fc..6fb1d4833dda6a 100644 --- a/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py +++ b/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py @@ -1,6 +1,6 @@ from typing import Any, List, Optional -from langchain import LLMChain, PromptTemplate +from langchain.chains import LLMChain, PromptTemplate from langchain.base_language import BaseLanguageModel from langchain.callbacks.manager import Callbacks diff --git a/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/task_planner.py b/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/task_planner.py index e1228161cf7af8..2346f780451a2b 100644 --- a/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/task_planner.py +++ b/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/task_planner.py @@ -3,7 +3,7 @@ from abc import abstractmethod from typing import Any, Dict, List, Optional, Union -from langchain import LLMChain +from langchain.chains import LLMChain from langchain.base_language import BaseLanguageModel from langchain.callbacks.manager import Callbacks from langchain.prompts.chat import ( diff --git a/libs/experimental/langchain_experimental/sql/base.py b/libs/experimental/langchain_experimental/sql/base.py index 5773e45561553d..1de3a0761356e6 100644 --- a/libs/experimental/langchain_experimental/sql/base.py +++ b/libs/experimental/langchain_experimental/sql/base.py @@ -26,7 +26,7 @@ class SQLDatabaseChain(Chain): .. code-block:: python from langchain_experimental.sql import SQLDatabaseChain - from langchain import OpenAI, SQLDatabase + from langchain.llms import OpenAI, SQLDatabase db = SQLDatabase(...) db_chain = SQLDatabaseChain.from_llm(OpenAI(), db) diff --git a/libs/experimental/langchain_experimental/sql/vector_sql.py b/libs/experimental/langchain_experimental/sql/vector_sql.py index 98f3c2dee0c184..05ccb453b28ae0 100644 --- a/libs/experimental/langchain_experimental/sql/vector_sql.py +++ b/libs/experimental/langchain_experimental/sql/vector_sql.py @@ -102,7 +102,7 @@ class VectorSQLDatabaseChain(SQLDatabaseChain): .. code-block:: python from langchain_experimental.sql import SQLDatabaseChain - from langchain import OpenAI, SQLDatabase, OpenAIEmbeddings + from langchain.llms import OpenAI, SQLDatabase, OpenAIEmbeddings db = SQLDatabase(...) 
db_chain = VectorSQLDatabaseChain.from_llm(OpenAI(), db, OpenAIEmbeddings()) diff --git a/libs/experimental/tests/integration_tests/chains/test_cpal.py b/libs/experimental/tests/integration_tests/chains/test_cpal.py index f3775861732d94..fc332b2a2eaae9 100644 --- a/libs/experimental/tests/integration_tests/chains/test_cpal.py +++ b/libs/experimental/tests/integration_tests/chains/test_cpal.py @@ -6,7 +6,7 @@ from unittest import mock import pytest -from langchain import OpenAI +from langchain.llms import OpenAI from langchain.output_parsers import PydanticOutputParser from langchain.prompts.prompt import PromptTemplate diff --git a/libs/experimental/tests/integration_tests/chains/test_pal.py b/libs/experimental/tests/integration_tests/chains/test_pal.py index 355bfb8c1060f0..7ad8e661c221b2 100644 --- a/libs/experimental/tests/integration_tests/chains/test_pal.py +++ b/libs/experimental/tests/integration_tests/chains/test_pal.py @@ -1,6 +1,6 @@ """Test PAL chain.""" -from langchain import OpenAI +from langchain.llms import OpenAI from langchain.chains.pal.base import PALChain diff --git a/libs/langchain/langchain/agents/mrkl/base.py b/libs/langchain/langchain/agents/mrkl/base.py index 71b33a92f79ce9..177291c03f1da3 100644 --- a/libs/langchain/langchain/agents/mrkl/base.py +++ b/libs/langchain/langchain/agents/mrkl/base.py @@ -137,18 +137,7 @@ def _validate_tools(cls, tools: Sequence[BaseTool]) -> None: class MRKLChain(AgentExecutor): - """Chain that implements the MRKL system. - - Example: - .. code-block:: python - - from langchain import OpenAI, MRKLChain - from langchain.chains.mrkl.base import ChainConfig - llm = OpenAI(temperature=0) - prompt = PromptTemplate(...) - chains = [...] - mrkl = MRKLChain.from_chains(llm=llm, prompt=prompt) - """ + """[Deprecated] Chain that implements the MRKL system.""" @classmethod def from_chains( @@ -166,28 +155,6 @@ def from_chains( Returns: An initialized MRKL chain. - - Example: - .. code-block:: python - - from langchain import LLMMathChain, OpenAI, SerpAPIWrapper, MRKLChain - from langchain.chains.mrkl.base import ChainConfig - llm = OpenAI(temperature=0) - search = SerpAPIWrapper() - llm_math_chain = LLMMathChain(llm=llm) - chains = [ - ChainConfig( - action_name = "Search", - action=search.search, - action_description="useful for searching" - ), - ChainConfig( - action_name="Calculator", - action=llm_math_chain.run, - action_description="useful for doing math" - ) - ] - mrkl = MRKLChain.from_chains(llm, chains) """ tools = [ Tool( diff --git a/libs/langchain/langchain/agents/react/base.py b/libs/langchain/langchain/agents/react/base.py index 3d2e73ec20e568..323104cd30d6f5 100644 --- a/libs/langchain/langchain/agents/react/base.py +++ b/libs/langchain/langchain/agents/react/base.py @@ -131,14 +131,7 @@ def _validate_tools(cls, tools: Sequence[BaseTool]) -> None: class ReActChain(AgentExecutor): - """Chain that implements the ReAct paper. - - Example: - .. 
code-block:: python - - from langchain import ReActChain, OpenAI - react = ReAct(llm=OpenAI()) - """ + """[Deprecated] Chain that implements the ReAct paper.""" def __init__(self, llm: BaseLanguageModel, docstore: Docstore, **kwargs: Any): """Initialize with the LLM and a docstore.""" diff --git a/libs/langchain/langchain/agents/self_ask_with_search/base.py b/libs/langchain/langchain/agents/self_ask_with_search/base.py index f2db10b4f7200c..ddd77f2277e153 100644 --- a/libs/langchain/langchain/agents/self_ask_with_search/base.py +++ b/libs/langchain/langchain/agents/self_ask_with_search/base.py @@ -58,15 +58,7 @@ def llm_prefix(self) -> str: class SelfAskWithSearchChain(AgentExecutor): - """Chain that does self-ask with search. - - Example: - .. code-block:: python - - from langchain import SelfAskWithSearchChain, OpenAI, GoogleSerperAPIWrapper - search_chain = GoogleSerperAPIWrapper() - self_ask = SelfAskWithSearchChain(llm=OpenAI(), search_chain=search_chain) - """ + """[Deprecated] Chain that does self-ask with search.""" def __init__( self, diff --git a/libs/langchain/langchain/callbacks/context_callback.py b/libs/langchain/langchain/callbacks/context_callback.py index d95db1eceea086..12791d9119bb8f 100644 --- a/libs/langchain/langchain/callbacks/context_callback.py +++ b/libs/langchain/langchain/callbacks/context_callback.py @@ -63,8 +63,8 @@ class ContextCallbackHandler(BaseCallbackHandler): >>> chat(messages) Chain Example: - >>> from langchain import LLMChain - >>> from langchain.llms import ChatOpenAI + >>> from langchain.chains import LLMChain + >>> from langchain.chat_models import ChatOpenAI >>> from langchain.callbacks import ContextCallbackHandler >>> context_callback = ContextCallbackHandler( ... token="", diff --git a/libs/langchain/langchain/chains/conversation/base.py b/libs/langchain/langchain/chains/conversation/base.py index 033e73ffefb5a7..06e5481c30622c 100644 --- a/libs/langchain/langchain/chains/conversation/base.py +++ b/libs/langchain/langchain/chains/conversation/base.py @@ -14,7 +14,8 @@ class ConversationChain(LLMChain): Example: .. code-block:: python - from langchain import ConversationChain, OpenAI + from langchain.chains import ConversationChain + from langchain.llms import OpenAI conversation = ConversationChain(llm=OpenAI()) """ diff --git a/libs/langchain/langchain/chains/elasticsearch_database/base.py b/libs/langchain/langchain/chains/elasticsearch_database/base.py index f4cc3a6837b6d6..5ee33c9fb99a72 100644 --- a/libs/langchain/langchain/chains/elasticsearch_database/base.py +++ b/libs/langchain/langchain/chains/elasticsearch_database/base.py @@ -24,7 +24,8 @@ class ElasticsearchDatabaseChain(Chain): Example: .. code-block:: python - from langchain import ElasticsearchDatabaseChain, OpenAI + from langchain.chains import ElasticsearchDatabaseChain + from langchain.llms import OpenAI from elasticsearch import Elasticsearch database = Elasticsearch("http://localhost:9200") diff --git a/libs/langchain/langchain/chains/llm.py b/libs/langchain/langchain/chains/llm.py index ba8c107305b06a..efa16ff4fcbda8 100644 --- a/libs/langchain/langchain/chains/llm.py +++ b/libs/langchain/langchain/chains/llm.py @@ -32,7 +32,9 @@ class LLMChain(Chain): Example: .. 
code-block:: python - from langchain import LLMChain, OpenAI, PromptTemplate + from langchain.chains import LLMChain + from langchain.llms import OpenAI + from langchain.prompts import PromptTemplate prompt_template = "Tell me a {adjective} joke" prompt = PromptTemplate( input_variables=["adjective"], template=prompt_template diff --git a/libs/langchain/langchain/chains/llm_bash/base.py b/libs/langchain/langchain/chains/llm_bash/base.py index 9b1bd568bb543d..3d885a4bd4416a 100644 --- a/libs/langchain/langchain/chains/llm_bash/base.py +++ b/libs/langchain/langchain/chains/llm_bash/base.py @@ -23,7 +23,8 @@ class LLMBashChain(Chain): Example: .. code-block:: python - from langchain import LLMBashChain, OpenAI + from langchain.chains import LLMBashChain + from langchain.llms import OpenAI llm_bash = LLMBashChain.from_llm(OpenAI()) """ diff --git a/libs/langchain/langchain/chains/llm_checker/base.py b/libs/langchain/langchain/chains/llm_checker/base.py index d72d389398ad94..a9ff8178dac251 100644 --- a/libs/langchain/langchain/chains/llm_checker/base.py +++ b/libs/langchain/langchain/chains/llm_checker/base.py @@ -67,7 +67,8 @@ class LLMCheckerChain(Chain): Example: .. code-block:: python - from langchain import OpenAI, LLMCheckerChain + from langchain.llms import OpenAI + from langchain.chains import LLMCheckerChain llm = OpenAI(temperature=0.7) checker_chain = LLMCheckerChain.from_llm(llm) """ diff --git a/libs/langchain/langchain/chains/llm_math/base.py b/libs/langchain/langchain/chains/llm_math/base.py index fc54999b9100a9..ceddc3e9cbb355 100644 --- a/libs/langchain/langchain/chains/llm_math/base.py +++ b/libs/langchain/langchain/chains/llm_math/base.py @@ -26,7 +26,8 @@ class LLMMathChain(Chain): Example: .. code-block:: python - from langchain import LLMMathChain, OpenAI + from langchain.chains import LLMMathChain + from langchain.llms import OpenAI llm_math = LLMMathChain.from_llm(OpenAI()) """ diff --git a/libs/langchain/langchain/chains/llm_summarization_checker/base.py b/libs/langchain/langchain/chains/llm_summarization_checker/base.py index 0728571eb33f68..282b4318003de6 100644 --- a/libs/langchain/langchain/chains/llm_summarization_checker/base.py +++ b/libs/langchain/langchain/chains/llm_summarization_checker/base.py @@ -78,7 +78,8 @@ class LLMSummarizationCheckerChain(Chain): Example: .. code-block:: python - from langchain import OpenAI, LLMSummarizationCheckerChain + from langchain.llms import OpenAI + from langchain.chains import LLMSummarizationCheckerChain llm = OpenAI(temperature=0.0) checker_chain = LLMSummarizationCheckerChain.from_llm(llm) """ diff --git a/libs/langchain/langchain/chains/llm_symbolic_math/base.py b/libs/langchain/langchain/chains/llm_symbolic_math/base.py index c511b8cc373a6c..1ae92fa9d8be98 100644 --- a/libs/langchain/langchain/chains/llm_symbolic_math/base.py +++ b/libs/langchain/langchain/chains/llm_symbolic_math/base.py @@ -22,7 +22,8 @@ class LLMSymbolicMathChain(Chain): Example: .. code-block:: python - from langchain import LLMSymbolicMathChain, OpenAI + from langchain.chains import LLMSymbolicMathChain + from langchain.llms import OpenAI llm_symbolic_math = LLMSymbolicMathChain.from_llm(OpenAI()) """ diff --git a/libs/langchain/langchain/chains/natbot/base.py b/libs/langchain/langchain/chains/natbot/base.py index 0fbffe6885e266..e772ee7b428088 100644 --- a/libs/langchain/langchain/chains/natbot/base.py +++ b/libs/langchain/langchain/chains/natbot/base.py @@ -19,7 +19,7 @@ class NatBotChain(Chain): Example: .. 
code-block:: python - from langchain import NatBotChain + from langchain.chains import NatBotChain natbot = NatBotChain.from_default("Buy me a new hat.") """ diff --git a/libs/langchain/langchain/chains/openai_functions/openapi.py b/libs/langchain/langchain/chains/openai_functions/openapi.py index a06d147ff6177e..ae4d80a3520051 100644 --- a/libs/langchain/langchain/chains/openai_functions/openapi.py +++ b/libs/langchain/langchain/chains/openai_functions/openapi.py @@ -8,7 +8,7 @@ import requests from requests import Response -from langchain import LLMChain +from langchain.chains.llm import LLMChain from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain from langchain.chains.sequential import SequentialChain diff --git a/libs/langchain/langchain/chains/query_constructor/base.py b/libs/langchain/langchain/chains/query_constructor/base.py index 46c8c694917e29..c7836c6ac9b0b9 100644 --- a/libs/langchain/langchain/chains/query_constructor/base.py +++ b/libs/langchain/langchain/chains/query_constructor/base.py @@ -4,7 +4,8 @@ import json from typing import Any, Callable, List, Optional, Sequence -from langchain import FewShotPromptTemplate, LLMChain +from langchain.prompts.few_shot import FewShotPromptTemplate +from langchain.chains.llm import LLMChain from langchain.chains.query_constructor.ir import ( Comparator, Operator, diff --git a/libs/langchain/langchain/chains/query_constructor/prompt.py b/libs/langchain/langchain/chains/query_constructor/prompt.py index 22cf248af819c3..fb6a7901c5e4a8 100644 --- a/libs/langchain/langchain/chains/query_constructor/prompt.py +++ b/libs/langchain/langchain/chains/query_constructor/prompt.py @@ -1,5 +1,5 @@ # flake8: noqa -from langchain import PromptTemplate +from langchain.prompts import PromptTemplate SONG_DATA_SOURCE = """\ ```json diff --git a/libs/langchain/langchain/chains/transform.py b/libs/langchain/langchain/chains/transform.py index 66762fa4ab22a4..bdf51bf5c091a6 100644 --- a/libs/langchain/langchain/chains/transform.py +++ b/libs/langchain/langchain/chains/transform.py @@ -19,7 +19,7 @@ class TransformChain(Chain): Example: .. code-block:: python - from langchain import TransformChain + from langchain.chains import TransformChain transform_chain = TransformChain(input_variables=["text"], output_variables["entities"], transform=func()) """ diff --git a/libs/langchain/langchain/chat_loaders/imessage.py b/libs/langchain/langchain/chat_loaders/imessage.py index eed0cfea3795ee..41e061cb934ac7 100644 --- a/libs/langchain/langchain/chat_loaders/imessage.py +++ b/libs/langchain/langchain/chat_loaders/imessage.py @@ -3,7 +3,7 @@ from pathlib import Path from typing import TYPE_CHECKING, Iterator, List, Optional, Union -from langchain import schema +from langchain.schema import HumanMessage from langchain.chat_loaders.base import BaseChatLoader, ChatSession if TYPE_CHECKING: @@ -57,7 +57,7 @@ def _load_single_chat_session( Returns: ChatSession: Loaded chat session. 
""" - results: List[schema.HumanMessage] = [] + results: List[HumanMessage] = [] query = """ SELECT message.date, handle.id, message.text @@ -73,7 +73,7 @@ def _load_single_chat_session( for date, sender, text in messages: if text: # Skip empty messages results.append( - schema.HumanMessage( + HumanMessage( role=sender, content=text, additional_kwargs={ diff --git a/libs/langchain/langchain/chat_loaders/slack.py b/libs/langchain/langchain/chat_loaders/slack.py index 7c9f76c9650e83..2cf17d3abaede3 100644 --- a/libs/langchain/langchain/chat_loaders/slack.py +++ b/libs/langchain/langchain/chat_loaders/slack.py @@ -5,7 +5,7 @@ from pathlib import Path from typing import Dict, Iterator, List, Union -from langchain import schema +from langchain.schema import HumanMessage, AIMessage from langchain.chat_loaders.base import BaseChatLoader, ChatSession logger = logging.getLogger(__name__) @@ -28,7 +28,7 @@ def __init__( raise FileNotFoundError(f"File {self.zip_path} not found") def _load_single_chat_session(self, messages: List[Dict]) -> ChatSession: - results: List[Union[schema.AIMessage, schema.HumanMessage]] = [] + results: List[Union[AIMessage, HumanMessage]] = [] previous_sender = None for message in messages: if not isinstance(message, dict): @@ -50,7 +50,7 @@ def _load_single_chat_session(self, messages: List[Dict]) -> ChatSession: ) else: results.append( - schema.HumanMessage( + HumanMessage( role=sender, content=text, additional_kwargs={ diff --git a/libs/langchain/langchain/chat_loaders/telegram.py b/libs/langchain/langchain/chat_loaders/telegram.py index 12c30014ac1fa2..f4ac71cfb58847 100644 --- a/libs/langchain/langchain/chat_loaders/telegram.py +++ b/libs/langchain/langchain/chat_loaders/telegram.py @@ -6,7 +6,7 @@ from pathlib import Path from typing import Iterator, List, Union -from langchain import schema +from langchain.schema import HumanMessage, BaseMessage from langchain.chat_loaders.base import BaseChatLoader, ChatSession logger = logging.getLogger(__name__) @@ -69,7 +69,7 @@ def _load_single_chat_session_html(self, file_path: str) -> ChatSession: from_name = from_name_element.text.strip() text = message.select_one(".text").text.strip() results.append( - schema.HumanMessage( + HumanMessage( content=text, additional_kwargs={ "sender": from_name, @@ -94,14 +94,14 @@ def _load_single_chat_session_json(self, file_path: str) -> ChatSession: data = json.load(file) messages = data.get("messages", []) - results: List[schema.BaseMessage] = [] + results: List[BaseMessage] = [] for message in messages: text = message.get("text", "") timestamp = message.get("date", "") from_name = message.get("from", "") results.append( - schema.HumanMessage( + HumanMessage( content=text, additional_kwargs={ "sender": from_name, diff --git a/libs/langchain/langchain/chat_loaders/utils.py b/libs/langchain/langchain/chat_loaders/utils.py index da41cfd23f6ffc..71ae630b3778db 100644 --- a/libs/langchain/langchain/chat_loaders/utils.py +++ b/libs/langchain/langchain/chat_loaders/utils.py @@ -2,9 +2,8 @@ from copy import deepcopy from typing import Iterable, Iterator, List -from langchain import schema from langchain.chat_loaders.base import ChatSession -from langchain.schema.messages import BaseMessage +from langchain.schema.messages import BaseMessage, AIMessage def merge_chat_runs_in_session( @@ -65,7 +64,7 @@ def map_ai_messages_in_session(chat_sessions: ChatSession, sender: str) -> ChatS num_converted = 0 for message in chat_sessions["messages"]: if message.additional_kwargs.get("sender") == sender: - 
message = schema.AIMessage( + message = AIMessage( content=message.content, additional_kwargs=message.additional_kwargs.copy(), example=getattr(message, "example", None), diff --git a/libs/langchain/langchain/chat_loaders/whatsapp.py b/libs/langchain/langchain/chat_loaders/whatsapp.py index 39266485e23ea3..6117d2a4328499 100644 --- a/libs/langchain/langchain/chat_loaders/whatsapp.py +++ b/libs/langchain/langchain/chat_loaders/whatsapp.py @@ -4,9 +4,8 @@ import zipfile from typing import Iterator, List, Union -from langchain import schema from langchain.chat_loaders.base import BaseChatLoader, ChatSession -from langchain.schema import messages +from langchain.schema import HumanMessage, AIMessage logger = logging.getLogger(__name__) @@ -66,14 +65,14 @@ def _load_single_chat_session(self, file_path: str) -> ChatSession: current_message += " " + line.strip() if current_message: chat_lines.append(current_message) - results: List[Union[messages.HumanMessage, messages.AIMessage]] = [] + results: List[Union[HumanMessage, AIMessage]] = [] for line in chat_lines: result = self._message_line_regex.match(line.strip()) if result: timestamp, sender, text = result.groups() if not self._ignore_lines.match(text.strip()): results.append( - schema.HumanMessage( + HumanMessage( role=sender, content=text, additional_kwargs={ diff --git a/libs/langchain/langchain/evaluation/qa/eval_chain.py b/libs/langchain/langchain/evaluation/qa/eval_chain.py index 7e3af64168c48e..86866cc2003135 100644 --- a/libs/langchain/langchain/evaluation/qa/eval_chain.py +++ b/libs/langchain/langchain/evaluation/qa/eval_chain.py @@ -4,7 +4,7 @@ import re from typing import Any, List, Optional, Sequence -from langchain import PromptTemplate +from langchain.prompts import PromptTemplate from langchain.callbacks.manager import Callbacks from langchain.chains.llm import LLMChain from langchain.evaluation.qa.eval_prompt import CONTEXT_PROMPT, COT_PROMPT, PROMPT diff --git a/libs/langchain/langchain/indexes/graph.py b/libs/langchain/langchain/indexes/graph.py index c29f56de5e68b6..19cfc82bc321ee 100644 --- a/libs/langchain/langchain/indexes/graph.py +++ b/libs/langchain/langchain/indexes/graph.py @@ -1,7 +1,7 @@ """Graph Index Creator.""" from typing import Optional, Type -from langchain import BasePromptTemplate +from langchain.schema.prompt_template import BasePromptTemplate from langchain.chains.llm import LLMChain from langchain.graphs.networkx_graph import NetworkxEntityGraph, parse_triples from langchain.indexes.prompts.knowledge_triplet_extraction import ( diff --git a/libs/langchain/langchain/llms/pipelineai.py b/libs/langchain/langchain/llms/pipelineai.py index cff6c0f5b18cde..248666e6f44717 100644 --- a/libs/langchain/langchain/llms/pipelineai.py +++ b/libs/langchain/langchain/llms/pipelineai.py @@ -22,7 +22,7 @@ class PipelineAI(LLM, BaseModel): Example: .. code-block:: python - from langchain import PipelineAI + from langchain.llms import PipelineAI pipeline = PipelineAI(pipeline_key="") """ diff --git a/libs/langchain/langchain/llms/sagemaker_endpoint.py b/libs/langchain/langchain/llms/sagemaker_endpoint.py index e0383552b2f254..9505f17330780a 100644 --- a/libs/langchain/langchain/llms/sagemaker_endpoint.py +++ b/libs/langchain/langchain/llms/sagemaker_endpoint.py @@ -83,7 +83,7 @@ class SagemakerEndpoint(LLM): Example: .. 
code-block:: python - from langchain import SagemakerEndpoint + from langchain.llms import SagemakerEndpoint endpoint_name = ( "my-endpoint-name" ) diff --git a/libs/langchain/langchain/llms/writer.py b/libs/langchain/langchain/llms/writer.py index 5f83d16e694693..54a8a5fc21588c 100644 --- a/libs/langchain/langchain/llms/writer.py +++ b/libs/langchain/langchain/llms/writer.py @@ -18,7 +18,7 @@ class Writer(LLM): Example: .. code-block:: python - from langchain import Writer + from langchain.llms import Writer writer = Writer(model_id="palmyra-base") """ diff --git a/libs/langchain/langchain/prompts/prompt.py b/libs/langchain/langchain/prompts/prompt.py index 9f18af8a165add..ce87ec468d6c3b 100644 --- a/libs/langchain/langchain/prompts/prompt.py +++ b/libs/langchain/langchain/prompts/prompt.py @@ -26,7 +26,7 @@ class PromptTemplate(StringPromptTemplate): .. code-block:: python - from langchain import PromptTemplate + from langchain.prompts import PromptTemplate # Instantiation using from_template (recommended) prompt = PromptTemplate.from_template("Say {foo}") diff --git a/libs/langchain/langchain/retrievers/document_compressors/chain_extract.py b/libs/langchain/langchain/retrievers/document_compressors/chain_extract.py index 43a50be79da5ac..3a4dc1a3251264 100644 --- a/libs/langchain/langchain/retrievers/document_compressors/chain_extract.py +++ b/libs/langchain/langchain/retrievers/document_compressors/chain_extract.py @@ -4,7 +4,8 @@ import asyncio from typing import Any, Callable, Dict, Optional, Sequence -from langchain import LLMChain, PromptTemplate +from langchain.chains.llm import LLMChain +from langchain.prompts import PromptTemplate from langchain.callbacks.manager import Callbacks from langchain.retrievers.document_compressors.base import BaseDocumentCompressor from langchain.retrievers.document_compressors.chain_extract_prompt import ( diff --git a/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py b/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py index efe6886c88863b..c97f7b06b472bd 100644 --- a/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py +++ b/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py @@ -1,7 +1,8 @@ """Filter that uses an LLM to drop documents that aren't relevant to the query.""" from typing import Any, Callable, Dict, Optional, Sequence -from langchain import LLMChain, PromptTemplate +from langchain.chains import LLMChain +from langchain.prompts import PromptTemplate from langchain.callbacks.manager import Callbacks from langchain.output_parsers.boolean import BooleanOutputParser from langchain.retrievers.document_compressors.base import BaseDocumentCompressor diff --git a/libs/langchain/langchain/utilities/google_places_api.py b/libs/langchain/langchain/utilities/google_places_api.py index 22d4d0e6267ed6..abd45394741b56 100644 --- a/libs/langchain/langchain/utilities/google_places_api.py +++ b/libs/langchain/langchain/utilities/google_places_api.py @@ -24,7 +24,7 @@ class GooglePlacesAPIWrapper(BaseModel): .. 
code-block:: python - from langchain import GooglePlacesAPIWrapper + from langchain.utilities import GooglePlacesAPIWrapper gplaceapi = GooglePlacesAPIWrapper() """ diff --git a/libs/langchain/langchain/utilities/google_serper.py b/libs/langchain/langchain/utilities/google_serper.py index 951c00b8facd05..15a7614881864f 100644 --- a/libs/langchain/langchain/utilities/google_serper.py +++ b/libs/langchain/langchain/utilities/google_serper.py @@ -21,7 +21,7 @@ class GoogleSerperAPIWrapper(BaseModel): Example: .. code-block:: python - from langchain import GoogleSerperAPIWrapper + from langchain.utilities import GoogleSerperAPIWrapper google_serper = GoogleSerperAPIWrapper() """ diff --git a/libs/langchain/langchain/vectorstores/annoy.py b/libs/langchain/langchain/vectorstores/annoy.py index 22c569781bb33f..233b574f01db1f 100644 --- a/libs/langchain/langchain/vectorstores/annoy.py +++ b/libs/langchain/langchain/vectorstores/annoy.py @@ -40,7 +40,7 @@ class Annoy(VectorStore): Example: .. code-block:: python - from langchain import Annoy + from langchain.vectorstores import Annoy db = Annoy(embedding_function, index, docstore, index_to_docstore_id) """ @@ -350,7 +350,7 @@ def from_texts( Example: .. code-block:: python - from langchain import Annoy + from langchain.vectorstores import Annoy from langchain.embeddings import OpenAIEmbeddings embeddings = OpenAIEmbeddings() index = Annoy.from_texts(texts, embeddings) @@ -390,7 +390,7 @@ def from_embeddings( Example: .. code-block:: python - from langchain import Annoy + from langchain.vectorstores import Annoy from langchain.embeddings import OpenAIEmbeddings embeddings = OpenAIEmbeddings() text_embeddings = embeddings.embed_documents(texts) diff --git a/libs/langchain/langchain/vectorstores/dingo.py b/libs/langchain/langchain/vectorstores/dingo.py index e6d3419c794cf2..8527f8a31b628b 100644 --- a/libs/langchain/langchain/vectorstores/dingo.py +++ b/libs/langchain/langchain/vectorstores/dingo.py @@ -286,7 +286,7 @@ def from_texts( Example: .. code-block:: python - from langchain import Dingo + from langchain.vectorstores import Dingo from langchain.embeddings import OpenAIEmbeddings import dingodb sss diff --git a/libs/langchain/langchain/vectorstores/elastic_vector_search.py b/libs/langchain/langchain/vectorstores/elastic_vector_search.py index 978da1d5ce6ab3..2ac7bbd71dcdf8 100644 --- a/libs/langchain/langchain/vectorstores/elastic_vector_search.py +++ b/libs/langchain/langchain/vectorstores/elastic_vector_search.py @@ -62,7 +62,7 @@ class ElasticVectorSearch(VectorStore): Example: .. code-block:: python - from langchain import ElasticVectorSearch + from langchain.vectorstores import ElasticVectorSearch from langchain.embeddings import OpenAIEmbeddings embedding = OpenAIEmbeddings() @@ -98,7 +98,7 @@ class ElasticVectorSearch(VectorStore): Example: .. code-block:: python - from langchain import ElasticVectorSearch + from langchain.vectorstores import ElasticVectorSearch from langchain.embeddings import OpenAIEmbeddings embedding = OpenAIEmbeddings() @@ -282,7 +282,7 @@ def from_texts( Example: .. 
code-block:: python - from langchain import ElasticVectorSearch + from langchain.vectorstores import ElasticVectorSearch from langchain.embeddings import OpenAIEmbeddings embeddings = OpenAIEmbeddings() elastic_vector_search = ElasticVectorSearch.from_texts( diff --git a/libs/langchain/langchain/vectorstores/faiss.py b/libs/langchain/langchain/vectorstores/faiss.py index ec95c67e0b99cd..d3599f3f1473e1 100644 --- a/libs/langchain/langchain/vectorstores/faiss.py +++ b/libs/langchain/langchain/vectorstores/faiss.py @@ -593,7 +593,7 @@ def from_texts( Example: .. code-block:: python - from langchain import FAISS + from langchain.vectorstores import FAISS from langchain.embeddings import OpenAIEmbeddings embeddings = OpenAIEmbeddings() @@ -630,7 +630,7 @@ def from_embeddings( Example: .. code-block:: python - from langchain import FAISS + from langchain.vectorstores import FAISS from langchain.embeddings import OpenAIEmbeddings embeddings = OpenAIEmbeddings() diff --git a/libs/langchain/langchain/vectorstores/hologres.py b/libs/langchain/langchain/vectorstores/hologres.py index ce816e8f90439e..c387612c8462fd 100644 --- a/libs/langchain/langchain/vectorstores/hologres.py +++ b/libs/langchain/langchain/vectorstores/hologres.py @@ -403,7 +403,7 @@ def from_embeddings( Example: .. code-block:: python - from langchain import Hologres + from langchain.vectorstores import Hologres from langchain.embeddings import OpenAIEmbeddings embeddings = OpenAIEmbeddings() text_embeddings = embeddings.embed_documents(texts) diff --git a/libs/langchain/langchain/vectorstores/meilisearch.py b/libs/langchain/langchain/vectorstores/meilisearch.py index 05ffb9700fecd5..43ab448ad18674 100644 --- a/libs/langchain/langchain/vectorstores/meilisearch.py +++ b/libs/langchain/langchain/vectorstores/meilisearch.py @@ -281,7 +281,7 @@ def from_texts( Example: .. code-block:: python - from langchain import Meilisearch + from langchain.vectorstores import Meilisearch from langchain.embeddings import OpenAIEmbeddings import meilisearch diff --git a/libs/langchain/langchain/vectorstores/milvus.py b/libs/langchain/langchain/vectorstores/milvus.py index e5ee2a015ce0c8..5ae88f893ecbdf 100644 --- a/libs/langchain/langchain/vectorstores/milvus.py +++ b/libs/langchain/langchain/vectorstores/milvus.py @@ -84,7 +84,7 @@ class Milvus(VectorStore): Example: .. code-block:: python - from langchain import Milvus + from langchain.vectorstores import Milvus from langchain.embeddings import OpenAIEmbeddings embedding = OpenAIEmbeddings() diff --git a/libs/langchain/langchain/vectorstores/opensearch_vector_search.py b/libs/langchain/langchain/vectorstores/opensearch_vector_search.py index ddc2e72ac3bad7..319e98d92a2a94 100644 --- a/libs/langchain/langchain/vectorstores/opensearch_vector_search.py +++ b/libs/langchain/langchain/vectorstores/opensearch_vector_search.py @@ -319,7 +319,7 @@ class OpenSearchVectorSearch(VectorStore): Example: .. code-block:: python - from langchain import OpenSearchVectorSearch + from langchain.vectorstores import OpenSearchVectorSearch opensearch_vector_search = OpenSearchVectorSearch( "http://localhost:9200", "embeddings", @@ -670,7 +670,7 @@ def from_texts( Example: .. 
code-block:: python - from langchain import OpenSearchVectorSearch + from langchain.vectorstores import OpenSearchVectorSearch from langchain.embeddings import OpenAIEmbeddings embeddings = OpenAIEmbeddings() opensearch_vector_search = OpenSearchVectorSearch.from_texts( diff --git a/libs/langchain/langchain/vectorstores/pgvector.py b/libs/langchain/langchain/vectorstores/pgvector.py index 6186d1d7e6fc5d..96be6c2568fd77 100644 --- a/libs/langchain/langchain/vectorstores/pgvector.py +++ b/libs/langchain/langchain/vectorstores/pgvector.py @@ -523,7 +523,7 @@ def from_embeddings( Example: .. code-block:: python - from langchain import PGVector + from langchain.vectorstores import PGVector from langchain.embeddings import OpenAIEmbeddings embeddings = OpenAIEmbeddings() text_embeddings = embeddings.embed_documents(texts) diff --git a/libs/langchain/langchain/vectorstores/pinecone.py b/libs/langchain/langchain/vectorstores/pinecone.py index ec9fff790ab363..e84549f5ebb338 100644 --- a/libs/langchain/langchain/vectorstores/pinecone.py +++ b/libs/langchain/langchain/vectorstores/pinecone.py @@ -396,7 +396,7 @@ def from_texts( Example: .. code-block:: python - from langchain import Pinecone + from langchain.vectorstores import Pinecone from langchain.embeddings import OpenAIEmbeddings import pinecone diff --git a/libs/langchain/langchain/vectorstores/qdrant.py b/libs/langchain/langchain/vectorstores/qdrant.py index 0be0766f31143b..76608b5206f62c 100644 --- a/libs/langchain/langchain/vectorstores/qdrant.py +++ b/libs/langchain/langchain/vectorstores/qdrant.py @@ -75,7 +75,7 @@ class Qdrant(VectorStore): .. code-block:: python from qdrant_client import QdrantClient - from langchain import Qdrant + from langchain.vectorstores import Qdrant client = QdrantClient() collection_name = "MyCollection" @@ -1293,7 +1293,7 @@ def from_texts( Example: .. code-block:: python - from langchain import Qdrant + from langchain.vectorstores import Qdrant from langchain.embeddings import OpenAIEmbeddings embeddings = OpenAIEmbeddings() qdrant = Qdrant.from_texts(texts, embeddings, "localhost") @@ -1469,7 +1469,7 @@ async def afrom_texts( Example: .. code-block:: python - from langchain import Qdrant + from langchain.vectorstores import Qdrant from langchain.embeddings import OpenAIEmbeddings embeddings = OpenAIEmbeddings() qdrant = await Qdrant.afrom_texts(texts, embeddings, "localhost") diff --git a/libs/langchain/langchain/vectorstores/scann.py b/libs/langchain/langchain/vectorstores/scann.py index 434080e7e239d7..a7a80e7a877d28 100644 --- a/libs/langchain/langchain/vectorstores/scann.py +++ b/libs/langchain/langchain/vectorstores/scann.py @@ -382,7 +382,7 @@ def from_texts( Example: .. code-block:: python - from langchain import ScaNN + from langchain.vectorstores import ScaNN from langchain.embeddings import OpenAIEmbeddings embeddings = OpenAIEmbeddings() scann = ScaNN.from_texts(texts, embeddings) @@ -418,7 +418,7 @@ def from_embeddings( Example: .. code-block:: python - from langchain import ScaNN + from langchain.vectorstores import ScaNN from langchain.embeddings import OpenAIEmbeddings embeddings = OpenAIEmbeddings() text_embeddings = embeddings.embed_documents(texts) diff --git a/libs/langchain/langchain/vectorstores/vectara.py b/libs/langchain/langchain/vectorstores/vectara.py index 3e8a2549e284c5..603410842a8156 100644 --- a/libs/langchain/langchain/vectorstores/vectara.py +++ b/libs/langchain/langchain/vectorstores/vectara.py @@ -388,7 +388,7 @@ def from_texts( Example: .. 
code-block:: python - from langchain import Vectara + from langchain.vectorstores import Vectara vectara = Vectara.from_texts( texts, vectara_customer_id=customer_id, @@ -420,7 +420,7 @@ def from_files( Example: .. code-block:: python - from langchain import Vectara + from langchain.vectorstores import Vectara vectara = Vectara.from_files( files_list, vectara_customer_id=customer_id, diff --git a/libs/langchain/langchain/vectorstores/zilliz.py b/libs/langchain/langchain/vectorstores/zilliz.py index e62bfb1aa88072..9e592664e2632c 100644 --- a/libs/langchain/langchain/vectorstores/zilliz.py +++ b/libs/langchain/langchain/vectorstores/zilliz.py @@ -66,7 +66,7 @@ class Zilliz(Milvus): Example: .. code-block:: python - from langchain import Zilliz + from langchain.vectorstores import Zilliz from langchain.embeddings import OpenAIEmbeddings embedding = OpenAIEmbeddings() diff --git a/libs/langchain/tests/integration_tests/llms/test_fireworks.py b/libs/langchain/tests/integration_tests/llms/test_fireworks.py index e0dcb4fe433087..ab622f1ddfc1a1 100644 --- a/libs/langchain/tests/integration_tests/llms/test_fireworks.py +++ b/libs/langchain/tests/integration_tests/llms/test_fireworks.py @@ -3,7 +3,8 @@ import pytest -from langchain import LLMChain, PromptTemplate +from langchain.chains.llm import LLMChain +from langchain.prompts import PromptTemplate from langchain.chains import RetrievalQA from langchain.document_loaders import TextLoader from langchain.embeddings.openai import OpenAIEmbeddings diff --git a/libs/langchain/tests/integration_tests/llms/test_huggingface_text_gen_inference.py b/libs/langchain/tests/integration_tests/llms/test_huggingface_text_gen_inference.py index 46e63383a4b175..2d97dfa57d4e58 100644 --- a/libs/langchain/tests/integration_tests/llms/test_huggingface_text_gen_inference.py +++ b/libs/langchain/tests/integration_tests/llms/test_huggingface_text_gen_inference.py @@ -1,4 +1,4 @@ -from langchain import HuggingFaceTextGenInference +from langchain.llms import HuggingFaceTextGenInference def test_invocation_params_stop_sequences() -> None: diff --git a/libs/langchain/tests/integration_tests/llms/test_opaqueprompts.py b/libs/langchain/tests/integration_tests/llms/test_opaqueprompts.py index 1a2fb604bf9051..b1ba50985be949 100644 --- a/libs/langchain/tests/integration_tests/llms/test_opaqueprompts.py +++ b/libs/langchain/tests/integration_tests/llms/test_opaqueprompts.py @@ -1,5 +1,6 @@ import langchain.utilities.opaqueprompts as op -from langchain import LLMChain, PromptTemplate +from langchain.chains.llm import LLMChain +from langchain.prompts import PromptTemplate from langchain.llms import OpenAI from langchain.llms.opaqueprompts import OpaquePrompts from langchain.memory import ConversationBufferWindowMemory diff --git a/libs/langchain/tests/integration_tests/llms/test_symblai_nebula.py b/libs/langchain/tests/integration_tests/llms/test_symblai_nebula.py index 35ebd968383eb8..c4da70356dc3a5 100644 --- a/libs/langchain/tests/integration_tests/llms/test_symblai_nebula.py +++ b/libs/langchain/tests/integration_tests/llms/test_symblai_nebula.py @@ -1,5 +1,6 @@ """Test Nebula API wrapper.""" -from langchain import LLMChain, PromptTemplate +from langchain.chains.llm import LLMChain +from langchain.prompts.prompt import PromptTemplate from langchain.llms.symblai_nebula import Nebula diff --git a/libs/langchain/tests/unit_tests/chains/test_api.py b/libs/langchain/tests/unit_tests/chains/test_api.py index 2a6854fb4fa046..ce154f3150d4f6 100644 --- 
a/libs/langchain/tests/unit_tests/chains/test_api.py +++ b/libs/langchain/tests/unit_tests/chains/test_api.py @@ -5,7 +5,7 @@ import pytest -from langchain import LLMChain +from langchain.chains.llm import LLMChain from langchain.chains.api.base import APIChain from langchain.chains.api.prompt import API_RESPONSE_PROMPT, API_URL_PROMPT from langchain.utilities.requests import TextRequestsWrapper diff --git a/libs/langchain/tests/unit_tests/chains/test_combine_documents.py b/libs/langchain/tests/unit_tests/chains/test_combine_documents.py index a970c33cd4d292..5c5c9af4a9583a 100644 --- a/libs/langchain/tests/unit_tests/chains/test_combine_documents.py +++ b/libs/langchain/tests/unit_tests/chains/test_combine_documents.py @@ -4,7 +4,7 @@ import pytest -from langchain import PromptTemplate +from langchain.prompts.prompt import PromptTemplate from langchain.chains.combine_documents.reduce import ( _collapse_docs, _split_list_of_docs, diff --git a/libs/langchain/tests/unit_tests/chat_loaders/test_telegram.py b/libs/langchain/tests/unit_tests/chat_loaders/test_telegram.py index 7984adc194b82d..a663f2a941c834 100644 --- a/libs/langchain/tests/unit_tests/chat_loaders/test_telegram.py +++ b/libs/langchain/tests/unit_tests/chat_loaders/test_telegram.py @@ -6,13 +6,13 @@ import pytest -from langchain import schema +from langchain.schema import BaseMessage, HumanMessage, AIMessage from langchain.chat_loaders import telegram, utils def _assert_messages_are_equal( - actual_messages: Sequence[schema.BaseMessage], - expected_messages: Sequence[schema.BaseMessage], + actual_messages: Sequence[BaseMessage], + expected_messages: Sequence[BaseMessage], ) -> None: assert len(actual_messages) == len(expected_messages) for actual, expected in zip(actual_messages, expected_messages): @@ -47,21 +47,21 @@ def _check_telegram_chat_loader(path: str) -> None: assert len(session["messages"]) > 0 assert session["messages"][0].content == "i refuse to converse with you" expected_content = [ - schema.HumanMessage( + HumanMessage( content="i refuse to converse with you", additional_kwargs={ "sender": "Jimmeny Marvelton", "events": [{"message_time": "23.08.2023 13:11:23 UTC-08:00"}], }, ), - schema.AIMessage( + AIMessage( content="Hi nemesis", additional_kwargs={ "sender": "Batman & Robin", "events": [{"message_time": "23.08.2023 13:13:20 UTC-08:00"}], }, ), - schema.HumanMessage( + HumanMessage( content="we meet again\n\nyou will not trick me this time", additional_kwargs={ "sender": "Jimmeny Marvelton", diff --git a/libs/langchain/tests/unit_tests/schema/__init__.py b/libs/langchain/tests/unit_tests/schema/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/libs/langchain/tests/unit_tests/schema/runnable/test_locals.py b/libs/langchain/tests/unit_tests/schema/runnable/test_locals.py index ee07c0cfc6ea1b..5e999cadc905e8 100644 --- a/libs/langchain/tests/unit_tests/schema/runnable/test_locals.py +++ b/libs/langchain/tests/unit_tests/schema/runnable/test_locals.py @@ -2,7 +2,7 @@ import pytest -from langchain import PromptTemplate +from langchain.prompts import PromptTemplate from langchain.llms import FakeListLLM from langchain.schema.runnable import ( GetLocalVar, diff --git a/libs/langchain/tests/unit_tests/schema/runnable/test_runnable.py b/libs/langchain/tests/unit_tests/schema/runnable/test_runnable.py index 9b1275e95bf16c..3ad683f6e61ee4 100644 --- a/libs/langchain/tests/unit_tests/schema/runnable/test_runnable.py +++ b/libs/langchain/tests/unit_tests/schema/runnable/test_runnable.py @@ 
-7,7 +7,7 @@ from pytest_mock import MockerFixture from syrupy import SnapshotAssertion -from langchain import PromptTemplate +from langchain.prompts import PromptTemplate from langchain.callbacks.manager import Callbacks from langchain.callbacks.tracers.base import BaseTracer from langchain.callbacks.tracers.schemas import Run From 50e9bfce9fac5cfa45ed7ba57611db57941c7eb9 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 15:47:54 -0700 Subject: [PATCH 02/50] stop importing from init --- .github/workflows/imports.yml | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 .github/workflows/imports.yml diff --git a/.github/workflows/imports.yml b/.github/workflows/imports.yml new file mode 100644 index 00000000000000..92986b6d3ae107 --- /dev/null +++ b/.github/workflows/imports.yml @@ -0,0 +1,20 @@ +--- +name: Imports + +on: + push: + branches: [master] + pull_request: + branches: [master] + +jobs: + build: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Run Bash script + run: git grep 'from langchain import' | grep -vE 'from langchain import (__version__)' + From bc7535a9c73e75a05015aa3455a50405acb25a6e Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 15:49:33 -0700 Subject: [PATCH 03/50] Update docs/extras/guides/model_laboratory.ipynb --- docs/extras/guides/model_laboratory.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/guides/model_laboratory.ipynb b/docs/extras/guides/model_laboratory.ipynb index ec275e4a25fd84..3f4614ca9c20ba 100644 --- a/docs/extras/guides/model_laboratory.ipynb +++ b/docs/extras/guides/model_laboratory.ipynb @@ -19,7 +19,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.chains import LLMChain, OpenAI, Cohere, HuggingFaceHub, PromptTemplate\n", + "from langchain.chains import LLMChain\nfrom langchain.llms import OpenAI, Cohere, HuggingFaceHub\nfrom langchain.prompts import PromptTemplate\n", "from langchain.model_laboratory import ModelLaboratory" ] }, From 284b9ae8502dc4c72da96b092ac69e89262654f5 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:44:14 -0700 Subject: [PATCH 04/50] Update docs/extras/integrations/llms/bittensor.ipynb --- docs/extras/integrations/llms/bittensor.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/integrations/llms/bittensor.ipynb b/docs/extras/integrations/llms/bittensor.ipynb index e585669ec204f3..c50f96fd75119e 100644 --- a/docs/extras/integrations/llms/bittensor.ipynb +++ b/docs/extras/integrations/llms/bittensor.ipynb @@ -123,7 +123,7 @@ " AgentExecutor,\n", ")\n", "from langchain.memory import ConversationBufferMemory\n", - "from langchain.chains import LLMChain, PromptTemplate\n", + "from langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\n", "from langchain.utilities import GoogleSearchAPIWrapper, SerpAPIWrapper\n", "from langchain.llms import NIBittensorLLM\n", "\n", From 10e419d84ed0accd725e7368a9321085283299ba Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:44:24 -0700 Subject: [PATCH 05/50] Update docs/extras/integrations/llms/opaqueprompts.ipynb --- docs/extras/integrations/llms/opaqueprompts.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/integrations/llms/opaqueprompts.ipynb b/docs/extras/integrations/llms/opaqueprompts.ipynb index c598450cb72b31..c2ec73fe41a8df 100644 --- 
a/docs/extras/integrations/llms/opaqueprompts.ipynb +++ b/docs/extras/integrations/llms/opaqueprompts.ipynb @@ -59,7 +59,7 @@ "outputs": [], "source": [ "import langchain\n", - "from langchain.chains import LLMChain, PromptTemplate\n", + "from langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\n", "from langchain.callbacks.stdout import StdOutCallbackHandler\n", "from langchain.llms import OpenAI\n", "from langchain.memory import ConversationBufferWindowMemory\n", From f2a56d733c9d3c2d7a8392b2ebcbcb7b58cf4bda Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:44:39 -0700 Subject: [PATCH 06/50] Update docs/extras/integrations/llms/sagemaker.ipynb --- docs/extras/integrations/llms/sagemaker.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/integrations/llms/sagemaker.ipynb b/docs/extras/integrations/llms/sagemaker.ipynb index 7b21a56e56e221..067aeaaa5600de 100644 --- a/docs/extras/integrations/llms/sagemaker.ipynb +++ b/docs/extras/integrations/llms/sagemaker.ipynb @@ -92,7 +92,7 @@ "source": [ "from typing import Dict\n", "\n", - "from langchain.prompts import PromptTemplate, SagemakerEndpoint\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.llms import SagemakerEndpoint\n", "from langchain.llms.sagemaker_endpoint import LLMContentHandler\n", "from langchain.chains.question_answering import load_qa_chain\n", "import json\n", From c61a05d6b080ab4b9e7724d14e070134ee2be601 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:44:46 -0700 Subject: [PATCH 07/50] Update docs/extras/integrations/memory/motorhead_memory.ipynb --- docs/extras/integrations/memory/motorhead_memory.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/integrations/memory/motorhead_memory.ipynb b/docs/extras/integrations/memory/motorhead_memory.ipynb index 8402315a07ad8d..87ded21c3e68db 100644 --- a/docs/extras/integrations/memory/motorhead_memory.ipynb +++ b/docs/extras/integrations/memory/motorhead_memory.ipynb @@ -20,7 +20,7 @@ "outputs": [], "source": [ "from langchain.memory.motorhead_memory import MotorheadMemory\n", - "from langchain.llms import OpenAI, LLMChain, PromptTemplate\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\n", "\n", "template = \"\"\"You are a chatbot having a conversation with a human.\n", "\n", From ffa69882c01695a2dcc14cfcd501d12ffbae0d93 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:44:53 -0700 Subject: [PATCH 08/50] Update docs/extras/integrations/memory/motorhead_memory_managed.ipynb --- docs/extras/integrations/memory/motorhead_memory_managed.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/integrations/memory/motorhead_memory_managed.ipynb b/docs/extras/integrations/memory/motorhead_memory_managed.ipynb index e3e3e3d3cb6a6c..bfaee89cbcc1fe 100644 --- a/docs/extras/integrations/memory/motorhead_memory_managed.ipynb +++ b/docs/extras/integrations/memory/motorhead_memory_managed.ipynb @@ -21,7 +21,7 @@ "outputs": [], "source": [ "from langchain.memory.motorhead_memory import MotorheadMemory\n", - "from langchain.llms import OpenAI, LLMChain, PromptTemplate\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\n", "\n", "template = \"\"\"You are a chatbot having a conversation with a human.\n", "\n", From 
0beb465adb935dd3b9eb32a9b397b7a6c0439171 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:45:01 -0700 Subject: [PATCH 09/50] Update docs/extras/integrations/providers/mlflow_ai_gateway.mdx --- docs/extras/integrations/providers/mlflow_ai_gateway.mdx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/extras/integrations/providers/mlflow_ai_gateway.mdx b/docs/extras/integrations/providers/mlflow_ai_gateway.mdx index 27d183317e579b..6931a90cf970d5 100644 --- a/docs/extras/integrations/providers/mlflow_ai_gateway.mdx +++ b/docs/extras/integrations/providers/mlflow_ai_gateway.mdx @@ -134,7 +134,8 @@ Databricks MLflow AI Gateway is in private preview. Please contact a Databricks representative to enroll in the preview. ```python -from langchain.chains import LLMChain, PromptTemplate +from langchain.chains import LLMChain +from langchain.prompts import PromptTemplate from langchain.llms import MlflowAIGateway gateway = MlflowAIGateway( From 87838d0fcdde06db65b493697746b3bb300c0441 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:45:11 -0700 Subject: [PATCH 10/50] Update docs/extras/integrations/providers/jina.mdx --- docs/extras/integrations/providers/jina.mdx | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/extras/integrations/providers/jina.mdx b/docs/extras/integrations/providers/jina.mdx index 3779babf95b6d1..bec3e9fd789855 100644 --- a/docs/extras/integrations/providers/jina.mdx +++ b/docs/extras/integrations/providers/jina.mdx @@ -37,7 +37,8 @@ from lcserve import serving @serving def ask(input: str) -> str: - from langchain.chains import LLMChain, OpenAI + from langchain.chains import LLMChain + from langchain.llms import OpenAI from langchain.agents import AgentExecutor, ZeroShotAgent tools = [...] 
# list of tools
From 2d3064e6fd84ffea759808854970f056c2a019f4 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:45:18 -0700 Subject: [PATCH 11/50] Update docs/extras/integrations/toolkits/vectorstore.ipynb --- docs/extras/integrations/toolkits/vectorstore.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/integrations/toolkits/vectorstore.ipynb b/docs/extras/integrations/toolkits/vectorstore.ipynb index b9b5c42e8918ea..75909b64063aeb 100644 --- a/docs/extras/integrations/toolkits/vectorstore.ipynb +++ b/docs/extras/integrations/toolkits/vectorstore.ipynb @@ -30,7 +30,7 @@ "from langchain.embeddings.openai import OpenAIEmbeddings\n", "from langchain.vectorstores import Chroma\n", "from langchain.text_splitter import CharacterTextSplitter\n", - "from langchain.llms import OpenAI, VectorDBQA\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import VectorDBQA\n", "\n", "llm = OpenAI(temperature=0)" ]
From 865cd63f8142c46141417314e63db2973d95434c Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:45:25 -0700 Subject: [PATCH 12/50] Update docs/extras/integrations/vectorstores/starrocks.ipynb --- docs/extras/integrations/vectorstores/starrocks.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/integrations/vectorstores/starrocks.ipynb b/docs/extras/integrations/vectorstores/starrocks.ipynb index 6f51f25a13d6a7..39d4de18ae15b7 100644 --- a/docs/extras/integrations/vectorstores/starrocks.ipynb +++ b/docs/extras/integrations/vectorstores/starrocks.ipynb @@ -62,7 +62,7 @@ "from langchain.vectorstores.starrocks import StarRocksSettings\n", "from langchain.vectorstores import Chroma\n", "from langchain.text_splitter import CharacterTextSplitter, TokenTextSplitter\n", - "from langchain.llms import OpenAI, VectorDBQA\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import VectorDBQA\n", "from langchain.document_loaders import DirectoryLoader\n", "from langchain.chains import RetrievalQA\n", "from langchain.document_loaders import TextLoader, UnstructuredMarkdownLoader\n",
From c9432e2055f801761cdc1e946ee4522fee933435 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:45:32 -0700 Subject: [PATCH 13/50] Update docs/extras/modules/agents/agent_types/react_docstore.ipynb --- docs/extras/modules/agents/agent_types/react_docstore.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/agents/agent_types/react_docstore.ipynb b/docs/extras/modules/agents/agent_types/react_docstore.ipynb index 68aeaaf96f06b1..ede8d0a097b9e8 100644 --- a/docs/extras/modules/agents/agent_types/react_docstore.ipynb +++ b/docs/extras/modules/agents/agent_types/react_docstore.ipynb @@ -17,7 +17,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.llms import OpenAI, Wikipedia\n", + "from langchain.llms import OpenAI\nfrom langchain.docstore import Wikipedia\n", "from langchain.agents import initialize_agent, Tool\n", "from langchain.agents import AgentType\n", "from langchain.agents.react.base import DocstoreExplorer\n",
From 6609750538bce974d7ceaa6413f8f9ab0c2dfe95 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:45:39 -0700 Subject: [PATCH 14/50] Update docs/extras/modules/agents/agent_types/self_ask_with_search.ipynb --- .../modules/agents/agent_types/self_ask_with_search.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git
a/docs/extras/modules/agents/agent_types/self_ask_with_search.ipynb b/docs/extras/modules/agents/agent_types/self_ask_with_search.ipynb index 860f56a7290225..59c9c9ff07277b 100644 --- a/docs/extras/modules/agents/agent_types/self_ask_with_search.ipynb +++ b/docs/extras/modules/agents/agent_types/self_ask_with_search.ipynb @@ -45,7 +45,7 @@ } ], "source": [ - "from langchain.llms import OpenAI, SerpAPIWrapper\n", + "from langchain.llms import OpenAI\nfrom langchain.utilities import SerpAPIWrapper\n", "from langchain.agents import initialize_agent, Tool\n", "from langchain.agents import AgentType\n", "\n", From 6a7c07010d1d218b728d7b55616cc29a95f93fac Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:45:46 -0700 Subject: [PATCH 15/50] Update docs/extras/modules/agents/how_to/agent_vectorstore.ipynb --- docs/extras/modules/agents/how_to/agent_vectorstore.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/agents/how_to/agent_vectorstore.ipynb b/docs/extras/modules/agents/how_to/agent_vectorstore.ipynb index 2af0600ad70366..611d61e70da700 100644 --- a/docs/extras/modules/agents/how_to/agent_vectorstore.ipynb +++ b/docs/extras/modules/agents/how_to/agent_vectorstore.ipynb @@ -166,7 +166,7 @@ "from langchain.agents import AgentType\n", "from langchain.tools import BaseTool\n", "from langchain.llms import OpenAI\n", - "from langchain.chains import LLMMathChain, SerpAPIWrapper" + "from langchain.chains import LLMMathChain\nfrom langchain.utilities import SerpAPIWrapper" ] }, { From e602c5ea95af78b0d5899f35759b9b1823b1bbee Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:45:53 -0700 Subject: [PATCH 16/50] Update docs/extras/modules/agents/how_to/chatgpt_clone.ipynb --- docs/extras/modules/agents/how_to/chatgpt_clone.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/agents/how_to/chatgpt_clone.ipynb b/docs/extras/modules/agents/how_to/chatgpt_clone.ipynb index 8375fdd76b5503..009ff2eb961d14 100644 --- a/docs/extras/modules/agents/how_to/chatgpt_clone.ipynb +++ b/docs/extras/modules/agents/how_to/chatgpt_clone.ipynb @@ -47,7 +47,7 @@ } ], "source": [ - "from langchain.llms import OpenAI, ConversationChain, LLMChain, PromptTemplate\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import ConversationChain, LLMChain\nfrom langchain.prompts import PromptTemplate\n", "from langchain.memory import ConversationBufferWindowMemory\n", "\n", "\n", From 2bf326b77d3e18b93547718da4183a4290682487 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:46:02 -0700 Subject: [PATCH 17/50] Update docs/extras/modules/agents/how_to/custom_agent.ipynb --- docs/extras/modules/agents/how_to/custom_agent.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/agents/how_to/custom_agent.ipynb b/docs/extras/modules/agents/how_to/custom_agent.ipynb index 598125cd9ab77a..071deb1486db1e 100644 --- a/docs/extras/modules/agents/how_to/custom_agent.ipynb +++ b/docs/extras/modules/agents/how_to/custom_agent.ipynb @@ -26,7 +26,7 @@ "outputs": [], "source": [ "from langchain.agents import Tool, AgentExecutor, BaseSingleActionAgent\n", - "from langchain.llms import OpenAI, SerpAPIWrapper" + "from langchain.llms import OpenAI\nfrom langchain.utilities import SerpAPIWrapper" ] }, { From 7b692d22bfe081609f2dec9307a38cf7f4125990 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:46:08 -0700 Subject: [PATCH 18/50] Update 
docs/extras/modules/agents/how_to/custom_agent_with_tool_retrieval.ipynb --- .../agents/how_to/custom_agent_with_tool_retrieval.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/agents/how_to/custom_agent_with_tool_retrieval.ipynb b/docs/extras/modules/agents/how_to/custom_agent_with_tool_retrieval.ipynb index 9ec20626709298..5471eec006f670 100644 --- a/docs/extras/modules/agents/how_to/custom_agent_with_tool_retrieval.ipynb +++ b/docs/extras/modules/agents/how_to/custom_agent_with_tool_retrieval.ipynb @@ -38,7 +38,7 @@ " AgentOutputParser,\n", ")\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain\n", + "from langchain.llms import OpenAI\nfrom langchain.utilities import SerpAPIWrapper\nfrom langchain.chains import LLMChain\n", "from typing import List, Union\n", "from langchain.schema import AgentAction, AgentFinish\n", "import re" From d6663aecec1be7055739b47c815d40d2fc3f755d Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:47:24 -0700 Subject: [PATCH 19/50] Update docs/extras/modules/agents/how_to/custom_mrkl_agent.ipynb --- docs/extras/modules/agents/how_to/custom_mrkl_agent.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/agents/how_to/custom_mrkl_agent.ipynb b/docs/extras/modules/agents/how_to/custom_mrkl_agent.ipynb index ec073b3b0ca30f..7b7ddebe3a2a12 100644 --- a/docs/extras/modules/agents/how_to/custom_mrkl_agent.ipynb +++ b/docs/extras/modules/agents/how_to/custom_mrkl_agent.ipynb @@ -48,7 +48,7 @@ "outputs": [], "source": [ "from langchain.agents import ZeroShotAgent, Tool, AgentExecutor\n", - "from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain" + "from langchain.llms import OpenAI\nfrom langchain.utilities import SerpAPIWrapper\nfrom langchain.chains import LLMChain" ] }, { From 1055a74dc41dda81946aa65fbe5df7c3ce44737a Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:47:30 -0700 Subject: [PATCH 20/50] Update docs/extras/modules/agents/how_to/custom_multi_action_agent.ipynb --- .../modules/agents/how_to/custom_multi_action_agent.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/agents/how_to/custom_multi_action_agent.ipynb b/docs/extras/modules/agents/how_to/custom_multi_action_agent.ipynb index 615e71346185a9..11c204cce7e0a9 100644 --- a/docs/extras/modules/agents/how_to/custom_multi_action_agent.ipynb +++ b/docs/extras/modules/agents/how_to/custom_multi_action_agent.ipynb @@ -26,7 +26,7 @@ "outputs": [], "source": [ "from langchain.agents import Tool, AgentExecutor, BaseMultiActionAgent\n", - "from langchain.llms import OpenAI, SerpAPIWrapper" + "from langchain.llms import OpenAI\nfrom langchain.utilities import SerpAPIWrapper" ] }, { From 563d7806f5b36aedee293a5052bda0377ce29ebb Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:47:35 -0700 Subject: [PATCH 21/50] Update docs/extras/modules/agents/how_to/sharedmemory_for_tools.ipynb --- docs/extras/modules/agents/how_to/sharedmemory_for_tools.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/agents/how_to/sharedmemory_for_tools.ipynb b/docs/extras/modules/agents/how_to/sharedmemory_for_tools.ipynb index d76133f9a8d681..ece210c841552c 100644 --- a/docs/extras/modules/agents/how_to/sharedmemory_for_tools.ipynb +++ b/docs/extras/modules/agents/how_to/sharedmemory_for_tools.ipynb @@ -24,7 +24,7 @@ "source": [ "from 
langchain.agents import ZeroShotAgent, Tool, AgentExecutor\n", "from langchain.memory import ConversationBufferMemory, ReadOnlySharedMemory\n", - "from langchain.llms import OpenAI, LLMChain, PromptTemplate\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\n", "from langchain.utilities import GoogleSearchAPIWrapper" ] }, From 447517e1255edf3e966cba74564fb990c9f4824f Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:47:42 -0700 Subject: [PATCH 22/50] Update docs/extras/modules/agents/tools/custom_tools.ipynb --- docs/extras/modules/agents/tools/custom_tools.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/agents/tools/custom_tools.ipynb b/docs/extras/modules/agents/tools/custom_tools.ipynb index 8bdbe2ac44f032..688166e48b32eb 100644 --- a/docs/extras/modules/agents/tools/custom_tools.ipynb +++ b/docs/extras/modules/agents/tools/custom_tools.ipynb @@ -29,7 +29,7 @@ "outputs": [], "source": [ "# Import things that are needed generically\n", - "from langchain.chains import LLMMathChain, SerpAPIWrapper\n", + "from langchain.chains import LLMMathChain\nfrom langchain.utilities import SerpAPIWrapper\n", "from langchain.agents import AgentType, initialize_agent\n", "from langchain.chat_models import ChatOpenAI\n", "from langchain.tools import BaseTool, StructuredTool, Tool, tool" From 54a0900093b7528dc39fb8b5fa3fc8d957428c5b Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:47:47 -0700 Subject: [PATCH 23/50] Update docs/extras/modules/agents/tools/custom_tools.ipynb --- docs/extras/modules/agents/tools/custom_tools.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/agents/tools/custom_tools.ipynb b/docs/extras/modules/agents/tools/custom_tools.ipynb index 688166e48b32eb..086c162705686a 100644 --- a/docs/extras/modules/agents/tools/custom_tools.ipynb +++ b/docs/extras/modules/agents/tools/custom_tools.ipynb @@ -774,7 +774,7 @@ "from langchain.agents import initialize_agent, Tool\n", "from langchain.agents import AgentType\n", "from langchain.llms import OpenAI\n", - "from langchain.chains import LLMMathChain, SerpAPIWrapper\n", + "from langchain.chains import LLMMathChain\nfrom langchain.utilities import SerpAPIWrapper\n", "\n", "search = SerpAPIWrapper()\n", "tools = [\n", From 620be7322fe96de7576963845cc07e75ae21e7db Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:47:53 -0700 Subject: [PATCH 24/50] Update docs/extras/modules/chains/how_to/from_hub.ipynb --- docs/extras/modules/chains/how_to/from_hub.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/chains/how_to/from_hub.ipynb b/docs/extras/modules/chains/how_to/from_hub.ipynb index 0e92e9b664b0cb..e862b527a1d7dd 100644 --- a/docs/extras/modules/chains/how_to/from_hub.ipynb +++ b/docs/extras/modules/chains/how_to/from_hub.ipynb @@ -73,7 +73,7 @@ "from langchain.embeddings.openai import OpenAIEmbeddings\n", "from langchain.vectorstores import Chroma\n", "from langchain.text_splitter import CharacterTextSplitter\n", - "from langchain.llms import OpenAI, VectorDBQA" + "from langchain.llms import OpenAI\nfrom langchain.chains import VectorDBQA" ] }, { From 3ead58d1c15d3a24535fc38bfbd135cb6173140b Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:47:59 -0700 Subject: [PATCH 25/50] Update docs/extras/modules/chains/how_to/serialization.ipynb --- 
docs/extras/modules/chains/how_to/serialization.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/chains/how_to/serialization.ipynb b/docs/extras/modules/chains/how_to/serialization.ipynb index 2b6875c8c699f5..555ff1beaa0ac2 100644 --- a/docs/extras/modules/chains/how_to/serialization.ipynb +++ b/docs/extras/modules/chains/how_to/serialization.ipynb @@ -25,7 +25,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.prompts import PromptTemplate, OpenAI, LLMChain\n", + "from langchain.prompts import PromptTemplate\nfrom langchain.llms import OpenAI\nfrom langchain.chains import LLMChain\n", "\n", "template = \"\"\"Question: {question}\n", "\n", From d52a853518ee3b0198d61bdcf09971be344e243f Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:48:10 -0700 Subject: [PATCH 26/50] Update docs/extras/modules/memory/agent_with_memory.ipynb --- docs/extras/modules/memory/agent_with_memory.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/memory/agent_with_memory.ipynb b/docs/extras/modules/memory/agent_with_memory.ipynb index 11cf34dc288855..1ece76969d267a 100644 --- a/docs/extras/modules/memory/agent_with_memory.ipynb +++ b/docs/extras/modules/memory/agent_with_memory.ipynb @@ -29,7 +29,7 @@ "source": [ "from langchain.agents import ZeroShotAgent, Tool, AgentExecutor\n", "from langchain.memory import ConversationBufferMemory\n", - "from langchain.llms import OpenAI, LLMChain\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import LLMChain\n", "from langchain.utilities import GoogleSearchAPIWrapper" ] }, From 5083e7b1e224fb7262d820ff18bd5dcc9daf9b29 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:48:15 -0700 Subject: [PATCH 27/50] Update docs/extras/use_cases/more/agents/autonomous_agents/meta_prompt.ipynb --- .../use_cases/more/agents/autonomous_agents/meta_prompt.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/use_cases/more/agents/autonomous_agents/meta_prompt.ipynb b/docs/extras/use_cases/more/agents/autonomous_agents/meta_prompt.ipynb index 05441f17a0d45a..c5a532900c6412 100644 --- a/docs/extras/use_cases/more/agents/autonomous_agents/meta_prompt.ipynb +++ b/docs/extras/use_cases/more/agents/autonomous_agents/meta_prompt.ipynb @@ -56,7 +56,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.llms import OpenAI, LLMChain, PromptTemplate\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\n", "from langchain.memory import ConversationBufferWindowMemory" ] }, From 262b45522d4d744dc8b161f3373a70f111fb0868 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:48:20 -0700 Subject: [PATCH 28/50] Update docs/extras/use_cases/more/code_writing/llm_math.ipynb --- docs/extras/use_cases/more/code_writing/llm_math.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/use_cases/more/code_writing/llm_math.ipynb b/docs/extras/use_cases/more/code_writing/llm_math.ipynb index ae3354a35a283d..edc56654bbc58a 100644 --- a/docs/extras/use_cases/more/code_writing/llm_math.ipynb +++ b/docs/extras/use_cases/more/code_writing/llm_math.ipynb @@ -45,7 +45,7 @@ } ], "source": [ - "from langchain.llms import OpenAI, LLMMathChain\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import LLMMathChain\n", "\n", "llm = OpenAI(temperature=0)\n", "llm_math = LLMMathChain.from_llm(llm, 
verbose=True)\n",
From c8982b1a82b35fe537083cbfe09065953adbea88 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:48:32 -0700 Subject: [PATCH 29/50] Update docs/extras/use_cases/qa_structured/integrations/myscale_vector_sql.ipynb --- .../qa_structured/integrations/myscale_vector_sql.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/use_cases/qa_structured/integrations/myscale_vector_sql.ipynb b/docs/extras/use_cases/qa_structured/integrations/myscale_vector_sql.ipynb index 0ab31ba6deba2c..bc7044eb1709fe 100644 --- a/docs/extras/use_cases/qa_structured/integrations/myscale_vector_sql.ipynb +++ b/docs/extras/use_cases/qa_structured/integrations/myscale_vector_sql.ipynb @@ -31,7 +31,7 @@ "from os import environ\n", "import getpass\n", "from typing import Dict, Any\n", - "from langchain.llms import OpenAI, SQLDatabase, LLMChain\n", + "from langchain.llms import OpenAI\nfrom langchain.utilities import SQLDatabase\nfrom langchain.chains import LLMChain\n", "from langchain_experimental.sql.vector_sql import VectorSQLDatabaseChain\n", "from sqlalchemy import create_engine, Column, MetaData\n", "from langchain.prompts import PromptTemplate\n",
From 641183a137c755b24f28cf36ae8ca8332e8e7ed3 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:48:37 -0700 Subject: [PATCH 30/50] Update docs/snippets/modules/agents/how_to/custom_llm_agent.mdx --- docs/snippets/modules/agents/how_to/custom_llm_agent.mdx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/snippets/modules/agents/how_to/custom_llm_agent.mdx b/docs/snippets/modules/agents/how_to/custom_llm_agent.mdx index b52fb2aab2abeb..b23404e4e71685 100644 --- a/docs/snippets/modules/agents/how_to/custom_llm_agent.mdx +++ b/docs/snippets/modules/agents/how_to/custom_llm_agent.mdx @@ -20,7 +20,9 @@ Do necessary imports, etc. ```python from langchain.agents import Tool, AgentExecutor, LLMSingleActionAgent, AgentOutputParser from langchain.prompts import StringPromptTemplate -from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain +from langchain.llms import OpenAI +from langchain.utilities import SerpAPIWrapper +from langchain.chains import LLMChain from typing import List, Union from langchain.schema import AgentAction, AgentFinish, OutputParserException import re
From baafcbacd927de15a00eafc87cbea425a31d46b3 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:48:42 -0700 Subject: [PATCH 31/50] Update docs/snippets/modules/agents/how_to/mrkl.mdx --- docs/snippets/modules/agents/how_to/mrkl.mdx | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/snippets/modules/agents/how_to/mrkl.mdx b/docs/snippets/modules/agents/how_to/mrkl.mdx index b6d1e3d3c77175..c88951762d5f39 100644 --- a/docs/snippets/modules/agents/how_to/mrkl.mdx +++ b/docs/snippets/modules/agents/how_to/mrkl.mdx @@ -1,5 +1,9 @@ ```python -from langchain.chains import LLMMathChain, OpenAI, SerpAPIWrapper, SQLDatabase, SQLDatabaseChain +from langchain.chains import LLMMathChain +from langchain.llms import OpenAI +from langchain.utilities import SerpAPIWrapper +from langchain.utilities import SQLDatabase +from langchain_experimental.sql import SQLDatabaseChain from langchain.agents import initialize_agent, Tool from langchain.agents import AgentType ```
From 3b8f58edfab12e160d673a9b2196315c731e4687 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:48:46 -0700 Subject: [PATCH 32/50] Update docs/snippets/modules/chains/foundational/llm_chain.mdx --- docs/snippets/modules/chains/foundational/llm_chain.mdx | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/snippets/modules/chains/foundational/llm_chain.mdx b/docs/snippets/modules/chains/foundational/llm_chain.mdx index f84a225a2f845d..ac680d9b5bbf4e 100644 --- a/docs/snippets/modules/chains/foundational/llm_chain.mdx +++ b/docs/snippets/modules/chains/foundational/llm_chain.mdx @@ -1,5 +1,7 @@ ```python -from langchain.prompts import PromptTemplate, OpenAI, LLMChain +from langchain.prompts import PromptTemplate +from langchain.llms import OpenAI +from langchain.chains import LLMChain prompt_template = "What is a good name for a company that makes {product}?"
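For reference, the migrated imports in the llm_chain snippet above compose as follows. This is a minimal editorial sketch, not part of the patch series; it assumes a langchain release from this period (post-migration module layout) and an `OPENAI_API_KEY` set in the environment.

```python
# Minimal sketch of the post-migration import style these patches introduce.
# Assumes: langchain 0.0.2xx-era package layout and OPENAI_API_KEY in the env.
from langchain.chains import LLMChain
from langchain.llms import OpenAI
from langchain.prompts import PromptTemplate

# Same prompt as the llm_chain.mdx snippet being edited above.
prompt = PromptTemplate.from_template(
    "What is a good name for a company that makes {product}?"
)
chain = LLMChain(llm=OpenAI(temperature=0.9), prompt=prompt)
print(chain.run(product="colorful socks"))
```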
From fc9e1bcf9d5b836cfcbeb1b1f0d62eb002acc2af Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:48:51 -0700 Subject: [PATCH 33/50] Update libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py --- .../autonomous_agents/hugginggpt/repsonse_generator.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py b/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py index 6fb1d4833dda6a..e5cdabaa4eac7a 100644 --- a/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py +++ b/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py @@ -1,6 +1,7 @@ from typing import Any, List, Optional -from langchain.chains import LLMChain, PromptTemplate +from langchain.chains import LLMChain +from langchain.prompts import PromptTemplate from langchain.base_language import BaseLanguageModel from langchain.callbacks.manager import Callbacks From f893a82efa84b56c8f0ec4d413493a50c8a01eb1 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:49:02 -0700 Subject: [PATCH 34/50] Update docs/extras/modules/memory/agent_with_memory_in_db.ipynb --- docs/extras/modules/memory/agent_with_memory_in_db.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/memory/agent_with_memory_in_db.ipynb b/docs/extras/modules/memory/agent_with_memory_in_db.ipynb index f8e527f45a073c..adf48b9e0402a7 100644 --- a/docs/extras/modules/memory/agent_with_memory_in_db.ipynb +++ b/docs/extras/modules/memory/agent_with_memory_in_db.ipynb @@ -37,7 +37,7 @@ "from langchain.memory import ConversationBufferMemory\n", "from langchain.memory.chat_memory import ChatMessageHistory\n", "from langchain.memory.chat_message_histories import RedisChatMessageHistory\n", - "from langchain.llms import OpenAI, LLMChain\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import LLMChain\n", "from langchain.utilities import GoogleSearchAPIWrapper" ] }, From 059185400763f76d0a8a829d29df9df5dd93fb24 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:49:10 -0700 Subject: [PATCH 35/50] Update docs/extras/modules/memory/custom_memory.ipynb --- docs/extras/modules/memory/custom_memory.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/modules/memory/custom_memory.ipynb b/docs/extras/modules/memory/custom_memory.ipynb index fd99520319426f..4abc0c094a9ecf 100644 --- a/docs/extras/modules/memory/custom_memory.ipynb +++ b/docs/extras/modules/memory/custom_memory.ipynb @@ -25,7 +25,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain.llms import OpenAI, ConversationChain\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import ConversationChain\n", "from langchain.schema import BaseMemory\n", "from pydantic import BaseModel\n", "from typing import List, Dict, Any" From d908818b325315dbc7251ec746eafa8d740f9eef Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:49:19 -0700 Subject: [PATCH 36/50] Update docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval.ipynb --- .../more/agents/agents/custom_agent_with_plugin_retrieval.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval.ipynb 
b/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval.ipynb index 782e70da72c09e..ba1d12fb849a83 100644 --- a/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval.ipynb +++ b/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval.ipynb @@ -39,7 +39,7 @@ " AgentOutputParser,\n", ")\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain\n", + "from langchain.llms import OpenAI\nfrom langchain.utilities import SerpAPIWrapper\nfrom langchain.chains import LLMChain\n", "from typing import List, Union\n", "from langchain.schema import AgentAction, AgentFinish\n", "from langchain.agents.agent_toolkits import NLAToolkit\n", From 447066163e77da849751bcd4442c27d9451d5011 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:49:30 -0700 Subject: [PATCH 37/50] Update docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb --- .../custom_agent_with_plugin_retrieval_using_plugnplai.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb b/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb index 83ccbd7e680e84..4284fe459e898d 100644 --- a/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb +++ b/docs/extras/use_cases/more/agents/agents/custom_agent_with_plugin_retrieval_using_plugnplai.ipynb @@ -63,7 +63,7 @@ " AgentOutputParser,\n", ")\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain\n", + "from langchain.llms import OpenAI\nfrom langchain.utilities import SerpAPIWrapper\nfrom langchain.chains import LLMChain\n", "from typing import List, Union\n", "from langchain.schema import AgentAction, AgentFinish\n", "from langchain.agents.agent_toolkits import NLAToolkit\n", From 9951a6cc75f51f0cabe7522025f8c073723a91dc Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:49:39 -0700 Subject: [PATCH 38/50] Update docs/extras/use_cases/more/agents/agents/sales_agent_with_context.ipynb --- .../use_cases/more/agents/agents/sales_agent_with_context.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/use_cases/more/agents/agents/sales_agent_with_context.ipynb b/docs/extras/use_cases/more/agents/agents/sales_agent_with_context.ipynb index 0a3cce8d335c18..64273459a85a6a 100644 --- a/docs/extras/use_cases/more/agents/agents/sales_agent_with_context.ipynb +++ b/docs/extras/use_cases/more/agents/agents/sales_agent_with_context.ipynb @@ -44,7 +44,7 @@ "\n", "from typing import Dict, List, Any, Union, Callable\n", "from pydantic import BaseModel, Field\n", - "from langchain.chains import LLMChain, PromptTemplate\n", + "from langchain.chains import LLMChain\nfrom langchain.prompts import PromptTemplate\n", "from langchain.llms import BaseLLM\n", "from langchain.chains.base import Chain\n", "from langchain.chat_models import ChatOpenAI\n", From eb726fa333e537181822bfb5c60a1f9f967f1d8b Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:49:49 -0700 Subject: [PATCH 39/50] Update docs/extras/use_cases/more/agents/agents/wikibase_agent.ipynb --- docs/extras/use_cases/more/agents/agents/wikibase_agent.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/docs/extras/use_cases/more/agents/agents/wikibase_agent.ipynb b/docs/extras/use_cases/more/agents/agents/wikibase_agent.ipynb index 42f5dc76be9503..415f4e3a9e6934 100644 --- a/docs/extras/use_cases/more/agents/agents/wikibase_agent.ipynb +++ b/docs/extras/use_cases/more/agents/agents/wikibase_agent.ipynb @@ -396,7 +396,7 @@ " AgentOutputParser,\n", ")\n", "from langchain.prompts import StringPromptTemplate\n", - "from langchain.llms import OpenAI, LLMChain\n", + "from langchain.llms import OpenAI\nfrom langchain.chains import LLMChain\n", "from typing import List, Union\n", "from langchain.schema import AgentAction, AgentFinish\n", "import re" From 3f991afa1532e1810602f695ea6baffa34d56558 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:49:58 -0700 Subject: [PATCH 40/50] Update docs/extras/use_cases/more/agents/autonomous_agents/baby_agi.ipynb --- .../use_cases/more/agents/autonomous_agents/baby_agi.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi.ipynb b/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi.ipynb index 9d5da752636b23..b0bb79e1d439dc 100644 --- a/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi.ipynb +++ b/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi.ipynb @@ -33,7 +33,7 @@ "from collections import deque\n", "from typing import Dict, List, Optional, Any\n", "\n", - "from langchain.chains import LLMChain, OpenAI, PromptTemplate\n", + "from langchain.chains import LLMChain\nfrom langchain.llms import OpenAI\nfrom langchain.prompts import PromptTemplate\n", "from langchain.embeddings import OpenAIEmbeddings\n", "from langchain.llms import BaseLLM\n", "from langchain.vectorstores.base import VectorStore\n", From 6a0898f89d439b9c3fdae98508e2efe06e8fbc48 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:50:08 -0700 Subject: [PATCH 41/50] Update docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb --- .../more/agents/autonomous_agents/baby_agi_with_agent.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb b/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb index 910456b553c04d..efef081cb44901 100644 --- a/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb +++ b/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb @@ -29,7 +29,7 @@ "from collections import deque\n", "from typing import Dict, List, Optional, Any\n", "\n", - "from langchain.chains import LLMChain, OpenAI, PromptTemplate\n", + "from langchain.chains import LLMChain\nfrom langchain.llms import OpenAI\nfrom langchain.prompts import PromptTemplate\n", "from langchain.embeddings import OpenAIEmbeddings\n", "from langchain.llms import BaseLLM\n", "from langchain.vectorstores.base import VectorStore\n", From bcb64527aec05a968722f2a69fdd9fa312ebe201 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:50:17 -0700 Subject: [PATCH 42/50] Update docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb --- .../more/agents/autonomous_agents/baby_agi_with_agent.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb b/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb index efef081cb44901..bf03f95e0cb0bf 
100644 --- a/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb +++ b/docs/extras/use_cases/more/agents/autonomous_agents/baby_agi_with_agent.ipynb @@ -111,7 +111,7 @@ "outputs": [], "source": [ "from langchain.agents import ZeroShotAgent, Tool, AgentExecutor\n", - "from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain\n", + "from langchain.llms import OpenAI\nfrom langchain.utilities import SerpAPIWrapper\nfrom langchain.chains import LLMChain\n", "\n", "todo_prompt = PromptTemplate.from_template(\n", " \"You are a planner who is an expert at coming up with a todo list for a given objective. Come up with a todo list for this objective: {objective}\"\n", From 3d2250820c28b8eb70c542a95e7ec9308454db4c Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:51:56 -0700 Subject: [PATCH 43/50] cr --- .github/workflows/imports.yml | 16 +++++++--------- .../langchain/chains/openai_functions/openapi.py | 2 +- .../langchain/chains/query_constructor/base.py | 2 +- .../langchain/langchain/chat_loaders/imessage.py | 2 +- libs/langchain/langchain/chat_loaders/slack.py | 2 +- .../langchain/langchain/chat_loaders/telegram.py | 2 +- libs/langchain/langchain/chat_loaders/utils.py | 2 +- .../langchain/langchain/chat_loaders/whatsapp.py | 2 +- .../langchain/evaluation/qa/eval_chain.py | 2 +- libs/langchain/langchain/indexes/graph.py | 2 +- .../document_compressors/chain_extract.py | 2 +- .../document_compressors/chain_filter.py | 4 ++-- .../integration_tests/llms/test_fireworks.py | 4 ++-- .../integration_tests/llms/test_opaqueprompts.py | 2 +- .../llms/test_symblai_nebula.py | 2 +- .../tests/unit_tests/chains/test_api.py | 2 +- .../unit_tests/chains/test_combine_documents.py | 2 +- .../unit_tests/chat_loaders/test_telegram.py | 2 +- .../unit_tests/schema/runnable/test_locals.py | 2 +- .../unit_tests/schema/runnable/test_runnable.py | 2 +- 20 files changed, 28 insertions(+), 30 deletions(-) diff --git a/.github/workflows/imports.yml b/.github/workflows/imports.yml index 92986b6d3ae107..f81532a19aa8d6 100644 --- a/.github/workflows/imports.yml +++ b/.github/workflows/imports.yml @@ -1,20 +1,18 @@ ---- -name: Imports +name: Check Imports on: push: - branches: [master] - pull_request: - branches: [master] + branches: + - main # or replace 'main' with the name of your default branch jobs: - build: + check: runs-on: ubuntu-latest steps: - name: Checkout repository uses: actions/checkout@v2 - - name: Run Bash script - run: git grep 'from langchain import' | grep -vE 'from langchain import (__version__)' - + - name: Run import check + run: | + git grep 'from langchain import' | grep -vE 'from langchain import (__version__)' && exit 1 || exit 0 diff --git a/libs/langchain/langchain/chains/openai_functions/openapi.py b/libs/langchain/langchain/chains/openai_functions/openapi.py index ae4d80a3520051..5d03b5956215f9 100644 --- a/libs/langchain/langchain/chains/openai_functions/openapi.py +++ b/libs/langchain/langchain/chains/openai_functions/openapi.py @@ -8,9 +8,9 @@ import requests from requests import Response -from langchain.chains.llm import LLMChain from langchain.callbacks.manager import CallbackManagerForChainRun from langchain.chains.base import Chain +from langchain.chains.llm import LLMChain from langchain.chains.sequential import SequentialChain from langchain.chat_models import ChatOpenAI from langchain.output_parsers.openai_functions import JsonOutputFunctionsParser diff --git a/libs/langchain/langchain/chains/query_constructor/base.py 
b/libs/langchain/langchain/chains/query_constructor/base.py index c7836c6ac9b0b9..71106753f0ac54 100644 --- a/libs/langchain/langchain/chains/query_constructor/base.py +++ b/libs/langchain/langchain/chains/query_constructor/base.py @@ -4,7 +4,6 @@ import json from typing import Any, Callable, List, Optional, Sequence -from langchain.prompts.few_shot import FewShotPromptTemplate from langchain.chains.llm import LLMChain from langchain.chains.query_constructor.ir import ( Comparator, @@ -23,6 +22,7 @@ ) from langchain.chains.query_constructor.schema import AttributeInfo from langchain.output_parsers.json import parse_and_check_json_markdown +from langchain.prompts.few_shot import FewShotPromptTemplate from langchain.schema import BaseOutputParser, BasePromptTemplate, OutputParserException from langchain.schema.language_model import BaseLanguageModel diff --git a/libs/langchain/langchain/chat_loaders/imessage.py b/libs/langchain/langchain/chat_loaders/imessage.py index 41e061cb934ac7..a656c60b76cee3 100644 --- a/libs/langchain/langchain/chat_loaders/imessage.py +++ b/libs/langchain/langchain/chat_loaders/imessage.py @@ -3,8 +3,8 @@ from pathlib import Path from typing import TYPE_CHECKING, Iterator, List, Optional, Union -from langchain.schema import HumanMessage from langchain.chat_loaders.base import BaseChatLoader, ChatSession +from langchain.schema import HumanMessage if TYPE_CHECKING: import sqlite3 diff --git a/libs/langchain/langchain/chat_loaders/slack.py b/libs/langchain/langchain/chat_loaders/slack.py index 2cf17d3abaede3..29c2dc794c0891 100644 --- a/libs/langchain/langchain/chat_loaders/slack.py +++ b/libs/langchain/langchain/chat_loaders/slack.py @@ -5,8 +5,8 @@ from pathlib import Path from typing import Dict, Iterator, List, Union -from langchain.schema import HumanMessage, AIMessage from langchain.chat_loaders.base import BaseChatLoader, ChatSession +from langchain.schema import AIMessage, HumanMessage logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/chat_loaders/telegram.py b/libs/langchain/langchain/chat_loaders/telegram.py index f4ac71cfb58847..71761182183845 100644 --- a/libs/langchain/langchain/chat_loaders/telegram.py +++ b/libs/langchain/langchain/chat_loaders/telegram.py @@ -6,8 +6,8 @@ from pathlib import Path from typing import Iterator, List, Union -from langchain.schema import HumanMessage, BaseMessage from langchain.chat_loaders.base import BaseChatLoader, ChatSession +from langchain.schema import BaseMessage, HumanMessage logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/chat_loaders/utils.py b/libs/langchain/langchain/chat_loaders/utils.py index 71ae630b3778db..1c75c852d2e12d 100644 --- a/libs/langchain/langchain/chat_loaders/utils.py +++ b/libs/langchain/langchain/chat_loaders/utils.py @@ -3,7 +3,7 @@ from typing import Iterable, Iterator, List from langchain.chat_loaders.base import ChatSession -from langchain.schema.messages import BaseMessage, AIMessage +from langchain.schema.messages import AIMessage, BaseMessage def merge_chat_runs_in_session( diff --git a/libs/langchain/langchain/chat_loaders/whatsapp.py b/libs/langchain/langchain/chat_loaders/whatsapp.py index 6117d2a4328499..ad9c1ee9c3428c 100644 --- a/libs/langchain/langchain/chat_loaders/whatsapp.py +++ b/libs/langchain/langchain/chat_loaders/whatsapp.py @@ -5,7 +5,7 @@ from typing import Iterator, List, Union from langchain.chat_loaders.base import BaseChatLoader, ChatSession -from langchain.schema import HumanMessage, AIMessage +from langchain.schema 
import AIMessage, HumanMessage logger = logging.getLogger(__name__) diff --git a/libs/langchain/langchain/evaluation/qa/eval_chain.py b/libs/langchain/langchain/evaluation/qa/eval_chain.py index 86866cc2003135..9f270b6f12cd75 100644 --- a/libs/langchain/langchain/evaluation/qa/eval_chain.py +++ b/libs/langchain/langchain/evaluation/qa/eval_chain.py @@ -4,11 +4,11 @@ import re from typing import Any, List, Optional, Sequence -from langchain.prompts import PromptTemplate from langchain.callbacks.manager import Callbacks from langchain.chains.llm import LLMChain from langchain.evaluation.qa.eval_prompt import CONTEXT_PROMPT, COT_PROMPT, PROMPT from langchain.evaluation.schema import LLMEvalChain, StringEvaluator +from langchain.prompts import PromptTemplate from langchain.pydantic_v1 import Extra from langchain.schema import RUN_KEY from langchain.schema.language_model import BaseLanguageModel diff --git a/libs/langchain/langchain/indexes/graph.py b/libs/langchain/langchain/indexes/graph.py index 19cfc82bc321ee..9772a9ef2b0305 100644 --- a/libs/langchain/langchain/indexes/graph.py +++ b/libs/langchain/langchain/indexes/graph.py @@ -1,7 +1,6 @@ """Graph Index Creator.""" from typing import Optional, Type -from langchain.schema.prompt_template import BasePromptTemplate from langchain.chains.llm import LLMChain from langchain.graphs.networkx_graph import NetworkxEntityGraph, parse_triples from langchain.indexes.prompts.knowledge_triplet_extraction import ( @@ -9,6 +8,7 @@ ) from langchain.pydantic_v1 import BaseModel from langchain.schema.language_model import BaseLanguageModel +from langchain.schema.prompt_template import BasePromptTemplate class GraphIndexCreator(BaseModel): diff --git a/libs/langchain/langchain/retrievers/document_compressors/chain_extract.py b/libs/langchain/langchain/retrievers/document_compressors/chain_extract.py index 3a4dc1a3251264..7fc00416408331 100644 --- a/libs/langchain/langchain/retrievers/document_compressors/chain_extract.py +++ b/libs/langchain/langchain/retrievers/document_compressors/chain_extract.py @@ -4,9 +4,9 @@ import asyncio from typing import Any, Callable, Dict, Optional, Sequence +from langchain.callbacks.manager import Callbacks from langchain.chains.llm import LLMChain from langchain.prompts import PromptTemplate -from langchain.callbacks.manager import Callbacks from langchain.retrievers.document_compressors.base import BaseDocumentCompressor from langchain.retrievers.document_compressors.chain_extract_prompt import ( prompt_template, diff --git a/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py b/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py index c97f7b06b472bd..716909fab8eb58 100644 --- a/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py +++ b/libs/langchain/langchain/retrievers/document_compressors/chain_filter.py @@ -1,10 +1,10 @@ """Filter that uses an LLM to drop documents that aren't relevant to the query.""" from typing import Any, Callable, Dict, Optional, Sequence -from langchain.chains import LLMChain -from langchain.prompts import PromptTemplate from langchain.callbacks.manager import Callbacks +from langchain.chains import LLMChain from langchain.output_parsers.boolean import BooleanOutputParser +from langchain.prompts import PromptTemplate from langchain.retrievers.document_compressors.base import BaseDocumentCompressor from langchain.retrievers.document_compressors.chain_filter_prompt import ( prompt_template, diff --git 
a/libs/langchain/tests/integration_tests/llms/test_fireworks.py b/libs/langchain/tests/integration_tests/llms/test_fireworks.py index ab622f1ddfc1a1..b111d5b57be5cf 100644 --- a/libs/langchain/tests/integration_tests/llms/test_fireworks.py +++ b/libs/langchain/tests/integration_tests/llms/test_fireworks.py @@ -3,14 +3,14 @@ import pytest -from langchain.chains.llm import LLMChain -from langchain.prompts import PromptTemplate from langchain.chains import RetrievalQA +from langchain.chains.llm import LLMChain from langchain.document_loaders import TextLoader from langchain.embeddings.openai import OpenAIEmbeddings from langchain.llms import OpenAIChat from langchain.llms.fireworks import Fireworks, FireworksChat from langchain.llms.loading import load_llm +from langchain.prompts import PromptTemplate from langchain.prompts.chat import ( ChatPromptTemplate, HumanMessagePromptTemplate, diff --git a/libs/langchain/tests/integration_tests/llms/test_opaqueprompts.py b/libs/langchain/tests/integration_tests/llms/test_opaqueprompts.py index b1ba50985be949..c9d0b17567f97f 100644 --- a/libs/langchain/tests/integration_tests/llms/test_opaqueprompts.py +++ b/libs/langchain/tests/integration_tests/llms/test_opaqueprompts.py @@ -1,9 +1,9 @@ import langchain.utilities.opaqueprompts as op from langchain.chains.llm import LLMChain -from langchain.prompts import PromptTemplate from langchain.llms import OpenAI from langchain.llms.opaqueprompts import OpaquePrompts from langchain.memory import ConversationBufferWindowMemory +from langchain.prompts import PromptTemplate from langchain.schema.output_parser import StrOutputParser from langchain.schema.runnable import RunnableMap diff --git a/libs/langchain/tests/integration_tests/llms/test_symblai_nebula.py b/libs/langchain/tests/integration_tests/llms/test_symblai_nebula.py index c4da70356dc3a5..97761676847c08 100644 --- a/libs/langchain/tests/integration_tests/llms/test_symblai_nebula.py +++ b/libs/langchain/tests/integration_tests/llms/test_symblai_nebula.py @@ -1,7 +1,7 @@ """Test Nebula API wrapper.""" from langchain.chains.llm import LLMChain -from langchain.prompts.prompt import PromptTemplate from langchain.llms.symblai_nebula import Nebula +from langchain.prompts.prompt import PromptTemplate def test_symblai_nebula_call() -> None: diff --git a/libs/langchain/tests/unit_tests/chains/test_api.py b/libs/langchain/tests/unit_tests/chains/test_api.py index ce154f3150d4f6..93d38ff6add3d7 100644 --- a/libs/langchain/tests/unit_tests/chains/test_api.py +++ b/libs/langchain/tests/unit_tests/chains/test_api.py @@ -5,9 +5,9 @@ import pytest -from langchain.chains.llm import LLMChain from langchain.chains.api.base import APIChain from langchain.chains.api.prompt import API_RESPONSE_PROMPT, API_URL_PROMPT +from langchain.chains.llm import LLMChain from langchain.utilities.requests import TextRequestsWrapper from tests.unit_tests.llms.fake_llm import FakeLLM diff --git a/libs/langchain/tests/unit_tests/chains/test_combine_documents.py b/libs/langchain/tests/unit_tests/chains/test_combine_documents.py index 5c5c9af4a9583a..7acbdeba631abb 100644 --- a/libs/langchain/tests/unit_tests/chains/test_combine_documents.py +++ b/libs/langchain/tests/unit_tests/chains/test_combine_documents.py @@ -4,13 +4,13 @@ import pytest -from langchain.prompts.prompt import PromptTemplate from langchain.chains.combine_documents.reduce import ( _collapse_docs, _split_list_of_docs, ) from langchain.chains.qa_with_sources import load_qa_with_sources_chain from langchain.docstore.document 
import Document +from langchain.prompts.prompt import PromptTemplate from langchain.schema import format_document from tests.unit_tests.llms.fake_llm import FakeLLM diff --git a/libs/langchain/tests/unit_tests/chat_loaders/test_telegram.py b/libs/langchain/tests/unit_tests/chat_loaders/test_telegram.py index a663f2a941c834..c35dfbaa2b59aa 100644 --- a/libs/langchain/tests/unit_tests/chat_loaders/test_telegram.py +++ b/libs/langchain/tests/unit_tests/chat_loaders/test_telegram.py @@ -6,8 +6,8 @@ import pytest -from langchain.schema import BaseMessage, HumanMessage, AIMessage from langchain.chat_loaders import telegram, utils +from langchain.schema import AIMessage, BaseMessage, HumanMessage def _assert_messages_are_equal( diff --git a/libs/langchain/tests/unit_tests/schema/runnable/test_locals.py b/libs/langchain/tests/unit_tests/schema/runnable/test_locals.py index 5e999cadc905e8..4b0bbb01364620 100644 --- a/libs/langchain/tests/unit_tests/schema/runnable/test_locals.py +++ b/libs/langchain/tests/unit_tests/schema/runnable/test_locals.py @@ -2,8 +2,8 @@ import pytest -from langchain.prompts import PromptTemplate from langchain.llms import FakeListLLM +from langchain.prompts import PromptTemplate from langchain.schema.runnable import ( GetLocalVar, PutLocalVar, diff --git a/libs/langchain/tests/unit_tests/schema/runnable/test_runnable.py b/libs/langchain/tests/unit_tests/schema/runnable/test_runnable.py index 3ad683f6e61ee4..a6b8db9ff2e225 100644 --- a/libs/langchain/tests/unit_tests/schema/runnable/test_runnable.py +++ b/libs/langchain/tests/unit_tests/schema/runnable/test_runnable.py @@ -7,7 +7,6 @@ from pytest_mock import MockerFixture from syrupy import SnapshotAssertion -from langchain.prompts import PromptTemplate from langchain.callbacks.manager import Callbacks from langchain.callbacks.tracers.base import BaseTracer from langchain.callbacks.tracers.schemas import Run @@ -16,6 +15,7 @@ from langchain.llms.fake import FakeListLLM, FakeStreamingListLLM from langchain.load.dump import dumpd, dumps from langchain.output_parsers.list import CommaSeparatedListOutputParser +from langchain.prompts import PromptTemplate from langchain.prompts.chat import ( ChatPromptTemplate, ChatPromptValue, From 71b80e3ba5bebeb4fda9baac589de461c2361897 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:55:37 -0700 Subject: [PATCH 44/50] cr --- libs/langchain/langchain/chat_loaders/telegram.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/langchain/langchain/chat_loaders/telegram.py b/libs/langchain/langchain/chat_loaders/telegram.py index 71761182183845..2441d49a07e13b 100644 --- a/libs/langchain/langchain/chat_loaders/telegram.py +++ b/libs/langchain/langchain/chat_loaders/telegram.py @@ -7,7 +7,7 @@ from typing import Iterator, List, Union from langchain.chat_loaders.base import BaseChatLoader, ChatSession -from langchain.schema import BaseMessage, HumanMessage +from langchain.schema import AIMessage, BaseMessage, HumanMessage logger = logging.getLogger(__name__) @@ -55,7 +55,7 @@ def _load_single_chat_session_html(self, file_path: str) -> ChatSession: with open(file_path, "r", encoding="utf-8") as file: soup = BeautifulSoup(file, "html.parser") - results: List[Union[schema.HumanMessage, schema.AIMessage]] = [] + results: List[Union[HumanMessage, AIMessage]] = [] previous_sender = None for message in soup.select(".message.default"): timestamp = message.select_one(".pull_right.date.details")["title"] From 4eb649da19655b39d4a76b81b6122b79cce3beab Mon Sep 17 
00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:56:05 -0700 Subject: [PATCH 45/50] cr --- .../autonomous_agents/hugginggpt/repsonse_generator.py | 4 ++-- .../autonomous_agents/hugginggpt/task_planner.py | 2 +- libs/experimental/tests/integration_tests/chains/test_pal.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py b/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py index e5cdabaa4eac7a..1df95270cf7ae8 100644 --- a/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py +++ b/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/repsonse_generator.py @@ -1,9 +1,9 @@ from typing import Any, List, Optional -from langchain.chains import LLMChain -from langchain.prompts import PromptTemplate from langchain.base_language import BaseLanguageModel from langchain.callbacks.manager import Callbacks +from langchain.chains import LLMChain +from langchain.prompts import PromptTemplate class ResponseGenerationChain(LLMChain): diff --git a/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/task_planner.py b/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/task_planner.py index 2346f780451a2b..34aaab55ab1b33 100644 --- a/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/task_planner.py +++ b/libs/experimental/langchain_experimental/autonomous_agents/hugginggpt/task_planner.py @@ -3,9 +3,9 @@ from abc import abstractmethod from typing import Any, Dict, List, Optional, Union -from langchain.chains import LLMChain from langchain.base_language import BaseLanguageModel from langchain.callbacks.manager import Callbacks +from langchain.chains import LLMChain from langchain.prompts.chat import ( AIMessagePromptTemplate, ChatPromptTemplate, diff --git a/libs/experimental/tests/integration_tests/chains/test_pal.py b/libs/experimental/tests/integration_tests/chains/test_pal.py index 7ad8e661c221b2..6f3d83b5a16118 100644 --- a/libs/experimental/tests/integration_tests/chains/test_pal.py +++ b/libs/experimental/tests/integration_tests/chains/test_pal.py @@ -1,7 +1,7 @@ """Test PAL chain.""" -from langchain.llms import OpenAI from langchain.chains.pal.base import PALChain +from langchain.llms import OpenAI def test_math_prompt() -> None: From 40ad26f0304f3008a86394d702dedd912acfe14a Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 16:59:48 -0700 Subject: [PATCH 46/50] cr --- .../schema/runnable/__snapshots__/test_runnable.ambr | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/libs/langchain/tests/unit_tests/schema/runnable/__snapshots__/test_runnable.ambr b/libs/langchain/tests/unit_tests/schema/runnable/__snapshots__/test_runnable.ambr index 63c0acc38d7bee..fb950d5b53f06f 100644 --- a/libs/langchain/tests/unit_tests/schema/runnable/__snapshots__/test_runnable.ambr +++ b/libs/langchain/tests/unit_tests/schema/runnable/__snapshots__/test_runnable.ambr @@ -573,6 +573,9 @@ "lc": 1, "type": "constructor", "id": [ + "tests", + "unit_tests", + "schema", "runnable", "test_runnable", "FakeSplitIntoListParser" @@ -1716,6 +1719,9 @@ "lc": 1, "type": "not_implemented", "id": [ + "tests", + "unit_tests", + "schema", "runnable", "test_runnable", "FakeRetriever" From 2f718319bb69eec19c2c8bfd4a3126c57d520f55 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 17:00:38 -0700 Subject: [PATCH 47/50] cr --- 
.github/workflows/imports.yml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/imports.yml b/.github/workflows/imports.yml index f81532a19aa8d6..f636f4ae64ac2b 100644 --- a/.github/workflows/imports.yml +++ b/.github/workflows/imports.yml @@ -1,9 +1,11 @@ -name: Check Imports +--- +name: Imports on: push: - branches: - - main # or replace 'main' with the name of your default branch + branches: [master] + pull_request: + branches: [master] jobs: check: From e9483bfc88f338d8f21b55ebc62ab380f71c35fa Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 17:02:54 -0700 Subject: [PATCH 48/50] cr --- .github/workflows/imports.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/imports.yml b/.github/workflows/imports.yml index f636f4ae64ac2b..3ae67a5b677dae 100644 --- a/.github/workflows/imports.yml +++ b/.github/workflows/imports.yml @@ -17,4 +17,6 @@ jobs: - name: Run import check run: | - git grep 'from langchain import' | grep -vE 'from langchain import (__version__)' && exit 1 || exit 0 + # We should not encourage imports directly from main init file + # Expect for __version__ and hub + git grep 'from langchain import' | grep -vE 'from langchain import (__version__|hub)' && exit 1 || exit 0 From d6e245e2e12290952477ec46f0e83ba141485b45 Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 17:07:48 -0700 Subject: [PATCH 49/50] cr --- docs/extras/integrations/chat_loaders/wechat.ipynb | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/extras/integrations/chat_loaders/wechat.ipynb b/docs/extras/integrations/chat_loaders/wechat.ipynb index d5d06345722808..7eb86e8169013a 100644 --- a/docs/extras/integrations/chat_loaders/wechat.ipynb +++ b/docs/extras/integrations/chat_loaders/wechat.ipynb @@ -78,7 +78,7 @@ "import re\n", "from typing import Iterator, List\n", "\n", - "from langchain import schema\n", + "from langchain.schema import HumanMessage, BaseMessage\n", "from langchain.chat_loaders import base as chat_loaders\n", "\n", "logger = logging.getLogger()\n", @@ -110,7 +110,7 @@ " # skip non-text messages like stickers, images, etc.\n", " if not re.match(r\"\\[.*\\]\", content):\n", " results.append(\n", - " schema.HumanMessage(\n", + " HumanMessage(\n", " content=content,\n", " additional_kwargs={\n", " \"sender\": current_sender,\n", @@ -135,7 +135,7 @@ " with open(file_path, \"r\", encoding=\"utf-8\") as file:\n", " lines = file.readlines()\n", "\n", - " results: List[schema.BaseMessage] = []\n", + " results: List[BaseMessage] = []\n", " current_sender = None\n", " current_timestamp = None\n", " current_content = []\n", @@ -292,7 +292,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.11" + "version": "3.10.1" } }, "nbformat": 4, From ccc7e97513c919b08ddbe030f62a81f9debbfc0a Mon Sep 17 00:00:00 2001 From: Harrison Chase Date: Sat, 16 Sep 2023 17:10:33 -0700 Subject: [PATCH 50/50] cr --- .github/workflows/imports.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/imports.yml b/.github/workflows/imports.yml index 3ae67a5b677dae..7ad315f1b9281f 100644 --- a/.github/workflows/imports.yml +++ b/.github/workflows/imports.yml @@ -19,4 +19,5 @@ jobs: run: | # We should not encourage imports directly from main init file # Expect for __version__ and hub - git grep 'from langchain import' | grep -vE 'from langchain import (__version__|hub)' && exit 1 || exit 0 + # And of 
course except for this file
+        git grep 'from langchain import' | grep -vE 'from langchain import (__version__|hub)' | grep -v '.github/workflows/imports.yml' && exit 1 || exit 0
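
Taken together, these patches standardize the documentation imports: each class is imported from the module that defines it (`langchain.llms`, `langchain.chains`, `langchain.utilities`, `langchain.prompts`) rather than being bundled into a single `from ... import A, B, C` line, and the new workflow rejects imports from the top-level `langchain` package other than `__version__` and `hub`. A minimal sketch of the resulting style is below; the model settings, chain wiring, and example call are illustrative assumptions, not part of the patches themselves.

```python
# Per-module imports, matching the pattern the patches introduce.
from langchain.llms import OpenAI
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain.utilities import SerpAPIWrapper

# Illustrative usage only (assumed model defaults; requires OPENAI_API_KEY).
llm = OpenAI(temperature=0)
prompt = PromptTemplate.from_template(
    "What is a good name for a company that makes {product}?"
)
chain = LLMChain(llm=llm, prompt=prompt)

# Utilities now come from langchain.utilities (requires SERPAPI_API_KEY).
search = SerpAPIWrapper()

# The older style these patches remove would have looked like:
# from langchain.llms import OpenAI, SerpAPIWrapper, LLMChain
```

For example, `chain.run(product="colorful socks")` would then exercise the chain; note that nothing above imports from the bare `langchain` namespace, so it also passes the grep check added by the workflow.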