From 39a075f73d71250bde8b92e862d269c783b762d2 Mon Sep 17 00:00:00 2001 From: Brody Klapko Date: Mon, 8 Sep 2025 12:13:43 -0700 Subject: [PATCH 1/6] Remove callouts first --- .../integrations/callbacks/datadog_tracer.mdx | 3 - .../callbacks/upstash_ratelimit_callback.mdx | 3 - .../integrations/chat/alibaba_tongyi.mdx | 3 - .../integrations/chat/baidu_qianfan.mdx | 3 - .../integrations/chat/baidu_wenxin.mdx | 3 - .../integrations/chat/deep_infra.mdx | 3 - .../javascript/integrations/chat/friendli.mdx | 3 - .../integrations/chat/llama_cpp.mdx | 3 - .../javascript/integrations/chat/minimax.mdx | 3 - .../javascript/integrations/chat/moonshot.mdx | 3 - .../javascript/integrations/chat/premai.mdx | 3 - .../integrations/chat/tencent_hunyuan.mdx | 3 - .../javascript/integrations/chat/web_llm.mdx | 3 - .../javascript/integrations/chat/yandex.mdx | 3 - .../javascript/integrations/chat/zhipuai.mdx | 3 - src/oss/javascript/integrations/convert.py | 86 +++++++++++++++++++ .../document_compressors/cohere_rerank.mdx | 3 - .../document_compressors/mixedbread_ai.mdx | 3 - .../web_loaders/apify_dataset.mdx | 3 - .../web_loaders/searchapi.mdx | 3 - .../document_loaders/web_loaders/serpapi.mdx | 3 - .../web_loaders/sort_xyz_blockchain.mdx | 3 - .../document_transformers/html-to-text.mdx | 3 - .../mozilla_readability.mdx | 3 - .../openai_metadata_tagger.mdx | 3 - .../llm_caching/azure_cosmosdb_nosql.mdx | 3 - src/oss/javascript/integrations/llms/ai21.mdx | 3 - .../integrations/llms/aleph_alpha.mdx | 3 - .../integrations/llms/aws_sagemaker.mdx | 3 - .../integrations/llms/deep_infra.mdx | 3 - .../javascript/integrations/llms/friendli.mdx | 3 - .../integrations/llms/gradient_ai.mdx | 3 - .../integrations/llms/jigsawstack.mdx | 3 - .../integrations/llms/llama_cpp.mdx | 3 - .../javascript/integrations/llms/raycast.mdx | 3 - .../integrations/llms/replicate.mdx | 3 - .../javascript/integrations/llms/writer.mdx | 3 - .../javascript/integrations/llms/yandex.mdx | 3 - 
.../integrations/memory/astradb.mdx | 3 - .../integrations/memory/aurora_dsql.mdx | 3 - .../memory/azure_cosmos_mongo_vcore.mdx | 3 - .../memory/azure_cosmosdb_nosql.mdx | 3 - .../integrations/memory/cassandra.mdx | 3 - .../integrations/memory/cloudflare_d1.mdx | 3 - .../javascript/integrations/memory/convex.mdx | 3 - .../integrations/memory/dynamodb.mdx | 3 - .../javascript/integrations/memory/file.mdx | 3 - .../integrations/memory/firestore.mdx | 3 - .../integrations/memory/ipfs_datastore.mdx | 3 - .../integrations/memory/mem0_memory.mdx | 3 - .../integrations/memory/momento.mdx | 3 - .../integrations/memory/mongodb.mdx | 3 - .../integrations/memory/motorhead_memory.mdx | 3 - .../integrations/memory/planetscale.mdx | 3 - .../integrations/memory/postgres.mdx | 3 - .../javascript/integrations/memory/redis.mdx | 3 - .../integrations/memory/upstash_redis.mdx | 3 - .../javascript/integrations/memory/xata.mdx | 3 - .../integrations/memory/zep_memory.mdx | 3 - .../integrations/memory/zep_memory_cloud.mdx | 3 - .../integrations/providers/anthropic.mdx | 3 - .../integrations/providers/google.mdx | 5 -- .../integrations/providers/microsoft.mdx | 15 ---- .../integrations/providers/openai.mdx | 3 - .../retrievers/chaindesk-retriever.mdx | 3 - .../integrations/retrievers/dria.mdx | 3 - .../integrations/retrievers/hyde.mdx | 3 - .../retrievers/metal-retriever.mdx | 3 - .../retrievers/supabase-hybrid.mdx | 3 - .../retrievers/time-weighted-retriever.mdx | 3 - .../retrievers/zep-cloud-retriever.mdx | 3 - .../integrations/retrievers/zep-retriever.mdx | 3 - .../text_embedding/alibaba_tongyi.mdx | 3 - .../text_embedding/baidu_qianfan.mdx | 3 - .../integrations/text_embedding/deepinfra.mdx | 3 - .../text_embedding/hugging_face_inference.mdx | 3 - .../integrations/text_embedding/jina.mdx | 3 - .../integrations/text_embedding/llama_cpp.mdx | 3 - .../text_embedding/mixedbread_ai.mdx | 3 - .../integrations/text_embedding/nomic.mdx | 3 - .../integrations/text_embedding/premai.mdx | 3 - 
.../text_embedding/tencent_hunyuan.mdx | 3 - .../text_embedding/transformers.mdx | 3 - .../integrations/text_embedding/zhipuai.mdx | 3 - .../integrations/tools/aiplugin-tool.mdx | 3 - .../tools/azure_dynamic_sessions.mdx | 3 - .../javascript/integrations/tools/connery.mdx | 2 - .../integrations/tools/connery_toolkit.mdx | 2 - .../javascript/integrations/tools/dalle.mdx | 3 - .../integrations/tools/discord_tool.mdx | 3 - .../javascript/integrations/tools/gmail.mdx | 3 - .../integrations/tools/google_calendar.mdx | 3 - .../integrations/tools/google_places.mdx | 3 - .../integrations/tools/google_routes.mdx | 3 - .../integrations/tools/google_trends.mdx | 3 - src/oss/javascript/integrations/tools/ibm.mdx | 3 - .../integrations/tools/jigsawstack.mdx | 3 - .../javascript/integrations/tools/json.mdx | 3 - .../integrations/tools/lambda_agent.mdx | 3 - .../integrations/tools/pyinterpreter.mdx | 3 - .../integrations/tools/searchapi.mdx | 3 - .../javascript/integrations/tools/searxng.mdx | 3 - .../integrations/tools/sfn_agent.mdx | 3 - .../integrations/tools/webbrowser.mdx | 3 - .../integrations/tools/zapier_agent.mdx | 3 - .../integrations/vectorstores/analyticdb.mdx | 3 - .../integrations/vectorstores/astradb.mdx | 3 - .../vectorstores/azure_aisearch.mdx | 3 - .../vectorstores/azure_cosmosdb_mongodb.mdx | 3 - .../vectorstores/azure_cosmosdb_nosql.mdx | 3 - .../integrations/vectorstores/cassandra.mdx | 3 - .../integrations/vectorstores/clickhouse.mdx | 3 - .../integrations/vectorstores/closevector.mdx | 3 - .../vectorstores/cloudflare_vectorize.mdx | 3 - .../integrations/vectorstores/convex.mdx | 3 - .../integrations/vectorstores/hanavector.mdx | 3 - .../integrations/vectorstores/lancedb.mdx | 3 - .../integrations/vectorstores/libsql.mdx | 3 - .../integrations/vectorstores/milvus.mdx | 3 - .../vectorstores/momento_vector_index.mdx | 3 - .../integrations/vectorstores/myscale.mdx | 3 - .../integrations/vectorstores/neo4jvector.mdx | 3 - .../integrations/vectorstores/neon.mdx | 
3 - .../integrations/vectorstores/opensearch.mdx | 3 - .../integrations/vectorstores/prisma.mdx | 3 - .../integrations/vectorstores/rockset.mdx | 3 - .../integrations/vectorstores/singlestore.mdx | 3 - .../integrations/vectorstores/tigris.mdx | 3 - .../integrations/vectorstores/typeorm.mdx | 3 - .../integrations/vectorstores/typesense.mdx | 3 - .../integrations/vectorstores/usearch.mdx | 3 - .../vectorstores/vercel_postgres.mdx | 3 - .../integrations/vectorstores/voy.mdx | 3 - .../integrations/vectorstores/xata.mdx | 3 - .../integrations/vectorstores/zep.mdx | 3 - .../integrations/vectorstores/zep_cloud.mdx | 3 - .../integration-install-tooltip.mdx | 3 - 137 files changed, 86 insertions(+), 420 deletions(-) create mode 100644 src/oss/javascript/integrations/convert.py delete mode 100644 src/snippets/javascript-integrations/integration-install-tooltip.mdx diff --git a/src/oss/javascript/integrations/callbacks/datadog_tracer.mdx b/src/oss/javascript/integrations/callbacks/datadog_tracer.mdx index 4b1b36659..93494f89e 100644 --- a/src/oss/javascript/integrations/callbacks/datadog_tracer.mdx +++ b/src/oss/javascript/integrations/callbacks/datadog_tracer.mdx @@ -12,9 +12,6 @@ This is an experimental community implementation, and it is not officially suppo ## Setup -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/callbacks/upstash_ratelimit_callback.mdx b/src/oss/javascript/integrations/callbacks/upstash_ratelimit_callback.mdx index 19174c70f..3db075808 100644 --- a/src/oss/javascript/integrations/callbacks/upstash_ratelimit_callback.mdx +++ b/src/oss/javascript/integrations/callbacks/upstash_ratelimit_callback.mdx @@ -26,9 +26,6 @@ UPSTASH_REDIS_REST_TOKEN="****" Next, you will need to install Upstash Ratelimit and `@langchain/community`: -import IntegrationInstallTooltip from 
'/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @upstash/ratelimit @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/chat/alibaba_tongyi.mdx b/src/oss/javascript/integrations/chat/alibaba_tongyi.mdx index 45fd3a85d..58f99d7af 100644 --- a/src/oss/javascript/integrations/chat/alibaba_tongyi.mdx +++ b/src/oss/javascript/integrations/chat/alibaba_tongyi.mdx @@ -10,9 +10,6 @@ You'll need to sign up for an Alibaba API key and set it as an environment varia Then, you'll need to install the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/chat/baidu_qianfan.mdx b/src/oss/javascript/integrations/chat/baidu_qianfan.mdx index 6f92cfdc3..44ebca838 100644 --- a/src/oss/javascript/integrations/chat/baidu_qianfan.mdx +++ b/src/oss/javascript/integrations/chat/baidu_qianfan.mdx @@ -6,9 +6,6 @@ title: ChatBaiduQianfan You'll first need to install the [`@langchain/baidu-qianfan`](https://www.npmjs.com/package/@langchain/baidu-qianfan) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/baidu-qianfan @langchain/core diff --git a/src/oss/javascript/integrations/chat/baidu_wenxin.mdx b/src/oss/javascript/integrations/chat/baidu_wenxin.mdx index 7875fded0..894f1f9b9 100644 --- a/src/oss/javascript/integrations/chat/baidu_wenxin.mdx +++ b/src/oss/javascript/integrations/chat/baidu_wenxin.mdx @@ -11,9 +11,6 @@ Use the [`@langchain/baidu-qianfan`](/oss/integrations/chat/baidu_qianfan/) pack LangChain.js supports Baidu's ERNIE-bot family of models. 
Here's an example: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/chat/deep_infra.mdx b/src/oss/javascript/integrations/chat/deep_infra.mdx index ee4fc0d27..7d00237aa 100644 --- a/src/oss/javascript/integrations/chat/deep_infra.mdx +++ b/src/oss/javascript/integrations/chat/deep_infra.mdx @@ -5,9 +5,6 @@ title: ChatDeepInfra LangChain supports chat models hosted by [Deep Infra](https://deepinfra.com/) through the `ChatDeepInfra` wrapper. First, you'll need to install the `@langchain/community` package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/chat/friendli.mdx b/src/oss/javascript/integrations/chat/friendli.mdx index 10a7d6d51..032309d24 100644 --- a/src/oss/javascript/integrations/chat/friendli.mdx +++ b/src/oss/javascript/integrations/chat/friendli.mdx @@ -10,9 +10,6 @@ This tutorial guides you through integrating `ChatFriendli` for chat application Ensure the `@langchain/community` is installed. -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/chat/llama_cpp.mdx b/src/oss/javascript/integrations/chat/llama_cpp.mdx index 405bb12cb..8fe2090ef 100644 --- a/src/oss/javascript/integrations/chat/llama_cpp.mdx +++ b/src/oss/javascript/integrations/chat/llama_cpp.mdx @@ -14,9 +14,6 @@ This module is based on the [node-llama-cpp](https://github.com/withcatai/node-l You'll need to install major version `3` of the [node-llama-cpp](https://github.com/withcatai/node-llama-cpp) module to communicate with your local model. 
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install -S node-llama-cpp@3 @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/chat/minimax.mdx b/src/oss/javascript/integrations/chat/minimax.mdx index 899218288..ad78b3629 100644 --- a/src/oss/javascript/integrations/chat/minimax.mdx +++ b/src/oss/javascript/integrations/chat/minimax.mdx @@ -10,9 +10,6 @@ This example demonstrates using LangChain.js to interact with Minimax. To use Minimax models, you'll need a Minimax account, an API key, and a Group ID. -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/chat/moonshot.mdx b/src/oss/javascript/integrations/chat/moonshot.mdx index a559161d8..a7dc733fb 100644 --- a/src/oss/javascript/integrations/chat/moonshot.mdx +++ b/src/oss/javascript/integrations/chat/moonshot.mdx @@ -14,9 +14,6 @@ https://platform.moonshot.cn/console You'll also need to install the following dependencies: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/chat/premai.mdx b/src/oss/javascript/integrations/chat/premai.mdx index 9ba01b0a0..d94e9b6c0 100644 --- a/src/oss/javascript/integrations/chat/premai.mdx +++ b/src/oss/javascript/integrations/chat/premai.mdx @@ -13,9 +13,6 @@ export PREM_API_KEY=your-api-key You can use models provided by Prem AI as follows: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/chat/tencent_hunyuan.mdx b/src/oss/javascript/integrations/chat/tencent_hunyuan.mdx 
index 4ee247660..c66458564 100644 --- a/src/oss/javascript/integrations/chat/tencent_hunyuan.mdx +++ b/src/oss/javascript/integrations/chat/tencent_hunyuan.mdx @@ -12,9 +12,6 @@ https://cloud.tencent.com/document/product/1729/104753 2. Create SecretID & SecretKey [here](https://console.cloud.tencent.com/cam/capi). 3. Set SecretID and SecretKey as environment variables named `TENCENT_SECRET_ID` and `TENCENT_SECRET_KEY`, respectively. -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/chat/web_llm.mdx b/src/oss/javascript/integrations/chat/web_llm.mdx index 91bbcd06e..c6bdd3ff1 100644 --- a/src/oss/javascript/integrations/chat/web_llm.mdx +++ b/src/oss/javascript/integrations/chat/web_llm.mdx @@ -14,9 +14,6 @@ You can run LLMs directly in your web browser using LangChain's [WebLLM](https:/ You'll need to install the [WebLLM SDK](https://www.npmjs.com/package/@mlc-ai/web-llm) module to communicate with your local model. 
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install -S @mlc-ai/web-llm @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/chat/yandex.mdx b/src/oss/javascript/integrations/chat/yandex.mdx index 28fe823a7..a4cbaabbb 100644 --- a/src/oss/javascript/integrations/chat/yandex.mdx +++ b/src/oss/javascript/integrations/chat/yandex.mdx @@ -17,9 +17,6 @@ Next, you have two authentication options: ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/yandex @langchain/core diff --git a/src/oss/javascript/integrations/chat/zhipuai.mdx b/src/oss/javascript/integrations/chat/zhipuai.mdx index a9461a120..6d4a5591d 100644 --- a/src/oss/javascript/integrations/chat/zhipuai.mdx +++ b/src/oss/javascript/integrations/chat/zhipuai.mdx @@ -14,9 +14,6 @@ https://open.bigmodel.cn You'll also need to install the following dependencies: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core jsonwebtoken diff --git a/src/oss/javascript/integrations/convert.py b/src/oss/javascript/integrations/convert.py new file mode 100644 index 000000000..5840b0621 --- /dev/null +++ b/src/oss/javascript/integrations/convert.py @@ -0,0 +1,86 @@ +import re +from pathlib import Path + +def extract_packages(content: str) -> str | None: + """Extract package names from Npm2Yarn component.""" + pattern = r'\s*(.*?)\s*' + match = re.search(pattern, content, re.DOTALL) + if match: + return match.group(1).strip() + return None + +def create_codegroup(packages: str) -> str: + """Create CodeGroup component with npm, yarn, and pnpm instructions.""" + return f''' +```bash npm +npm install {packages} +``` +```bash yarn +yarn add {packages} +``` +```bash pnpm +pnpm add {packages} +``` +''' + 
+def convert_file(file_path: Path) -> bool: + """Convert a single file's installation blocks.""" + with open(file_path, 'r', encoding='utf-8') as f: + content = f.read() + + # Find the mdx block + mdx_pattern = r'```\{=mdx\}.*?```' + match = re.search(mdx_pattern, content, re.DOTALL) # Fixed: using mdx_pattern instead of pattern + + if not match: + print(f"No mdx block found in {file_path}") + return False + + mdx_block = match.group(0) + packages = extract_packages(mdx_block) + + if not packages: + print(f"No Npm2Yarn component found in {file_path}") + return False + + # Create new CodeGroup + new_content = content.replace(mdx_block, create_codegroup(packages)) + + # Write back to file + with open(file_path, 'w', encoding='utf-8') as f: + f.write(new_content) + + print(f"Successfully converted {file_path}") + return True + +def main(): + import sys + + # If no argument provided, process all files in current directory + if len(sys.argv) == 1: + base_dir = Path(__file__).parent + files = [] + # Walk through all subdirectories + for ext in ['.md', '.mdx']: + files.extend(list(base_dir.rglob(f'*{ext}'))) + + print(f"Found {len(files)} files to process") + for file_path in files: + print(f"\nProcessing {file_path}...") + convert_file(file_path) + + # If file path provided, process single file + elif len(sys.argv) == 2: + file_path = Path(sys.argv[1]) + if not file_path.exists(): + print(f"File not found: {file_path}") + sys.exit(1) + convert_file(file_path) + + else: + print("Usage: python script.py [file_path]") + print("If no file_path is provided, will process all .md/.mdx files in current directory") + sys.exit(1) + +if __name__ == "__main__": + main() diff --git a/src/oss/javascript/integrations/document_compressors/cohere_rerank.mdx b/src/oss/javascript/integrations/document_compressors/cohere_rerank.mdx index 80673b11a..09a6b1129 100644 --- a/src/oss/javascript/integrations/document_compressors/cohere_rerank.mdx +++ 
b/src/oss/javascript/integrations/document_compressors/cohere_rerank.mdx @@ -10,9 +10,6 @@ Cohere offers an API for reranking documents. In this example we'll show you how ## Setup -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/cohere @langchain/core diff --git a/src/oss/javascript/integrations/document_compressors/mixedbread_ai.mdx b/src/oss/javascript/integrations/document_compressors/mixedbread_ai.mdx index e6cce8c8b..c06f2a3be 100644 --- a/src/oss/javascript/integrations/document_compressors/mixedbread_ai.mdx +++ b/src/oss/javascript/integrations/document_compressors/mixedbread_ai.mdx @@ -10,9 +10,6 @@ This guide will help you integrate and use the [Mixedbread AI](https://mixedbrea To get started, install the `@langchain/mixedbread-ai` package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm install @langchain/mixedbread-ai diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/apify_dataset.mdx b/src/oss/javascript/integrations/document_loaders/web_loaders/apify_dataset.mdx index 52f840864..8f4ddd341 100644 --- a/src/oss/javascript/integrations/document_loaders/web_loaders/apify_dataset.mdx +++ b/src/oss/javascript/integrations/document_loaders/web_loaders/apify_dataset.mdx @@ -32,9 +32,6 @@ You'll first need to install the official Apify client: ```bash npm npm install apify-client ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install hnswlib-node @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/searchapi.mdx b/src/oss/javascript/integrations/document_loaders/web_loaders/searchapi.mdx index ccf48eb6a..a999ba10f 100644 --- a/src/oss/javascript/integrations/document_loaders/web_loaders/searchapi.mdx +++ 
b/src/oss/javascript/integrations/document_loaders/web_loaders/searchapi.mdx @@ -22,9 +22,6 @@ Here's an example of how to use the `SearchApiLoader`: import Searchapi from "/snippets/javascript-integrations/examples/document_loaders/searchapi.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core @langchain/openai diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/serpapi.mdx b/src/oss/javascript/integrations/document_loaders/web_loaders/serpapi.mdx index 23e7a312e..80b8a1ab5 100644 --- a/src/oss/javascript/integrations/document_loaders/web_loaders/serpapi.mdx +++ b/src/oss/javascript/integrations/document_loaders/web_loaders/serpapi.mdx @@ -20,9 +20,6 @@ Here's an example of how to use the `SerpAPILoader`: import Serpapi from "/snippets/javascript-integrations/examples/document_loaders/serpapi.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core @langchain/openai diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/sort_xyz_blockchain.mdx b/src/oss/javascript/integrations/document_loaders/web_loaders/sort_xyz_blockchain.mdx index 9421cdcf5..bb8fb33a7 100644 --- a/src/oss/javascript/integrations/document_loaders/web_loaders/sort_xyz_blockchain.mdx +++ b/src/oss/javascript/integrations/document_loaders/web_loaders/sort_xyz_blockchain.mdx @@ -8,9 +8,6 @@ You will need a free Sort API key, visiting sort.xyz to obtain one. 
import SortXyzBlockchain from "/snippets/javascript-integrations/examples/document_loaders/sort_xyz_blockchain.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core @langchain/openai diff --git a/src/oss/javascript/integrations/document_transformers/html-to-text.mdx b/src/oss/javascript/integrations/document_transformers/html-to-text.mdx index a4250c864..51c165bdb 100644 --- a/src/oss/javascript/integrations/document_transformers/html-to-text.mdx +++ b/src/oss/javascript/integrations/document_transformers/html-to-text.mdx @@ -17,9 +17,6 @@ Though not required for the transformer by itself, the below usage examples requ ```bash npm npm install cheerio ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/document_transformers/mozilla_readability.mdx b/src/oss/javascript/integrations/document_transformers/mozilla_readability.mdx index b6e68b838..9689b2757 100644 --- a/src/oss/javascript/integrations/document_transformers/mozilla_readability.mdx +++ b/src/oss/javascript/integrations/document_transformers/mozilla_readability.mdx @@ -17,9 +17,6 @@ Though not required for the transformer by itself, the below usage examples requ ```bash npm npm install cheerio ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/document_transformers/openai_metadata_tagger.mdx b/src/oss/javascript/integrations/document_transformers/openai_metadata_tagger.mdx index cf20a383c..978dabf44 100644 --- a/src/oss/javascript/integrations/document_transformers/openai_metadata_tagger.mdx +++ 
b/src/oss/javascript/integrations/document_transformers/openai_metadata_tagger.mdx @@ -14,9 +14,6 @@ For example, let's say you wanted to index a set of movie reviews. You could ini import MetadataTagger from "/snippets/javascript-integrations/examples/document_transformers/metadata_tagger.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/llm_caching/azure_cosmosdb_nosql.mdx b/src/oss/javascript/integrations/llm_caching/azure_cosmosdb_nosql.mdx index 337df4890..a52d66150 100644 --- a/src/oss/javascript/integrations/llm_caching/azure_cosmosdb_nosql.mdx +++ b/src/oss/javascript/integrations/llm_caching/azure_cosmosdb_nosql.mdx @@ -10,9 +10,6 @@ If you don't have an Azure account, you can [create a free account](https://azur You'll first need to install the [`@langchain/azure-cosmosdb`](https://www.npmjs.com/package/@langchain/azure-cosmosdb) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/azure-cosmosdb @langchain/core diff --git a/src/oss/javascript/integrations/llms/ai21.mdx b/src/oss/javascript/integrations/llms/ai21.mdx index 9f168288c..327b13c28 100644 --- a/src/oss/javascript/integrations/llms/ai21.mdx +++ b/src/oss/javascript/integrations/llms/ai21.mdx @@ -6,9 +6,6 @@ You can get started with AI21Labs' Jurassic family of models, as well as see a f Here's an example of initializing an instance in LangChain.js: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/llms/aleph_alpha.mdx b/src/oss/javascript/integrations/llms/aleph_alpha.mdx index eed2c83ae..20f939612 100644 --- 
a/src/oss/javascript/integrations/llms/aleph_alpha.mdx +++ b/src/oss/javascript/integrations/llms/aleph_alpha.mdx @@ -6,9 +6,6 @@ LangChain.js supports AlephAlpha's Luminous family of models. You'll need to sig Here's an example: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/llms/aws_sagemaker.mdx b/src/oss/javascript/integrations/llms/aws_sagemaker.mdx index 40beca8fe..72d895540 100644 --- a/src/oss/javascript/integrations/llms/aws_sagemaker.mdx +++ b/src/oss/javascript/integrations/llms/aws_sagemaker.mdx @@ -11,9 +11,6 @@ You'll need to install the official SageMaker SDK as a peer dependency: ```bash npm npm install @aws-sdk/client-sagemaker-runtime ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/llms/deep_infra.mdx b/src/oss/javascript/integrations/llms/deep_infra.mdx index 7d3e844b0..07213904e 100644 --- a/src/oss/javascript/integrations/llms/deep_infra.mdx +++ b/src/oss/javascript/integrations/llms/deep_infra.mdx @@ -5,9 +5,6 @@ title: DeepInfra LangChain supports LLMs hosted by [Deep Infra](https://deepinfra.com/) through the `DeepInfra` wrapper. 
First, you'll need to install the `@langchain/community` package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/llms/friendli.mdx b/src/oss/javascript/integrations/llms/friendli.mdx index 91fb4f981..f5df921db 100644 --- a/src/oss/javascript/integrations/llms/friendli.mdx +++ b/src/oss/javascript/integrations/llms/friendli.mdx @@ -10,9 +10,6 @@ This tutorial guides you through integrating `Friendli` with LangChain. Ensure the `@langchain/community` is installed. -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/llms/gradient_ai.mdx b/src/oss/javascript/integrations/llms/gradient_ai.mdx index 8ccbdb016..01bae392e 100644 --- a/src/oss/javascript/integrations/llms/gradient_ai.mdx +++ b/src/oss/javascript/integrations/llms/gradient_ai.mdx @@ -27,9 +27,6 @@ const model = new GradientLLM({ ``` ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/llms/jigsawstack.mdx b/src/oss/javascript/integrations/llms/jigsawstack.mdx index ce480f131..0df69a1ca 100644 --- a/src/oss/javascript/integrations/llms/jigsawstack.mdx +++ b/src/oss/javascript/integrations/llms/jigsawstack.mdx @@ -17,9 +17,6 @@ export JIGSAWSTACK_API_KEY="your-api-key" ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/jigsawstack diff --git a/src/oss/javascript/integrations/llms/llama_cpp.mdx b/src/oss/javascript/integrations/llms/llama_cpp.mdx index f448869f4..146a8089c 100644 --- 
a/src/oss/javascript/integrations/llms/llama_cpp.mdx +++ b/src/oss/javascript/integrations/llms/llama_cpp.mdx @@ -17,9 +17,6 @@ You'll need to install major version `3` of the [node-llama-cpp](https://github. ```bash npm npm install -S node-llama-cpp@3 ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/llms/raycast.mdx b/src/oss/javascript/integrations/llms/raycast.mdx index a0d6b36a1..3863960aa 100644 --- a/src/oss/javascript/integrations/llms/raycast.mdx +++ b/src/oss/javascript/integrations/llms/raycast.mdx @@ -10,9 +10,6 @@ You can utilize the LangChain's RaycastAI class within the [Raycast Environment] - There is a rate limit of approx 10 requests per minute for each Raycast Pro user. If you exceed this limit, you will receive an error. You can set your desired rpm limit by passing `rateLimitPerMinute` to the `RaycastAI` constructor as shown in the example, as this rate limit may change in the future. 
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/llms/replicate.mdx b/src/oss/javascript/integrations/llms/replicate.mdx index d11a951d5..b3f2bcdf3 100644 --- a/src/oss/javascript/integrations/llms/replicate.mdx +++ b/src/oss/javascript/integrations/llms/replicate.mdx @@ -4,9 +4,6 @@ title: Replicate Here's an example of calling a Replicate model as an LLM: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install replicate@1 @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/llms/writer.mdx b/src/oss/javascript/integrations/llms/writer.mdx index 5a9c6977c..cc68f5b96 100644 --- a/src/oss/javascript/integrations/llms/writer.mdx +++ b/src/oss/javascript/integrations/llms/writer.mdx @@ -13,9 +13,6 @@ Next, you'll need to install the official package as a peer dependency: ```bash npm yarn add @writerai/writer-sdk ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/llms/yandex.mdx b/src/oss/javascript/integrations/llms/yandex.mdx index a57c0f7c7..712f1f261 100644 --- a/src/oss/javascript/integrations/llms/yandex.mdx +++ b/src/oss/javascript/integrations/llms/yandex.mdx @@ -17,9 +17,6 @@ Next, you have two authentication options: ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/yandex @langchain/core diff --git a/src/oss/javascript/integrations/memory/astradb.mdx b/src/oss/javascript/integrations/memory/astradb.mdx index 5f7f5994e..4238b2af8 100644 --- a/src/oss/javascript/integrations/memory/astradb.mdx +++ 
b/src/oss/javascript/integrations/memory/astradb.mdx @@ -11,9 +11,6 @@ You need to install the Astra DB TS client: ```bash npm npm install @datastax/astra-db-ts ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/aurora_dsql.mdx b/src/oss/javascript/integrations/memory/aurora_dsql.mdx index aa070604c..2378c74c9 100644 --- a/src/oss/javascript/integrations/memory/aurora_dsql.mdx +++ b/src/oss/javascript/integrations/memory/aurora_dsql.mdx @@ -14,9 +14,6 @@ This is very similar to the PostgreSQL integration with a few differences to mak Go to you AWS Console and create an Aurora DSQL Cluster, https://console.aws.amazon.com/dsql/clusters -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core pg @aws-sdk/dsql-signer diff --git a/src/oss/javascript/integrations/memory/azure_cosmos_mongo_vcore.mdx b/src/oss/javascript/integrations/memory/azure_cosmos_mongo_vcore.mdx index cd02d115f..6be637873 100644 --- a/src/oss/javascript/integrations/memory/azure_cosmos_mongo_vcore.mdx +++ b/src/oss/javascript/integrations/memory/azure_cosmos_mongo_vcore.mdx @@ -12,9 +12,6 @@ You'll first need to install the [`@langchain/azure-cosmosdb`](https://www.npmjs ```bash npm npm install @langchain/azure-cosmosdb @langchain/core ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/azure_cosmosdb_nosql.mdx b/src/oss/javascript/integrations/memory/azure_cosmosdb_nosql.mdx index fd6c0e390..c3c80d2bb 100644 --- a/src/oss/javascript/integrations/memory/azure_cosmosdb_nosql.mdx +++ 
b/src/oss/javascript/integrations/memory/azure_cosmosdb_nosql.mdx @@ -12,9 +12,6 @@ You'll first need to install the [`@langchain/azure-cosmosdb`](https://www.npmjs ```bash npm npm install @langchain/azure-cosmosdb @langchain/core ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/cassandra.mdx b/src/oss/javascript/integrations/memory/cassandra.mdx index cb08a3fb8..7bdff301d 100644 --- a/src/oss/javascript/integrations/memory/cassandra.mdx +++ b/src/oss/javascript/integrations/memory/cassandra.mdx @@ -8,9 +8,6 @@ For longer-term persistence across chat sessions, you can swap out the default i First, install the Cassandra Node.js driver: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install cassandra-driver @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/cloudflare_d1.mdx b/src/oss/javascript/integrations/memory/cloudflare_d1.mdx index aff8e6ccf..c05c3cc3a 100644 --- a/src/oss/javascript/integrations/memory/cloudflare_d1.mdx +++ b/src/oss/javascript/integrations/memory/cloudflare_d1.mdx @@ -13,9 +13,6 @@ For longer-term persistence across chat sessions, you can swap out the default i You'll need to install the LangChain Cloudflare integration package. 
For the below example, we also use Anthropic, but you can use any model you'd like: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/cloudflare @langchain/anthropic @langchain/core diff --git a/src/oss/javascript/integrations/memory/convex.mdx b/src/oss/javascript/integrations/memory/convex.mdx index 5dc148ae4..757deaf0c 100644 --- a/src/oss/javascript/integrations/memory/convex.mdx +++ b/src/oss/javascript/integrations/memory/convex.mdx @@ -50,9 +50,6 @@ export default defineSchema({ Each chat history session stored in Convex must have a unique session id. -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/dynamodb.mdx b/src/oss/javascript/integrations/memory/dynamodb.mdx index 6249742a6..5913f5b1d 100644 --- a/src/oss/javascript/integrations/memory/dynamodb.mdx +++ b/src/oss/javascript/integrations/memory/dynamodb.mdx @@ -11,9 +11,6 @@ First, install the AWS DynamoDB client in your project: ```bash npm npm install @aws-sdk/client-dynamodb ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/file.mdx b/src/oss/javascript/integrations/memory/file.mdx index d09985571..41d29463d 100644 --- a/src/oss/javascript/integrations/memory/file.mdx +++ b/src/oss/javascript/integrations/memory/file.mdx @@ -11,9 +11,6 @@ You'll first need to install the [`@langchain/community`](https://www.npmjs.com/ ```bash npm npm install @langchain/community @langchain/core ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install 
@langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/firestore.mdx b/src/oss/javascript/integrations/memory/firestore.mdx index 7db1859bf..4da37ebdd 100644 --- a/src/oss/javascript/integrations/memory/firestore.mdx +++ b/src/oss/javascript/integrations/memory/firestore.mdx @@ -11,9 +11,6 @@ First, install the Firebase admin package in your project: ```bash npm npm install firebase-admin ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/ipfs_datastore.mdx b/src/oss/javascript/integrations/memory/ipfs_datastore.mdx index 3cf44ab92..4c69d13ec 100644 --- a/src/oss/javascript/integrations/memory/ipfs_datastore.mdx +++ b/src/oss/javascript/integrations/memory/ipfs_datastore.mdx @@ -8,9 +8,6 @@ For a storage backend you can use the IPFS Datastore Chat Memory to wrap an IPFS First, install the integration dependencies: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install cborg interface-datastore it-all @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/mem0_memory.mdx b/src/oss/javascript/integrations/memory/mem0_memory.mdx index 919c25d5d..c0350e436 100644 --- a/src/oss/javascript/integrations/memory/mem0_memory.mdx +++ b/src/oss/javascript/integrations/memory/mem0_memory.mdx @@ -12,9 +12,6 @@ Goto [Mem0 Dashboard](https://app.mem0.ai) to get API keys for Mem0. 
import Mem0 from "/snippets/javascript-integrations/examples/memory/mem0.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core @langchain/community diff --git a/src/oss/javascript/integrations/memory/momento.mdx b/src/oss/javascript/integrations/memory/momento.mdx index 31cf3866a..e7fbda875 100644 --- a/src/oss/javascript/integrations/memory/momento.mdx +++ b/src/oss/javascript/integrations/memory/momento.mdx @@ -19,9 +19,6 @@ To install for **browser/edge workers**: ```bash npm npm install @gomomento/sdk-web ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/mongodb.mdx b/src/oss/javascript/integrations/memory/mongodb.mdx index ed88d9f33..b72903d0c 100644 --- a/src/oss/javascript/integrations/memory/mongodb.mdx +++ b/src/oss/javascript/integrations/memory/mongodb.mdx @@ -25,9 +25,6 @@ You need to install Node MongoDB SDK in your project: ```bash npm npm install -S mongodb ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/motorhead_memory.mdx b/src/oss/javascript/integrations/memory/motorhead_memory.mdx index 8945adc6d..71c5aef4b 100644 --- a/src/oss/javascript/integrations/memory/motorhead_memory.mdx +++ b/src/oss/javascript/integrations/memory/motorhead_memory.mdx @@ -12,9 +12,6 @@ See instructions at [Motörhead](https://github.com/getmetal/motorhead) for runn import Motorhead from "/snippets/javascript-integrations/examples/memory/motorhead.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - 
```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/memory/planetscale.mdx b/src/oss/javascript/integrations/memory/planetscale.mdx index f4a1c50ec..5721fb9ce 100644 --- a/src/oss/javascript/integrations/memory/planetscale.mdx +++ b/src/oss/javascript/integrations/memory/planetscale.mdx @@ -10,9 +10,6 @@ For longer-term persistence across chat sessions, you can swap out the default i You will need to install [@planetscale/database](https://github.com/planetscale/database-js) in your project: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @planetscale/database @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/postgres.mdx b/src/oss/javascript/integrations/memory/postgres.mdx index 3015db70a..92b7ba633 100644 --- a/src/oss/javascript/integrations/memory/postgres.mdx +++ b/src/oss/javascript/integrations/memory/postgres.mdx @@ -8,9 +8,6 @@ For longer-term persistence across chat sessions, you can swap out the default i First install the [node-postgres](https://node-postgres.com/) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core pg diff --git a/src/oss/javascript/integrations/memory/redis.mdx b/src/oss/javascript/integrations/memory/redis.mdx index 78f30cda3..18d73c9e7 100644 --- a/src/oss/javascript/integrations/memory/redis.mdx +++ b/src/oss/javascript/integrations/memory/redis.mdx @@ -8,9 +8,6 @@ For longer-term persistence across chat sessions, you can swap out the default i You will need to install [node-redis](https://github.com/redis/node-redis) in your project: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai 
@langchain/community @langchain/core redis diff --git a/src/oss/javascript/integrations/memory/upstash_redis.mdx b/src/oss/javascript/integrations/memory/upstash_redis.mdx index 0df192016..a1005e785 100644 --- a/src/oss/javascript/integrations/memory/upstash_redis.mdx +++ b/src/oss/javascript/integrations/memory/upstash_redis.mdx @@ -11,9 +11,6 @@ For longer-term persistence across chat sessions, you can swap out the default i You will need to install [@upstash/redis](https://github.com/upstash/upstash-redis) in your project: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core @upstash/redis diff --git a/src/oss/javascript/integrations/memory/xata.mdx b/src/oss/javascript/integrations/memory/xata.mdx index 22b417f31..2636cc026 100644 --- a/src/oss/javascript/integrations/memory/xata.mdx +++ b/src/oss/javascript/integrations/memory/xata.mdx @@ -39,9 +39,6 @@ Each chat history session stored in Xata database must have a unique id. In this example, the `getXataClient()` function is used to create a new Xata client based on the environment variables. However, we recommend using the code generated by the `xata init` command, in which case you only need to import the `getXataClient()` function from the generated `xata.ts` file. 
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/zep_memory.mdx b/src/oss/javascript/integrations/memory/zep_memory.mdx index b4b9d3516..cda7eb30d 100644 --- a/src/oss/javascript/integrations/memory/zep_memory.mdx +++ b/src/oss/javascript/integrations/memory/zep_memory.mdx @@ -31,9 +31,6 @@ Zep allows you to be more intentional about constructing your prompt: See the instructions from [Zep Open Source](https://github.com/getzep/zep) for running the server locally or through an automated hosting provider. -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/memory/zep_memory_cloud.mdx b/src/oss/javascript/integrations/memory/zep_memory_cloud.mdx index 2a63508df..37cc6da42 100644 --- a/src/oss/javascript/integrations/memory/zep_memory_cloud.mdx +++ b/src/oss/javascript/integrations/memory/zep_memory_cloud.mdx @@ -39,9 +39,6 @@ Follow the [Zep Cloud Typescript SDK Installation Guide](https://help.getzep.com You'll need your Zep Cloud Project API Key to use the Zep Cloud Memory. See the [Zep Cloud docs](https://help.getzep.com/projects) for more information. 
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @getzep/zep-cloud @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/providers/anthropic.mdx b/src/oss/javascript/integrations/providers/anthropic.mdx index 8f7100927..857f6798f 100644 --- a/src/oss/javascript/integrations/providers/anthropic.mdx +++ b/src/oss/javascript/integrations/providers/anthropic.mdx @@ -20,9 +20,6 @@ Anthropic models require any system messages to be the first one in your prompts `ChatAnthropic` is a subclass of LangChain's `ChatModel`, meaning it works best with `ChatPromptTemplate`. You can import this wrapper with the following code: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/anthropic @langchain/core diff --git a/src/oss/javascript/integrations/providers/google.mdx b/src/oss/javascript/integrations/providers/google.mdx index 68d37b8a4..9df9374b8 100644 --- a/src/oss/javascript/integrations/providers/google.mdx +++ b/src/oss/javascript/integrations/providers/google.mdx @@ -12,12 +12,9 @@ and [AI Studio](https://aistudio.google.com/) Access Gemini models such as `gemini-1.5-pro` and `gemini-2.0-flex` through the [`ChatGoogleGenerativeAI`](/oss/integrations/chat/google_generativeai), or if using VertexAI, via the [`ChatVertexAI`](/oss/integrations/chat/google_vertex_ai) class. 
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/google-genai @langchain/core ``` @@ -76,8 +73,6 @@ const res = await visionModel.invoke(input2); - - ```bash npm npm install @langchain/google-vertexai @langchain/core ``` diff --git a/src/oss/javascript/integrations/providers/microsoft.mdx b/src/oss/javascript/integrations/providers/microsoft.mdx index 7ba99eb07..186bd97e0 100644 --- a/src/oss/javascript/integrations/providers/microsoft.mdx +++ b/src/oss/javascript/integrations/providers/microsoft.mdx @@ -46,9 +46,6 @@ AZURE_OPENAI_API_VERSION="2024-02-01" -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core @@ -79,8 +76,6 @@ import AzureOpenaiEmbeddings from "/snippets/javascript-integrations/examples/mo > [Azure AI Search](https://azure.microsoft.com/products/ai-services/ai-search) (formerly known as Azure Search and Azure Cognitive Search) is a distributed, RESTful search engine optimized for speed and relevance on production-scale workloads on Azure. It supports also vector search using the [k-nearest neighbor](https://en.wikipedia.org/wiki/Nearest_neighbor_search) (kNN) algorithm and also [semantic search](https://learn.microsoft.com/azure/search/semantic-search-overview). - - ```bash npm npm install -S @langchain/community @langchain/core @azure/search-documents ``` @@ -93,8 +88,6 @@ import { AzureAISearchVectorStore } from "@langchain/community/vectorstores/azur > [Azure Cosmos DB for NoSQL](https://learn.microsoft.com/azure/cosmos-db/nosql/) provides support for querying items with flexible schemas and native support for JSON. It now offers vector indexing and search. This feature is designed to handle high-dimensional vectors, enabling efficient and accurate vector search at any scale. 
You can now store vectors directly in the documents alongside your data. Each document in your database can contain not only traditional schema-free data, but also high-dimensional vectors as other properties of the documents. - - ```bash npm npm install @langchain/azure-cosmosdb @langchain/core ``` @@ -107,8 +100,6 @@ import { AzureCosmosDBNoSQLVectorStore } from "@langchain/azure-cosmosdb"; > [Azure Cosmos DB for MongoDB vCore](https://learn.microsoft.com/azure/cosmos-db/mongodb/vcore/) makes it easy to create a database with full native MongoDB support. You can apply your MongoDB experience and continue to use your favorite MongoDB drivers, SDKs, and tools by pointing your application to the API for MongoDB vCore account’s connection string. Use vector search in Azure Cosmos DB for MongoDB vCore to seamlessly integrate your AI-based applications with your data that’s stored in Azure Cosmos DB. - - ```bash npm npm install @langchain/azure-cosmosdb @langchain/core ``` @@ -123,8 +114,6 @@ import { AzureCosmosDBMongoDBVectorStore } from "@langchain/azure-cosmosdb"; > The Semantic Cache feature is supported with Azure Cosmos DB for NoSQL integration, enabling users to retrieve cached responses based on semantic similarity between the user input and previously cached results. It leverages [AzureCosmosDBNoSQLVectorStore](/oss/integrations/vectorstores/azure_cosmosdb_nosql), which stores vector embeddings of cached prompts. These embeddings enable similarity-based searches, allowing the system to retrieve relevant cached results. - - ```bash npm npm install @langchain/azure-cosmosdb @langchain/core ``` @@ -178,8 +167,6 @@ import { AzureCosmosDBMongoChatMessageHistory } from "@langchain/azure-cosmosdb" - Storing data for backup and restore, disaster recovery, and archiving. - Storing data for analysis by an on-premises or Azure-hosted service. 
- - ```bash npm npm install @langchain/community @langchain/core @azure/storage-blob ``` @@ -199,8 +186,6 @@ import { AzureBlobStorageFileLoader } from "@langchain/community/document_loader > [Azure Container Apps dynamic sessions](https://learn.microsoft.com/azure/container-apps/sessions) provide fast access to secure sandboxed environments that are ideal for running code or applications that require strong isolation from other workloads. - - ```bash npm npm install @langchain/azure-dynamic-sessions @langchain/core ``` diff --git a/src/oss/javascript/integrations/providers/openai.mdx b/src/oss/javascript/integrations/providers/openai.mdx index 77127e591..138698c54 100644 --- a/src/oss/javascript/integrations/providers/openai.mdx +++ b/src/oss/javascript/integrations/providers/openai.mdx @@ -30,9 +30,6 @@ import { ChatOpenAI } from "@langchain/openai"; See a [usage example](/oss/integrations/llms/openai). -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/retrievers/chaindesk-retriever.mdx b/src/oss/javascript/integrations/retrievers/chaindesk-retriever.mdx index 21f9e050d..ec4bbeae2 100644 --- a/src/oss/javascript/integrations/retrievers/chaindesk-retriever.mdx +++ b/src/oss/javascript/integrations/retrievers/chaindesk-retriever.mdx @@ -6,9 +6,6 @@ This example shows how to use the Chaindesk Retriever in a retrieval chain to re ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/retrievers/dria.mdx b/src/oss/javascript/integrations/retrievers/dria.mdx index 4de3c8e61..87f52ac4d 100644 --- a/src/oss/javascript/integrations/retrievers/dria.mdx +++ b/src/oss/javascript/integrations/retrievers/dria.mdx @@ -22,9 +22,6 @@ Dria retriever 
exposes the underlying [Dria client](https://npmjs.com/package/dr ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install dria @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/retrievers/hyde.mdx b/src/oss/javascript/integrations/retrievers/hyde.mdx index 4625c0de2..bf77cf8d8 100644 --- a/src/oss/javascript/integrations/retrievers/hyde.mdx +++ b/src/oss/javascript/integrations/retrievers/hyde.mdx @@ -12,9 +12,6 @@ In order to use HyDE, we therefore need to provide a base embedding model, as we import Hyde from "/snippets/javascript-integrations/examples/retrievers/hyde.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/retrievers/metal-retriever.mdx b/src/oss/javascript/integrations/retrievers/metal-retriever.mdx index 185b6c747..1fe54c4bf 100644 --- a/src/oss/javascript/integrations/retrievers/metal-retriever.mdx +++ b/src/oss/javascript/integrations/retrievers/metal-retriever.mdx @@ -6,9 +6,6 @@ This example shows how to use the Metal Retriever in a retrieval chain to retrie ## Setup -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm i @getmetal/metal-sdk @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/retrievers/supabase-hybrid.mdx b/src/oss/javascript/integrations/retrievers/supabase-hybrid.mdx index 534c3e3bc..13466381f 100644 --- a/src/oss/javascript/integrations/retrievers/supabase-hybrid.mdx +++ b/src/oss/javascript/integrations/retrievers/supabase-hybrid.mdx @@ -73,9 +73,6 @@ $$ language plpgsql; ``` ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai 
@langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/retrievers/time-weighted-retriever.mdx b/src/oss/javascript/integrations/retrievers/time-weighted-retriever.mdx index f279d2cf0..3e49befb6 100644 --- a/src/oss/javascript/integrations/retrievers/time-weighted-retriever.mdx +++ b/src/oss/javascript/integrations/retrievers/time-weighted-retriever.mdx @@ -21,9 +21,6 @@ It is important to note that due to required metadata, all documents must be add import TimeWeightedRetriever from "/snippets/javascript-integrations/examples/retrievers/time-weighted-retriever.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/retrievers/zep-cloud-retriever.mdx b/src/oss/javascript/integrations/retrievers/zep-cloud-retriever.mdx index e3407ed51..b366ee0d6 100644 --- a/src/oss/javascript/integrations/retrievers/zep-cloud-retriever.mdx +++ b/src/oss/javascript/integrations/retrievers/zep-cloud-retriever.mdx @@ -18,9 +18,6 @@ You'll need your Zep Cloud Project API Key to use the ZepCloudRetriever. 
See the ## Setup -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm i @getzep/zep-cloud @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/retrievers/zep-retriever.mdx b/src/oss/javascript/integrations/retrievers/zep-retriever.mdx index 243121c4d..4f85db281 100644 --- a/src/oss/javascript/integrations/retrievers/zep-retriever.mdx +++ b/src/oss/javascript/integrations/retrievers/zep-retriever.mdx @@ -16,9 +16,6 @@ Follow the [Zep Open Source Quickstart Guide](https://help.getzep.com/quickstart ## Setup -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm i @getzep/zep-js @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/text_embedding/alibaba_tongyi.mdx b/src/oss/javascript/integrations/text_embedding/alibaba_tongyi.mdx index 4840e0d90..e04a38f29 100644 --- a/src/oss/javascript/integrations/text_embedding/alibaba_tongyi.mdx +++ b/src/oss/javascript/integrations/text_embedding/alibaba_tongyi.mdx @@ -10,9 +10,6 @@ You'll need to sign up for an Alibaba API key and set it as an environment varia Then, you'll need to install the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/text_embedding/baidu_qianfan.mdx b/src/oss/javascript/integrations/text_embedding/baidu_qianfan.mdx index 01a47bcc4..6e04d6cb9 100644 --- a/src/oss/javascript/integrations/text_embedding/baidu_qianfan.mdx +++ b/src/oss/javascript/integrations/text_embedding/baidu_qianfan.mdx @@ -12,9 +12,6 @@ Please set the acquired API key as an environment variable named BAIDU_API_KEY, Then, you'll need to install the 
[`@langchain/baidu-qianfan`](https://www.npmjs.com/package/@langchain/baidu-qianfan) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/baidu-qianfan @langchain/core diff --git a/src/oss/javascript/integrations/text_embedding/deepinfra.mdx b/src/oss/javascript/integrations/text_embedding/deepinfra.mdx index 82ac4289e..0e81f2216 100644 --- a/src/oss/javascript/integrations/text_embedding/deepinfra.mdx +++ b/src/oss/javascript/integrations/text_embedding/deepinfra.mdx @@ -8,9 +8,6 @@ The `DeepInfraEmbeddings` class utilizes the DeepInfra API to generate embedding Install the `@langchain/community` package as shown below: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm i @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/text_embedding/hugging_face_inference.mdx b/src/oss/javascript/integrations/text_embedding/hugging_face_inference.mdx index e92cbc252..fddbbe9db 100644 --- a/src/oss/javascript/integrations/text_embedding/hugging_face_inference.mdx +++ b/src/oss/javascript/integrations/text_embedding/hugging_face_inference.mdx @@ -8,9 +8,6 @@ This Embeddings integration uses the HuggingFace Inference API to generate embed You'll first need to install the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package and the required peer dependency: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core @huggingface/inference@4 diff --git a/src/oss/javascript/integrations/text_embedding/jina.mdx b/src/oss/javascript/integrations/text_embedding/jina.mdx index ec1520ec7..2f6779de9 100644 --- a/src/oss/javascript/integrations/text_embedding/jina.mdx +++ b/src/oss/javascript/integrations/text_embedding/jina.mdx @@ -8,9 
+8,6 @@ The `JinaEmbeddings` class utilizes the Jina API to generate embeddings for give Install the `@langchain/community` package as shown below: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm i @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/text_embedding/llama_cpp.mdx b/src/oss/javascript/integrations/text_embedding/llama_cpp.mdx index 55f77b296..99189b2fa 100644 --- a/src/oss/javascript/integrations/text_embedding/llama_cpp.mdx +++ b/src/oss/javascript/integrations/text_embedding/llama_cpp.mdx @@ -17,9 +17,6 @@ You'll need to install major version `3` of the [node-llama-cpp](https://github. ```bash npm npm install -S node-llama-cpp@3 ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/text_embedding/mixedbread_ai.mdx b/src/oss/javascript/integrations/text_embedding/mixedbread_ai.mdx index 9eeda9e36..f1b61e4af 100644 --- a/src/oss/javascript/integrations/text_embedding/mixedbread_ai.mdx +++ b/src/oss/javascript/integrations/text_embedding/mixedbread_ai.mdx @@ -8,9 +8,6 @@ The `MixedbreadAIEmbeddings` class uses the [Mixedbread AI](https://mixedbread.a To install the `@langchain/mixedbread-ai` package, use the following command: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/mixedbread-ai @langchain/core diff --git a/src/oss/javascript/integrations/text_embedding/nomic.mdx b/src/oss/javascript/integrations/text_embedding/nomic.mdx index 7c1c72b4a..316ff12e8 100644 --- a/src/oss/javascript/integrations/text_embedding/nomic.mdx +++ b/src/oss/javascript/integrations/text_embedding/nomic.mdx @@ -11,9 +11,6 @@ You can sign up for a Nomic account and create an API key 
[here](https://atlas.n You'll first need to install the [`@langchain/nomic`](https://www.npmjs.com/package/@langchain/nomic) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/nomic @langchain/core diff --git a/src/oss/javascript/integrations/text_embedding/premai.mdx b/src/oss/javascript/integrations/text_embedding/premai.mdx index 3f39b7154..a7cd28845 100644 --- a/src/oss/javascript/integrations/text_embedding/premai.mdx +++ b/src/oss/javascript/integrations/text_embedding/premai.mdx @@ -10,9 +10,6 @@ In order to use the Prem API you'll need an API key. You can sign up for a Prem You'll first need to install the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/text_embedding/tencent_hunyuan.mdx b/src/oss/javascript/integrations/text_embedding/tencent_hunyuan.mdx index 07d29db29..fcc789af3 100644 --- a/src/oss/javascript/integrations/text_embedding/tencent_hunyuan.mdx +++ b/src/oss/javascript/integrations/text_embedding/tencent_hunyuan.mdx @@ -10,9 +10,6 @@ The `TencentHunyuanEmbeddings` class uses the Tencent Hunyuan API to generate em 2. Create SecretID & SecretKey [here](https://console.cloud.tencent.com/cam/capi). 3. Set SecretID and SecretKey as environment variables named `TENCENT_SECRET_ID` and `TENCENT_SECRET_KEY`, respectively. 
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/text_embedding/transformers.mdx b/src/oss/javascript/integrations/text_embedding/transformers.mdx index ef35f6849..307c1bf8d 100644 --- a/src/oss/javascript/integrations/text_embedding/transformers.mdx +++ b/src/oss/javascript/integrations/text_embedding/transformers.mdx @@ -20,9 +20,6 @@ import the embeddings from `"@langchain/community/embeddings/hf_transformers"` b ```bash npm npm install @huggingface/transformers ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/text_embedding/zhipuai.mdx b/src/oss/javascript/integrations/text_embedding/zhipuai.mdx index d33b23ad0..401c56c2f 100644 --- a/src/oss/javascript/integrations/text_embedding/zhipuai.mdx +++ b/src/oss/javascript/integrations/text_embedding/zhipuai.mdx @@ -12,9 +12,6 @@ https://open.bigmodel.cn Then, you'll need to install the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core jsonwebtoken diff --git a/src/oss/javascript/integrations/tools/aiplugin-tool.mdx b/src/oss/javascript/integrations/tools/aiplugin-tool.mdx index 6ed8634e0..b2494b5b1 100644 --- a/src/oss/javascript/integrations/tools/aiplugin-tool.mdx +++ b/src/oss/javascript/integrations/tools/aiplugin-tool.mdx @@ -14,9 +14,6 @@ Note 1: This currently only works for plugins with no auth. Note 2: There are almost certainly other ways to do this, this is just a first pass. If you have better ideas, please open a PR! 
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/tools/azure_dynamic_sessions.mdx b/src/oss/javascript/integrations/tools/azure_dynamic_sessions.mdx index f36233ef6..feb6ca9f2 100644 --- a/src/oss/javascript/integrations/tools/azure_dynamic_sessions.mdx +++ b/src/oss/javascript/integrations/tools/azure_dynamic_sessions.mdx @@ -10,9 +10,6 @@ You can learn more about Azure Container Apps dynamic sessions and its code inte You'll first need to install the [`@langchain/azure-dynamic-sessions`](https://www.npmjs.com/package/@langchain/azure-dynamic-sessions) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/azure-dynamic-sessions @langchain/core diff --git a/src/oss/javascript/integrations/tools/connery.mdx b/src/oss/javascript/integrations/tools/connery.mdx index 3bc55a6ec..ebb49cee7 100644 --- a/src/oss/javascript/integrations/tools/connery.mdx +++ b/src/oss/javascript/integrations/tools/connery.mdx @@ -46,8 +46,6 @@ To use the Connery Action Tool you need to install the following official peer d npm install @langchain/community @langchain/core ``` - - ### Usage In the example below, we fetch action by its ID from the Connery Runner and then call it with the specified parameters. 
diff --git a/src/oss/javascript/integrations/tools/connery_toolkit.mdx b/src/oss/javascript/integrations/tools/connery_toolkit.mdx index 3a22dcb76..a5ccfd360 100644 --- a/src/oss/javascript/integrations/tools/connery_toolkit.mdx +++ b/src/oss/javascript/integrations/tools/connery_toolkit.mdx @@ -46,8 +46,6 @@ To use the Connery Toolkit you need to install the following official peer depen npm install @langchain/openai @langchain/community @langchain/core ``` - - ### Usage In the example below, we create an agent that uses two Connery Actions to summarize a public webpage and send the summary by email: diff --git a/src/oss/javascript/integrations/tools/dalle.mdx b/src/oss/javascript/integrations/tools/dalle.mdx index cc3de1121..54c29f341 100644 --- a/src/oss/javascript/integrations/tools/dalle.mdx +++ b/src/oss/javascript/integrations/tools/dalle.mdx @@ -13,9 +13,6 @@ and then set the OPENAI_API_KEY environment variable to the key you just created To use the Dall-E Tool you need to install the LangChain OpenAI integration package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/tools/discord_tool.mdx b/src/oss/javascript/integrations/tools/discord_tool.mdx index 15ee1e65d..78aab93bf 100644 --- a/src/oss/javascript/integrations/tools/discord_tool.mdx +++ b/src/oss/javascript/integrations/tools/discord_tool.mdx @@ -16,9 +16,6 @@ npm install discord.js import DiscordTool from "/snippets/javascript-integrations/examples/tools/discord_tool.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/tools/gmail.mdx b/src/oss/javascript/integrations/tools/gmail.mdx index 0a19de6be..879b9b323 100644 --- a/src/oss/javascript/integrations/tools/gmail.mdx 
+++ b/src/oss/javascript/integrations/tools/gmail.mdx @@ -20,9 +20,6 @@ You can authenticate via two methods: To use the Gmail Tool you need to install the following official peer dependency: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core googleapis diff --git a/src/oss/javascript/integrations/tools/google_calendar.mdx b/src/oss/javascript/integrations/tools/google_calendar.mdx index 1380485ec..3ddca1149 100644 --- a/src/oss/javascript/integrations/tools/google_calendar.mdx +++ b/src/oss/javascript/integrations/tools/google_calendar.mdx @@ -15,9 +15,6 @@ npm install googleapis import GoogleCalendar from "/snippets/javascript-integrations/examples/tools/google_calendar.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core @langchain/community @langchain/langgraph diff --git a/src/oss/javascript/integrations/tools/google_places.mdx b/src/oss/javascript/integrations/tools/google_places.mdx index bf4777f89..06695cbc2 100644 --- a/src/oss/javascript/integrations/tools/google_places.mdx +++ b/src/oss/javascript/integrations/tools/google_places.mdx @@ -13,9 +13,6 @@ as `process.env.GOOGLE_PLACES_API_KEY` or pass it in as an `apiKey` constructor ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/tools/google_routes.mdx b/src/oss/javascript/integrations/tools/google_routes.mdx index 61ee27370..8a03e0540 100644 --- a/src/oss/javascript/integrations/tools/google_routes.mdx +++ b/src/oss/javascript/integrations/tools/google_routes.mdx @@ -13,9 +13,6 @@ as `process.env.GOOGLE_ROUTES_API_KEY` or pass it in as an `apiKey` 
constructor ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/tools/google_trends.mdx b/src/oss/javascript/integrations/tools/google_trends.mdx index ec5151632..ae0c7335b 100644 --- a/src/oss/javascript/integrations/tools/google_trends.mdx +++ b/src/oss/javascript/integrations/tools/google_trends.mdx @@ -20,9 +20,6 @@ Then, set your API key as `process.env.SERPAPI_API_KEY` or pass it in as an `api ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/tools/ibm.mdx b/src/oss/javascript/integrations/tools/ibm.mdx index 6119151eb..4e0fad549 100644 --- a/src/oss/javascript/integrations/tools/ibm.mdx +++ b/src/oss/javascript/integrations/tools/ibm.mdx @@ -34,9 +34,6 @@ process.env.LANGSMITH_API_KEY="your-api-key" This toolkit lives in the `@langchain/community` package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/tools/jigsawstack.mdx b/src/oss/javascript/integrations/tools/jigsawstack.mdx index 7f7a9b7ff..e58725dae 100644 --- a/src/oss/javascript/integrations/tools/jigsawstack.mdx +++ b/src/oss/javascript/integrations/tools/jigsawstack.mdx @@ -27,9 +27,6 @@ export JIGSAWSTACK_API_KEY="your-api-key" ## Usage, standalone -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai diff --git a/src/oss/javascript/integrations/tools/json.mdx b/src/oss/javascript/integrations/tools/json.mdx index f0667e4f2..5d2bccee3 100644 --- 
a/src/oss/javascript/integrations/tools/json.mdx +++ b/src/oss/javascript/integrations/tools/json.mdx @@ -4,9 +4,6 @@ title: JSON Agent Toolkit This example shows how to load and use an agent with a JSON toolkit. -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/tools/lambda_agent.mdx b/src/oss/javascript/integrations/tools/lambda_agent.mdx index 0cb3a3b96..aecb74fdf 100644 --- a/src/oss/javascript/integrations/tools/lambda_agent.mdx +++ b/src/oss/javascript/integrations/tools/lambda_agent.mdx @@ -17,9 +17,6 @@ This quick start will demonstrate how an Agent could use a Lambda function to se - If you have not run [`aws configure`](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html) via the AWS CLI, the `region`, `accessKeyId`, and `secretAccessKey` must be provided to the AWSLambda constructor. - The IAM role corresponding to those credentials must have permission to invoke the lambda function. 
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/tools/pyinterpreter.mdx b/src/oss/javascript/integrations/tools/pyinterpreter.mdx index 149fc35c7..937853daa 100644 --- a/src/oss/javascript/integrations/tools/pyinterpreter.mdx +++ b/src/oss/javascript/integrations/tools/pyinterpreter.mdx @@ -13,9 +13,6 @@ This can be useful in combination with an LLM that can generate code to perform import Pyinterpreter from "/snippets/javascript-integrations/examples/tools/pyinterpreter.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/tools/searchapi.mdx b/src/oss/javascript/integrations/tools/searchapi.mdx index 574f63355..eae30864b 100644 --- a/src/oss/javascript/integrations/tools/searchapi.mdx +++ b/src/oss/javascript/integrations/tools/searchapi.mdx @@ -12,9 +12,6 @@ Input should be a search query. 
import SearchapiGoogleNews from "/snippets/javascript-integrations/examples/tools/searchapi_google_news.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/tools/searxng.mdx b/src/oss/javascript/integrations/tools/searxng.mdx index d45ee3926..606fc97e9 100644 --- a/src/oss/javascript/integrations/tools/searxng.mdx +++ b/src/oss/javascript/integrations/tools/searxng.mdx @@ -10,9 +10,6 @@ A wrapper around the SearxNG API, this tool is useful for performing meta-search import SearxngSearch from "/snippets/javascript-integrations/examples/tools/searxng_search.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/tools/sfn_agent.mdx b/src/oss/javascript/integrations/tools/sfn_agent.mdx index f19e0655a..563e41bc5 100644 --- a/src/oss/javascript/integrations/tools/sfn_agent.mdx +++ b/src/oss/javascript/integrations/tools/sfn_agent.mdx @@ -17,9 +17,6 @@ npm install @aws-sdk/client-sfn ``` ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/tools/webbrowser.mdx b/src/oss/javascript/integrations/tools/webbrowser.mdx index 54554d11c..7fab8088d 100644 --- a/src/oss/javascript/integrations/tools/webbrowser.mdx +++ b/src/oss/javascript/integrations/tools/webbrowser.mdx @@ -24,9 +24,6 @@ npm install cheerio axios import Webbrowser from "/snippets/javascript-integrations/examples/tools/webbrowser.mdx"; -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai 
@langchain/core diff --git a/src/oss/javascript/integrations/tools/zapier_agent.mdx b/src/oss/javascript/integrations/tools/zapier_agent.mdx index fc6bfc857..a1367830d 100644 --- a/src/oss/javascript/integrations/tools/zapier_agent.mdx +++ b/src/oss/javascript/integrations/tools/zapier_agent.mdx @@ -26,9 +26,6 @@ Review [auth docs](https://docs.zapier.com/platform/build/auth) for more details The example below demonstrates how to use the Zapier integration as an Agent: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/analyticdb.mdx b/src/oss/javascript/integrations/vectorstores/analyticdb.mdx index fcd24c1b1..845aa4e09 100644 --- a/src/oss/javascript/integrations/vectorstores/analyticdb.mdx +++ b/src/oss/javascript/integrations/vectorstores/analyticdb.mdx @@ -30,9 +30,6 @@ And we need [pg-copy-streams](https://github.com/brianc/node-pg-copy-streams) to ```bash npm npm install -S pg-copy-streams ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/astradb.mdx b/src/oss/javascript/integrations/vectorstores/astradb.mdx index 7c05a80e9..2cb07e8e7 100644 --- a/src/oss/javascript/integrations/vectorstores/astradb.mdx +++ b/src/oss/javascript/integrations/vectorstores/astradb.mdx @@ -28,9 +28,6 @@ Where `ASTRA_DB_COLLECTION` is the desired name of your collection 6. 
Install the Astra TS Client & the LangChain community package -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @datastax/astra-db-ts @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/azure_aisearch.mdx b/src/oss/javascript/integrations/vectorstores/azure_aisearch.mdx index fba2155cb..e75212e4f 100644 --- a/src/oss/javascript/integrations/vectorstores/azure_aisearch.mdx +++ b/src/oss/javascript/integrations/vectorstores/azure_aisearch.mdx @@ -12,9 +12,6 @@ Learn how to leverage the vector search capabilities of Azure AI Search from [th You'll first need to install the `@azure/search-documents` SDK and the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install -S @langchain/community @langchain/core @azure/search-documents diff --git a/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_mongodb.mdx b/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_mongodb.mdx index 6c404c0e4..e11bb50bb 100644 --- a/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_mongodb.mdx +++ b/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_mongodb.mdx @@ -12,9 +12,6 @@ Learn how to leverage the vector search capabilities of Azure Cosmos DB for Mong You'll first need to install the [`@langchain/azure-cosmosdb`](https://www.npmjs.com/package/@langchain/azure-cosmosdb) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/azure-cosmosdb @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_nosql.mdx b/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_nosql.mdx index 12c4d45aa..6ab6bf83c 100644 --- 
a/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_nosql.mdx +++ b/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_nosql.mdx @@ -10,9 +10,6 @@ Learn how to leverage the vector search capabilities of Azure Cosmos DB for NoSQ You'll first need to install the [`@langchain/azure-cosmosdb`](https://www.npmjs.com/package/@langchain/azure-cosmosdb) package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/azure-cosmosdb @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/cassandra.mdx b/src/oss/javascript/integrations/vectorstores/cassandra.mdx index 1f925ff7d..4a8af6972 100644 --- a/src/oss/javascript/integrations/vectorstores/cassandra.mdx +++ b/src/oss/javascript/integrations/vectorstores/cassandra.mdx @@ -16,9 +16,6 @@ The [latest version]( ```bash npm npm install cassandra-driver @langchain/community @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/clickhouse.mdx b/src/oss/javascript/integrations/vectorstores/clickhouse.mdx index 3ef863bb7..e6f517ff8 100644 --- a/src/oss/javascript/integrations/vectorstores/clickhouse.mdx +++ b/src/oss/javascript/integrations/vectorstores/clickhouse.mdx @@ -21,9 +21,6 @@ You will need to install the following peer dependencies: ```bash npm npm install -S @clickhouse/client mysql2 ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/closevector.mdx b/src/oss/javascript/integrations/vectorstores/closevector.mdx index 70603cd32..1fbb5b51e 100644 --- a/src/oss/javascript/integrations/vectorstores/closevector.mdx +++ b/src/oss/javascript/integrations/vectorstores/closevector.mdx @@ -22,9 +22,6 @@ npm install -S closevector-web ```bash npm npm install -S 
closevector-node ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/cloudflare_vectorize.mdx b/src/oss/javascript/integrations/vectorstores/cloudflare_vectorize.mdx index 329b27e5d..36ea64f4a 100644 --- a/src/oss/javascript/integrations/vectorstores/cloudflare_vectorize.mdx +++ b/src/oss/javascript/integrations/vectorstores/cloudflare_vectorize.mdx @@ -32,9 +32,6 @@ index_name = "" Finally, you'll need to install the LangChain Cloudflare integration package: -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/cloudflare @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/convex.mdx b/src/oss/javascript/integrations/vectorstores/convex.mdx index 17a3b5958..21768bc5c 100644 --- a/src/oss/javascript/integrations/vectorstores/convex.mdx +++ b/src/oss/javascript/integrations/vectorstores/convex.mdx @@ -44,9 +44,6 @@ export default defineSchema({ ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/hanavector.mdx b/src/oss/javascript/integrations/vectorstores/hanavector.mdx index 931da9bf6..76906b5d2 100644 --- a/src/oss/javascript/integrations/vectorstores/hanavector.mdx +++ b/src/oss/javascript/integrations/vectorstores/hanavector.mdx @@ -8,9 +8,6 @@ title: SAP HANA Cloud Vector Engine You'll first need to install either the [`@sap/hana-client`](https://www.npmjs.com/package/@sap/hana-client) or the [`hdb`](https://www.npmjs.com/package/hdb) package, and the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package: -import 
IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install -S @langchain/community @langchain/core @sap/hana-client diff --git a/src/oss/javascript/integrations/vectorstores/lancedb.mdx b/src/oss/javascript/integrations/vectorstores/lancedb.mdx index 9a60bb609..013be8b5c 100644 --- a/src/oss/javascript/integrations/vectorstores/lancedb.mdx +++ b/src/oss/javascript/integrations/vectorstores/lancedb.mdx @@ -13,9 +13,6 @@ Install the [LanceDB](https://github.com/lancedb/lancedb) [Node.js bindings](htt ```bash npm npm install -S @lancedb/lancedb ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/libsql.mdx b/src/oss/javascript/integrations/vectorstores/libsql.mdx index 32b5dbecc..879366514 100644 --- a/src/oss/javascript/integrations/vectorstores/libsql.mdx +++ b/src/oss/javascript/integrations/vectorstores/libsql.mdx @@ -24,9 +24,6 @@ This guide will also use OpenAI embeddings, which require you to install the `@l You can use local SQLite when working with the libSQL vector store, or use a hosted Turso Database. -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @libsql/client @langchain/openai @langchain/community diff --git a/src/oss/javascript/integrations/vectorstores/milvus.mdx b/src/oss/javascript/integrations/vectorstores/milvus.mdx index 148f5d485..ac7517e92 100644 --- a/src/oss/javascript/integrations/vectorstores/milvus.mdx +++ b/src/oss/javascript/integrations/vectorstores/milvus.mdx @@ -42,9 +42,6 @@ Only available on Node.js. 
## Index and query docs -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/momento_vector_index.mdx b/src/oss/javascript/integrations/vectorstores/momento_vector_index.mdx index 395d6ad16..f7df681da 100644 --- a/src/oss/javascript/integrations/vectorstores/momento_vector_index.mdx +++ b/src/oss/javascript/integrations/vectorstores/momento_vector_index.mdx @@ -38,9 +38,6 @@ To sign up and access MVI, visit the [Momento Console](https://console.gomomento ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/myscale.mdx b/src/oss/javascript/integrations/vectorstores/myscale.mdx index d2c686e0a..a249a62b6 100644 --- a/src/oss/javascript/integrations/vectorstores/myscale.mdx +++ b/src/oss/javascript/integrations/vectorstores/myscale.mdx @@ -16,9 +16,6 @@ Only available on Node.js. 2. After launching a cluster, view your `Connection Details` from your cluster's `Actions` menu. You will need the host, port, username, and password. 3. Install the required Node.js peer dependency in your workspace. 
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install -S @langchain/openai @clickhouse/client @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/neo4jvector.mdx b/src/oss/javascript/integrations/vectorstores/neo4jvector.mdx index ebeb01c62..602333033 100644 --- a/src/oss/javascript/integrations/vectorstores/neo4jvector.mdx +++ b/src/oss/javascript/integrations/vectorstores/neo4jvector.mdx @@ -16,9 +16,6 @@ To work with Neo4j Vector Index, you need to install the `neo4j-driver` package: ```bash npm npm install neo4j-driver ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/neon.mdx b/src/oss/javascript/integrations/vectorstores/neon.mdx index 4b1319ed0..81302fa6e 100644 --- a/src/oss/javascript/integrations/vectorstores/neon.mdx +++ b/src/oss/javascript/integrations/vectorstores/neon.mdx @@ -31,9 +31,6 @@ driver to connect to the database. 
```bash npm npm install @neondatabase/serverless ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/opensearch.mdx b/src/oss/javascript/integrations/vectorstores/opensearch.mdx index 99446e7f1..058f5615f 100644 --- a/src/oss/javascript/integrations/vectorstores/opensearch.mdx +++ b/src/oss/javascript/integrations/vectorstores/opensearch.mdx @@ -14,9 +14,6 @@ Langchain.js accepts [@opensearch-project/opensearch](https://opensearch.org/doc ## Setup -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install -S @langchain/openai @langchain/core @opensearch-project/opensearch diff --git a/src/oss/javascript/integrations/vectorstores/prisma.mdx b/src/oss/javascript/integrations/vectorstores/prisma.mdx index 4599eb96b..55d63ff16 100644 --- a/src/oss/javascript/integrations/vectorstores/prisma.mdx +++ b/src/oss/javascript/integrations/vectorstores/prisma.mdx @@ -63,9 +63,6 @@ npx prisma migrate dev ``` ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/rockset.mdx b/src/oss/javascript/integrations/vectorstores/rockset.mdx index 3336f5be8..0588dd509 100644 --- a/src/oss/javascript/integrations/vectorstores/rockset.mdx +++ b/src/oss/javascript/integrations/vectorstores/rockset.mdx @@ -15,9 +15,6 @@ yarn add @rockset/client ### Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/core @langchain/community diff --git a/src/oss/javascript/integrations/vectorstores/singlestore.mdx 
b/src/oss/javascript/integrations/vectorstores/singlestore.mdx index ed4764358..5264e811a 100644 --- a/src/oss/javascript/integrations/vectorstores/singlestore.mdx +++ b/src/oss/javascript/integrations/vectorstores/singlestore.mdx @@ -36,9 +36,6 @@ npm install -S mysql2 ### Standard usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/tigris.mdx b/src/oss/javascript/integrations/vectorstores/tigris.mdx index b90db6b4a..e72251a57 100644 --- a/src/oss/javascript/integrations/vectorstores/tigris.mdx +++ b/src/oss/javascript/integrations/vectorstores/tigris.mdx @@ -31,9 +31,6 @@ Application Keys section of the project. ## Index docs -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install -S @langchain/openai diff --git a/src/oss/javascript/integrations/vectorstores/typeorm.mdx b/src/oss/javascript/integrations/vectorstores/typeorm.mdx index 587c69585..c63424087 100644 --- a/src/oss/javascript/integrations/vectorstores/typeorm.mdx +++ b/src/oss/javascript/integrations/vectorstores/typeorm.mdx @@ -15,9 +15,6 @@ npm install typeorm npm install pg ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/typesense.mdx b/src/oss/javascript/integrations/vectorstores/typesense.mdx index 2fde62a6d..b9f5fb897 100644 --- a/src/oss/javascript/integrations/vectorstores/typesense.mdx +++ b/src/oss/javascript/integrations/vectorstores/typesense.mdx @@ -6,9 +6,6 @@ Vector store that utilizes the Typesense search engine. 
### Basic Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/usearch.mdx b/src/oss/javascript/integrations/vectorstores/usearch.mdx index 27c93f3f0..8189a007c 100644 --- a/src/oss/javascript/integrations/vectorstores/usearch.mdx +++ b/src/oss/javascript/integrations/vectorstores/usearch.mdx @@ -17,9 +17,6 @@ Install the [usearch](https://github.com/unum-cloud/usearch/tree/main/javascript ```bash npm npm install -S usearch ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/vercel_postgres.mdx b/src/oss/javascript/integrations/vectorstores/vercel_postgres.mdx index 99fc259ec..eecdea8da 100644 --- a/src/oss/javascript/integrations/vectorstores/vercel_postgres.mdx +++ b/src/oss/javascript/integrations/vectorstores/vercel_postgres.mdx @@ -14,9 +14,6 @@ To work with Vercel Postgres, you need to install the `@vercel/postgres` package ```bash npm npm install @vercel/postgres ``` -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/voy.mdx b/src/oss/javascript/integrations/vectorstores/voy.mdx index 6758380ad..48f53449f 100644 --- a/src/oss/javascript/integrations/vectorstores/voy.mdx +++ b/src/oss/javascript/integrations/vectorstores/voy.mdx @@ -7,9 +7,6 @@ It's supported in non-Node environments like browsers. 
You can use Voy as a vect ### Install Voy -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai voy-search @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/xata.mdx b/src/oss/javascript/integrations/vectorstores/xata.mdx index 3b1116a31..592fbfac8 100644 --- a/src/oss/javascript/integrations/vectorstores/xata.mdx +++ b/src/oss/javascript/integrations/vectorstores/xata.mdx @@ -35,9 +35,6 @@ and then choose the database you created above. This will also generate a `xata. ## Usage -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/zep.mdx b/src/oss/javascript/integrations/vectorstores/zep.mdx index 6b964a8bb..9f715db35 100644 --- a/src/oss/javascript/integrations/vectorstores/zep.mdx +++ b/src/oss/javascript/integrations/vectorstores/zep.mdx @@ -39,9 +39,6 @@ You must also set your document collection to `isAutoEmbedded === false`. See th ### Example: Creating a ZepVectorStore from Documents & Querying -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ```bash npm npm install @langchain/openai @langchain/community @langchain/core diff --git a/src/oss/javascript/integrations/vectorstores/zep_cloud.mdx b/src/oss/javascript/integrations/vectorstores/zep_cloud.mdx index 431f4e42e..a3e9c2998 100644 --- a/src/oss/javascript/integrations/vectorstores/zep_cloud.mdx +++ b/src/oss/javascript/integrations/vectorstores/zep_cloud.mdx @@ -34,9 +34,6 @@ You'll need your Zep Cloud Project API Key to use the Zep VectorStore. See the [ Zep auto embeds all documents by default, and it's not expecting to receive any embeddings from the user. 
Since LangChain requires passing in a `Embeddings` instance, we pass in `FakeEmbeddings`. -import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx'; - - ### Example: Creating a ZepVectorStore from Documents & Querying diff --git a/src/snippets/javascript-integrations/integration-install-tooltip.mdx b/src/snippets/javascript-integrations/integration-install-tooltip.mdx deleted file mode 100644 index 2abef55b7..000000000 --- a/src/snippets/javascript-integrations/integration-install-tooltip.mdx +++ /dev/null @@ -1,3 +0,0 @@ - -See [this section for general instructions on installing integration packages](https://js.langchain.com/docs/how_to/installation/#installing-integration-packages). - From 3ac6d480214b0759832c411e476e7998336afbbf Mon Sep 17 00:00:00 2001 From: Brody Klapko Date: Mon, 8 Sep 2025 12:14:12 -0700 Subject: [PATCH 2/6] Fix install blocks --- .../javascript/integrations/chat/anthropic.md | 19 ++++++++------- .../javascript/integrations/chat/arcjet.md | 18 +++++++------- src/oss/javascript/integrations/chat/azure.md | 19 ++++++++------- .../integrations/chat/bedrock_converse.md | 18 +++++++------- .../javascript/integrations/chat/cerebras.md | 19 ++++++++------- .../integrations/chat/cloudflare_workersai.md | 18 +++++++------- .../javascript/integrations/chat/cohere.md | 19 ++++++++------- .../javascript/integrations/chat/fireworks.md | 19 ++++++++------- .../integrations/chat/google_vertex_ai.md | 24 ++++++++----------- src/oss/javascript/integrations/chat/groq.md | 19 ++++++++------- src/oss/javascript/integrations/chat/ibm.md | 18 +++++++------- .../javascript/integrations/chat/mistral.md | 18 +++++++------- .../javascript/integrations/chat/novita.md | 17 +++++++------ .../javascript/integrations/chat/ollama.md | 18 +++++++------- .../javascript/integrations/chat/openai.md | 19 ++++++++------- .../integrations/chat/perplexity.md | 18 +++++++------- .../integrations/chat/togetherai.md | 18 
+++++++------- src/oss/javascript/integrations/chat/xai.md | 19 ++++++++------- .../integrations/document_compressors/ibm.md | 17 +++++++------ .../document_loaders/web_loaders/firecrawl.md | 18 +++++++------- .../document_loaders/web_loaders/langsmith.md | 18 +++++++------- .../document_loaders/web_loaders/pdf.md | 18 +++++++------- .../web_loaders/web_cheerio.md | 18 +++++++------- .../javascript/integrations/llms/arcjet.md | 18 +++++++------- .../integrations/llms/cloudflare_workersai.md | 18 +++++++------- .../javascript/integrations/llms/cohere.md | 18 +++++++------- src/oss/javascript/integrations/llms/ibm.md | 18 +++++++------- .../javascript/integrations/llms/together.md | 18 +++++++------- .../integrations/retrievers/azion-edgesql.md | 17 +++++++------ .../retrievers/bedrock-knowledge-bases.md | 17 +++++++------ .../integrations/retrievers/bm25.md | 17 +++++++------ .../javascript/integrations/retrievers/exa.md | 17 +++++++------ .../retrievers/kendra-retriever.md | 17 +++++++------ .../retrievers/self_query/chroma.md | 17 +++++++------ .../retrievers/self_query/hnswlib.md | 17 +++++++------ .../retrievers/self_query/memory.md | 17 +++++++------ .../retrievers/self_query/pinecone.md | 17 +++++++------ .../retrievers/self_query/qdrant.md | 17 +++++++------ .../retrievers/self_query/supabase.md | 17 +++++++------ .../retrievers/self_query/vectara.md | 17 +++++++------ .../retrievers/self_query/weaviate.md | 17 +++++++------ .../integrations/retrievers/tavily.md | 17 +++++++------ .../integrations/stores/in_memory.md | 19 ++++++++------- .../integrations/text_embedding/bedrock.md | 17 +++++++------ .../text_embedding/bytedance_doubao.md | 17 +++++++------ .../text_embedding/cloudflare_ai.md | 17 +++++++------ .../integrations/text_embedding/cohere.md | 17 +++++++------ .../integrations/text_embedding/fireworks.md | 17 +++++++------ .../text_embedding/google_generativeai.md | 17 +++++++------ .../text_embedding/google_vertex_ai.md | 17 +++++++------ 
.../integrations/text_embedding/ibm.md | 18 +++++++------- .../integrations/text_embedding/mistralai.md | 17 +++++++------ .../integrations/text_embedding/ollama.md | 17 +++++++------ .../integrations/text_embedding/openai.md | 17 +++++++------ .../integrations/text_embedding/pinecone.md | 17 +++++++------ .../integrations/text_embedding/togetherai.md | 17 +++++++------ .../integrations/tools/duckduckgo_search.md | 17 +++++++------ .../integrations/tools/exa_search.md | 19 ++++++++------- src/oss/javascript/integrations/tools/ibm.mdx | 14 +++++++---- .../javascript/integrations/tools/serpapi.md | 17 +++++++------ src/oss/javascript/integrations/tools/sql.md | 17 +++++++------ .../integrations/tools/tavily_crawl.md | 17 +++++++------ .../integrations/tools/tavily_extract.md | 17 +++++++------ .../integrations/tools/tavily_map.md | 17 +++++++------ .../integrations/tools/tavily_search.md | 17 +++++++------ .../tools/tavily_search_community.md | 17 +++++++------ .../integrations/tools/vectorstore.md | 17 +++++++------ .../vectorstores/azion-edgesql.md | 17 +++++++------ .../integrations/vectorstores/chroma.md | 17 +++++++------ .../integrations/vectorstores/memory.md | 17 +++++++------ .../integrations/vectorstores/pinecone.md | 17 +++++++------ .../integrations/vectorstores/supabase.md | 17 +++++++------ .../integrations/vectorstores/upstash.md | 17 +++++++------ .../integrations/vectorstores/weaviate.md | 17 +++++++------ 74 files changed, 740 insertions(+), 560 deletions(-) diff --git a/src/oss/javascript/integrations/chat/anthropic.md b/src/oss/javascript/integrations/chat/anthropic.md index 824724409..4ab030baa 100644 --- a/src/oss/javascript/integrations/chat/anthropic.md +++ b/src/oss/javascript/integrations/chat/anthropic.md @@ -45,16 +45,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain `ChatAnthropic` integration lives in the `@langchain/anthropic` package: -```{=mdx} - -import IntegrationInstallTooltip from 
"@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/anthropic @langchain/core - - + +```bash npm +npm install @langchain/anthropic @langchain/core +``` +```bash yarn +yarn add @langchain/anthropic @langchain/core +``` +```bash pnpm +pnpm add @langchain/anthropic @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/arcjet.md b/src/oss/javascript/integrations/chat/arcjet.md index e9550cf19..d38ca7865 100644 --- a/src/oss/javascript/integrations/chat/arcjet.md +++ b/src/oss/javascript/integrations/chat/arcjet.md @@ -20,15 +20,17 @@ The Arcjet Redact object is not a chat model itself, instead it wraps an LLM. It Install the Arcjet Redaction Library: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @arcjet/redact - - + +```bash npm +npm install @arcjet/redact +``` +```bash yarn +yarn add @arcjet/redact +``` +```bash pnpm +pnpm add @arcjet/redact ``` + And install LangChain Community: diff --git a/src/oss/javascript/integrations/chat/azure.md b/src/oss/javascript/integrations/chat/azure.md index c0b66d507..29a93726b 100644 --- a/src/oss/javascript/integrations/chat/azure.md +++ b/src/oss/javascript/integrations/chat/azure.md @@ -56,16 +56,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain AzureChatOpenAI integration lives in the `@langchain/openai` package: -```{=mdx} - -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/openai @langchain/core - - + +```bash npm +npm install @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/openai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/bedrock_converse.md b/src/oss/javascript/integrations/chat/bedrock_converse.md index 6dd34ca03..4528fbd31 100644 --- 
a/src/oss/javascript/integrations/chat/bedrock_converse.md +++ b/src/oss/javascript/integrations/chat/bedrock_converse.md @@ -41,15 +41,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain `ChatBedrockConverse` integration lives in the `@langchain/aws` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/aws @langchain/core - - + +```bash npm +npm install @langchain/aws @langchain/core +``` +```bash yarn +yarn add @langchain/aws @langchain/core +``` +```bash pnpm +pnpm add @langchain/aws @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/cerebras.md b/src/oss/javascript/integrations/chat/cerebras.md index ec52cbb05..0f767ec4b 100644 --- a/src/oss/javascript/integrations/chat/cerebras.md +++ b/src/oss/javascript/integrations/chat/cerebras.md @@ -53,16 +53,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain ChatCerebras integration lives in the `@langchain/cerebras` package: -```{=mdx} - -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/cerebras @langchain/core - - + +```bash npm +npm install @langchain/cerebras @langchain/core +``` +```bash yarn +yarn add @langchain/cerebras @langchain/core +``` +```bash pnpm +pnpm add @langchain/cerebras @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/cloudflare_workersai.md b/src/oss/javascript/integrations/chat/cloudflare_workersai.md index 28229139a..d9d2b9a02 100644 --- a/src/oss/javascript/integrations/chat/cloudflare_workersai.md +++ b/src/oss/javascript/integrations/chat/cloudflare_workersai.md @@ -36,15 +36,17 @@ Passing a binding within a Cloudflare Worker is not yet supported. 
The LangChain ChatCloudflareWorkersAI integration lives in the `@langchain/cloudflare` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/cloudflare @langchain/core - - + +```bash npm +npm install @langchain/cloudflare @langchain/core +``` +```bash yarn +yarn add @langchain/cloudflare @langchain/core +``` +```bash pnpm +pnpm add @langchain/cloudflare @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/cohere.md b/src/oss/javascript/integrations/chat/cohere.md index f90fbf940..88f0654ed 100644 --- a/src/oss/javascript/integrations/chat/cohere.md +++ b/src/oss/javascript/integrations/chat/cohere.md @@ -48,16 +48,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain ChatCohere integration lives in the `@langchain/cohere` package: -```{=mdx} - -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/cohere @langchain/core - - + +```bash npm +npm install @langchain/cohere @langchain/core +``` +```bash yarn +yarn add @langchain/cohere @langchain/core +``` +```bash pnpm +pnpm add @langchain/cohere @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/fireworks.md b/src/oss/javascript/integrations/chat/fireworks.md index 6d136ae85..7d4bf4949 100644 --- a/src/oss/javascript/integrations/chat/fireworks.md +++ b/src/oss/javascript/integrations/chat/fireworks.md @@ -45,16 +45,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain `ChatFireworks` integration lives in the `@langchain/community` package: -```{=mdx} - -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` 
+```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/google_vertex_ai.md b/src/oss/javascript/integrations/chat/google_vertex_ai.md index e3a0f1f39..96405619d 100644 --- a/src/oss/javascript/integrations/chat/google_vertex_ai.md +++ b/src/oss/javascript/integrations/chat/google_vertex_ai.md @@ -65,21 +65,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain `ChatVertexAI` integration lives in the `@langchain/google-vertexai` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/google-vertexai @langchain/core - - -Or if using in a web environment like a [Vercel Edge function](https://vercel.com/blog/edge-functions-generally-available): - - - @langchain/google-vertexai-web @langchain/core - - + +```bash npm +npm install @langchain/google-vertexai @langchain/core +``` +```bash yarn +yarn add @langchain/google-vertexai @langchain/core +``` +```bash pnpm +pnpm add @langchain/google-vertexai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/groq.md b/src/oss/javascript/integrations/chat/groq.md index e8f964284..29b5aa692 100644 --- a/src/oss/javascript/integrations/chat/groq.md +++ b/src/oss/javascript/integrations/chat/groq.md @@ -46,16 +46,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain ChatGroq integration lives in the `@langchain/groq` package: -```{=mdx} - -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/groq @langchain/core - - + +```bash npm +npm install @langchain/groq @langchain/core +``` +```bash yarn +yarn add @langchain/groq @langchain/core +``` +```bash pnpm +pnpm add @langchain/groq @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/ibm.md 
b/src/oss/javascript/integrations/chat/ibm.md index dea7c7668..378137c34 100644 --- a/src/oss/javascript/integrations/chat/ibm.md +++ b/src/oss/javascript/integrations/chat/ibm.md @@ -111,15 +111,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain IBM watsonx.ai integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/mistral.md b/src/oss/javascript/integrations/chat/mistral.md index 810ecf9e2..d8244e93d 100644 --- a/src/oss/javascript/integrations/chat/mistral.md +++ b/src/oss/javascript/integrations/chat/mistral.md @@ -45,15 +45,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain ChatMistralAI integration lives in the `@langchain/mistralai` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - -@langchain/mistralai @langchain/core - - + +```bash npm +npm install @langchain/mistralai @langchain/core +``` +```bash yarn +yarn add @langchain/mistralai @langchain/core +``` +```bash pnpm +pnpm add @langchain/mistralai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/novita.md b/src/oss/javascript/integrations/chat/novita.md index 35bfab740..b401459ee 100644 --- a/src/oss/javascript/integrations/chat/novita.md +++ b/src/oss/javascript/integrations/chat/novita.md @@ -32,14 +32,17 @@ export NOVITA_API_KEY="your-api-key" The LangChain Novita integration lives in the `@langchain-community` package: -```{=mdx} -import IntegrationInstallTooltip from 
"@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/ollama.md b/src/oss/javascript/integrations/chat/ollama.md index 8df3122de..f7a7311d1 100644 --- a/src/oss/javascript/integrations/chat/ollama.md +++ b/src/oss/javascript/integrations/chat/ollama.md @@ -45,15 +45,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain ChatOllama integration lives in the `@langchain/ollama` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/ollama @langchain/core - - + +```bash npm +npm install @langchain/ollama @langchain/core +``` +```bash yarn +yarn add @langchain/ollama @langchain/core +``` +```bash pnpm +pnpm add @langchain/ollama @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/openai.md b/src/oss/javascript/integrations/chat/openai.md index 0682c3afb..c78e41263 100644 --- a/src/oss/javascript/integrations/chat/openai.md +++ b/src/oss/javascript/integrations/chat/openai.md @@ -45,16 +45,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain `ChatOpenAI` integration lives in the `@langchain/openai` package: -```{=mdx} - -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/openai @langchain/core - - + +```bash npm +npm install @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/openai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/perplexity.md 
b/src/oss/javascript/integrations/chat/perplexity.md index 94fca476b..c21fa16d7 100644 --- a/src/oss/javascript/integrations/chat/perplexity.md +++ b/src/oss/javascript/integrations/chat/perplexity.md @@ -45,15 +45,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain Perplexity integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/togetherai.md b/src/oss/javascript/integrations/chat/togetherai.md index b247abe04..0afe9c327 100644 --- a/src/oss/javascript/integrations/chat/togetherai.md +++ b/src/oss/javascript/integrations/chat/togetherai.md @@ -45,15 +45,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain ChatTogetherAI integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/xai.md b/src/oss/javascript/integrations/chat/xai.md index b4bb1b3c0..6ba0104b5 100644 --- a/src/oss/javascript/integrations/chat/xai.md +++ b/src/oss/javascript/integrations/chat/xai.md @@ -45,16 +45,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain `ChatXAI` integration lives in the `@langchain/xai` package: -```{=mdx} 
- -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/xai @langchain/core - - + +```bash npm +npm install @langchain/xai @langchain/core +``` +```bash yarn +yarn add @langchain/xai @langchain/core +``` +```bash pnpm +pnpm add @langchain/xai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/document_compressors/ibm.md b/src/oss/javascript/integrations/document_compressors/ibm.md index adb1dbb18..25c55f88d 100644 --- a/src/oss/javascript/integrations/document_compressors/ibm.md +++ b/src/oss/javascript/integrations/document_compressors/ibm.md @@ -105,14 +105,17 @@ If you want to get automated tracing from individual queries, you can also set y This document compressor lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/firecrawl.md b/src/oss/javascript/integrations/document_loaders/web_loaders/firecrawl.md index 603889062..14dc69da3 100644 --- a/src/oss/javascript/integrations/document_loaders/web_loaders/firecrawl.md +++ b/src/oss/javascript/integrations/document_loaders/web_loaders/firecrawl.md @@ -50,15 +50,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain FireCrawlLoader integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core @mendable/firecrawl-js@0.0.36 - - + +```bash npm +npm install @langchain/community @langchain/core @mendable/firecrawl-js@0.0.36 
+``` +```bash yarn +yarn add @langchain/community @langchain/core @mendable/firecrawl-js@0.0.36 +``` +```bash pnpm +pnpm add @langchain/community @langchain/core @mendable/firecrawl-js@0.0.36 ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/langsmith.md b/src/oss/javascript/integrations/document_loaders/web_loaders/langsmith.md index ea6b7a206..01f342577 100644 --- a/src/oss/javascript/integrations/document_loaders/web_loaders/langsmith.md +++ b/src/oss/javascript/integrations/document_loaders/web_loaders/langsmith.md @@ -35,15 +35,17 @@ export LANGSMITH_API_KEY="your-api-key" The `LangSmithLoader` integration lives in the `@langchain/core` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/core - - + +```bash npm +npm install @langchain/core +``` +```bash yarn +yarn add @langchain/core +``` +```bash pnpm +pnpm add @langchain/core ``` + ## Create example dataset diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/pdf.md b/src/oss/javascript/integrations/document_loaders/web_loaders/pdf.md index 303092860..8d143f94c 100644 --- a/src/oss/javascript/integrations/document_loaders/web_loaders/pdf.md +++ b/src/oss/javascript/integrations/document_loaders/web_loaders/pdf.md @@ -39,15 +39,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain WebPDFLoader integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core pdf-parse - - + +```bash npm +npm install @langchain/community @langchain/core pdf-parse +``` +```bash yarn +yarn add @langchain/community @langchain/core pdf-parse +``` +```bash pnpm +pnpm add @langchain/community @langchain/core pdf-parse ``` + ## Instantiation diff --git 
a/src/oss/javascript/integrations/document_loaders/web_loaders/web_cheerio.md b/src/oss/javascript/integrations/document_loaders/web_loaders/web_cheerio.md index 5a178bd16..e8e7d800f 100644 --- a/src/oss/javascript/integrations/document_loaders/web_loaders/web_cheerio.md +++ b/src/oss/javascript/integrations/document_loaders/web_loaders/web_cheerio.md @@ -42,15 +42,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain CheerioWebBaseLoader integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core cheerio - - + +```bash npm +npm install @langchain/community @langchain/core cheerio +``` +```bash yarn +yarn add @langchain/community @langchain/core cheerio +``` +```bash pnpm +pnpm add @langchain/community @langchain/core cheerio ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/llms/arcjet.md b/src/oss/javascript/integrations/llms/arcjet.md index 0de03bfad..fd2dceac7 100644 --- a/src/oss/javascript/integrations/llms/arcjet.md +++ b/src/oss/javascript/integrations/llms/arcjet.md @@ -20,15 +20,17 @@ The Arcjet Redact object is not an LLM itself, instead it wraps an LLM. 
It redac Install the Arcjet Redaction Library: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @arcjet/redact - - + +```bash npm +npm install @arcjet/redact +``` +```bash yarn +yarn add @arcjet/redact +``` +```bash pnpm +pnpm add @arcjet/redact ``` + And install LangChain Community: diff --git a/src/oss/javascript/integrations/llms/cloudflare_workersai.md b/src/oss/javascript/integrations/llms/cloudflare_workersai.md index a808a631a..1ecc26c34 100644 --- a/src/oss/javascript/integrations/llms/cloudflare_workersai.md +++ b/src/oss/javascript/integrations/llms/cloudflare_workersai.md @@ -24,15 +24,17 @@ Head [to this page](https://developers.cloudflare.com/workers-ai/) to sign up to The LangChain Cloudflare integration lives in the `@langchain/cloudflare` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/cloudflare @langchain/core - - + +```bash npm +npm install @langchain/cloudflare @langchain/core +``` +```bash yarn +yarn add @langchain/cloudflare @langchain/core +``` +```bash pnpm +pnpm add @langchain/cloudflare @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/llms/cohere.md b/src/oss/javascript/integrations/llms/cohere.md index b4fb2dc49..60b297f75 100644 --- a/src/oss/javascript/integrations/llms/cohere.md +++ b/src/oss/javascript/integrations/llms/cohere.md @@ -47,15 +47,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain Cohere integration lives in the `@langchain/cohere` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/cohere @langchain/core - - + +```bash npm +npm install @langchain/cohere @langchain/core +``` +```bash yarn +yarn add @langchain/cohere @langchain/core +``` +```bash pnpm +pnpm add @langchain/cohere @langchain/core ``` + ## Instantiation diff 
--git a/src/oss/javascript/integrations/llms/ibm.md b/src/oss/javascript/integrations/llms/ibm.md index a603ed180..73c04aab3 100644 --- a/src/oss/javascript/integrations/llms/ibm.md +++ b/src/oss/javascript/integrations/llms/ibm.md @@ -105,15 +105,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain IBM watsonx.ai integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/llms/together.md b/src/oss/javascript/integrations/llms/together.md index 332cdebe0..bbfaa323f 100644 --- a/src/oss/javascript/integrations/llms/together.md +++ b/src/oss/javascript/integrations/llms/together.md @@ -43,15 +43,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain TogetherAI integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/azion-edgesql.md b/src/oss/javascript/integrations/retrievers/azion-edgesql.md index f5a11510e..e9b577e1d 100644 --- a/src/oss/javascript/integrations/retrievers/azion-edgesql.md +++ b/src/oss/javascript/integrations/retrievers/azion-edgesql.md @@ -37,14 +37,17 @@ If you want to get automated tracing from individual queries, you can also 
set y This retriever lives in the `@langchain/community/retrievers/azion_edgesql` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - azion @langchain/openai @langchain/community - + +```bash npm +npm install azion @langchain/openai @langchain/community +``` +```bash yarn +yarn add azion @langchain/openai @langchain/community +``` +```bash pnpm +pnpm add azion @langchain/openai @langchain/community ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/bedrock-knowledge-bases.md b/src/oss/javascript/integrations/retrievers/bedrock-knowledge-bases.md index 80df0a5b0..79b7f0d85 100644 --- a/src/oss/javascript/integrations/retrievers/bedrock-knowledge-bases.md +++ b/src/oss/javascript/integrations/retrievers/bedrock-knowledge-bases.md @@ -39,14 +39,17 @@ If you want to get automated tracing from individual queries, you can also set y This retriever lives in the `@langchain/aws` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/aws @langchain/core - + +```bash npm +npm install @langchain/aws @langchain/core +``` +```bash yarn +yarn add @langchain/aws @langchain/core +``` +```bash pnpm +pnpm add @langchain/aws @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/bm25.md b/src/oss/javascript/integrations/retrievers/bm25.md index 7e30b4d4a..3533c2a74 100644 --- a/src/oss/javascript/integrations/retrievers/bm25.md +++ b/src/oss/javascript/integrations/retrievers/bm25.md @@ -10,14 +10,17 @@ You can use it as part of your retrieval pipeline as a to rerank documents as a The `BM25Retriever` is exported from `@langchain/community`. 
You'll need to install it like this: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + This retriever uses code from [`this implementation`](https://github.com/FurkanToprak/OkapiBM25) of Okapi BM25. diff --git a/src/oss/javascript/integrations/retrievers/exa.md b/src/oss/javascript/integrations/retrievers/exa.md index 8db504127..1d95224d4 100644 --- a/src/oss/javascript/integrations/retrievers/exa.md +++ b/src/oss/javascript/integrations/retrievers/exa.md @@ -35,14 +35,17 @@ If you want to get automated tracing from individual queries, you can also set y This retriever lives in the `@langchain/exa` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/exa @langchain/core - + +```bash npm +npm install @langchain/exa @langchain/core +``` +```bash yarn +yarn add @langchain/exa @langchain/core +``` +```bash pnpm +pnpm add @langchain/exa @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/kendra-retriever.md b/src/oss/javascript/integrations/retrievers/kendra-retriever.md index cf63599d6..35f10099c 100644 --- a/src/oss/javascript/integrations/retrievers/kendra-retriever.md +++ b/src/oss/javascript/integrations/retrievers/kendra-retriever.md @@ -34,14 +34,17 @@ If you want to get automated tracing from individual queries, you can also set y This retriever lives in the `@langchain/aws` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/aws @langchain/core - + +```bash npm +npm install @langchain/aws @langchain/core +``` +```bash yarn +yarn add @langchain/aws @langchain/core +``` 
+```bash pnpm +pnpm add @langchain/aws @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/self_query/chroma.md b/src/oss/javascript/integrations/retrievers/self_query/chroma.md index f7ea83480..8e871a07c 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/chroma.md +++ b/src/oss/javascript/integrations/retrievers/self_query/chroma.md @@ -34,14 +34,17 @@ The vector store lives in the `@langchain/community` package. You'll also need t For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com): -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community langchain @langchain/openai @langchain/core - + +```bash npm +npm install @langchain/community langchain @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/community langchain @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/community langchain @langchain/openai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/self_query/hnswlib.md b/src/oss/javascript/integrations/retrievers/self_query/hnswlib.md index 5cb9285d1..64b39b0ee 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/hnswlib.md +++ b/src/oss/javascript/integrations/retrievers/self_query/hnswlib.md @@ -33,14 +33,17 @@ The vector store lives in the `@langchain/community` package. 
You'll also need t For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com): -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community langchain @langchain/openai @langchain/core - + +```bash npm +npm install @langchain/community langchain @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/community langchain @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/community langchain @langchain/openai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/self_query/memory.md b/src/oss/javascript/integrations/retrievers/self_query/memory.md index b955eb30a..3e310648e 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/memory.md +++ b/src/oss/javascript/integrations/retrievers/self_query/memory.md @@ -33,14 +33,17 @@ The vector store lives in the `@langchain/community` package. 
You'll also need t For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com): -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community langchain @langchain/openai @langchain/core - + +```bash npm +npm install @langchain/community langchain @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/community langchain @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/community langchain @langchain/openai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/self_query/pinecone.md b/src/oss/javascript/integrations/retrievers/self_query/pinecone.md index ad463d25a..f5bd8b769 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/pinecone.md +++ b/src/oss/javascript/integrations/retrievers/self_query/pinecone.md @@ -41,14 +41,17 @@ You will also need to install the official Pinecone SDK (`@pinecone-database/pin For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com): -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/pinecone langchain @langchain/openai @langchain/core @pinecone-database/pinecone - + +```bash npm +npm install @langchain/pinecone langchain @langchain/openai @langchain/core @pinecone-database/pinecone +``` +```bash yarn +yarn add @langchain/pinecone langchain @langchain/openai @langchain/core @pinecone-database/pinecone +``` +```bash pnpm +pnpm add @langchain/pinecone langchain @langchain/openai @langchain/core @pinecone-database/pinecone ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/self_query/qdrant.md b/src/oss/javascript/integrations/retrievers/self_query/qdrant.md index 
d3f23a085..64f0f98ff 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/qdrant.md +++ b/src/oss/javascript/integrations/retrievers/self_query/qdrant.md @@ -37,14 +37,17 @@ The vector store lives in the `@langchain/qdrant` package. You'll also need to i For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com): -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/qdrant langchain @langchain/community @langchain/openai @langchain/core - + +```bash npm +npm install @langchain/qdrant langchain @langchain/community @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/qdrant langchain @langchain/community @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/qdrant langchain @langchain/community @langchain/openai @langchain/core ``` + The official Qdrant SDK (`@qdrant/js-client-rest`) is automatically installed as a dependency of `@langchain/qdrant`, but you may wish to install it independently as well. 
diff --git a/src/oss/javascript/integrations/retrievers/self_query/supabase.md b/src/oss/javascript/integrations/retrievers/self_query/supabase.md index 6fe3127f1..58daa4559 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/supabase.md +++ b/src/oss/javascript/integrations/retrievers/self_query/supabase.md @@ -38,14 +38,17 @@ The vector store lives in the `@langchain/community` package, which requires the For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com): -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community langchain @langchain/openai @langchain/core @supabase/supabase-js - + +```bash npm +npm install @langchain/community langchain @langchain/openai @langchain/core @supabase/supabase-js +``` +```bash yarn +yarn add @langchain/community langchain @langchain/openai @langchain/core @supabase/supabase-js +``` +```bash pnpm +pnpm add @langchain/community langchain @langchain/openai @langchain/core @supabase/supabase-js ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/self_query/vectara.md b/src/oss/javascript/integrations/retrievers/self_query/vectara.md index bb410900c..b8d3586e3 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/vectara.md +++ b/src/oss/javascript/integrations/retrievers/self_query/vectara.md @@ -37,14 +37,17 @@ If you want to get automated tracing from individual queries, you can also set y The vector store lives in the `@langchain/community` package. You'll also need to install the `langchain` package to import the main `SelfQueryRetriever` class. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community langchain @langchain/core - + +```bash npm +npm install @langchain/community langchain @langchain/core +``` +```bash yarn +yarn add @langchain/community langchain @langchain/core +``` +```bash pnpm +pnpm add @langchain/community langchain @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/self_query/weaviate.md b/src/oss/javascript/integrations/retrievers/self_query/weaviate.md index a4a6a129c..b3a255cfc 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/weaviate.md +++ b/src/oss/javascript/integrations/retrievers/self_query/weaviate.md @@ -42,14 +42,17 @@ The official Weaviate SDK (`weaviate-client`) is automatically installed as a de For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com): -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/weaviate langchain @langchain/openai @langchain/core weaviate-client - + +```bash npm +npm install @langchain/weaviate langchain @langchain/openai @langchain/core weaviate-client +``` +```bash yarn +yarn add @langchain/weaviate langchain @langchain/openai @langchain/core weaviate-client +``` +```bash pnpm +pnpm add @langchain/weaviate langchain @langchain/openai @langchain/core weaviate-client ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/tavily.md b/src/oss/javascript/integrations/retrievers/tavily.md index 7f96172d3..9ab39714d 100644 --- a/src/oss/javascript/integrations/retrievers/tavily.md +++ b/src/oss/javascript/integrations/retrievers/tavily.md @@ -30,14 +30,17 @@ If you want to get automated tracing from individual queries, you can also set y This retriever lives in the `@langchain/community` package: -```{=mdx} -import 
IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/stores/in_memory.md b/src/oss/javascript/integrations/stores/in_memory.md index f5fde4e55..9af99adb9 100644 --- a/src/oss/javascript/integrations/stores/in_memory.md +++ b/src/oss/javascript/integrations/stores/in_memory.md @@ -20,16 +20,17 @@ The `InMemoryStore` allows for a generic type to be assigned to the values in th The LangChain InMemoryStore integration lives in the `@langchain/core` package: -```{=mdx} - -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/core - - + +```bash npm +npm install @langchain/core +``` +```bash yarn +yarn add @langchain/core +``` +```bash pnpm +pnpm add @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/bedrock.md b/src/oss/javascript/integrations/text_embedding/bedrock.md index 98436b84e..647192ba9 100644 --- a/src/oss/javascript/integrations/text_embedding/bedrock.md +++ b/src/oss/javascript/integrations/text_embedding/bedrock.md @@ -34,14 +34,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain Bedrock integration lives in the `@langchain/aws` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/aws @langchain/core - + +```bash npm +npm install @langchain/aws @langchain/core +``` +```bash yarn +yarn add @langchain/aws @langchain/core +``` +```bash pnpm +pnpm add @langchain/aws @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/bytedance_doubao.md 
b/src/oss/javascript/integrations/text_embedding/bytedance_doubao.md index b456c5f19..51e882c4c 100644 --- a/src/oss/javascript/integrations/text_embedding/bytedance_doubao.md +++ b/src/oss/javascript/integrations/text_embedding/bytedance_doubao.md @@ -31,14 +31,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain ByteDanceDoubaoEmbeddings integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community - + +```bash npm +npm install @langchain/community +``` +```bash yarn +yarn add @langchain/community +``` +```bash pnpm +pnpm add @langchain/community ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/cloudflare_ai.md b/src/oss/javascript/integrations/text_embedding/cloudflare_ai.md index d701e3030..baf0e67fd 100644 --- a/src/oss/javascript/integrations/text_embedding/cloudflare_ai.md +++ b/src/oss/javascript/integrations/text_embedding/cloudflare_ai.md @@ -46,14 +46,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain CloudflareWorkersAIEmbeddings integration lives in the `@langchain/cloudflare` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/cloudflare @langchain/core - + +```bash npm +npm install @langchain/cloudflare @langchain/core +``` +```bash yarn +yarn add @langchain/cloudflare @langchain/core +``` +```bash pnpm +pnpm add @langchain/cloudflare @langchain/core ``` + ## Usage diff --git a/src/oss/javascript/integrations/text_embedding/cohere.md b/src/oss/javascript/integrations/text_embedding/cohere.md index c4a24bd37..52a4e908f 100644 --- a/src/oss/javascript/integrations/text_embedding/cohere.md +++ b/src/oss/javascript/integrations/text_embedding/cohere.md @@ -35,14 +35,17 @@ If you want to get automated tracing of your model 
calls you can also set your [ The LangChain CohereEmbeddings integration lives in the `@langchain/cohere` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/cohere @langchain/core - + +```bash npm +npm install @langchain/cohere @langchain/core +``` +```bash yarn +yarn add @langchain/cohere @langchain/core +``` +```bash pnpm +pnpm add @langchain/cohere @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/fireworks.md b/src/oss/javascript/integrations/text_embedding/fireworks.md index bcc6bd094..f0cc11ea3 100644 --- a/src/oss/javascript/integrations/text_embedding/fireworks.md +++ b/src/oss/javascript/integrations/text_embedding/fireworks.md @@ -35,14 +35,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain `FireworksEmbeddings` integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/google_generativeai.md b/src/oss/javascript/integrations/text_embedding/google_generativeai.md index 6fbfe4a39..2efb46bae 100644 --- a/src/oss/javascript/integrations/text_embedding/google_generativeai.md +++ b/src/oss/javascript/integrations/text_embedding/google_generativeai.md @@ -37,14 +37,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain `GoogleGenerativeAIEmbeddings` integration lives in the `@langchain/google-genai` package. 
You may also wish to install the official SDK: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/google-genai @langchain/core @google/generative-ai - + +```bash npm +npm install @langchain/google-genai @langchain/core @google/generative-ai +``` +```bash yarn +yarn add @langchain/google-genai @langchain/core @google/generative-ai +``` +```bash pnpm +pnpm add @langchain/google-genai @langchain/core @google/generative-ai ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/google_vertex_ai.md b/src/oss/javascript/integrations/text_embedding/google_vertex_ai.md index 30c653489..3e1b4c676 100644 --- a/src/oss/javascript/integrations/text_embedding/google_vertex_ai.md +++ b/src/oss/javascript/integrations/text_embedding/google_vertex_ai.md @@ -46,14 +46,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain `VertexAIEmbeddings` integration lives in the `@langchain/google-vertexai` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/google-vertexai @langchain/core - + +```bash npm +npm install @langchain/google-vertexai @langchain/core +``` +```bash yarn +yarn add @langchain/google-vertexai @langchain/core +``` +```bash pnpm +pnpm add @langchain/google-vertexai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/ibm.md b/src/oss/javascript/integrations/text_embedding/ibm.md index 5a2f7b9ed..d5c9144b8 100644 --- a/src/oss/javascript/integrations/text_embedding/ibm.md +++ b/src/oss/javascript/integrations/text_embedding/ibm.md @@ -105,15 +105,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain IBM watsonx.ai integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from 
"@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/mistralai.md b/src/oss/javascript/integrations/text_embedding/mistralai.md index 65847b41d..cbcc7fe6b 100644 --- a/src/oss/javascript/integrations/text_embedding/mistralai.md +++ b/src/oss/javascript/integrations/text_embedding/mistralai.md @@ -35,14 +35,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain MistralAIEmbeddings integration lives in the `@langchain/mistralai` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/mistralai @langchain/core - + +```bash npm +npm install @langchain/mistralai @langchain/core +``` +```bash yarn +yarn add @langchain/mistralai @langchain/core +``` +```bash pnpm +pnpm add @langchain/mistralai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/ollama.md b/src/oss/javascript/integrations/text_embedding/ollama.md index bf65b9a51..4b91b3e27 100644 --- a/src/oss/javascript/integrations/text_embedding/ollama.md +++ b/src/oss/javascript/integrations/text_embedding/ollama.md @@ -29,14 +29,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain OllamaEmbeddings integration lives in the `@langchain/ollama` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/ollama @langchain/core - + +```bash npm +npm install @langchain/ollama @langchain/core +``` +```bash yarn +yarn add @langchain/ollama @langchain/core +``` +```bash pnpm +pnpm add @langchain/ollama @langchain/core ``` + ## 
Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/openai.md b/src/oss/javascript/integrations/text_embedding/openai.md index 520132fd9..417fd8313 100644 --- a/src/oss/javascript/integrations/text_embedding/openai.md +++ b/src/oss/javascript/integrations/text_embedding/openai.md @@ -35,14 +35,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain OpenAIEmbeddings integration lives in the `@langchain/openai` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/openai @langchain/core - + +```bash npm +npm install @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/openai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/pinecone.md b/src/oss/javascript/integrations/text_embedding/pinecone.md index 1b392727a..0aed45909 100644 --- a/src/oss/javascript/integrations/text_embedding/pinecone.md +++ b/src/oss/javascript/integrations/text_embedding/pinecone.md @@ -35,14 +35,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain PineconeEmbeddings integration lives in the `@langchain/pinecone` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/pinecone @langchain/core @pinecone-database/pinecone@5 - + +```bash npm +npm install @langchain/pinecone @langchain/core @pinecone-database/pinecone@5 +``` +```bash yarn +yarn add @langchain/pinecone @langchain/core @pinecone-database/pinecone@5 +``` +```bash pnpm +pnpm add @langchain/pinecone @langchain/core @pinecone-database/pinecone@5 ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/togetherai.md b/src/oss/javascript/integrations/text_embedding/togetherai.md index bc8c12c26..f23e43315 100644 --- 
a/src/oss/javascript/integrations/text_embedding/togetherai.md +++ b/src/oss/javascript/integrations/text_embedding/togetherai.md @@ -35,14 +35,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain TogetherAIEmbeddings integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/tools/duckduckgo_search.md b/src/oss/javascript/integrations/tools/duckduckgo_search.md index b93613969..9ee57c812 100644 --- a/src/oss/javascript/integrations/tools/duckduckgo_search.md +++ b/src/oss/javascript/integrations/tools/duckduckgo_search.md @@ -18,14 +18,17 @@ DuckDuckGoSearch offers a privacy-focused search API designed for LLM Agents. It The integration lives in the `@langchain/community` package, along with the `duck-duck-scrape` dependency: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core duck-duck-scrape - + +```bash npm +npm install @langchain/community @langchain/core duck-duck-scrape +``` +```bash yarn +yarn add @langchain/community @langchain/core duck-duck-scrape +``` +```bash pnpm +pnpm add @langchain/community @langchain/core duck-duck-scrape ``` + ### Credentials diff --git a/src/oss/javascript/integrations/tools/exa_search.md b/src/oss/javascript/integrations/tools/exa_search.md index 0604f8166..cd6d1867d 100644 --- a/src/oss/javascript/integrations/tools/exa_search.md +++ b/src/oss/javascript/integrations/tools/exa_search.md @@ -20,16 +20,17 @@ This page goes over how to use `ExaSearchResults` with LangChain. 
The integration lives in the `@langchain/exa` package. -```{=mdx} - -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/exa @langchain/core - - + +```bash npm +npm install @langchain/exa @langchain/core +``` +```bash yarn +yarn add @langchain/exa @langchain/core +``` +```bash pnpm +pnpm add @langchain/exa @langchain/core ``` + ### Credentials diff --git a/src/oss/javascript/integrations/tools/ibm.mdx b/src/oss/javascript/integrations/tools/ibm.mdx index 4e0fad549..031297bd9 100644 --- a/src/oss/javascript/integrations/tools/ibm.mdx +++ b/src/oss/javascript/integrations/tools/ibm.mdx @@ -101,11 +101,17 @@ For detailed info about tools please visit [watsonx.ai API docs](https://cloud.i First, ensure you have LangGraph installed: -```{=mdx} - - @langchain/langgraph - + +```bash npm +npm install @langchain/langgraph +``` +```bash yarn +yarn add @langchain/langgraph +``` +```bash pnpm +pnpm add @langchain/langgraph ``` + Then, instantiate your LLM to be used in the React agent: diff --git a/src/oss/javascript/integrations/tools/serpapi.md b/src/oss/javascript/integrations/tools/serpapi.md index 2644b973c..42e2dd65c 100644 --- a/src/oss/javascript/integrations/tools/serpapi.md +++ b/src/oss/javascript/integrations/tools/serpapi.md @@ -18,14 +18,17 @@ This guide provides a quick overview for getting started with the SerpAPI [tool] The integration lives in the `@langchain/community` package, which you can install as shown below: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ### Credentials diff --git a/src/oss/javascript/integrations/tools/sql.md b/src/oss/javascript/integrations/tools/sql.md index 
58da542fd..e09fa037f 100644 --- a/src/oss/javascript/integrations/tools/sql.md +++ b/src/oss/javascript/integrations/tools/sql.md @@ -30,14 +30,17 @@ process.env.LANGSMITH_API_KEY="your-api-key" This toolkit lives in the `langchain` package. You'll also need to install the `typeorm` peer dependency. -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - langchain @langchain/core typeorm - + +```bash npm +npm install langchain @langchain/core typeorm +``` +```bash yarn +yarn add langchain @langchain/core typeorm +``` +```bash pnpm +pnpm add langchain @langchain/core typeorm ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/tools/tavily_crawl.md b/src/oss/javascript/integrations/tools/tavily_crawl.md index 4a7c05f04..966cccf4f 100644 --- a/src/oss/javascript/integrations/tools/tavily_crawl.md +++ b/src/oss/javascript/integrations/tools/tavily_crawl.md @@ -18,14 +18,17 @@ This guide provides a quick overview for getting started with the Tavily [tool]( The integration lives in the `@langchain/tavily` package, which you can install as shown below: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/tavily @langchain/core - + +```bash npm +npm install @langchain/tavily @langchain/core +``` +```bash yarn +yarn add @langchain/tavily @langchain/core +``` +```bash pnpm +pnpm add @langchain/tavily @langchain/core ``` + ### Credentials diff --git a/src/oss/javascript/integrations/tools/tavily_extract.md b/src/oss/javascript/integrations/tools/tavily_extract.md index c75c13f24..5e2248603 100644 --- a/src/oss/javascript/integrations/tools/tavily_extract.md +++ b/src/oss/javascript/integrations/tools/tavily_extract.md @@ -18,14 +18,17 @@ This guide provides a quick overview for getting started with the Tavily [tool]( The integration lives in the `@langchain/tavily` package, which you can install as shown below: -```{=mdx} -import 
IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/tavily @langchain/core - + +```bash npm +npm install @langchain/tavily @langchain/core +``` +```bash yarn +yarn add @langchain/tavily @langchain/core +``` +```bash pnpm +pnpm add @langchain/tavily @langchain/core ``` + ### Credentials diff --git a/src/oss/javascript/integrations/tools/tavily_map.md b/src/oss/javascript/integrations/tools/tavily_map.md index 41e156593..c10bac9d0 100644 --- a/src/oss/javascript/integrations/tools/tavily_map.md +++ b/src/oss/javascript/integrations/tools/tavily_map.md @@ -18,14 +18,17 @@ This guide provides a quick overview for getting started with the Tavily [tool]( The integration lives in the `@langchain/tavily` package, which you can install as shown below: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/tavily @langchain/core - + +```bash npm +npm install @langchain/tavily @langchain/core +``` +```bash yarn +yarn add @langchain/tavily @langchain/core +``` +```bash pnpm +pnpm add @langchain/tavily @langchain/core ``` + ### Credentials diff --git a/src/oss/javascript/integrations/tools/tavily_search.md b/src/oss/javascript/integrations/tools/tavily_search.md index 9b34f63e5..a15cb952d 100644 --- a/src/oss/javascript/integrations/tools/tavily_search.md +++ b/src/oss/javascript/integrations/tools/tavily_search.md @@ -18,14 +18,17 @@ This guide provides a quick overview for getting started with the Tavily [tool]( The integration lives in the `@langchain/tavily` package, which you can install as shown below: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/tavily @langchain/core - + +```bash npm +npm install @langchain/tavily @langchain/core +``` +```bash yarn +yarn add @langchain/tavily @langchain/core +``` +```bash pnpm +pnpm add @langchain/tavily @langchain/core ``` + ### Credentials diff 
--git a/src/oss/javascript/integrations/tools/tavily_search_community.md b/src/oss/javascript/integrations/tools/tavily_search_community.md index 6fa27c03e..41c367af0 100644 --- a/src/oss/javascript/integrations/tools/tavily_search_community.md +++ b/src/oss/javascript/integrations/tools/tavily_search_community.md @@ -25,14 +25,17 @@ This guide provides a quick overview for getting started with the Tavily [tool]( The integration lives in the `@langchain/community` package, which you can install as shown below: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ### Credentials diff --git a/src/oss/javascript/integrations/tools/vectorstore.md b/src/oss/javascript/integrations/tools/vectorstore.md index 5381b02e3..f6cdd43c2 100644 --- a/src/oss/javascript/integrations/tools/vectorstore.md +++ b/src/oss/javascript/integrations/tools/vectorstore.md @@ -19,14 +19,17 @@ process.env.LANGSMITH_API_KEY="your-api-key" This toolkit lives in the `langchain` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - langchain @langchain/core - + +```bash npm +npm install langchain @langchain/core +``` +```bash yarn +yarn add langchain @langchain/core +``` +```bash pnpm +pnpm add langchain @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/vectorstores/azion-edgesql.md b/src/oss/javascript/integrations/vectorstores/azion-edgesql.md index d5fb23111..30aaf8178 100644 --- a/src/oss/javascript/integrations/vectorstores/azion-edgesql.md +++ b/src/oss/javascript/integrations/vectorstores/azion-edgesql.md @@ -20,14 +20,17 @@ To use the `AzionVectorStore` vector store, you will need to install the 
`@langc This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish. -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - azion @langchain/openai @langchain/community - + +```bash npm +npm install azion @langchain/openai @langchain/community +``` +```bash yarn +yarn add azion @langchain/openai @langchain/community +``` +```bash pnpm +pnpm add azion @langchain/openai @langchain/community ``` + ### Credentials diff --git a/src/oss/javascript/integrations/vectorstores/chroma.md b/src/oss/javascript/integrations/vectorstores/chroma.md index 968f520ed..4f373735f 100644 --- a/src/oss/javascript/integrations/vectorstores/chroma.md +++ b/src/oss/javascript/integrations/vectorstores/chroma.md @@ -29,14 +29,17 @@ To use Chroma vector stores, you'll need to install the `@langchain/community` i This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/openai @langchain/core chromadb - + +```bash npm +npm install @langchain/community @langchain/openai @langchain/core chromadb +``` +```bash yarn +yarn add @langchain/community @langchain/openai @langchain/core chromadb +``` +```bash pnpm +pnpm add @langchain/community @langchain/openai @langchain/core chromadb ``` + If you want to run Chroma locally, you can [run a local Chroma server](https://docs.trychroma.com/docs/cli/run) using the Chroma CLI, which ships with the `chromadb` package: diff --git a/src/oss/javascript/integrations/vectorstores/memory.md b/src/oss/javascript/integrations/vectorstores/memory.md index 5931f339c..db2c3500b 100644 --- a/src/oss/javascript/integrations/vectorstores/memory.md +++ b/src/oss/javascript/integrations/vectorstores/memory.md @@ -22,14 +22,17 @@ To use in-memory vector stores, you'll need to install the `langchain` package: This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - langchain @langchain/openai @langchain/core - + +```bash npm +npm install langchain @langchain/openai @langchain/core +``` +```bash yarn +yarn add langchain @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add langchain @langchain/openai @langchain/core ``` + ### Credentials diff --git a/src/oss/javascript/integrations/vectorstores/pinecone.md b/src/oss/javascript/integrations/vectorstores/pinecone.md index e51b35193..fcc9699c7 100644 --- a/src/oss/javascript/integrations/vectorstores/pinecone.md +++ b/src/oss/javascript/integrations/vectorstores/pinecone.md @@ -20,14 +20,17 @@ To use Pinecone vector stores, you'll need to create a Pinecone account, initial This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/pinecone @langchain/openai @langchain/core @pinecone-database/pinecone@5 - + +```bash npm +npm install @langchain/pinecone @langchain/openai @langchain/core @pinecone-database/pinecone@5 +``` +```bash yarn +yarn add @langchain/pinecone @langchain/openai @langchain/core @pinecone-database/pinecone@5 +``` +```bash pnpm +pnpm add @langchain/pinecone @langchain/openai @langchain/core @pinecone-database/pinecone@5 ``` + ### Credentials diff --git a/src/oss/javascript/integrations/vectorstores/supabase.md b/src/oss/javascript/integrations/vectorstores/supabase.md index 09d499a3f..886695ec5 100644 --- a/src/oss/javascript/integrations/vectorstores/supabase.md +++ b/src/oss/javascript/integrations/vectorstores/supabase.md @@ -22,14 +22,17 @@ To use Supabase vector stores, you'll need to set up a Supabase database and ins This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core @supabase/supabase-js @langchain/openai - + +```bash npm +npm install @langchain/community @langchain/core @supabase/supabase-js @langchain/openai +``` +```bash yarn +yarn add @langchain/community @langchain/core @supabase/supabase-js @langchain/openai +``` +```bash pnpm +pnpm add @langchain/community @langchain/core @supabase/supabase-js @langchain/openai ``` + Once you've created a database, run the following SQL to set up [`pgvector`](https://github.com/pgvector/pgvector) and create the necessary table and functions: diff --git a/src/oss/javascript/integrations/vectorstores/upstash.md b/src/oss/javascript/integrations/vectorstores/upstash.md index 8ac714ad1..c8ec3c852 100644 --- a/src/oss/javascript/integrations/vectorstores/upstash.md +++ b/src/oss/javascript/integrations/vectorstores/upstash.md @@ -20,14 +20,17 @@ To use Upstash vector stores, you'll need to create an Upstash account, create a This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish. -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core @upstash/vector @langchain/openai - + +```bash npm +npm install @langchain/community @langchain/core @upstash/vector @langchain/openai +``` +```bash yarn +yarn add @langchain/community @langchain/core @upstash/vector @langchain/openai +``` +```bash pnpm +pnpm add @langchain/community @langchain/core @upstash/vector @langchain/openai ``` + You can create an index from the [Upstash Console](https://console.upstash.com/login). For further reference, see [the official docs](https://upstash.com/docs/vector/overall/getstarted). 
diff --git a/src/oss/javascript/integrations/vectorstores/weaviate.md b/src/oss/javascript/integrations/vectorstores/weaviate.md index 213d80367..7c73654d6 100644 --- a/src/oss/javascript/integrations/vectorstores/weaviate.md +++ b/src/oss/javascript/integrations/vectorstores/weaviate.md @@ -20,14 +20,17 @@ To use Weaviate vector stores, you'll need to set up a Weaviate instance and ins This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish. -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/weaviate @langchain/core weaviate-client uuid @langchain/openai - + +```bash npm +npm install @langchain/weaviate @langchain/core weaviate-client uuid @langchain/openai +``` +```bash yarn +yarn add @langchain/weaviate @langchain/core weaviate-client uuid @langchain/openai +``` +```bash pnpm +pnpm add @langchain/weaviate @langchain/core weaviate-client uuid @langchain/openai ``` + You'll need to run Weaviate either locally or on a server. See [the Weaviate documentation](https://weaviate.io/developers/weaviate/installation) for more information. 
From da8e5071d8ae6507d74158bf81cb4c07bd655b6a Mon Sep 17 00:00:00 2001 From: Brody Klapko Date: Mon, 8 Sep 2025 12:14:30 -0700 Subject: [PATCH 3/6] Remove script --- src/oss/javascript/integrations/convert.py | 86 ---------------------- 1 file changed, 86 deletions(-) delete mode 100644 src/oss/javascript/integrations/convert.py diff --git a/src/oss/javascript/integrations/convert.py b/src/oss/javascript/integrations/convert.py deleted file mode 100644 index 5840b0621..000000000 --- a/src/oss/javascript/integrations/convert.py +++ /dev/null @@ -1,86 +0,0 @@ -import re -from pathlib import Path - -def extract_packages(content: str) -> str | None: - """Extract package names from Npm2Yarn component.""" - pattern = r'\s*(.*?)\s*' - match = re.search(pattern, content, re.DOTALL) - if match: - return match.group(1).strip() - return None - -def create_codegroup(packages: str) -> str: - """Create CodeGroup component with npm, yarn, and pnpm instructions.""" - return f''' -```bash npm -npm install {packages} -``` -```bash yarn -yarn add {packages} -``` -```bash pnpm -pnpm add {packages} -``` -''' - -def convert_file(file_path: Path) -> bool: - """Convert a single file's installation blocks.""" - with open(file_path, 'r', encoding='utf-8') as f: - content = f.read() - - # Find the mdx block - mdx_pattern = r'```\{=mdx\}.*?```' - match = re.search(mdx_pattern, content, re.DOTALL) # Fixed: using mdx_pattern instead of pattern - - if not match: - print(f"No mdx block found in {file_path}") - return False - - mdx_block = match.group(0) - packages = extract_packages(mdx_block) - - if not packages: - print(f"No Npm2Yarn component found in {file_path}") - return False - - # Create new CodeGroup - new_content = content.replace(mdx_block, create_codegroup(packages)) - - # Write back to file - with open(file_path, 'w', encoding='utf-8') as f: - f.write(new_content) - - print(f"Successfully converted {file_path}") - return True - -def main(): - import sys - - # If no argument provided, 
process all files in current directory - if len(sys.argv) == 1: - base_dir = Path(__file__).parent - files = [] - # Walk through all subdirectories - for ext in ['.md', '.mdx']: - files.extend(list(base_dir.rglob(f'*{ext}'))) - - print(f"Found {len(files)} files to process") - for file_path in files: - print(f"\nProcessing {file_path}...") - convert_file(file_path) - - # If file path provided, process single file - elif len(sys.argv) == 2: - file_path = Path(sys.argv[1]) - if not file_path.exists(): - print(f"File not found: {file_path}") - sys.exit(1) - convert_file(file_path) - - else: - print("Usage: python script.py [file_path]") - print("If no file_path is provided, will process all .md/.mdx files in current directory") - sys.exit(1) - -if __name__ == "__main__": - main() From 018e4e4e5a727ffc29a7351410de7b73b8604432 Mon Sep 17 00:00:00 2001 From: Brody Klapko Date: Mon, 8 Sep 2025 12:52:31 -0700 Subject: [PATCH 4/6] Fix remaining install blocks, --- .../javascript/integrations/chat/anthropic.md | 4 -- .../javascript/integrations/chat/arcjet.md | 19 ++++--- src/oss/javascript/integrations/chat/azure.md | 16 +++--- .../javascript/integrations/chat/bedrock.md | 39 ++++++++------ .../integrations/chat/google_generativeai.md | 22 ++++---- .../javascript/integrations/chat/openai.md | 12 +---- .../document_loaders/file_loaders/csv.md | 24 ++++----- .../file_loaders/directory.md | 23 ++++---- .../document_loaders/file_loaders/pdf.md | 38 +++++++------- .../document_loaders/file_loaders/text.md | 23 ++++---- .../file_loaders/unstructured.md | 23 ++++---- .../document_loaders/web_loaders/pdf.md | 15 ++++-- .../web_loaders/recursive_url_loader.md | 23 ++++---- .../web_loaders/web_puppeteer.md | 21 ++++---- .../javascript/integrations/llms/arcjet.md | 19 ++++--- src/oss/javascript/integrations/llms/azure.md | 22 ++++---- .../javascript/integrations/llms/bedrock.md | 52 ++++++++++++------- .../javascript/integrations/llms/fireworks.md | 22 ++++---- 
.../integrations/llms/google_vertex_ai.md | 35 ++++++++----- .../javascript/integrations/llms/mistral.md | 29 +++++------ .../javascript/integrations/llms/ollama.md | 22 ++++---- .../javascript/integrations/llms/openai.md | 22 ++++---- .../integrations/retrievers/azion-edgesql.md | 2 - .../retrievers/bedrock-knowledge-bases.md | 8 --- .../javascript/integrations/retrievers/exa.md | 2 - .../retrievers/kendra-retriever.md | 2 - .../retrievers/self_query/chroma.md | 2 - .../retrievers/self_query/hnswlib.md | 2 - .../retrievers/self_query/memory.md | 2 - .../retrievers/self_query/pinecone.md | 2 - .../retrievers/self_query/qdrant.md | 2 - .../retrievers/self_query/supabase.md | 2 - .../retrievers/self_query/vectara.md | 2 - .../retrievers/self_query/weaviate.md | 2 - .../integrations/retrievers/tavily.md | 2 - .../integrations/stores/file_system.md | 30 ++++------- .../text_embedding/azure_openai.md | 29 ++++------- .../integrations/tools/duckduckgo_search.md | 2 - .../integrations/tools/exa_search.md | 20 +++---- .../javascript/integrations/tools/openapi.md | 37 +++++++------ .../javascript/integrations/tools/serpapi.md | 2 - src/oss/javascript/integrations/tools/sql.md | 16 +++--- .../integrations/tools/tavily_crawl.md | 2 - .../integrations/tools/tavily_extract.md | 2 - .../integrations/tools/tavily_map.md | 2 - .../integrations/tools/tavily_search.md | 2 - .../tools/tavily_search_community.md | 2 - .../integrations/tools/vectorstore.md | 16 +++--- .../vectorstores/elasticsearch.md | 21 ++++---- .../integrations/vectorstores/faiss.md | 35 +++++++------ .../integrations/vectorstores/hnswlib.md | 23 ++++---- .../integrations/vectorstores/mariadb.md | 19 +++---- .../vectorstores/mongodb_atlas.md | 19 +++---- .../integrations/vectorstores/pgvector.md | 24 ++++----- .../integrations/vectorstores/qdrant.md | 19 +++---- .../integrations/vectorstores/redis.md | 19 +++---- .../integrations/vectorstores/upstash.md | 3 +- .../integrations/retrievers/linkup_search.md | 2 - 58 
files changed, 428 insertions(+), 475 deletions(-) diff --git a/src/oss/javascript/integrations/chat/anthropic.md b/src/oss/javascript/integrations/chat/anthropic.md index 4ab030baa..59c9a191d 100644 --- a/src/oss/javascript/integrations/chat/anthropic.md +++ b/src/oss/javascript/integrations/chat/anthropic.md @@ -367,16 +367,12 @@ AIMessage { ## Prompt caching -```{=mdx} - **Compatibility** This feature is currently in beta. -``` - Anthropic supports [caching parts of your prompt](https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching) in order to reduce costs for use-cases that require long context. You can cache tools and both entire messages and individual blocks. The initial request containing one or more blocks or tool definitions with a `"cache_control": { "type": "ephemeral" }` field will automatically cache that part of the prompt. This initial caching step will cost extra, but subsequent requests will be billed at a reduced rate. The cache has a lifetime of 5 minutes, but this is refereshed each time the cache is hit. diff --git a/src/oss/javascript/integrations/chat/arcjet.md b/src/oss/javascript/integrations/chat/arcjet.md index d38ca7865..b7d36dee3 100644 --- a/src/oss/javascript/integrations/chat/arcjet.md +++ b/src/oss/javascript/integrations/chat/arcjet.md @@ -34,17 +34,20 @@ pnpm add @arcjet/redact And install LangChain Community: -```{=mdx} - - - - @langchain/community @langchain/core - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core +``` + And now you're ready to start protecting your chat model calls with Arcjet Redaction! 
-``` - ## Usage ```typescript diff --git a/src/oss/javascript/integrations/chat/azure.md b/src/oss/javascript/integrations/chat/azure.md index 29a93726b..ab1c761c9 100644 --- a/src/oss/javascript/integrations/chat/azure.md +++ b/src/oss/javascript/integrations/chat/azure.md @@ -257,13 +257,17 @@ If you are using the deprecated Azure OpenAI SDK with the `@langchain/azure-open 1. Install the new `@langchain/openai` package and remove the previous `@langchain/azure-openai` package: -```{=mdx} - - - @langchain/openai - - + +```bash npm +npm install @langchain/openai ``` +```bash yarn +yarn add @langchain/openai +``` +```bash pnpm +pnpm add @langchain/openai +``` + ```bash npm uninstall @langchain/azure-openai diff --git a/src/oss/javascript/integrations/chat/bedrock.md b/src/oss/javascript/integrations/chat/bedrock.md index b7524b71e..657046ec1 100644 --- a/src/oss/javascript/integrations/chat/bedrock.md +++ b/src/oss/javascript/integrations/chat/bedrock.md @@ -6,11 +6,10 @@ title: BedrockChat This will help you getting started with Amazon Bedrock [chat models](/oss/concepts/chat_models). For detailed documentation of all `BedrockChat` features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain_community_chat_models_bedrock.BedrockChat.html). -```{=mdx} The newer [`ChatBedrockConverse` chat model is now available via the dedicated `@langchain/aws`](/oss/integrations/chat/bedrock_converse) integration package. Use [tool calling](/oss/concepts/tool_calling) with more models with this package. -``` + ## Overview @@ -47,25 +46,33 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain `BedrockChat` integration lives in the `@langchain/community` package. 
You'll also need to install several official AWS packages as peer dependencies: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core @aws-crypto/sha256-js @aws-sdk/credential-provider-node @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types - + +```bash npm +npm install @langchain/community @langchain/core @aws-crypto/sha256-js @aws-sdk/credential-provider-node @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types ``` +```bash yarn +yarn add @langchain/community @langchain/core @aws-crypto/sha256-js @aws-sdk/credential-provider-node @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types +``` +```bash pnpm +pnpm add @langchain/community @langchain/core @aws-crypto/sha256-js @aws-sdk/credential-provider-node @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types +``` + -You can also use BedrockChat in web environments such as Edge functions or Cloudflare Workers by omitting the @aws-sdk/credential-provider-node dependency and using the web entrypoint: - -```{=mdx} - - - @langchain/community @langchain/core @aws-crypto/sha256-js @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types - +You can also use BedrockChat in web environments such as Edge functions or Cloudflare Workers by omitting the @aws-sdk/credential-provider-node dependency and using the web entrypoint: + +```bash npm +npm install @langchain/community @langchain/core @aws-crypto/sha256-js @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types +``` +```bash yarn +yarn add @langchain/community @langchain/core @aws-crypto/sha256-js @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8
@aws-sdk/types ``` +```bash pnpm +pnpm add @langchain/community @langchain/core @aws-crypto/sha256-js @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types +``` + + ## Instantiation diff --git a/src/oss/javascript/integrations/chat/google_generativeai.md b/src/oss/javascript/integrations/chat/google_generativeai.md index b9b7f27ed..c9e85e174 100644 --- a/src/oss/javascript/integrations/chat/google_generativeai.md +++ b/src/oss/javascript/integrations/chat/google_generativeai.md @@ -28,16 +28,12 @@ You can access Google's `gemini` and `gemini-vision` models, as well as other generative models in LangChain through `ChatGoogleGenerativeAI` class in the `@langchain/google-genai` integration package. -```{=mdx} - **You can also access Google's `gemini` family of models via the LangChain VertexAI and VertexAI-web integrations.** - Click [here](/oss/integrations/chat/google_vertex_ai) to read the docs. -``` ### Credentials @@ -60,15 +56,19 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain `ChatGoogleGenerativeAI` integration lives in the `@langchain/google-genai` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - + +```bash npm +npm install @langchain/google-genai @langchain/core +``` +```bash yarn +yarn add @langchain/google-genai @langchain/core +``` +```bash pnpm +pnpm add @langchain/google-genai @langchain/core +``` + - - @langchain/google-genai @langchain/core - -``` ## Instantiation diff --git a/src/oss/javascript/integrations/chat/openai.md b/src/oss/javascript/integrations/chat/openai.md index c78e41263..5c1f510e7 100644 --- a/src/oss/javascript/integrations/chat/openai.md +++ b/src/oss/javascript/integrations/chat/openai.md @@ -252,14 +252,11 @@ await fineTunedLlm.invoke("Hi there!"); If you need additional information like logprobs or token usage, these will be returned directly in the `.invoke` response 
within the `response_metadata` field on the message. -```{=mdx} - **Requires `@langchain/core` version >=0.1.48.** ::: -``` ```typescript import { ChatOpenAI } from "@langchain/openai"; @@ -433,15 +430,11 @@ console.log(result); As of Aug 6, 2024, OpenAI supports a `strict` argument when calling tools that will enforce that the tool argument schema is respected by the model. See more [here](https://platform.openai.com/docs/guides/function-calling). -```{=mdx} - info Requires ``@langchain/openai >= 0.2.6`` **Note**: If ``strict: true`` the tool definition will also be validated, and a subset of JSON schema are accepted. Crucially, schema cannot have optional args (those with default values). Read the full docs on what types of schema are supported here: https://platform.openai.com/docs/guides/structured-outputs/supported-schemas. -::: - -``` +::: Here's an example with tool calling. Passing an extra `strict: true` argument to `.bindTools` will pass the param through to all tool definitions: @@ -819,15 +812,12 @@ const response = await llmWithImageGeneration.invoke( ### Reasoning models -```{=mdx} **Compatibility** - The below points apply to `@langchain/openai>=0.4.0`. Please see here for a [guide on upgrading](/oss/how-to/installation/#installing-integration-packages). -``` When using reasoning models like `o1`, the default method for `withStructuredOutput` is OpenAI's built-in method for structured output (equivalent to passing `method: "jsonSchema"` as an option into `withStructuredOutput`). JSON schema mostly works the same as other models, but with one important caveat: when defining schema, `z.optional()` is not respected, and you should instead use `z.nullable()`. 
diff --git a/src/oss/javascript/integrations/document_loaders/file_loaders/csv.md b/src/oss/javascript/integrations/document_loaders/file_loaders/csv.md index dd97f140a..27f358a19 100644 --- a/src/oss/javascript/integrations/document_loaders/file_loaders/csv.md +++ b/src/oss/javascript/integrations/document_loaders/file_loaders/csv.md @@ -2,19 +2,13 @@ title: CSV --- - -```{=mdx} - **Compatibility** - Only available on Node.js. -``` - This notebook provides a quick overview for getting started with `CSVLoader` [document loaders](/oss/concepts/document_loaders). For detailed documentation of all `CSVLoader` features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain_community_document_loaders_fs_csv.CSVLoader.html). This example goes over how to load data from CSV files. The second argument is the `column` name to extract from the CSV file. One document will be created for each row in the CSV file. When `column` is not specified, each row is converted into a key/value pair with each key/value pair outputted to a new line in the document's `pageContent`. When `column` is specified, one document is created for each row, and the value of the specified column is used as the document's `pageContent`. @@ -35,15 +29,17 @@ To access `CSVLoader` document loader you'll need to install the `@langchain/com The LangChain CSVLoader integration lives in the `@langchain/community` integration package. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core d3-dsv@2 - - + +```bash npm +npm install @langchain/community @langchain/core d3-dsv@2 +``` +```bash yarn +yarn add @langchain/community @langchain/core d3-dsv@2 +``` +```bash pnpm +pnpm add @langchain/community @langchain/core d3-dsv@2 ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/document_loaders/file_loaders/directory.md b/src/oss/javascript/integrations/document_loaders/file_loaders/directory.md index 1883c3b82..8485ef3a0 100644 --- a/src/oss/javascript/integrations/document_loaders/file_loaders/directory.md +++ b/src/oss/javascript/integrations/document_loaders/file_loaders/directory.md @@ -2,18 +2,13 @@ title: DirectoryLoader --- -```{=mdx} - **Compatibility** - Only available on Node.js. -``` - This notebook provides a quick overview for getting started with `DirectoryLoader` [document loaders](/oss/concepts/document_loaders). For detailed documentation of all `DirectoryLoader` features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain.document_loaders_fs_directory.DirectoryLoader.html). This example goes over how to load data from folders with multiple files. The second argument is a map of file extensions to loader factories. Each file will be passed to the matching loader, and the resulting documents will be concatenated together. 
@@ -44,15 +39,17 @@ To access `DirectoryLoader` document loader you'll need to install the `langchai The LangChain DirectoryLoader integration lives in the `langchain` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - langchain @langchain/core - - + +```bash npm +npm install langchain @langchain/core +``` +```bash yarn +yarn add langchain @langchain/core +``` +```bash pnpm +pnpm add langchain @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/document_loaders/file_loaders/pdf.md b/src/oss/javascript/integrations/document_loaders/file_loaders/pdf.md index 272c2a81d..e0d47dd9f 100644 --- a/src/oss/javascript/integrations/document_loaders/file_loaders/pdf.md +++ b/src/oss/javascript/integrations/document_loaders/file_loaders/pdf.md @@ -2,18 +2,13 @@ title: PDFLoader --- -```{=mdx} - **Compatibility** - Only available on Node.js. -``` - This notebook provides a quick overview for getting started with `PDFLoader` [document loaders](/oss/concepts/document_loaders). For detailed documentation of all `PDFLoader` features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain_community_document_loaders_fs_pdf.PDFLoader.html). 
## Overview @@ -34,15 +29,17 @@ To access `PDFLoader` document loader you'll need to install the `@langchain/com The LangChain PDFLoader integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core pdf-parse - - + +```bash npm +npm install @langchain/community @langchain/core pdf-parse ``` +```bash yarn +yarn add @langchain/community @langchain/core pdf-parse +``` +```bash pnpm +pnpm add @langchain/community @langchain/core pdf-parse +``` + ## Instantiation @@ -192,12 +189,17 @@ By default we use the `pdfjs` build bundled with `pdf-parse`, which is compatibl In the following example we use the "legacy" (see [pdfjs docs](https://github.com/mozilla/pdf.js/wiki/Frequently-Asked-Questions#which-browsersenvironments-are-supported)) build of `pdfjs-dist`, which includes several polyfills not included in the default build. -```{=mdx} - - pdfjs-dist - - + +```bash npm +npm install pdfjs-dist +``` +```bash yarn +yarn add pdfjs-dist +``` +```bash pnpm +pnpm add pdfjs-dist ``` + ```typescript import { PDFLoader } from "@langchain/community/document_loaders/fs/pdf"; diff --git a/src/oss/javascript/integrations/document_loaders/file_loaders/text.md b/src/oss/javascript/integrations/document_loaders/file_loaders/text.md index 73c7ba4a5..124c533f8 100644 --- a/src/oss/javascript/integrations/document_loaders/file_loaders/text.md +++ b/src/oss/javascript/integrations/document_loaders/file_loaders/text.md @@ -2,18 +2,13 @@ title: TextLoader --- -```{=mdx} - **Compatibility** - Only available on Node.js. -``` - This notebook provides a quick overview for getting started with `TextLoader` [document loaders](/oss/concepts/document_loaders). For detailed documentation of all `TextLoader` features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain.document_loaders_fs_text.TextLoader.html).
## Overview @@ -32,15 +27,17 @@ To access `TextLoader` document loader you'll need to install the `langchain` pa The LangChain TextLoader integration lives in the `langchain` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - langchain - - + +```bash npm +npm install langchain +``` +```bash yarn +yarn add langchain +``` +```bash pnpm +pnpm add langchain ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/document_loaders/file_loaders/unstructured.md b/src/oss/javascript/integrations/document_loaders/file_loaders/unstructured.md index 67a6911ff..f9fdfd585 100644 --- a/src/oss/javascript/integrations/document_loaders/file_loaders/unstructured.md +++ b/src/oss/javascript/integrations/document_loaders/file_loaders/unstructured.md @@ -2,18 +2,13 @@ title: UnstructuredLoader --- -```{=mdx} - **Compatibility** - Only available on Node.js. -``` - This notebook provides a quick overview for getting started with `UnstructuredLoader` [document loaders](/oss/concepts/document_loaders). For detailed documentation of all `UnstructuredLoader` features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain_community_document_loaders_fs_unstructured.UnstructuredLoader.html). 
## Overview @@ -48,15 +43,17 @@ export UNSTRUCTURED_API_KEY="your-api-key" The LangChain UnstructuredLoader integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/pdf.md b/src/oss/javascript/integrations/document_loaders/web_loaders/pdf.md index 8d143f94c..9e9f92628 100644 --- a/src/oss/javascript/integrations/document_loaders/web_loaders/pdf.md +++ b/src/oss/javascript/integrations/document_loaders/web_loaders/pdf.md @@ -186,12 +186,17 @@ By default we use the `pdfjs` build bundled with `pdf-parse`, which is compatibl In the following example we use the "legacy" (see [pdfjs docs](https://github.com/mozilla/pdf.js/wiki/Frequently-Asked-Questions#which-browsersenvironments-are-supported)) build of `pdfjs-dist`, which includes several polyfills not included in the default build. -```{=mdx} - - pdfjs-dist - - + +```bash npm +npm install pdfjs-dist +``` +```bash yarn +yarn add pdfjs-dist +``` +```bash pnpm +pnpm add pdfjs-dist ``` + ```typescript import { WebPDFLoader } from "@langchain/community/document_loaders/web/pdf"; diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/recursive_url_loader.md b/src/oss/javascript/integrations/document_loaders/web_loaders/recursive_url_loader.md index e169fbabc..8b9e1c9a0 100644 --- a/src/oss/javascript/integrations/document_loaders/web_loaders/recursive_url_loader.md +++ b/src/oss/javascript/integrations/document_loaders/web_loaders/recursive_url_loader.md @@ -2,18 +2,13 @@ title: RecursiveUrlLoader --- -```{=mdx} - **Compatibility** - Only available on Node.js. 
-``` - This notebook provides a quick overview for getting started with [RecursiveUrlLoader](/oss/integrations/document_loaders/). For detailed documentation of all RecursiveUrlLoader features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain_community_document_loaders_web_recursive_url.RecursiveUrlLoader.html). ## Overview @@ -59,13 +54,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain RecursiveUrlLoader integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core jsdom - + +```bash npm +npm install @langchain/community @langchain/core jsdom +``` +```bash yarn +yarn add @langchain/community @langchain/core jsdom +``` +```bash pnpm +pnpm add @langchain/community @langchain/core jsdom +``` + We also suggest adding a package like [`html-to-text`](https://www.npmjs.com/package/html-to-text) or [`@mozilla/readability`](https://www.npmjs.com/package/@mozilla/readability) for extracting the raw text from the page. diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/web_puppeteer.md b/src/oss/javascript/integrations/document_loaders/web_loaders/web_puppeteer.md index e7d15c403..e0b904632 100644 --- a/src/oss/javascript/integrations/document_loaders/web_loaders/web_puppeteer.md +++ b/src/oss/javascript/integrations/document_loaders/web_loaders/web_puppeteer.md @@ -2,15 +2,12 @@ title: PuppeteerWebBaseLoader --- -```{=mdx} **Compatibility** - Only available on Node.js. -``` This notebook provides a quick overview for getting started with [PuppeteerWebBaseLoader](/oss/integrations/document_loaders/). 
For detailed documentation of all PuppeteerWebBaseLoader features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain_community_document_loaders_web_puppeteer.PuppeteerWebBaseLoader.html). @@ -49,15 +46,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain PuppeteerWebBaseLoader integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core puppeteer - - + +```bash npm +npm install @langchain/community @langchain/core puppeteer +``` +```bash yarn +yarn add @langchain/community @langchain/core puppeteer +``` +```bash pnpm +pnpm add @langchain/community @langchain/core puppeteer ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/llms/arcjet.md b/src/oss/javascript/integrations/llms/arcjet.md index fd2dceac7..8432a18a9 100644 --- a/src/oss/javascript/integrations/llms/arcjet.md +++ b/src/oss/javascript/integrations/llms/arcjet.md @@ -34,17 +34,20 @@ pnpm add @arcjet/redact And install LangChain Community: -```{=mdx} - - - - @langchain/community @langchain/core - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core +``` + And now you're ready to start protecting your LLM calls with Arcjet Redaction! -``` - ## Usage ```typescript diff --git a/src/oss/javascript/integrations/llms/azure.md b/src/oss/javascript/integrations/llms/azure.md index 41fc94c02..dbcc835cc 100644 --- a/src/oss/javascript/integrations/llms/azure.md +++ b/src/oss/javascript/integrations/llms/azure.md @@ -2,8 +2,6 @@ title: Azure OpenAI --- -```{=mdx} - **You are currently on a page documenting the use of Azure OpenAI [text completion models](/oss/concepts/text_llms). 
The latest and most popular Azure OpenAI models are [chat completion models](/oss/concepts/chat_models).** @@ -19,8 +17,6 @@ If you are using Azure OpenAI with the deprecated SDK, see the [migration guide] -``` - [Azure OpenAI](https://learn.microsoft.com/en-us/azure/ai-services/openai/) is a Microsoft Azure service that provides powerful language models from OpenAI. This will help you get started with AzureOpenAI completion models (LLMs) using LangChain. For detailed documentation on `AzureOpenAI` features and configuration options, please refer to the [API reference](https://api.js.langchain.com/classes/langchain_openai.AzureOpenAI.html). @@ -67,15 +63,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain AzureOpenAI integration lives in the `@langchain/openai` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/openai @langchain/core - - + +```bash npm +npm install @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/openai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/llms/bedrock.md b/src/oss/javascript/integrations/llms/bedrock.md index e46d841f4..ea7b03217 100644 --- a/src/oss/javascript/integrations/llms/bedrock.md +++ b/src/oss/javascript/integrations/llms/bedrock.md @@ -2,10 +2,6 @@ title: Bedrock --- -# Bedrock - -```{=mdx} - **You are currently on a page documenting the use of Amazon Bedrock models as [text completion models](/oss/concepts/text_llms). Many popular models available on Bedrock are [chat completion models](/oss/concepts/chat_models).** @@ -13,8 +9,6 @@ title: Bedrock You may be looking for [this page instead](/oss/integrations/chat/bedrock/). 
-``` - > [Amazon Bedrock](https://aws.amazon.com/bedrock/) is a fully managed service that makes Foundation Models (FMs) > from leading AI startups and Amazon available via an API. You can choose from a wide range of FMs to find the model that is best suited for your use case. @@ -53,28 +47,46 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain Bedrock integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core +``` + And install the peer dependencies: - - @aws-crypto/sha256-js @aws-sdk/credential-provider-node @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types - + +```bash npm +npm install @aws-crypto/sha256-js @aws-sdk/credential-provider-node @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types +``` +```bash yarn +yarn add @aws-crypto/sha256-js @aws-sdk/credential-provider-node @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types +``` +```bash pnpm +pnpm add @aws-crypto/sha256-js @aws-sdk/credential-provider-node @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types +``` + You can also use Bedrock in web environments such as Edge functions or Cloudflare Workers by omitting the `@aws-sdk/credential-provider-node` dependency and using the `web` entrypoint: - - @aws-crypto/sha256-js @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types - - + +```bash npm +npm install @aws-crypto/sha256-js @smithy/protocol-http @smithy/signature-v4 
@smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types +``` +```bash yarn +yarn add @aws-crypto/sha256-js @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types +``` +```bash pnpm +pnpm add @aws-crypto/sha256-js @smithy/protocol-http @smithy/signature-v4 @smithy/eventstream-codec @smithy/util-utf8 @aws-sdk/types ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/llms/fireworks.md b/src/oss/javascript/integrations/llms/fireworks.md index e43d81c46..2293dd244 100644 --- a/src/oss/javascript/integrations/llms/fireworks.md +++ b/src/oss/javascript/integrations/llms/fireworks.md @@ -2,8 +2,6 @@ title: Fireworks --- -```{=mdx} - **You are currently on a page documenting the use of Fireworks models as [text completion models](/oss/concepts/text_llms). Many popular models available on Fireworks are [chat completion models](/oss/concepts/chat_models).** @@ -11,8 +9,6 @@ title: Fireworks You may be looking for [this page instead](/oss/integrations/chat/fireworks/). -``` - [Fireworks AI](https://fireworks.ai/) is an AI inference platform to run and customize models. For a list of all models served by Fireworks see the [Fireworks docs](https://fireworks.ai/models). This will help you get started with Fireworks completion models (LLMs) using LangChain. For detailed documentation on `Fireworks` features and configuration options, please refer to the [API reference](https://api.js.langchain.com/classes/langchain_community_llms_fireworks.Fireworks.html). 
@@ -48,15 +44,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain Fireworks integration lives in the `@langchain/community` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/core - - + +```bash npm +npm install @langchain/community @langchain/core +``` +```bash yarn +yarn add @langchain/community @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/llms/google_vertex_ai.md b/src/oss/javascript/integrations/llms/google_vertex_ai.md index bd7fc7d61..fd429379d 100644 --- a/src/oss/javascript/integrations/llms/google_vertex_ai.md +++ b/src/oss/javascript/integrations/llms/google_vertex_ai.md @@ -2,8 +2,6 @@ title: Google Vertex AI --- -```{=mdx} - **You are currently on a page documenting the use of Google Vertex models as [text completion models](/oss/concepts/text_llms). Many popular models available on Google Vertex are [chat completion models](/oss/concepts/chat_models).** @@ -11,8 +9,6 @@ title: Google Vertex AI You may be looking for [this page instead](/oss/integrations/chat/google_vertex_ai/). -``` - [Google Vertex](https://cloud.google.com/vertex-ai) is a service that exposes all foundation models available in Google Cloud, like `gemini-1.5-pro`, `gemini-1.5-flash`, etc. This will help you get started with VertexAI completion models (LLMs) using LangChain. For detailed documentation on `VertexAI` features and configuration options, please refer to the [API reference](https://api.js.langchain.com/classes/langchain_google_vertexai.VertexAI.html). 
@@ -86,21 +82,32 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain VertexAI integration lives in the `@langchain/google-vertexai` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - + +```bash npm +npm install @langchain/google-vertexai @langchain/core +``` +```bash yarn +yarn add @langchain/google-vertexai @langchain/core +``` +```bash pnpm +pnpm add @langchain/google-vertexai @langchain/core +``` + - - @langchain/google-vertexai @langchain/core - or for web environments: - - @langchain/google-vertexai-web @langchain/core - - + +```bash npm +npm install @langchain/google-vertexai-web @langchain/core +``` +```bash yarn +yarn add @langchain/google-vertexai-web @langchain/core +``` +```bash pnpm +pnpm add @langchain/google-vertexai-web @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/llms/mistral.md b/src/oss/javascript/integrations/llms/mistral.md index 42c3d9e4b..7b33ce4de 100644 --- a/src/oss/javascript/integrations/llms/mistral.md +++ b/src/oss/javascript/integrations/llms/mistral.md @@ -2,20 +2,15 @@ title: MistralAI --- -```{=mdx} - **Want to run Mistral's models locally? Check out our [Ollama integration](/oss/integrations/chat/ollama).** + -::: - -caution + You are currently on a page documenting the use of Mistral models as [text completion models](/oss/concepts/text_llms). Many popular models available on Mistral are [chat completion models](/oss/concepts/chat_models). + You may be looking for [this page instead](/oss/integrations/chat/mistral/). -::: - -``` [Mistral AI](https://mistral.ai/) is a platform that offers hosting for their powerful [open source models](https://docs.mistral.ai/getting-started/models/). 
@@ -52,15 +47,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain MistralAI integration lives in the `@langchain/mistralai` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/mistralai @langchain/core - - + +```bash npm +npm install @langchain/mistralai @langchain/core +``` +```bash yarn +yarn add @langchain/mistralai @langchain/core +``` +```bash pnpm +pnpm add @langchain/mistralai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/llms/ollama.md b/src/oss/javascript/integrations/llms/ollama.md index 538681c69..5f452c679 100644 --- a/src/oss/javascript/integrations/llms/ollama.md +++ b/src/oss/javascript/integrations/llms/ollama.md @@ -2,8 +2,6 @@ title: Ollama --- -```{=mdx} - **You are currently on a page documenting the use of Ollama models as [text completion models](/oss/concepts/text_llms). Many popular models available on Ollama are [chat completion models](/oss/concepts/chat_models).** @@ -11,8 +9,6 @@ title: Ollama You may be looking for [this page instead](/oss/integrations/chat/ollama/). -``` - This will help you get started with Ollama [text completion models (LLMs)](/oss/concepts/text_llms) using LangChain. For detailed documentation on `Ollama` features and configuration options, please refer to the [API reference](https://api.js.langchain.com/classes/langchain_ollama.Ollama.html). 
## Overview @@ -47,15 +43,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain Ollama integration lives in the `@langchain/ollama` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/ollama @langchain/core - - + +```bash npm +npm install @langchain/ollama @langchain/core +``` +```bash yarn +yarn add @langchain/ollama @langchain/core +``` +```bash pnpm +pnpm add @langchain/ollama @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/llms/openai.md b/src/oss/javascript/integrations/llms/openai.md index 689162da1..c26001068 100644 --- a/src/oss/javascript/integrations/llms/openai.md +++ b/src/oss/javascript/integrations/llms/openai.md @@ -2,8 +2,6 @@ title: OpenAI --- -```{=mdx} - **You are currently on a page documenting the use of OpenAI [text completion models](/oss/concepts/text_llms). The latest and most popular OpenAI models are [chat completion models](/oss/concepts/chat_models).** @@ -11,8 +9,6 @@ title: OpenAI Unless you are specifically using `gpt-3.5-turbo-instruct`, you are probably looking for [this page instead](/oss/integrations/chat/openai/). -``` - [OpenAI](https://en.wikipedia.org/wiki/OpenAI) is an artificial intelligence (AI) research laboratory. This will help you get started with OpenAI completion models (LLMs) using LangChain. For detailed documentation on `OpenAI` features and configuration options, please refer to the [API reference](https://api.js.langchain.com/classes/langchain_openai.OpenAI.html). 
@@ -48,15 +44,17 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain OpenAI integration lives in the `@langchain/openai` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/openai @langchain/core - - + +```bash npm +npm install @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/openai @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/retrievers/azion-edgesql.md b/src/oss/javascript/integrations/retrievers/azion-edgesql.md index e9b577e1d..2e9676778 100644 --- a/src/oss/javascript/integrations/retrievers/azion-edgesql.md +++ b/src/oss/javascript/integrations/retrievers/azion-edgesql.md @@ -115,9 +115,7 @@ Like other retrievers, AzionRetriever can be incorporated into LLM applications We will need a LLM or chat model: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/retrievers/bedrock-knowledge-bases.md b/src/oss/javascript/integrations/retrievers/bedrock-knowledge-bases.md index 79b7f0d85..e46139d5c 100644 --- a/src/oss/javascript/integrations/retrievers/bedrock-knowledge-bases.md +++ b/src/oss/javascript/integrations/retrievers/bedrock-knowledge-bases.md @@ -85,9 +85,7 @@ Like other retrievers, AmazonKnowledgeBaseRetriever can be incorporated into LLM We will need a LLM or chat model: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell @@ -130,16 +128,10 @@ const ragChain = RunnableSequence.from([ ]); ``` -```{=mdx} - **See [our RAG tutorial](docs/tutorials/rag) for more information and examples on `RunnableSequence`'s like the one above.** - - -``` - ```typescript await ragChain.invoke("...") ``` diff --git a/src/oss/javascript/integrations/retrievers/exa.md b/src/oss/javascript/integrations/retrievers/exa.md index 1d95224d4..b1c675723 100644 --- 
a/src/oss/javascript/integrations/retrievers/exa.md +++ b/src/oss/javascript/integrations/retrievers/exa.md @@ -191,9 +191,7 @@ Like other retrievers, ExaRetriever can be incorporated into LLM applications vi We will need a LLM or chat model: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/retrievers/kendra-retriever.md b/src/oss/javascript/integrations/retrievers/kendra-retriever.md index 35f10099c..c2debc683 100644 --- a/src/oss/javascript/integrations/retrievers/kendra-retriever.md +++ b/src/oss/javascript/integrations/retrievers/kendra-retriever.md @@ -80,9 +80,7 @@ Like other retrievers, the `AWSKendraRetriever` can be incorporated into LLM app We will need a LLM or chat model: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/retrievers/self_query/chroma.md b/src/oss/javascript/integrations/retrievers/self_query/chroma.md index 8e871a07c..d4b37c066 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/chroma.md +++ b/src/oss/javascript/integrations/retrievers/self_query/chroma.md @@ -142,9 +142,7 @@ const vectorStore = await Chroma.fromDocuments(docs, embeddings, { Now we can instantiate our retriever: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/retrievers/self_query/hnswlib.md b/src/oss/javascript/integrations/retrievers/self_query/hnswlib.md index 64b39b0ee..4e9ca4bbc 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/hnswlib.md +++ b/src/oss/javascript/integrations/retrievers/self_query/hnswlib.md @@ -139,9 +139,7 @@ const vectorStore = await HNSWLib.fromDocuments(docs, embeddings); Now we can instantiate our retriever: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/retrievers/self_query/memory.md b/src/oss/javascript/integrations/retrievers/self_query/memory.md index 3e310648e..852b10207 100644 --- 
a/src/oss/javascript/integrations/retrievers/self_query/memory.md +++ b/src/oss/javascript/integrations/retrievers/self_query/memory.md @@ -139,9 +139,7 @@ const vectorStore = await MemoryVectorStore.fromDocuments(docs, embeddings); Now we can instantiate our retriever: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/retrievers/self_query/pinecone.md b/src/oss/javascript/integrations/retrievers/self_query/pinecone.md index f5bd8b769..d643dbaaa 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/pinecone.md +++ b/src/oss/javascript/integrations/retrievers/self_query/pinecone.md @@ -156,9 +156,7 @@ const vectorStore = await PineconeStore.fromDocuments(docs, embeddings, { Now we can instantiate our retriever: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/retrievers/self_query/qdrant.md b/src/oss/javascript/integrations/retrievers/self_query/qdrant.md index 64f0f98ff..47f57caf1 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/qdrant.md +++ b/src/oss/javascript/integrations/retrievers/self_query/qdrant.md @@ -153,9 +153,7 @@ const vectorStore = await QdrantVectorStore.fromDocuments(docs, embeddings, { Now we can instantiate our retriever: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/retrievers/self_query/supabase.md b/src/oss/javascript/integrations/retrievers/self_query/supabase.md index 58daa4559..d088084e6 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/supabase.md +++ b/src/oss/javascript/integrations/retrievers/self_query/supabase.md @@ -154,9 +154,7 @@ const vectorStore = await SupabaseVectorStore.fromDocuments(docs, embeddings, { Now we can instantiate our retriever: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/retrievers/self_query/vectara.md b/src/oss/javascript/integrations/retrievers/self_query/vectara.md 
index b8d3586e3..bb4fc5615 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/vectara.md +++ b/src/oss/javascript/integrations/retrievers/self_query/vectara.md @@ -150,9 +150,7 @@ const vectorStore = await VectaraStore.fromDocuments(docs, embeddings, { Now we can instantiate our retriever: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/retrievers/self_query/weaviate.md b/src/oss/javascript/integrations/retrievers/self_query/weaviate.md index b3a255cfc..29afae663 100644 --- a/src/oss/javascript/integrations/retrievers/self_query/weaviate.md +++ b/src/oss/javascript/integrations/retrievers/self_query/weaviate.md @@ -162,9 +162,7 @@ const vectorStore = await WeaviateStore.fromDocuments(docs, embeddings, { Now we can instantiate our retriever: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/retrievers/tavily.md b/src/oss/javascript/integrations/retrievers/tavily.md index 9ab39714d..941073f30 100644 --- a/src/oss/javascript/integrations/retrievers/tavily.md +++ b/src/oss/javascript/integrations/retrievers/tavily.md @@ -105,9 +105,7 @@ Like other retrievers, `TavilySearchAPIRetriever` can be incorporated into LLM a We will need a LLM or chat model: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/stores/file_system.md b/src/oss/javascript/integrations/stores/file_system.md index e21ae7f74..adc86523a 100644 --- a/src/oss/javascript/integrations/stores/file_system.md +++ b/src/oss/javascript/integrations/stores/file_system.md @@ -2,18 +2,13 @@ title: LocalFileStore --- -```{=mdx} - **Compatibility** - Only available on Node.js. -``` - This will help you get started with [LocalFileStore](/oss/concepts/key_value_stores). For detailed documentation of all LocalFileStore features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain.storage_file_system.LocalFileStore.html). 
## Overview @@ -22,12 +17,8 @@ The `LocalFileStore` is a wrapper around the `fs` module for storing data as key Each key value pair has its own file nested inside the directory passed to the `.fromPath` method. The file name is the key and inside contains the value of the key. -```{=mdx} - **The path passed to the `.fromPath` must be a directory, not a file.** - - @@ -37,8 +28,6 @@ Make sure that the path you specify when initializing the store is free of other -``` - ### Integration details | Class | Package | Local | [PY support](https://python.langchain.com/docs/integrations/stores/file_system/) | Downloads | Version | @@ -51,16 +40,17 @@ Make sure that the path you specify when initializing the store is free of other The LangChain `LocalFileStore` integration lives in the `langchain` package: -```{=mdx} - -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - langchain @langchain/core - - + +```bash npm +npm install langchain @langchain/core +``` +```bash yarn +yarn add langchain @langchain/core +``` +```bash pnpm +pnpm add langchain @langchain/core ``` + ## Instantiation diff --git a/src/oss/javascript/integrations/text_embedding/azure_openai.md b/src/oss/javascript/integrations/text_embedding/azure_openai.md index f03e0e4f7..26a8c3660 100644 --- a/src/oss/javascript/integrations/text_embedding/azure_openai.md +++ b/src/oss/javascript/integrations/text_embedding/azure_openai.md @@ -10,18 +10,12 @@ You can learn more about Azure OpenAI and its difference with the OpenAI API on This will help you get started with AzureOpenAIEmbeddings [embedding models](/oss/concepts/embedding_models) using LangChain. For detailed documentation on `AzureOpenAIEmbeddings` features and configuration options, please refer to the [API reference](https://api.js.langchain.com/classes/langchain_openai.AzureOpenAIEmbeddings.html). 
-```{=mdx} - **Previously, LangChain.js supported integration with Azure OpenAI using the dedicated [Azure OpenAI SDK](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/openai/openai). This SDK is now deprecated in favor of the new Azure integration in the OpenAI SDK, which allows to access the latest OpenAI models and features the same day they are released, and allows seamless transition between the OpenAI API and Azure OpenAI.** - If you are using Azure OpenAI with the deprecated SDK, see the [migration guide](#migration-from-azure-openai-sdk) to update to the new API. - -``` - ## Overview ### Integration details @@ -60,28 +54,27 @@ If you want to get automated tracing of your model calls you can also set your [ The LangChain AzureOpenAIEmbeddings integration lives in the `@langchain/openai` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - + +```bash npm +npm install @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/openai @langchain/core +``` + - - @langchain/openai @langchain/core - **You can find the list of supported API versions in the [Azure OpenAI documentation](https://learn.microsoft.com/azure/ai-services/openai/reference).** - - **If `AZURE_OPENAI_API_EMBEDDINGS_DEPLOYMENT_NAME` is not defined, it will fall back to the value of `AZURE_OPENAI_API_DEPLOYMENT_NAME` for the deployment name. 
The same applies to the `azureOpenAIApiEmbeddingsDeploymentName` parameter in the `AzureOpenAIEmbeddings` constructor, which will fall back to the value of `azureOpenAIApiDeploymentName` if not defined.** - - -``` - ## Instantiation Now we can instantiate our model object and embed text: diff --git a/src/oss/javascript/integrations/tools/duckduckgo_search.md b/src/oss/javascript/integrations/tools/duckduckgo_search.md index 9ee57c812..d9bced24c 100644 --- a/src/oss/javascript/integrations/tools/duckduckgo_search.md +++ b/src/oss/javascript/integrations/tools/duckduckgo_search.md @@ -92,9 +92,7 @@ ToolMessage { We can use our tool in a chain by first binding it to a [tool-calling model](/oss/how-to/tool_calling/) and then calling it: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/tools/exa_search.md b/src/oss/javascript/integrations/tools/exa_search.md index cd6d1867d..ee22142d4 100644 --- a/src/oss/javascript/integrations/tools/exa_search.md +++ b/src/oss/javascript/integrations/tools/exa_search.md @@ -110,12 +110,8 @@ ToolMessage { We can use our tool in a chain by first binding it to a [tool-calling model](/oss/how-to/tool_calling) and then calling it: -```{=mdx} - -``` - ```typescript // @lc-docs-hide-cell @@ -187,15 +183,21 @@ We can create LangChain tools which use the `ExaRetriever` and the `createRetrie We'll use LangGraph to create the agent. 
Make sure you have `@langchain/langgraph` installed: -```{=mdx} - - @langchain/langgraph - + +```bash npm +npm install @langchain/langgraph +``` +```bash yarn +yarn add @langchain/langgraph +``` +```bash pnpm +pnpm add @langchain/langgraph +``` + Then, define the LLM to use with the agent -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/tools/openapi.md b/src/oss/javascript/integrations/tools/openapi.md index 82fb01c0a..6df426a21 100644 --- a/src/oss/javascript/integrations/tools/openapi.md +++ b/src/oss/javascript/integrations/tools/openapi.md @@ -2,8 +2,6 @@ title: OpenApi Toolkit --- -```{=mdx} - **Disclaimer ⚠️** @@ -18,8 +16,6 @@ In addition, consider implementing measures to validate URLs before sending requ -``` - This will help you getting started with the [OpenApiToolkit](/oss/concepts/tools/#toolkits). For detailed documentation of all OpenApiToolkit features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain.agents.OpenApiToolkit.html). The `OpenAPIToolkit` has access to the following tools: @@ -45,22 +41,23 @@ process.env.LANGSMITH_API_KEY="your-api-key" This toolkit lives in the `langchain` package: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - langchain @langchain/core - + +```bash npm +npm install langchain @langchain/core +``` +```bash yarn +yarn add langchain @langchain/core ``` +```bash pnpm +pnpm add langchain @langchain/core +``` + ## Instantiation Now we can instantiate our toolkit. First, we need to define the LLM we would like to use in the toolkit. 
-```{=mdx} -``` ```typescript // @lc-docs-hide-cell @@ -139,11 +136,17 @@ console.log(tools.map((tool) => ({ First, ensure you have LangGraph installed: -```{=mdx} - - @langchain/langgraph - + +```bash npm +npm install @langchain/langgraph +``` +```bash yarn +yarn add @langchain/langgraph +``` +```bash pnpm +pnpm add @langchain/langgraph ``` + ```typescript import { createAgent } from "langchain" diff --git a/src/oss/javascript/integrations/tools/serpapi.md b/src/oss/javascript/integrations/tools/serpapi.md index 42e2dd65c..40adc9102 100644 --- a/src/oss/javascript/integrations/tools/serpapi.md +++ b/src/oss/javascript/integrations/tools/serpapi.md @@ -103,9 +103,7 @@ ToolMessage { We can use our tool in a chain by first binding it to a [tool-calling model](/oss/how-to/tool_calling/) and then calling it: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/tools/sql.md b/src/oss/javascript/integrations/tools/sql.md index e09fa037f..e6db7bc1c 100644 --- a/src/oss/javascript/integrations/tools/sql.md +++ b/src/oss/javascript/integrations/tools/sql.md @@ -46,9 +46,7 @@ pnpm add langchain @langchain/core typeorm First, we need to define our LLM to be used in the toolkit. 
-```{=mdx} -``` ```typescript // @lc-docs-hide-cell @@ -121,11 +119,17 @@ console.log(tools.map((tool) => ({ First, ensure you have LangGraph installed: -```{=mdx} - - @langchain/langgraph - + +```bash npm +npm install @langchain/langgraph +``` +```bash yarn +yarn add @langchain/langgraph +``` +```bash pnpm +pnpm add @langchain/langgraph ``` + ```typescript import { createAgent } from "langchain" diff --git a/src/oss/javascript/integrations/tools/tavily_crawl.md b/src/oss/javascript/integrations/tools/tavily_crawl.md index 966cccf4f..618c81c6e 100644 --- a/src/oss/javascript/integrations/tools/tavily_crawl.md +++ b/src/oss/javascript/integrations/tools/tavily_crawl.md @@ -101,9 +101,7 @@ await tool.invoke(modelGeneratedToolCall) We can use our tool in a chain by first binding it to a [tool-calling model](/oss/how-to/tool_calling/) and then calling it: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/tools/tavily_extract.md b/src/oss/javascript/integrations/tools/tavily_extract.md index 5e2248603..2bc17ec13 100644 --- a/src/oss/javascript/integrations/tools/tavily_extract.md +++ b/src/oss/javascript/integrations/tools/tavily_extract.md @@ -94,9 +94,7 @@ await tool.invoke(modelGeneratedToolCall) We can use our tool in a chain by first binding it to a [tool-calling model](/oss/how-to/tool_calling/) and then calling it: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/tools/tavily_map.md b/src/oss/javascript/integrations/tools/tavily_map.md index c10bac9d0..65a2e9cc1 100644 --- a/src/oss/javascript/integrations/tools/tavily_map.md +++ b/src/oss/javascript/integrations/tools/tavily_map.md @@ -98,9 +98,7 @@ await tool.invoke(modelGeneratedToolCall) We can use our tool in a chain by first binding it to a [tool-calling model](/oss/how-to/tool_calling/) and then calling it: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git 
a/src/oss/javascript/integrations/tools/tavily_search.md b/src/oss/javascript/integrations/tools/tavily_search.md index a15cb952d..79ef596e6 100644 --- a/src/oss/javascript/integrations/tools/tavily_search.md +++ b/src/oss/javascript/integrations/tools/tavily_search.md @@ -106,9 +106,7 @@ await tool.invoke(modelGeneratedToolCall) We can use our tool in a chain by first binding it to a [tool-calling model](/oss/how-to/tool_calling/) and then calling it: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/tools/tavily_search_community.md b/src/oss/javascript/integrations/tools/tavily_search_community.md index 41c367af0..7d7d324cc 100644 --- a/src/oss/javascript/integrations/tools/tavily_search_community.md +++ b/src/oss/javascript/integrations/tools/tavily_search_community.md @@ -99,9 +99,7 @@ await tool.invoke(modelGeneratedToolCall) We can use our tool in a chain by first binding it to a [tool-calling model](/oss/how-to/tool_calling/) and then calling it: -```{=mdx} -``` ```typescript // @lc-docs-hide-cell diff --git a/src/oss/javascript/integrations/tools/vectorstore.md b/src/oss/javascript/integrations/tools/vectorstore.md index f6cdd43c2..5f201c50c 100644 --- a/src/oss/javascript/integrations/tools/vectorstore.md +++ b/src/oss/javascript/integrations/tools/vectorstore.md @@ -35,9 +35,7 @@ pnpm add langchain @langchain/core Now we can instantiate our toolkit. First, we need to define the LLM we'll use in the toolkit. 
-```{=mdx} -``` ```typescript // @lc-docs-hide-cell @@ -101,11 +99,17 @@ console.log(tools.map((tool) => ({ First, ensure you have LangGraph installed: -```{=mdx} - - @langchain/langgraph - + +```bash npm +npm install @langchain/langgraph +``` +```bash yarn +yarn add @langchain/langgraph +``` +```bash pnpm +pnpm add @langchain/langgraph ``` + Then, instantiate the agent: diff --git a/src/oss/javascript/integrations/vectorstores/elasticsearch.md b/src/oss/javascript/integrations/vectorstores/elasticsearch.md index 2c3354af5..a9300b134 100644 --- a/src/oss/javascript/integrations/vectorstores/elasticsearch.md +++ b/src/oss/javascript/integrations/vectorstores/elasticsearch.md @@ -2,16 +2,12 @@ title: Elasticsearch --- -```{=mdx} - **Compatibility** Only available on Node.js. -``` - [Elasticsearch](https://github.com/elastic/elasticsearch) is a distributed, RESTful search engine optimized for speed and relevance on production-scale workloads. It supports also vector search using the [k-nearest neighbor](https://en.wikipedia.org/wiki/K-nearest_neighbors_algorithm) (kNN) algorithm and also [custom models for Natural Language Processing](https://www.elastic.co/blog/how-to-deploy-nlp-text-embeddings-and-vector-search) (NLP). You can read more about the support of vector search in Elasticsearch [here](https://www.elastic.co/guide/en/elasticsearch/reference/current/knn-search.html). @@ -33,14 +29,17 @@ LangChain.js accepts [`@elastic/elasticsearch`](https://github.com/elastic/elast This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @elastic/elasticsearch @langchain/openai @langchain/core - + +```bash npm +npm install @langchain/community @elastic/elasticsearch @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/community @elastic/elasticsearch @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/community @elastic/elasticsearch @langchain/openai @langchain/core ``` + ### Credentials diff --git a/src/oss/javascript/integrations/vectorstores/faiss.md b/src/oss/javascript/integrations/vectorstores/faiss.md index 1006432a2..bdfbfeb6c 100644 --- a/src/oss/javascript/integrations/vectorstores/faiss.md +++ b/src/oss/javascript/integrations/vectorstores/faiss.md @@ -2,16 +2,12 @@ title: FaissStore --- -```{=mdx} - **Compatibility** Only available on Node.js. -``` - [Faiss](https://github.com/facebookresearch/faiss) is a library for efficient similarity search and clustering of dense vectors. LangChain.js supports using Faiss as a locally-running vectorstore that can be saved to a file. It also provides the ability to read the saved file from the [LangChain Python implementation](https://python.langchain.com/docs/integrations/vectorstores/faiss#saving-and-loading). @@ -32,14 +28,17 @@ To use Faiss vector stores, you'll need to install the `@langchain/community` in This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community faiss-node @langchain/openai @langchain/core - + +```bash npm +npm install @langchain/community faiss-node @langchain/openai @langchain/core ``` +```bash yarn +yarn add @langchain/community faiss-node @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/community faiss-node @langchain/openai @langchain/core +``` + ### Credentials @@ -246,11 +245,17 @@ console.log(result); To enable the ability to read the saved file from [LangChain Python's implementation](https://python.langchain.com/docs/integrations/vectorstores/faiss#saving-and-loading), you'll need to install the [`pickleparser`](https://github.com/ewfian/pickleparser) package. -```{=mdx} - - pickleparser - + +```bash npm +npm install pickleparser +``` +```bash yarn +yarn add pickleparser +``` +```bash pnpm +pnpm add pickleparser ``` + Then you can use the `.loadFromPython` static method: diff --git a/src/oss/javascript/integrations/vectorstores/hnswlib.md b/src/oss/javascript/integrations/vectorstores/hnswlib.md index e1106f6de..099bd2dbe 100644 --- a/src/oss/javascript/integrations/vectorstores/hnswlib.md +++ b/src/oss/javascript/integrations/vectorstores/hnswlib.md @@ -2,13 +2,11 @@ title: HNSWLib --- -```{=mdx} **Compatibility** Only available on Node.js. -``` HNSWLib is an in-memory vector store that can be saved to a file. It uses the [HNSWLib library](https://github.com/nmslib/hnswlib). @@ -28,22 +26,21 @@ To use HNSWLib vector stores, you'll need to install the `@langchain/community` This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community hnswlib-node @langchain/openai @langchain/core - + +```bash npm +npm install @langchain/community hnswlib-node @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/community hnswlib-node @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/community hnswlib-node @langchain/openai @langchain/core ``` + -```{=mdx} ****On Windows**, you might need to install [Visual Studio](https://visualstudio.microsoft.com/downloads/) first in order to properly build the `hnswlib-node` package.** - - -``` ### Credentials diff --git a/src/oss/javascript/integrations/vectorstores/mariadb.md b/src/oss/javascript/integrations/vectorstores/mariadb.md index ff84eec66..4be11cbb7 100644 --- a/src/oss/javascript/integrations/vectorstores/mariadb.md +++ b/src/oss/javascript/integrations/vectorstores/mariadb.md @@ -2,13 +2,11 @@ title: MariaDB --- -```{=mdx} **Compatibility** Only available on Node.js. -``` This requires MariaDB 11.7 or later version @@ -30,14 +28,17 @@ This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/op We'll also use the [`uuid`](https://www.npmjs.com/package/uuid) package to generate ids in the required format. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/openai @langchain/core mariadb uuid - + +```bash npm +npm install @langchain/community @langchain/openai @langchain/core mariadb uuid +``` +```bash yarn +yarn add @langchain/community @langchain/openai @langchain/core mariadb uuid +``` +```bash pnpm +pnpm add @langchain/community @langchain/openai @langchain/core mariadb uuid ``` + ### Setting up an instance diff --git a/src/oss/javascript/integrations/vectorstores/mongodb_atlas.md b/src/oss/javascript/integrations/vectorstores/mongodb_atlas.md index 6cf2b2777..d26ddf1d4 100644 --- a/src/oss/javascript/integrations/vectorstores/mongodb_atlas.md +++ b/src/oss/javascript/integrations/vectorstores/mongodb_atlas.md @@ -2,7 +2,6 @@ title: MongoDB Atlas --- -```{=mdx} **Compatibility** @@ -14,7 +13,6 @@ You can still create API routes that use MongoDB with Next.js by setting the `ru You can read more about Edge runtimes in the Next.js documentation [here](https://nextjs.org/docs/app/building-your-application/rendering/edge-and-nodejs-runtimes). -``` This guide provides a quick overview for getting started with MongoDB Atlas [vector stores](/oss/concepts/#vectorstores). For detailed documentation of all `MongoDBAtlasVectorSearch` features and configurations head to the [API reference](https://api.js.langchain.com/classes/langchain_mongodb.MongoDBAtlasVectorSearch.html). 
@@ -71,14 +69,17 @@ This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/op Install the following packages: -```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/mongodb mongodb @langchain/openai @langchain/core - + +```bash npm +npm install @langchain/mongodb mongodb @langchain/openai @langchain/core +``` +```bash yarn +yarn add @langchain/mongodb mongodb @langchain/openai @langchain/core +``` +```bash pnpm +pnpm add @langchain/mongodb mongodb @langchain/openai @langchain/core ``` + ### Credentials diff --git a/src/oss/javascript/integrations/vectorstores/pgvector.md b/src/oss/javascript/integrations/vectorstores/pgvector.md index c1976f085..63a1334db 100644 --- a/src/oss/javascript/integrations/vectorstores/pgvector.md +++ b/src/oss/javascript/integrations/vectorstores/pgvector.md @@ -2,13 +2,11 @@ title: PGVectorStore --- -```{=mdx} **Compatibility** Only available on Node.js. -``` To enable vector search in generic PostgreSQL databases, LangChain.js supports using the [`pgvector`](https://github.com/pgvector/pgvector) Postgres extension. @@ -30,14 +28,17 @@ This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/op We'll also use the [`uuid`](https://www.npmjs.com/package/uuid) package to generate ids in the required format. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/community @langchain/openai @langchain/core pg uuid - + +```bash npm +npm install @langchain/community @langchain/openai @langchain/core pg uuid +``` +```bash yarn +yarn add @langchain/community @langchain/openai @langchain/core pg uuid ``` +```bash pnpm +pnpm add @langchain/community @langchain/openai @langchain/core pg uuid +``` + ### Setting up an instance @@ -86,17 +87,14 @@ If you want to get automated tracing of your model calls you can also set your [ To instantiate the vector store, call the `.initialize()` static method. This will automatically check for the presence of a table, given by `tableName` in the passed `config`. If it is not there, it will create it with the required columns. -```{=mdx} -: + **Security** User-generated data such as usernames should not be used as input for table and column names. **This may lead to SQL Injection!** : -``` - ```typescript import { PGVectorStore, diff --git a/src/oss/javascript/integrations/vectorstores/qdrant.md b/src/oss/javascript/integrations/vectorstores/qdrant.md index a0bd567d2..d38736aa8 100644 --- a/src/oss/javascript/integrations/vectorstores/qdrant.md +++ b/src/oss/javascript/integrations/vectorstores/qdrant.md @@ -2,13 +2,11 @@ title: QdrantVectorStore --- -```{=mdx} **Compatibility** Only available on Node.js. -``` [Qdrant](https://qdrant.tech/) is a vector similarity search engine. It provides a production-ready service with a convenient API to store, search, and manage points - vectors with an additional payload. @@ -28,14 +26,17 @@ To use Qdrant vector stores, you'll need to set up a Qdrant instance and install This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/qdrant @langchain/core @langchain/openai - + +```bash npm +npm install @langchain/qdrant @langchain/core @langchain/openai +``` +```bash yarn +yarn add @langchain/qdrant @langchain/core @langchain/openai +``` +```bash pnpm +pnpm add @langchain/qdrant @langchain/core @langchain/openai ``` + After installing the required dependencies, run a Qdrant instance with Docker on your computer by following the [Qdrant setup instructions](https://qdrant.tech/documentation/quickstart/). Note the URL your container runs on. diff --git a/src/oss/javascript/integrations/vectorstores/redis.md b/src/oss/javascript/integrations/vectorstores/redis.md index 7e9ea0f70..cd1217569 100644 --- a/src/oss/javascript/integrations/vectorstores/redis.md +++ b/src/oss/javascript/integrations/vectorstores/redis.md @@ -2,13 +2,11 @@ title: RedisVectorStore --- -```{=mdx} **Compatibility** Only available on Node.js. -``` [Redis](https://redis.io/) is a fast open source, in-memory data store. As part of the [Redis Stack](https://redis.io/docs/latest/operate/oss_and_stack/install/install-stack/), [RediSearch](https://redis.io/docs/latest/develop/interact/search-and-query/) is the module that enables vector similarity semantic search, as well as many other types of searching. @@ -28,14 +26,17 @@ To use Redis vector stores, you'll need to set up a Redis instance and install t This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish. 
-```{=mdx} -import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx"; - - - - @langchain/redis @langchain/core redis @langchain/openai - + +```bash npm +npm install @langchain/redis @langchain/core redis @langchain/openai +``` +```bash yarn +yarn add @langchain/redis @langchain/core redis @langchain/openai +``` +```bash pnpm +pnpm add @langchain/redis @langchain/core redis @langchain/openai ``` + You can set up a Redis instance locally with Docker by following [these instructions](https://redis.io/docs/latest/operate/oss_and_stack/install/install-stack/docker/#redisredis-stack). diff --git a/src/oss/javascript/integrations/vectorstores/upstash.md b/src/oss/javascript/integrations/vectorstores/upstash.md index c8ec3c852..f9ec93b85 100644 --- a/src/oss/javascript/integrations/vectorstores/upstash.md +++ b/src/oss/javascript/integrations/vectorstores/upstash.md @@ -36,11 +36,10 @@ You can create an index from the [Upstash Console](https://console.upstash.com/l Upstash vector also has built in embedding support. Which means you can use it directly without the need for an additional embedding model. Check the [embedding models documentation](https://upstash.com/docs/vector/features/embeddingmodels) for more details. -```{=mdx} + To use the built-in Upstash embeddings, you'll need to select an embedding model when creating the index. 
-``` ### Credentials diff --git a/src/oss/python/integrations/retrievers/linkup_search.md b/src/oss/python/integrations/retrievers/linkup_search.md index e3cafd66c..4a36135e0 100644 --- a/src/oss/python/integrations/retrievers/linkup_search.md +++ b/src/oss/python/integrations/retrievers/linkup_search.md @@ -78,9 +78,7 @@ Like other retrievers, LinkupSearchRetriever can be incorporated into LLM applic We will need a LLM or chat model: -```{=mdx} -``` ```python # | output: false From c1686ce5210bf4ee75daf90ced98e4e8c0da4d10 Mon Sep 17 00:00:00 2001 From: Brody Klapko Date: Mon, 8 Sep 2025 12:56:14 -0700 Subject: [PATCH 5/6] Fix tip callout --- src/oss/javascript/integrations/chat/openai.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/oss/javascript/integrations/chat/openai.md b/src/oss/javascript/integrations/chat/openai.md index 5c1f510e7..88a911775 100644 --- a/src/oss/javascript/integrations/chat/openai.md +++ b/src/oss/javascript/integrations/chat/openai.md @@ -430,11 +430,10 @@ console.log(result); As of Aug 6, 2024, OpenAI supports a `strict` argument when calling tools that will enforce that the tool argument schema is respected by the model. See more [here](https://platform.openai.com/docs/guides/function-calling). -info Requires ``@langchain/openai >= 0.2.6`` +info Requires ``@langchain/openai >= 0.2.6`` **Note**: If ``strict: true`` the tool definition will also be validated, and a subset of JSON schema are accepted. Crucially, schema cannot have optional args (those with default values). Read the full docs on what types of schema are supported here: https://platform.openai.com/docs/guides/structured-outputs/supported-schemas. - -::: + Here's an example with tool calling. 
Passing an extra `strict: true` argument to `.bindTools` will pass the param through to all tool definitions: From 790a1a0846a271fea8405c2a2029263140153543 Mon Sep 17 00:00:00 2001 From: Brody Klapko Date: Tue, 9 Sep 2025 10:33:43 -0700 Subject: [PATCH 6/6] Fix tags, fix broken migration content --- .../javascript/integrations/chat/openai.md | 19 ++++++++----------- .../javascript/integrations/tools/connery.mdx | 2 -- .../integrations/tools/connery_toolkit.mdx | 2 -- .../integrations/vectorstores/pgvector.md | 2 +- 4 files changed, 9 insertions(+), 16 deletions(-) diff --git a/src/oss/javascript/integrations/chat/openai.md b/src/oss/javascript/integrations/chat/openai.md index 88a911775..6240dd13c 100644 --- a/src/oss/javascript/integrations/chat/openai.md +++ b/src/oss/javascript/integrations/chat/openai.md @@ -254,9 +254,7 @@ If you need additional information like logprobs or token usage, these will be r **Requires `@langchain/core` version >=0.1.48.** - -::: - + ```typescript import { ChatOpenAI } from "@langchain/openai"; @@ -379,9 +377,7 @@ const result = await agent.invoke("Use the tool to calculate 3^3"); console.log(result); ``` -
-Context-free grammars - + OpenAI supports the specification of a [context-free grammar](https://platform.openai.com/docs/guides/function-calling#context-free-grammars) for custom tool inputs in `lark` or `regex` format. See [OpenAI docs](https://platform.openai.com/docs/guides/function-calling#context-free-grammars) for details. The `format` parameter can be passed into `customTool` as shown below: ```typescript @@ -423,16 +419,17 @@ const agent = createAgent({ const result = await agent.invoke("Use the tool to calculate 3^3"); console.log(result); ``` + -
- -## ``strict: true`` +## `strict: true` As of Aug 6, 2024, OpenAI supports a `strict` argument when calling tools that will enforce that the tool argument schema is respected by the model. See more [here](https://platform.openai.com/docs/guides/function-calling). -info Requires ``@langchain/openai >= 0.2.6`` + + +Requires `@langchain/openai >= 0.2.6` -**Note**: If ``strict: true`` the tool definition will also be validated, and a subset of JSON schema are accepted. Crucially, schema cannot have optional args (those with default values). Read the full docs on what types of schema are supported here: https://platform.openai.com/docs/guides/structured-outputs/supported-schemas. +**Note**: If `strict: true` the tool definition will also be validated, and a subset of JSON schema is accepted. Crucially, schema cannot have optional args (those with default values). Read the full docs on what types of schema are supported here: https://platform.openai.com/docs/guides/structured-outputs/supported-schemas. Here's an example with tool calling. Passing an extra `strict: true` argument to `.bindTools` will pass the param through to all tool definitions: diff --git a/src/oss/javascript/integrations/tools/connery.mdx b/src/oss/javascript/integrations/tools/connery.mdx index ebb49cee7..2aab1c25b 100644 --- a/src/oss/javascript/integrations/tools/connery.mdx +++ b/src/oss/javascript/integrations/tools/connery.mdx @@ -4,8 +4,6 @@ title: Connery Action Tool import Connery from "/snippets/javascript-integrations/examples/tools/connery.mdx"; -import IntegrationInstallTooltip from "/snippets/javascript-integrations/integration-install-tooltip.mdx"; - Using this tool, you can integrate individual Connery Action into your LangChain agent.
diff --git a/src/oss/javascript/integrations/tools/connery_toolkit.mdx b/src/oss/javascript/integrations/tools/connery_toolkit.mdx index a5ccfd360..66f956adc 100644 --- a/src/oss/javascript/integrations/tools/connery_toolkit.mdx +++ b/src/oss/javascript/integrations/tools/connery_toolkit.mdx @@ -4,8 +4,6 @@ title: Connery Toolkit import ConneryMrkl from "/snippets/javascript-integrations/examples/agents/connery_mrkl.mdx"; -import IntegrationInstallTooltip from "/snippets/javascript-integrations/integration-install-tooltip.mdx"; - Using this toolkit, you can integrate Connery Actions into your LangChain agent. diff --git a/src/oss/javascript/integrations/vectorstores/pgvector.md b/src/oss/javascript/integrations/vectorstores/pgvector.md index 63a1334db..3f8aef67e 100644 --- a/src/oss/javascript/integrations/vectorstores/pgvector.md +++ b/src/oss/javascript/integrations/vectorstores/pgvector.md @@ -93,7 +93,7 @@ To instantiate the vector store, call the `.initialize()` static method. This wi User-generated data such as usernames should not be used as input for table and column names. **This may lead to SQL Injection!** -
: +
```typescript import {