diff --git a/src/oss/javascript/integrations/callbacks/datadog_tracer.mdx b/src/oss/javascript/integrations/callbacks/datadog_tracer.mdx
index 4b1b36659..f8af10a58 100644
--- a/src/oss/javascript/integrations/callbacks/datadog_tracer.mdx
+++ b/src/oss/javascript/integrations/callbacks/datadog_tracer.mdx
@@ -12,10 +12,6 @@ This is an experimental community implementation, and it is not officially suppo
## Setup
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/callbacks/upstash_ratelimit_callback.mdx b/src/oss/javascript/integrations/callbacks/upstash_ratelimit_callback.mdx
index 19174c70f..cb0f9fdfb 100644
--- a/src/oss/javascript/integrations/callbacks/upstash_ratelimit_callback.mdx
+++ b/src/oss/javascript/integrations/callbacks/upstash_ratelimit_callback.mdx
@@ -26,10 +26,6 @@ UPSTASH_REDIS_REST_TOKEN="****"
Next, you will need to install Upstash Ratelimit and `@langchain/community`:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @upstash/ratelimit @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/alibaba_tongyi.mdx b/src/oss/javascript/integrations/chat/alibaba_tongyi.mdx
index 45fd3a85d..0c6d9a3cf 100644
--- a/src/oss/javascript/integrations/chat/alibaba_tongyi.mdx
+++ b/src/oss/javascript/integrations/chat/alibaba_tongyi.mdx
@@ -10,10 +10,6 @@ You'll need to sign up for an Alibaba API key and set it as an environment varia
Then, you'll need to install the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/anthropic.md b/src/oss/javascript/integrations/chat/anthropic.md
index 39dce324b..61d5e2733 100644
--- a/src/oss/javascript/integrations/chat/anthropic.md
+++ b/src/oss/javascript/integrations/chat/anthropic.md
@@ -45,16 +45,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain `ChatAnthropic` integration lives in the `@langchain/anthropic` package:
-```{=mdx}
-
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/anthropic @langchain/core
+```
-
- @langchain/anthropic @langchain/core
-
+```bash yarn
+yarn add @langchain/anthropic @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/anthropic @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/arcjet.md b/src/oss/javascript/integrations/chat/arcjet.md
index 532d3fde3..77c35ce79 100644
--- a/src/oss/javascript/integrations/chat/arcjet.md
+++ b/src/oss/javascript/integrations/chat/arcjet.md
@@ -20,15 +20,19 @@ The Arcjet Redact object is not a chat model itself, instead it wraps an LLM. It
Install the Arcjet Redaction Library:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @arcjet/redact
+```
-
- @arcjet/redact
-
+```bash yarn
+yarn add @arcjet/redact
+```
+```bash pnpm
+pnpm add @arcjet/redact
```
+
And install LangChain Community:
diff --git a/src/oss/javascript/integrations/chat/azure.md b/src/oss/javascript/integrations/chat/azure.md
index f763409f9..59c9f9bd0 100644
--- a/src/oss/javascript/integrations/chat/azure.md
+++ b/src/oss/javascript/integrations/chat/azure.md
@@ -56,16 +56,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain AzureChatOpenAI integration lives in the `@langchain/openai` package:
-```{=mdx}
-
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/openai @langchain/core
+```
-
- @langchain/openai @langchain/core
-
+```bash yarn
+yarn add @langchain/openai @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/openai @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/baidu_qianfan.mdx b/src/oss/javascript/integrations/chat/baidu_qianfan.mdx
index 6f92cfdc3..9157f0201 100644
--- a/src/oss/javascript/integrations/chat/baidu_qianfan.mdx
+++ b/src/oss/javascript/integrations/chat/baidu_qianfan.mdx
@@ -6,10 +6,6 @@ title: ChatBaiduQianfan
You'll first need to install the [`@langchain/baidu-qianfan`](https://www.npmjs.com/package/@langchain/baidu-qianfan) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/baidu-qianfan @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/baidu_wenxin.mdx b/src/oss/javascript/integrations/chat/baidu_wenxin.mdx
index 7875fded0..5896c1a2e 100644
--- a/src/oss/javascript/integrations/chat/baidu_wenxin.mdx
+++ b/src/oss/javascript/integrations/chat/baidu_wenxin.mdx
@@ -11,10 +11,6 @@ Use the [`@langchain/baidu-qianfan`](/oss/integrations/chat/baidu_qianfan/) pack
LangChain.js supports Baidu's ERNIE-bot family of models. Here's an example:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/bedrock_converse.md b/src/oss/javascript/integrations/chat/bedrock_converse.md
index 81fc464ad..5c20ec434 100644
--- a/src/oss/javascript/integrations/chat/bedrock_converse.md
+++ b/src/oss/javascript/integrations/chat/bedrock_converse.md
@@ -41,15 +41,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain `ChatBedrockConverse` integration lives in the `@langchain/aws` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/aws @langchain/core
+```
-
- @langchain/aws @langchain/core
-
+```bash yarn
+yarn add @langchain/aws @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/aws @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/cerebras.md b/src/oss/javascript/integrations/chat/cerebras.md
index b44e0a5a4..3b556d612 100644
--- a/src/oss/javascript/integrations/chat/cerebras.md
+++ b/src/oss/javascript/integrations/chat/cerebras.md
@@ -53,16 +53,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain ChatCerebras integration lives in the `@langchain/cerebras` package:
-```{=mdx}
-
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/cerebras @langchain/core
+```
-
- @langchain/cerebras @langchain/core
-
+```bash yarn
+yarn add @langchain/cerebras @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/cerebras @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/cloudflare_workersai.md b/src/oss/javascript/integrations/chat/cloudflare_workersai.md
index 12c431060..bfd8b965a 100644
--- a/src/oss/javascript/integrations/chat/cloudflare_workersai.md
+++ b/src/oss/javascript/integrations/chat/cloudflare_workersai.md
@@ -36,15 +36,19 @@ Passing a binding within a Cloudflare Worker is not yet supported.
The LangChain ChatCloudflareWorkersAI integration lives in the `@langchain/cloudflare` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/cloudflare @langchain/core
+```
-
- @langchain/cloudflare @langchain/core
-
+```bash yarn
+yarn add @langchain/cloudflare @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/cloudflare @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/cohere.md b/src/oss/javascript/integrations/chat/cohere.md
index 91d3df02d..47c66d623 100644
--- a/src/oss/javascript/integrations/chat/cohere.md
+++ b/src/oss/javascript/integrations/chat/cohere.md
@@ -48,16 +48,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain ChatCohere integration lives in the `@langchain/cohere` package:
-```{=mdx}
-
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/cohere @langchain/core
+```
-
- @langchain/cohere @langchain/core
-
+```bash yarn
+yarn add @langchain/cohere @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/cohere @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/deep_infra.mdx b/src/oss/javascript/integrations/chat/deep_infra.mdx
index ee4fc0d27..93043fc9e 100644
--- a/src/oss/javascript/integrations/chat/deep_infra.mdx
+++ b/src/oss/javascript/integrations/chat/deep_infra.mdx
@@ -5,10 +5,6 @@ title: ChatDeepInfra
LangChain supports chat models hosted by [Deep Infra](https://deepinfra.com/) through the `ChatDeepInfra` wrapper.
First, you'll need to install the `@langchain/community` package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/fireworks.md b/src/oss/javascript/integrations/chat/fireworks.md
index acec550f1..63f11c9a7 100644
--- a/src/oss/javascript/integrations/chat/fireworks.md
+++ b/src/oss/javascript/integrations/chat/fireworks.md
@@ -45,16 +45,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain `ChatFireworks` integration lives in the `@langchain/community` package:
-```{=mdx}
-
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/friendli.mdx b/src/oss/javascript/integrations/chat/friendli.mdx
index 10a7d6d51..8ea137fc2 100644
--- a/src/oss/javascript/integrations/chat/friendli.mdx
+++ b/src/oss/javascript/integrations/chat/friendli.mdx
@@ -10,10 +10,6 @@ This tutorial guides you through integrating `ChatFriendli` for chat application
Ensure the `@langchain/community` package is installed.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/google_vertex_ai.md b/src/oss/javascript/integrations/chat/google_vertex_ai.md
index 2a8ef08d0..c30df6219 100644
--- a/src/oss/javascript/integrations/chat/google_vertex_ai.md
+++ b/src/oss/javascript/integrations/chat/google_vertex_ai.md
@@ -65,21 +65,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain `ChatVertexAI` integration lives in the `@langchain/google-vertexai` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
-
-
- @langchain/google-vertexai @langchain/core
-
-
-Or if using in a web environment like a [Vercel Edge function](https://vercel.com/blog/edge-functions-generally-available):
+
+```bash npm
+npm install @langchain/google-vertexai @langchain/core
+```
-
- @langchain/google-vertexai-web @langchain/core
-
+```bash yarn
+yarn add @langchain/google-vertexai @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/google-vertexai @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/groq.md b/src/oss/javascript/integrations/chat/groq.md
index fc9159ddd..def47eca1 100644
--- a/src/oss/javascript/integrations/chat/groq.md
+++ b/src/oss/javascript/integrations/chat/groq.md
@@ -46,16 +46,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain ChatGroq integration lives in the `@langchain/groq` package:
-```{=mdx}
-
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/groq @langchain/core
+```
-
- @langchain/groq @langchain/core
-
+```bash yarn
+yarn add @langchain/groq @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/groq @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/ibm.md b/src/oss/javascript/integrations/chat/ibm.md
index e47acea57..536dfb226 100644
--- a/src/oss/javascript/integrations/chat/ibm.md
+++ b/src/oss/javascript/integrations/chat/ibm.md
@@ -111,15 +111,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain IBM watsonx.ai integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/index.mdx b/src/oss/javascript/integrations/chat/index.mdx
index 597a9d115..a05e8f0f3 100644
--- a/src/oss/javascript/integrations/chat/index.mdx
+++ b/src/oss/javascript/integrations/chat/index.mdx
@@ -13,10 +13,6 @@ If you'd like to write your own chat model, see [this how-to](/oss/how-to/custom
#### Install dependencies
-
-See [this section for general instructions on installing integration packages](/docs/how_to/installation/#installing-integration-packages).
-
-
@@ -63,10 +59,6 @@ const model = new ChatGroq({
#### Install dependencies
-
-See [this section for general instructions on installing integration packages](/docs/how_to/installation/#installing-integration-packages).
-
-
@@ -110,10 +102,6 @@ const model = new ChatOpenAI({ model: "gpt-4o-mini" });
#### Install dependencies
-
-See [this section for general instructions on installing integration packages](/docs/how_to/installation/#installing-integration-packages).
-
-
@@ -160,10 +148,6 @@ const model = new ChatAnthropic({
#### Install dependencies
-
-See [this section for general instructions on installing integration packages](/docs/how_to/installation/#installing-integration-packages).
-
-
@@ -210,10 +194,6 @@ const model = new ChatGoogleGenerativeAI({
#### Install dependencies
-
-See [this section for general instructions on installing integration packages](/docs/how_to/installation/#installing-integration-packages).
-
-
@@ -260,10 +240,6 @@ const model = new ChatFireworks({
#### Install dependencies
-
-See [this section for general instructions on installing integration packages](/docs/how_to/installation/#installing-integration-packages).
-
-
@@ -310,10 +286,6 @@ const model = new ChatMistralAI({
#### Install dependencies
-
-See [this section for general instructions on installing integration packages](/docs/how_to/installation/#installing-integration-packages).
-
-
diff --git a/src/oss/javascript/integrations/chat/llama_cpp.mdx b/src/oss/javascript/integrations/chat/llama_cpp.mdx
index 405bb12cb..7c95ecac4 100644
--- a/src/oss/javascript/integrations/chat/llama_cpp.mdx
+++ b/src/oss/javascript/integrations/chat/llama_cpp.mdx
@@ -14,10 +14,6 @@ This module is based on the [node-llama-cpp](https://github.com/withcatai/node-l
You'll need to install major version `3` of the [node-llama-cpp](https://github.com/withcatai/node-llama-cpp) module to communicate with your local model.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install -S node-llama-cpp@3 @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/minimax.mdx b/src/oss/javascript/integrations/chat/minimax.mdx
index 899218288..bf0977c18 100644
--- a/src/oss/javascript/integrations/chat/minimax.mdx
+++ b/src/oss/javascript/integrations/chat/minimax.mdx
@@ -10,10 +10,6 @@ This example demonstrates using LangChain.js to interact with Minimax.
To use Minimax models, you'll need a Minimax account, an API key, and a Group ID.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/mistral.md b/src/oss/javascript/integrations/chat/mistral.md
index e8186717e..ae6bf5778 100644
--- a/src/oss/javascript/integrations/chat/mistral.md
+++ b/src/oss/javascript/integrations/chat/mistral.md
@@ -45,15 +45,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain ChatMistralAI integration lives in the `@langchain/mistralai` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/mistralai @langchain/core
+```
-
-@langchain/mistralai @langchain/core
-
+```bash yarn
+yarn add @langchain/mistralai @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/mistralai @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/moonshot.mdx b/src/oss/javascript/integrations/chat/moonshot.mdx
index a559161d8..179edf95b 100644
--- a/src/oss/javascript/integrations/chat/moonshot.mdx
+++ b/src/oss/javascript/integrations/chat/moonshot.mdx
@@ -14,10 +14,6 @@ https://platform.moonshot.cn/console
You'll also need to install the following dependencies:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/novita.md b/src/oss/javascript/integrations/chat/novita.md
index 35bfab740..7ccf5b1a3 100644
--- a/src/oss/javascript/integrations/chat/novita.md
+++ b/src/oss/javascript/integrations/chat/novita.md
@@ -32,14 +32,19 @@ export NOVITA_API_KEY="your-api-key"
The LangChain Novita integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/ollama.md b/src/oss/javascript/integrations/chat/ollama.md
index 5c3c62e28..13127b147 100644
--- a/src/oss/javascript/integrations/chat/ollama.md
+++ b/src/oss/javascript/integrations/chat/ollama.md
@@ -45,15 +45,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain ChatOllama integration lives in the `@langchain/ollama` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/ollama @langchain/core
+```
-
- @langchain/ollama @langchain/core
-
+```bash yarn
+yarn add @langchain/ollama @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/ollama @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/openai.md b/src/oss/javascript/integrations/chat/openai.md
index 264013db8..dcf368ff5 100644
--- a/src/oss/javascript/integrations/chat/openai.md
+++ b/src/oss/javascript/integrations/chat/openai.md
@@ -45,16 +45,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain `ChatOpenAI` integration lives in the `@langchain/openai` package:
-```{=mdx}
-
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/openai @langchain/core
+```
-
- @langchain/openai @langchain/core
-
+```bash yarn
+yarn add @langchain/openai @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/openai @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/perplexity.md b/src/oss/javascript/integrations/chat/perplexity.md
index 6333589e6..00db5eac2 100644
--- a/src/oss/javascript/integrations/chat/perplexity.md
+++ b/src/oss/javascript/integrations/chat/perplexity.md
@@ -45,15 +45,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain Perplexity integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/premai.mdx b/src/oss/javascript/integrations/chat/premai.mdx
index 9ba01b0a0..7079c741e 100644
--- a/src/oss/javascript/integrations/chat/premai.mdx
+++ b/src/oss/javascript/integrations/chat/premai.mdx
@@ -13,10 +13,6 @@ export PREM_API_KEY=your-api-key
You can use models provided by Prem AI as follows:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/tencent_hunyuan.mdx b/src/oss/javascript/integrations/chat/tencent_hunyuan.mdx
index 4ee247660..44137dc09 100644
--- a/src/oss/javascript/integrations/chat/tencent_hunyuan.mdx
+++ b/src/oss/javascript/integrations/chat/tencent_hunyuan.mdx
@@ -12,10 +12,6 @@ https://cloud.tencent.com/document/product/1729/104753
2. Create SecretID & SecretKey [here](https://console.cloud.tencent.com/cam/capi).
3. Set SecretID and SecretKey as environment variables named `TENCENT_SECRET_ID` and `TENCENT_SECRET_KEY`, respectively.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/togetherai.md b/src/oss/javascript/integrations/chat/togetherai.md
index 052d728a3..93df82ce3 100644
--- a/src/oss/javascript/integrations/chat/togetherai.md
+++ b/src/oss/javascript/integrations/chat/togetherai.md
@@ -45,15 +45,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain ChatTogetherAI integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/web_llm.mdx b/src/oss/javascript/integrations/chat/web_llm.mdx
index 91bbcd06e..c455bb268 100644
--- a/src/oss/javascript/integrations/chat/web_llm.mdx
+++ b/src/oss/javascript/integrations/chat/web_llm.mdx
@@ -14,10 +14,6 @@ You can run LLMs directly in your web browser using LangChain's [WebLLM](https:/
You'll need to install the [WebLLM SDK](https://www.npmjs.com/package/@mlc-ai/web-llm) module to communicate with your local model.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install -S @mlc-ai/web-llm @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/xai.md b/src/oss/javascript/integrations/chat/xai.md
index 15a5e9a38..4ff7fca4f 100644
--- a/src/oss/javascript/integrations/chat/xai.md
+++ b/src/oss/javascript/integrations/chat/xai.md
@@ -45,16 +45,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain `ChatXAI` integration lives in the `@langchain/xai` package:
-```{=mdx}
-
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/xai @langchain/core
+```
-
- @langchain/xai @langchain/core
-
+```bash yarn
+yarn add @langchain/xai @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/xai @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/chat/yandex.mdx b/src/oss/javascript/integrations/chat/yandex.mdx
index 28fe823a7..616f205da 100644
--- a/src/oss/javascript/integrations/chat/yandex.mdx
+++ b/src/oss/javascript/integrations/chat/yandex.mdx
@@ -17,10 +17,6 @@ Next, you have two authentication options:
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/yandex @langchain/core
```
diff --git a/src/oss/javascript/integrations/chat/zhipuai.mdx b/src/oss/javascript/integrations/chat/zhipuai.mdx
index a9461a120..01407c12b 100644
--- a/src/oss/javascript/integrations/chat/zhipuai.mdx
+++ b/src/oss/javascript/integrations/chat/zhipuai.mdx
@@ -14,10 +14,6 @@ https://open.bigmodel.cn
You'll also need to install the following dependencies:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core jsonwebtoken
```
diff --git a/src/oss/javascript/integrations/document_compressors/cohere_rerank.mdx b/src/oss/javascript/integrations/document_compressors/cohere_rerank.mdx
index 80673b11a..191d7f849 100644
--- a/src/oss/javascript/integrations/document_compressors/cohere_rerank.mdx
+++ b/src/oss/javascript/integrations/document_compressors/cohere_rerank.mdx
@@ -10,10 +10,6 @@ Cohere offers an API for reranking documents. In this example we'll show you how
## Setup
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/cohere @langchain/core
```
diff --git a/src/oss/javascript/integrations/document_compressors/ibm.md b/src/oss/javascript/integrations/document_compressors/ibm.md
index 5a216833c..441153314 100644
--- a/src/oss/javascript/integrations/document_compressors/ibm.md
+++ b/src/oss/javascript/integrations/document_compressors/ibm.md
@@ -105,14 +105,19 @@ If you want to get automated tracing from individual queries, you can also set y
This document compressor lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/document_compressors/mixedbread_ai.mdx b/src/oss/javascript/integrations/document_compressors/mixedbread_ai.mdx
index e6cce8c8b..530fda74f 100644
--- a/src/oss/javascript/integrations/document_compressors/mixedbread_ai.mdx
+++ b/src/oss/javascript/integrations/document_compressors/mixedbread_ai.mdx
@@ -10,10 +10,6 @@ This guide will help you integrate and use the [Mixedbread AI](https://mixedbrea
To get started, install the `@langchain/mixedbread-ai` package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash
npm install @langchain/mixedbread-ai
```
diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/apify_dataset.mdx b/src/oss/javascript/integrations/document_loaders/web_loaders/apify_dataset.mdx
index 52f840864..d694d32bc 100644
--- a/src/oss/javascript/integrations/document_loaders/web_loaders/apify_dataset.mdx
+++ b/src/oss/javascript/integrations/document_loaders/web_loaders/apify_dataset.mdx
@@ -32,10 +32,6 @@ You'll first need to install the official Apify client:
```bash npm
npm install apify-client
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install hnswlib-node @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/firecrawl.md b/src/oss/javascript/integrations/document_loaders/web_loaders/firecrawl.md
index 603889062..e2310c6bf 100644
--- a/src/oss/javascript/integrations/document_loaders/web_loaders/firecrawl.md
+++ b/src/oss/javascript/integrations/document_loaders/web_loaders/firecrawl.md
@@ -50,15 +50,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain FireCrawlLoader integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core @mendable/firecrawl-js@0.0.36
+```
-
- @langchain/community @langchain/core @mendable/firecrawl-js@0.0.36
-
+```bash yarn
+yarn add @langchain/community @langchain/core @mendable/firecrawl-js@0.0.36
+```
+```bash pnpm
+pnpm add @langchain/community @langchain/core @mendable/firecrawl-js@0.0.36
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/langsmith.md b/src/oss/javascript/integrations/document_loaders/web_loaders/langsmith.md
index ea6b7a206..e36533494 100644
--- a/src/oss/javascript/integrations/document_loaders/web_loaders/langsmith.md
+++ b/src/oss/javascript/integrations/document_loaders/web_loaders/langsmith.md
@@ -35,15 +35,19 @@ export LANGSMITH_API_KEY="your-api-key"
The `LangSmithLoader` integration lives in the `@langchain/core` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/core
+```
-
- @langchain/core
-
+```bash yarn
+yarn add @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/core
```
+
## Create example dataset
diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/pdf.md b/src/oss/javascript/integrations/document_loaders/web_loaders/pdf.md
index 303092860..c9f17e69b 100644
--- a/src/oss/javascript/integrations/document_loaders/web_loaders/pdf.md
+++ b/src/oss/javascript/integrations/document_loaders/web_loaders/pdf.md
@@ -39,15 +39,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain WebPDFLoader integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core pdf-parse
+```
-
- @langchain/community @langchain/core pdf-parse
-
+```bash yarn
+yarn add @langchain/community @langchain/core pdf-parse
+```
+```bash pnpm
+pnpm add @langchain/community @langchain/core pdf-parse
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/searchapi.mdx b/src/oss/javascript/integrations/document_loaders/web_loaders/searchapi.mdx
index ccf48eb6a..8c0bbf80d 100644
--- a/src/oss/javascript/integrations/document_loaders/web_loaders/searchapi.mdx
+++ b/src/oss/javascript/integrations/document_loaders/web_loaders/searchapi.mdx
@@ -22,10 +22,6 @@ Here's an example of how to use the `SearchApiLoader`:
import Searchapi from "/snippets/javascript-integrations/examples/document_loaders/searchapi.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core @langchain/openai
```
diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/serpapi.mdx b/src/oss/javascript/integrations/document_loaders/web_loaders/serpapi.mdx
index 23e7a312e..c220fdf18 100644
--- a/src/oss/javascript/integrations/document_loaders/web_loaders/serpapi.mdx
+++ b/src/oss/javascript/integrations/document_loaders/web_loaders/serpapi.mdx
@@ -20,10 +20,6 @@ Here's an example of how to use the `SerpAPILoader`:
import Serpapi from "/snippets/javascript-integrations/examples/document_loaders/serpapi.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core @langchain/openai
```
diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/sort_xyz_blockchain.mdx b/src/oss/javascript/integrations/document_loaders/web_loaders/sort_xyz_blockchain.mdx
index 9421cdcf5..51c429f00 100644
--- a/src/oss/javascript/integrations/document_loaders/web_loaders/sort_xyz_blockchain.mdx
+++ b/src/oss/javascript/integrations/document_loaders/web_loaders/sort_xyz_blockchain.mdx
@@ -8,10 +8,6 @@ You will need a free Sort API key; visit sort.xyz to obtain one.
import SortXyzBlockchain from "/snippets/javascript-integrations/examples/document_loaders/sort_xyz_blockchain.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core @langchain/openai
```
diff --git a/src/oss/javascript/integrations/document_loaders/web_loaders/web_cheerio.md b/src/oss/javascript/integrations/document_loaders/web_loaders/web_cheerio.md
index 5a178bd16..42253b3ec 100644
--- a/src/oss/javascript/integrations/document_loaders/web_loaders/web_cheerio.md
+++ b/src/oss/javascript/integrations/document_loaders/web_loaders/web_cheerio.md
@@ -42,15 +42,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain CheerioWebBaseLoader integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core cheerio
+```
-
- @langchain/community @langchain/core cheerio
-
+```bash yarn
+yarn add @langchain/community @langchain/core cheerio
+```
+```bash pnpm
+pnpm add @langchain/community @langchain/core cheerio
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/document_transformers/html-to-text.mdx b/src/oss/javascript/integrations/document_transformers/html-to-text.mdx
index a4250c864..bd8e2ab1a 100644
--- a/src/oss/javascript/integrations/document_transformers/html-to-text.mdx
+++ b/src/oss/javascript/integrations/document_transformers/html-to-text.mdx
@@ -17,10 +17,6 @@ Though not required for the transformer by itself, the below usage examples requ
```bash npm
npm install cheerio
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/document_transformers/mozilla_readability.mdx b/src/oss/javascript/integrations/document_transformers/mozilla_readability.mdx
index b6e68b838..f9ddc17b8 100644
--- a/src/oss/javascript/integrations/document_transformers/mozilla_readability.mdx
+++ b/src/oss/javascript/integrations/document_transformers/mozilla_readability.mdx
@@ -17,10 +17,6 @@ Though not required for the transformer by itself, the below usage examples requ
```bash npm
npm install cheerio
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/document_transformers/openai_metadata_tagger.mdx b/src/oss/javascript/integrations/document_transformers/openai_metadata_tagger.mdx
index cf20a383c..e73c06948 100644
--- a/src/oss/javascript/integrations/document_transformers/openai_metadata_tagger.mdx
+++ b/src/oss/javascript/integrations/document_transformers/openai_metadata_tagger.mdx
@@ -14,10 +14,6 @@ For example, let's say you wanted to index a set of movie reviews. You could ini
import MetadataTagger from "/snippets/javascript-integrations/examples/document_transformers/metadata_tagger.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/llm_caching/azure_cosmosdb_nosql.mdx b/src/oss/javascript/integrations/llm_caching/azure_cosmosdb_nosql.mdx
index 337df4890..35b275f4d 100644
--- a/src/oss/javascript/integrations/llm_caching/azure_cosmosdb_nosql.mdx
+++ b/src/oss/javascript/integrations/llm_caching/azure_cosmosdb_nosql.mdx
@@ -10,10 +10,6 @@ If you don't have an Azure account, you can [create a free account](https://azur
You'll first need to install the [`@langchain/azure-cosmosdb`](https://www.npmjs.com/package/@langchain/azure-cosmosdb) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/azure-cosmosdb @langchain/core
```
diff --git a/src/oss/javascript/integrations/llms/ai21.mdx b/src/oss/javascript/integrations/llms/ai21.mdx
index 9f168288c..1d0cc1ab5 100644
--- a/src/oss/javascript/integrations/llms/ai21.mdx
+++ b/src/oss/javascript/integrations/llms/ai21.mdx
@@ -6,10 +6,6 @@ You can get started with AI21Labs' Jurassic family of models, as well as see a f
Here's an example of initializing an instance in LangChain.js:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/llms/aleph_alpha.mdx b/src/oss/javascript/integrations/llms/aleph_alpha.mdx
index eed2c83ae..68b551918 100644
--- a/src/oss/javascript/integrations/llms/aleph_alpha.mdx
+++ b/src/oss/javascript/integrations/llms/aleph_alpha.mdx
@@ -6,10 +6,6 @@ LangChain.js supports AlephAlpha's Luminous family of models. You'll need to sig
Here's an example:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/llms/arcjet.md b/src/oss/javascript/integrations/llms/arcjet.md
index 6535c069f..53c4d8686 100644
--- a/src/oss/javascript/integrations/llms/arcjet.md
+++ b/src/oss/javascript/integrations/llms/arcjet.md
@@ -20,15 +20,19 @@ The Arcjet Redact object is not an LLM itself, instead it wraps an LLM. It redac
Install the Arcjet Redaction Library:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @arcjet/redact
+```
-
- @arcjet/redact
-
+```bash yarn
+yarn add @arcjet/redact
+```
+```bash pnpm
+pnpm add @arcjet/redact
```
+
And install LangChain Community:
diff --git a/src/oss/javascript/integrations/llms/aws_sagemaker.mdx b/src/oss/javascript/integrations/llms/aws_sagemaker.mdx
index 40beca8fe..d8af81cbb 100644
--- a/src/oss/javascript/integrations/llms/aws_sagemaker.mdx
+++ b/src/oss/javascript/integrations/llms/aws_sagemaker.mdx
@@ -11,10 +11,6 @@ You'll need to install the official SageMaker SDK as a peer dependency:
```bash npm
npm install @aws-sdk/client-sagemaker-runtime
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/llms/cloudflare_workersai.md b/src/oss/javascript/integrations/llms/cloudflare_workersai.md
index 470eb402e..4f55631b6 100644
--- a/src/oss/javascript/integrations/llms/cloudflare_workersai.md
+++ b/src/oss/javascript/integrations/llms/cloudflare_workersai.md
@@ -24,15 +24,19 @@ Head [to this page](https://developers.cloudflare.com/workers-ai/) to sign up to
The LangChain Cloudflare integration lives in the `@langchain/cloudflare` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/cloudflare @langchain/core
+```
-
- @langchain/cloudflare @langchain/core
-
+```bash yarn
+yarn add @langchain/cloudflare @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/cloudflare @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/llms/cohere.md b/src/oss/javascript/integrations/llms/cohere.md
index cc98bd2c0..2cc9f5dd1 100644
--- a/src/oss/javascript/integrations/llms/cohere.md
+++ b/src/oss/javascript/integrations/llms/cohere.md
@@ -47,15 +47,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain Cohere integration lives in the `@langchain/cohere` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/cohere @langchain/core
+```
-
- @langchain/cohere @langchain/core
-
+```bash yarn
+yarn add @langchain/cohere @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/cohere @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/llms/deep_infra.mdx b/src/oss/javascript/integrations/llms/deep_infra.mdx
index 7d3e844b0..81ef4e511 100644
--- a/src/oss/javascript/integrations/llms/deep_infra.mdx
+++ b/src/oss/javascript/integrations/llms/deep_infra.mdx
@@ -5,10 +5,6 @@ title: DeepInfra
LangChain supports LLMs hosted by [Deep Infra](https://deepinfra.com/) through the `DeepInfra` wrapper.
First, you'll need to install the `@langchain/community` package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/llms/friendli.mdx b/src/oss/javascript/integrations/llms/friendli.mdx
index 91fb4f981..0091098cd 100644
--- a/src/oss/javascript/integrations/llms/friendli.mdx
+++ b/src/oss/javascript/integrations/llms/friendli.mdx
@@ -10,10 +10,6 @@ This tutorial guides you through integrating `Friendli` with LangChain.
Ensure the `@langchain/community` package is installed.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/llms/gradient_ai.mdx b/src/oss/javascript/integrations/llms/gradient_ai.mdx
index 8ccbdb016..215b95244 100644
--- a/src/oss/javascript/integrations/llms/gradient_ai.mdx
+++ b/src/oss/javascript/integrations/llms/gradient_ai.mdx
@@ -27,10 +27,6 @@ const model = new GradientLLM({
```
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/llms/ibm.md b/src/oss/javascript/integrations/llms/ibm.md
index e887b7d37..fc48d1853 100644
--- a/src/oss/javascript/integrations/llms/ibm.md
+++ b/src/oss/javascript/integrations/llms/ibm.md
@@ -105,15 +105,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain IBM watsonx.ai integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/llms/jigsawstack.mdx b/src/oss/javascript/integrations/llms/jigsawstack.mdx
index ce480f131..6471d8f60 100644
--- a/src/oss/javascript/integrations/llms/jigsawstack.mdx
+++ b/src/oss/javascript/integrations/llms/jigsawstack.mdx
@@ -17,10 +17,6 @@ export JIGSAWSTACK_API_KEY="your-api-key"
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/jigsawstack
```
diff --git a/src/oss/javascript/integrations/llms/llama_cpp.mdx b/src/oss/javascript/integrations/llms/llama_cpp.mdx
index f448869f4..5f38547fa 100644
--- a/src/oss/javascript/integrations/llms/llama_cpp.mdx
+++ b/src/oss/javascript/integrations/llms/llama_cpp.mdx
@@ -17,10 +17,6 @@ You'll need to install major version `3` of the [node-llama-cpp](https://github.
```bash npm
npm install -S node-llama-cpp@3
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/llms/raycast.mdx b/src/oss/javascript/integrations/llms/raycast.mdx
index a0d6b36a1..5e788c1f6 100644
--- a/src/oss/javascript/integrations/llms/raycast.mdx
+++ b/src/oss/javascript/integrations/llms/raycast.mdx
@@ -10,10 +10,6 @@ You can utilize LangChain's RaycastAI class within the [Raycast Environment]
- There is a rate limit of approximately 10 requests per minute for each Raycast Pro user, and exceeding it returns an error. Because this rate limit may change in the future, you can set your desired requests-per-minute limit by passing `rateLimitPerMinute` to the `RaycastAI` constructor, as shown in the example and in the sketch after the install command below.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
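As a rough sketch of how that parameter is passed (the import path assumes the community package's Raycast entry point, and the model name and creativity value are illustrative only; this code only runs inside a Raycast extension):

```typescript
import { RaycastAI } from "@langchain/community/llms/raycast";

const model = new RaycastAI({
  // Raise or lower this to match your Raycast Pro quota.
  rateLimitPerMinute: 10,
  // Illustrative model name; use whichever Raycast AI model you have access to.
  model: "openai-gpt-3.5-turbo",
  creativity: 0,
});

const res = await model.invoke("Suggest a name for a note-taking extension.");
```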
diff --git a/src/oss/javascript/integrations/llms/replicate.mdx b/src/oss/javascript/integrations/llms/replicate.mdx
index d11a951d5..ca9205d8f 100644
--- a/src/oss/javascript/integrations/llms/replicate.mdx
+++ b/src/oss/javascript/integrations/llms/replicate.mdx
@@ -4,10 +4,6 @@ title: Replicate
Here's an example of calling a Replicate model as an LLM:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install replicate@1 @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/llms/together.md b/src/oss/javascript/integrations/llms/together.md
index 9bffd2587..76976a209 100644
--- a/src/oss/javascript/integrations/llms/together.md
+++ b/src/oss/javascript/integrations/llms/together.md
@@ -43,15 +43,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain TogetherAI integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/llms/writer.mdx b/src/oss/javascript/integrations/llms/writer.mdx
index 5a9c6977c..9a63df68d 100644
--- a/src/oss/javascript/integrations/llms/writer.mdx
+++ b/src/oss/javascript/integrations/llms/writer.mdx
@@ -13,10 +13,6 @@ Next, you'll need to install the official package as a peer dependency:
```bash npm
npm install @writerai/writer-sdk
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/llms/yandex.mdx b/src/oss/javascript/integrations/llms/yandex.mdx
index a57c0f7c7..91790eef5 100644
--- a/src/oss/javascript/integrations/llms/yandex.mdx
+++ b/src/oss/javascript/integrations/llms/yandex.mdx
@@ -17,10 +17,6 @@ Next, you have two authentication options:
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/yandex @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/astradb.mdx b/src/oss/javascript/integrations/memory/astradb.mdx
index 5f7f5994e..e150bb69c 100644
--- a/src/oss/javascript/integrations/memory/astradb.mdx
+++ b/src/oss/javascript/integrations/memory/astradb.mdx
@@ -11,10 +11,6 @@ You need to install the Astra DB TS client:
```bash npm
npm install @datastax/astra-db-ts
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/aurora_dsql.mdx b/src/oss/javascript/integrations/memory/aurora_dsql.mdx
index aa070604c..4bdab93c0 100644
--- a/src/oss/javascript/integrations/memory/aurora_dsql.mdx
+++ b/src/oss/javascript/integrations/memory/aurora_dsql.mdx
@@ -14,10 +14,6 @@ This is very similar to the PostgreSQL integration with a few differences to mak
Go to your AWS Console and create an Aurora DSQL cluster: https://console.aws.amazon.com/dsql/clusters
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core pg @aws-sdk/dsql-signer
```
diff --git a/src/oss/javascript/integrations/memory/azure_cosmos_mongo_vcore.mdx b/src/oss/javascript/integrations/memory/azure_cosmos_mongo_vcore.mdx
index cd02d115f..ec68eafeb 100644
--- a/src/oss/javascript/integrations/memory/azure_cosmos_mongo_vcore.mdx
+++ b/src/oss/javascript/integrations/memory/azure_cosmos_mongo_vcore.mdx
@@ -12,10 +12,6 @@ You'll first need to install the [`@langchain/azure-cosmosdb`](https://www.npmjs
```bash npm
npm install @langchain/azure-cosmosdb @langchain/core
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/azure_cosmosdb_nosql.mdx b/src/oss/javascript/integrations/memory/azure_cosmosdb_nosql.mdx
index fd6c0e390..846d1d4e4 100644
--- a/src/oss/javascript/integrations/memory/azure_cosmosdb_nosql.mdx
+++ b/src/oss/javascript/integrations/memory/azure_cosmosdb_nosql.mdx
@@ -12,10 +12,6 @@ You'll first need to install the [`@langchain/azure-cosmosdb`](https://www.npmjs
```bash npm
npm install @langchain/azure-cosmosdb @langchain/core
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/cassandra.mdx b/src/oss/javascript/integrations/memory/cassandra.mdx
index cb08a3fb8..b67ede74a 100644
--- a/src/oss/javascript/integrations/memory/cassandra.mdx
+++ b/src/oss/javascript/integrations/memory/cassandra.mdx
@@ -8,10 +8,6 @@ For longer-term persistence across chat sessions, you can swap out the default i
First, install the Cassandra Node.js driver:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install cassandra-driver @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/cloudflare_d1.mdx b/src/oss/javascript/integrations/memory/cloudflare_d1.mdx
index aff8e6ccf..0dd89d66a 100644
--- a/src/oss/javascript/integrations/memory/cloudflare_d1.mdx
+++ b/src/oss/javascript/integrations/memory/cloudflare_d1.mdx
@@ -13,10 +13,6 @@ For longer-term persistence across chat sessions, you can swap out the default i
You'll need to install the LangChain Cloudflare integration package.
For the below example, we also use Anthropic, but you can use any model you'd like:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/cloudflare @langchain/anthropic @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/convex.mdx b/src/oss/javascript/integrations/memory/convex.mdx
index 5dc148ae4..7856e6580 100644
--- a/src/oss/javascript/integrations/memory/convex.mdx
+++ b/src/oss/javascript/integrations/memory/convex.mdx
@@ -50,10 +50,6 @@ export default defineSchema({
Each chat history session stored in Convex must have a unique session id.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
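As a rough sketch of how the unique-session-id requirement above plays out inside a Convex action (the entry point, option names, and action shape here are assumptions for illustration; check the Convex integration reference for the exact API):

```typescript
import { v } from "convex/values";
import { action } from "./_generated/server";
import { ConvexChatMessageHistory } from "@langchain/community/stores/message/convex";

export const readHistory = action({
  args: { sessionId: v.string() },
  handler: async (ctx, { sessionId }) => {
    // Each unique session id maps to its own stored chat history.
    const history = new ConvexChatMessageHistory({ ctx, sessionId });
    const messages = await history.getMessages();
    return messages.map((m) => m.content);
  },
});
```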
diff --git a/src/oss/javascript/integrations/memory/dynamodb.mdx b/src/oss/javascript/integrations/memory/dynamodb.mdx
index 6249742a6..9b66b690c 100644
--- a/src/oss/javascript/integrations/memory/dynamodb.mdx
+++ b/src/oss/javascript/integrations/memory/dynamodb.mdx
@@ -11,10 +11,6 @@ First, install the AWS DynamoDB client in your project:
```bash npm
npm install @aws-sdk/client-dynamodb
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/file.mdx b/src/oss/javascript/integrations/memory/file.mdx
index d09985571..1dea567a1 100644
--- a/src/oss/javascript/integrations/memory/file.mdx
+++ b/src/oss/javascript/integrations/memory/file.mdx
@@ -11,10 +11,6 @@ You'll first need to install the [`@langchain/community`](https://www.npmjs.com/
```bash npm
npm install @langchain/community @langchain/core
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/firestore.mdx b/src/oss/javascript/integrations/memory/firestore.mdx
index 7db1859bf..fccc59410 100644
--- a/src/oss/javascript/integrations/memory/firestore.mdx
+++ b/src/oss/javascript/integrations/memory/firestore.mdx
@@ -11,10 +11,6 @@ First, install the Firebase admin package in your project:
```bash npm
npm install firebase-admin
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/ipfs_datastore.mdx b/src/oss/javascript/integrations/memory/ipfs_datastore.mdx
index 3cf44ab92..3d12dec4e 100644
--- a/src/oss/javascript/integrations/memory/ipfs_datastore.mdx
+++ b/src/oss/javascript/integrations/memory/ipfs_datastore.mdx
@@ -8,10 +8,6 @@ For a storage backend you can use the IPFS Datastore Chat Memory to wrap an IPFS
First, install the integration dependencies:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install cborg interface-datastore it-all @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/mem0_memory.mdx b/src/oss/javascript/integrations/memory/mem0_memory.mdx
index 919c25d5d..8b0992ae3 100644
--- a/src/oss/javascript/integrations/memory/mem0_memory.mdx
+++ b/src/oss/javascript/integrations/memory/mem0_memory.mdx
@@ -12,10 +12,6 @@ Go to the [Mem0 Dashboard](https://app.mem0.ai) to get API keys for Mem0.
import Mem0 from "/snippets/javascript-integrations/examples/memory/mem0.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core @langchain/community
```
diff --git a/src/oss/javascript/integrations/memory/momento.mdx b/src/oss/javascript/integrations/memory/momento.mdx
index 31cf3866a..6b0fe0482 100644
--- a/src/oss/javascript/integrations/memory/momento.mdx
+++ b/src/oss/javascript/integrations/memory/momento.mdx
@@ -19,10 +19,6 @@ To install for **browser/edge workers**:
```bash npm
npm install @gomomento/sdk-web
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/mongodb.mdx b/src/oss/javascript/integrations/memory/mongodb.mdx
index ed88d9f33..5534d2a3a 100644
--- a/src/oss/javascript/integrations/memory/mongodb.mdx
+++ b/src/oss/javascript/integrations/memory/mongodb.mdx
@@ -25,10 +25,6 @@ You need to install Node MongoDB SDK in your project:
```bash npm
npm install -S mongodb
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/motorhead_memory.mdx b/src/oss/javascript/integrations/memory/motorhead_memory.mdx
index 8945adc6d..7d6f46d39 100644
--- a/src/oss/javascript/integrations/memory/motorhead_memory.mdx
+++ b/src/oss/javascript/integrations/memory/motorhead_memory.mdx
@@ -12,10 +12,6 @@ See instructions at [Motörhead](https://github.com/getmetal/motorhead) for runn
import Motorhead from "/snippets/javascript-integrations/examples/memory/motorhead.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/planetscale.mdx b/src/oss/javascript/integrations/memory/planetscale.mdx
index f4a1c50ec..9d8a88105 100644
--- a/src/oss/javascript/integrations/memory/planetscale.mdx
+++ b/src/oss/javascript/integrations/memory/planetscale.mdx
@@ -10,10 +10,6 @@ For longer-term persistence across chat sessions, you can swap out the default i
You will need to install [@planetscale/database](https://github.com/planetscale/database-js) in your project:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @planetscale/database @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/postgres.mdx b/src/oss/javascript/integrations/memory/postgres.mdx
index 3015db70a..fc14539de 100644
--- a/src/oss/javascript/integrations/memory/postgres.mdx
+++ b/src/oss/javascript/integrations/memory/postgres.mdx
@@ -8,10 +8,6 @@ For longer-term persistence across chat sessions, you can swap out the default i
First install the [node-postgres](https://node-postgres.com/) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core pg
```
diff --git a/src/oss/javascript/integrations/memory/redis.mdx b/src/oss/javascript/integrations/memory/redis.mdx
index 78f30cda3..e4d0395e7 100644
--- a/src/oss/javascript/integrations/memory/redis.mdx
+++ b/src/oss/javascript/integrations/memory/redis.mdx
@@ -8,10 +8,6 @@ For longer-term persistence across chat sessions, you can swap out the default i
You will need to install [node-redis](https://github.com/redis/node-redis) in your project:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core redis
```
diff --git a/src/oss/javascript/integrations/memory/upstash_redis.mdx b/src/oss/javascript/integrations/memory/upstash_redis.mdx
index 0df192016..6dfb86745 100644
--- a/src/oss/javascript/integrations/memory/upstash_redis.mdx
+++ b/src/oss/javascript/integrations/memory/upstash_redis.mdx
@@ -11,10 +11,6 @@ For longer-term persistence across chat sessions, you can swap out the default i
You will need to install [@upstash/redis](https://github.com/upstash/upstash-redis) in your project:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core @upstash/redis
```
diff --git a/src/oss/javascript/integrations/memory/xata.mdx b/src/oss/javascript/integrations/memory/xata.mdx
index 22b417f31..e6fe9daee 100644
--- a/src/oss/javascript/integrations/memory/xata.mdx
+++ b/src/oss/javascript/integrations/memory/xata.mdx
@@ -39,10 +39,6 @@ Each chat history session stored in Xata database must have a unique id.
In this example, the `getXataClient()` function is used to create a new Xata client based on the environment variables. However, we recommend using the code generated by the `xata init` command, in which case you only need to import the `getXataClient()` function from the generated `xata.ts` file.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/zep_memory.mdx b/src/oss/javascript/integrations/memory/zep_memory.mdx
index b4b9d3516..4a2a87b82 100644
--- a/src/oss/javascript/integrations/memory/zep_memory.mdx
+++ b/src/oss/javascript/integrations/memory/zep_memory.mdx
@@ -31,10 +31,6 @@ Zep allows you to be more intentional about constructing your prompt:
See the instructions from [Zep Open Source](https://github.com/getzep/zep) for running the server locally or through an automated hosting provider.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/memory/zep_memory_cloud.mdx b/src/oss/javascript/integrations/memory/zep_memory_cloud.mdx
index 2a63508df..d49a2e011 100644
--- a/src/oss/javascript/integrations/memory/zep_memory_cloud.mdx
+++ b/src/oss/javascript/integrations/memory/zep_memory_cloud.mdx
@@ -39,10 +39,6 @@ Follow the [Zep Cloud Typescript SDK Installation Guide](https://help.getzep.com
You'll need your Zep Cloud Project API Key to use the Zep Cloud Memory. See the [Zep Cloud docs](https://help.getzep.com/projects) for more information.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @getzep/zep-cloud @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/providers/anthropic.mdx b/src/oss/javascript/integrations/providers/anthropic.mdx
index 8f7100927..d8a9e5e85 100644
--- a/src/oss/javascript/integrations/providers/anthropic.mdx
+++ b/src/oss/javascript/integrations/providers/anthropic.mdx
@@ -20,10 +20,6 @@ Anthropic models require any system messages to be the first one in your prompts
`ChatAnthropic` is a subclass of LangChain's `ChatModel`, meaning it works best with `ChatPromptTemplate`.
You can import this wrapper with the following code:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/anthropic @langchain/core
```
diff --git a/src/oss/javascript/integrations/providers/microsoft.mdx b/src/oss/javascript/integrations/providers/microsoft.mdx
index 7ba99eb07..9815f8006 100644
--- a/src/oss/javascript/integrations/providers/microsoft.mdx
+++ b/src/oss/javascript/integrations/providers/microsoft.mdx
@@ -46,10 +46,6 @@ AZURE_OPENAI_API_VERSION="2024-02-01"
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/providers/openai.mdx b/src/oss/javascript/integrations/providers/openai.mdx
index 77127e591..d576b43f2 100644
--- a/src/oss/javascript/integrations/providers/openai.mdx
+++ b/src/oss/javascript/integrations/providers/openai.mdx
@@ -30,10 +30,6 @@ import { ChatOpenAI } from "@langchain/openai";
See a [usage example](/oss/integrations/llms/openai).
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/retrievers/azion-edgesql.md b/src/oss/javascript/integrations/retrievers/azion-edgesql.md
index f5a11510e..5f95bf2a4 100644
--- a/src/oss/javascript/integrations/retrievers/azion-edgesql.md
+++ b/src/oss/javascript/integrations/retrievers/azion-edgesql.md
@@ -37,14 +37,19 @@ If you want to get automated tracing from individual queries, you can also set y
This retriever lives in the `@langchain/community/retrievers/azion_edgesql` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install azion @langchain/openai @langchain/community
+```
+
+```bash yarn
+yarn add azion @langchain/openai @langchain/community
+```
-
- azion @langchain/openai @langchain/community
-
+```bash pnpm
+pnpm add azion @langchain/openai @langchain/community
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/retrievers/bedrock-knowledge-bases.md b/src/oss/javascript/integrations/retrievers/bedrock-knowledge-bases.md
index 80df0a5b0..2486a23dd 100644
--- a/src/oss/javascript/integrations/retrievers/bedrock-knowledge-bases.md
+++ b/src/oss/javascript/integrations/retrievers/bedrock-knowledge-bases.md
@@ -39,14 +39,19 @@ If you want to get automated tracing from individual queries, you can also set y
This retriever lives in the `@langchain/aws` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/aws @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/aws @langchain/core
+```
-
- @langchain/aws @langchain/core
-
+```bash pnpm
+pnpm add @langchain/aws @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/retrievers/bm25.md b/src/oss/javascript/integrations/retrievers/bm25.md
index 7e30b4d4a..42dadc650 100644
--- a/src/oss/javascript/integrations/retrievers/bm25.md
+++ b/src/oss/javascript/integrations/retrievers/bm25.md
@@ -10,14 +10,19 @@ You can use it as part of your retrieval pipeline to rerank documents as a
The `BM25Retriever` is exported from `@langchain/community`. You'll need to install it like this:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
This retriever uses code from [`this implementation`](https://github.com/FurkanToprak/OkapiBM25) of Okapi BM25.
diff --git a/src/oss/javascript/integrations/retrievers/chaindesk-retriever.mdx b/src/oss/javascript/integrations/retrievers/chaindesk-retriever.mdx
index 21f9e050d..8e880e415 100644
--- a/src/oss/javascript/integrations/retrievers/chaindesk-retriever.mdx
+++ b/src/oss/javascript/integrations/retrievers/chaindesk-retriever.mdx
@@ -6,10 +6,6 @@ This example shows how to use the Chaindesk Retriever in a retrieval chain to re
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/retrievers/dria.mdx b/src/oss/javascript/integrations/retrievers/dria.mdx
index 4de3c8e61..654418b5a 100644
--- a/src/oss/javascript/integrations/retrievers/dria.mdx
+++ b/src/oss/javascript/integrations/retrievers/dria.mdx
@@ -22,10 +22,6 @@ Dria retriever exposes the underlying [Dria client](https://npmjs.com/package/dr
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install dria @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/retrievers/exa.md b/src/oss/javascript/integrations/retrievers/exa.md
index 8db504127..62a95f7eb 100644
--- a/src/oss/javascript/integrations/retrievers/exa.md
+++ b/src/oss/javascript/integrations/retrievers/exa.md
@@ -35,14 +35,19 @@ If you want to get automated tracing from individual queries, you can also set y
This retriever lives in the `@langchain/exa` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/exa @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/exa @langchain/core
+```
-
- @langchain/exa @langchain/core
-
+```bash pnpm
+pnpm add @langchain/exa @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/retrievers/hyde.mdx b/src/oss/javascript/integrations/retrievers/hyde.mdx
index 4625c0de2..503d9e53c 100644
--- a/src/oss/javascript/integrations/retrievers/hyde.mdx
+++ b/src/oss/javascript/integrations/retrievers/hyde.mdx
@@ -12,10 +12,6 @@ In order to use HyDE, we therefore need to provide a base embedding model, as we
import Hyde from "/snippets/javascript-integrations/examples/retrievers/hyde.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/retrievers/kendra-retriever.md b/src/oss/javascript/integrations/retrievers/kendra-retriever.md
index cf63599d6..8a9c856d0 100644
--- a/src/oss/javascript/integrations/retrievers/kendra-retriever.md
+++ b/src/oss/javascript/integrations/retrievers/kendra-retriever.md
@@ -34,14 +34,19 @@ If you want to get automated tracing from individual queries, you can also set y
This retriever lives in the `@langchain/aws` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/aws @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/aws @langchain/core
+```
-
- @langchain/aws @langchain/core
-
+```bash pnpm
+pnpm add @langchain/aws @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/retrievers/metal-retriever.mdx b/src/oss/javascript/integrations/retrievers/metal-retriever.mdx
index 185b6c747..43286e572 100644
--- a/src/oss/javascript/integrations/retrievers/metal-retriever.mdx
+++ b/src/oss/javascript/integrations/retrievers/metal-retriever.mdx
@@ -6,10 +6,6 @@ This example shows how to use the Metal Retriever in a retrieval chain to retrie
## Setup
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm i @getmetal/metal-sdk @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/retrievers/self_query/chroma.md b/src/oss/javascript/integrations/retrievers/self_query/chroma.md
index f7ea83480..afb136037 100644
--- a/src/oss/javascript/integrations/retrievers/self_query/chroma.md
+++ b/src/oss/javascript/integrations/retrievers/self_query/chroma.md
@@ -34,14 +34,19 @@ The vector store lives in the `@langchain/community` package. You'll also need t
For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com):
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community langchain @langchain/openai @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/community langchain @langchain/openai @langchain/core
+```
-
- @langchain/community langchain @langchain/openai @langchain/core
-
+```bash pnpm
+pnpm add @langchain/community langchain @langchain/openai @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/retrievers/self_query/hnswlib.md b/src/oss/javascript/integrations/retrievers/self_query/hnswlib.md
index 5cb9285d1..e8defa14b 100644
--- a/src/oss/javascript/integrations/retrievers/self_query/hnswlib.md
+++ b/src/oss/javascript/integrations/retrievers/self_query/hnswlib.md
@@ -33,14 +33,19 @@ The vector store lives in the `@langchain/community` package. You'll also need t
For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com):
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community langchain @langchain/openai @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/community langchain @langchain/openai @langchain/core
+```
-
- @langchain/community langchain @langchain/openai @langchain/core
-
+```bash pnpm
+pnpm add @langchain/community langchain @langchain/openai @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/retrievers/self_query/memory.md b/src/oss/javascript/integrations/retrievers/self_query/memory.md
index b955eb30a..a63542254 100644
--- a/src/oss/javascript/integrations/retrievers/self_query/memory.md
+++ b/src/oss/javascript/integrations/retrievers/self_query/memory.md
@@ -33,14 +33,19 @@ The vector store lives in the `@langchain/community` package. You'll also need t
For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com):
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community langchain @langchain/openai @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/community langchain @langchain/openai @langchain/core
+```
-
- @langchain/community langchain @langchain/openai @langchain/core
-
+```bash pnpm
+pnpm add @langchain/community langchain @langchain/openai @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/retrievers/self_query/pinecone.md b/src/oss/javascript/integrations/retrievers/self_query/pinecone.md
index ad463d25a..0ce727789 100644
--- a/src/oss/javascript/integrations/retrievers/self_query/pinecone.md
+++ b/src/oss/javascript/integrations/retrievers/self_query/pinecone.md
@@ -41,14 +41,19 @@ You will also need to install the official Pinecone SDK (`@pinecone-database/pin
For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com):
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/pinecone langchain @langchain/openai @langchain/core @pinecone-database/pinecone
+```
+
+```bash yarn
+yarn add @langchain/pinecone langchain @langchain/openai @langchain/core @pinecone-database/pinecone
+```
-
- @langchain/pinecone langchain @langchain/openai @langchain/core @pinecone-database/pinecone
-
+```bash pnpm
+pnpm add @langchain/pinecone langchain @langchain/openai @langchain/core @pinecone-database/pinecone
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/retrievers/self_query/qdrant.md b/src/oss/javascript/integrations/retrievers/self_query/qdrant.md
index d3f23a085..1eed41b18 100644
--- a/src/oss/javascript/integrations/retrievers/self_query/qdrant.md
+++ b/src/oss/javascript/integrations/retrievers/self_query/qdrant.md
@@ -37,14 +37,19 @@ The vector store lives in the `@langchain/qdrant` package. You'll also need to i
For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com):
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/qdrant langchain @langchain/community @langchain/openai @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/qdrant langchain @langchain/community @langchain/openai @langchain/core
+```
-
- @langchain/qdrant langchain @langchain/community @langchain/openai @langchain/core
-
+```bash pnpm
+pnpm add @langchain/qdrant langchain @langchain/community @langchain/openai @langchain/core
```
+
The official Qdrant SDK (`@qdrant/js-client-rest`) is automatically installed as a dependency of `@langchain/qdrant`, but you may wish to install it independently as well.
diff --git a/src/oss/javascript/integrations/retrievers/self_query/supabase.md b/src/oss/javascript/integrations/retrievers/self_query/supabase.md
index 6fe3127f1..ab696635b 100644
--- a/src/oss/javascript/integrations/retrievers/self_query/supabase.md
+++ b/src/oss/javascript/integrations/retrievers/self_query/supabase.md
@@ -38,14 +38,19 @@ The vector store lives in the `@langchain/community` package, which requires the
For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com):
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community langchain @langchain/openai @langchain/core @supabase/supabase-js
+```
+
+```bash yarn
+yarn add @langchain/community langchain @langchain/openai @langchain/core @supabase/supabase-js
+```
-
- @langchain/community langchain @langchain/openai @langchain/core @supabase/supabase-js
-
+```bash pnpm
+pnpm add @langchain/community langchain @langchain/openai @langchain/core @supabase/supabase-js
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/retrievers/self_query/vectara.md b/src/oss/javascript/integrations/retrievers/self_query/vectara.md
index bb410900c..ce4deb540 100644
--- a/src/oss/javascript/integrations/retrievers/self_query/vectara.md
+++ b/src/oss/javascript/integrations/retrievers/self_query/vectara.md
@@ -37,14 +37,19 @@ If you want to get automated tracing from individual queries, you can also set y
The vector store lives in the `@langchain/community` package. You'll also need to install the `langchain` package to import the main `SelfQueryRetriever` class.
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community langchain @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/community langchain @langchain/core
+```
-
- @langchain/community langchain @langchain/core
-
+```bash pnpm
+pnpm add @langchain/community langchain @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/retrievers/self_query/weaviate.md b/src/oss/javascript/integrations/retrievers/self_query/weaviate.md
index a4a6a129c..a49fcb9c0 100644
--- a/src/oss/javascript/integrations/retrievers/self_query/weaviate.md
+++ b/src/oss/javascript/integrations/retrievers/self_query/weaviate.md
@@ -42,14 +42,19 @@ The official Weaviate SDK (`weaviate-client`) is automatically installed as a de
For this example, we'll also use OpenAI embeddings, so you'll need to install the `@langchain/openai` package and [obtain an API key](https://platform.openai.com):
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/weaviate langchain @langchain/openai @langchain/core weaviate-client
+```
+
+```bash yarn
+yarn add @langchain/weaviate langchain @langchain/openai @langchain/core weaviate-client
+```
-
- @langchain/weaviate langchain @langchain/openai @langchain/core weaviate-client
-
+```bash pnpm
+pnpm add @langchain/weaviate langchain @langchain/openai @langchain/core weaviate-client
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/retrievers/supabase-hybrid.mdx b/src/oss/javascript/integrations/retrievers/supabase-hybrid.mdx
index 534c3e3bc..fd7b2ec90 100644
--- a/src/oss/javascript/integrations/retrievers/supabase-hybrid.mdx
+++ b/src/oss/javascript/integrations/retrievers/supabase-hybrid.mdx
@@ -73,10 +73,6 @@ $$ language plpgsql;
```
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/retrievers/tavily.md b/src/oss/javascript/integrations/retrievers/tavily.md
index 7f96172d3..2defdd7f2 100644
--- a/src/oss/javascript/integrations/retrievers/tavily.md
+++ b/src/oss/javascript/integrations/retrievers/tavily.md
@@ -30,14 +30,19 @@ If you want to get automated tracing from individual queries, you can also set y
This retriever lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/retrievers/time-weighted-retriever.mdx b/src/oss/javascript/integrations/retrievers/time-weighted-retriever.mdx
index f279d2cf0..1ff631a05 100644
--- a/src/oss/javascript/integrations/retrievers/time-weighted-retriever.mdx
+++ b/src/oss/javascript/integrations/retrievers/time-weighted-retriever.mdx
@@ -21,10 +21,6 @@ It is important to note that due to required metadata, all documents must be add
import TimeWeightedRetriever from "/snippets/javascript-integrations/examples/retrievers/time-weighted-retriever.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/retrievers/zep-cloud-retriever.mdx b/src/oss/javascript/integrations/retrievers/zep-cloud-retriever.mdx
index e3407ed51..8b5fbae6b 100644
--- a/src/oss/javascript/integrations/retrievers/zep-cloud-retriever.mdx
+++ b/src/oss/javascript/integrations/retrievers/zep-cloud-retriever.mdx
@@ -18,10 +18,6 @@ You'll need your Zep Cloud Project API Key to use the ZepCloudRetriever. See the
## Setup
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm i @getzep/zep-cloud @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/retrievers/zep-retriever.mdx b/src/oss/javascript/integrations/retrievers/zep-retriever.mdx
index 243121c4d..ac23857af 100644
--- a/src/oss/javascript/integrations/retrievers/zep-retriever.mdx
+++ b/src/oss/javascript/integrations/retrievers/zep-retriever.mdx
@@ -16,10 +16,6 @@ Follow the [Zep Open Source Quickstart Guide](https://help.getzep.com/quickstart
## Setup
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm i @getzep/zep-js @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/stores/in_memory.md b/src/oss/javascript/integrations/stores/in_memory.md
index 65f1f617d..2dddb9674 100644
--- a/src/oss/javascript/integrations/stores/in_memory.md
+++ b/src/oss/javascript/integrations/stores/in_memory.md
@@ -20,16 +20,19 @@ The `InMemoryStore` allows for a generic type to be assigned to the values in th
The LangChain InMemoryStore integration lives in the `@langchain/core` package:
-```{=mdx}
-
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/core
+```
-
- @langchain/core
-
+```bash yarn
+yarn add @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/alibaba_tongyi.mdx b/src/oss/javascript/integrations/text_embedding/alibaba_tongyi.mdx
index 4840e0d90..2b599725b 100644
--- a/src/oss/javascript/integrations/text_embedding/alibaba_tongyi.mdx
+++ b/src/oss/javascript/integrations/text_embedding/alibaba_tongyi.mdx
@@ -10,10 +10,6 @@ You'll need to sign up for an Alibaba API key and set it as an environment varia
Then, you'll need to install the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/text_embedding/baidu_qianfan.mdx b/src/oss/javascript/integrations/text_embedding/baidu_qianfan.mdx
index 01a47bcc4..6a308d083 100644
--- a/src/oss/javascript/integrations/text_embedding/baidu_qianfan.mdx
+++ b/src/oss/javascript/integrations/text_embedding/baidu_qianfan.mdx
@@ -12,10 +12,6 @@ Please set the acquired API key as an environment variable named BAIDU_API_KEY,
Then, you'll need to install the [`@langchain/baidu-qianfan`](https://www.npmjs.com/package/@langchain/baidu-qianfan) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/baidu-qianfan @langchain/core
```
diff --git a/src/oss/javascript/integrations/text_embedding/bedrock.md b/src/oss/javascript/integrations/text_embedding/bedrock.md
index 70de32830..192ba8744 100644
--- a/src/oss/javascript/integrations/text_embedding/bedrock.md
+++ b/src/oss/javascript/integrations/text_embedding/bedrock.md
@@ -34,14 +34,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain Bedrock integration lives in the `@langchain/aws` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/aws @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/aws @langchain/core
+```
-
- @langchain/aws @langchain/core
-
+```bash pnpm
+pnpm add @langchain/aws @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/bytedance_doubao.md b/src/oss/javascript/integrations/text_embedding/bytedance_doubao.md
index bd484b732..3aeaed29d 100644
--- a/src/oss/javascript/integrations/text_embedding/bytedance_doubao.md
+++ b/src/oss/javascript/integrations/text_embedding/bytedance_doubao.md
@@ -31,14 +31,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain ByteDanceDoubaoEmbeddings integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community
+```
+
+```bash yarn
+yarn add @langchain/community
+```
-
- @langchain/community
-
+```bash pnpm
+pnpm add @langchain/community
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/cloudflare_ai.md b/src/oss/javascript/integrations/text_embedding/cloudflare_ai.md
index 713b8f45b..9c2e41ac4 100644
--- a/src/oss/javascript/integrations/text_embedding/cloudflare_ai.md
+++ b/src/oss/javascript/integrations/text_embedding/cloudflare_ai.md
@@ -46,14 +46,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain CloudflareWorkersAIEmbeddings integration lives in the `@langchain/cloudflare` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/cloudflare @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/cloudflare @langchain/core
+```
-
- @langchain/cloudflare @langchain/core
-
+```bash pnpm
+pnpm add @langchain/cloudflare @langchain/core
```
+
## Usage
diff --git a/src/oss/javascript/integrations/text_embedding/cohere.md b/src/oss/javascript/integrations/text_embedding/cohere.md
index 605f10dd4..27ce4a1e8 100644
--- a/src/oss/javascript/integrations/text_embedding/cohere.md
+++ b/src/oss/javascript/integrations/text_embedding/cohere.md
@@ -35,14 +35,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain CohereEmbeddings integration lives in the `@langchain/cohere` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/cohere @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/cohere @langchain/core
+```
-
- @langchain/cohere @langchain/core
-
+```bash pnpm
+pnpm add @langchain/cohere @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/deepinfra.mdx b/src/oss/javascript/integrations/text_embedding/deepinfra.mdx
index 82ac4289e..7fbdc21dd 100644
--- a/src/oss/javascript/integrations/text_embedding/deepinfra.mdx
+++ b/src/oss/javascript/integrations/text_embedding/deepinfra.mdx
@@ -8,10 +8,6 @@ The `DeepInfraEmbeddings` class utilizes the DeepInfra API to generate embedding
Install the `@langchain/community` package as shown below:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm i @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/text_embedding/fireworks.md b/src/oss/javascript/integrations/text_embedding/fireworks.md
index d07f9b384..d1ca1ecb1 100644
--- a/src/oss/javascript/integrations/text_embedding/fireworks.md
+++ b/src/oss/javascript/integrations/text_embedding/fireworks.md
@@ -35,14 +35,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain `FireworksEmbeddings` integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/google_generativeai.md b/src/oss/javascript/integrations/text_embedding/google_generativeai.md
index f214d5478..bbed6e64f 100644
--- a/src/oss/javascript/integrations/text_embedding/google_generativeai.md
+++ b/src/oss/javascript/integrations/text_embedding/google_generativeai.md
@@ -37,14 +37,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain `GoogleGenerativeAIEmbeddings` integration lives in the `@langchain/google-genai` package. You may also wish to install the official SDK:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/google-genai @langchain/core @google/generative-ai
+```
+
+```bash yarn
+yarn add @langchain/google-genai @langchain/core @google/generative-ai
+```
-
- @langchain/google-genai @langchain/core @google/generative-ai
-
+```bash pnpm
+pnpm add @langchain/google-genai @langchain/core @google/generative-ai
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/google_vertex_ai.md b/src/oss/javascript/integrations/text_embedding/google_vertex_ai.md
index 0897c64ee..70443a1b9 100644
--- a/src/oss/javascript/integrations/text_embedding/google_vertex_ai.md
+++ b/src/oss/javascript/integrations/text_embedding/google_vertex_ai.md
@@ -46,14 +46,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain `VertexAIEmbeddings` integration lives in the `@langchain/google-vertexai` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/google-vertexai @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/google-vertexai @langchain/core
+```
-
- @langchain/google-vertexai @langchain/core
-
+```bash pnpm
+pnpm add @langchain/google-vertexai @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/hugging_face_inference.mdx b/src/oss/javascript/integrations/text_embedding/hugging_face_inference.mdx
index e92cbc252..6bc52604d 100644
--- a/src/oss/javascript/integrations/text_embedding/hugging_face_inference.mdx
+++ b/src/oss/javascript/integrations/text_embedding/hugging_face_inference.mdx
@@ -8,10 +8,6 @@ This Embeddings integration uses the HuggingFace Inference API to generate embed
You'll first need to install the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package and the required peer dependency:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core @huggingface/inference@4
```
diff --git a/src/oss/javascript/integrations/text_embedding/ibm.md b/src/oss/javascript/integrations/text_embedding/ibm.md
index 7d64df28d..ee49a2d78 100644
--- a/src/oss/javascript/integrations/text_embedding/ibm.md
+++ b/src/oss/javascript/integrations/text_embedding/ibm.md
@@ -105,15 +105,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain IBM watsonx.ai integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/jina.mdx b/src/oss/javascript/integrations/text_embedding/jina.mdx
index ec1520ec7..ed4aac643 100644
--- a/src/oss/javascript/integrations/text_embedding/jina.mdx
+++ b/src/oss/javascript/integrations/text_embedding/jina.mdx
@@ -8,10 +8,6 @@ The `JinaEmbeddings` class utilizes the Jina API to generate embeddings for give
Install the `@langchain/community` package as shown below:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm i @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/text_embedding/llama_cpp.mdx b/src/oss/javascript/integrations/text_embedding/llama_cpp.mdx
index 55f77b296..a25b7dafb 100644
--- a/src/oss/javascript/integrations/text_embedding/llama_cpp.mdx
+++ b/src/oss/javascript/integrations/text_embedding/llama_cpp.mdx
@@ -17,10 +17,6 @@ You'll need to install major version `3` of the [node-llama-cpp](https://github.
```bash npm
npm install -S node-llama-cpp@3
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/text_embedding/mistralai.md b/src/oss/javascript/integrations/text_embedding/mistralai.md
index 64055ac39..40df58f4e 100644
--- a/src/oss/javascript/integrations/text_embedding/mistralai.md
+++ b/src/oss/javascript/integrations/text_embedding/mistralai.md
@@ -35,14 +35,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain MistralAIEmbeddings integration lives in the `@langchain/mistralai` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/mistralai @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/mistralai @langchain/core
+```
-
- @langchain/mistralai @langchain/core
-
+```bash pnpm
+pnpm add @langchain/mistralai @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/mixedbread_ai.mdx b/src/oss/javascript/integrations/text_embedding/mixedbread_ai.mdx
index 9eeda9e36..f99857a03 100644
--- a/src/oss/javascript/integrations/text_embedding/mixedbread_ai.mdx
+++ b/src/oss/javascript/integrations/text_embedding/mixedbread_ai.mdx
@@ -8,10 +8,6 @@ The `MixedbreadAIEmbeddings` class uses the [Mixedbread AI](https://mixedbread.a
To install the `@langchain/mixedbread-ai` package, use the following command:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/mixedbread-ai @langchain/core
```
diff --git a/src/oss/javascript/integrations/text_embedding/nomic.mdx b/src/oss/javascript/integrations/text_embedding/nomic.mdx
index 7c1c72b4a..48c4d88ad 100644
--- a/src/oss/javascript/integrations/text_embedding/nomic.mdx
+++ b/src/oss/javascript/integrations/text_embedding/nomic.mdx
@@ -11,10 +11,6 @@ You can sign up for a Nomic account and create an API key [here](https://atlas.n
You'll first need to install the [`@langchain/nomic`](https://www.npmjs.com/package/@langchain/nomic) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/nomic @langchain/core
```
diff --git a/src/oss/javascript/integrations/text_embedding/ollama.md b/src/oss/javascript/integrations/text_embedding/ollama.md
index 23d25909b..40024169e 100644
--- a/src/oss/javascript/integrations/text_embedding/ollama.md
+++ b/src/oss/javascript/integrations/text_embedding/ollama.md
@@ -29,14 +29,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain OllamaEmbeddings integration lives in the `@langchain/ollama` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/ollama @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/ollama @langchain/core
+```
-
- @langchain/ollama @langchain/core
-
+```bash pnpm
+pnpm add @langchain/ollama @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/openai.md b/src/oss/javascript/integrations/text_embedding/openai.md
index 24999c86c..f2cfd646b 100644
--- a/src/oss/javascript/integrations/text_embedding/openai.md
+++ b/src/oss/javascript/integrations/text_embedding/openai.md
@@ -35,14 +35,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain OpenAIEmbeddings integration lives in the `@langchain/openai` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/openai @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/openai @langchain/core
+```
-
- @langchain/openai @langchain/core
-
+```bash pnpm
+pnpm add @langchain/openai @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/pinecone.md b/src/oss/javascript/integrations/text_embedding/pinecone.md
index 1be63df0d..56d179e03 100644
--- a/src/oss/javascript/integrations/text_embedding/pinecone.md
+++ b/src/oss/javascript/integrations/text_embedding/pinecone.md
@@ -35,14 +35,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain PineconeEmbeddings integration lives in the `@langchain/pinecone` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/pinecone @langchain/core @pinecone-database/pinecone@5
+```
+
+```bash yarn
+yarn add @langchain/pinecone @langchain/core @pinecone-database/pinecone@5
+```
-
- @langchain/pinecone @langchain/core @pinecone-database/pinecone@5
-
+```bash pnpm
+pnpm add @langchain/pinecone @langchain/core @pinecone-database/pinecone@5
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/premai.mdx b/src/oss/javascript/integrations/text_embedding/premai.mdx
index 3f39b7154..d1c9584fc 100644
--- a/src/oss/javascript/integrations/text_embedding/premai.mdx
+++ b/src/oss/javascript/integrations/text_embedding/premai.mdx
@@ -10,10 +10,6 @@ In order to use the Prem API you'll need an API key. You can sign up for a Prem
You'll first need to install the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/text_embedding/tencent_hunyuan.mdx b/src/oss/javascript/integrations/text_embedding/tencent_hunyuan.mdx
index 07d29db29..ae1087484 100644
--- a/src/oss/javascript/integrations/text_embedding/tencent_hunyuan.mdx
+++ b/src/oss/javascript/integrations/text_embedding/tencent_hunyuan.mdx
@@ -10,10 +10,6 @@ The `TencentHunyuanEmbeddings` class uses the Tencent Hunyuan API to generate em
2. Create SecretID & SecretKey [here](https://console.cloud.tencent.com/cam/capi).
3. Set SecretID and SecretKey as environment variables named `TENCENT_SECRET_ID` and `TENCENT_SECRET_KEY`, respectively.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/text_embedding/togetherai.md b/src/oss/javascript/integrations/text_embedding/togetherai.md
index fee7aa368..cd455de31 100644
--- a/src/oss/javascript/integrations/text_embedding/togetherai.md
+++ b/src/oss/javascript/integrations/text_embedding/togetherai.md
@@ -35,14 +35,19 @@ If you want to get automated tracing of your model calls you can also set your [
The LangChain TogetherAIEmbeddings integration lives in the `@langchain/community` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/text_embedding/transformers.mdx b/src/oss/javascript/integrations/text_embedding/transformers.mdx
index ef35f6849..0421c2f12 100644
--- a/src/oss/javascript/integrations/text_embedding/transformers.mdx
+++ b/src/oss/javascript/integrations/text_embedding/transformers.mdx
@@ -20,10 +20,6 @@ import the embeddings from `"@langchain/community/embeddings/hf_transformers"` b
```bash npm
npm install @huggingface/transformers
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/text_embedding/zhipuai.mdx b/src/oss/javascript/integrations/text_embedding/zhipuai.mdx
index d33b23ad0..4879d6712 100644
--- a/src/oss/javascript/integrations/text_embedding/zhipuai.mdx
+++ b/src/oss/javascript/integrations/text_embedding/zhipuai.mdx
@@ -12,10 +12,6 @@ https://open.bigmodel.cn
Then, you'll need to install the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core jsonwebtoken
```
diff --git a/src/oss/javascript/integrations/tools/aiplugin-tool.mdx b/src/oss/javascript/integrations/tools/aiplugin-tool.mdx
index 6ed8634e0..386f8a346 100644
--- a/src/oss/javascript/integrations/tools/aiplugin-tool.mdx
+++ b/src/oss/javascript/integrations/tools/aiplugin-tool.mdx
@@ -14,10 +14,6 @@ Note 1: This currently only works for plugins with no auth.
Note 2: There are almost certainly other ways to do this; this is just a first pass. If you have better ideas, please open a PR!
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/azure_dynamic_sessions.mdx b/src/oss/javascript/integrations/tools/azure_dynamic_sessions.mdx
index f36233ef6..2757fdc10 100644
--- a/src/oss/javascript/integrations/tools/azure_dynamic_sessions.mdx
+++ b/src/oss/javascript/integrations/tools/azure_dynamic_sessions.mdx
@@ -10,10 +10,6 @@ You can learn more about Azure Container Apps dynamic sessions and its code inte
You'll first need to install the [`@langchain/azure-dynamic-sessions`](https://www.npmjs.com/package/@langchain/azure-dynamic-sessions) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/azure-dynamic-sessions @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/dalle.mdx b/src/oss/javascript/integrations/tools/dalle.mdx
index cc3de1121..7df0768a0 100644
--- a/src/oss/javascript/integrations/tools/dalle.mdx
+++ b/src/oss/javascript/integrations/tools/dalle.mdx
@@ -13,10 +13,6 @@ and then set the OPENAI_API_KEY environment variable to the key you just created
To use the Dall-E Tool you need to install the LangChain OpenAI integration package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/discord_tool.mdx b/src/oss/javascript/integrations/tools/discord_tool.mdx
index 15ee1e65d..d3d7deb3e 100644
--- a/src/oss/javascript/integrations/tools/discord_tool.mdx
+++ b/src/oss/javascript/integrations/tools/discord_tool.mdx
@@ -16,10 +16,6 @@ npm install discord.js
import DiscordTool from "/snippets/javascript-integrations/examples/tools/discord_tool.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/duckduckgo_search.md b/src/oss/javascript/integrations/tools/duckduckgo_search.md
index 2aa903ca7..a3e47d8cf 100644
--- a/src/oss/javascript/integrations/tools/duckduckgo_search.md
+++ b/src/oss/javascript/integrations/tools/duckduckgo_search.md
@@ -18,14 +18,19 @@ DuckDuckGoSearch offers a privacy-focused search API designed for LLM Agents. It
The integration lives in the `@langchain/community` package, along with the `duck-duck-scrape` dependency:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core duck-duck-scrape
+```
+
+```bash yarn
+yarn add @langchain/community @langchain/core duck-duck-scrape
+```
-
- @langchain/community @langchain/core duck-duck-scrape
-
+```bash pnpm
+pnpm add @langchain/community @langchain/core duck-duck-scrape
```
+
### Credentials
diff --git a/src/oss/javascript/integrations/tools/exa_search.md b/src/oss/javascript/integrations/tools/exa_search.md
index 73cfbbaee..b5786a978 100644
--- a/src/oss/javascript/integrations/tools/exa_search.md
+++ b/src/oss/javascript/integrations/tools/exa_search.md
@@ -20,16 +20,19 @@ This page goes over how to use `ExaSearchResults` with LangChain.
The integration lives in the `@langchain/exa` package.
-```{=mdx}
-
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/exa @langchain/core
+```
-
- @langchain/exa @langchain/core
-
+```bash yarn
+yarn add @langchain/exa @langchain/core
+```
+```bash pnpm
+pnpm add @langchain/exa @langchain/core
```
+
### Credentials
diff --git a/src/oss/javascript/integrations/tools/gmail.mdx b/src/oss/javascript/integrations/tools/gmail.mdx
index 0a19de6be..fb1fdc8be 100644
--- a/src/oss/javascript/integrations/tools/gmail.mdx
+++ b/src/oss/javascript/integrations/tools/gmail.mdx
@@ -20,10 +20,6 @@ You can authenticate via two methods:
To use the Gmail Tool you need to install the following official peer dependency:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core googleapis
```
diff --git a/src/oss/javascript/integrations/tools/google_calendar.mdx b/src/oss/javascript/integrations/tools/google_calendar.mdx
index 1380485ec..72c31693c 100644
--- a/src/oss/javascript/integrations/tools/google_calendar.mdx
+++ b/src/oss/javascript/integrations/tools/google_calendar.mdx
@@ -15,10 +15,6 @@ npm install googleapis
import GoogleCalendar from "/snippets/javascript-integrations/examples/tools/google_calendar.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core @langchain/community @langchain/langgraph
```
diff --git a/src/oss/javascript/integrations/tools/google_places.mdx b/src/oss/javascript/integrations/tools/google_places.mdx
index bf4777f89..7a8157b19 100644
--- a/src/oss/javascript/integrations/tools/google_places.mdx
+++ b/src/oss/javascript/integrations/tools/google_places.mdx
@@ -13,10 +13,6 @@ as `process.env.GOOGLE_PLACES_API_KEY` or pass it in as an `apiKey` constructor
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/google_routes.mdx b/src/oss/javascript/integrations/tools/google_routes.mdx
index 61ee27370..c45e38569 100644
--- a/src/oss/javascript/integrations/tools/google_routes.mdx
+++ b/src/oss/javascript/integrations/tools/google_routes.mdx
@@ -13,10 +13,6 @@ as `process.env.GOOGLE_ROUTES_API_KEY` or pass it in as an `apiKey` constructor
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/google_trends.mdx b/src/oss/javascript/integrations/tools/google_trends.mdx
index ec5151632..9f3431172 100644
--- a/src/oss/javascript/integrations/tools/google_trends.mdx
+++ b/src/oss/javascript/integrations/tools/google_trends.mdx
@@ -20,10 +20,6 @@ Then, set your API key as `process.env.SERPAPI_API_KEY` or pass it in as an `api
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/ibm.mdx b/src/oss/javascript/integrations/tools/ibm.mdx
index c0a9d79f1..193cf7f71 100644
--- a/src/oss/javascript/integrations/tools/ibm.mdx
+++ b/src/oss/javascript/integrations/tools/ibm.mdx
@@ -34,10 +34,6 @@ process.env.LANGSMITH_API_KEY="your-api-key"
This toolkit lives in the `@langchain/community` package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
@@ -104,11 +100,19 @@ For detailed info about tools please visit [watsonx.ai API docs](https://cloud.i
First, ensure you have LangGraph installed:
-```{=mdx}
-
- @langchain/langgraph
-
+
+```bash npm
+npm install @langchain/langgraph
+```
+
+```bash yarn
+yarn add @langchain/langgraph
+```
+
+```bash pnpm
+pnpm add @langchain/langgraph
```
+
Then, instantiate your LLM to be used in the React agent:
diff --git a/src/oss/javascript/integrations/tools/jigsawstack.mdx b/src/oss/javascript/integrations/tools/jigsawstack.mdx
index 7f7a9b7ff..966a8a20c 100644
--- a/src/oss/javascript/integrations/tools/jigsawstack.mdx
+++ b/src/oss/javascript/integrations/tools/jigsawstack.mdx
@@ -27,10 +27,6 @@ export JIGSAWSTACK_API_KEY="your-api-key"
## Usage, standalone
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai
```
diff --git a/src/oss/javascript/integrations/tools/json.mdx b/src/oss/javascript/integrations/tools/json.mdx
index f0667e4f2..dd25cbb32 100644
--- a/src/oss/javascript/integrations/tools/json.mdx
+++ b/src/oss/javascript/integrations/tools/json.mdx
@@ -4,10 +4,6 @@ title: JSON Agent Toolkit
This example shows how to load and use an agent with a JSON toolkit.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/lambda_agent.mdx b/src/oss/javascript/integrations/tools/lambda_agent.mdx
index 0cb3a3b96..3ae2f5136 100644
--- a/src/oss/javascript/integrations/tools/lambda_agent.mdx
+++ b/src/oss/javascript/integrations/tools/lambda_agent.mdx
@@ -17,10 +17,6 @@ This quick start will demonstrate how an Agent could use a Lambda function to se
- If you have not run [`aws configure`](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-configure.html) via the AWS CLI, the `region`, `accessKeyId`, and `secretAccessKey` must be provided to the AWSLambda constructor.
- The IAM role corresponding to those credentials must have permission to invoke the lambda function.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/pyinterpreter.mdx b/src/oss/javascript/integrations/tools/pyinterpreter.mdx
index 149fc35c7..26b4d4300 100644
--- a/src/oss/javascript/integrations/tools/pyinterpreter.mdx
+++ b/src/oss/javascript/integrations/tools/pyinterpreter.mdx
@@ -13,10 +13,6 @@ This can be useful in combination with an LLM that can generate code to perform
import Pyinterpreter from "/snippets/javascript-integrations/examples/tools/pyinterpreter.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/searchapi.mdx b/src/oss/javascript/integrations/tools/searchapi.mdx
index 574f63355..1f2d46926 100644
--- a/src/oss/javascript/integrations/tools/searchapi.mdx
+++ b/src/oss/javascript/integrations/tools/searchapi.mdx
@@ -12,10 +12,6 @@ Input should be a search query.
import SearchapiGoogleNews from "/snippets/javascript-integrations/examples/tools/searchapi_google_news.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/searxng.mdx b/src/oss/javascript/integrations/tools/searxng.mdx
index d45ee3926..eeec4d5a4 100644
--- a/src/oss/javascript/integrations/tools/searxng.mdx
+++ b/src/oss/javascript/integrations/tools/searxng.mdx
@@ -10,10 +10,6 @@ A wrapper around the SearxNG API, this tool is useful for performing meta-search
import SearxngSearch from "/snippets/javascript-integrations/examples/tools/searxng_search.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/serpapi.md b/src/oss/javascript/integrations/tools/serpapi.md
index e931ce7d4..65e762e80 100644
--- a/src/oss/javascript/integrations/tools/serpapi.md
+++ b/src/oss/javascript/integrations/tools/serpapi.md
@@ -18,14 +18,19 @@ This guide provides a quick overview for getting started with the SerpAPI [tool]
The integration lives in the `@langchain/community` package, which you can install as shown below:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
### Credentials
diff --git a/src/oss/javascript/integrations/tools/sfn_agent.mdx b/src/oss/javascript/integrations/tools/sfn_agent.mdx
index f19e0655a..fcd522b4f 100644
--- a/src/oss/javascript/integrations/tools/sfn_agent.mdx
+++ b/src/oss/javascript/integrations/tools/sfn_agent.mdx
@@ -17,10 +17,6 @@ npm install @aws-sdk/client-sfn
```
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/sql.md b/src/oss/javascript/integrations/tools/sql.md
index 58da542fd..e4b33f395 100644
--- a/src/oss/javascript/integrations/tools/sql.md
+++ b/src/oss/javascript/integrations/tools/sql.md
@@ -30,14 +30,19 @@ process.env.LANGSMITH_API_KEY="your-api-key"
This toolkit lives in the `langchain` package. You'll also need to install the `typeorm` peer dependency.
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install langchain @langchain/core typeorm
+```
-
- langchain @langchain/core typeorm
-
+```bash yarn
+yarn add langchain @langchain/core typeorm
+```
+
+```bash pnpm
+pnpm add langchain @langchain/core typeorm
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/tools/tavily_crawl.md b/src/oss/javascript/integrations/tools/tavily_crawl.md
index dfa1f213c..bcdb2b4c0 100644
--- a/src/oss/javascript/integrations/tools/tavily_crawl.md
+++ b/src/oss/javascript/integrations/tools/tavily_crawl.md
@@ -18,14 +18,19 @@ This guide provides a quick overview for getting started with the Tavily [tool](
The integration lives in the `@langchain/tavily` package, which you can install as shown below:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/tavily @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/tavily @langchain/core
+```
-
- @langchain/tavily @langchain/core
-
+```bash pnpm
+pnpm add @langchain/tavily @langchain/core
```
+
### Credentials
diff --git a/src/oss/javascript/integrations/tools/tavily_extract.md b/src/oss/javascript/integrations/tools/tavily_extract.md
index 1039228ef..945bcbc3e 100644
--- a/src/oss/javascript/integrations/tools/tavily_extract.md
+++ b/src/oss/javascript/integrations/tools/tavily_extract.md
@@ -18,14 +18,19 @@ This guide provides a quick overview for getting started with the Tavily [tool](
The integration lives in the `@langchain/tavily` package, which you can install as shown below:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/tavily @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/tavily @langchain/core
+```
-
- @langchain/tavily @langchain/core
-
+```bash pnpm
+pnpm add @langchain/tavily @langchain/core
```
+
### Credentials
diff --git a/src/oss/javascript/integrations/tools/tavily_map.md b/src/oss/javascript/integrations/tools/tavily_map.md
index e059a837e..306d5407d 100644
--- a/src/oss/javascript/integrations/tools/tavily_map.md
+++ b/src/oss/javascript/integrations/tools/tavily_map.md
@@ -18,14 +18,19 @@ This guide provides a quick overview for getting started with the Tavily [tool](
The integration lives in the `@langchain/tavily` package, which you can install as shown below:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/tavily @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/tavily @langchain/core
+```
-
- @langchain/tavily @langchain/core
-
+```bash pnpm
+pnpm add @langchain/tavily @langchain/core
```
+
### Credentials
diff --git a/src/oss/javascript/integrations/tools/tavily_search.md b/src/oss/javascript/integrations/tools/tavily_search.md
index 90ffc163c..1476bd265 100644
--- a/src/oss/javascript/integrations/tools/tavily_search.md
+++ b/src/oss/javascript/integrations/tools/tavily_search.md
@@ -18,14 +18,19 @@ This guide provides a quick overview for getting started with the Tavily [tool](
The integration lives in the `@langchain/tavily` package, which you can install as shown below:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/tavily @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/tavily @langchain/core
+```
-
- @langchain/tavily @langchain/core
-
+```bash pnpm
+pnpm add @langchain/tavily @langchain/core
```
+
### Credentials
diff --git a/src/oss/javascript/integrations/tools/tavily_search_community.md b/src/oss/javascript/integrations/tools/tavily_search_community.md
index 4907466ee..9e5f6cd3b 100644
--- a/src/oss/javascript/integrations/tools/tavily_search_community.md
+++ b/src/oss/javascript/integrations/tools/tavily_search_community.md
@@ -25,14 +25,19 @@ This guide provides a quick overview for getting started with the Tavily [tool](
The integration lives in the `@langchain/community` package, which you can install as shown below:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core
+```
+
+```bash yarn
+yarn add @langchain/community @langchain/core
+```
-
- @langchain/community @langchain/core
-
+```bash pnpm
+pnpm add @langchain/community @langchain/core
```
+
### Credentials
diff --git a/src/oss/javascript/integrations/tools/vectorstore.md b/src/oss/javascript/integrations/tools/vectorstore.md
index 5381b02e3..aee0f902c 100644
--- a/src/oss/javascript/integrations/tools/vectorstore.md
+++ b/src/oss/javascript/integrations/tools/vectorstore.md
@@ -19,14 +19,19 @@ process.env.LANGSMITH_API_KEY="your-api-key"
This toolkit lives in the `langchain` package:
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install langchain @langchain/core
+```
-
- langchain @langchain/core
-
+```bash yarn
+yarn add langchain @langchain/core
+```
+
+```bash pnpm
+pnpm add langchain @langchain/core
```
+
## Instantiation
diff --git a/src/oss/javascript/integrations/tools/webbrowser.mdx b/src/oss/javascript/integrations/tools/webbrowser.mdx
index 54554d11c..a0a053f56 100644
--- a/src/oss/javascript/integrations/tools/webbrowser.mdx
+++ b/src/oss/javascript/integrations/tools/webbrowser.mdx
@@ -24,10 +24,6 @@ npm install cheerio axios
import Webbrowser from "/snippets/javascript-integrations/examples/tools/webbrowser.mdx";
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/tools/zapier_agent.mdx b/src/oss/javascript/integrations/tools/zapier_agent.mdx
index fc6bfc857..15a3ee298 100644
--- a/src/oss/javascript/integrations/tools/zapier_agent.mdx
+++ b/src/oss/javascript/integrations/tools/zapier_agent.mdx
@@ -26,10 +26,6 @@ Review [auth docs](https://docs.zapier.com/platform/build/auth) for more details
The example below demonstrates how to use the Zapier integration as an Agent:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/analyticdb.mdx b/src/oss/javascript/integrations/vectorstores/analyticdb.mdx
index fcd24c1b1..6c554e93f 100644
--- a/src/oss/javascript/integrations/vectorstores/analyticdb.mdx
+++ b/src/oss/javascript/integrations/vectorstores/analyticdb.mdx
@@ -30,10 +30,6 @@ And we need [pg-copy-streams](https://github.com/brianc/node-pg-copy-streams) to
```bash npm
npm install -S pg-copy-streams
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/astradb.mdx b/src/oss/javascript/integrations/vectorstores/astradb.mdx
index 7c05a80e9..4fad29a83 100644
--- a/src/oss/javascript/integrations/vectorstores/astradb.mdx
+++ b/src/oss/javascript/integrations/vectorstores/astradb.mdx
@@ -28,10 +28,6 @@ Where `ASTRA_DB_COLLECTION` is the desired name of your collection
6. Install the Astra TS Client & the LangChain community package
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @datastax/astra-db-ts @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/azion-edgesql.md b/src/oss/javascript/integrations/vectorstores/azion-edgesql.md
index 56aa6eff5..7221cf4d8 100644
--- a/src/oss/javascript/integrations/vectorstores/azion-edgesql.md
+++ b/src/oss/javascript/integrations/vectorstores/azion-edgesql.md
@@ -20,14 +20,19 @@ To use the `AzionVectorStore` vector store, you will need to install the `@langc
This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish.
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install azion @langchain/openai @langchain/community
+```
+
+```bash yarn
+yarn add azion @langchain/openai @langchain/community
+```
-
- azion @langchain/openai @langchain/community
-
+```bash pnpm
+pnpm add azion @langchain/openai @langchain/community
```
+
### Credentials
diff --git a/src/oss/javascript/integrations/vectorstores/azure_aisearch.mdx b/src/oss/javascript/integrations/vectorstores/azure_aisearch.mdx
index fba2155cb..0423a5bbb 100644
--- a/src/oss/javascript/integrations/vectorstores/azure_aisearch.mdx
+++ b/src/oss/javascript/integrations/vectorstores/azure_aisearch.mdx
@@ -12,10 +12,6 @@ Learn how to leverage the vector search capabilities of Azure AI Search from [th
You'll first need to install the `@azure/search-documents` SDK and the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install -S @langchain/community @langchain/core @azure/search-documents
```
diff --git a/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_mongodb.mdx b/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_mongodb.mdx
index 6c404c0e4..33148aeb4 100644
--- a/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_mongodb.mdx
+++ b/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_mongodb.mdx
@@ -12,10 +12,6 @@ Learn how to leverage the vector search capabilities of Azure Cosmos DB for Mong
You'll first need to install the [`@langchain/azure-cosmosdb`](https://www.npmjs.com/package/@langchain/azure-cosmosdb) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/azure-cosmosdb @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_nosql.mdx b/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_nosql.mdx
index 12c4d45aa..b6c7c9bae 100644
--- a/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_nosql.mdx
+++ b/src/oss/javascript/integrations/vectorstores/azure_cosmosdb_nosql.mdx
@@ -10,10 +10,6 @@ Learn how to leverage the vector search capabilities of Azure Cosmos DB for NoSQ
You'll first need to install the [`@langchain/azure-cosmosdb`](https://www.npmjs.com/package/@langchain/azure-cosmosdb) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/azure-cosmosdb @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/cassandra.mdx b/src/oss/javascript/integrations/vectorstores/cassandra.mdx
index 1f925ff7d..aa9d0978e 100644
--- a/src/oss/javascript/integrations/vectorstores/cassandra.mdx
+++ b/src/oss/javascript/integrations/vectorstores/cassandra.mdx
@@ -16,10 +16,6 @@ The [latest version](
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install cassandra-driver @langchain/community @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/chroma.md b/src/oss/javascript/integrations/vectorstores/chroma.md
index 87843068d..0450a4d74 100644
--- a/src/oss/javascript/integrations/vectorstores/chroma.md
+++ b/src/oss/javascript/integrations/vectorstores/chroma.md
@@ -29,14 +29,19 @@ To use Chroma vector stores, you'll need to install the `@langchain/community` i
This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish.
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/openai @langchain/core chromadb
+```
+
+```bash yarn
+yarn add @langchain/community @langchain/openai @langchain/core chromadb
+```
-
- @langchain/community @langchain/openai @langchain/core chromadb
-
+```bash pnpm
+pnpm add @langchain/community @langchain/openai @langchain/core chromadb
```
+
If you want to run Chroma locally, you can [run a local Chroma server](https://docs.trychroma.com/docs/cli/run) using the Chroma CLI, which ships with the `chromadb` package:
diff --git a/src/oss/javascript/integrations/vectorstores/clickhouse.mdx b/src/oss/javascript/integrations/vectorstores/clickhouse.mdx
index 3ef863bb7..4e16ddb67 100644
--- a/src/oss/javascript/integrations/vectorstores/clickhouse.mdx
+++ b/src/oss/javascript/integrations/vectorstores/clickhouse.mdx
@@ -21,10 +21,6 @@ You will need to install the following peer dependencies:
```bash npm
npm install -S @clickhouse/client mysql2
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/closevector.mdx b/src/oss/javascript/integrations/vectorstores/closevector.mdx
index 70603cd32..ce1941ae6 100644
--- a/src/oss/javascript/integrations/vectorstores/closevector.mdx
+++ b/src/oss/javascript/integrations/vectorstores/closevector.mdx
@@ -22,10 +22,6 @@ npm install -S closevector-web
```bash npm
npm install -S closevector-node
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/cloudflare_vectorize.mdx b/src/oss/javascript/integrations/vectorstores/cloudflare_vectorize.mdx
index 329b27e5d..b1c3f71cf 100644
--- a/src/oss/javascript/integrations/vectorstores/cloudflare_vectorize.mdx
+++ b/src/oss/javascript/integrations/vectorstores/cloudflare_vectorize.mdx
@@ -32,10 +32,6 @@ index_name = ""
Finally, you'll need to install the LangChain Cloudflare integration package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/cloudflare @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/convex.mdx b/src/oss/javascript/integrations/vectorstores/convex.mdx
index 17a3b5958..017daed23 100644
--- a/src/oss/javascript/integrations/vectorstores/convex.mdx
+++ b/src/oss/javascript/integrations/vectorstores/convex.mdx
@@ -44,10 +44,6 @@ export default defineSchema({
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/hanavector.mdx b/src/oss/javascript/integrations/vectorstores/hanavector.mdx
index 931da9bf6..177482a19 100644
--- a/src/oss/javascript/integrations/vectorstores/hanavector.mdx
+++ b/src/oss/javascript/integrations/vectorstores/hanavector.mdx
@@ -8,10 +8,6 @@ title: SAP HANA Cloud Vector Engine
You'll first need to install either the [`@sap/hana-client`](https://www.npmjs.com/package/@sap/hana-client) or the [`hdb`](https://www.npmjs.com/package/hdb) package, and the [`@langchain/community`](https://www.npmjs.com/package/@langchain/community) package:
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install -S @langchain/community @langchain/core @sap/hana-client
# or
diff --git a/src/oss/javascript/integrations/vectorstores/lancedb.mdx b/src/oss/javascript/integrations/vectorstores/lancedb.mdx
index 9a60bb609..e27dd1651 100644
--- a/src/oss/javascript/integrations/vectorstores/lancedb.mdx
+++ b/src/oss/javascript/integrations/vectorstores/lancedb.mdx
@@ -13,10 +13,6 @@ Install the [LanceDB](https://github.com/lancedb/lancedb) [Node.js bindings](htt
```bash npm
npm install -S @lancedb/lancedb
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/libsql.mdx b/src/oss/javascript/integrations/vectorstores/libsql.mdx
index ae0ec7f02..b824ea68e 100644
--- a/src/oss/javascript/integrations/vectorstores/libsql.mdx
+++ b/src/oss/javascript/integrations/vectorstores/libsql.mdx
@@ -24,10 +24,6 @@ This guide will also use OpenAI embeddings, which require you to install the `@l
You can use local SQLite when working with the libSQL vector store, or use a hosted Turso Database.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @libsql/client @langchain/openai @langchain/community
```
diff --git a/src/oss/javascript/integrations/vectorstores/memory.md b/src/oss/javascript/integrations/vectorstores/memory.md
index c96548587..2e7696436 100644
--- a/src/oss/javascript/integrations/vectorstores/memory.md
+++ b/src/oss/javascript/integrations/vectorstores/memory.md
@@ -22,14 +22,19 @@ To use in-memory vector stores, you'll need to install the `langchain` package:
This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish.
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install langchain @langchain/openai @langchain/core
+```
+
+```bash yarn
+yarn add langchain @langchain/openai @langchain/core
+```
-
- langchain @langchain/openai @langchain/core
-
+```bash pnpm
+pnpm add langchain @langchain/openai @langchain/core
```
+
### Credentials
diff --git a/src/oss/javascript/integrations/vectorstores/milvus.mdx b/src/oss/javascript/integrations/vectorstores/milvus.mdx
index 148f5d485..c72fa224c 100644
--- a/src/oss/javascript/integrations/vectorstores/milvus.mdx
+++ b/src/oss/javascript/integrations/vectorstores/milvus.mdx
@@ -42,10 +42,6 @@ Only available on Node.js.
## Index and query docs
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/momento_vector_index.mdx b/src/oss/javascript/integrations/vectorstores/momento_vector_index.mdx
index 395d6ad16..c834e6ee9 100644
--- a/src/oss/javascript/integrations/vectorstores/momento_vector_index.mdx
+++ b/src/oss/javascript/integrations/vectorstores/momento_vector_index.mdx
@@ -38,10 +38,6 @@ To sign up and access MVI, visit the [Momento Console](https://console.gomomento
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/myscale.mdx b/src/oss/javascript/integrations/vectorstores/myscale.mdx
index d2c686e0a..e457884ec 100644
--- a/src/oss/javascript/integrations/vectorstores/myscale.mdx
+++ b/src/oss/javascript/integrations/vectorstores/myscale.mdx
@@ -16,10 +16,6 @@ Only available on Node.js.
2. After launching a cluster, view your `Connection Details` from your cluster's `Actions` menu. You will need the host, port, username, and password.
3. Install the required Node.js peer dependency in your workspace.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install -S @langchain/openai @clickhouse/client @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/neo4jvector.mdx b/src/oss/javascript/integrations/vectorstores/neo4jvector.mdx
index ebeb01c62..a37b00a9e 100644
--- a/src/oss/javascript/integrations/vectorstores/neo4jvector.mdx
+++ b/src/oss/javascript/integrations/vectorstores/neo4jvector.mdx
@@ -16,10 +16,6 @@ To work with Neo4j Vector Index, you need to install the `neo4j-driver` package:
```bash npm
npm install neo4j-driver
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/neon.mdx b/src/oss/javascript/integrations/vectorstores/neon.mdx
index 4b1319ed0..88fcdf754 100644
--- a/src/oss/javascript/integrations/vectorstores/neon.mdx
+++ b/src/oss/javascript/integrations/vectorstores/neon.mdx
@@ -31,10 +31,6 @@ driver to connect to the database.
```bash npm
npm install @neondatabase/serverless
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/opensearch.mdx b/src/oss/javascript/integrations/vectorstores/opensearch.mdx
index 99446e7f1..6b2979a9d 100644
--- a/src/oss/javascript/integrations/vectorstores/opensearch.mdx
+++ b/src/oss/javascript/integrations/vectorstores/opensearch.mdx
@@ -14,10 +14,6 @@ LangChain.js accepts [@opensearch-project/opensearch](https://opensearch.org/doc
## Setup
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install -S @langchain/openai @langchain/core @opensearch-project/opensearch
```
diff --git a/src/oss/javascript/integrations/vectorstores/pinecone.md b/src/oss/javascript/integrations/vectorstores/pinecone.md
index 4a5415862..549b86d56 100644
--- a/src/oss/javascript/integrations/vectorstores/pinecone.md
+++ b/src/oss/javascript/integrations/vectorstores/pinecone.md
@@ -20,14 +20,19 @@ To use Pinecone vector stores, you'll need to create a Pinecone account, initial
This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish.
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/pinecone @langchain/openai @langchain/core @pinecone-database/pinecone@5
+```
+
+```bash yarn
+yarn add @langchain/pinecone @langchain/openai @langchain/core @pinecone-database/pinecone@5
+```
-
- @langchain/pinecone @langchain/openai @langchain/core @pinecone-database/pinecone@5
-
+```bash pnpm
+pnpm add @langchain/pinecone @langchain/openai @langchain/core @pinecone-database/pinecone@5
```
+
### Credentials
diff --git a/src/oss/javascript/integrations/vectorstores/prisma.mdx b/src/oss/javascript/integrations/vectorstores/prisma.mdx
index 4599eb96b..b6ccdc6bc 100644
--- a/src/oss/javascript/integrations/vectorstores/prisma.mdx
+++ b/src/oss/javascript/integrations/vectorstores/prisma.mdx
@@ -63,10 +63,6 @@ npx prisma migrate dev
```
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/rockset.mdx b/src/oss/javascript/integrations/vectorstores/rockset.mdx
index 3336f5be8..5155db3f2 100644
--- a/src/oss/javascript/integrations/vectorstores/rockset.mdx
+++ b/src/oss/javascript/integrations/vectorstores/rockset.mdx
@@ -15,10 +15,6 @@ yarn add @rockset/client
### Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/core @langchain/community
```
diff --git a/src/oss/javascript/integrations/vectorstores/singlestore.mdx b/src/oss/javascript/integrations/vectorstores/singlestore.mdx
index ed4764358..65431f757 100644
--- a/src/oss/javascript/integrations/vectorstores/singlestore.mdx
+++ b/src/oss/javascript/integrations/vectorstores/singlestore.mdx
@@ -36,10 +36,6 @@ npm install -S mysql2
### Standard usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/supabase.md b/src/oss/javascript/integrations/vectorstores/supabase.md
index fa1495af3..e0bf4133e 100644
--- a/src/oss/javascript/integrations/vectorstores/supabase.md
+++ b/src/oss/javascript/integrations/vectorstores/supabase.md
@@ -22,14 +22,19 @@ To use Supabase vector stores, you'll need to set up a Supabase database and ins
This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish.
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core @supabase/supabase-js @langchain/openai
+```
+
+```bash yarn
+yarn add @langchain/community @langchain/core @supabase/supabase-js @langchain/openai
+```
-
- @langchain/community @langchain/core @supabase/supabase-js @langchain/openai
-
+```bash pnpm
+pnpm add @langchain/community @langchain/core @supabase/supabase-js @langchain/openai
```
+
Once you've created a database, run the following SQL to set up [`pgvector`](https://github.com/pgvector/pgvector) and create the necessary table and functions:
diff --git a/src/oss/javascript/integrations/vectorstores/tigris.mdx b/src/oss/javascript/integrations/vectorstores/tigris.mdx
index b90db6b4a..775fd04e4 100644
--- a/src/oss/javascript/integrations/vectorstores/tigris.mdx
+++ b/src/oss/javascript/integrations/vectorstores/tigris.mdx
@@ -31,10 +31,6 @@ Application Keys section of the project.
## Index docs
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install -S @langchain/openai
```
diff --git a/src/oss/javascript/integrations/vectorstores/typeorm.mdx b/src/oss/javascript/integrations/vectorstores/typeorm.mdx
index 587c69585..ef38d9128 100644
--- a/src/oss/javascript/integrations/vectorstores/typeorm.mdx
+++ b/src/oss/javascript/integrations/vectorstores/typeorm.mdx
@@ -15,10 +15,6 @@ npm install typeorm
npm install pg
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/typesense.mdx b/src/oss/javascript/integrations/vectorstores/typesense.mdx
index 2fde62a6d..41d66f4aa 100644
--- a/src/oss/javascript/integrations/vectorstores/typesense.mdx
+++ b/src/oss/javascript/integrations/vectorstores/typesense.mdx
@@ -6,10 +6,6 @@ Vector store that utilizes the Typesense search engine.
### Basic Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/upstash.md b/src/oss/javascript/integrations/vectorstores/upstash.md
index 291ada974..d9f4659cf 100644
--- a/src/oss/javascript/integrations/vectorstores/upstash.md
+++ b/src/oss/javascript/integrations/vectorstores/upstash.md
@@ -20,14 +20,19 @@ To use Upstash vector stores, you'll need to create an Upstash account, create a
This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish.
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/community @langchain/core @upstash/vector @langchain/openai
+```
+
+```bash yarn
+yarn add @langchain/community @langchain/core @upstash/vector @langchain/openai
+```
-
- @langchain/community @langchain/core @upstash/vector @langchain/openai
-
+```bash pnpm
+pnpm add @langchain/community @langchain/core @upstash/vector @langchain/openai
```
+
You can create an index from the [Upstash Console](https://console.upstash.com/login). For further reference, see [the official docs](https://upstash.com/docs/vector/overall/getstarted).
diff --git a/src/oss/javascript/integrations/vectorstores/usearch.mdx b/src/oss/javascript/integrations/vectorstores/usearch.mdx
index 27c93f3f0..38b4cf2c3 100644
--- a/src/oss/javascript/integrations/vectorstores/usearch.mdx
+++ b/src/oss/javascript/integrations/vectorstores/usearch.mdx
@@ -17,10 +17,6 @@ Install the [usearch](https://github.com/unum-cloud/usearch/tree/main/javascript
```bash npm
npm install -S usearch
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/vercel_postgres.mdx b/src/oss/javascript/integrations/vectorstores/vercel_postgres.mdx
index 99fc259ec..39c212acb 100644
--- a/src/oss/javascript/integrations/vectorstores/vercel_postgres.mdx
+++ b/src/oss/javascript/integrations/vectorstores/vercel_postgres.mdx
@@ -14,10 +14,6 @@ To work with Vercel Postgres, you need to install the `@vercel/postgres` package
```bash npm
npm install @vercel/postgres
```
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/voy.mdx b/src/oss/javascript/integrations/vectorstores/voy.mdx
index 6758380ad..83be3355a 100644
--- a/src/oss/javascript/integrations/vectorstores/voy.mdx
+++ b/src/oss/javascript/integrations/vectorstores/voy.mdx
@@ -7,10 +7,6 @@ It's supported in non-Node environments like browsers. You can use Voy as a vect
### Install Voy
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai voy-search @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/weaviate.md b/src/oss/javascript/integrations/vectorstores/weaviate.md
index a346545eb..c7f6d6d7d 100644
--- a/src/oss/javascript/integrations/vectorstores/weaviate.md
+++ b/src/oss/javascript/integrations/vectorstores/weaviate.md
@@ -20,14 +20,19 @@ To use Weaviate vector stores, you'll need to set up a Weaviate instance and ins
This guide will also use [OpenAI embeddings](/oss/integrations/text_embedding/openai), which require you to install the `@langchain/openai` integration package. You can also use [other supported embeddings models](/oss/integrations/text_embedding) if you wish.
-```{=mdx}
-import IntegrationInstallTooltip from "@mdx_components/integration_install_tooltip.mdx";
-
+
+```bash npm
+npm install @langchain/weaviate @langchain/core weaviate-client uuid @langchain/openai
+```
+
+```bash yarn
+yarn add @langchain/weaviate @langchain/core weaviate-client uuid @langchain/openai
+```
-
- @langchain/weaviate @langchain/core weaviate-client uuid @langchain/openai
-
+```bash pnpm
+pnpm add @langchain/weaviate @langchain/core weaviate-client uuid @langchain/openai
```
+
You'll need to run Weaviate either locally or on a server. See [the Weaviate documentation](https://weaviate.io/developers/weaviate/installation) for more information.
diff --git a/src/oss/javascript/integrations/vectorstores/xata.mdx b/src/oss/javascript/integrations/vectorstores/xata.mdx
index 3b1116a31..c98f9657f 100644
--- a/src/oss/javascript/integrations/vectorstores/xata.mdx
+++ b/src/oss/javascript/integrations/vectorstores/xata.mdx
@@ -35,10 +35,6 @@ and then choose the database you created above. This will also generate a `xata.
## Usage
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/zep.mdx b/src/oss/javascript/integrations/vectorstores/zep.mdx
index 6b964a8bb..905dbcf0d 100644
--- a/src/oss/javascript/integrations/vectorstores/zep.mdx
+++ b/src/oss/javascript/integrations/vectorstores/zep.mdx
@@ -39,10 +39,6 @@ You must also set your document collection to `isAutoEmbedded === false`. See th
### Example: Creating a ZepVectorStore from Documents & Querying
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
```bash npm
npm install @langchain/openai @langchain/community @langchain/core
```
diff --git a/src/oss/javascript/integrations/vectorstores/zep_cloud.mdx b/src/oss/javascript/integrations/vectorstores/zep_cloud.mdx
index 431f4e42e..02046775d 100644
--- a/src/oss/javascript/integrations/vectorstores/zep_cloud.mdx
+++ b/src/oss/javascript/integrations/vectorstores/zep_cloud.mdx
@@ -34,10 +34,6 @@ You'll need your Zep Cloud Project API Key to use the Zep VectorStore. See the [
Zep automatically embeds all documents by default and does not expect to receive any embeddings from the user.
Since LangChain requires passing in an `Embeddings` instance, we pass in `FakeEmbeddings`.
-import IntegrationInstallTooltip from '/snippets/javascript-integrations/integration-install-tooltip.mdx';
-
-
-
### Example: Creating a ZepVectorStore from Documents & Querying
```bash npm