+ {[
+ {
+ name: "AI21",
+ description: "You can get started with AI21Labs' Jurassic family of models, as well as their task-specific models.",
+ provider: "AI21 Labs",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "AlephAlpha",
+ description: "LangChain.js supports AlephAlpha's Luminous family of models. You'll need an AlephAlpha API key.",
+ provider: "Aleph Alpha",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "Arcjet Redact",
+ description: "The Arcjet redact integration allows you to redact sensitive information from text.",
+ provider: "Arcjet",
+ type: "Text Processing",
+ status: "Beta"
+ },
+ {
+ name: "AWS SageMakerEndpoint",
+ description: "LangChain.js supports integration with AWS SageMaker-hosted endpoints for custom models.",
+ provider: "AWS",
+ type: "Custom Models",
+ status: "Active"
+ },
+ {
+ name: "Azure OpenAI",
+ description: "Azure OpenAI provides access to OpenAI models through Microsoft's Azure cloud platform.",
+ provider: "Microsoft",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "Bedrock",
+ description: "Amazon Bedrock is a fully managed service that makes foundation models from AI21, Anthropic, and Amazon accessible via API.",
+ provider: "AWS",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "ChromeAI",
+ description: "This feature is experimental and is subject to change. Provides browser-based AI capabilities.",
+ provider: "Google",
+ type: "Experimental",
+ status: "Preview"
+ },
+ {
+ name: "Cloudflare Workers AI",
+ description: "This will help you get started with Cloudflare Workers AI text generation models.",
+ provider: "Cloudflare",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "Cohere",
+ description: "This will help you get started with Cohere completion models (LLMs) and embedding models.",
+ provider: "Cohere",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "Deep Infra",
+ description: "LangChain supports LLMs hosted by Deep Infra through the DeepInfra wrapper.",
+ provider: "Deep Infra",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "Fireworks",
+ description: "Fireworks AI is an AI inference platform to run open-source models at scale.",
+ provider: "Fireworks",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "Friendli",
+ description: "Friendli enhances AI application performance and optimizes cost savings for LLM inference.",
+ provider: "Friendli",
+ type: "Optimization",
+ status: "Beta"
+ },
+ {
+ name: "Google Vertex AI",
+ description: "Google Vertex is a service that provides access to Google's foundation models.",
+ provider: "Google",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "HuggingFaceInference",
+ description: "Here's an example of calling a HuggingFaceInference model as an LLM.",
+ provider: "Hugging Face",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "IBM watsonx.ai",
+ description: "This will help you get started with IBM text completion models on watsonx.ai.",
+ provider: "IBM",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "Llama CPP",
+ description: "Only available on Node.js. Provides access to locally run Llama models.",
+ provider: "Meta",
+ type: "Local Inference",
+ status: "Active"
+ },
+ {
+ name: "MistralAI",
+ description: "Mistral AI is a platform that offers hosting for their open-weight models.",
+ provider: "Mistral",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "Ollama",
+ description: "This will help you get started with Ollama text completion models running locally.",
+ provider: "Ollama",
+ type: "Local Inference",
+ status: "Active"
+ },
+ {
+ name: "Replicate",
+ description: "Here's an example of calling a Replicate model as an LLM.",
+ provider: "Replicate",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "Together AI",
+ description: "You are currently on a page documenting the use of Together AI models with LangChain.",
+ provider: "Together AI",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "Writer",
+ description: "LangChain.js supports calling Writer LLMs for content generation.",
+ provider: "Writer",
+ type: "Text Generation",
+ status: "Active"
+ },
+ {
+ name: "YandexGPT",
+ description: "LangChain.js supports calling YandexGPT LLMs. Also supports qwen and deepseek models.",
+ provider: "Yandex",
+ type: "Text Generation",
+ status: "Active"
+ }
+ ].map((model, index) => (
+   <div key={index}>
+     <div>
+       <h3>{model.name}</h3>
+       <span>{model.status}</span>
+     </div>
+     <p>{model.description}</p>
+     <div>
+       <span>{model.provider}</span>
+       <span>{model.type}</span>
+     </div>
+   </div>
+ ))}
+
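For context, here's a minimal sketch of how one of the listed integrations is typically called from LangChain.js. It assumes the `@langchain/ollama` package is installed and an Ollama server is running locally with a pulled model; the model name is illustrative and not part of this change.

```ts
// Minimal sketch: calling the Ollama integration listed above via LangChain.js.
// Assumptions: `@langchain/ollama` is installed and an Ollama server is running
// locally with the "llama3" model pulled.
import { Ollama } from "@langchain/ollama";

async function main() {
  const llm = new Ollama({
    model: "llama3",                   // any locally pulled model name works
    baseUrl: "http://localhost:11434", // default Ollama endpoint
  });

  // LLM integrations share the same `invoke` interface regardless of provider.
  const completion = await llm.invoke("Say hello in one short sentence.");
  console.log(completion);
}

main().catch(console.error);
```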