🤖 OpenAI

from langchain_openai import ChatOpenAI, OpenAIEmbeddings

# Chat model backed by GPT-4 Turbo; temperature 0.7 allows moderately
# varied sampling.
llm = ChatOpenAI(
    model="gpt-4-turbo",
    temperature=0.7,
)

# Streaming output: print each token chunk as it arrives, without a
# trailing newline, so the reply renders incrementally.
# (Fix: the print call must be indented inside the for loop — the
# original was an IndentationError.)
for chunk in llm.stream("你好"):
    print(chunk.content, end="")

🎭 Anthropic (Claude)

from langchain_anthropic import ChatAnthropic

# Claude 3 Opus chat model with moderately varied sampling (temperature 0.7).
llm = ChatAnthropic(
    model="claude-3-opus-20240229",
    temperature=0.7,
)

🌟 Google (Gemini)

from langchain_google_genai import ChatGoogleGenerativeAI

# Gemini Pro chat model with moderately varied sampling (temperature 0.7).
llm = ChatGoogleGenerativeAI(
    model="gemini-pro",
    temperature=0.7,
)

🏠 本地模型 (Ollama)

from langchain_community.llms import Ollama

# Local Llama 3.1 model served through Ollama; invoke() sends a single
# prompt and returns the completion.
llm = Ollama(model="llama3.1")
result = llm.invoke("你好")

☁️ Azure OpenAI

from langchain_openai import AzureChatOpenAI

# Azure-hosted GPT-4 deployment; api_version pins the Azure OpenAI
# REST API revision used by the client.
llm = AzureChatOpenAI(
    azure_deployment="gpt-4",
    api_version="2024-02-01",
)

📋 其他提供商

| 提供商 | 包名 |
| --- | --- |
| Cohere | langchain-cohere |
| Hugging Face | langchain-huggingface |
| AWS Bedrock | langchain-aws |
| Mistral | langchain-mistralai |
| Groq | langchain-groq |

🔄 统一接口

from langchain.chat_models import init_chat_model

# Provider-agnostic factory: the same call creates a chat model for any
# backend, selected via model_provider.
llm = init_chat_model("gpt-4", model_provider="openai")
llm = init_chat_model("claude-3-opus", model_provider="anthropic")
llm = init_chat_model("gemini-pro", model_provider="google")
上一页:RAG 检索增强 | 下一页:常见问题