Open-Source MCP Client Repository (MCP-Use)
🌐 MCP-Use is an open-source tool that connects **large language models (LLMs)** to any MCP server, letting developers build custom agents with access to tools such as web browsing and file operations, without relying on closed-source or proprietary clients.
💡 It makes it easy to hook any LLM up to web browsing, file operations, and other tools.
Key Features
Quick Start
Install:
pip install mcp-use
Or install from source:
git clone https://github.com/pietrozullo/mcp-use.git
cd mcp-use
pip install -e .
Installing a LangChain Provider
Install the LangChain provider that matches the LLM you plan to use. For example:
# For OpenAI
pip install langchain-openai
# For Anthropic
pip install langchain-anthropic
Then add your API keys to a .env file:
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
Note: only models with tool-calling capabilities are supported.
Starting an Agent
import asyncio
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from mcp_use import MCPAgent, MCPClient

async def main():
    load_dotenv()
    config = {
        "mcpServers": {
            "playwright": {
                "command": "npx",
                "args": ["@playwright/mcp@latest"],
                "env": {"DISPLAY": ":1"}
            }
        }
    }
    client = MCPClient.from_dict(config)
    llm = ChatOpenAI(model="gpt-4o")
    agent = MCPAgent(llm=llm, client=client, max_steps=30)
    result = await agent.run("Find the best restaurant in San Francisco")
    print(f"\nResult: {result}")

if __name__ == "__main__":
    asyncio.run(main())
A client can also be created from a configuration file:
client = MCPClient.from_config_file("browser_mcp.json")
Example configuration file (browser_mcp.json):
{
  "mcpServers": {
    "playwright": {
      "command": "npx",
      "args": ["@playwright/mcp@latest"],
      "env": {"DISPLAY": ":1"}
    }
  }
}
Usage Examples
Web Browsing (with Playwright)
# Same as the quick-start example above
import asyncio
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from mcp_use import MCPAgent, MCPClient

async def main():
    # Load environment variables
    load_dotenv()

    # Create MCPClient from config file
    client = MCPClient.from_config_file(
        os.path.join(os.path.dirname(__file__), "browser_mcp.json")
    )

    # Create LLM
    llm = ChatOpenAI(model="gpt-4o")
    # Alternative models:
    # llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")
    # llm = ChatGroq(model="llama3-8b-8192")

    # Create agent with the client
    agent = MCPAgent(llm=llm, client=client, max_steps=30)

    # Run the query
    result = await agent.run(
        "Find the best restaurant in San Francisco USING GOOGLE SEARCH",
        max_steps=30,
    )
    print(f"\nResult: {result}")

if __name__ == "__main__":
    asyncio.run(main())
Airbnb Search
# Airbnb search example
import asyncio
import os
from dotenv import load_dotenv
from langchain_anthropic import ChatAnthropic
from mcp_use import MCPAgent, MCPClient

async def run_airbnb_example():
    # Load environment variables
    load_dotenv()

    # Create MCPClient with Airbnb configuration
    client = MCPClient.from_config_file(
        os.path.join(os.path.dirname(__file__), "airbnb_mcp.json")
    )

    # Create LLM - you can choose between different models
    llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")

    # Create agent with the client
    agent = MCPAgent(llm=llm, client=client, max_steps=30)

    try:
        # Run a query to search for accommodations
        result = await agent.run(
            "Find me a nice place to stay in Barcelona for 2 adults "
            "for a week in August. I prefer places with a pool and "
            "good reviews. Show me the top 3 options.",
            max_steps=30,
        )
        print(f"\nResult: {result}")
    finally:
        # Ensure we clean up resources properly
        if client.sessions:
            await client.close_all_sessions()

if __name__ == "__main__":
    asyncio.run(run_airbnb_example())
Example configuration file (airbnb_mcp.json):
{
  "mcpServers": {
    "airbnb": {
      "command": "npx",
      "args": ["-y", "@openbnb/mcp-server-airbnb"]
    }
  }
}
Blender 3D Creation
# Blender 3D creation example
import asyncio
from dotenv import load_dotenv
from langchain_anthropic import ChatAnthropic
from mcp_use import MCPAgent, MCPClient

async def run_blender_example():
    # Load environment variables
    load_dotenv()

    # Create MCPClient with Blender MCP configuration
    config = {"mcpServers": {"blender": {"command": "uvx", "args": ["blender-mcp"]}}}
    client = MCPClient.from_dict(config)

    # Create LLM
    llm = ChatAnthropic(model="claude-3-5-sonnet-20240620")

    # Create agent with the client
    agent = MCPAgent(llm=llm, client=client, max_steps=30)

    try:
        # Run the query
        result = await agent.run(
            "Create an inflatable cube with soft material and a plane as ground.",
            max_steps=30,
        )
        print(f"\nResult: {result}")
    finally:
        # Ensure we clean up resources properly
        if client.sessions:
            await client.close_all_sessions()

if __name__ == "__main__":
    asyncio.run(run_blender_example())
Configuration File Support
# Initialize an MCP session from a configuration file
import asyncio
from mcp_use import create_session_from_config

async def main():
    # Create an MCP session from a config file
    session = create_session_from_config("mcp-config.json")

    # Initialize the session
    await session.initialize()

    # Use the session...

    # Disconnect when done
    await session.disconnect()

if __name__ == "__main__":
    asyncio.run(main())
HTTP Connection Example
Connecting to MCP servers running over an HTTP port is also supported:
# HTTP connection example
import asyncio
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from mcp_use import MCPAgent, MCPClient

async def main():
    """Run the example using a configuration dictionary."""
    # Load environment variables
    load_dotenv()

    config = {
        "mcpServers": {
            "http": {
                "url": "http://localhost:8931/sse"
            }
        }
    }

    # Create MCPClient from the config dictionary
    client = MCPClient.from_dict(config)

    # Create LLM
    llm = ChatOpenAI(model="gpt-4o")

    # Create agent with the client
    agent = MCPAgent(llm=llm, client=client, max_steps=30)

    # Run the query
    result = await agent.run(
        "Find the best restaurant in San Francisco USING GOOGLE SEARCH",
        max_steps=30,
    )
    print(f"\nResult: {result}")

if __name__ == "__main__":
    asyncio.run(main())
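For this example to work, an MCP server must already be listening at the configured URL before the agent connects. With Playwright MCP, that typically means starting it in HTTP/SSE mode on the matching port; the exact flag below is an assumption based on the Playwright MCP CLI and may differ between versions:
# Start the Playwright MCP server over HTTP (assumed --port flag)
npx @playwright/mcp@latest --port 8931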
Multi-Server Support
Multiple MCP servers can be used at the same time:
{
  "mcpServers": {
    "airbnb": {"command": "npx", "args": ["-y", "@openbnb/mcp-server-airbnb"]},
    "playwright": {"command": "npx", "args": ["@playwright/mcp@latest"], "env": {"DISPLAY": ":1"}}
  }
}
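With such a configuration, the same agent pattern from the examples above applies: one client exposes both servers and the agent can draw on tools from either. A minimal sketch, assuming the multi-server config is saved as multi_server_mcp.json (the file name and the query are illustrative, not from the repository docs):
import asyncio
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from mcp_use import MCPAgent, MCPClient

async def run_multi_server_example():
    # Load API keys from .env
    load_dotenv()

    # One client spanning both the airbnb and playwright servers
    client = MCPClient.from_config_file(
        os.path.join(os.path.dirname(__file__), "multi_server_mcp.json")
    )

    llm = ChatOpenAI(model="gpt-4o")
    agent = MCPAgent(llm=llm, client=client, max_steps=30)

    try:
        # A task that can use tools from either server
        result = await agent.run(
            "Search for a well-reviewed apartment in Barcelona on Airbnb, "
            "then open its listing page in the browser."
        )
        print(f"\nResult: {result}")
    finally:
        # Clean up all server sessions
        if client.sessions:
            await client.close_all_sessions()

if __name__ == "__main__":
    asyncio.run(run_multi_server_example())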
Tool Access Control
Restrict which tools the agent can access to improve security:
agent = MCPAgent(
    llm=ChatOpenAI(model="gpt-4"),
    client=client,
    disallowed_tools=["file_system", "network"]
)
Roadmap
• Multi-server support
• Remote connection testing (HTTP, WS)
• More features in active development
Contributing
Issues and feature requests are welcome.
Requirements
• Python 3.11+
• An MCP implementation (such as Playwright MCP)
• LangChain and the provider library for your chosen model
Citation
If you use MCP-Use in research or a project, please cite:
@software{mcp_use2025,
  author = {Zullo, Pietro},
  title = {MCP-Use: MCP Library for Python},
  year = {2025},
  publisher = {GitHub},
  url = {https://github.com/pietrozullo/mcp-use}
}
Licensed under the MIT License.
Source: https://mp.weixin.qq.com/s/OO0dk4-N_3dx4uXc7cEwVA