Which Python development frameworks support MCP?

Replies

3、fastapi-mcp

from fastapi import FastAPI
from fastapi_mcp import FastApiMCP

app = FastAPI()
FastApiMCP(app).mount()  # the MCP server is live immediately

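In practice fastapi-mcp exposes existing FastAPI routes as MCP tools, so a slightly fuller sketch looks like the following; the /files route, its operation_id, and the handler body are hypothetical examples, not from the original answer:

from fastapi import FastAPI
from fastapi_mcp import FastApiMCP

app = FastAPI()

# fastapi-mcp exposes routes as MCP tools; operation_id becomes the tool name
# (the endpoint below is a hypothetical example)
@app.get("/files", operation_id="list_files")
def list_files() -> list[str]:
    """List files in the current working directory."""
    import os
    return os.listdir(".")

# mount the MCP server onto the same app (typically served under a /mcp path)
FastApiMCP(app).mount()
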
4、langchain

import asyncio
import pathlib
import sys
import typing as t

from langchain_core.messages import AIMessage, BaseMessage, HumanMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.tools import BaseTool
# from langchain_groq import ChatGroq  # only needed if you use the Groq model commented out below
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

from langchain_mcp import MCPToolkit

async def run(tools: list[BaseTool], prompt: str) -> str:
    # Note: the original answer used Groq; any chat model with tool calling works.
    # model = ChatGroq(model_name="llama-3.1-8b-instant", stop_sequences=None)
    # This variant assumes the OPENAI_API_KEY environment variable is set.
    from langchain_openai import ChatOpenAI
    model = ChatOpenAI(model="gpt-4o-mini")  # example using an OpenAI model

    tools_map = {tool.name: tool for tool in tools}
    tools_model = model.bind_tools(tools)
    messages: list[BaseMessage] = [HumanMessage(prompt)]
    ai_message = t.cast(AIMessage, await tools_model.ainvoke(messages))
    messages.append(ai_message)
    for tool_call in ai_message.tool_calls:
        # LangChain tool names are typically lowercase
        selected_tool = tools_map[tool_call["name"].lower()]
        # Invoking a tool with the full tool_call dict returns a ToolMessage,
        # which can be appended to the conversation directly
        tool_msg = await selected_tool.ainvoke(tool_call)
        messages.append(tool_msg)

    # Call the model again to produce the final natural-language response
    return await (tools_model | StrOutputParser()).ainvoke(messages)

async def main(prompt: str) -> None:
    server_params = StdioServerParameters(
        command="npx",
        # make sure the package is installed or resolvable by npx
        args=["-y", "@modelcontextprotocol/server-filesystem", str(pathlib.Path(__file__).parent.parent)],
    )
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            toolkit = MCPToolkit(session=session)
            ...

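The example is cut off after the toolkit is created. As a minimal sketch of how the session block and entry point typically continue, assuming MCPToolkit's initialize() and get_tools() methods from langchain-mcp and a made-up default prompt:

            # ...continuing inside the ClientSession block above
            await toolkit.initialize()  # fetch tool definitions from the MCP server
            response = await run(toolkit.get_tools(), prompt)
            print(response)

if __name__ == "__main__":
    # hypothetical default prompt; pass your own as the first CLI argument
    asyncio.run(main(sys.argv[1] if len(sys.argv) > 1 else "List the files in this project"))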

1、Official MCP Python SDK

What it does: a complete implementation of the MCP protocol for quickly building MCP servers and integrating tools.

Highlights:

Includes FastMCP for quickly standing up an MCP server (e.g. weather lookup, file operations)

Provides the @mcp.tool() decorator for defining tool functions that LLMs can call

Supports stdio and SSE transports, suitable for both local and remote use

Example:

from mcp.server.fastmcp import FastMCP
mcp = FastMCP("demo")

@mcp.tool()
def get_files():
    """获取桌面文件列表"""
    import os
    return os.listdir(os.path.expanduser("~/Desktop"))

if __name__ == "__main__":
    mcp.run(transport='stdio')

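To call a server like this from the client side, the SDK also ships a stdio client that can spawn the process and invoke its tools. A minimal sketch, assuming the server above is saved as server.py (a hypothetical filename):

import asyncio
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

async def main():
    # spawn the FastMCP server above as a stdio subprocess (server.py is hypothetical)
    params = StdioServerParameters(command="python", args=["server.py"])
    async with stdio_client(params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            print([tool.name for tool in tools.tools])   # expect: ['get_files']
            result = await session.call_tool("get_files", {})
            print(result.content)

asyncio.run(main())
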
2、qwenagent

Supports RAG, function calling, and calling MCP services.

https://github.com/QwenLM/Qwen-Agent/blob/main/README_CN.md

Example:

import os
import asyncio
from typing import Optional

from qwen_agent.agents import Assistant
from qwen_agent.gui import WebUI

ROOT_RESOURCE = os.path.join(os.path.dirname(__file__), 'resource')


def init_agent_service():
    llm_cfg = {'model': 'qwen-max'}
    system = 'You are a database assistant with the ability to query the database.'
    tools = [{
        "mcpServers": {
            "sqlite": {
                "command": "uvx",
                "args": [
    ...

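The tools config above is cut off mid-list. For a complete picture, a config of this shape is normally handed to Assistant and served through WebUI; the sketch below follows Qwen-Agent's documented usage, and the sqlite server command, args, and database path are assumptions:

from qwen_agent.agents import Assistant
from qwen_agent.gui import WebUI

def init_agent_service():
    llm_cfg = {'model': 'qwen-max'}
    system = 'You are a database assistant with the ability to query the database.'
    # hypothetical completion of the mcpServers block: the reference sqlite MCP
    # server run via uvx against a local test.db
    tools = [{
        "mcpServers": {
            "sqlite": {
                "command": "uvx",
                "args": ["mcp-server-sqlite", "--db-path", "test.db"],
            }
        }
    }]
    return Assistant(llm=llm_cfg, system_message=system, function_list=tools)

if __name__ == '__main__':
    # serve the agent through Qwen-Agent's Gradio-based web UI
    WebUI(init_agent_service()).run()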
