Understanding the Lifecycle of MCP (Model Context Protocol) Through a Demo
str:
    """
    Return weather information for the given city.
    """
    weather_data = {
        "北京": "北京:晴,25°C",
        "上海": "上海:多云,27°C"
    }
    return weather_data.get(city, f"{city}:天气信息未知")

@mcp.tool(name="suggest_activity", description="根据天气描述推荐适合的活动")
async def suggest_activity(condition: str) -> str:
    """
    Suggest a suitable activity based on the weather description.
    """
    if "晴" in condition:
        return "天气晴朗,推荐你去户外散步或运动。"
    elif "多云" in condition:
        return "多云天气适合逛公园或咖啡馆。"
    elif "雨" in condition:
        return "下雨了,建议你在家阅读或看电影。"
    else:
        return "建议进行室内活动。"

async def main():
    print("✅ Starting MCP Server: http://127.0.0.1:1234")
    await mcp.run_sse_async()

if __name__ == "__main__":
    asyncio.run(main())
```

![image.png](https://oss-ata.alibaba.com/article/2025/04/9aa5d1a3-c839-4dea-82a7-61cf0d93fe7d.png)

## LLM Invocation Code

For the LLM calls I use OpenRouter, an LLM aggregation service, mainly because it makes it convenient to call and test different models, and it can be reached directly from an ordinary network connection without any extra workarounds.

The code is as follows:

```python
# llm_router.py
import json
import requests

# OpenRouter configuration
OPENROUTER_API_KEY = 'put your API key here'
OPENROUTER_API_URL = "https://openrouter.ai/api/v1/chat/completions"
# OpenRouter model ID used as the default below; fill in whichever chat model you want to test
LLM_MODEL = 'put the model ID here'

OPENROUTER_HEADERS = {
    "Authorization": f"Bearer {OPENROUTER_API_KEY}",
    "Content-Type": "application/json",
    "HTTP-Referer": "http://localhost",
    "X-Title": "MCP Demo Server"
}


class OpenRouterLLM:
\"\"\"\n 自定义 LLM 类,使用 OpenRouter API 来生成回复\n \"\"\"\n def __init__(self, model: str = LLM_MODEL):\n self.model = model\n\n def generate(self, messages):\n \"\"\"\n 发送对话消息给 OpenRouter API 并返回 LLM 的回复文本\n\n 参数:\n messages: 一个 list,每个元素都是形如 {'role': role, 'content': content} 的字典\n\n 返回:\n LLM 返回的回复文本\n \"\"\"\n request_body = {\n \"model\": self.model,\n \"messages\": messages\n }\n\n print(f\"发送请求到 OpenRouter: {json.dumps(request_body, ensure_ascii=False)}\")\n\n response = requests.post(\n OPENROUTER_API_URL,\n headers=OPENROUTER_HEADERS,\n json=request_body\n )\n\n if response.status_code != 200:\n print(f\"OpenRouter API 错误: {response.status_code}\")\n print(f\"错误详情: {response.text}\")\n raise Exception(f\"OpenRouter API 返回错误: {response.status_code}\")\n\n response_json = response.json()\n print(f\"OpenRouter API 响应: {json.dumps(response_json, ensure_ascii=False)}\")\n\n # 提取 LLM 响应文本\n try:\n content = response_json['choices']['message']['content']\n return content\n except KeyError:\n raise Exception(\"无法从 OpenRouter 响应中提取内容\")\n\n\n# 如果需要独立测试该模块,可以在此进行简单的测试\nif __name__ == \"__main__\":\n # 示例系统提示和用户输入\n messages = [\n {\"role\": \"system\", \"content\": \"你是一个智能助手,可以帮助查询天气信息。\"},\n {\"role\": \"user\", \"content\": \"请告诉我北京今天的天气情况。\"}\n ]\n\n llm = OpenRouterLLM()\n try:\n result = llm.generate(messages)\n print(\"LLM 返回结果:\")\n print(result)\n except Exception as e:\n print(f\"调用 OpenRouter 时发生异常: {e}\")\n"},["span",{"data-type":"text"},["span",{"data-type":"leaf"},"# llm_router.py\nimport json\nimport requests\n\n# OpenRouter 配置\nOPENROUTER_API_KEY = '这里写入使用的Key'\nOPENROUTER_API_URL = \"https://openrouter.ai/api/v1/chat/completions\"\n\nOPENROUTER_HEADERS = {\n \"Authorization\": f\"Bearer {OPENROUTER_API_KEY}\",\n \"Content-Type\": \"application/json\",\n \"HTTP-Referer\": \"http://localhost\",\n \"X-Title\": \"MCP Demo Server\"\n}\n\n\nclass OpenRouterLLM:\n \"\"\"\n 自定义 LLM 类,使用 OpenRouter API 来生成回复\n \"\"\"\n def __init__(self, model: str = LLM_MODEL):\n self.model = model\n\n def generate(self, messages):\n \"\"\"\n 发送对话消息给 OpenRouter API 并返回 LLM 的回复文本\n\n 参数:\n messages: 一个 list,每个元素都是形如 {'role': role, 'content': content} 的字典\n\n 返回:\n LLM 返回的回复文本\n \"\"\"\n request_body = {\n \"model\": self.model,\n \"messages\": messages\n }\n\n print(f\"发送请求到 OpenRouter: {json.dumps(request_body, ensure_ascii=False)}\")\n\n response = requests.post(\n OPENROUTER_API_URL,\n headers=OPENROUTER_HEADERS,\n json=request_body\n )\n\n if response.status_code != 200:\n print(f\"OpenRouter API 错误: {response.status_code}\")\n print(f\"错误详情: {response.text}\")\n raise Exception(f\"OpenRouter API 返回错误: {response.status_code}\")\n\n response_json = response.json()\n print(f\"OpenRouter API 响应: {json.dumps(response_json, ensure_ascii=False)}\")\n\n # 提取 LLM 响应文本\n try:\n content = response_json['choices']['message']['content']\n return content\n except KeyError:\n raise Exception(\"无法从 OpenRouter 响应中提取内容\")\n\n\n# 如果需要独立测试该模块,可以在此进行简单的测试\nif __name__ == \"__main__\":\n # 示例系统提示和用户输入\n messages = [\n {\"role\": \"system\", \"content\": \"你是一个智能助手,可以帮助查询天气信息。\"},\n {\"role\": \"user\", \"content\": \"请告诉我北京今天的天气情况。\"}\n ]\n\n llm = OpenRouterLLM()\n try:\n result = llm.generate(messages)\n print(\"LLM 返回结果:\")\n print(result)\n except Exception as e:\n print(f\"调用 OpenRouter 时发生异常: 
{e}\")\n"]]],["p",{"uuid":"m9gj3eqam83b3a4mmqg"},["span",{"data-type":"text"},["span",{"data-type":"leaf"},""]]],["h2",{"uuid":"m9gj63omo60niumykm","spacing":{"before":21.333333333333332,"after":9,"line":0.8529411764705882}},["span",{"data-type":"text"},["span",{"bold":true,"sz":16,"szUnit":"pt","data-type":"leaf"},"MCP Client"]]],["p",{"uuid":"m9gnds2kda39p1azkr9","ind":{"firstLine":32}},["span",{"data-type":"text"},["span",{"data-type":"leaf"},"这里的MCP Client,使用Server-Side Event(SSE)方式进行连接(题外话,MCP协议使用SSE协议作为默认远程协议稍微有点奇怪,听说后续迭代会考虑HTTP Streaming以及JSONRPC over HTTP2的方式)。"]]],["p",{"uuid":"m9gnp8yw4kyzv1cwung","ind":{"firstLine":32}},["span",{"data-type":"text"},["span",{"data-type":"leaf"},"这里我们在main测试代码中,尝试列出所有可用的Tool与Resource,并尝试调用Tool,结果如图,可以看到能够展示出MCP Server中定义的Tool。"]]],["code",{"syntax":"python","theme":"default","wrap":true,"id":"akhang","title":"","fold":false,"hideHeader":false,"codeFolding":false,"showLineNumber":true,"font":{"fontFamily":"defaultFont","ligatures":false},"enableMacHeader":false,"uuid":"m9gj6it14ni61mp9b96","code":"# mcp_client_demo.py\nimport asyncio\nfrom mcp.client.session import ClientSession\nfrom mcp.client.sse import sse_client\n\nclass WeatherMCPClient:\n def __init__(self, server_url=\"http://127.0.0.1:1234/sse\"):\n self.server_url = server_url\n self._sse_context = None\n self._session = None\n\n async def __aenter__(self):\n # 创建 SSE 通道\n self._sse_context = sse_client(self.server_url)\n self.read, self.write = await self._sse_context.__aenter__()\n\n # 创建 MCP 会话\n self._session = ClientSession(self.read, self.write)\n await self._session.__aenter__()\n await self._session.initialize()\n\n return self\n\n async def __aexit__(self, exc_type, exc_val, exc_tb):\n if self._session:\n await self._session.__aexit__(exc_type, exc_val, exc_tb)\n if self._sse_context:\n await self._sse_context.__aexit__(exc_type, exc_val, exc_tb)\n\n async def list_tools(self):\n return await self._session.list_tools()\n\n async def list_resources(self):\n return await self._session.list_resources()\n\n async def call_tool(self, name, arguments):\n return await self._session.call_tool(name, arguments)\n\n\nasync def main():\n async with WeatherMCPClient() as client:\n print(\"✅ 成功连接 MCP Server\")\n\n tools = await client.list_tools()\n\n print(\"\\n来源:程序园用户自行投稿发布,如果侵权,请联系站长删除