Example: Using ChatDeepSeek to Call the Amap Maps MCP Service for Weather Queries via Natural Language

For the MCP client, see my earlier article:

Python Example: Implementing an MCP Client That Calls the Amap Maps MCP Service Weather Tool

For ChatDeepSeek, see my earlier article:

Using langchain_deepseek to Convert Natural Language into Database SQL Queries

MCP Environment and Amap Service Key Application

Note: Node.js must be version 18.20.4 or later; older versions cannot run the npx command used to start the MCP server.
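
If you want to confirm this before running the example, a small check like the sketch below (not part of the original example) verifies that npx is on the PATH and prints the Node.js version:

import shutil
import subprocess

# Quick sanity check: npx must be on PATH and Node.js must be >= 18.20.4
assert shutil.which("npx") is not None, "npx not found; install Node.js >= 18.20.4"
node_version = subprocess.run(["node", "--version"], capture_output=True, text=True).stdout.strip()
print("Node.js version:", node_version)  # e.g. v18.20.4 or newer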

DeepSeek API Key Application

DEEPSEEK_API_KEY = "sk-xxxx"
DEEPSEEK_API_BASE = "https://api.deepseek.com"

Note: after the API key is created, top up a small amount (about 10 RMB); the API cannot be called without a balance.
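
To keep the key out of source code, one option is to read it from environment variables. The variable names below simply mirror the constants shown above; this is only a sketch, not something required by langchain_deepseek:

import os

# Read the DeepSeek credentials from environment variables (names chosen for this sketch)
DEEPSEEK_API_KEY = os.environ["DEEPSEEK_API_KEY"]                            # e.g. "sk-xxxx"
DEEPSEEK_API_BASE = os.environ.get("DEEPSEEK_API_BASE", "https://api.deepseek.com")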

Complete Example Code

Note: so far, only the official DeepSeek API has been verified to return tool_calls; in my tests, the Tencent Cloud DeepSeek API did not return tool_calls.
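
If you need to check whether a particular endpoint supports tool calling before wiring up MCP, a minimal probe along these lines can help. The get_weather tool definition is a dummy created just for this test; it is not one of the Amap MCP tools:

from langchain_deepseek import ChatDeepSeek

probe_llm = ChatDeepSeek(
    api_base="https://api.deepseek.com",
    api_key="sk-xxxxx",
    model="deepseek-chat"
)
# Dummy tool definition in OpenAI function format, only used to trigger a tool call
probe_tool = {
    "type": "function",
    "function": {
        "name": "get_weather",
        "description": "Query the weather for a city",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"]
        }
    }
}
response = probe_llm.bind_tools([probe_tool]).invoke([("human", "北京今天天气怎么样?")])
print(response.tool_calls)  # non-empty on the official DeepSeek API; empty if unsupported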

Install the dependencies first:

pip install mcp
pip install langchain
pip install langchain_deepseek

import asyncio
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from langchain_deepseek import ChatDeepSeek

# MCP server configuration (Amap Maps MCP service started via npx)
server_params = StdioServerParameters(
    command="npx",
    args=["-y", "@amap/amap-maps-mcp-server"],
    env={
        "AMAP_MAPS_API_KEY": "xxxxx"  # Amap Maps API key applied for above
    }
)

# DeepSeek model configuration (official API endpoint)
llm = ChatDeepSeek(
    api_base="https://api.deepseek.com",
    api_key="sk-xxxxx",
    model="deepseek-chat"
)

# Start MCP once to fetch the tool list and bind it to the model
async def bind_llm_with_tools():
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools_list = await session.list_tools()
            print("Tool list:", [t.name for t in tools_list.tools])
            # Convert MCP tool definitions to the OpenAI function-call format expected by bind_tools
            available_tools = [
                {
                    "type": "function",
                    "function": {
                        "name": tool.name,
                        "description": tool.description,
                        "parameters": tool.inputSchema
                    }
                }
                for tool in tools_list.tools
            ]
            return llm.bind_tools(available_tools)

# Open a new session for each batch of tool calls in the conversation
async def call_tools(tool_calls):
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            for tool in tool_calls:
                name = tool["name"]
                args = tool["args"]
                print(f"Calling tool `{name}` with args: {args}")
                result = await session.call_tool(name, arguments=args)
                print(f"Tool `{name}` returned: {result}")

# Main chat loop supporting multi-turn input
async def chat_loop():
    llm_with_tools = await bind_llm_with_tools()

    messages = [("system", "You are a warm-hearted assistant, and you only speak Chinese.")]

    while True:
        user_input = input("You: ")
        if user_input.lower() in {"exit", "quit", "bye"}:
            print("Goodbye!")
            break

        messages.append(("human", user_input))
        response = llm_with_tools.invoke(messages)
        print("Model reply:", response.content)
        messages.append(("ai", response.content))

        # If the model requested tool calls, run them through MCP (results are only printed by call_tools)
        if response.tool_calls:
            await call_tools(response.tool_calls)

# Entry point
if __name__ == "__main__":
    asyncio.run(chat_loop())
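
Note that call_tools above only prints the tool output; it is not sent back to the model, so the model's reply will not contain the actual weather data. If you want the model to phrase a final answer from the tool results, one possible extension is sketched below. It reuses server_params from the code above and passes the raw results back as plain text, which is a simplification rather than the formal tool-message protocol:

# Sketch of an optional extension: run the requested tools, then let the model summarize the results
async def chat_once_with_summary(llm_with_tools, question: str):
    messages = [
        ("system", "You are a warm-hearted assistant, and you only speak Chinese."),
        ("human", question),
    ]
    response = llm_with_tools.invoke(messages)
    if response.tool_calls:
        results = []
        async with stdio_client(server_params) as (read, write):
            async with ClientSession(read, write) as session:
                await session.initialize()
                for tool in response.tool_calls:
                    result = await session.call_tool(tool["name"], arguments=tool["args"])
                    results.append(str(result))
        # Simplification: feed the raw tool output back as plain text and ask for a summary
        messages.append(("human", "Tool results:\n" + "\n".join(results)
                         + "\nPlease answer my original question based on these results."))
        response = llm_with_tools.invoke(messages)
    print("Model reply:", response.content)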
