Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 15 additions & 4 deletions .env.example
Original file line number Diff line number Diff line change
@@ -1,4 +1,15 @@
LLM_MODEL_TYPE = "deepseek"
LLM_API_URL = "https://api.deepseek.com"
LLM_API_KEY = "sk-xxxxxxxxxxxxxxxxxx"
LLM_MODEL_NAME = "deepseek-chat"
# --- Anthropic (for anthropic_mcp_client.py - Claude native tool calling) ---
# Option 1: set the key directly
ANTHROPIC_API_KEY=sk-ant-xxxxxxxxxxxxxxxx
# Option 2: reference a shell env var (dotenv expands ${VAR} from environment)
# ANTHROPIC_API_KEY=${ANTHROPIC_KEY}
LLM_MODEL_NAME=claude-sonnet-4-20250514

# --- OpenAI-compatible (for simple_mcp_client.py / mcp_chatbot) ---
LLM_MODEL_TYPE=openai
LLM_API_URL=https://api.openai.com/v1
LLM_API_KEY=sk-xxxxxxxxxxxxxxxx
# LLM_MODEL_TYPE=deepseek
# LLM_API_URL=https://api.deepseek.com
# LLM_API_KEY=sk-xxxxxxxxxxxxxxxxxx
# LLM_MODEL_NAME=deepseek-chat
13 changes: 13 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -21,3 +21,16 @@ bak/*
simple_mcp_client_stream.py
mcp_chatbot_termial.py
mcp_chatbot_stream.py

# Script/terminal recording artifacts
typescript
# NOTE(review): the literal "^C" below looks like a pasted interrupt artifact
# rather than an intended ignore pattern — confirm a file by that name exists.
^C

# Data files (user-provided, not committed)
data/*.json
!data/.gitkeep

# IDE
.idea/
.vscode/
*.swp
226 changes: 226 additions & 0 deletions anthropic_mcp_client.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,226 @@
#!/usr/bin/env python3
"""MCP Client using Anthropic Claude for tool calling.

Usage:
    # Direct server script (.py run with python, .js run with node):
    python anthropic_mcp_client.py services/time_service.py

    # Via config file — the server id must exist in the config's
    # "mcpServers" map (e.g. "time" in config/mcp_config.json):
    python anthropic_mcp_client.py time config/mcp_config.json

Requires ANTHROPIC_API_KEY in .env or environment.
"""

import asyncio
import json
import os
import sys
from contextlib import AsyncExitStack
from typing import Any, Dict, List, Optional

import anthropic
from dotenv import load_dotenv
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from pathlib import Path

# Load .env from the script's own directory (project root): python-dotenv
# resolves relative paths against the CWD, which may differ when the script
# is launched from elsewhere.
_PROJECT_ROOT = Path(__file__).resolve().parent
load_dotenv(_PROJECT_ROOT / ".env")


class AnthropicMCPClient:
    """MCP client that lets Anthropic Claude call tools from an MCP server.

    Lifecycle: ``parse_arguments()`` -> ``connect_to_server()`` ->
    ``chat_loop()`` / ``process_query()`` -> ``cleanup()``. The stdio
    transport and the MCP session are owned by an ``AsyncExitStack`` so
    teardown happens in reverse order of setup.
    """

    def __init__(
        self,
        api_key: str,
        model_name: str = "claude-sonnet-4-20250514",
        max_tokens: int = 4096,
    ):
        """Create a client (no connection is made until connect_to_server).

        Args:
            api_key: Anthropic API key.
            model_name: Claude model identifier.
            max_tokens: Per-response token cap passed to the Messages API.
        """
        self.model_name = model_name
        self.max_tokens = max_tokens
        self.session: Optional[ClientSession] = None
        self.exit_stack = AsyncExitStack()
        # Serializes cleanup so a Ctrl-C racing normal shutdown cannot
        # close the exit stack twice.
        self._cleanup_lock = asyncio.Lock()

        # available_tools is part of the public attribute surface; kept for
        # backward compatibility even though only anthropic_tools is used.
        self.available_tools: List[Dict[str, Any]] = []
        # Tool specs in Anthropic's format: name / description / input_schema.
        self.anthropic_tools: List[Dict[str, Any]] = []

        self.client = anthropic.AsyncAnthropic(api_key=api_key)

    @staticmethod
    def parse_arguments(args: List[str]) -> StdioServerParameters:
        """Parse CLI args into StdioServerParameters.

        One argument: path to a ``.py``/``.js`` server script (run with
        python/node respectively). Two arguments: a server id plus a JSON
        config file containing an ``mcpServers`` map.

        Raises:
            ValueError: on bad argument count, unsupported script extension,
                or a server id missing from the config.
        """
        if len(args) == 1:
            server_script = args[0]
            if not server_script.endswith((".py", ".js")):
                raise ValueError("Server script must be .py or .js")
            command = "python" if server_script.endswith(".py") else "node"
            return StdioServerParameters(command=command, args=[server_script], env=None)
        elif len(args) == 2:
            server_id, config_path = args
            with open(config_path) as f:
                config = json.load(f)
            srv = config.get("mcpServers", {}).get(server_id)
            if not srv:
                raise ValueError(f"Server '{server_id}' not found in config")
            return StdioServerParameters(
                command=srv["command"], args=srv["args"], env=None
            )
        else:
            raise ValueError(
                "Usage: python anthropic_mcp_client.py <script.py>\n"
                " or: python anthropic_mcp_client.py <server_id> <config.json>"
            )

    async def connect_to_server(self, server_params: StdioServerParameters):
        """Connect to an MCP server via stdio and discover its tools."""
        transport = await self.exit_stack.enter_async_context(
            stdio_client(server_params)
        )
        reader, writer = transport
        self.session = await self.exit_stack.enter_async_context(
            ClientSession(reader, writer)
        )
        await self.session.initialize()

        # Discover tools and translate each into Anthropic's tool-spec format.
        tools_resp = await self.session.list_tools()
        for tool in tools_resp.tools:
            self.anthropic_tools.append(
                {
                    "name": tool.name,
                    "description": tool.description or "",
                    "input_schema": tool.inputSchema,
                }
            )

        tool_names = [t["name"] for t in self.anthropic_tools]
        print(f"[SYS] Connected. Available tools: {tool_names}")

    async def process_query(self, query: str) -> str:
        """Send query to Claude, handle tool calls, return final response.

        Loops: each time Claude stops with ``tool_use``, every requested tool
        is executed via the MCP session and the results are fed back, until
        Claude produces a plain text answer.

        Raises:
            RuntimeError: if called before connect_to_server().
        """
        # Fail with a clear message instead of an AttributeError on None.
        if self.session is None:
            raise RuntimeError("Not connected: call connect_to_server() first")

        messages: List[Dict[str, Any]] = [{"role": "user", "content": query}]

        while True:
            resp = await self.client.messages.create(
                model=self.model_name,
                max_tokens=self.max_tokens,
                tools=self.anthropic_tools,
                messages=messages,
            )

            # Check if Claude wants to use tools
            if resp.stop_reason == "tool_use":
                # Echo back all content blocks (text + tool_use) as the
                # assistant turn — the API requires the full turn replayed.
                assistant_content = []
                for block in resp.content:
                    if block.type == "text":
                        assistant_content.append(
                            {"type": "text", "text": block.text}
                        )
                    elif block.type == "tool_use":
                        assistant_content.append(
                            {
                                "type": "tool_use",
                                "id": block.id,
                                "name": block.name,
                                "input": block.input,
                            }
                        )

                messages.append({"role": "assistant", "content": assistant_content})

                # Execute each tool call via the MCP session.
                tool_results = []
                for block in resp.content:
                    if block.type == "tool_use":
                        print(f"[TOOL] Calling {block.name}({block.input})")
                        result = await self.session.call_tool(block.name, block.input)
                        # First content item may not be a text block (e.g.
                        # image); fall back to its string form rather than
                        # crashing on a missing .text attribute.
                        if result.content:
                            first = result.content[0]
                            result_text = getattr(first, "text", str(first))
                        else:
                            result_text = "No result"
                        print(f"[TOOL] {block.name} -> {result_text[:200]}")
                        tool_results.append(
                            {
                                "type": "tool_result",
                                "tool_use_id": block.id,
                                "content": result_text,
                            }
                        )

                messages.append({"role": "user", "content": tool_results})
                continue  # Let Claude process the tool results

            # No more tool calls — extract final text
            text_parts = [b.text for b in resp.content if b.type == "text"]
            return "\n".join(text_parts) if text_parts else "(no response)"

    async def chat_loop(self):
        """Interactive REPL; blocking input() runs in the default executor."""
        print("[SYS] MCP Client ready. Type your query (or 'quit' to exit).\n")
        # get_event_loop() inside a coroutine is deprecated since Python
        # 3.10; get_running_loop() is the supported way to reach the loop.
        loop = asyncio.get_running_loop()

        while True:
            try:
                query = await loop.run_in_executor(
                    None, lambda: input("[YOU]: ").strip()
                )
                if not query:
                    continue
                if query.lower() in ("quit", "exit", "q"):
                    break

                response = await self.process_query(query)
                print(f"\n[Claude]: {response}\n")

            except (KeyboardInterrupt, EOFError):
                print("\n[SYS] Shutting down...")
                break
            except Exception as e:
                print(f"\n[ERR] {e}\n")

    async def cleanup(self):
        """Close transport/session; safe to call more than once."""
        async with self._cleanup_lock:
            try:
                await self.exit_stack.aclose()
                self.session = None
            except Exception as e:
                # Best-effort shutdown: report but never raise during cleanup.
                print(f"[ERR] Cleanup error: {e}")


async def main():
    """CLI entry point: parse args, read env config, run the chat REPL."""
    # Argument parsing happens first so usage errors exit before any I/O.
    try:
        server_params = AnthropicMCPClient.parse_arguments(sys.argv[1:])
    except ValueError as err:
        print(f"[ERR] {err}")
        sys.exit(1)

    # Accept either env var name for the key; model falls back to a default.
    api_key = os.getenv("ANTHROPIC_API_KEY") or os.getenv("ANTHROPIC_KEY") or ""
    model = os.getenv("LLM_MODEL_NAME", "claude-sonnet-4-20250514")

    if not api_key:
        print("[ERR] ANTHROPIC_API_KEY or ANTHROPIC_KEY not set. Add to .env or environment.")
        sys.exit(1)

    print(f"[SYS] Model: {model}")

    client = AnthropicMCPClient(api_key=api_key, model_name=model)

    # cleanup() must run no matter how the session ends.
    try:
        await client.connect_to_server(server_params)
        await client.chat_loop()
    except Exception as err:
        print(f"\n[ERR] {err}")
    finally:
        await client.cleanup()


# Script entry point: run the async main() on a fresh event loop.
if __name__ == "__main__":
    asyncio.run(main())
20 changes: 20 additions & 0 deletions config/mcp_config.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
{
"mcpServers": {
"time": {
"command": "python",
"args": ["services/time_service.py"]
},
"weather": {
"command": "python",
"args": ["services/weather_service_us.py"]
},
"calculator": {
"command": "python",
"args": ["services/calculator_service.py"]
},
"devops": {
"command": "python",
"args": ["services/devops_knowledge_service.py"]
}
}
}
44 changes: 34 additions & 10 deletions config/server_config.json
Original file line number Diff line number Diff line change
@@ -1,30 +1,54 @@
{
"mcpServers": {
"get_current_time": {
"name": "时间",
"name": "Time",
"type": "stdio",
"description": "获取时间",
"description": "Get current time in any timezone",
"command": "uv",
"args": [
"--directory",
"E:/04Code/llm/tiny-mcp/services",
"./services",
"run",
"time_service.py"
]
},
"get_weather": {
"name": "天气",
"get_weather_us": {
"name": "Weather US",
"type": "stdio",
"description": "获取国内天气",
"description": "Get US weather forecasts and alerts (NWS API, no key needed)",
"command": "uv",
"args": [
"--directory",
"E:/04Code/llm/tiny-mcp/services",
"./services",
"run",
"weather_service_zh.py"
"weather_service_us.py"
]
},
"calculator": {
"name": "Calculator",
"type": "stdio",
"description": "Math calculations and unit conversions",
"command": "uv",
"args": [
"--directory",
"./services",
"run",
"calculator_service.py"
]
},
"devops_knowledge": {
"name": "DevOps & LLM Knowledge",
"type": "stdio",
"description": "DevOps/SRE and LLM interview Q&A search",
"command": "uv",
"args": [
"--directory",
".",
"run",
"services/devops_knowledge_service.py"
]
}
},
"defaultServer": "get_current_time",
"system": "自定义系统提示词"
}
"system": "You are a helpful assistant with access to tools for time, weather, and math."
}
Empty file added data/.gitkeep
Empty file.
9 changes: 8 additions & 1 deletion mcp_chatbot/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,11 @@
from .chat.chat_session import ChatSession
from .llm.llm_service import LLMService
from .mcp.mcp_client import MCPClient
from .mcp.mcp_tool import MCPTool
from .mcp.mcp_tool import MCPTool

# PEP 562 module-level __getattr__: defer importing ``main`` until first
# access, avoiding a circular dependency with mcp_chatbot_main.py.
def __getattr__(name: str):
    """Lazily resolve the ``main`` attribute; reject any other name."""
    if name != "main":
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
    from mcp_chatbot_main import main
    return main
12 changes: 12 additions & 0 deletions mcp_chatbot/__main__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
"""Allow running as: python -m mcp_chatbot"""
import asyncio

from mcp_chatbot_main import main

if __name__ == "__main__":
import platform
if platform.system().lower() == "windows":
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
else:
asyncio.run(main())
Empty file added mcp_chatbot/chat/__init__.py
Empty file.
4 changes: 2 additions & 2 deletions mcp_chatbot/chat/chat_session.py
Original file line number Diff line number Diff line change
Expand Up @@ -330,10 +330,10 @@ async def get_llm_response_stream_with_tool_call(
tool_calls = []
for idx, tool_call_data in enumerate(tool_call_data_list):
tool_name = tool_call_data["tool"]
argments = tool_call_data["arguments"]
arguments = tool_call_data["arguments"]

yield ("tool_call", tool_name)
yield ("tool_arguments", json.dumps(argments))
yield ("tool_arguments", json.dumps(arguments))

# 执行工具调用
yield ("tool_execution", f"Executing tool {tool_name} ...")
Expand Down
Empty file added mcp_chatbot/config/__init__.py
Empty file.
Loading