Skip to content

Commit 8cf50a6

Browse files
committed
更新示例 README.md,添加天气服务器和客户端的详细信息;新增 MCP 兼容的天气服务器和客户端示例
1 parent 3095ce3 commit 8cf50a6

5 files changed

Lines changed: 388 additions & 2 deletions

File tree

examples/README.md

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,19 @@ to new users.
55

66
## Minimal
77

8-
- `zcp_weather_server.py`: smallest native ZCP server
9-
- `run_zcp_mcp_stdio_server.py`: smallest MCP-compatible stdio server
8+
- `weather_zcp_server.py`: smallest native ZCP weather server (`/zcp`)
9+
- `weather_zcp_client.py`: matching native ZCP client using a prompt-driven OpenAI-compatible tool loop
10+
- `weather_mcp_server.py`: smallest MCP-compatible stdio weather server
11+
- `weather_mcp_client.py`: matching official MCP client using a prompt-driven OpenAI-compatible tool loop
12+
13+
If you want to run the prompt-driven client examples, install the optional dependencies first:
14+
15+
```bash
16+
pip install "zero-context-protocol-sdk[mcp,openai]"
17+
```
18+
19+
The client examples expect one of `OPENAI_API_KEY` or `DEEPSEEK_API_KEY`, plus optional
20+
`OPENAI_BASE_URL` and `OPENAI_MODEL`.
1021

1122
## Production-shaped
1223

examples/weather_mcp_client.py

Lines changed: 123 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,123 @@
1+
#!/usr/bin/env python3
2+
"""Minimal prompt-driven official MCP client for the weather server example."""
3+
4+
from __future__ import annotations
5+
6+
import asyncio
7+
import json
8+
import os
9+
import sys
10+
from typing import Any
11+
from pathlib import Path
12+
13+
ROOT = Path(__file__).resolve().parents[1]
14+
if str(ROOT) not in sys.path:
15+
sys.path.insert(0, str(ROOT))
16+
17+
SERVER_SCRIPT = ROOT / "examples" / "weather_mcp_server.py"
18+
19+
try:
20+
from mcp.client.session import ClientSession
21+
from mcp.client.stdio import StdioServerParameters, stdio_client
22+
except ModuleNotFoundError as exc: # pragma: no cover
23+
raise RuntimeError("Install the optional MCP dependency first: pip install 'zero-context-protocol-sdk[mcp]'") from exc
24+
25+
try:
26+
from openai import OpenAI
27+
except ModuleNotFoundError as exc: # pragma: no cover
28+
raise RuntimeError("Install the optional OpenAI dependency first: pip install 'zero-context-protocol-sdk[openai]'") from exc
29+
30+
31+
def _model_client() -> OpenAI:
    """Build an OpenAI-compatible chat client from environment variables.

    Accepts either ``OPENAI_API_KEY`` or ``DEEPSEEK_API_KEY`` (checked in
    that order) and defaults the endpoint to DeepSeek unless
    ``OPENAI_BASE_URL`` overrides it.

    Raises:
        RuntimeError: if neither API-key variable is set.
    """
    key = os.environ.get("OPENAI_API_KEY")
    if not key:
        key = os.environ.get("DEEPSEEK_API_KEY")
    if not key:
        raise RuntimeError("Set OPENAI_API_KEY or DEEPSEEK_API_KEY before running this example.")
    endpoint = os.environ.get("OPENAI_BASE_URL", "https://api.deepseek.com")
    return OpenAI(api_key=key, base_url=endpoint)
39+
40+
41+
def _tool_spec(tool: Any) -> dict[str, Any]:
42+
return {
43+
"type": "function",
44+
"function": {
45+
"name": tool.name,
46+
"description": tool.description or "",
47+
"parameters": tool.inputSchema,
48+
"strict": True,
49+
},
50+
}
51+
52+
53+
async def run(query: str) -> dict[str, object]:
    """Answer *query* with an LLM tool loop backed by the stdio MCP weather server.

    Spawns the example server as a subprocess over stdio, advertises its
    tools to an OpenAI-compatible chat model, and relays tool calls back to
    the server until the model stops requesting tools (max 4 rounds).

    Args:
        query: Natural-language user question, typically about the weather.

    Returns:
        Dict with the advertised tool names, a log of every tool invocation
        (name, arguments, result payload), and the model's final answer —
        or a fallback message if the round limit is exhausted.
    """
    # Run the server with the current interpreter so the subprocess uses the
    # same Python environment as this client.
    async with stdio_client(
        StdioServerParameters(
            command=sys.executable,
            args=[str(SERVER_SCRIPT)],
            cwd=str(ROOT),
        )
    ) as (read_stream, write_stream):
        async with ClientSession(read_stream, write_stream) as session:
            # MCP handshake must complete before any tool listing/calls.
            await session.initialize()
            tools = await session.list_tools()
            llm = _model_client()
            model = os.environ.get("OPENAI_MODEL", "deepseek-chat")
            messages: list[dict[str, Any]] = [
                {
                    "role": "system",
                    "content": "You are a helpful weather assistant. Use tools when the user asks about weather. Keep the final answer concise.",
                },
                {
                    "role": "user",
                    "content": query,
                },
            ]
            tool_specs = [_tool_spec(tool) for tool in tools.tools]
            tool_log: list[dict[str, Any]] = []

            # Bounded tool loop: at most 4 model rounds before giving up.
            for _ in range(4):
                response = llm.chat.completions.create(
                    model=model,
                    messages=messages,
                    tools=tool_specs,
                    tool_choice="auto",
                )
                message = response.choices[0].message
                # Echo the assistant turn (including any tool_calls) back into
                # the transcript so the follow-up request has full context.
                assistant_payload = message.model_dump(exclude_none=True)
                messages.append(assistant_payload)
                tool_calls = message.tool_calls or []
                if not tool_calls:
                    # No tool request means the model produced its final answer.
                    return {
                        "tool_names": [tool.name for tool in tools.tools],
                        "tool_log": tool_log,
                        "answer": message.content,
                    }
                for tool_call in tool_calls:
                    arguments = json.loads(tool_call.function.arguments or "{}")
                    result = await session.call_tool(tool_call.function.name, arguments)
                    # NOTE(review): the MCP SDK's CallToolResult declares this
                    # field as camelCase (cf. `tool.inputSchema` above) — confirm
                    # `structured_content` resolves on the installed SDK version.
                    payload: Any = result.structured_content
                    if payload is None and result.content:
                        # Fall back to the plain text blocks of the result.
                        payload = {"content": [block.text for block in result.content]}
                    tool_log.append({"name": tool_call.function.name, "arguments": arguments, "result": payload})
                    messages.append(
                        {
                            "role": "tool",
                            "tool_call_id": tool_call.id,
                            "content": json.dumps(payload, ensure_ascii=False),
                        }
                    )
            # Round limit hit without a final assistant message.
            return {
                "tool_names": [tool.name for tool in tools.tools],
                "tool_log": tool_log,
                "answer": "Model did not finish within the maximum number of tool rounds.",
            }
115+
116+
117+
def main() -> None:
    """CLI entry point: run one query (from argv, or a default) and print JSON."""
    query = " ".join(sys.argv[1:])
    if not query:
        query = "请查询 Hangzhou 当前天气,并用一句话总结。"
    outcome = asyncio.run(run(query))
    print(json.dumps(outcome, ensure_ascii=False, indent=2))


if __name__ == "__main__":
    main()

examples/weather_mcp_server.py

Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
1+
#!/usr/bin/env python3
2+
"""Minimal MCP-compatible weather server backed by ZCP."""
3+
4+
from __future__ import annotations
5+
6+
import sys
7+
from pathlib import Path
8+
9+
ROOT = Path(__file__).resolve().parents[1]
10+
SRC = ROOT / "src"
11+
if str(SRC) not in sys.path:
12+
sys.path.insert(0, str(SRC))
13+
14+
from zcp import FastZCP
15+
from zcp.mcp_stdio import run_mcp_stdio_server_sync
16+
17+
18+
app = FastZCP("Weather MCP Compatibility Server", version="0.1.0")


@app.tool(
    name="weather.get_current",
    description="Get the current weather for a city.",
    input_schema={
        "type": "object",
        "properties": {
            "city": {"type": "string"},
            "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
        },
        "required": ["city"],
        "additionalProperties": False,
    },
    output_schema={
        "type": "object",
        "properties": {
            "city": {"type": "string"},
            "unit": {"type": "string"},
            "temperature": {"type": "integer"},
            "condition": {"type": "string"},
        },
        "required": ["city", "unit", "temperature", "condition"],
        "additionalProperties": False,
    },
    output_mode="scalar",
    inline_ok=True,
)
def get_current_weather(city: str, unit: str = "celsius") -> dict[str, object]:
    """Return canned current-weather data for *city*.

    Args:
        city: City name; matched case-insensitively after stripping whitespace.
        unit: "celsius" (default) or "fahrenheit".

    Returns:
        Dict with ``city``, ``unit``, ``temperature`` (int, in the requested
        unit) and ``condition``. Unknown cities get a fixed placeholder.
    """
    # Canned demo data; temperatures are stored in Celsius.
    base = {
        "hangzhou": {"temperature": 24, "condition": "Cloudy"},
        "beijing": {"temperature": 18, "condition": "Sunny"},
        "shanghai": {"temperature": 22, "condition": "Rainy"},
    }
    # Copy so the lookup table is never mutated by the conversion below.
    payload = dict(base.get(city.strip().lower(), {"temperature": 20, "condition": "Unknown"}))
    if unit == "fahrenheit":
        # Bug fix: previously the Celsius value was returned unchanged even
        # when the caller asked for Fahrenheit. Convert, rounding to an int
        # to satisfy the declared output schema.
        payload["temperature"] = round(payload["temperature"] * 9 / 5 + 32)
    return {"city": city, "unit": unit, **payload}
55+
56+
57+
def main() -> None:
    """Serve the ZCP weather app over MCP stdio (blocks until the client disconnects)."""
    run_mcp_stdio_server_sync(app)


if __name__ == "__main__":
    main()

examples/weather_zcp_client.py

Lines changed: 117 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,117 @@
1+
#!/usr/bin/env python3
2+
"""Minimal prompt-driven ZCP client for the native weather server example."""
3+
4+
from __future__ import annotations
5+
6+
import asyncio
7+
import json
8+
import os
9+
import sys
10+
from typing import Any
11+
from pathlib import Path
12+
13+
ROOT = Path(__file__).resolve().parents[1]
14+
if str(ROOT) not in sys.path:
15+
sys.path.insert(0, str(ROOT))
16+
SRC = ROOT / "src"
17+
if str(SRC) not in sys.path:
18+
sys.path.insert(0, str(SRC))
19+
20+
from examples.weather_zcp_server import app
21+
from zcp import streamable_http_client, streamable_http_server
22+
23+
try:
24+
from openai import OpenAI
25+
except ModuleNotFoundError as exc: # pragma: no cover
26+
raise RuntimeError("Install the optional OpenAI dependency first: pip install 'zero-context-protocol-sdk[openai]'") from exc
27+
28+
29+
def _model_client() -> OpenAI:
    """Build an OpenAI-compatible chat client from environment variables.

    Accepts either ``OPENAI_API_KEY`` or ``DEEPSEEK_API_KEY`` (checked in
    that order) and defaults the endpoint to DeepSeek unless
    ``OPENAI_BASE_URL`` overrides it.

    Raises:
        RuntimeError: if neither API-key variable is set.
    """
    key = os.environ.get("OPENAI_API_KEY")
    if not key:
        key = os.environ.get("DEEPSEEK_API_KEY")
    if not key:
        raise RuntimeError("Set OPENAI_API_KEY or DEEPSEEK_API_KEY before running this example.")
    endpoint = os.environ.get("OPENAI_BASE_URL", "https://api.deepseek.com")
    return OpenAI(api_key=key, base_url=endpoint)
37+
38+
39+
def _tool_spec(tool: dict[str, Any]) -> dict[str, Any]:
40+
return {
41+
"type": "function",
42+
"function": {
43+
"name": tool["name"],
44+
"description": tool.get("description") or "",
45+
"parameters": tool["inputSchema"],
46+
"strict": True,
47+
},
48+
}
49+
50+
51+
async def run(query: str) -> dict[str, object]:
    """Answer *query* with an LLM tool loop backed by the native ZCP weather server.

    Wires the in-process example ``app`` behind a streamable-HTTP server and
    client pair, advertises its tools to an OpenAI-compatible chat model, and
    relays tool calls back to the server until the model stops requesting
    tools (max 4 rounds).

    Args:
        query: Natural-language user question, typically about the weather.

    Returns:
        Dict with the initialize response, the advertised tool names, a log
        of every tool invocation (name, arguments, result payload), and the
        model's final answer — or a fallback message on round exhaustion.
    """
    # NOTE(review): the client handle is never explicitly closed; if the ZCP
    # transport exposes a close/async-context API, consider using it here.
    client = streamable_http_client(streamable_http_server(app, endpoint="http://127.0.0.1:8000/zcp"))
    # ZCP handshake: initialize, then confirm, before listing/calling tools.
    init = await client.initialize()
    await client.initialized()
    tools = await client.list_tools()
    llm = _model_client()
    model = os.environ.get("OPENAI_MODEL", "deepseek-chat")
    messages: list[dict[str, Any]] = [
        {
            "role": "system",
            "content": "You are a helpful weather assistant. Use tools when the user asks about weather. Keep the final answer concise.",
        },
        {
            "role": "user",
            "content": query,
        },
    ]
    tool_specs = [_tool_spec(tool) for tool in tools["tools"]]
    tool_log: list[dict[str, Any]] = []

    # Bounded tool loop: at most 4 model rounds before giving up.
    for _ in range(4):
        response = llm.chat.completions.create(
            model=model,
            messages=messages,
            tools=tool_specs,
            tool_choice="auto",
        )
        message = response.choices[0].message
        # Echo the assistant turn (including any tool_calls) back into the
        # transcript so the follow-up request has full context.
        assistant_payload = message.model_dump(exclude_none=True)
        messages.append(assistant_payload)
        tool_calls = message.tool_calls or []
        if not tool_calls:
            # No tool request means the model produced its final answer.
            return {
                "initialize": init,
                "tool_names": [tool["name"] for tool in tools["tools"]],
                "tool_log": tool_log,
                "answer": message.content,
            }
        for tool_call in tool_calls:
            arguments = json.loads(tool_call.function.arguments or "{}")
            result = await client.call_tool(tool_call.function.name, arguments)
            # Prefer the structured result; fall back to raw content blocks.
            payload = result.get("structuredContent")
            if payload is None:
                payload = {"content": result.get("content", [])}
            tool_log.append({"name": tool_call.function.name, "arguments": arguments, "result": payload})
            messages.append(
                {
                    "role": "tool",
                    "tool_call_id": tool_call.id,
                    "content": json.dumps(payload, ensure_ascii=False),
                }
            )
    # Round limit hit without a final assistant message.
    return {
        "initialize": init,
        "tool_names": [tool["name"] for tool in tools["tools"]],
        "tool_log": tool_log,
        "answer": "Model did not finish within the maximum number of tool rounds.",
    }
109+
110+
111+
def main() -> None:
    """CLI entry point: run one query (from argv, or a default) and print JSON."""
    query = " ".join(sys.argv[1:])
    if not query:
        query = "请查询 Hangzhou 当前天气,并用一句话总结。"
    outcome = asyncio.run(run(query))
    print(json.dumps(outcome, ensure_ascii=False, indent=2))


if __name__ == "__main__":
    main()

0 commit comments

Comments
 (0)