-
Notifications
You must be signed in to change notification settings - Fork 10
Expand file tree
/
Copy pathtest_mcp.py
More file actions
107 lines (90 loc) · 3.29 KB
/
test_mcp.py
File metadata and controls
107 lines (90 loc) · 3.29 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
import json
import asyncio
from typing import Optional
from contextlib import AsyncExitStack
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from openai import AsyncOpenAI
# USING OPENAI COMPATIBLE API
API_KEY = ""  # NOTE(review): empty key — fill in (or load from an env var) before running
BASE_URL = "https://ai.sumopod.com/v1"  # OpenAI-compatible endpoint
MODEL = "gpt-5-nano"  # model name passed to chat.completions.create
class MCPClient:
    """Bridge between an MCP stdio tool server and an OpenAI-compatible chat API.

    Lifecycle: construct, ``await connect()``, call ``chat()`` any number of
    times, then ``await cleanup()``.
    """

    def __init__(self):
        # MCP session and LLM client are created lazily in connect().
        self.session: Optional[ClientSession] = None
        self.exit_stack = AsyncExitStack()
        # Tool schemas translated into the OpenAI "function" tool format.
        self.llm_tools: list = []
        self.llm = None

    async def connect(self):
        """Spawn the MCP server, initialize the session, and register its tools.

        Converts each MCP tool's input schema into an OpenAI function-tool
        definition stored in ``self.llm_tools``.
        """
        server_params = StdioServerParameters(
            command="telbot", args=["--mcp"], env=None
        )
        stdio_transport = await self.exit_stack.enter_async_context(
            stdio_client(server_params)
        )
        self.stdio, self.write = stdio_transport
        self.session = await self.exit_stack.enter_async_context(
            ClientSession(self.stdio, self.write)
        )
        await self.session.initialize()
        response = await self.session.list_tools()
        tools = response.tools
        print("MCP tools:", [t.name for t in tools])
        for tool in tools:
            # Some servers report a non-dict (or missing) schema; fall back to
            # an empty object schema rather than crashing.
            schema = tool.inputSchema if isinstance(tool.inputSchema, dict) else {}
            params = {"type": "object", "properties": schema.get("properties", {})}
            if "required" in schema:
                params["required"] = schema["required"]
            self.llm_tools.append(
                {
                    "type": "function",
                    "function": {
                        "name": tool.name,
                        "description": tool.description,
                        "parameters": params,
                    },
                }
            )
        self.llm = AsyncOpenAI(base_url=BASE_URL, api_key=API_KEY)

    async def chat(self, prompt: str):
        """Send *prompt* to the LLM, executing any MCP tool calls it requests.

        Performs at most one round of tool calls, then asks the model for a
        final answer; prints (does not return) the model output.
        """
        messages = [{"role": "user", "content": prompt}]
        response = await self.llm.chat.completions.create(
            model=MODEL, messages=messages, tools=self.llm_tools
        )
        message = response.choices[0].message
        if not message.tool_calls:
            print(message.content)
            return
        messages.append(message)
        for tc in message.tool_calls:
            args = json.loads(tc.function.arguments)
            print(f"[tool] {tc.function.name}({args})")
            result = await self.session.call_tool(tc.function.name, arguments=args)
            # Fix: a tool may legitimately return no content blocks;
            # result.content[0].text raised IndexError in that case.
            tool_output = result.content[0].text if result.content else ""
            messages.append(
                {
                    "role": "tool",
                    "tool_call_id": tc.id,
                    "content": tool_output,
                }
            )
        final = await self.llm.chat.completions.create(model=MODEL, messages=messages)
        print(f"\n{final.choices[0].message.content}")

    async def cleanup(self):
        """Close the MCP session and stdio transport."""
        await self.exit_stack.aclose()
async def main():
    """Interactive REPL: connect to the MCP server, then forward user prompts.

    Exits cleanly on 'exit'/'quit'/'q', Ctrl-D, or Ctrl-C; the MCP transport
    is always closed via the ``finally`` block.
    """
    client = MCPClient()
    try:
        await client.connect()
        print("\nReady! Type your prompt (or 'exit' to quit)\n")
        while True:
            try:
                # Fix: run blocking input() in a worker thread so the asyncio
                # event loop stays responsive, and treat Ctrl-D/Ctrl-C as a
                # clean exit instead of an unhandled traceback.
                prompt = await asyncio.to_thread(input, "> ")
            except (EOFError, KeyboardInterrupt):
                break
            if prompt.strip().lower() in ("exit", "quit", "q"):
                break
            if not prompt.strip():
                continue
            await client.chat(prompt)
            print()
    finally:
        await client.cleanup()
if __name__ == "__main__":
    # Entry point: start the interactive client only when run as a script.
    asyncio.run(main())