Skip to content

Commit c027912

Browse files
tylerpayneTyler Payneclaudeekzhu
authored
Support MCP Elicitation, Sampling, and Roots via new McpSessionHost (#6833)
Co-authored-by: Tyler Payne <[email protected]> Co-authored-by: Claude <[email protected]> Co-authored-by: Eric Zhu <[email protected]>
1 parent a2bf539 commit c027912

File tree

17 files changed

+2655
-1392
lines changed

17 files changed

+2655
-1392
lines changed
Lines changed: 75 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,75 @@
1+
"""From: https://github.com/modelcontextprotocol/python-sdk?tab=readme-ov-file#elicitation"""
2+
3+
from pathlib import Path
4+
5+
from mcp import SamplingMessage
6+
from mcp.server.fastmcp import Context, FastMCP
7+
from mcp.types import TextContent
8+
from pydantic import BaseModel, Field
9+
10+
# Single FastMCP server instance that exposes the example tools defined below.
mcp = FastMCP(name="Elicitation Example")
11+
12+
13+
class BookingPreferences(BaseModel):
    """Schema for collecting user preferences."""

    # Whether the user is willing to try a different reservation time.
    checkAlternative: bool = Field(description="Would you like to check another time?")
    # Free-form alternative time supplied by the user (no default, so the
    # elicitation client must provide a value).
    alternativeTime: str = Field(
        description="Alternative time.",
    )
20+
21+
22+
@mcp.tool()
async def book_table(
    time: str,
    party_size: int,
    ctx: Context,
) -> str:
    """Book a table, eliciting an alternative time from the user when needed."""
    # This demo treats the requested slot as always unavailable, so we go
    # straight to asking the host whether another time should be tried.
    reply = await ctx.elicit(
        message=(f"No tables available for {party_size} at {time}. Would you like to try another time?"),
        schema=BookingPreferences,
    )

    # Guard clauses: anything other than an accepted, populated response cancels.
    if reply.action != "accept" or not reply.data:
        return "[CANCELLED] Booking cancelled"
    if not reply.data.checkAlternative:
        return "[CANCELLED] No booking made"
    return f"[SUCCESS] Booked for {reply.data.alternativeTime}"
40+
41+
42+
@mcp.tool()
async def list_dir(path: Path, ctx: Context) -> list[str]:
    """List the files and directories in path"""
    # Ask the connected host which filesystem roots this server may access.
    root_list = await ctx.session.list_roots()
    for exposed in root_list.roots:
        raw_path = exposed.uri.path
        if not raw_path:
            continue
        try:
            path.relative_to(Path(raw_path))
        except ValueError:
            # path is not under this root; keep scanning the remaining roots.
            continue
        # Demo server: return a canned listing instead of touching the disk.
        return ["Downloads", "Documents", "image.png", "presentation.pptx"]
    raise ValueError(f"Cannot list_dir in {path} because it is not a child of the available roots.")
57+
58+
59+
@mcp.tool()
async def generate_poem(topic: str, ctx: Context) -> str:
    """Generate a short poem by sampling the host's language model."""
    # Delegate text generation back to the connected host (MCP sampling).
    response = await ctx.session.create_message(
        [SamplingMessage(role="user", content=TextContent(type="text", text=f"Write a poem about {topic}."))],
        max_tokens=100,
        system_prompt="You are a very creative poet.",
        temperature=0.8,
        stop_sequences=["\n\n"],
    )
    # The host may answer with non-text content; only text is usable here.
    if not isinstance(response.content, TextContent):
        raise TypeError(f"Unrecognized message response type {type(response.content).__name__}")
    return response.content.text
72+
73+
74+
if __name__ == "__main__":
    # Serve over stdio so a host process can spawn this file as a subprocess.
    mcp.run("stdio")
Lines changed: 251 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,251 @@
1+
"""
2+
Interactive MCP Host Capabilities Demo
3+
4+
This example demonstrates advanced MCP host capabilities including:
5+
- Sampling: Language model text generation requests from MCP server back to host
6+
- Elicitation: Interactive user input collection through command-line
7+
- Roots: File system root listing
8+
9+
The demo is fully interactive and allows you to communicate directly with
10+
the MCP server through the command line interface.
11+
"""
12+
13+
import argparse
14+
import asyncio
15+
import json
16+
import logging
17+
import sys
18+
from pathlib import Path
19+
20+
import yaml
21+
from autogen_agentchat.agents import AssistantAgent, UserProxyAgent
22+
from autogen_agentchat.conditions import MaxMessageTermination
23+
from autogen_agentchat.teams import RoundRobinGroupChat
24+
from autogen_agentchat.ui import Console
25+
from autogen_core.models import ChatCompletionClient
26+
from autogen_ext.models.openai import OpenAIChatCompletionClient
27+
from autogen_ext.tools.mcp import (
28+
ChatCompletionClientSampler,
29+
McpSessionHost,
30+
McpWorkbench,
31+
StaticRootsProvider,
32+
StdioElicitor,
33+
StdioServerParams,
34+
)
35+
from mcp.types import Root
36+
from pydantic import FileUrl
37+
38+
# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format="%(message)s",  # Clean format for demo output
)
logger = logging.getLogger(__name__)
# Quiet the framework's own logging so the demo transcript stays readable.
logging.getLogger("autogen_core").setLevel(logging.WARNING)
45+
46+
47+
def load_model_client_from_config(config_path: str) -> ChatCompletionClient:
48+
"""Load a ChatCompletionClient from a JSON or YAML config file.
49+
50+
Args:
51+
config_path: Path to the JSON or YAML config file
52+
53+
Returns:
54+
ChatCompletionClient: Loaded model client
55+
56+
Raises:
57+
FileNotFoundError: If config file doesn't exist
58+
ValueError: If config format is invalid or unsupported file type
59+
"""
60+
config_file = Path(config_path)
61+
if not config_file.exists():
62+
raise FileNotFoundError(f"Config file not found: {config_path}")
63+
64+
# Load config based on file extension
65+
if config_file.suffix.lower() == ".json":
66+
with open(config_file, "r") as f:
67+
config_data = json.load(f)
68+
elif config_file.suffix.lower() in [".yml", ".yaml"]:
69+
with open(config_file, "r") as f:
70+
config_data = yaml.safe_load(f)
71+
else:
72+
raise ValueError(f"Unsupported config file type: {config_file.suffix}. Use .json, .yml, or .yaml")
73+
74+
if not isinstance(config_data, dict):
75+
raise ValueError("Config file must contain a JSON/YAML object")
76+
77+
logger.info(f"📄 Loading ChatCompletionClient from config: {config_path}")
78+
return ChatCompletionClient.load_component(config_data)
79+
80+
81+
async def interactive_mcp_demo(config_path: str | None = None) -> None:
    """Interactive MCP host capabilities demo with command-line interface.

    Args:
        config_path: Optional path to a JSON/YAML ChatCompletionClient config;
            when omitted, a default OpenAI client is used.
    """
    logger.info("🌟 Interactive MCP Host Capabilities Demo")
    logger.info("=" * 60)
    logger.info("This demo showcases MCP server-to-host communication:")
    logger.info("• Sampling: MCP server requests language model generation")
    logger.info("• Elicitation: MCP server requests user input via AgentElicitor")
    logger.info("• Roots: MCP server lists available file system roots")
    logger.info("=" * 60)

    # Setup model client for sampling
    if config_path:
        logger.info("⚙️ Loading model client from config file...")
        model_client = load_model_client_from_config(config_path)
    else:
        logger.info("⚙️ Setting up default OpenAI model client (gpt-4)...")
        model_client = OpenAIChatCompletionClient(model="gpt-4o-mini")

    # Second agent in the round-robin team; shares the same model client.
    other_assistant = AssistantAgent(
        "booking_assistant",
        model_client=model_client,
        description="An AI assistant who helps a user book 5pm reservations.",
    )

    # Sampler answers the MCP server's create_message (sampling) requests.
    sampler = ChatCompletionClientSampler(model_client)

    # Elicitation requests are answered by prompting on stdin/stdout.
    logger.info("🎯 Creating StdioElicitor...")
    elicitor = StdioElicitor()

    # Fixed set of filesystem roots advertised to the MCP server.
    roots = StaticRootsProvider(
        [Root(uri=FileUrl("file:///home"), name="Home"), Root(uri=FileUrl("file:///tmp"), name="Tmp")]
    )

    # Create host with all capabilities including elicitation
    logger.info("🏠 Creating MCP session host with sampling, elicitation, and roots support...")
    host = McpSessionHost(
        sampler=sampler,  # Support sampling via model client
        elicitor=elicitor,  # Support elicitation via stdio prompts
        roots=roots,  # support roots in /home and /tmp
    )

    # Setup workbench with host; spawns the sibling example server over stdio.
    logger.info("🔧 Creating MCP Workbench for mcp_example_server...")
    mcp_workbench = McpWorkbench(
        server_params=StdioServerParams(
            command=sys.executable,
            args=[str(Path(__file__).parent / "mcp_example_server.py")],
            read_timeout_seconds=60,
        ),
        host=host,
    )

    # Create assistant with MCP capabilities
    assistant = AssistantAgent(
        "mcp_assistant",
        model_client=model_client,
        workbench=mcp_workbench,
        description="An AI assistant with access to MCP tools that can request sampling and elicitation from the host",
    )

    # Create RoundRobinGroupChat with the agents
    logger.info("🔄 Setting up RoundRobinGroupChat...")
    team = RoundRobinGroupChat(
        [assistant, other_assistant], termination_condition=MaxMessageTermination(max_messages=2)
    )

    # Run the team once per scripted task, resetting state between tasks.
    tasks = ["Book a table for 2 at 7pm", "Generate a poem about computer protocols.", "ls /home", "ls /bin"]
    for task in tasks:
        await team.reset()
        result = await Console(team.run_stream(task=task))

    # NOTE(review): `result` here is only the FINAL task's run, so only the last
    # transcript is dumped below — move this block inside the loop if every
    # task's messages should be shown. Confirm intended behavior.
    logger.info("💬 Team conversation:")
    for message in result.messages:
        header = f"--- {type(message).__name__.upper()} ---"
        logger.info(header)
        logger.info(message.model_dump_json(indent=2))
159+
160+
161+
def parse_arguments() -> argparse.Namespace:
    """Build and evaluate the command-line interface for the demo."""
    # Keep the usage examples in a local so the parser construction stays compact.
    usage_epilog = """
Examples:
  # Run with default OpenAI GPT-4 client
  python mcp_elicitation_example.py

  # Run with custom model client from config
  python mcp_elicitation_example.py --config model_config.json
  python mcp_elicitation_example.py --config model_config.yaml

Config file format (JSON/YAML):
  {
    "component_type": "OpenAIChatCompletionClient",
    "model": "gpt-4",
    "api_key": "your-api-key"
  }
        """
    cli = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="Interactive MCP Host Capabilities Demo with AgentElicitor",
        epilog=usage_epilog,
    )
    cli.add_argument(
        "--config",
        "-c",
        type=str,
        help="Path to JSON or YAML config file containing ChatCompletionClient configuration",
    )
    return cli.parse_args()
192+
193+
194+
async def main() -> None:
    """
    Run the interactive MCP host capabilities demonstration.

    This demo allows direct command-line interaction with an MCP-enabled assistant
    that can use tools requiring host-side capabilities like sampling and elicitation.
    """
    args = parse_arguments()

    try:
        await interactive_mcp_demo(config_path=args.config)
    except KeyboardInterrupt:
        # Ctrl-C is an expected way to leave the interactive session.
        logger.info("\n👋 Demo interrupted by user. Goodbye!")
    except Exception as e:
        # Top-level boundary: log the failure plus recovery hints rather than crash.
        logger.error(f"❌ Error running demo: {e}")
        # NOTE(review): these tips reference the npm "everything" server, but the
        # demo actually spawns the local mcp_example_server.py via sys.executable —
        # the npm/Node.js guidance looks stale; confirm and update the messages.
        logger.info("Troubleshooting tips:")
        logger.info("1. Install the everything server:")
        logger.info("   npm install -g @modelcontextprotocol/server-everything")
        logger.info("2. Ensure your OpenAI API key is configured")
        logger.info("3. Check that Node.js and npx are available in your PATH")
        logger.info("4. Make sure you have internet connectivity for npm package download")
        if args.config:
            logger.info("5. Check that your config file exists and contains valid ChatCompletionClient configuration")
217+
218+
219+
if __name__ == "__main__":
    """
    Interactive MCP Host Capabilities Demo with AgentElicitor

    This demo provides a command-line interface to interact with an MCP-enabled
    assistant that demonstrates advanced host capabilities:

    🔄 Sampling: MCP server can request language model text generation from the host
    ❓ Elicitation: MCP server can request interactive user input via AgentElicitor → UserProxy
    📁 Roots: MCP server can request file system root listings from the host

    Key Features:
    - AgentElicitor routes elicitation requests from MCP server to UserProxyAgent
    - Full bidirectional communication between MCP server and AutoGen agents
    - Interactive command-line interface for real-time demonstration

    Prerequisites:
    1. Install the everything reference server:
       npm install -g @modelcontextprotocol/server-everything
    2. Set up your OpenAI API key (required for sampling capability)
    3. Ensure Node.js and npx are available in your PATH

    Usage:
    - Run with default model: python mcp_elicitation_example.py
    - Run with custom model: python mcp_elicitation_example.py --config model.json
    - Interact through the command-line interface
    - Ask the assistant to use MCP tools that demonstrate host capabilities
    - Watch elicitation requests get routed through the AgentElicitor
    - Type 'quit' to exit the interactive session
    """
    # NOTE(review): the banner above mentions npm's "everything" server and a
    # UserProxy-based elicitor, but the code spawns the local
    # mcp_example_server.py and uses StdioElicitor — the banner looks stale;
    # confirm and refresh it.

    # Run the interactive demo
    asyncio.run(main())

python/packages/autogen-ext/src/autogen_ext/tools/mcp/__init__.py

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,19 @@
11
from ._actor import McpSessionActor
22
from ._config import McpServerParams, SseServerParams, StdioServerParams, StreamableHttpServerParams
33
from ._factory import mcp_server_tools
4+
from ._host import (
5+
ChatCompletionClientSampler,
6+
ChatCompletionClientSamplerConfig,
7+
Elicitor,
8+
McpSessionHost,
9+
RootsProvider,
10+
Sampler,
11+
StaticRootsProvider,
12+
StaticRootsProviderConfig,
13+
StdioElicitor,
14+
StdioElicitorConfig,
15+
StreamElicitor,
16+
)
417
from ._session import create_mcp_server_session
518
from ._sse import SseMcpToolAdapter
619
from ._stdio import StdioMcpToolAdapter
@@ -19,4 +32,15 @@
1932
"McpServerParams",
2033
"mcp_server_tools",
2134
"McpWorkbench",
35+
"Elicitor",
36+
"StdioElicitor",
37+
"StdioElicitorConfig",
38+
"StreamElicitor",
39+
"RootsProvider",
40+
"StaticRootsProvider",
41+
"StaticRootsProviderConfig",
42+
"McpSessionHost",
43+
"ChatCompletionClientSampler",
44+
"ChatCompletionClientSamplerConfig",
45+
"Sampler",
2246
]

0 commit comments

Comments
 (0)