# Source file: workflow_handoffbuilder.py — 157 lines (132 loc), 6.61 KB
# (GitHub page chrome and line-number gutter removed during extraction cleanup.)
"""Handoff orchestration in autonomous mode using HandoffBuilder.
Demonstrates: HandoffBuilder with .with_autonomous_mode() where agents
transfer control to each other without any human-in-the-loop interaction.
A triage agent decides which specialist to involve first, then agents
hand off freely — researcher gathers facts, writer drafts content,
and editor reviews (handing back to the writer if revisions are needed).
By default every participant can hand off to every other participant;
no explicit routing rules are needed.
Reference:
https://learn.microsoft.com/en-us/agent-framework/workflows/orchestrations/handoff?pivots=programming-language-python#autonomous-mode
Run:
uv run examples/workflow_handoffbuilder.py
"""
import asyncio
import logging
import os
from agent_framework import Agent, AgentResponseUpdate, MCPStreamableHTTPTool
from agent_framework.openai import OpenAIChatClient
from agent_framework.orchestrations import HandoffBuilder
from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider
from dotenv import load_dotenv
from rich.console import Console
# Example-script setup: quiet logging, a Rich console for output, and .env
# values loaded (override=True so .env wins over already-set shell variables).
logging.basicConfig(level=logging.WARNING)
console = Console()
load_dotenv(override=True)

# Which backend to talk to: "azure", "github" (default), or anything else
# falls through to openai.com.
API_HOST = os.getenv("API_HOST", "github")
# Configure the chat client based on the API host
async_credential = None
if API_HOST == "azure":
    # Azure OpenAI via Entra ID. NOTE(review): the bearer-token provider is
    # passed in the api_key slot — presumably OpenAIChatClient accepts a
    # callable there and invokes it per request; confirm against the client's docs.
    async_credential = DefaultAzureCredential()
    token_provider = get_bearer_token_provider(async_credential, "https://cognitiveservices.azure.com/.default")
    client = OpenAIChatClient(
        base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
        api_key=token_provider,
        model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
    )
elif API_HOST == "github":
    # GitHub Models: OpenAI-compatible inference endpoint, authenticated
    # with a plain GITHUB_TOKEN.
    client = OpenAIChatClient(
        base_url="https://models.github.ai/inference",
        api_key=os.environ["GITHUB_TOKEN"],
        model_id=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
    )
else:
    # Fallback: openai.com with OPENAI_API_KEY (model overridable via OPENAI_MODEL).
    client = OpenAIChatClient(
        api_key=os.environ["OPENAI_API_KEY"], model_id=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
    )
async def main() -> None:
    """Build and run the autonomous handoff workflow.

    A triage agent picks the first specialist; researcher, writer, and editor
    then hand control to each other (autonomous mode, no human in the loop)
    until the editor emits a message starting with 'FINAL:', which satisfies
    the termination condition. Streaming events are rendered to the console.
    """
    try:
        # ── MCP tool for the researcher ────────────────────────────────────────
        async with MCPStreamableHTTPTool(
            name="Microsoft Learn MCP",
            url="https://learn.microsoft.com/api/mcp",
        ) as mcp_server:
            # ── Agents ─────────────────────────────────────────────────────────
            triage = Agent(
                client=client,
                name="triage",
                instructions=(
                    "You are a triage coordinator for a content creation team. "
                    "Analyze the user's request and hand off to the most appropriate agent: "
                    "'researcher' for fact-gathering (preferred first step for most requests), "
                    "'writer' for drafting, or 'editor' for review. "
                    "Do NOT produce content yourself — just decide who should start and hand off."
                ),
            )
            researcher = Agent(
                client=client,
                name="researcher",
                instructions=(
                    "You are a researcher. Use the Microsoft Learn search tool to find "
                    "relevant, up-to-date documentation on the given topic. "
                    "The Microsoft Agent Framework Python package is documented at "
                    "learn.microsoft.com/agent-framework — search for terms like "
                    "'agent framework workflow', 'agent framework orchestrations', etc. "
                    "Do NOT confuse it with Microsoft Bot Framework — they are different products. "
                    "Produce 3-5 concise bullet points summarizing your findings. "
                    "When done, hand off to the writer."
                ),
                tools=[mcp_server],  # only the researcher gets the MCP search tool
            )
            writer = Agent(
                client=client,
                name="writer",
                instructions=(
                    "You are a social media writer specializing in LinkedIn. "
                    "Take the researcher's bullet points and write a punchy LinkedIn post "
                    "(80-120 words). Use a hook opening, short paragraphs, and a clear CTA. "
                    "Include 2-3 relevant hashtags at the end. When done, hand off to the editor."
                ),
            )
            editor = Agent(
                client=client,
                name="editor",
                instructions=(
                    "You are a LinkedIn editor. Review the writer's draft for clarity, tone, and engagement. "
                    "If you see issues (weak hook, filler, vague CTA, poor formatting), "
                    "give 2-3 specific critiques and hand off to the writer for revision. "
                    "If the draft is solid or has already been revised, output the polished version "
                    "prefixed with 'FINAL:' on the first line. "
                    "Each response must EITHER hand off OR output FINAL — never both."
                ),
            )
            # ── Build the handoff workflow (autonomous — no user interaction) ──
            workflow = (
                HandoffBuilder(
                    name="content_pipeline",
                    participants=[triage, researcher, writer, editor],
                    # Stop once the newest conversation message carries the
                    # editor's FINAL: marker on its first line.
                    termination_condition=lambda conversation: (
                        len(conversation) > 0 and conversation[-1].text.strip().startswith("FINAL:")
                    ),
                )
                .with_start_agent(triage)
                .with_autonomous_mode()
                .build()
            )
            # ── Run ───────────────────────────────────────────────────────────
            prompt = "Write a LinkedIn post about deploying Python agents on Azure Container Apps."
            console.print(f"[bold]Prompt:[/bold] {prompt}\n")
            current_agent = None
            async for event in workflow.run(prompt, stream=True):
                if event.type == "handoff_sent":
                    console.print(
                        f"\n🔀 [bold yellow]Handoff:[/bold yellow] {event.data.source} → {event.data.target}\n"
                    )
                elif event.type == "output" and isinstance(event.data, AgentResponseUpdate):
                    # Print an agent banner only when the streaming speaker changes.
                    if event.executor_id != current_agent:
                        current_agent = event.executor_id
                        console.print(f"\n🤖 [bold cyan]{current_agent}[/bold cyan]")
                    console.print(event.data.text, end="")
    finally:
        # Fix: close the module-level Azure credential on ALL exit paths.
        # The original closed it only after a successful run, so any exception
        # from the workflow leaked the credential's underlying aio session.
        if async_credential:
            await async_credential.close()
# Script entry point: drive the async main() on a fresh asyncio event loop.
if __name__ == "__main__":
    asyncio.run(main())