-
Notifications
You must be signed in to change notification settings - Fork 105
Expand file tree
/
Copy pathagent_knowledge_aisearch.py
More file actions
140 lines (114 loc) · 5.59 KB
/
agent_knowledge_aisearch.py
File metadata and controls
140 lines (114 loc) · 5.59 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
"""
Knowledge retrieval using Azure AI Search with agent-framework.
Diagram:
    Input ──▶ Agent ──────────────────▶ LLM ──▶ Response
               │                         ▲
               │ search with input       │ relevant knowledge
               ▼                         │
         ┌────────────┐                  │
         │ Knowledge  │──────────────────┘
         │   store    │
         │ (Azure AI  │
         │  Search)   │
         └────────────┘
This example uses the built-in AzureAISearchContextProvider in agentic
mode, which handles the entire retrieval pipeline — no custom
BaseContextProvider subclass needed. Agentic mode uses Knowledge Bases
for multi-hop reasoning across documents, providing accurate results
through intelligent query planning.
Requires:
- An Azure AI Search service with a Knowledge Base
- An OpenAI-compatible model endpoint (Azure OpenAI, GitHub Models, or OpenAI)
Environment variables:
- AZURE_SEARCH_ENDPOINT: Your Azure AI Search endpoint
- AZURE_SEARCH_KNOWLEDGE_BASE_NAME: Your Knowledge Base name
- Plus the standard API_HOST / model config (see other examples)
See also:
- agent_knowledge_sqlite.py for keyword-only search with SQLite
- agent_knowledge_postgres.py for hybrid search with PostgreSQL + pgvector
"""
import asyncio
import logging
import os
import sys
from agent_framework import Agent
from agent_framework.azure import AzureAISearchContextProvider
from agent_framework.openai import OpenAIChatClient
from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider
from dotenv import load_dotenv
from rich import print
from rich.logging import RichHandler
# ── Logging ──────────────────────────────────────────────────────────
# Route everything through Rich for pretty console output; keep the root
# logger quiet (WARNING) but let this module's own logger emit INFO.
handler = RichHandler(show_level=False, show_path=False, rich_tracebacks=True)
logging.basicConfig(
    level=logging.WARNING,
    format="%(message)s",
    handlers=[handler],
    force=True,  # replace any handlers installed by imported libraries
)
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# ── Configuration ────────────────────────────────────────────────────
# Load .env first so the os.environ reads below see its values;
# override=True lets the .env file win over inherited shell variables.
load_dotenv(override=True)
# Which model backend to use: "azure", "github", or anything else → OpenAI.
API_HOST = os.getenv("API_HOST", "github")
# Required — os.environ[...] raises KeyError with a clear name if unset.
SEARCH_ENDPOINT = os.environ["AZURE_SEARCH_ENDPOINT"]
KNOWLEDGE_BASE_NAME = os.environ["AZURE_SEARCH_KNOWLEDGE_BASE_NAME"]
# ── OpenAI client ────────────────────────────────────────────────────
# Pick a chat client based on API_HOST. Only the Azure branch creates an
# async credential, which main() is responsible for closing.
async_credential = None
if API_HOST == "azure":
    # Keyless auth: a bearer-token provider backed by Entra ID stands in
    # for the API key against the Azure OpenAI v1 endpoint.
    async_credential = DefaultAzureCredential()
    client = OpenAIChatClient(
        model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
        base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
        api_key=get_bearer_token_provider(async_credential, "https://cognitiveservices.azure.com/.default"),
    )
elif API_HOST == "github":
    # GitHub Models inference endpoint, authenticated with a GitHub token.
    client = OpenAIChatClient(
        model_id=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
        base_url="https://models.github.ai/inference",
        api_key=os.environ["GITHUB_TOKEN"],
    )
else:
    # Plain OpenAI: default base URL, key from the environment.
    client = OpenAIChatClient(
        model_id=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini"),
        api_key=os.environ["OPENAI_API_KEY"],
    )
# ── Azure AI Search context provider ─────────────────────────────────
# The search service always authenticates with Azure credentials,
# independent of which model backend API_HOST selected above.
search_credential = DefaultAzureCredential()
search_provider = AzureAISearchContextProvider(
    mode="agentic",  # Knowledge-Base retrieval with multi-hop query planning
    endpoint=SEARCH_ENDPOINT,
    knowledge_base_name=KNOWLEDGE_BASE_NAME,
    credential=search_credential,
)
# ── Agent ────────────────────────────────────────────────────────────
# The context provider injects retrieved product knowledge into each turn;
# the instructions keep the model grounded in that retrieved context.
agent = Agent(
    name="search-agent",
    client=client,
    context_providers=[search_provider],
    instructions=(
        "You are a helpful home improvement shopping assistant. "
        "Answer customer questions using the product information provided in the context. "
        "If no relevant products are found, say you don't have information about that item. "
    ),
)
async def main() -> None:
    """Demonstrate Azure AI Search RAG in a multi-turn conversation.

    Runs two turns in one session so the second question can refer back to
    the first answer, then closes the async Azure credentials.
    """
    try:
        # The provider manages its own aiohttp session; enter it for the
        # duration of the conversation.
        async with search_provider:
            print("\n[bold]=== Knowledge Retrieval with Azure AI Search (agentic mode) ===[/bold]")
            print(f"[dim]Knowledge Base: {KNOWLEDGE_BASE_NAME}[/dim]\n")
            session = agent.create_session()

            # Turn 1
            user_msg = "What kind of interior paint do you have for a living room?"
            print(f"[blue]User:[/blue] {user_msg}")
            response = await agent.run(user_msg, session=session)
            print(f"[green]Agent:[/green] {response.text}\n")

            # Turn 2 — follow-up referencing the previous answer
            user_msg = "What supplies do I need to apply it?"
            print(f"[blue]User:[/blue] {user_msg}")
            response = await agent.run(user_msg, session=session)
            print(f"[green]Agent:[/green] {response.text}\n")
    finally:
        # Close credentials even when a turn raises, so the aiohttp sessions
        # held by DefaultAzureCredential are not leaked (previously these
        # closes ran only on the success path).
        if async_credential:
            await async_credential.close()
        await search_credential.close()
if __name__ == "__main__":
if "--devui" in sys.argv:
from agent_framework.devui import serve
serve(entities=[agent], auto_open=True)
else:
asyncio.run(main())