Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
45 commits
Select commit Hold shift + click to select a range
23cf75b
Checkpoint
crickman Mar 5, 2026
fcd60da
Checkpoint
crickman Mar 5, 2026
eb84062
Stable
crickman Mar 5, 2026
0e8b9b2
Strategies
crickman Mar 5, 2026
b275d34
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 5, 2026
dda15ea
Updated
crickman Mar 5, 2026
7608005
Encoding
crickman Mar 5, 2026
1428286
Formatting
crickman Mar 5, 2026
f70423b
Cleanup
crickman Mar 5, 2026
defb9dd
Formatting
crickman Mar 5, 2026
6ce0447
Tests
crickman Mar 5, 2026
7e2c5ad
Tuning
crickman Mar 5, 2026
06f55c0
Update tests
crickman Mar 5, 2026
f42863e
Test update
crickman Mar 5, 2026
c513694
Remove working solution
crickman Mar 5, 2026
1a8a58f
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 5, 2026
43d226f
Add sample to solution
crickman Mar 5, 2026
5ef100c
Sample readme
crickman Mar 5, 2026
4d6e1ff
Experimental
crickman Mar 5, 2026
2f443a1
Format
crickman Mar 5, 2026
209d0e3
Formatting
crickman Mar 5, 2026
84aa392
Encoding
crickman Mar 5, 2026
7c88b20
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 5, 2026
9c1165f
Support IChatReducer
crickman Mar 5, 2026
6ef397e
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 6, 2026
7afed95
Sample output formatting
crickman Mar 6, 2026
36ba6b0
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 6, 2026
1598991
Initial plan
Copilot Mar 6, 2026
dc2bb4d
Replace CompactingChatClient with MessageCompactionContextProvider
Copilot Mar 6, 2026
601eddb
Boundary condition
crickman Mar 6, 2026
cc441d2
Merge branch 'main' into crickman/feature-compaction-deux
crickman Mar 6, 2026
14aae1f
Merge branch 'crickman/feature-compaction-deux' into copilot/create-m…
crickman Mar 6, 2026
094b415
Fix encoding
crickman Mar 6, 2026
93728c1
Fix cast
crickman Mar 6, 2026
aff1d06
Test coverage
crickman Mar 6, 2026
04f29e6
Merge branch 'crickman/feature-compaction-deux' into copilot/create-m…
crickman Mar 6, 2026
278912b
Namespace
crickman Mar 6, 2026
576f750
Improvements
crickman Mar 6, 2026
aa47a14
Efficiency
crickman Mar 6, 2026
b202b7c
Cleanup
crickman Mar 6, 2026
9f0cc62
Resolve merge
crickman Mar 7, 2026
da4886f
Detect service managed conversation
crickman Mar 7, 2026
dcf4b1a
Fix namespace
crickman Mar 7, 2026
fe09a1e
Fix merge
crickman Mar 7, 2026
b6070fd
Fix test expectation
crickman Mar 7, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions dotnet/Directory.Packages.props
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,7 @@
<!-- Inference SDKs -->
<PackageVersion Include="AWSSDK.Extensions.Bedrock.MEAI" Version="4.0.5.1" />
<PackageVersion Include="Microsoft.ML.OnnxRuntimeGenAI" Version="0.10.0" />
<PackageVersion Include="Microsoft.ML.Tokenizers" Version="2.0.0" />
<PackageVersion Include="OllamaSharp" Version="5.4.8" />
<PackageVersion Include="OpenAI" Version="2.8.0" />
<!-- Identity -->
Expand Down
1 change: 1 addition & 0 deletions dotnet/agent-framework-dotnet.slnx
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@
<Project Path="samples/02-agents/Agents/Agent_Step15_DeepResearch/Agent_Step15_DeepResearch.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step16_Declarative/Agent_Step16_Declarative.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step17_AdditionalAIContext/Agent_Step17_AdditionalAIContext.csproj" />
<Project Path="samples/02-agents/Agents/Agent_Step18_CompactionPipeline/Agent_Step18_CompactionPipeline.csproj" />
</Folder>
<Folder Name="/Samples/02-agents/DeclarativeAgents/">
<Project Path="samples/02-agents/DeclarativeAgents/ChatClient/DeclarativeChatClientAgents.csproj" />
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
<Project Sdk="Microsoft.NET.Sdk">

<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFrameworks>net10.0</TargetFrameworks>

<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Azure.AI.OpenAI" />
<PackageReference Include="Azure.Identity" />
<PackageReference Include="Microsoft.Extensions.AI.OpenAI" />
</ItemGroup>

<ItemGroup>
<ProjectReference Include="..\..\..\..\src\Microsoft.Agents.AI.OpenAI\Microsoft.Agents.AI.OpenAI.csproj" />
</ItemGroup>

</Project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
// Copyright (c) Microsoft. All rights reserved.

// This sample demonstrates how to use a MessageCompactionContextProvider with a compaction pipeline
// as an AIContextProvider for an agent's in-run context management. The pipeline chains multiple
// compaction strategies from gentle to aggressive:
// 1. ToolResultCompactionStrategy - Collapses old tool-call groups into concise summaries
// 2. SummarizationCompactionStrategy - LLM-compresses older conversation spans
// 3. SlidingWindowCompactionStrategy - Keeps only the most recent N user turns
// 4. TruncationCompactionStrategy - Emergency token-budget backstop

using System.ComponentModel;
using Azure.AI.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Compaction;
using Microsoft.Extensions.AI;

// Read the required Azure OpenAI configuration from the environment.
string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT")
    ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";

// WARNING: DefaultAzureCredential is convenient for development but needs careful consideration
// in production. Prefer a specific credential there (e.g. ManagedIdentityCredential) to avoid
// latency from credential probing and the security implications of fallback mechanisms.
var openAIClient = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential());

// One chat client drives the agent; a second backs the summarization strategy.
// The same deployment is used for both here for simplicity — in production a
// smaller/cheaper model is typically preferable for summarization.
IChatClient agentChatClient = openAIClient.GetChatClient(deploymentName).AsIChatClient();
IChatClient summarizerChatClient = openAIClient.GetChatClient(deploymentName).AsIChatClient();

// Demo tool for the agent: gives the model something to call so that
// tool-result compaction has real tool-call groups to collapse.
[Description("Look up the current price of a product by name.")]
static string LookupPrice([Description("The product name to look up.")] string productName)
{
    // Normalize once so the lookup is case-insensitive.
    string normalized = productName.ToUpperInvariant();

    if (normalized == "LAPTOP")
    {
        return "The laptop costs $999.99.";
    }

    if (normalized == "KEYBOARD")
    {
        return "The keyboard costs $79.99.";
    }

    if (normalized == "MOUSE")
    {
        return "The mouse costs $29.99.";
    }

    // Unknown product: echo the caller's original (un-normalized) name back.
    return $"Sorry, I don't have pricing for '{productName}'.";
}

// Build the compaction pipeline with one instance of each strategy,
// ordered from least to most aggressive.

// 1. Gentle: collapse old tool-call groups into short summaries like "[Tool calls: LookupPrice]"
ToolResultCompactionStrategy toolResultStrategy = new(CompactionTriggers.MessagesExceed(7));

// 2. Moderate: use an LLM to summarize older conversation spans into a concise message
//    (0x500 = 1,280 tokens)
SummarizationCompactionStrategy summarizationStrategy = new(summarizerChatClient, CompactionTriggers.TokensExceed(0x500));

// 3. Aggressive: keep only the last N user turns and their responses
SlidingWindowCompactionStrategy slidingWindowStrategy = new(CompactionTriggers.TurnsExceed(4));

// 4. Emergency: drop oldest groups until under the token budget
//    (0x8000 = 32,768 tokens)
TruncationCompactionStrategy truncationStrategy = new(CompactionTriggers.TokensExceed(0x8000));

PipelineCompactionStrategy compactionPipeline = new(
    toolResultStrategy,
    summarizationStrategy,
    slidingWindowStrategy,
    truncationStrategy);

// Create the agent with a MessageCompactionContextProvider that uses the compaction pipeline.
// The provider participates in the agent's in-run context management, compacting the
// conversation history according to the pipeline configured above.
AIAgent agent =
agentChatClient.AsAIAgent(
new ChatClientAgentOptions
{
Name = "ShoppingAssistant",
ChatOptions = new()
{
// NOTE: the instructions deliberately ask for verbose answers so the history
// grows quickly enough for the compaction triggers to fire within a short demo.
Instructions =
"""
You are a helpful, but long winded, shopping assistant.
Help the user look up prices and compare products.
When responding, Be sure to be extra descriptive and use as
many words as possible without sounding ridiculous.
""",
// Expose the LookupPrice local function as a callable tool.
Tools = [AIFunctionFactory.Create(LookupPrice)],
},
// Attach the compaction pipeline as the agent's context provider.
AIContextProviders = [new MessageCompactionContextProvider(compactionPipeline)],
});

// Create the session whose in-memory history the pipeline will compact.
AgentSession session = await agent.CreateSessionAsync();

// Report how many messages the in-memory chat history currently holds,
// so the effect of each compaction pass is visible between turns.
void PrintChatHistory()
{
    if (!session.TryGetInMemoryChatHistory(out var history))
    {
        // Nothing to report when the history is not held in memory.
        return;
    }

    Console.ForegroundColor = ConsoleColor.Cyan;
    Console.WriteLine($"\n[Messages: x{history.Count}]\n");
    Console.ResetColor();
}

// Drive a multi-turn conversation (including tool calls) so every pipeline stage is exercised.
string[] prompts =
[
    "What's the price of a laptop?",
    "How about a keyboard?",
    "And a mouse?",
    "Which product is the cheapest?",
    "Can you compare the laptop and the keyboard for me?",
    "What was the first product I asked about?",
    "Thank you!",
];

// Write a cyan speaker label, restoring the console color afterwards.
static void WriteLabel(string label)
{
    Console.ForegroundColor = ConsoleColor.Cyan;
    Console.Write(label);
    Console.ResetColor();
}

foreach (string prompt in prompts)
{
    WriteLabel("\n[User] ");
    Console.WriteLine(prompt);

    WriteLabel("\n[Agent] ");
    Console.WriteLine(await agent.RunAsync(prompt, session));

    // Show the history size after each turn to make compaction visible.
    PrintChatHistory();
}
1 change: 1 addition & 0 deletions dotnet/samples/02-agents/Agents/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ Before you begin, ensure you have the following prerequisites:
|[Deep research with an agent](./Agent_Step15_DeepResearch/)|This sample demonstrates how to use the Deep Research Tool to perform comprehensive research on complex topics|
|[Declarative agent](./Agent_Step16_Declarative/)|This sample demonstrates how to declaratively define an agent.|
|[Providing additional AI Context to an agent using multiple AIContextProviders](./Agent_Step17_AdditionalAIContext/)|This sample demonstrates how to inject additional AI context into a ChatClientAgent using multiple custom AIContextProvider components that are attached to the agent.|
|[Using compaction pipeline with an agent](./Agent_Step18_CompactionPipeline/)|This sample demonstrates how to use a compaction pipeline to efficiently limit the size of the conversation history for an agent.|

## Running the samples from the console

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,20 +79,21 @@ public List<ChatMessage> GetMessages(AgentSession? session)
/// <exception cref="ArgumentNullException"><paramref name="messages"/> is <see langword="null"/>.</exception>
public void SetMessages(AgentSession? session, List<ChatMessage> messages)
{
_ = Throw.IfNull(messages);
Throw.IfNull(messages);

var state = this._sessionState.GetOrInitializeState(session);
State state = this._sessionState.GetOrInitializeState(session);
state.Messages = messages;
}

/// <inheritdoc />
protected override async ValueTask<IEnumerable<ChatMessage>> ProvideChatHistoryAsync(InvokingContext context, CancellationToken cancellationToken = default)
{
var state = this._sessionState.GetOrInitializeState(context.Session);
State state = this._sessionState.GetOrInitializeState(context.Session);

if (this.ReducerTriggerEvent is InMemoryChatHistoryProviderOptions.ChatReducerTriggerEvent.BeforeMessagesRetrieval && this.ChatReducer is not null)
{
state.Messages = (await this.ChatReducer.ReduceAsync(state.Messages, cancellationToken).ConfigureAwait(false)).ToList();
// Apply pre-invocation compaction strategy if configured
await this.CompactMessagesAsync(state, cancellationToken).ConfigureAwait(false);
}

return state.Messages;
Expand All @@ -101,15 +102,26 @@ protected override async ValueTask<IEnumerable<ChatMessage>> ProvideChatHistoryA
/// <inheritdoc />
protected override async ValueTask StoreChatHistoryAsync(InvokedContext context, CancellationToken cancellationToken = default)
{
var state = this._sessionState.GetOrInitializeState(context.Session);
State state = this._sessionState.GetOrInitializeState(context.Session);

// Add request and response messages to the provider
var allNewMessages = context.RequestMessages.Concat(context.ResponseMessages ?? []);
state.Messages.AddRange(allNewMessages);

if (this.ReducerTriggerEvent is InMemoryChatHistoryProviderOptions.ChatReducerTriggerEvent.AfterMessageAdded && this.ChatReducer is not null)
if (this.ReducerTriggerEvent is InMemoryChatHistoryProviderOptions.ChatReducerTriggerEvent.AfterMessageAdded)
{
state.Messages = (await this.ChatReducer.ReduceAsync(state.Messages, cancellationToken).ConfigureAwait(false)).ToList();
// Apply pre-write compaction strategy if configured
await this.CompactMessagesAsync(state, cancellationToken).ConfigureAwait(false);
}
}

private async Task CompactMessagesAsync(State state, CancellationToken cancellationToken = default)
{
if (this.ChatReducer is not null)
{
// ChatReducer takes precedence, if configured
state.Messages = [.. await this.ChatReducer.ReduceAsync(state.Messages, cancellationToken).ConfigureAwait(false)];
return;
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ internal static IChatClient WithDefaultAgentMiddleware(this IChatClient chatClie

if (chatClient.GetService<FunctionInvokingChatClient>() is null)
{
_ = chatBuilder.Use((innerClient, services) =>
chatBuilder.Use((innerClient, services) =>
{
var loggerFactory = services.GetService<ILoggerFactory>();

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,88 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.AI;
using Microsoft.Shared.DiagnosticIds;
using Microsoft.Shared.Diagnostics;

namespace Microsoft.Agents.AI.Compaction;

/// <summary>
/// A compaction strategy that applies an <see cref="IChatReducer"/> to reduce the
/// conversation's included messages.
/// </summary>
/// <remarks>
/// <para>
/// This strategy bridges the <see cref="IChatReducer"/> abstraction from
/// <c>Microsoft.Extensions.AI</c> into the compaction pipeline: it gathers the currently
/// included messages from the <see cref="MessageIndex"/>, hands them to the reducer, and
/// rebuilds the index from the reduced list whenever the reducer returns fewer messages.
/// </para>
/// <para>
/// The <see cref="CompactionTrigger"/> controls when reduction is attempted.
/// <see cref="CompactionTriggers"/> offers common trigger conditions such as token or
/// message thresholds.
/// </para>
/// <para>
/// Prefer this strategy when an existing <see cref="IChatReducer"/> implementation
/// (such as <c>MessageCountingChatReducer</c>) should participate in a
/// <see cref="CompactionStrategy"/> pipeline or act as an in-run compaction strategy.
/// </para>
/// </remarks>
[Experimental(DiagnosticIds.Experiments.AgentsAIExperiments)]
public sealed class ChatReducerCompactionStrategy : CompactionStrategy
{
    /// <summary>
    /// Initializes a new instance of the <see cref="ChatReducerCompactionStrategy"/> class.
    /// </summary>
    /// <param name="chatReducer">
    /// The <see cref="IChatReducer"/> that performs the message reduction.
    /// </param>
    /// <param name="trigger">
    /// The <see cref="CompactionTrigger"/> that controls when compaction proceeds.
    /// </param>
    /// <param name="target">
    /// An optional target condition controlling when compaction stops. When <see langword="null"/>,
    /// this defaults to the inverse of <paramref name="trigger"/> — compaction stops once the
    /// trigger would no longer fire. Because the <see cref="IChatReducer"/> reduces in a single
    /// call, the target is not evaluated incrementally here; it exists for composition with
    /// other strategies via <see cref="PipelineCompactionStrategy"/>.
    /// </param>
    public ChatReducerCompactionStrategy(IChatReducer chatReducer, CompactionTrigger trigger, CompactionTrigger? target = null)
        : base(trigger, target)
    {
        this.ChatReducer = Throw.IfNull(chatReducer);
    }

    /// <summary>
    /// Gets the chat reducer used to reduce messages.
    /// </summary>
    public IChatReducer ChatReducer { get; }

    /// <inheritdoc/>
    protected override async Task<bool> ApplyCompactionAsync(MessageIndex index, CancellationToken cancellationToken)
    {
        // Snapshot the currently included messages. Empty conversations are already
        // screened out by the base CompactAsync, so no extra guard is needed here.
        List<ChatMessage> snapshot = [.. index.GetIncludedMessages()];

        IEnumerable<ChatMessage> result = await this.ChatReducer.ReduceAsync(snapshot, cancellationToken).ConfigureAwait(false);
        IList<ChatMessage> compacted = result as IList<ChatMessage> ?? [.. result];

        // Only commit when the reducer actually shrank the conversation; a same-size
        // (or larger) result leaves the index untouched and reports no change.
        if (compacted.Count < snapshot.Count)
        {
            // Re-index the reduced messages and swap the groups in place.
            MessageIndex reindexed = MessageIndex.Create(compacted, index.Tokenizer);
            index.Groups.Clear();
            foreach (MessageGroup group in reindexed.Groups)
            {
                index.Groups.Add(group);
            }

            return true;
        }

        return false;
    }
}
Loading
Loading