Skip to content
Merged
2 changes: 1 addition & 1 deletion Examples/Examples/Chat/ChatCustomGrammarExample.cs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ public async Task Start()
await AIHub.Chat()
.WithModel<Gemma2_2b>()
.WithMessage("Generate random person")
.WithInferenceParams(new InferenceParams
.WithInferenceParams(new LocalInferenceParams
{
Grammar = personGrammar
})
Expand Down
9 changes: 9 additions & 0 deletions Examples/Examples/Chat/ChatExampleOpenAi.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
using Examples.Utils;
using MaIN.Core.Hub;
using MaIN.Domain.Configuration.BackendInferenceParams;
using MaIN.Domain.Models.Concrete;

namespace Examples.Chat;
Expand All @@ -15,6 +16,14 @@ public async Task Start()
await AIHub.Chat()
.WithModel<Gpt5Nano>()
.WithMessage("What do you consider to be the greatest invention in history?")
.WithInferenceParams(new OpenAiInferenceParams // We could override some inference params
{
ResponseFormat = "text",
AdditionalParams = new Dictionary<string, object>
{
["max_completion_tokens"] = 2137
}
})
.CompleteAsync(interactive: true);
}
}
2 changes: 1 addition & 1 deletion Examples/Examples/Chat/ChatGrammarExampleGemini.cs
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ public async Task Start()
await AIHub.Chat()
.WithModel<Gemini2_5Flash>()
.WithMessage("Generate random person")
.WithInferenceParams(new InferenceParams
.WithInferenceParams(new LocalInferenceParams
{
Grammar = new Grammar(grammarValue, GrammarFormat.JSONSchema)
})
Expand Down
319 changes: 319 additions & 0 deletions MaIN.Core.IntegrationTests/BackendParamsTests.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,319 @@
using MaIN.Core.Hub;
using MaIN.Domain.Configuration;
using MaIN.Domain.Entities;
using MaIN.Domain.Configuration.BackendInferenceParams;
using MaIN.Domain.Exceptions;
using MaIN.Domain.Models.Concrete;

namespace MaIN.Core.IntegrationTests;

/// <summary>
/// Integration tests verifying that each supported backend accepts its own
/// strongly-typed inference-parameter class, and that supplying parameters
/// belonging to a different backend fails fast with
/// <see cref="InvalidBackendParamsException"/>.
/// Live-call tests are <c>[SkippableFact]</c>s gated on the relevant API key
/// (or local model / Ollama availability); mismatch tests need no credentials.
/// </summary>
public class BackendParamsTests : IntegrationTestBase
{
    // A prompt with a single unambiguous answer ("4") so every backend's
    // reply can be validated with the same substring assertion.
    private const string TestQuestion = "What is 2+2? Answer with just the number.";

    // Location of the local GGUF model required by the self-hosted backend test.
    // NOTE(review): Windows-specific path — consider an env-var override if these
    // tests should run on other OSes.
    private const string LocalModelPath = "C:/Models/gemma2-2b.gguf";

    [SkippableFact]
    public async Task OpenAi_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.OpenAi)?.ApiKeyEnvName);

        var result = await AIHub.Chat()
            .WithModel<Gpt4oMini>()
            .WithMessage(TestQuestion)
            .WithInferenceParams(new OpenAiInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task Anthropic_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.Anthropic)?.ApiKeyEnvName);

        var result = await AIHub.Chat()
            .WithModel<ClaudeSonnet4>()
            .WithMessage(TestQuestion)
            .WithInferenceParams(new AnthropicInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task Gemini_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.Gemini)?.ApiKeyEnvName);

        var result = await AIHub.Chat()
            .WithModel<Gemini2_0Flash>()
            .WithMessage(TestQuestion)
            .WithInferenceParams(new GeminiInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task DeepSeek_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.DeepSeek)?.ApiKeyEnvName);

        var result = await AIHub.Chat()
            .WithModel<DeepSeekReasoner>()
            .WithMessage(TestQuestion)
            .WithInferenceParams(new DeepSeekInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task GroqCloud_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.GroqCloud)?.ApiKeyEnvName);

        var result = await AIHub.Chat()
            .WithModel<Llama3_1_8bInstant>()
            .WithMessage(TestQuestion)
            .WithInferenceParams(new GroqCloudInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task Xai_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.Xai)?.ApiKeyEnvName);

        var result = await AIHub.Chat()
            .WithModel<Grok3Beta>()
            .WithMessage(TestQuestion)
            .WithInferenceParams(new XaiInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task Self_Should_RespondWithParams()
    {
        Skip.If(!File.Exists(LocalModelPath), $"Local model not found at {LocalModelPath}");

        var result = await AIHub.Chat()
            .WithModel<Gemma2_2b>()
            .WithMessage(TestQuestion)
            .WithInferenceParams(new LocalInferenceParams
            {
                Temperature = 0.3f,
                ContextSize = 8192,
                MaxTokens = 100,
                TopK = 40,
                TopP = 0.9f
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    [SkippableFact]
    public async Task LocalOllama_Should_RespondWithParams()
    {
        SkipIfOllamaNotRunning();

        var result = await AIHub.Chat()
            .WithModel<OllamaGemma3_4b>()
            .WithMessage(TestQuestion)
            .WithInferenceParams(new OllamaInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopK = 40,
                TopP = 0.9f,
                NumCtx = 2048
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    // Cloud-hosted Ollama variant: same params as the local test, but gated on
    // the Ollama API key instead of a locally running server.
    // (Renamed from "ClaudOllama..." — typo.)
    [SkippableFact]
    public async Task CloudOllama_Should_RespondWithParams()
    {
        SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.Ollama)?.ApiKeyEnvName);

        var result = await AIHub.Chat()
            .WithModel<OllamaGemma3_4b>()
            .WithMessage(TestQuestion)
            .WithInferenceParams(new OllamaInferenceParams
            {
                Temperature = 0.3f,
                MaxTokens = 100,
                TopK = 40,
                TopP = 0.9f,
                NumCtx = 2048
            })
            .CompleteAsync();

        Assert.True(result.Done);
        Assert.NotNull(result.Message);
        Assert.NotEmpty(result.Message.Content);
        Assert.Contains("4", result.Message.Content);
    }

    // --- Params mismatch validation (no API key required) ---

    [Fact]
    public async Task Self_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel<Gemma2_2b>()
                .WithMessage(TestQuestion)
                .WithInferenceParams(new OpenAiInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task OpenAi_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel<Gpt4oMini>()
                .WithMessage(TestQuestion)
                .WithInferenceParams(new DeepSeekInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task Anthropic_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel<ClaudeSonnet4>()
                .WithMessage(TestQuestion)
                .WithInferenceParams(new OpenAiInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task Gemini_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel<Gemini2_0Flash>()
                .WithMessage(TestQuestion)
                .WithInferenceParams(new AnthropicInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task DeepSeek_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel<DeepSeekReasoner>()
                .WithMessage(TestQuestion)
                .WithInferenceParams(new GeminiInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task GroqCloud_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel<Llama3_1_8bInstant>()
                .WithMessage(TestQuestion)
                .WithInferenceParams(new OpenAiInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task Xai_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel<Grok3Beta>()
                .WithMessage(TestQuestion)
                .WithInferenceParams(new AnthropicInferenceParams())
                .CompleteAsync());
    }

    [Fact]
    public async Task Ollama_Should_ThrowWhenGivenWrongParams()
    {
        await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
            AIHub.Chat()
                .WithModel<OllamaGemma3_4b>()
                .WithMessage(TestQuestion)
                .WithInferenceParams(new DeepSeekInferenceParams())
                .CompleteAsync());
    }

    /// <summary>
    /// Skips the test when the backend's API-key environment variable is unset.
    /// Accepts <c>null</c> so callers can pass <c>GetEntry(...)?.ApiKeyEnvName</c>
    /// directly: a backend with no registry entry now skips cleanly instead of
    /// throwing <see cref="ArgumentNullException"/> from
    /// <see cref="Environment.GetEnvironmentVariable(string)"/>.
    /// </summary>
    private static void SkipIfMissingKey(string? envName)
    {
        Skip.If(string.IsNullOrEmpty(envName),
            "API key environment variable name is not registered for this backend");
        Skip.If(string.IsNullOrEmpty(Environment.GetEnvironmentVariable(envName)),
            $"{envName} environment variable not set");
    }

    /// <summary>Skips the test when no Ollama server answers on localhost:11434.</summary>
    private static void SkipIfOllamaNotRunning()
    {
        Skip.If(!Helpers.NetworkHelper.PingHost("127.0.0.1", 11434, 3),
            "Ollama is not running on localhost:11434");
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
<PackageReference Include="Xunit.SkippableFact" Version="1.5.61" />
</ItemGroup>

<ItemGroup>
Expand Down
4 changes: 2 additions & 2 deletions src/MaIN.Core.UnitTests/AgentContextTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -137,7 +137,7 @@ public async Task CreateAsync_ShouldCallAgentServiceCreateAgent()
It.IsAny<Agent>(),
It.IsAny<bool>(),
It.IsAny<bool>(),
It.IsAny<InferenceParams>(),
It.IsAny<IBackendInferenceParams>(),
It.IsAny<MemoryParams>(),
It.IsAny<bool>()))
.ReturnsAsync(agent);
Expand All @@ -151,7 +151,7 @@ public async Task CreateAsync_ShouldCallAgentServiceCreateAgent()
It.IsAny<Agent>(),
It.Is<bool>(f => f == true),
It.Is<bool>(r => r == false),
It.IsAny<InferenceParams>(),
It.IsAny<IBackendInferenceParams>(),
It.IsAny<MemoryParams>(),
It.IsAny<bool>()),
Times.Once);
Expand Down
Loading
Loading