Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Examples/Examples/Chat/ChatExampleGroqCloud.cs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ public async Task Start()
Console.WriteLine("(GroqCloud) ChatExample is running!");

await AIHub.Chat()
.WithModel(Models.Groq.Llama3_1_8bInstant)
.WithModel(Models.Groq.Llama3_1_8b)
.WithMessage("Which color do people like the most?")
.CompleteAsync(interactive: true);
}
Expand Down
24 changes: 24 additions & 0 deletions Examples/Examples/Chat/ChatExampleVertex.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
using Examples.Utils;
using MaIN.Core.Hub;
using MaIN.Domain.Configuration.BackendInferenceParams;
using MaIN.Domain.Models;

namespace Examples.Chat;

/// <summary>
/// Example chat session running against the Google Vertex AI backend.
/// Requires Google service-account credentials (see <see cref="Examples.Utils.VertexExample"/>).
/// </summary>
public class ChatExampleVertex : IExample
{
    public async Task Start()
    {
        // Vertex AI needs service-account credentials configured before any call.
        VertexExample.Setup();
        Console.WriteLine("(Vertex AI) ChatExample is running!");

        // Region override: run inference against the europe-central2 endpoint.
        var inferenceParams = new VertexInferenceParams
        {
            Location = "europe-central2"
        };

        var chat = AIHub.Chat()
            .WithModel(Models.Vertex.Gemini2_5Pro)
            .WithMessage("Is the killer whale the smartest animal?")
            .WithInferenceParams(inferenceParams);

        await chat.CompleteAsync(interactive: true);
    }
}
2 changes: 2 additions & 0 deletions Examples/Examples/Program.cs
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ static void RegisterExamples(IServiceCollection services)
services.AddTransient<ChatGrammarExampleGemini>();
services.AddTransient<ChatWithImageGenGeminiExample>();
services.AddTransient<ChatWithFilesExampleGemini>();
services.AddTransient<ChatExampleVertex>();
services.AddTransient<ChatWithReasoningDeepSeekExample>();
services.AddTransient<ChatWithTextToSpeechExample>();
services.AddTransient<ChatExampleGroqCloud>();
Expand Down Expand Up @@ -186,6 +187,7 @@ public class ExampleRegistry(IServiceProvider serviceProvider)
("\u25a0 Gemini Chat with grammar", serviceProvider.GetRequiredService<ChatGrammarExampleGemini>()),
("\u25a0 Gemini Chat with image", serviceProvider.GetRequiredService<ChatWithImageGenGeminiExample>()),
("\u25a0 Gemini Chat with files", serviceProvider.GetRequiredService<ChatWithFilesExampleGemini>()),
("\u25a0 Vertex Chat", serviceProvider.GetRequiredService<ChatExampleVertex>()),
("\u25a0 DeepSeek Chat with reasoning", serviceProvider.GetRequiredService<ChatWithReasoningDeepSeekExample>()),
("\u25a0 GroqCloud Chat", serviceProvider.GetRequiredService<ChatExampleGroqCloud>()),
("\u25a0 Anthropic Chat", serviceProvider.GetRequiredService<ChatExampleAnthropic>()),
Expand Down
22 changes: 22 additions & 0 deletions Examples/Examples/Utils/VertexExample.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
using MaIN.Core;
using MaIN.Domain.Configuration;
using MaIN.Domain.Configuration.Vertex;

namespace Examples.Utils;

/// <summary>
/// One-time bootstrap helper that points MaIN at the Vertex AI backend
/// using Google service-account credentials.
/// </summary>
public class VertexExample
{
    public static void Setup()
    {
        // NOTE(review): placeholders only — never commit real service-account keys to source control.
        var serviceAccount = new GoogleServiceAccountConfig
        {
            ProjectId = "<YOUR_GCP_PROJECT_ID>",
            ClientEmail = "<YOUR_SERVICE_ACCOUNT_EMAIL>",
            PrivateKey = @"<YOUR_PRIVATE_KEY>"
        };

        MaINBootstrapper.Initialize(configureSettings: options =>
        {
            options.BackendType = BackendType.Vertex;
            options.GoogleServiceAccountAuth = serviceAccount;
        });
    }
}
4 changes: 2 additions & 2 deletions MaIN.Core.E2ETests/BackendParamsTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,7 @@ public async Task GroqCloud_Should_RespondWithParams()
SkipIfMissingKey(LLMApiRegistry.GetEntry(BackendType.GroqCloud)?.ApiKeyEnvName!);

var result = await AIHub.Chat()
.WithModel(Models.Groq.Llama3_1_8bInstant)
.WithModel(Models.Groq.Llama3_1_8b)
.WithMessage(TestQuestion)
.WithInferenceParams(new GroqCloudInferenceParams
{
Expand Down Expand Up @@ -279,7 +279,7 @@ public async Task GroqCloud_Should_ThrowWhenGivenWrongParams()
{
await Assert.ThrowsAsync<InvalidBackendParamsException>(() =>
AIHub.Chat()
.WithModel(Models.Groq.Llama3_1_8bInstant)
.WithModel(Models.Groq.Llama3_1_8b)
.WithMessage(TestQuestion)
.WithInferenceParams(new OpenAiInferenceParams())
.CompleteAsync());
Expand Down
3 changes: 3 additions & 0 deletions Releases/0.10.4.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# 0.10.4 release

Adds Google Vertex AI as a backend with authentication, MCP support, and new models including image generation, along with UI configuration and example usage.
2 changes: 1 addition & 1 deletion src/MaIN.Core/.nuspec
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
<package>
<metadata>
<id>MaIN.NET</id>
<version>0.10.3</version>
<version>0.10.4</version>
<authors>Wisedev</authors>
<owners>Wisedev</owners>
<icon>favicon.png</icon>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ public static class BackendParamsFactory
BackendType.Gemini => new GeminiInferenceParams(),
BackendType.Anthropic => new AnthropicInferenceParams(),
BackendType.Ollama => new OllamaInferenceParams(),
BackendType.Vertex => new VertexInferenceParams(),
_ => new LocalInferenceParams()
};
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
using MaIN.Domain.Entities;
using Grammar = MaIN.Domain.Models.Grammar;

namespace MaIN.Domain.Configuration.BackendInferenceParams;

/// <summary>
/// Inference parameters accepted by the Google Vertex AI backend.
/// </summary>
public class VertexInferenceParams : IBackendInferenceParams
{
    // Discriminator used by the backend-params dispatch (see BackendParamsFactory).
    public BackendType Backend => BackendType.Vertex;

    // GCP region the Vertex endpoint is called in; defaults to Google's primary region.
    public string Location { get; init; } = "us-central1";

    // Standard sampling knobs; null means "use the backend's default".
    public float? Temperature { get; init; }
    public int? MaxTokens { get; init; }
    public float? TopP { get; init; }
    public string[]? StopSequences { get; init; }
    // NOTE(review): mutable (set) unlike the other init-only members — confirm this is intentional
    // (presumably the framework assigns the grammar after construction).
    public Grammar? Grammar { get; set; }
    // Escape hatch for backend options not modeled above.
    public Dictionary<string, object>? AdditionalParams { get; init; }
}
3 changes: 3 additions & 0 deletions src/MaIN.Domain/Configuration/MaINSettings.cs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
using MaIN.Domain.Configuration.Vertex;

namespace MaIN.Domain.Configuration;

Expand All @@ -18,6 +19,7 @@ public class MaINSettings
public SqliteSettings? SqliteSettings { get; set; }
public SqlSettings? SqlSettings { get; set; }
public string? VoicesPath { get; set; }
public GoogleServiceAccountConfig? GoogleServiceAccountAuth { get; set; }
}

public enum BackendType
Expand All @@ -30,4 +32,5 @@ public enum BackendType
Anthropic = 5,
Xai = 6,
Ollama = 7,
Vertex = 8,
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
namespace MaIN.Domain.Configuration.Vertex;

/// <summary>
/// Google service-account credentials used to authenticate against Vertex AI.
/// Values come from the service-account JSON key downloaded from GCP.
/// </summary>
public class GoogleServiceAccountConfig
{
    public required string ProjectId { get; init; }
    public required string ClientEmail { get; init; }
    // Private key of the service account — format assumed PEM as in the JSON key file; confirm.
    public required string PrivateKey { get; init; }
    // OAuth2 token endpoint used to exchange credentials for an access token.
    public string TokenUri { get; init; } = "https://oauth2.googleapis.com/token";
}
1 change: 1 addition & 0 deletions src/MaIN.Domain/Entities/Mcp.cs
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ public class Mcp
public required List<string> Arguments { get; init; }
public required string Command { get; init; }
public required string Model { get; init; }
public string Location { get; set; } = "us-central1";
public Dictionary<string, string> Properties { get; set; } = [];
public BackendType? Backend { get; set; }
public Dictionary<string, string> EnvironmentVariables { get; set; } = [];
Expand Down
149 changes: 147 additions & 2 deletions src/MaIN.Domain/Models/Concrete/CloudModels.cs
Original file line number Diff line number Diff line change
Expand Up @@ -93,8 +93,112 @@ public sealed record Gemini2_0Flash() : CloudModel(
public string? MMProjectName => null;
}

/// <summary>Gemini 2.5 Pro served via the Gemini API backend (1M-token context).</summary>
public sealed record Gemini2_5Pro() : CloudModel(
    Models.Gemini.Gemini2_5Pro,
    BackendType.Gemini,
    "Gemini 2.5 Pro",
    1000000,
    "Google's most capable Gemini model"), IVisionModel
{
    // No separate multimodal-projector file for a cloud-hosted model.
    public string? MMProjectName => null;
}

/// <summary>Imagen 4.0 Fast image generation exposed through the Gemini API backend.</summary>
// Fix: the declaration was interleaved with non-code review-page residue; emitted clean.
public sealed record GeminiImagen4_0FastGenerate() : CloudModel(
    Models.Gemini.Imagen4_0_FastGenerate,
    BackendType.Gemini,
    "Imagen 4.0 Fast (Gemini)",
    4000,
    "Google's fast image generation model via Gemini API"), IImageGenerationModel;

/// <summary>Gemini 2.5 Flash Image ("NanoBanana") image generation via the Gemini API.</summary>
public sealed record GeminiNanoBanana() : CloudModel(
    Models.Gemini.NanoBanana,
    BackendType.Gemini,
    "Gemini 2.5 Flash Image (NanoBanana)",
    130000,
    "Google’s high-speed, high-fidelity image generation via Gemini API."), IImageGenerationModel;

// ===== Vertex AI Models =====

/// <summary>Gemini 2.5 Pro served via the Vertex AI backend (1M-token context).</summary>
// Fix: description was a copy-paste of the Flash model's text ("Fast and efficient");
// Pro is the capability-tier model, matching Gemini2_5Pro's description above.
public sealed record VertexGemini2_5Pro() : CloudModel(
    Models.Vertex.Gemini2_5Pro,
    BackendType.Vertex,
    "Gemini 2.5 Pro (Vertex)",
    1000000,
    "Google's most capable Gemini model served via Vertex AI"), IVisionModel
{
    // No separate multimodal-projector file for a cloud-hosted model.
    public string? MMProjectName => null;
}

/// <summary>Gemini 2.5 Flash served via the Vertex AI backend (1M-token context).</summary>
public sealed record VertexGemini2_5Flash() : CloudModel(
    Models.Vertex.Gemini2_5Flash,
    BackendType.Vertex,
    "Gemini 2.5 Flash (Vertex)",
    1000000,
    "Fast and efficient Gemini model served via Vertex AI"), IVisionModel
{
    // No separate multimodal-projector file for a cloud-hosted model.
    public string? MMProjectName => null;
}

/// <summary>
/// Veo 2.0 generation via Vertex AI.
/// NOTE(review): the description says "video generation" but the record implements
/// IImageGenerationModel — confirm the framework reuses that interface for video output.
/// </summary>
public sealed record VertexVeo2_0Generate() : CloudModel(
    Models.Vertex.Veo2_0_Generate,
    BackendType.Vertex,
    "Veo 2.0 Generate",
    4000,
    "Google's video generation model available through Vertex AI"), IImageGenerationModel;

/// <summary>Imagen 4.0 image generation served via the Vertex AI backend.</summary>
public sealed record VertexImagen4_0Generate() : CloudModel(
    Models.Vertex.Imagen4_0_Generate,
    BackendType.Vertex,
    "Imagen 4.0 (Vertex)",
    4000,
    "Google's latest image generation model available through Vertex AI"), IImageGenerationModel;

// ===== xAI Models =====

/// <summary>xAI Grok 4.20 with reasoning enabled (2M-token context, vision-capable).</summary>
public sealed record Grok4_20Reasoning() : CloudModel(
    Models.Xai.Grok4_20Reasoning,
    BackendType.Xai,
    "Grok 4.20 reasoning",
    2_000_000,
    "A xai flagship model, offering fast, agentic tool use with low hallucination and strong prompt adherence for precise, reliable responses."), IVisionModel, IReasoningModel
{
    public string? MMProjectName => null;
    // Nulls: no custom reasoning-token handler or extra prompt — presumably framework defaults apply; confirm.
    public Func<string, ThinkingState, LLMTokenValue>? ReasonFunction => null;
    public string? AdditionalPrompt => null;
}

/// <summary>xAI Grok 4.20 without reasoning (2M-token context, vision-capable).</summary>
public sealed record Grok4_20NonReasoning() : CloudModel(
    Models.Xai.Grok4_20NonReasoning,
    BackendType.Xai,
    "Grok 4.20 non reasoning",
    2_000_000,
    "A xai flagship model, offering fast, agentic tool use with low hallucination and strong prompt adherence for precise, reliable responses."), IVisionModel
{
    public string? MMProjectName => null;
}

/// <summary>xAI Grok 4.1 Fast with reasoning enabled (2M-token context, vision-capable).</summary>
public sealed record Grok4_1FastReasoning() : CloudModel(
    Models.Xai.Grok4_1FastReasoning,
    BackendType.Xai,
    "Grok 4.1 fast reasoning",
    2_000_000,
    "A xai multimodal model optimized specifically for high-performance agentic tool calling"), IVisionModel, IReasoningModel
{
    public string? MMProjectName => null;
    // Nulls: no custom reasoning-token handler or extra prompt — presumably framework defaults apply; confirm.
    public Func<string, ThinkingState, LLMTokenValue>? ReasonFunction => null;
    public string? AdditionalPrompt => null;
}

/// <summary>xAI Grok 4.1 Fast without reasoning (2M-token context, vision-capable).</summary>
public sealed record Grok4_1Fast() : CloudModel(
    Models.Xai.Grok4_1FastNonReasoning,
    BackendType.Xai,
    "Grok 4.1 fast",
    2_000_000,
    "A xai multimodal model optimized specifically for high-performance agentic tool calling"), IVisionModel
{
    public string? MMProjectName => null;
}

public sealed record Grok3Beta() : CloudModel(
Models.Xai.Grok3Beta,
BackendType.Xai,
Expand All @@ -112,35 +216,76 @@ public sealed record GrokImage() : CloudModel(
4000,
"xAI image generation model"), IImageGenerationModel;

/// <summary>xAI Grok Imagine image generation model (also vision-capable).</summary>
public sealed record GrokImagineImage() : CloudModel(
    Models.Xai.GrokImagineImage,
    BackendType.Xai,
    "Grok Imagine Image",
    4000,
    "xAI image generation model"), IImageGenerationModel, IVisionModel
{
    public string? MMProjectName => null;
}

/// <summary>xAI Grok Imagine Pro image generation model (also vision-capable).</summary>
public sealed record GrokImagineImagePro() : CloudModel(
    Models.Xai.GrokImagineImagePro,
    BackendType.Xai,
    "Grok Imagine Image Pro",
    4000,
    "xAI image generation model"), IImageGenerationModel, IVisionModel
{
    public string? MMProjectName => null;
}

// ===== GroqCloud Models =====

/// <summary>Meta Llama 3.1 8B "Instant" hosted on GroqCloud.</summary>
// Fix: the span contained both the pre- and post-change model-constant lines
// (diff residue); resolved to Models.Groq.Llama3_1_8b, consistent with the
// other call sites updated in this change.
public sealed record Llama3_1_8bInstant() : CloudModel(
    Models.Groq.Llama3_1_8b,
    BackendType.GroqCloud,
    "Llama 3.1 8B Instant",
    8192,
    "Meta Llama 3.1 8B model optimized for fast inference on Groq hardware");

/// <summary>Meta Llama 3.3 70B "Versatile" hosted on GroqCloud (130K context).</summary>
public sealed record Llama3_3_70bVersatile() : CloudModel(
    Models.Groq.Llama3_3_70b,
    BackendType.GroqCloud,
    "Llama 3.3 70B Versatile",
    130_000,
    "Meta's efficient, high-performance multilingual language model");

/// <summary>Open-weight GPT-OSS 20B hosted on GroqCloud.</summary>
public sealed record GptOss20b() : CloudModel(
    Models.Groq.GptOss20b,
    BackendType.GroqCloud,
    "GPT OSS 20B",
    8192,
    "Open-source 20B parameter GPT model running on Groq infrastructure");

/// <summary>Open-weight GPT-OSS 120B hosted on GroqCloud (130K context).</summary>
public sealed record GptOss120b() : CloudModel(
    Models.Groq.GptOss120b,
    BackendType.GroqCloud,
    "GPT OSS 120B",
    130_000,
    "Open-source 120B parameter GPT model running on Groq infrastructure");

// ===== DeepSeek Models =====

/// <summary>DeepSeek reasoning-focused chat model (128K context).</summary>
// Fix: the span contained both the old context-window value (64000) and the
// new one (128_000) as diff residue; resolved to 128_000, matching DeepSeekChat.
public sealed record DeepSeekReasoner() : CloudModel(
    Models.DeepSeek.Reasoner,
    BackendType.DeepSeek,
    "DeepSeek Reasoner",
    128_000,
    "DeepSeek reasoning-focused model for complex problem solving"), IReasoningModel
{
    // Nulls: no custom reasoning-token handler or extra prompt — presumably framework defaults apply; confirm.
    public Func<string, ThinkingState, LLMTokenValue>? ReasonFunction => null;
    public string? AdditionalPrompt => null;
}

/// <summary>DeepSeek general-purpose chat model (128K context).</summary>
public sealed record DeepSeekChat() : CloudModel(
    Models.DeepSeek.Chat,
    BackendType.DeepSeek,
    "DeepSeek Chat",
    128_000,
    "DeepSeek model for complex problem solving");

// ===== Ollama Models =====

public sealed record OllamaGemma3_4b() : CloudModel(
Expand Down
2 changes: 2 additions & 0 deletions src/MaIN.Domain/Models/Concrete/LLMApiRegistry.cs
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ public static class LLMApiRegistry
public static readonly LLMApiRegistryEntry Anthropic = new("Anthropic", "ANTHROPIC_API_KEY");
public static readonly LLMApiRegistryEntry Xai = new("Xai", "XAI_API_KEY");
public static readonly LLMApiRegistryEntry Ollama = new("Ollama", "OLLAMA_API_KEY");
public static readonly LLMApiRegistryEntry Vertex = new("Vertex", "GOOGLE_APPLICATION_CREDENTIALS");

public static LLMApiRegistryEntry? GetEntry(BackendType backendType) => backendType switch
{
Expand All @@ -21,6 +22,7 @@ public static class LLMApiRegistry
BackendType.Anthropic => Anthropic,
BackendType.Xai => Xai,
BackendType.Ollama => Ollama,
BackendType.Vertex => Vertex,
_ => null
};
}
Expand Down
10 changes: 10 additions & 0 deletions src/MaIN.Domain/Models/Concrete/LocalModels.cs
Original file line number Diff line number Diff line change
Expand Up @@ -294,6 +294,16 @@ public sealed record Olmo2_7b() : LocalModel(
8192,
"Open-source 7B model for research, benchmarking, and academic studies");

// ===== Image Generation =====

/// <summary>
/// Local FLUX.1 Schnell image-generation model.
/// NOTE(review): "Shnell" in the identifier and file name looks like a misspelling of
/// "Schnell" (the display name spells it correctly) — renaming would break callers
/// and the on-disk model name, so flagged only.
/// </summary>
public sealed record Flux1Shnell() : LocalModel(
    Models.Local.Flux1Shnell,
    "FLUX.1_Shnell",
    null,
    "FLUX.1 Schnell",
    4096,
    "Fast local image generation model"), IImageGenerationModel;

// ===== Embedding Model =====

public sealed record Mxbai_Embedding() : LocalModel(
Expand Down
Loading
Loading