3 changes: 3 additions & 0 deletions src/Cellm/AddIn/CellmAddIn.cs
@@ -7,6 +7,7 @@
using Cellm.Models.Providers.Anthropic;
using Cellm.Models.Providers.Cellm;
using Cellm.Models.Providers.DeepSeek;
using Cellm.Models.Providers.Google;
using Cellm.Models.Providers.Mistral;
using Cellm.Models.Providers.Ollama;
using Cellm.Models.Providers.OpenAi;
@@ -68,6 +69,7 @@ private static ServiceCollection ConfigureServices(ServiceCollection services)
.Configure<AccountConfiguration>(configuration.GetRequiredSection(nameof(AccountConfiguration)))
.Configure<AnthropicConfiguration>(configuration.GetRequiredSection(nameof(AnthropicConfiguration)))
.Configure<CellmConfiguration>(configuration.GetRequiredSection(nameof(CellmConfiguration)))
.Configure<GoogleGeminiConfiguration>(configuration.GetRequiredSection(nameof(GoogleGeminiConfiguration)))
.Configure<DeepSeekConfiguration>(configuration.GetRequiredSection(nameof(DeepSeekConfiguration)))
.Configure<CellmAddInConfiguration>(configuration.GetRequiredSection(nameof(CellmAddInConfiguration)))
.Configure<MistralConfiguration>(configuration.GetRequiredSection(nameof(MistralConfiguration)))
@@ -133,6 +135,7 @@ private static ServiceCollection ConfigureServices(ServiceCollection services)
.AddAnthropicChatClient()
.AddCellmChatClient()
.AddDeepSeekChatClient()
.AddGoogleGeminiChatClient()
.AddMistralChatClient()
.AddOllamaChatClient()
.AddOpenAiChatClient()
1 change: 0 additions & 1 deletion src/Cellm/AddIn/CellmFunctions.cs
@@ -174,7 +174,6 @@ internal static async Task<object> GetResponseAsync(Arguments arguments, Stopwat
logger.LogInformation("Sending prompt to {}/{} ({}) ... Done (elapsed time: {}ms, request time: {}ms)", arguments.Provider, arguments.Model, callerCoordinates, wallClock.ElapsedMilliseconds, requestClock.ElapsedMilliseconds);

return assistantMessage;

}
// Short-circuit if any cells were found to be #GETTING_DATA or contain other errors during cell parsing.
// Excel will re-trigger this function (or already has) when inputs are updated with realized values.
11 changes: 6 additions & 5 deletions src/Cellm/AddIn/UserInterface/Ribbon/RibbonModelGroup.cs
@@ -57,11 +57,12 @@ private class ProviderItem
private readonly Dictionary<int, ProviderItem> _providerItems = new()
{
[0] = new ProviderItem { Id = $"{nameof(Provider)}.{nameof(Provider.Anthropic)}", Image = $"{ResourcesBasePath}/anthropic.png", Label = nameof(Provider.Anthropic), Entitlement = Entitlement.EnableAnthropicProvider },
[1] = new ProviderItem { Id = $"{nameof(Provider)}.{nameof(Provider.DeepSeek)}", Image = $"{ResourcesBasePath}/deepseek.png", Label = nameof(Provider.DeepSeek), Entitlement = Entitlement.EnableDeepSeekProvider },
[2] = new ProviderItem { Id = $"{nameof(Provider)}.{nameof(Provider.Mistral)}", Image = $"{ResourcesBasePath}/mistral.png", Label = nameof(Provider.Mistral), Entitlement = Entitlement.EnableMistralProvider },
[3] = new ProviderItem { Id = $"{nameof(Provider)}.{nameof(Provider.Ollama)}", Image = $"{ResourcesBasePath}/ollama.png", Label = nameof(Provider.Ollama), Entitlement = Entitlement.EnableOllamaProvider },
[4] = new ProviderItem { Id = $"{nameof(Provider)}.{nameof(Provider.OpenAi)}", Image = $"{ResourcesBasePath}/openai.png", Label = nameof(Provider.OpenAi), Entitlement = Entitlement.EnableOpenAiProvider },
[5] = new ProviderItem { Id = $"{nameof(Provider)}.{nameof(Provider.OpenAiCompatible)}", Image = $"{ResourcesBasePath}/openai.png", Label = nameof(Provider.OpenAiCompatible) }
[1] = new ProviderItem { Id = $"{nameof(Provider)}.{nameof(Provider.GoogleGemini)}", Image = $"{ResourcesBasePath}/google.png", Label = nameof(Provider.GoogleGemini), Entitlement = Entitlement.EnableGoogleGeminiProvider },
[2] = new ProviderItem { Id = $"{nameof(Provider)}.{nameof(Provider.DeepSeek)}", Image = $"{ResourcesBasePath}/deepseek.png", Label = nameof(Provider.DeepSeek), Entitlement = Entitlement.EnableDeepSeekProvider },
[3] = new ProviderItem { Id = $"{nameof(Provider)}.{nameof(Provider.Mistral)}", Image = $"{ResourcesBasePath}/mistral.png", Label = nameof(Provider.Mistral), Entitlement = Entitlement.EnableMistralProvider },
[4] = new ProviderItem { Id = $"{nameof(Provider)}.{nameof(Provider.Ollama)}", Image = $"{ResourcesBasePath}/ollama.png", Label = nameof(Provider.Ollama), Entitlement = Entitlement.EnableOllamaProvider },
[5] = new ProviderItem { Id = $"{nameof(Provider)}.{nameof(Provider.OpenAi)}", Image = $"{ResourcesBasePath}/openai.png", Label = nameof(Provider.OpenAi), Entitlement = Entitlement.EnableOpenAiProvider },
[6] = new ProviderItem { Id = $"{nameof(Provider)}.{nameof(Provider.OpenAiCompatible)}", Image = $"{ResourcesBasePath}/openai.png", Label = nameof(Provider.OpenAiCompatible) }
};

internal int _selectedProviderIndex = 4; // Default to Ollama (Ollama shifts to index 4 after the GoogleGemini entry is inserted)
16 changes: 16 additions & 0 deletions src/Cellm/Models/Providers/Google/GoogleGeminiConfiguration.cs
@@ -0,0 +1,16 @@
namespace Cellm.Models.Providers.Google;

internal class GoogleGeminiConfiguration
{
    public Uri BaseAddress => new("https://generativelanguage.googleapis.com/v1beta/openai/");

    public string DefaultModel { get; init; } = string.Empty;

    public string ApiKey { get; init; } = string.Empty;

    public string SmallModel { get; init; } = string.Empty;

    public string MediumModel { get; init; } = string.Empty;

    public string LargeModel { get; init; } = string.Empty;
}
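Note: this class follows the standard options pattern. The Configure&lt;GoogleGeminiConfiguration&gt;(configuration.GetRequiredSection(nameof(GoogleGeminiConfiguration))) call added in CellmAddIn.cs binds it to the appsettings.json section of the same name, and consumers read it back through IOptionsMonitor. The standalone sketch below is illustrative only; it assumes the stock Microsoft.Extensions.Configuration(.Json) and Microsoft.Extensions.Options packages and that the GoogleGeminiConfiguration type above is in scope.

using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;

// Bind the "GoogleGeminiConfiguration" section of appsettings.json to the class above.
var configuration = new ConfigurationBuilder()
    .AddJsonFile("appsettings.json")
    .Build();

var services = new ServiceCollection();
services.Configure<GoogleGeminiConfiguration>(
    configuration.GetRequiredSection(nameof(GoogleGeminiConfiguration)));

using var serviceProvider = services.BuildServiceProvider();

// IOptionsMonitor exposes the currently bound values.
var gemini = serviceProvider
    .GetRequiredService<IOptionsMonitor<GoogleGeminiConfiguration>>()
    .CurrentValue;

Console.WriteLine($"{gemini.DefaultModel} @ {gemini.BaseAddress}");

Because IOptionsMonitor is used rather than IOptions, updated values can also be observed at runtime whenever the underlying configuration source reloads.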
1 change: 1 addition & 0 deletions src/Cellm/Models/Providers/Provider.cs
@@ -5,6 +5,7 @@ public enum Provider
Anthropic,
Cellm,
DeepSeek,
GoogleGemini,
Mistral,
Ollama,
OpenAi,
27 changes: 27 additions & 0 deletions src/Cellm/Models/ServiceCollectionExtensions.cs
@@ -32,6 +32,7 @@
using Cellm.Models.Providers.Anthropic;
using Cellm.Models.Providers.Cellm;
using Cellm.Models.Providers.DeepSeek;
using Cellm.Models.Providers.Google;
using Cellm.Models.Providers.Mistral;
using Cellm.Models.Providers.Ollama;
using Cellm.Models.Providers.OpenAi;
@@ -211,6 +212,32 @@ public static IServiceCollection AddDeepSeekChatClient(this IServiceCollection s
    return services;
}

public static IServiceCollection AddGoogleGeminiChatClient(this IServiceCollection services)
{
    services
        .AddKeyedChatClient(Provider.GoogleGemini, serviceProvider =>
        {
            var account = serviceProvider.GetRequiredService<Account>();
            account.RequireEntitlement(Entitlement.EnableGoogleGeminiProvider);

            var googleGeminiConfiguration = serviceProvider.GetRequiredService<IOptionsMonitor<GoogleGeminiConfiguration>>();
            var resilientHttpClient = serviceProvider.GetKeyedService<HttpClient>("ResilientHttpClient") ?? throw new NullReferenceException("ResilientHttpClient");

            var openAiClient = new OpenAIClient(
                new ApiKeyCredential(googleGeminiConfiguration.CurrentValue.ApiKey),
                new OpenAIClientOptions
                {
                    Transport = new HttpClientPipelineTransport(resilientHttpClient),
                    Endpoint = googleGeminiConfiguration.CurrentValue.BaseAddress
                });

            return openAiClient.GetChatClient(googleGeminiConfiguration.CurrentValue.DefaultModel).AsIChatClient();
        }, ServiceLifetime.Transient)
        .UseFunctionInvocation();

    return services;
}

public static IServiceCollection AddMistralChatClient(this IServiceCollection services)
{
    services
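With this registration in place, the Gemini-backed client is looked up the same way as the other providers: by its Provider key. A minimal consumer sketch, with two assumptions called out explicitly — serviceProvider stands for the built container, and a recent Microsoft.Extensions.AI is used where the request method is named GetResponseAsync:

using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;

// Resolve the IChatClient that AddGoogleGeminiChatClient registered under Provider.GoogleGemini.
var chatClient = serviceProvider.GetRequiredKeyedService<IChatClient>(Provider.GoogleGemini);

// Send a trivial prompt; the request goes to Gemini's OpenAI-compatible endpoint configured above.
var response = await chatClient.GetResponseAsync(new List<ChatMessage>
{
    new(ChatRole.System, "You are a helpful assistant."),
    new(ChatRole.User, "Reply with the single word: pong."),
});

Console.WriteLine(response.Text);

The trailing UseFunctionInvocation() wraps the returned client so that tool calls requested by the model are dispatched automatically, presumably mirroring the other provider registrations in this file.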
3 changes: 2 additions & 1 deletion src/Cellm/Users/Entitlement.cs
@@ -5,11 +5,12 @@ public enum Entitlement
EnableAnthropicProvider,
EnableCellmProvider,
EnableDeepSeekProvider,
EnableGoogleGeminiProvider,
EnableMistralProvider,
EnableOllamaProvider,
EnableOpenAiProvider,
EnableOpenAiCompatibleProvider,
EnableModelContextProtocol,
DisableTelemetry
DisableTelemetry,
}

19 changes: 13 additions & 6 deletions src/Cellm/appsettings.json
@@ -6,7 +6,7 @@
},
"AnthropicConfiguration": {
"DefaultModel": "claude-3-7-sonnet-latest",
"ApiKey": "API_KEY",
"ApiKey": "",
"SmallModel": "claude-3-5-haiku-latest",
"MediumModel": "claude-3-7-sonnet-latest",
"LargeModel": "claude-3-opus-latest"
@@ -33,18 +33,25 @@
},
"DeepSeekConfiguration": {
"DefaultModel": "deepseek-chat",
"ApiKey": "API_KEY",
"ApiKey": "",
"MediumModel": "deepseek-chat",
"LargeModel": "deepseek-reasoner"
},
"GoogleGeminiConfiguration": {
"DefaultModel": "gemini-2.5-flash-preview-05-20",
"ApiKey": "",
"MediumModel": "gemini-2.5-flash-preview-05-20",
"LargeModel": "gemini-2.5-pro-preview-06-05"
},
"Logging": {
"LogLevel": {
"Default": "Information"
"Default": "Information",
"Microsoft.Extensions.Http.Logging.HttpClientLogger": "Warning"
}
},
"MistralConfiguration": {
"DefaultModel": "mistral-small-latest",
"ApiKey": "API_KEY",
"ApiKey": "",
"SmallModel": "mistral-small-latest",
"LargeModel": "mistral-large-latest"
},
@@ -61,15 +68,15 @@
},
"OpenAiConfiguration": {
"DefaultModel": "gpt-4.1-mini",
"ApiKey": "API_KEY",
"ApiKey": "",
"SmallModel": "gpt-4.1-mini",
"MediumModel": "gpt-4.1",
"LargeModel": "o4-mini"
},
"OpenAiCompatibleConfiguration": {
"BaseAddress": "https://api.openai.com/v1",
"DefaultModel": "gpt-4o-mini",
"ApiKey": "API_KEY"
"ApiKey": ""
},
"ModelContextProtocolConfiguration": {
"StdioServers": [
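Since the committed ApiKey values are now empty strings across providers, the new GoogleGeminiConfiguration section (like the others) needs its key supplied outside of source control. A hypothetical local override — the file name and override mechanism are assumptions, not part of this PR — only needs to repeat the fields being set:

{
  "GoogleGeminiConfiguration": {
    "ApiKey": "<your-gemini-api-key>",
    "DefaultModel": "gemini-2.5-flash-preview-05-20"
  }
}

Any model exposed through Google's OpenAI-compatible endpoint can be used for DefaultModel; the preview model names shown are simply the ones this PR ships in appsettings.json.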