Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 7 additions & 3 deletions src/Cellm.Models/Behaviors/SentryBehavior.cs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
using Cellm.AddIn;
using Cellm.Models.Providers;
using Cellm.User;
using MediatR;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Logging;
Expand All @@ -9,18 +10,21 @@ namespace Cellm.Models.Behaviors;

internal class SentryBehavior<TRequest, TResponse>(
IOptionsMonitor<ProviderConfiguration> providerConfiguration,
Account account,
ILogger<SentryBehavior<TRequest, TResponse>> logger) : IPipelineBehavior<TRequest, TResponse>
where TRequest : IModelRequest<TResponse>
{
public async Task<TResponse> Handle(TRequest request, RequestHandlerDelegate<TResponse> next, CancellationToken cancellationToken)
{
if (!SentrySdk.IsEnabled)
var disableTelemetry = await account.HasEntitlementAsync(Entitlement.DisableTelemetry);

if (!SentrySdk.IsEnabled || disableTelemetry)
{
logger.LogDebug("Sentry disabled");
logger.LogDebug("Telemetry disabled");
return await next();
}

logger.LogDebug("Sentry enabled");
logger.LogDebug("Telemetry enabled");

var transaction = SentrySdk.StartTransaction($"{nameof(Cellm)}.{nameof(Models)}.{nameof(Client)}", typeof(TRequest).Name);

Expand Down
4 changes: 4 additions & 0 deletions src/Cellm.Models/Cellm.Models.projitems
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,10 @@
<Compile Include="$(MSBuildThisFileDirectory)Providers\Anthropic\AnthropicRequest.cs" />
<Compile Include="$(MSBuildThisFileDirectory)Providers\Anthropic\AnthropicRequestHandler.cs" />
<Compile Include="$(MSBuildThisFileDirectory)Providers\Anthropic\AnthropicResponse.cs" />
<Compile Include="$(MSBuildThisFileDirectory)Providers\Cellm\CellmConfiguration.cs" />
<Compile Include="$(MSBuildThisFileDirectory)Providers\Cellm\CellmRequest.cs" />
<Compile Include="$(MSBuildThisFileDirectory)Providers\Cellm\CellmRequestHandler.cs" />
<Compile Include="$(MSBuildThisFileDirectory)Providers\Cellm\CellmResponse.cs" />
<Compile Include="$(MSBuildThisFileDirectory)Providers\DeepSeek\DeepSeekConfiguration.cs" />
<Compile Include="$(MSBuildThisFileDirectory)Providers\IModelRequest.cs" />
<Compile Include="$(MSBuildThisFileDirectory)Providers\IModelRequestHandler.cs" />
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@

internal class AnthropicConfiguration : IProviderConfiguration
{
public Uri BaseAddress { get; init; }

public string DefaultModel { get; init; }

public string Version { get; init; }
Expand All @@ -14,7 +12,6 @@ internal class AnthropicConfiguration : IProviderConfiguration

public AnthropicConfiguration()
{
BaseAddress = default!;
DefaultModel = default!;
Version = default!;
ApiKey = default!;
Expand Down
10 changes: 10 additions & 0 deletions src/Cellm.Models/Providers/Cellm/CellmConfiguration.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
namespace Cellm.Models.Providers.Cellm;

/// <summary>
/// Provider configuration for the hosted Cellm API.
/// </summary>
internal class CellmConfiguration : IProviderConfiguration
{
    /// <summary>Hard-coded endpoint of the hosted Cellm API (a new <see cref="Uri"/> per access).</summary>
    public Uri BaseAddress => new Uri("https://api.getcellm.com/v1/");

    /// <summary>Model used when a request does not name one explicitly.</summary>
    public string DefaultModel { get; init; } = string.Empty;

    /// <summary>Model identifiers offered by this provider.</summary>
    public List<string> Models { get; init; } = new List<string>();
}
7 changes: 7 additions & 0 deletions src/Cellm.Models/Providers/Cellm/CellmRequest.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
using Cellm.Models.Prompts;

namespace Cellm.Models.Providers.Cellm;

/// <summary>
/// Request carrying a <see cref="Prompt"/> to the Cellm provider's handler.
/// </summary>
/// <param name="Prompt">The prompt (messages and options) to send to the model.</param>
internal record CellmRequest(Prompt Prompt) : IModelRequest<CellmResponse>;
24 changes: 24 additions & 0 deletions src/Cellm.Models/Providers/Cellm/CellmRequestHandler.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
using Cellm.Models.Prompts;
using Cellm.User;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;

namespace Cellm.Models.Providers.Cellm;

/// <summary>
/// Handles <see cref="CellmRequest"/>s by forwarding the prompt to the keyed
/// Cellm chat client and returning the model's reply.
/// </summary>
internal class CellmRequestHandler(Account account, [FromKeyedServices(Provider.Cellm)] IChatClient chatClient)
    : IModelRequestHandler<CellmRequest, CellmResponse>
{
    /// <summary>
    /// Sends the request's prompt to the Cellm provider and returns the updated
    /// conversation together with the raw chat response.
    /// </summary>
    /// <param name="request">The request holding the prompt to complete.</param>
    /// <param name="cancellationToken">Token used to cancel the chat call.</param>
    public async Task<CellmResponse> Handle(CellmRequest request, CancellationToken cancellationToken)
    {
        // Gate the hosted provider behind the required entitlement before any model call.
        await account.RequireEntitlementAsync(Entitlement.EnableCellmProvider);

        var incomingPrompt = request.Prompt;

        var chatResponse = await chatClient.GetResponseAsync(incomingPrompt.Messages, incomingPrompt.Options, cancellationToken);

        // Append the model's reply messages so callers see the full exchange.
        var updatedPrompt = new PromptBuilder(incomingPrompt)
            .AddMessages(chatResponse.Messages)
            .Build();

        return new CellmResponse(updatedPrompt, chatResponse);
    }
}
6 changes: 6 additions & 0 deletions src/Cellm.Models/Providers/Cellm/CellmResponse.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
using Cellm.Models.Prompts;
using Microsoft.Extensions.AI;

namespace Cellm.Models.Providers.Cellm;

internal record CellmResponse(Prompt Prompt, ChatResponse ChatResponse) : IModelResponse;
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ namespace Cellm.Models.Providers.DeepSeek;

internal class DeepSeekConfiguration : IProviderConfiguration
{
public Uri BaseAddress { get; init; } = default!;
public Uri BaseAddress => new("https://api.deepseek.com/v1/");

public string DefaultModel { get; init; } = string.Empty;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

internal class LlamafileConfiguration : IProviderConfiguration
{
public Uri BaseAddress { get; init; } = default!;
public Uri BaseAddress => new("http://127.0.0.1:8080/v1/");

public string DefaultModel { get; init; } = string.Empty;

Expand Down
2 changes: 1 addition & 1 deletion src/Cellm.Models/Providers/Mistral/MistralConfiguration.cs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ namespace Cellm.Models.Providers.Mistral;

internal class MistralConfiguration : IProviderConfiguration
{
public Uri BaseAddress { get; init; } = default!;
public Uri BaseAddress => new("https://api.mistral.ai/v1/");

public string DefaultModel { get; init; } = string.Empty;

Expand Down
6 changes: 3 additions & 3 deletions src/Cellm.Models/Providers/Ollama/OllamaConfiguration.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,11 @@

internal class OllamaConfiguration : IProviderConfiguration
{
public Uri ZipUrl { get; init; } = default!;

public Uri BaseAddress { get; init; } = default!;
public Uri BaseAddress => new("http://127.0.0.1:11434/");

public string DefaultModel { get; init; } = string.Empty;

public List<string> Models { get; init; } = [];

public int MaxInputTokens { get; init; } = 16364;
}
13 changes: 12 additions & 1 deletion src/Cellm.Models/Providers/Ollama/OllamaRequestHandler.cs
Original file line number Diff line number Diff line change
@@ -1,15 +1,16 @@
using System.Text;
using System.Text.Json;
using Cellm.Models.Prompts;
using Cellm.User;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;

namespace Cellm.Models.Providers.Ollama;

internal class OllamaRequestHandler(
IOptionsMonitor<OllamaConfiguration> ollamaConfiguration,
[FromKeyedServices(Provider.Ollama)] IChatClient chatClient,
IOptionsMonitor<OllamaConfiguration> ollamaConfiguration,
HttpClient httpClient) : IModelRequestHandler<OllamaRequest, OllamaResponse>
{
public async Task<OllamaResponse> Handle(OllamaRequest request, CancellationToken cancellationToken)
Expand All @@ -28,6 +29,16 @@ public async Task<OllamaResponse> Handle(OllamaRequest request, CancellationToke
response.EnsureSuccessStatusCode();
}

var metadata = request.Prompt.Options.AdditionalProperties;

if (request.Prompt.Options.AdditionalProperties is null)
{
request.Prompt.Options.AdditionalProperties = [];
}

request.Prompt.Options.AdditionalProperties["num_ctx"] = 8192;


var chatResponse = await chatClient.GetResponseAsync(
request.Prompt.Messages,
request.Prompt.Options,
Expand Down
2 changes: 0 additions & 2 deletions src/Cellm.Models/Providers/OpenAi/OpenAiConfiguration.cs
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@

internal class OpenAiConfiguration : IProviderConfiguration
{
public Uri BaseAddress { get; init; } = default!;

public string DefaultModel { get; init; } = string.Empty;

public string ApiKey { get; init; } = string.Empty;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

namespace Cellm.Models.Providers.OpenAiCompatible;

internal class OpenAiCompatibleRequestHandler()
internal class OpenAiCompatibleRequestHandler
: IModelRequestHandler<OpenAiCompatibleRequest, OpenAiCompatibleResponse>
{

Expand Down
1 change: 1 addition & 0 deletions src/Cellm.Models/Providers/Provider.cs
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
public enum Provider
{
Anthropic,
Cellm,
DeepSeek,
Llamafile,
Mistral,
Expand Down
78 changes: 76 additions & 2 deletions src/Cellm.Models/ServiceCollectionExtensions.cs
Original file line number Diff line number Diff line change
@@ -1,16 +1,43 @@
using System.ClientModel;
// Circumventing limitations anywhere in the codebase is a direct violation of the
// Fair Core License, Version 1.0 until a commit transitions to the Apache 2.0 Future License.
// Once a commit transitions to the Apache 2.0 Future License, you can check out
// that commit and use the code under the Apache 2.0 License. A commit transitions to
// the Apache 2.0 Future License on the second anniversary of the date the git commit
// became available.
//
// The relevant section of the Fair Core License, Version 1.0 is:
//
// > ### Limitations
// > You must not move, change, disable, or circumvent the license key functionality
// > in the Software; or modify any portion of the Software protected by the license
// > key to:
//
// > 1. enable access to the protected functionality without a valid license key; or
//
// > 2. remove the protected functionality.
//
// You can checkout the latest commit licensed under the Apache 2.0 License like this:
//
// $ git checkout $(git rev-list -n 1 --before="2 years ago" HEAD)
//
// For more details, go to https://github.com/getcellm/cellm/blob/main/LICENSE.

using System.ClientModel;
using System.ClientModel.Primitives;
using System.Threading.RateLimiting;
using Anthropic.SDK;
using Cellm.Models.Providers;
using Cellm.Models.Providers.Anthropic;
using Cellm.Models.Providers.Cellm;
using Cellm.Models.Providers.DeepSeek;
using Cellm.Models.Providers.Llamafile;
using Cellm.Models.Providers.Mistral;
using Cellm.Models.Providers.Ollama;
using Cellm.Models.Providers.OpenAi;
using Cellm.Models.Providers.OpenAiCompatible;
using Cellm.Models.Resilience;
using Cellm.Services;
using Cellm.User;
using Microsoft.Extensions.AI;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
Expand Down Expand Up @@ -91,11 +118,14 @@ public static IServiceCollection AddRetryHttpClient(this IServiceCollection serv
return services;
}

public static IServiceCollection AddAnthropicChatClient(this IServiceCollection services, IConfiguration configuration)
public static IServiceCollection AddAnthropicChatClient(this IServiceCollection services)
{
services
.AddKeyedChatClient(Provider.Anthropic, serviceProvider =>
{
var account = ServiceLocator.ServiceProvider.GetRequiredService<Account>();
account.RequireEntitlement(Entitlement.EnableAnthropicProvider);

var anthropicConfiguration = serviceProvider.GetRequiredService<IOptionsMonitor<AnthropicConfiguration>>();
var resilientHttpClient = serviceProvider.GetKeyedService<HttpClient>("ResilientHttpClient") ?? throw new NullReferenceException("ResilientHttpClient");

Expand All @@ -109,11 +139,40 @@ public static IServiceCollection AddAnthropicChatClient(this IServiceCollection
return services;
}

public static IServiceCollection AddCellmChatClient(this IServiceCollection services)
{
services
.AddKeyedChatClient(Provider.Cellm, serviceProvider =>
{
var account = ServiceLocator.ServiceProvider.GetRequiredService<Account>();
account.RequireEntitlement(Entitlement.EnableCellmProvider);

var cellmConfiguration = serviceProvider.GetRequiredService<IOptionsMonitor<CellmConfiguration>>();
var resilientHttpClient = serviceProvider.GetKeyedService<HttpClient>("ResilientHttpClient") ?? throw new NullReferenceException("ResilientHttpClient");

var openAiClient = new OpenAIClient(
new ApiKeyCredential(string.Empty),
new OpenAIClientOptions
{
Transport = new HttpClientPipelineTransport(resilientHttpClient),
Endpoint = cellmConfiguration.CurrentValue.BaseAddress
});

return openAiClient.GetChatClient(cellmConfiguration.CurrentValue.DefaultModel).AsIChatClient();
}, ServiceLifetime.Transient)
.UseFunctionInvocation();

return services;
}

public static IServiceCollection AddOllamaChatClient(this IServiceCollection services)
{
services
.AddKeyedChatClient(Provider.Ollama, serviceProvider =>
{
var account = ServiceLocator.ServiceProvider.GetRequiredService<Account>();
account.RequireEntitlement(Entitlement.EnableOllamaProvider);

var ollamaConfiguration = serviceProvider.GetRequiredService<IOptionsMonitor<OllamaConfiguration>>();
var resilientHttpClient = serviceProvider.GetKeyedService<HttpClient>("ResilientHttpClient") ?? throw new NullReferenceException("ResilientHttpClient");

Expand All @@ -132,6 +191,9 @@ public static IServiceCollection AddDeepSeekChatClient(this IServiceCollection s
services
.AddKeyedChatClient(Provider.DeepSeek, serviceProvider =>
{
var account = ServiceLocator.ServiceProvider.GetRequiredService<Account>();
account.RequireEntitlement(Entitlement.EnableDeepSeekProvider);

var deepSeekConfiguration = serviceProvider.GetRequiredService<IOptionsMonitor<DeepSeekConfiguration>>();
var resilientHttpClient = serviceProvider.GetKeyedService<HttpClient>("ResilientHttpClient") ?? throw new NullReferenceException("ResilientHttpClient");

Expand All @@ -155,6 +217,9 @@ public static IServiceCollection AddLlamafileChatClient(this IServiceCollection
services
.AddKeyedChatClient(Provider.Llamafile, serviceProvider =>
{
var account = ServiceLocator.ServiceProvider.GetRequiredService<Account>();
account.RequireEntitlement(Entitlement.EnableLlamafileProvider);

var llamafileConfiguration = serviceProvider.GetRequiredService<IOptionsMonitor<LlamafileConfiguration>>();
var resilientHttpClient = serviceProvider.GetKeyedService<HttpClient>("ResilientHttpClient") ?? throw new NullReferenceException("ResilientHttpClient");

Expand All @@ -178,6 +243,9 @@ public static IServiceCollection AddMistralChatClient(this IServiceCollection se
services
.AddKeyedChatClient(Provider.Mistral, serviceProvider =>
{
var account = ServiceLocator.ServiceProvider.GetRequiredService<Account>();
account.RequireEntitlement(Entitlement.EnableMistralProvider);

var mistralConfiguration = serviceProvider.GetRequiredService<IOptionsMonitor<MistralConfiguration>>();
var resilientHttpClient = serviceProvider.GetKeyedService<HttpClient>("ResilientHttpClient") ?? throw new NullReferenceException("ResilientHttpClient");

Expand All @@ -201,6 +269,9 @@ public static IServiceCollection AddOpenAiChatClient(this IServiceCollection ser
services
.AddKeyedChatClient(Provider.OpenAi, serviceProvider =>
{
var account = ServiceLocator.ServiceProvider.GetRequiredService<Account>();
account.RequireEntitlement(Entitlement.EnableOpenAiProvider);

var openAiConfiguration = serviceProvider.GetRequiredService<IOptionsMonitor<OpenAiConfiguration>>();

return new OpenAIClient(new ApiKeyCredential(openAiConfiguration.CurrentValue.ApiKey))
Expand All @@ -217,6 +288,9 @@ public static IServiceCollection AddOpenAiCompatibleChatClient(this IServiceColl
services
.AddKeyedChatClient(Provider.OpenAiCompatible, serviceProvider =>
{
var account = ServiceLocator.ServiceProvider.GetRequiredService<Account>();
account.RequireEntitlement(Entitlement.EnableOpenAiCompatibleProvider);

var openAiCompatibleConfiguration = serviceProvider.GetRequiredService<IOptionsMonitor<OpenAiCompatibleConfiguration>>();
var resilientHttpClient = serviceProvider.GetKeyedService<HttpClient>("ResilientHttpClient") ?? throw new NullReferenceException("ResilientHttpClient");

Expand Down
4 changes: 2 additions & 2 deletions src/Cellm/AddIn/ExcelFunctions.cs
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ public static object PromptWith(
// ExcelAsyncUtil.Run yields Excel's UI thread, Task.Run enables async/await in inner code
return ExcelAsyncUtil.Run(nameof(PromptWith), new object[] { providerAndModel, instructionsOrContext, instructionsOrTemperature, temperature }, () =>
{
return Task.Run(async () => await CompleteAsync(prompt, arguments.Provider)).GetAwaiter().GetResult();
return Task.Run(async () => await GetResponseAsync(prompt, arguments.Provider)).GetAwaiter().GetResult();
});

}
Expand All @@ -118,7 +118,7 @@ public static object PromptWith(
/// <returns>A task that represents the asynchronous operation. The task result contains the model's response as a string.</returns>
/// <exception cref="CellmException">Thrown when an unexpected error occurs during the operation.</exception>

internal static async Task<string> CompleteAsync(Prompt prompt, Provider provider)
internal static async Task<string> GetResponseAsync(Prompt prompt, Provider provider)
{
var client = ServiceLocator.ServiceProvider.GetRequiredService<Client>();
var response = await client.Send(prompt, provider, CancellationToken.None);
Expand Down
Loading