Commit 78a1c4a

.Net: Update Microsoft.Extensions.AI packages to the latest version. (#10573)

cc @RogerBarret0 @SergeyMenshykh

---------

Co-authored-by: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
eiriktsarpalis and RogerBarreto authored Feb 18, 2025
1 parent 31166d4 commit 78a1c4a
Showing 14 changed files with 201 additions and 158 deletions.
10 changes: 5 additions & 5 deletions dotnet/Directory.Packages.props
@@ -48,7 +48,7 @@
<PackageVersion Include="MSTest.TestFramework" Version="3.8.0" />
<PackageVersion Include="Newtonsoft.Json" Version="13.0.3" />
<PackageVersion Include="Npgsql" Version="8.0.6" />
<PackageVersion Include="OllamaSharp" Version="4.0.17" />
<PackageVersion Include="OllamaSharp" Version="5.0.7" />
<PackageVersion Include="OpenAI" Version="[2.2.0-beta.1]" />
<PackageVersion Include="OpenTelemetry.Exporter.Console" Version="1.9.0" />
<PackageVersion Include="PdfPig" Version="0.1.9" />
@@ -68,10 +68,10 @@
<PackageVersion Include="Microsoft.DeepDev.TokenizerLib" Version="1.3.3" />
<PackageVersion Include="SharpToken" Version="2.0.3" />
<!-- Microsoft.Extensions.* -->
<PackageVersion Include="Microsoft.Extensions.AI" Version="9.1.0-preview.1.25064.3" />
<PackageVersion Include="Microsoft.Extensions.AI.Abstractions" Version="9.1.0-preview.1.25064.3" />
<PackageVersion Include="Microsoft.Extensions.AI.AzureAIInference" Version="9.1.0-preview.1.25064.3" />
<PackageVersion Include="Microsoft.Extensions.AI.OpenAI" Version="9.1.0-preview.1.25064.3" />
<PackageVersion Include="Microsoft.Extensions.AI" Version="9.3.0-preview.1.25114.11" />
<PackageVersion Include="Microsoft.Extensions.AI.Abstractions" Version="9.3.0-preview.1.25114.11" />
<PackageVersion Include="Microsoft.Extensions.AI.AzureAIInference" Version="9.3.0-preview.1.25114.11" />
<PackageVersion Include="Microsoft.Extensions.AI.OpenAI" Version="9.3.0-preview.1.25114.11" />
<PackageVersion Include="Microsoft.Extensions.Configuration" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.Configuration.Abstractions" Version="8.0.0" />
<PackageVersion Include="Microsoft.Extensions.Configuration.Binder" Version="8.0.2" />
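
The 9.1.0-preview to 9.3.0-preview bump above picks up renamed chat APIs in Microsoft.Extensions.AI, which is what the remaining files in this commit adapt to: CompleteAsync becomes GetResponseAsync, CompleteStreamingAsync becomes GetStreamingResponseAsync, and ChatCompletion / StreamingChatCompletionUpdate become ChatResponse / ChatResponseUpdate. A minimal consumer-side sketch of the renamed non-streaming call (client construction is provider-specific and omitted; the prompt is illustrative):

using System;
using System.Threading.Tasks;
using Microsoft.Extensions.AI;

internal static class RenamedChatApiSketch
{
    // 'client' is any IChatClient implementation; how it is constructed is out of scope here.
    public static async Task AskAsync(IChatClient client)
    {
        // 9.1.0-preview: ChatCompletion completion = await client.CompleteAsync("Do I need an umbrella?");
        // 9.3.0-preview:
        ChatResponse response = await client.GetResponseAsync("Do I need an umbrella?");
        Console.WriteLine(response); // the response renders its message text
    }
}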
@@ -39,9 +39,9 @@ public async Task FallbackToAvailableModelAsync()
// Create a fallback chat client that will fallback to the available chat client when unavailable chat client fails
IChatClient fallbackChatClient = new FallbackChatClient([unavailableChatClient, availableChatClient]);

ChatOptions chatOptions = new() { Tools = [AIFunctionFactory.Create(GetWeather, new AIFunctionFactoryCreateOptions { Name = "GetWeather" })] };
ChatOptions chatOptions = new() { Tools = [AIFunctionFactory.Create(GetWeather)] };

var result = await fallbackChatClient.CompleteAsync("Do I need an umbrella?", chatOptions);
var result = await fallbackChatClient.GetResponseAsync("Do I need an umbrella?", chatOptions);

Output.WriteLine(result);
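
The tool registration above also gets simpler: the sample now relies on AIFunctionFactory.Create inferring the function name and description from the method itself, so the explicit AIFunctionFactoryCreateOptions { Name = "GetWeather" } is dropped. A self-contained sketch of the same pattern (the GetWeather body here is an illustrative stand-in for the sample's method):

using System;
using System.ComponentModel;
using Microsoft.Extensions.AI;

internal static class WeatherToolSketch
{
    [Description("Gets the current weather")]
    public static string GetWeather()
        => Random.Shared.NextDouble() > 0.5 ? "It's sunny." : "It's raining.";

    public static ChatOptions CreateChatOptions() => new()
    {
        // The factory derives the name "GetWeather" and the description from the method,
        // so no extra options object is needed for this case.
        Tools = [AIFunctionFactory.Create(GetWeather)]
    };
}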

@@ -64,9 +64,9 @@ public async Task FallbackToAvailableModelStreamingAsync()
// Create a fallback chat client that will fallback to the available chat client when unavailable chat client fails
IChatClient fallbackChatClient = new FallbackChatClient([unavailableChatClient, availableChatClient]);

ChatOptions chatOptions = new() { Tools = [AIFunctionFactory.Create(GetWeather, new AIFunctionFactoryCreateOptions { Name = "GetWeather" })] };
ChatOptions chatOptions = new() { Tools = [AIFunctionFactory.Create(GetWeather)] };

var result = fallbackChatClient.CompleteStreamingAsync("Do I need an umbrella?", chatOptions);
var result = fallbackChatClient.GetStreamingResponseAsync("Do I need an umbrella?", chatOptions);

await foreach (var update in result)
{
@@ -151,15 +151,15 @@ public FallbackChatClient(IList<IChatClient> chatClients)
public ChatClientMetadata Metadata => new();

/// <inheritdoc/>
public async Task<Microsoft.Extensions.AI.ChatCompletion> CompleteAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
public async Task<Microsoft.Extensions.AI.ChatResponse> GetResponseAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
for (int i = 0; i < this._chatClients.Count; i++)
{
var chatClient = this._chatClients.ElementAt(i);

try
{
return await chatClient.CompleteAsync(chatMessages, options, cancellationToken).ConfigureAwait(false);
return await chatClient.GetResponseAsync(chatMessages, options, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
@@ -177,15 +177,15 @@ public FallbackChatClient(IList<IChatClient> chatClients)
}

/// <inheritdoc/>
public async IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
for (int i = 0; i < this._chatClients.Count; i++)
{
var chatClient = this._chatClients.ElementAt(i);

IAsyncEnumerable<StreamingChatCompletionUpdate> completionStream = chatClient.CompleteStreamingAsync(chatMessages, options, cancellationToken);
IAsyncEnumerable<ChatResponseUpdate> completionStream = chatClient.GetStreamingResponseAsync(chatMessages, options, cancellationToken);

ConfiguredCancelableAsyncEnumerable<StreamingChatCompletionUpdate>.Enumerator enumerator = completionStream.ConfigureAwait(false).GetAsyncEnumerator();
ConfiguredCancelableAsyncEnumerable<ChatResponseUpdate>.Enumerator enumerator = completionStream.ConfigureAwait(false).GetAsyncEnumerator();

try
{
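
The streaming path of the fallback client, truncated in the rendering above, drives the enumerator by hand: C# does not allow yield return inside a try block that has a catch clause, so the first MoveNextAsync has to be awaited separately to decide whether to fall back. A rough, self-contained sketch of that pattern against the renamed API (details are assumed; cancellation and enumerator disposal are omitted for brevity):

using System;
using System.Collections.Generic;
using Microsoft.Extensions.AI;

internal static class StreamingFallbackSketch
{
    public static async IAsyncEnumerable<ChatResponseUpdate> StreamWithFallbackAsync(
        IReadOnlyList<IChatClient> clients, string prompt, ChatOptions? options = null)
    {
        foreach (IChatClient client in clients)
        {
            IAsyncEnumerator<ChatResponseUpdate> enumerator =
                client.GetStreamingResponseAsync(prompt, options).GetAsyncEnumerator();

            ChatResponseUpdate? first;
            try
            {
                // Await the first update inside try/catch; yielding happens outside of it.
                first = await enumerator.MoveNextAsync() ? enumerator.Current : null;
            }
            catch (Exception)
            {
                continue; // this client failed before producing output; try the next one
            }

            if (first is not null)
            {
                yield return first;
                while (await enumerator.MoveNextAsync())
                {
                    yield return enumerator.Current;
                }
            }

            yield break; // the stream completed (possibly empty); no further fallback
        }
    }
}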
@@ -249,7 +249,7 @@ public async Task GetChatMessageInResponseFormatsAsync(string formatType, string
format = JsonSerializer.Deserialize<JsonElement>(formatValue);
break;
case "ChatResponseFormat":
format = formatValue == "text" ? new ChatCompletionsResponseFormatText() : new ChatCompletionsResponseFormatJSON();
format = formatValue == "text" ? new ChatCompletionsResponseFormatText() : new ChatCompletionsResponseFormatJsonObject();
break;
}

@@ -136,7 +136,7 @@ public int? MaxTokens
/// Note that to enable JSON mode, some AI models may also require you to instruct the model to produce JSON
/// via a system or user message.
/// Please note <see cref="ChatCompletionsResponseFormat"/> is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes.
/// The available derived classes include <see cref="ChatCompletionsResponseFormatJSON"/> and <see cref="ChatCompletionsResponseFormatText"/>.
/// The available derived classes include <see cref="ChatCompletionsResponseFormatJsonObject"/> and <see cref="ChatCompletionsResponseFormatText"/>.
/// </summary>
[JsonPropertyName("response_format")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
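
For reference, the renamed Azure.AI.Inference response-format type is used by assigning one of the derived classes to the settings property documented above. A brief sketch; the settings type name follows the connector's public AzureAIInferencePromptExecutionSettings, and the exact shape should be treated as an assumption:

using Azure.AI.Inference;
using Microsoft.SemanticKernel.Connectors.AzureAIInference;

var settings = new AzureAIInferencePromptExecutionSettings
{
    // Plain text output:
    // ResponseFormat = new ChatCompletionsResponseFormatText(),
    // JSON mode (the class formerly referenced as ChatCompletionsResponseFormatJSON):
    ResponseFormat = new ChatCompletionsResponseFormatJsonObject(),
};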
@@ -2,7 +2,9 @@

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Reflection;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
@@ -12,8 +14,7 @@ namespace Microsoft.SemanticKernel.ChatCompletion;

/// <summary>Provides a <see cref="KernelFunction"/> that wraps an <see cref="AIFunction"/>.</summary>
/// <remarks>
/// The implementation should largely be unused, other than for its <see cref="AIFunction.Metadata"/>. The implementation of
/// <see cref="ChatCompletionServiceChatClient"/> only manufactures these to pass along to the underlying
/// The implementation of <see cref="ChatCompletionServiceChatClient"/> only manufactures these to pass along to the underlying
/// <see cref="IChatCompletionService"/> with autoInvoke:false, which means the <see cref="IChatCompletionService"/>
/// implementation shouldn't be invoking these functions at all. As such, the <see cref="InvokeCoreAsync"/> and
/// <see cref="InvokeStreamingCoreAsync"/> methods both unconditionally throw, even though they could be implemented.
@@ -23,28 +24,15 @@ internal sealed class AIFunctionKernelFunction : KernelFunction
private readonly AIFunction _aiFunction;

public AIFunctionKernelFunction(AIFunction aiFunction) :
base(aiFunction.Metadata.Name,
aiFunction.Metadata.Description,
aiFunction.Metadata.Parameters.Select(p => new KernelParameterMetadata(p.Name, AbstractionsJsonContext.Default.Options)
{
Description = p.Description,
DefaultValue = p.DefaultValue,
IsRequired = p.IsRequired,
ParameterType = p.ParameterType,
Schema =
p.Schema is JsonElement je ? new KernelJsonSchema(je) :
p.Schema is string s ? new KernelJsonSchema(JsonSerializer.Deserialize(s, AbstractionsJsonContext.Default.JsonElement)) :
null,
}).ToList(),
AbstractionsJsonContext.Default.Options,
base(aiFunction.Name,
aiFunction.Description,
MapParameterMetadata(aiFunction),
aiFunction.JsonSerializerOptions,
new KernelReturnParameterMetadata(AbstractionsJsonContext.Default.Options)
{
Description = aiFunction.Metadata.ReturnParameter.Description,
ParameterType = aiFunction.Metadata.ReturnParameter.ParameterType,
Schema =
aiFunction.Metadata.ReturnParameter.Schema is JsonElement je ? new KernelJsonSchema(je) :
aiFunction.Metadata.ReturnParameter.Schema is string s ? new KernelJsonSchema(JsonSerializer.Deserialize(s, AbstractionsJsonContext.Default.JsonElement)) :
null,
Description = aiFunction.UnderlyingMethod?.ReturnParameter.GetCustomAttribute<DescriptionAttribute>()?.Description,
ParameterType = aiFunction.UnderlyingMethod?.ReturnParameter.ParameterType,
Schema = new KernelJsonSchema(AIJsonUtilities.CreateJsonSchema(aiFunction.UnderlyingMethod?.ReturnParameter.ParameterType)),
})
{
this._aiFunction = aiFunction;
@@ -73,4 +61,30 @@ protected override IAsyncEnumerable<TResult> InvokeStreamingCoreAsync<TResult>(K
// This should never be invoked, as instances are always passed with autoInvoke:false.
throw new NotSupportedException();
}

private static IReadOnlyList<KernelParameterMetadata> MapParameterMetadata(AIFunction aiFunction)
{
if (!aiFunction.JsonSchema.TryGetProperty("properties", out JsonElement properties))
{
return Array.Empty<KernelParameterMetadata>();
}

List<KernelParameterMetadata> kernelParams = [];
var parameterInfos = aiFunction.UnderlyingMethod?.GetParameters().ToDictionary(p => p.Name!, StringComparer.Ordinal);
foreach (var param in properties.EnumerateObject())
{
ParameterInfo? paramInfo = null;
parameterInfos?.TryGetValue(param.Name, out paramInfo);
kernelParams.Add(new(param.Name, aiFunction.JsonSerializerOptions)
{
Description = param.Value.TryGetProperty("description", out JsonElement description) ? description.GetString() : null,
DefaultValue = param.Value.TryGetProperty("default", out JsonElement defaultValue) ? defaultValue : null,
IsRequired = param.Value.TryGetProperty("required", out JsonElement required) && required.GetBoolean(),
ParameterType = paramInfo?.ParameterType,
Schema = param.Value.TryGetProperty("schema", out JsonElement schema) ? new KernelJsonSchema(schema) : null,
});
}

return kernelParams;
}
}
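
The new MapParameterMetadata above derives KernelParameterMetadata from the function's JSON schema instead of the removed AIFunction.Metadata. For context, this is roughly the schema shape it walks; the exact JSON text below and the Create overload with an explicit name are assumptions, only the object-with-properties layout matters:

using System;
using System.ComponentModel;
using Microsoft.Extensions.AI;

AIFunction function = AIFunctionFactory.Create(
    ([Description("City name")] string city, int days) => $"Forecast for {city} over {days} day(s)",
    name: "GetForecast");

Console.WriteLine(function.JsonSchema.GetRawText());
// Roughly: {"title":"GetForecast","type":"object",
//           "properties":{"city":{"description":"City name","type":"string"},"days":{"type":"integer"}},
//           "required":["city","days"]}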
@@ -34,12 +34,12 @@ public ChatClientChatCompletionService(IChatClient chatClient, IServiceProvider?
var attrs = new Dictionary<string, object?>();
this.Attributes = new ReadOnlyDictionary<string, object?>(attrs);

var metadata = chatClient.Metadata;
if (metadata.ProviderUri is not null)
var metadata = chatClient.GetService<ChatClientMetadata>();
if (metadata?.ProviderUri is not null)
{
attrs[AIServiceExtensions.EndpointKey] = metadata.ProviderUri.ToString();
}
if (metadata.ModelId is not null)
if (metadata?.ModelId is not null)
{
attrs[AIServiceExtensions.ModelIdKey] = metadata.ModelId;
}
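
With this change the connector no longer reads IChatClient.Metadata directly; the metadata is resolved through GetService. A minimal sketch of the same lookup from calling code, where chatClient stands for any existing IChatClient instance:

using System;
using Microsoft.Extensions.AI;

internal static class ChatClientMetadataSketch
{
    public static void PrintMetadata(IChatClient chatClient)
    {
        ChatClientMetadata? metadata = chatClient.GetService<ChatClientMetadata>();
        Console.WriteLine(metadata?.ProviderUri);
        Console.WriteLine(metadata?.ModelId);
    }
}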
@@ -57,7 +57,7 @@ public async Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync
var messageList = ChatCompletionServiceExtensions.ToChatMessageList(chatHistory);
var currentSize = messageList.Count;

var completion = await this._chatClient.CompleteAsync(
var completion = await this._chatClient.GetResponseAsync(
messageList,
ToChatOptions(executionSettings, kernel),
cancellationToken).ConfigureAwait(false);
@@ -76,7 +76,7 @@ public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessa
{
Verify.NotNull(chatHistory);

await foreach (var update in this._chatClient.CompleteStreamingAsync(
await foreach (var update in this._chatClient.GetStreamingResponseAsync(
ChatCompletionServiceExtensions.ToChatMessageList(chatHistory),
ToChatOptions(executionSettings, kernel),
cancellationToken).ConfigureAwait(false))
@@ -158,13 +158,19 @@ public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessa
else if (entry.Key.Equals("response_format", StringComparison.OrdinalIgnoreCase) &&
entry.Value is { } responseFormat)
{
options.ResponseFormat = responseFormat switch
if (TryConvert(responseFormat, out string? responseFormatString))
{
"text" => ChatResponseFormat.Text,
"json_object" => ChatResponseFormat.Json,
JsonElement e => ChatResponseFormat.ForJsonSchema(e),
_ => null,
};
options.ResponseFormat = responseFormatString switch
{
"text" => ChatResponseFormat.Text,
"json_object" => ChatResponseFormat.Json,
_ => null,
};
}
else
{
options.ResponseFormat = responseFormat is JsonElement e ? ChatResponseFormat.ForJsonSchema(e) : null;
}
}
else
{
@@ -268,9 +274,9 @@ static bool TryConvert<T>(object? value, [NotNullWhen(true)] out T? result)
}
}

/// <summary>Converts a <see cref="StreamingChatCompletionUpdate"/> to a <see cref="StreamingChatMessageContent"/>.</summary>
/// <summary>Converts a <see cref="ChatResponseUpdate"/> to a <see cref="StreamingChatMessageContent"/>.</summary>
/// <remarks>This conversion should not be necessary once SK eventually adopts the shared content types.</remarks>
private static StreamingChatMessageContent ToStreamingChatMessageContent(StreamingChatCompletionUpdate update)
private static StreamingChatMessageContent ToStreamingChatMessageContent(ChatResponseUpdate update)
{
StreamingChatMessageContent content = new(
update.Role is not null ? new AuthorRole(update.Role.Value.Value) : null,
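
The reworked response_format handling above accepts either a string ("text" or "json_object") or a JsonElement schema coming from the prompt execution settings' extension data. A sketch of how a caller might supply that value (settings construction only; the key and values match the mapping in the diff):

using System.Collections.Generic;
using Microsoft.SemanticKernel;

PromptExecutionSettings settings = new()
{
    ExtensionData = new Dictionary<string, object>
    {
        // Mapped to ChatResponseFormat.Json by the updated connector; "text" maps to
        // ChatResponseFormat.Text, and a JsonElement is mapped to ChatResponseFormat.ForJsonSchema(...).
        ["response_format"] = "json_object",
    },
};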
@@ -35,7 +35,7 @@ public ChatCompletionServiceChatClient(IChatCompletionService chatCompletionServ
public ChatClientMetadata Metadata { get; }

/// <inheritdoc />
public async Task<Extensions.AI.ChatCompletion> CompleteAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
public async Task<Extensions.AI.ChatResponse> GetResponseAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
{
Verify.NotNull(chatMessages);

@@ -53,7 +53,7 @@ public ChatCompletionServiceChatClient(IChatCompletionService chatCompletionServ
}

/// <inheritdoc />
public async IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
public async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
{
Verify.NotNull(chatMessages);

@@ -82,6 +82,7 @@ public void Dispose()
serviceKey is not null ? null :
serviceType.IsInstanceOfType(this) ? this :
serviceType.IsInstanceOfType(this._chatCompletionService) ? this._chatCompletionService :
serviceType.IsInstanceOfType(this.Metadata) ? this.Metadata :
null;
}

@@ -191,11 +192,11 @@ public void Dispose()
return settings;
}

/// <summary>Converts a <see cref="StreamingChatMessageContent"/> to a <see cref="StreamingChatCompletionUpdate"/>.</summary>
/// <summary>Converts a <see cref="StreamingChatMessageContent"/> to a <see cref="ChatResponseUpdate"/>.</summary>
/// <remarks>This conversion should not be necessary once SK eventually adopts the shared content types.</remarks>
private static StreamingChatCompletionUpdate ToStreamingChatCompletionUpdate(StreamingChatMessageContent content)
private static ChatResponseUpdate ToStreamingChatCompletionUpdate(StreamingChatMessageContent content)
{
StreamingChatCompletionUpdate update = new()
ChatResponseUpdate update = new()
{
AdditionalProperties = content.Metadata is not null ? new AdditionalPropertiesDictionary(content.Metadata) : null,
AuthorName = content.AuthorName,
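
The streaming bridge above now produces ChatResponseUpdate instead of StreamingChatCompletionUpdate. For completeness, the renamed streaming call looks like this from the consumer side (client construction omitted; the prompt is illustrative):

using System;
using System.Threading.Tasks;
using Microsoft.Extensions.AI;

internal static class RenamedStreamingApiSketch
{
    public static async Task StreamAsync(IChatClient client)
    {
        // 9.1.0-preview: client.CompleteStreamingAsync(...) yielding StreamingChatCompletionUpdate
        // 9.3.0-preview:
        await foreach (ChatResponseUpdate update in client.GetStreamingResponseAsync("Do I need an umbrella?"))
        {
            Console.Write(update); // each update renders its text content
        }
    }
}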