Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

.Net: Update {Azure}OpenAI Connectors to latest 2.2.0-beta.1 #10496

Merged
Merged
Prev Previous commit
Next Next commit
Adding Azure Developer Message
  • Loading branch information
RogerBarreto committed Feb 11, 2025
commit d8e338248aeddd4b638ff50843fa6c931c2cf7ef
Original file line number Diff line number Diff line change
Expand Up @@ -262,6 +262,66 @@ public async Task GetChatMessageContentsHandlesResponseFormatCorrectlyAsync(obje
Assert.Equal(expectedResponseType, content.GetProperty("response_format").GetProperty("type").GetString());
}

[Theory]
[InlineData(null, null)]
[InlineData("string", "low")]
[InlineData("string", "medium")]
[InlineData("string", "high")]
[InlineData("ChatReasonEffortLevel.Low", "low")]
[InlineData("ChatReasonEffortLevel.Medium", "medium")]
[InlineData("ChatReasonEffortLevel.High", "high")]
public async Task GetChatMessageInReasoningEffortAsync(string? effortType, string? expectedEffortLevel)
{
    // Arrange
    // Map the test-case discriminator onto the value assigned to ReasoningEffort:
    // either a raw string or a strongly-typed ChatReasoningEffortLevel.
    object? reasoningEffortObject = effortType switch
    {
        "string" => (object?)expectedEffortLevel,
        "ChatReasonEffortLevel.Low" => ChatReasoningEffortLevel.Low,
        "ChatReasonEffortLevel.Medium" => ChatReasoningEffortLevel.Medium,
        "ChatReasonEffortLevel.High" => ChatReasoningEffortLevel.High,
        _ => null,
    };

    var modelId = "o1";
    var sut = new OpenAIChatCompletionService(modelId, "apiKey", httpClient: this._httpClient);
    OpenAIPromptExecutionSettings executionSettings = new() { ReasoningEffort = reasoningEffortObject };

    using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
    {
        Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.json"))
    };
    this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);

    // Act
    var result = await sut.GetChatMessageContentAsync(new ChatHistory("System message"), executionSettings);

    // Assert
    Assert.NotNull(result);

    var actualRequestContent = Encoding.UTF8.GetString(this._messageHandlerStub.RequestContents[0]!);
    Assert.NotNull(actualRequestContent);

    var optionsJson = JsonSerializer.Deserialize<JsonElement>(actualRequestContent);

    if (expectedEffortLevel is null)
    {
        // No effort level configured: the request payload must omit the property entirely.
        Assert.False(optionsJson.TryGetProperty("reasoning_effort", out _));
        return;
    }

    var requestedReasoningEffort = optionsJson.GetProperty("reasoning_effort").GetString();
    Assert.Equal(expectedEffortLevel, requestedReasoningEffort);
}

[Theory]
[MemberData(nameof(ToolCallBehaviors))]
public async Task GetChatMessageContentsWorksCorrectlyAsync(ToolCallBehavior behavior)
Expand Down Expand Up @@ -823,6 +883,49 @@ public async Task GetChatMessageContentsUsesPromptAndSettingsCorrectlyAsync()
Assert.Equal("user", messages[1].GetProperty("role").GetString());
}

[Fact]
public async Task GetChatMessageContentsUsesDeveloperPromptAndSettingsCorrectlyAsync()
{
    // Arrange
    const string Prompt = "This is test prompt";
    const string DeveloperMessage = "This is test system message";

    var chatService = new AzureOpenAIChatCompletionService("deployment", "https://endpoint", "api-key", "model-id", this._httpClient);
    var executionSettings = new AzureOpenAIPromptExecutionSettings() { ChatDeveloperPrompt = DeveloperMessage };

    using var responseMessage = new HttpResponseMessage(HttpStatusCode.OK)
    {
        Content = new StringContent(AzureOpenAITestHelper.GetTestResponse("chat_completion_test_response.json"))
    };
    this._messageHandlerStub.ResponsesToReturn.Add(responseMessage);

    // Wire the service into a kernel so the prompt flows through the standard invocation pipeline.
    IKernelBuilder builder = Kernel.CreateBuilder();
    builder.Services.AddTransient<IChatCompletionService>((sp) => chatService);
    Kernel kernel = builder.Build();

    // Act
    var result = await kernel.InvokePromptAsync(Prompt, new(executionSettings));

    // Assert
    Assert.Equal("Test chat response", result.ToString());

    var requestContentByteArray = this._messageHandlerStub.RequestContents[0];
    Assert.NotNull(requestContentByteArray);

    var requestContent = JsonSerializer.Deserialize<JsonElement>(Encoding.UTF8.GetString(requestContentByteArray));
    var messages = requestContent.GetProperty("messages");

    // Exactly two messages: the developer prompt first, then the user prompt.
    Assert.Equal(2, messages.GetArrayLength());

    var developerEntry = messages[0];
    Assert.Equal(DeveloperMessage, developerEntry.GetProperty("content").GetString());
    Assert.Equal("developer", developerEntry.GetProperty("role").GetString());

    var userEntry = messages[1];
    Assert.Equal(Prompt, userEntry.GetProperty("content").GetString());
    Assert.Equal("user", userEntry.GetProperty("role").GetString());
}

[Fact]
public async Task GetChatMessageContentsWithChatMessageContentItemCollectionAndSettingsCorrectlyAsync()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ protected override ChatCompletionOptions CreateChatCompletionOptions(
TopLogProbabilityCount = executionSettings.TopLogprobs,
IncludeLogProbabilities = executionSettings.Logprobs,
StoredOutputEnabled = executionSettings.Store,
ReasoningEffortLevel = GetEffortLevel(executionSettings),
};

var responseFormat = GetResponseFormat(executionSettings);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -520,7 +520,7 @@ protected virtual ChatCompletionOptions CreateChatCompletionOptions(
return options;
}

private static ChatReasoningEffortLevel? GetEffortLevel(OpenAIPromptExecutionSettings executionSettings)
protected static ChatReasoningEffortLevel? GetEffortLevel(OpenAIPromptExecutionSettings executionSettings)
{
var effortLevelObject = executionSettings.ReasoningEffort;
if (effortLevelObject is null)
Expand Down