Skip to content

Commit

Permalink
.Net: Update OpenAI / AzureOpenAI Concepts (#8919)
Browse files Browse the repository at this point in the history
### Motivation and Context

- Split AzureOpenAI from OpenAI examples
- Added missing links in Readme for Ollama and Azure AI Inference
samples.
- Added more descriptive text to OpenAI examples
- Update naming for tests
- Resolves #7007
  • Loading branch information
RogerBarreto authored Sep 20, 2024
1 parent 8e25752 commit be90d23
Show file tree
Hide file tree
Showing 7 changed files with 672 additions and 86 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Text;
using Azure.Identity;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;

namespace ChatCompletion;

// The following example shows how to use Semantic Kernel with Azure OpenAI API
// The following example shows how to use Semantic Kernel with the Azure OpenAI API.
public class AzureOpenAI_ChatCompletion(ITestOutputHelper output) : BaseTest(output)
{
    /// <summary>
    /// Sample showing how to send a chat-formatted prompt (XML message tags) through the kernel,
    /// then continue the conversation by appending the assistant reply and a follow-up question.
    /// </summary>
    [Fact]
    public async Task ChatPromptAsync()
    {
        // This sample targets Azure OpenAI, so validate the Azure OpenAI configuration.
        // (The original code checked TestConfiguration.Ollama.ModelId by mistake — a
        // copy/paste slip from the Ollama sample.)
        Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName);
        Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint);
        Assert.NotNull(TestConfiguration.AzureOpenAI.ApiKey);

        StringBuilder chatPrompt = new("""
                                       <message role="system">You are a librarian, expert about books</message>
                                       <message role="user">Hi, I'm looking for book suggestions</message>
                                       """);

        var kernel = Kernel.CreateBuilder()
            .AddAzureOpenAIChatCompletion(
                deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
                endpoint: TestConfiguration.AzureOpenAI.Endpoint,
                apiKey: TestConfiguration.AzureOpenAI.ApiKey,
                modelId: TestConfiguration.AzureOpenAI.ChatModelId)
            .Build();

        var reply = await kernel.InvokePromptAsync(chatPrompt.ToString());

        // Append the assistant reply (CDATA-wrapped so reply text cannot break the XML prompt)
        // and a follow-up user message, then invoke the prompt again to continue the chat.
        chatPrompt.AppendLine($"<message role=\"assistant\"><![CDATA[{reply}]]></message>");
        chatPrompt.AppendLine("<message role=\"user\">I love history and philosophy, I'd like to learn something new about Greece, any suggestion</message>");

        reply = await kernel.InvokePromptAsync(chatPrompt.ToString());

        Console.WriteLine(reply);
    }

    /// <summary>
    /// Sample showing how to use the <see cref="AzureOpenAIChatCompletionService"/> directly
    /// (without a Kernel) with API-key authentication.
    /// </summary>
    [Fact]
    public async Task ServicePromptAsync()
    {
        Console.WriteLine("======== Azure Open AI - Chat Completion ========");

        AzureOpenAIChatCompletionService chatCompletionService = new(
            deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
            apiKey: TestConfiguration.AzureOpenAI.ApiKey,
            modelId: TestConfiguration.AzureOpenAI.ChatModelId);

        await StartChatAsync(chatCompletionService);
    }

    /// <summary>
    /// Sample showing how to use Azure Open AI Chat Completion with Azure Default Credential.
    /// If local auth is disabled in the Azure Open AI deployment, you can use Azure Default Credential to authenticate.
    /// </summary>
    [Fact]
    public async Task DefaultAzureCredentialSampleAsync()
    {
        Console.WriteLine("======== Azure Open AI - Chat Completion with Azure Default Credential ========");

        AzureOpenAIChatCompletionService chatCompletionService = new(
            deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
            credentials: new DefaultAzureCredential(),
            modelId: TestConfiguration.AzureOpenAI.ChatModelId);

        await StartChatAsync(chatCompletionService);
    }

    /// <summary>
    /// Drives a short two-turn librarian conversation against the provided chat completion
    /// service, echoing each message to the console as it is added to the history.
    /// </summary>
    /// <param name="chatGPT">The chat completion service to exercise.</param>
    private async Task StartChatAsync(IChatCompletionService chatGPT)
    {
        Console.WriteLine("Chat content:");
        Console.WriteLine("------------------------");

        var chatHistory = new ChatHistory("You are a librarian, expert about books");

        // First user message
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
        OutputLastMessage(chatHistory);

        // First assistant message
        var reply = await chatGPT.GetChatMessageContentAsync(chatHistory);
        chatHistory.Add(reply);
        OutputLastMessage(chatHistory);

        // Second user message
        chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion");
        OutputLastMessage(chatHistory);

        // Second assistant message
        reply = await chatGPT.GetChatMessageContentAsync(chatHistory);
        chatHistory.Add(reply);
        OutputLastMessage(chatHistory);
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,155 @@
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.AzureOpenAI;
using Microsoft.SemanticKernel.Connectors.OpenAI;

namespace ChatCompletion;

/// <summary>
/// These examples demonstrate the ways different content types are streamed by Azure OpenAI via the chat completion service.
/// </summary>
public class AzureOpenAI_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output)
{
    /// <summary>
    /// This example demonstrates chat completion streaming using Azure OpenAI.
    /// </summary>
    [Fact]
    public Task StreamServicePromptAsync()
    {
        Console.WriteLine("======== Azure Open AI Chat Completion Streaming ========");

        AzureOpenAIChatCompletionService chatCompletionService = new(
            deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
            apiKey: TestConfiguration.AzureOpenAI.ApiKey,
            modelId: TestConfiguration.AzureOpenAI.ChatModelId);

        // Delegate the two-turn streamed conversation to the shared helper below.
        return this.StartStreamingChatAsync(chatCompletionService);
    }

    /// <summary>
    /// This example demonstrates how the chat completion service streams text content.
    /// It shows how to access the response update via StreamingChatMessageContent.Content property
    /// and alternatively via the StreamingChatMessageContent.Items property.
    /// </summary>
    [Fact]
    public async Task StreamServicePromptTextAsync()
    {
        Console.WriteLine("======== Azure Open AI Streaming Text ========");

        // Create chat completion service
        AzureOpenAIChatCompletionService chatCompletionService = new(
            deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
            apiKey: TestConfiguration.AzureOpenAI.ApiKey,
            modelId: TestConfiguration.AzureOpenAI.ChatModelId);

        // Create chat history with initial system and user messages
        ChatHistory chatHistory = new("You are a librarian, an expert on books.");
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions.");
        chatHistory.AddUserMessage("I love history and philosophy. I'd like to learn something new about Greece, any suggestion?");

        // Start streaming chat based on the chat history
        await foreach (StreamingChatMessageContent chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory))
        {
            // Access the response update via StreamingChatMessageContent.Content property
            Console.Write(chatUpdate.Content);

            // Alternatively, the response update can be accessed via the StreamingChatMessageContent.Items property.
            // NOTE(review): this writes the same text a second time — the sample intentionally shows
            // both access paths side by side, so the console output is duplicated per update.
            Console.Write(chatUpdate.Items.OfType<StreamingTextContent>().FirstOrDefault());
        }
    }

    /// <summary>
    /// This example demonstrates how the chat completion service streams raw function call content.
    /// See <see cref="FunctionCalling.FunctionCalling.RunStreamingChatCompletionApiWithManualFunctionCallingAsync"/> for a sample demonstrating how to simplify
    /// function call content building out of streamed function call updates using the <see cref="FunctionCallContentBuilder"/>.
    /// </summary>
    [Fact]
    public async Task StreamFunctionCallContentAsync()
    {
        Console.WriteLine("======== Stream Function Call Content ========");

        // Create chat completion service
        AzureOpenAIChatCompletionService chatCompletionService = new(deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
            apiKey: TestConfiguration.AzureOpenAI.ApiKey,
            modelId: TestConfiguration.AzureOpenAI.ChatModelId);

        // Create kernel with helper plugin.
        // NOTE(review): the lambda parameter 'longTestString' is never used by the method body —
        // presumably it exists to exercise argument streaming in the function-call updates; confirm intent.
        Kernel kernel = new();
        kernel.ImportPluginFromFunctions("HelperFunctions",
        [
            kernel.CreateFunctionFromMethod((string longTestString) => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."),
        ]);

        // Create execution settings with manual function calling (autoInvoke: false), so the raw
        // function-call updates are surfaced to this code instead of being executed automatically.
        OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false) };

        // Create chat history with initial user question
        ChatHistory chatHistory = [];
        chatHistory.AddUserMessage("Hi, what is the current time?");

        // Start streaming chat based on the chat history
        await foreach (StreamingChatMessageContent chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel))
        {
            // Getting list of function call updates requested by LLM
            var streamingFunctionCallUpdates = chatUpdate.Items.OfType<StreamingFunctionCallUpdateContent>();

            // Iterating over function call updates. Please use the FunctionCallContentBuilder to simplify function call content building.
            foreach (StreamingFunctionCallUpdateContent update in streamingFunctionCallUpdates)
            {
                Console.WriteLine($"Function call update: callId={update.CallId}, name={update.Name}, arguments={update.Arguments?.Replace("\n", "\\n")}, functionCallIndex={update.FunctionCallIndex}");
            }
        }
    }

    /// <summary>
    /// Drives a two-turn librarian conversation, streaming each assistant reply to the console
    /// as it arrives and recording the completed reply back into the history.
    /// </summary>
    /// <param name="chatCompletionService">The chat completion service to stream from.</param>
    private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService)
    {
        Console.WriteLine("Chat content:");
        Console.WriteLine("------------------------");

        var chatHistory = new ChatHistory("You are a librarian, expert about books");
        OutputLastMessage(chatHistory);

        // First user message
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
        OutputLastMessage(chatHistory);

        // First assistant message
        await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);

        // Second user message
        chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?");
        OutputLastMessage(chatHistory);

        // Second assistant message
        await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);
    }

    /// <summary>
    /// Streams a single reply from the service, echoing updates to the console as they arrive,
    /// then appends the accumulated full message to <paramref name="chatHistory"/> under
    /// <paramref name="authorRole"/>.
    /// </summary>
    private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole)
    {
        // The role arrives on (at least) one update; write it once, prefixing the first content chunk.
        bool roleWritten = false;
        string fullMessage = string.Empty;

        await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory))
        {
            if (!roleWritten && chatUpdate.Role.HasValue)
            {
                Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}");
                roleWritten = true;
            }

            if (chatUpdate.Content is { Length: > 0 })
            {
                // Accumulate the chunks so the complete reply can be added to the history below.
                fullMessage += chatUpdate.Content;
                Console.Write(chatUpdate.Content);
            }
        }

        Console.WriteLine("\n------------------------");
        chatHistory.AddMessage(authorRole, fullMessage);
    }
}
Original file line number Diff line number Diff line change
@@ -1,28 +1,21 @@
// Copyright (c) Microsoft. All rights reserved.

using System.ClientModel.Primitives;
using Azure;
using Azure.AI.OpenAI;
using Microsoft.SemanticKernel;

namespace ChatCompletion;

public sealed class OpenAI_CustomAzureOpenAIClient(ITestOutputHelper output) : BaseTest(output)
public sealed class AzureOpenAI_CustomClient(ITestOutputHelper output) : BaseTest(output)
{
[Fact]
public async Task RunAsync()
{
Console.WriteLine("======== Using a custom OpenAI client ========");
Console.WriteLine("======== Using a custom AzureOpenAI client ========");

string endpoint = TestConfiguration.AzureOpenAI.Endpoint;
string deploymentName = TestConfiguration.AzureOpenAI.ChatDeploymentName;
string apiKey = TestConfiguration.AzureOpenAI.ApiKey;

if (endpoint is null || deploymentName is null || apiKey is null)
{
Console.WriteLine("Azure OpenAI credentials not found. Skipping example.");
return;
}
Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint);
Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName);
Assert.NotNull(TestConfiguration.AzureOpenAI.ApiKey);

// Create an HttpClient and include your custom header(s)
var httpClient = new HttpClient();
Expand All @@ -32,12 +25,15 @@ public async Task RunAsync()
var clientOptions = new AzureOpenAIClientOptions
{
Transport = new HttpClientPipelineTransport(httpClient),
NetworkTimeout = TimeSpan.FromSeconds(30),
RetryPolicy = new ClientRetryPolicy()
};
var openAIClient = new AzureOpenAIClient(new Uri(endpoint), new AzureKeyCredential(apiKey), clientOptions);

IKernelBuilder builder = Kernel.CreateBuilder();
builder.AddAzureOpenAIChatCompletion(deploymentName, openAIClient);
Kernel kernel = builder.Build();
var customClient = new AzureOpenAIClient(new Uri(TestConfiguration.AzureOpenAI.Endpoint), TestConfiguration.AzureOpenAI.ApiKey, clientOptions);

var kernel = Kernel.CreateBuilder()
.AddAzureOpenAIChatCompletion(TestConfiguration.AzureOpenAI.ChatDeploymentName, customClient)
.Build();

// Load semantic plugin defined with prompt templates
string folder = RepoFiles.SamplePluginsPath();
Expand Down
Loading

0 comments on commit be90d23

Please sign in to comment.