-
Notifications
You must be signed in to change notification settings - Fork 3.3k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
.Net: Update OpenAI / AzureOpenAI Concepts (#8919)
### Motivation and Context - Split AzureOpenAI from OpenAI examples - Added missing links in Readme for Ollama and Azure AI Inference samples. - Added more descriptive text to OpenAI examples - Update naming for tests - Resolves #7007
- Loading branch information
1 parent
8e25752
commit be90d23
Showing
7 changed files
with
672 additions
and
86 deletions.
There are no files selected for viewing
99 changes: 99 additions & 0 deletions
99
dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletion.cs
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,99 @@ | ||
// Copyright (c) Microsoft. All rights reserved. | ||
|
||
using System.Text; | ||
using Azure.Identity; | ||
using Microsoft.SemanticKernel; | ||
using Microsoft.SemanticKernel.ChatCompletion; | ||
using Microsoft.SemanticKernel.Connectors.AzureOpenAI; | ||
|
||
namespace ChatCompletion; | ||
|
||
/// <summary>
/// These samples show how to use Semantic Kernel with the Azure OpenAI chat completion API,
/// both by invoking kernel prompts and by using <see cref="AzureOpenAIChatCompletionService"/> directly.
/// </summary>
public class AzureOpenAI_ChatCompletion(ITestOutputHelper output) : BaseTest(output)
{
    /// <summary>
    /// Sample showing how to invoke an XML-formatted chat prompt against Azure OpenAI,
    /// then feed the assistant reply back into the prompt for a follow-up turn.
    /// </summary>
    [Fact]
    public async Task ChatPromptAsync()
    {
        // Validate the Azure OpenAI settings that are actually used below.
        // (The original sample mistakenly asserted TestConfiguration.Ollama.ModelId,
        // a copy-paste leftover from the Ollama sample.)
        Assert.NotNull(TestConfiguration.AzureOpenAI.ChatDeploymentName);
        Assert.NotNull(TestConfiguration.AzureOpenAI.Endpoint);
        Assert.NotNull(TestConfiguration.AzureOpenAI.ApiKey);

        StringBuilder chatPrompt = new("""
            <message role="system">You are a librarian, expert about books</message>
            <message role="user">Hi, I'm looking for book suggestions</message>
            """);

        var kernel = Kernel.CreateBuilder()
            .AddAzureOpenAIChatCompletion(
                deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
                endpoint: TestConfiguration.AzureOpenAI.Endpoint,
                apiKey: TestConfiguration.AzureOpenAI.ApiKey,
                modelId: TestConfiguration.AzureOpenAI.ChatModelId)
            .Build();

        var reply = await kernel.InvokePromptAsync(chatPrompt.ToString());

        // Append the assistant reply (CDATA-escaped so reply text cannot break the XML)
        // plus a follow-up user message, then invoke the prompt again for a second turn.
        chatPrompt.AppendLine($"<message role=\"assistant\"><![CDATA[{reply}]]></message>");
        chatPrompt.AppendLine("<message role=\"user\">I love history and philosophy, I'd like to learn something new about Greece, any suggestion</message>");

        reply = await kernel.InvokePromptAsync(chatPrompt.ToString());

        Console.WriteLine(reply);
    }

    /// <summary>
    /// Sample showing how to use <see cref="AzureOpenAIChatCompletionService"/> directly,
    /// authenticating with an API key.
    /// </summary>
    [Fact]
    public async Task ServicePromptAsync()
    {
        Console.WriteLine("======== Azure Open AI - Chat Completion ========");

        AzureOpenAIChatCompletionService chatCompletionService = new(
            deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
            apiKey: TestConfiguration.AzureOpenAI.ApiKey,
            modelId: TestConfiguration.AzureOpenAI.ChatModelId);

        await StartChatAsync(chatCompletionService);
    }

    /// <summary>
    /// Sample showing how to use Azure Open AI Chat Completion with Azure Default Credential.
    /// If local auth is disabled in the Azure Open AI deployment, you can use Azure Default Credential to authenticate.
    /// </summary>
    [Fact]
    public async Task DefaultAzureCredentialSampleAsync()
    {
        Console.WriteLine("======== Azure Open AI - Chat Completion with Azure Default Credential ========");

        AzureOpenAIChatCompletionService chatCompletionService = new(
            deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
            credentials: new DefaultAzureCredential(),
            modelId: TestConfiguration.AzureOpenAI.ChatModelId);

        await StartChatAsync(chatCompletionService);
    }

    /// <summary>
    /// Runs a short two-turn "librarian" conversation against the provided chat completion
    /// service, echoing each message to the test output as it is added to the history.
    /// </summary>
    private async Task StartChatAsync(IChatCompletionService chatGPT)
    {
        Console.WriteLine("Chat content:");
        Console.WriteLine("------------------------");

        var chatHistory = new ChatHistory("You are a librarian, expert about books");

        // First user message
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
        OutputLastMessage(chatHistory);

        // First assistant message
        var reply = await chatGPT.GetChatMessageContentAsync(chatHistory);
        chatHistory.Add(reply);
        OutputLastMessage(chatHistory);

        // Second user message
        chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion");
        OutputLastMessage(chatHistory);

        // Second assistant message
        reply = await chatGPT.GetChatMessageContentAsync(chatHistory);
        chatHistory.Add(reply);
        OutputLastMessage(chatHistory);
    }
}
155 changes: 155 additions & 0 deletions
155
dotnet/samples/Concepts/ChatCompletion/AzureOpenAI_ChatCompletionStreaming.cs
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,155 @@ | ||
// Copyright (c) Microsoft. All rights reserved. | ||
|
||
using Microsoft.SemanticKernel; | ||
using Microsoft.SemanticKernel.ChatCompletion; | ||
using Microsoft.SemanticKernel.Connectors.AzureOpenAI; | ||
using Microsoft.SemanticKernel.Connectors.OpenAI; | ||
|
||
namespace ChatCompletion; | ||
|
||
/// <summary>
/// These examples demonstrate the ways different content types are streamed by Azure OpenAI via the chat completion service.
/// </summary>
public class AzureOpenAI_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output)
{
    /// <summary>
    /// This example demonstrates chat completion streaming using Azure OpenAI.
    /// </summary>
    [Fact]
    public Task StreamServicePromptAsync()
    {
        Console.WriteLine("======== Azure Open AI Chat Completion Streaming ========");

        AzureOpenAIChatCompletionService chatCompletionService = new(
            deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
            apiKey: TestConfiguration.AzureOpenAI.ApiKey,
            modelId: TestConfiguration.AzureOpenAI.ChatModelId);

        return this.StartStreamingChatAsync(chatCompletionService);
    }

    /// <summary>
    /// This example demonstrates how the chat completion service streams text content.
    /// It shows how to access the response update via StreamingChatMessageContent.Content property
    /// and alternatively via the StreamingChatMessageContent.Items property.
    /// </summary>
    [Fact]
    public async Task StreamServicePromptTextAsync()
    {
        Console.WriteLine("======== Azure Open AI Streaming Text ========");

        // Create chat completion service
        AzureOpenAIChatCompletionService chatCompletionService = new(
            deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
            apiKey: TestConfiguration.AzureOpenAI.ApiKey,
            modelId: TestConfiguration.AzureOpenAI.ChatModelId);

        // Create chat history with initial system and user messages
        ChatHistory chatHistory = new("You are a librarian, an expert on books.");
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions.");
        chatHistory.AddUserMessage("I love history and philosophy. I'd like to learn something new about Greece, any suggestion?");

        // Start streaming chat based on the chat history
        await foreach (StreamingChatMessageContent chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory))
        {
            // Access the response update via StreamingChatMessageContent.Content property
            Console.Write(chatUpdate.Content);

            // Alternatively, the response update can be accessed via the StreamingChatMessageContent.Items property
            Console.Write(chatUpdate.Items.OfType<StreamingTextContent>().FirstOrDefault());
        }
    }

    /// <summary>
    /// This example demonstrates how the chat completion service streams raw function call content.
    /// See <see cref="FunctionCalling.FunctionCalling.RunStreamingChatCompletionApiWithManualFunctionCallingAsync"/> for a sample demonstrating how to simplify
    /// function call content building out of streamed function call updates using the <see cref="FunctionCallContentBuilder"/>.
    /// </summary>
    [Fact]
    public async Task StreamFunctionCallContentAsync()
    {
        Console.WriteLine("======== Stream Function Call Content ========");

        // Create chat completion service
        AzureOpenAIChatCompletionService chatCompletionService = new(
            deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
            endpoint: TestConfiguration.AzureOpenAI.Endpoint,
            apiKey: TestConfiguration.AzureOpenAI.ApiKey,
            modelId: TestConfiguration.AzureOpenAI.ChatModelId);

        // Create kernel with helper plugin.
        // NOTE(review): the longTestString parameter is unused by the method body; it appears
        // intended to exercise argument streaming in function-call updates — confirm before removing.
        Kernel kernel = new();
        kernel.ImportPluginFromFunctions("HelperFunctions",
        [
            kernel.CreateFunctionFromMethod((string longTestString) => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."),
        ]);

        // Create execution settings with manual function calling
        OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(autoInvoke: false) };

        // Create chat history with initial user question
        ChatHistory chatHistory = [];
        chatHistory.AddUserMessage("Hi, what is the current time?");

        // Start streaming chat based on the chat history
        await foreach (StreamingChatMessageContent chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel))
        {
            // Getting list of function call updates requested by LLM
            var streamingFunctionCallUpdates = chatUpdate.Items.OfType<StreamingFunctionCallUpdateContent>();

            // Iterating over function call updates. Please use the FunctionCallContentBuilder to simplify function call content building.
            foreach (StreamingFunctionCallUpdateContent update in streamingFunctionCallUpdates)
            {
                Console.WriteLine($"Function call update: callId={update.CallId}, name={update.Name}, arguments={update.Arguments?.Replace("\n", "\\n")}, functionCallIndex={update.FunctionCallIndex}");
            }
        }
    }

    /// <summary>
    /// Runs a two-turn "librarian" conversation, streaming each assistant reply to the
    /// console as it arrives and appending the completed reply to the history.
    /// </summary>
    private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService)
    {
        Console.WriteLine("Chat content:");
        Console.WriteLine("------------------------");

        var chatHistory = new ChatHistory("You are a librarian, expert about books");
        OutputLastMessage(chatHistory);

        // First user message
        chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
        OutputLastMessage(chatHistory);

        // First assistant message
        await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);

        // Second user message
        chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?");
        OutputLastMessage(chatHistory);

        // Second assistant message
        await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);
    }

    /// <summary>
    /// Streams a single reply from the service, writing the role once (on the first update
    /// that carries it) and each content chunk as it arrives; the accumulated full message
    /// is then appended to <paramref name="chatHistory"/> under <paramref name="authorRole"/>.
    /// </summary>
    private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletionService, ChatHistory chatHistory, AuthorRole authorRole)
    {
        bool roleWritten = false;
        string fullMessage = string.Empty;

        await foreach (var chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory))
        {
            if (!roleWritten && chatUpdate.Role.HasValue)
            {
                Console.Write($"{chatUpdate.Role.Value}: {chatUpdate.Content}");
                roleWritten = true;
            }

            if (chatUpdate.Content is { Length: > 0 })
            {
                fullMessage += chatUpdate.Content;
                Console.Write(chatUpdate.Content);
            }
        }

        Console.WriteLine("\n------------------------");
        chatHistory.AddMessage(authorRole, fullMessage);
    }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.