-
Notifications
You must be signed in to change notification settings - Fork 3.3k
/
Connectors_WithMultipleLLMs.cs
185 lines (152 loc) · 8.07 KB
/
Connectors_WithMultipleLLMs.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
// Copyright (c) Microsoft. All rights reserved.
using Microsoft.SemanticKernel;
namespace ChatCompletion;
/// <summary>
/// Samples showing how to register multiple chat completion services on a single <see cref="Kernel"/>
/// and select between them at invocation time by service id or model id.
/// </summary>
public class Connectors_WithMultipleLLMs(ITestOutputHelper output) : BaseTest(output)
{
    private const string ChatPrompt = "Hello AI, what can you do for me?";

    /// <summary>
    /// Builds a kernel with two chat completion services registered:
    /// Azure OpenAI under service id "AzureOpenAIChat" and OpenAI under service id "OpenAIChat".
    /// The first service registered (Azure OpenAI) acts as the default service.
    /// </summary>
    private static Kernel BuildKernel()
    {
        return Kernel.CreateBuilder()
            .AddAzureOpenAIChatCompletion(
                deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
                endpoint: TestConfiguration.AzureOpenAI.Endpoint,
                apiKey: TestConfiguration.AzureOpenAI.ApiKey,
                serviceId: "AzureOpenAIChat",
                modelId: TestConfiguration.AzureOpenAI.ChatModelId)
            .AddOpenAIChatCompletion(
                modelId: TestConfiguration.OpenAI.ChatModelId,
                apiKey: TestConfiguration.OpenAI.ApiKey,
                serviceId: "OpenAIChat")
            .Build();
    }

    /// <summary>
    /// Shows how to invoke a prompt and specify the service id of the preferred AI service. When the prompt is executed the AI Service with the matching service id will be selected.
    /// </summary>
    /// <param name="serviceId">Service Id</param>
    [Theory]
    [InlineData("AzureOpenAIChat")]
    public async Task InvokePromptByServiceIdAsync(string serviceId)
    {
        var kernel = BuildKernel();

        Console.WriteLine($"======== Service Id: {serviceId} ========");

        var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId }));

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to invoke a prompt and specify the model id of the preferred AI service. When the prompt is executed the AI Service with the matching model id will be selected.
    /// </summary>
    [Fact]
    // NOTE: was `private`; xUnit only discovers public test methods (analyzer rule xUnit1000),
    // and every sibling test in this class is public.
    public async Task InvokePromptByModelIdAsync()
    {
        var modelId = TestConfiguration.OpenAI.ChatModelId;
        var kernel = BuildKernel();

        Console.WriteLine($"======== Model Id: {modelId} ========");

        var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings() { ModelId = modelId }));

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to invoke a prompt and specify the service ids of the preferred AI services.
    /// When the prompt is executed the AI Service will be selected based on the order of the provided service ids.
    /// </summary>
    [Fact]
    public async Task InvokePromptFunctionWithFirstMatchingServiceIdAsync()
    {
        // "NotFound" is intentionally first: selection skips unmatched ids and falls through in order.
        string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"];
        var kernel = BuildKernel();

        Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========");

        var result = await kernel.InvokePromptAsync(ChatPrompt, new(serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId })));

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to invoke a prompt and specify the model ids of the preferred AI services.
    /// When the prompt is executed the AI Service will be selected based on the order of the provided model ids.
    /// </summary>
    [Fact]
    public async Task InvokePromptFunctionWithFirstMatchingModelIdAsync()
    {
        // The first model id is intentionally one that is not registered, to demonstrate fall-through.
        string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId];
        var kernel = BuildKernel();

        Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========");

        // Each settings entry needs a unique ServiceId key; the synthetic "service-{index}" ids keep ordering.
        var result = await kernel.InvokePromptAsync(ChatPrompt, new(modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId })));

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to create a KernelFunction from a prompt and specify the service ids of the preferred AI services.
    /// When the function is invoked the AI Service will be selected based on the order of the provided service ids.
    /// </summary>
    [Fact]
    public async Task InvokePreconfiguredFunctionWithFirstMatchingServiceIdAsync()
    {
        string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"];
        var kernel = BuildKernel();

        Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========");

        var function = kernel.CreateFunctionFromPrompt(ChatPrompt, serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId }));
        var result = await kernel.InvokeAsync(function);

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to create a KernelFunction from a prompt and specify the model ids of the preferred AI services.
    /// When the function is invoked the AI Service will be selected based on the order of the provided model ids.
    /// </summary>
    [Fact]
    public async Task InvokePreconfiguredFunctionWithFirstMatchingModelIdAsync()
    {
        string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId];
        var kernel = BuildKernel();

        Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========");

        var function = kernel.CreateFunctionFromPrompt(ChatPrompt, modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId }));
        var result = await kernel.InvokeAsync(function);

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to invoke a KernelFunction and specify the model id of the AI Service the function will use.
    /// </summary>
    [Fact]
    public async Task InvokePreconfiguredFunctionByModelIdAsync()
    {
        var modelId = TestConfiguration.OpenAI.ChatModelId;
        var kernel = BuildKernel();

        Console.WriteLine($"======== Model Id: {modelId} ========");

        var function = kernel.CreateFunctionFromPrompt(ChatPrompt);
        var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ModelId = modelId }));

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows how to invoke a KernelFunction and specify the service id of the AI Service the function will use.
    /// </summary>
    /// <param name="serviceId">Service Id</param>
    [Theory]
    [InlineData("AzureOpenAIChat")]
    public async Task InvokePreconfiguredFunctionByServiceIdAsync(string serviceId)
    {
        var kernel = BuildKernel();

        Console.WriteLine($"======== Service Id: {serviceId} ========");

        var function = kernel.CreateFunctionFromPrompt(ChatPrompt);
        var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ServiceId = serviceId }));

        Console.WriteLine(result.GetValue<string>());
    }

    /// <summary>
    /// Shows when specifying a non-existent ServiceId the kernel throws an exception.
    /// </summary>
    /// <param name="serviceId">Service Id</param>
    [Theory]
    [InlineData("NotFound")]
    public async Task InvokePromptByNonExistingServiceIdThrowsExceptionAsync(string serviceId)
    {
        var kernel = BuildKernel();

        Console.WriteLine($"======== Service Id: {serviceId} ========");

        // Unlike an unmatched model id (which falls back to the default service), an unmatched
        // service id is a hard failure.
        await Assert.ThrowsAsync<KernelException>(async () => await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId })));
    }

    /// <summary>
    /// Shows how in the execution settings when no model id is found it falls back to the default service.
    /// </summary>
    /// <param name="modelId">Model Id</param>
    [Theory]
    [InlineData("NotFound")]
    public async Task InvokePromptByNonExistingModelIdUsesDefaultServiceAsync(string modelId)
    {
        var kernel = BuildKernel();

        Console.WriteLine($"======== Model Id: {modelId} ========");

        await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ModelId = modelId }));
    }
}