SemanticKernelFactory.cs
// Copyright (c) Microsoft. All rights reserved.

using System.Collections.Generic;
using System.Linq;
using KernelHttpServer.Config;
using KernelHttpServer.Utils;
using Microsoft.Azure.Functions.Worker.Http;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Memory;
using static KernelHttpServer.Config.Constants;

namespace KernelHttpServer;

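// Builds a Semantic Kernel instance per HTTP request: completion and embedding
// configuration is read from the request via ToApiKeyConfig(), sample semantic
// skills and native skills are registered, and a memory store plus MS Graph
// skills are wired in when available.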
internal static class SemanticKernelFactory
{
    internal static IKernel? CreateForRequest(
        HttpRequestData req,
        ILogger logger,
        IEnumerable<string>? skillsToLoad = null,
        IMemoryStore? memoryStore = null)
    {
        var apiConfig = req.ToApiKeyConfig();

        // must have a completion service
        if (!apiConfig.CompletionConfig.IsValid())
        {
            logger.LogError("Chat completion service has not been supplied");
            return null;
        }

        // Text embedding service is optional, don't fail if we were not given the config
        if (memoryStore != null &&
            !apiConfig.EmbeddingConfig.IsValid())
        {
            logger.LogWarning("Text embedding service has not been supplied");
        }

        KernelBuilder builder = Kernel.Builder;
        builder = _ConfigureKernelBuilder(apiConfig, builder, memoryStore);

        return _CompleteKernelSetup(req, builder, logger, skillsToLoad);
    }
    private static KernelBuilder _ConfigureKernelBuilder(ApiKeyConfig config, KernelBuilder builder, IMemoryStore? memoryStore)
    {
        switch (config.CompletionConfig.AIService)
        {
            case AIService.OpenAI:
                builder.WithOpenAIChatCompletionService(
                    modelId: config.CompletionConfig.DeploymentOrModelId,
                    apiKey: config.CompletionConfig.Key);
                break;
            case AIService.AzureOpenAI:
                builder.WithAzureChatCompletionService(
                    deploymentName: config.CompletionConfig.DeploymentOrModelId,
                    endpoint: config.CompletionConfig.Endpoint,
                    apiKey: config.CompletionConfig.Key);
                break;
            default:
                break;
        }

        if (memoryStore != null && config.EmbeddingConfig.IsValid())
        {
            switch (config.EmbeddingConfig.AIService)
            {
                case AIService.OpenAI:
                    builder.WithOpenAITextEmbeddingGenerationService(
                        modelId: config.EmbeddingConfig.DeploymentOrModelId,
                        apiKey: config.EmbeddingConfig.Key);
                    break;
                case AIService.AzureOpenAI:
                    builder.WithAzureTextEmbeddingGenerationService(
                        deploymentName: config.EmbeddingConfig.DeploymentOrModelId,
                        endpoint: config.EmbeddingConfig.Endpoint,
                        apiKey: config.EmbeddingConfig.Key);
                    break;
                default:
                    break;
            }

            builder.WithMemoryStorage(memoryStore);
        }

        return builder;
    }
    private static IKernel _CompleteKernelSetup(HttpRequestData req, KernelBuilder builder, ILogger logger, IEnumerable<string>? skillsToLoad = null)
    {
        IKernel kernel = builder.Build();

        kernel.RegisterSemanticSkills(RepoFiles.SampleSkillsPath(), logger, skillsToLoad);
        kernel.RegisterNativeSkills(skillsToLoad);

        if (req.Headers.TryGetValues(SKHttpHeaders.MSGraph, out var graphToken))
        {
            kernel.RegisterNativeGraphSkills(graphToken.First());
        }

        kernel.RegisterTextMemory();

        return kernel;
    }
}
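
// ---------------------------------------------------------------------------
// Example usage (hypothetical sketch, not part of the original file): a minimal
// isolated-worker Azure Function that builds a kernel per request via
// SemanticKernelFactory and runs one semantic function. The function name,
// route, and the "FunSkill"/"Joke" skill are illustrative assumptions; the
// kernel calls shown (kernel.Skills.GetFunction, kernel.RunAsync) assume the
// pre-1.0 Semantic Kernel API used by this sample.
// ---------------------------------------------------------------------------
//
// using System.Net;
// using System.Threading.Tasks;
// using Microsoft.Azure.Functions.Worker;
// using Microsoft.Azure.Functions.Worker.Http;
// using Microsoft.Extensions.Logging;
//
// internal class ExampleEndpoint
// {
//     [Function("ExampleInvokeJoke")]
//     public async Task<HttpResponseData> RunAsync(
//         [HttpTrigger(AuthorizationLevel.Function, "post")] HttpRequestData req,
//         FunctionContext context)
//     {
//         ILogger logger = context.GetLogger<ExampleEndpoint>();
//
//         // Returns null when the request carries no valid completion config.
//         IKernel? kernel = SemanticKernelFactory.CreateForRequest(req, logger);
//         if (kernel == null)
//         {
//             return req.CreateResponse(HttpStatusCode.BadRequest);
//         }
//
//         // Run a sample semantic skill loaded from SampleSkillsPath().
//         var function = kernel.Skills.GetFunction("FunSkill", "Joke");
//         var result = await kernel.RunAsync("a developer writing release notes", function);
//
//         HttpResponseData response = req.CreateResponse(HttpStatusCode.OK);
//         await response.WriteStringAsync(result.Result);
//         return response;
//     }
// }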