Skip to content

Commit

Permalink
Merge pull request MicrosoftDocs#191 from MicrosoftDocs/matthew-v1-up…
Browse files Browse the repository at this point in the history
…dates

Update docs for v1.0.0
  • Loading branch information
matthewbolanos authored Dec 14, 2023
2 parents d93bbba + a9fde26 commit 41d3efd
Show file tree
Hide file tree
Showing 405 changed files with 5,038 additions and 7,833 deletions.
132 changes: 131 additions & 1 deletion .openpublishing.redirection.json
Original file line number Diff line number Diff line change
Expand Up @@ -379,6 +379,136 @@
"source_path": "semantic-kernel/samples-and-solutions/simple-chat-summary.md",
"redirect_url": "/semantic-kernel/chat-copilot/index",
"redirect_document_id": false
},
{
"source_path": "semantic-kernel/ai-orchestration/kernel/adding-services.md",
"redirect_url": "/semantic-kernel/agents/kernel/adding-services",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/kernel/index.md",
"redirect_url": "/semantic-kernel/agents/kernel/index",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/planners/evaluate-and-deploy-planners/create-a-prompt-flow-with-semantic-kernel.md",
"redirect_url": "/semantic-kernel/agents/planners/evaluate-and-deploy-planners/create-a-prompt-flow-with-semantic-kernel",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/planners/evaluate-and-deploy-planners/deploying-prompt-flows-with-semantic-kernel.md",
"redirect_url": "/semantic-kernel/agents/planners/evaluate-and-deploy-planners/deploying-prompt-flows-with-semantic-kernel",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/planners/evaluate-and-deploy-planners/evaluating-plugins-and-planners-with-prompt-flow.md",
"redirect_url": "/semantic-kernel/agents/planners/evaluate-and-deploy-planners/evaluating-plugins-and-planners-with-prompt-flow",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/planners/evaluate-and-deploy-planners/index.md",
"redirect_url": "/semantic-kernel/agents/planners/evaluate-and-deploy-planners/index",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/planners/evaluate-and-deploy-planners/running-batches-with-prompt-flow.md",
"redirect_url": "/semantic-kernel/agents/planners/evaluate-and-deploy-planners/running-batches-with-prompt-flow",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/planners/index.md",
"redirect_url": "/semantic-kernel/agents/planners/index",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/plugins/chaining-functions.md",
"redirect_url": "/semantic-kernel/agents/plugins/index",
"redirect_document_id": false
},
{
"source_path": "semantic-kernel/ai-orchestration/plugins/chatgpt-plugins.md",
"redirect_url": "/semantic-kernel/agents/plugins/openai-plugins",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/plugins/index.md",
"redirect_url": "/semantic-kernel/agents/plugins/index",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/plugins/native-functions/calling-nested-functions.md",
"redirect_url": "/semantic-kernel/agents/plugins/index",
"redirect_document_id": false
},
{
"source_path": "semantic-kernel/ai-orchestration/plugins/native-functions/multiple-parameters.md",
"redirect_url": "/semantic-kernel/agents/plugins/index",
"redirect_document_id": false
},
{
"source_path": "semantic-kernel/ai-orchestration/plugins/native-functions/using-the-SKFunction-decorator.md",
"redirect_url": "/semantic-kernel/agents/plugins/using-the-KernelFunction-decorator",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/plugins/out-of-the-box-plugins.md",
"redirect_url": "/semantic-kernel/agents/plugins/out-of-the-box-plugins",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/plugins/semantic-functions/calling-nested-functions.md",
"redirect_url": "/semantic-kernel/prompts/calling-nested-functions",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/plugins/semantic-functions/inline-semantic-functions.md",
"redirect_url": "/semantic-kernel/prompts/your-first-prompt",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/plugins/semantic-functions/serializing-semantic-functions.md",
"redirect_url": "/semantic-kernel/prompts/saving-prompts-as-files",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/ai-orchestration/plugins/semantic-functions/templatizing-semantic-functions.md",
"redirect_url": "/semantic-kernel/prompts/templatizing-prompts",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/memories/context.md",
"redirect_url": "/semantic-kernel/memories/index",
"redirect_document_id": false
},
{
"source_path": "semantic-kernel/prompt-engineering/configure-prompts.md",
"redirect_url": "/semantic-kernel/prompts/configure-prompts",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/prompt-engineering/index.md",
"redirect_url": "/semantic-kernel/prompts/index",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/prompt-engineering/llm-models.md",
"redirect_url": "/semantic-kernel/prompts/index",
"redirect_document_id": false
},
{
"source_path": "semantic-kernel/prompt-engineering/prompt-template-syntax.md",
"redirect_url": "/semantic-kernel/prompts/prompt-template-syntax",
"redirect_document_id": true
},
{
"source_path": "semantic-kernel/prompt-engineering/tokens.md",
"redirect_url": "/semantic-kernel/prompts/index",
"redirect_document_id": false
},
{
"source_path": "semantic-kernel/prompt-engineering/your-first-prompt.md",
"redirect_url": "/semantic-kernel/prompts/your-first-prompt",
"redirect_document_id": false
}
]
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
"request": "launch",
"preLaunchTask": "build",
// If you have changed target frameworks, make sure to update the program path.
"program": "${workspaceFolder}/bin/Debug/net6.0/10-Chaining-Functions.dll",
"program": "${workspaceFolder}/bin/Debug/net8.0/00-Getting-Started.dll",
"args": [],
"cwd": "${workspaceFolder}",
// For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
"type": "process",
"args": [
"build",
"${workspaceFolder}/10-Chaining-Functions.csproj",
"${workspaceFolder}/00-Getting-Started.csproj",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary"
],
Expand All @@ -19,7 +19,7 @@
"type": "process",
"args": [
"publish",
"${workspaceFolder}/10-Chaining-Functions.csproj",
"${workspaceFolder}/00-Getting-Started.csproj",
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary"
],
Expand All @@ -33,7 +33,7 @@
"watch",
"run",
"--project",
"${workspaceFolder}/10-Chaining-Functions.csproj"
"${workspaceFolder}/00-Getting-Started.csproj"
],
"problemMatcher": "$msCompile"
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net6.0</TargetFramework>
<TargetFramework>net8.0</TargetFramework>
<RootNamespace></RootNamespace>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
Expand Down
25 changes: 25 additions & 0 deletions samples/dotnet/00-Getting-Started/00-Getting-Started.sln
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.5.002.0
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "00-Getting-Started", "00-Getting-Started.csproj", "{C6C0B288-CBC9-4C4A-AE46-25022EA75352}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{C6C0B288-CBC9-4C4A-AE46-25022EA75352}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C6C0B288-CBC9-4C4A-AE46-25022EA75352}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C6C0B288-CBC9-4C4A-AE46-25022EA75352}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C6C0B288-CBC9-4C4A-AE46-25022EA75352}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {5AE54649-58C0-4538-A347-9F2D46AD297E}
EndGlobalSection
EndGlobal
52 changes: 52 additions & 0 deletions samples/dotnet/00-Getting-Started/Program.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using Plugins;

// Assemble the kernel: a completion service, debug logging, and the light plugin.
// A text or chat completion service can be registered with any of:
// builder.Services.AddAzureOpenAIChatCompletion()
// builder.Services.AddAzureOpenAITextGeneration()
// builder.Services.AddOpenAIChatCompletion()
// builder.Services.AddOpenAITextGeneration()
var kernelBuilder = Kernel.CreateBuilder();
kernelBuilder.WithCompletionService();
kernelBuilder.Services.AddLogging(c => c.AddDebug().SetMinimumLevel(LogLevel.Trace));
kernelBuilder.Plugins.AddFromType<LightPlugin>();
var kernel = kernelBuilder.Build();

// Resolve the chat completion service that was registered above.
var chatService = kernel.GetRequiredService<IChatCompletionService>();

// Conversation transcript shared across all turns.
var chatHistory = new ChatHistory();

// Let the model invoke kernel functions (e.g. LightPlugin) automatically.
OpenAIPromptExecutionSettings executionSettings = new()
{
    ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
};

// Chat loop: read user input, ask the model, then print and record the reply.
while (true)
{
    // Collect the next user message and append it to the transcript.
    Console.Write("User > ");
    chatHistory.AddUserMessage(Console.ReadLine()!);

    // Ask the model for a response, allowing automatic tool invocation.
    var reply = await chatService.GetChatMessageContentAsync(
        chatHistory,
        executionSettings: executionSettings,
        kernel: kernel);

    // Show the assistant's answer.
    Console.WriteLine("Assistant > " + reply);

    // Record the assistant turn so the next request has full context.
    chatHistory.AddMessage(reply);
}
Original file line number Diff line number Diff line change
@@ -1,17 +1,17 @@
# Creating native functions

The `07-Simple-Native-Functions` console application shows the final solution to the [Run native code with Semantic Kernel](https://learn.microsoft.com/en-us/semantic-kernel/ai-orchestration/plugins/native-functions/using-the-skfunction-decorator) doc article.
The `00-Getting-Started` console application shows the final solution to the [Run native code with Semantic Kernel](https://learn.microsoft.com/en-us/semantic-kernel/agents/plugins/native-functions/using-the-skfunction-decorator) doc article.

## Prerequisites

- [.NET 6](https://dotnet.microsoft.com/download/dotnet/6.0) is required to run this sample.
- [.NET 8](https://dotnet.microsoft.com/download/dotnet/8.0) is required to run this sample.
- Install the recommended extensions
- [C#](https://marketplace.visualstudio.com/items?itemName=ms-dotnettools.csharp)
- [Semantic Kernel Tools](https://marketplace.visualstudio.com/items?itemName=ms-semantic-kernel.semantic-kernel) (optional)

## Prerequisites

- [.NET 6](https://dotnet.microsoft.com/download/dotnet/6.0) is required to run this sample.
- [.NET 8](https://dotnet.microsoft.com/download/dotnet/8.0) is required to run this sample.
- Install the recommended extensions
- [C#](https://marketplace.visualstudio.com/items?itemName=ms-dotnettools.csharp)
- [Semantic Kernel Tools](https://marketplace.visualstudio.com/items?itemName=ms-semantic-kernel.semantic-kernel) (optional)
Expand All @@ -24,21 +24,24 @@ This sample has been tested with the following models:

| Service | Model type | Model | Model version | Supported |
| ------------ | --------------- | ---------------- | ------------: | --------- |
| OpenAI | Text Completion | text-davinci-003 | 1 ||
| OpenAI | Chat Completion | gpt-3.5-turbo | 1 ||
| OpenAI | Chat Completion | gpt-3.5-turbo | 0301 ||
| OpenAI | Chat Completion | gpt-4 | 1 ||
| OpenAI | Chat Completion | gpt-4 | 0314 ||
| Azure OpenAI | Text Completion | text-davinci-003 | 1 ||
| Azure OpenAI | Chat Completion | gpt-3.5-turbo | 0301 ||
| Azure OpenAI | Chat Completion | gpt-4 | 0314 ||
| OpenAI | Text Completion | text-davinci-003 | 1 ||
| OpenAI | Chat Completion | gpt-3.5-turbo | 1 ||
| OpenAI | Chat Completion | gpt-3.5-turbo | 0301 ||
| Azure OpenAI | Chat Completion | gpt-3.5-turbo | 0613 ||
| Azure OpenAI | Chat Completion | gpt-3.5-turbo | 1106 ||
| OpenAI | Chat Completion | gpt-4 | 1 ||
| OpenAI | Chat Completion | gpt-4 | 0314 ||
| Azure OpenAI | Chat Completion | gpt-4 | 0613 ||
| Azure OpenAI | Chat Completion | gpt-4 | 1106 ||

This sample uses function calling, so it only works on models 0613 or newer.

### Using .NET [Secret Manager](https://learn.microsoft.com/en-us/aspnet/core/security/app-secrets)

Configure an OpenAI endpoint

```powershell
cd 07-Simple-Native-Functions
cd 00-Getting-Started
dotnet user-secrets set "Global:LlmService" "OpenAI"
Expand All @@ -51,12 +54,13 @@ dotnet user-secrets set "OpenAI:OrgId" "... your ord ID ..."
Configure an Azure OpenAI endpoint

```powershell
cd 07-Simple-Native-Functions
cd 00-Getting-Started
dotnet user-secrets set "Global:LlmService" "AzureOpenAI"
dotnet user-secrets set "AzureOpenAI:DeploymentType" "chat-completion"
dotnet user-secrets set "AzureOpenAI:ChatCompletionDeploymentName" "gpt-35-turbo"
dotnet user-secrets set "AzureOpenAI:ChatCompletionModelId" "gpt-3.5-turbo-0613"
dotnet user-secrets set "AzureOpenAI:Endpoint" "... your Azure OpenAI endpoint ..."
dotnet user-secrets set "AzureOpenAI:ApiKey" "... your Azure OpenAI key ..."
```
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,41 +9,43 @@ internal static class KernelBuilderExtensions
/// </summary>
/// <param name="kernelBuilder"></param>
/// <exception cref="ArgumentException"></exception>
internal static KernelBuilder WithCompletionService(this KernelBuilder kernelBuilder)
internal static IKernelBuilder WithCompletionService(this IKernelBuilder kernelBuilder)
{
switch (Env.Var("Global:LlmService")!)
{
case "AzureOpenAI":
if (Env.Var("AzureOpenAI:DeploymentType")! == "text-completion")
if (Env.Var("AzureOpenAI:DeploymentType") == "text-completion")
{
kernelBuilder.WithAzureTextCompletionService(
kernelBuilder.Services.AddAzureOpenAITextGeneration(
deploymentName: Env.Var("AzureOpenAI:TextCompletionDeploymentName")!,
modelId: Env.Var("AzureOpenAI:TextCompletionModelId")!,
endpoint: Env.Var("AzureOpenAI:Endpoint")!,
apiKey: Env.Var("AzureOpenAI:ApiKey")!
);
}
else if (Env.Var("AzureOpenAI:DeploymentType")! == "chat-completion")
else if (Env.Var("AzureOpenAI:DeploymentType") == "chat-completion")
{
kernelBuilder.WithAzureChatCompletionService(
kernelBuilder.Services.AddAzureOpenAIChatCompletion(
deploymentName: Env.Var("AzureOpenAI:ChatCompletionDeploymentName")!,
modelId: Env.Var("AzureOpenAI:ChatCompletionModelId")!,
endpoint: Env.Var("AzureOpenAI:Endpoint")!,
apiKey: Env.Var("AzureOpenAI:ApiKey")!
);
}
break;

case "OpenAI":
if (Env.Var("OpenAI:ModelType")! == "text-completion")
if (Env.Var("OpenAI:ModelType") == "text-completion")
{
kernelBuilder.WithOpenAITextCompletionService(
kernelBuilder.Services.AddOpenAITextGeneration(
modelId: Env.Var("OpenAI:TextCompletionModelId")!,
apiKey: Env.Var("OpenAI:ApiKey")!,
orgId: Env.Var("OpenAI:OrgId")
);
}
else if (Env.Var("OpenAI:ModelType")! == "chat-completion")
else if (Env.Var("OpenAI:ModelType") == "chat-completion")
{
kernelBuilder.WithOpenAIChatCompletionService(
kernelBuilder.Services.AddOpenAIChatCompletion(
modelId: Env.Var("OpenAI:ChatCompletionModelId")!,
apiKey: Env.Var("OpenAI:ApiKey")!,
orgId: Env.Var("OpenAI:OrgId")
Expand Down
Loading

0 comments on commit 41d3efd

Please sign in to comment.