Skip to content

Commit

Permalink
feat: add AI (#7)
Browse files Browse the repository at this point in the history
* feat: add dependify AI
  • Loading branch information
NikiforovAll authored Aug 8, 2024
1 parent 2bfa312 commit 9a1676d
Show file tree
Hide file tree
Showing 36 changed files with 1,590 additions and 395 deletions.
11 changes: 10 additions & 1 deletion .vscode/launch.json
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,16 @@
"request": "launch",
"preLaunchTask": "build",
"program": "${workspaceFolder}/src/Dependify.Cli/bin/Debug/net8.0/Dependify.Cli.dll",
"args": ["serve", "C:\\Users\\Oleksii_Nikiforov\\dev\\dependify\\samples\\aspire-project"],
"args": [
"serve",
"C:\\Users\\Oleksii_Nikiforov\\dev\\dependify\\samples\\aspire-project",
"--endpoint",
"http://localhost:60759/",
"--model-id",
"phi3:mini",
"--api-key",
"apiKey"
],
"cwd": "${workspaceFolder}",
"stopAtEntry": false,
"console": "externalTerminal",
Expand Down
37 changes: 37 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ You will see something like the following output in the terminal.

- Workbench ⚙️
- Dependency Explorer 🔎
- Chat (AI) 🤖

Workbench gives you high level overview of the dependencies in the solution.

Expand All @@ -66,6 +67,18 @@ Dependency Explorer allows you to select the dependencies you want to see.
<video src="https://github.com/user-attachments/assets/555df3ef-b0c3-4354-911f-81d4dfd07607" controls="controls">
</video>

Chat (AI) allows you to ask questions about the dependencies.

```bash
dependify serve $dev/cap-aspire/ \
--endpoint https://api.openai.azure.com/ \
--deployment-name gpt-4o-mini \
--api-key <api-key>
```

<video src="https://github.com/user-attachments/assets/b07a8b53-d3d2-4ef8-9a8c-8c3dbd865350" controls="controls">
</video>

### Aspire support

You can add `Dependify.Web` as resource to your Aspire project.
Expand Down Expand Up @@ -249,3 +262,27 @@ var subgraph = graph.SubGraph(n => n.Id.Contains("AwesomeProjectName"));
`dotnet tool install --global --add-source ./Artefacts Dependify.Cli --prerelease`

`dotnet tool uninstall Dependify.Cli -g`

```bash
dotnet watch run --project ./src/Dependify.Cli/ -- \
serve $dev/cap-aspire/ \
--endpoint "http://localhost:1234/v1/chat/completions" \
--model-id "LM Studio Community/Meta-Llama-3-8B-Instruct-GGUF" \
--api-key "lm-studio" \
--log-level "Information"
```

```bash
dotnet watch run --project ./src/Dependify.Cli/ -- \
serve $dev/cap-aspire/ \
--endpoint "" \
--deployment-name "gpt-35-turbo" \
--api-key "" \
--log-level "Information"
```
Set the API key and endpoint for the AppHost with the following commands:

```bash
dotnet user-secrets set "Parameters:api-key" "<api-key>"
dotnet user-secrets set "Parameters:endpoint" "<endpoint>"
```
Binary file added assets/dependify-ai-chat-demo.mp4
Binary file not shown.
7 changes: 7 additions & 0 deletions dependify.sln
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Web", "src\Web\Web.csproj",
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Dependify.Aspire.Hosting", "src\Dependify.Aspire.Hosting\Dependify.Aspire.Hosting.csproj", "{7A8ED9D9-0609-495F-9D36-AF696BCAC5D0}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Dependify.Aspire.Hosting.Ollama", "src\Dependify.Aspire.Hosting.Ollama\Dependify.Aspire.Hosting.Ollama.csproj", "{F3BBEFB8-D9D2-4602-917B-2D7B0F1CCC8C}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Expand All @@ -43,6 +45,10 @@ Global
{7A8ED9D9-0609-495F-9D36-AF696BCAC5D0}.Debug|Any CPU.Build.0 = Debug|Any CPU
{7A8ED9D9-0609-495F-9D36-AF696BCAC5D0}.Release|Any CPU.ActiveCfg = Release|Any CPU
{7A8ED9D9-0609-495F-9D36-AF696BCAC5D0}.Release|Any CPU.Build.0 = Release|Any CPU
{F3BBEFB8-D9D2-4602-917B-2D7B0F1CCC8C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{F3BBEFB8-D9D2-4602-917B-2D7B0F1CCC8C}.Debug|Any CPU.Build.0 = Debug|Any CPU
{F3BBEFB8-D9D2-4602-917B-2D7B0F1CCC8C}.Release|Any CPU.ActiveCfg = Release|Any CPU
{F3BBEFB8-D9D2-4602-917B-2D7B0F1CCC8C}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
Expand All @@ -53,5 +59,6 @@ Global
{A13ED5C9-227D-4C24-A04C-617A81878415} = {C3712305-26BF-4E1B-B7E3-2A603443E98F}
{1358655A-56D9-45B8-80ED-758704415375} = {6EFAF0C3-2695-4C03-AAB0-D982DA582BEB}
{7A8ED9D9-0609-495F-9D36-AF696BCAC5D0} = {6EFAF0C3-2695-4C03-AAB0-D982DA582BEB}
{F3BBEFB8-D9D2-4602-917B-2D7B0F1CCC8C} = {6EFAF0C3-2695-4C03-AAB0-D982DA582BEB}
EndGlobalSection
EndGlobal
32 changes: 27 additions & 5 deletions samples/aspire-project/aspire-project.AppHost/Program.cs
Original file line number Diff line number Diff line change
@@ -1,13 +1,35 @@
var builder = DistributedApplication.CreateBuilder(args);

var useLocalModelParam = builder.AddParameter("use-local-model");
var endpointParam = builder.AddParameter("endpoint");
var deploymentNameParam = builder.AddParameter("deployment-name");
var apiKeyParam = builder.AddParameter("api-key", secret: true);

var apiService = builder.AddProject<Projects.aspire_project_ApiService>("apiservice");

builder.AddProject<Projects.aspire_project_Web>("webfrontend")
.WithExternalHttpEndpoints()
.WithReference(apiService);
builder.AddProject<Projects.aspire_project_Web>("webfrontend").WithExternalHttpEndpoints().WithReference(apiService);

var dependify = builder.AddDependify().ServeFrom("../../../");

if (useLocalModelParam.Resource.Value.ToString().Equals("true", StringComparison.OrdinalIgnoreCase))
{
var modelName = "phi3:mini";
var ollama = builder.AddOllama("ollama").WithDataVolume().AddModel(modelName).WithOpenWebUI();

builder.AddDependify("dependify1", port: 10000).WithDockerfile("..", "./aspire-project.AppHost/dependify.dockerfile");
dependify.WithOpenAI(ollama, modelName);
}
else
{
// Configure the AppHost with the following command:
// dotnet user-secrets set "Parameters:api-key" "<api-key>"
// dotnet user-secrets set "Parameters:deployment-name" "gpt-4o-mini"
// dotnet user-secrets set "Parameters:endpoint" "<endpoint>"

builder.AddDependify("dependify2", port: 10001).ServeFrom("../../aspire-project/");
dependify.WithAzureOpenAI(
endpointParam.Resource.Value.ToString(),
deploymentNameParam.Resource.Value.ToString(),
apiKeyParam.Resource.Value.ToString()
);
}

builder.Build().Run();
11 changes: 11 additions & 0 deletions samples/aspire-project/aspire-project.AppHost/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# AppHost

```csharp
builder.AddDependify("dependify2", port: 10001).ServeFrom("../../aspire-project/");
```

Control what gets copied into the workspace with a dockerfile

```csharp
builder.AddDependify("dependify1", port: 10000).WithDockerfile("..", "./aspire-project.AppHost/dependify.dockerfile");
```
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,11 @@
"Default": "Information",
"Microsoft.AspNetCore": "Warning"
}
},
"Parameters": {
"use-local-model": true,
"endpoint": "",
"deployment-name": "gpt-4o-mini",
"api-key": ""
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,13 @@
<ItemGroup>
<ProjectReference Include="..\aspire-project.ApiService\aspire-project.ApiService.csproj" />
<ProjectReference Include="..\aspire-project.Web\aspire-project.Web.csproj" />

<ProjectReference Include="..\..\..\src\Dependify.Aspire.Hosting\Dependify.Aspire.Hosting.csproj" IsAspireProjectResource="false" />
<ProjectReference Include="..\..\..\src\Dependify.Aspire.Hosting.Ollama\Dependify.Aspire.Hosting.Ollama.csproj" IsAspireProjectResource="false" />
</ItemGroup>

<ItemGroup>
<PackageReference Include="Aspire.Hosting.AppHost" Version="8.1.0" />
<PackageReference Include="Dependify.Aspire.Hosting" Version="1.2.0" />
</ItemGroup>

</Project>
14 changes: 13 additions & 1 deletion samples/aspire-project/aspire-project.sln
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
Microsoft Visual Studio Solution File, Format Version 12.00
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 17
VisualStudioVersion = 17.8.0.0
MinimumVisualStudioVersion = 17.8.0.0
Expand All @@ -10,6 +10,10 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "aspire-project.ApiService",
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "aspire-project.Web", "aspire-project.Web\aspire-project.Web.csproj", "{AC1256AB-8F2E-46FA-B5F0-F2B0AD2FA720}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Dependify.Aspire.Hosting.Ollama", "..\..\src\Dependify.Aspire.Hosting.Ollama\Dependify.Aspire.Hosting.Ollama.csproj", "{456B2C5D-AD42-4B5F-A959-9550807D1D01}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Dependify.Aspire.Hosting", "..\..\src\Dependify.Aspire.Hosting\Dependify.Aspire.Hosting.csproj", "{C34E52E7-A577-46AC-B167-8139C8ED75D6}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Expand All @@ -32,6 +36,14 @@ Global
{AC1256AB-8F2E-46FA-B5F0-F2B0AD2FA720}.Debug|Any CPU.Build.0 = Debug|Any CPU
{AC1256AB-8F2E-46FA-B5F0-F2B0AD2FA720}.Release|Any CPU.ActiveCfg = Release|Any CPU
{AC1256AB-8F2E-46FA-B5F0-F2B0AD2FA720}.Release|Any CPU.Build.0 = Release|Any CPU
{456B2C5D-AD42-4B5F-A959-9550807D1D01}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{456B2C5D-AD42-4B5F-A959-9550807D1D01}.Debug|Any CPU.Build.0 = Debug|Any CPU
{456B2C5D-AD42-4B5F-A959-9550807D1D01}.Release|Any CPU.ActiveCfg = Release|Any CPU
{456B2C5D-AD42-4B5F-A959-9550807D1D01}.Release|Any CPU.Build.0 = Release|Any CPU
{C34E52E7-A577-46AC-B167-8139C8ED75D6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{C34E52E7-A577-46AC-B167-8139C8ED75D6}.Debug|Any CPU.Build.0 = Debug|Any CPU
{C34E52E7-A577-46AC-B167-8139C8ED75D6}.Release|Any CPU.ActiveCfg = Release|Any CPU
{C34E52E7-A577-46AC-B167-8139C8ED75D6}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<IsPackable>true</IsPackable>
<PackageTags>aspire hosting ollama llm ai</PackageTags>
<Description>Ollama support for .NET Aspire.</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Aspire.Hosting" />
<PackageReference Include="OllamaSharp" />
</ItemGroup>
</Project>
175 changes: 175 additions & 0 deletions src/Dependify.Aspire.Hosting.Ollama/OllamaBuilderExtensions.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,175 @@
namespace Aspire.Hosting;

using Aspire.Hosting.Lifecycle;
using Aspire.Hosting.ApplicationModel;
using Aspire.Hosting.Ollama;

/// <summary>
/// Provides extension methods for adding Ollama to the application model.
/// </summary>
public static class OllamaBuilderExtensions
{
    /// <summary>
    /// Adds an Ollama resource to the application. A container is used for local development.
    /// </summary>
    /// <example>
    /// Use in application host
    /// <code lang="csharp">
    /// var builder = DistributedApplication.CreateBuilder(args);
    ///
    /// var ollama = builder.AddOllama("ollama");
    /// var api = builder.AddProject&lt;Projects.Api&gt;("api")
    ///                  .WithReference(ollama);
    ///
    /// builder.Build().Run();
    /// </code>
    /// </example>
    /// <remarks>
    /// This version of the package defaults to the 0.3.4 tag of the ollama/ollama container image.
    /// The .NET client library uses the http port by default to communicate and this resource exposes that endpoint.
    /// </remarks>
    /// <param name="builder">The <see cref="IDistributedApplicationBuilder"/>.</param>
    /// <param name="name">The name of the resource. This name will be used as the connection string name when referenced in a dependency.</param>
    /// <param name="enableGpu">Whether to enable GPU support.</param>
    /// <param name="port">The host port of the http endpoint of the Ollama resource.</param>
    /// <returns>A reference to the <see cref="IResourceBuilder{OllamaResource}"/>.</returns>
    public static IResourceBuilder<OllamaResource> AddOllama(
        this IDistributedApplicationBuilder builder,
        string name,
        bool enableGpu = false,
        int? port = null
    )
    {
        // Registered at most once per app model (TryAdd). NOTE(review): presumably the
        // hook pulls the models registered via AddModel when the resource starts —
        // confirm against OllamaLifecycleHook, which is not visible in this file.
        builder.Services.TryAddLifecycleHook<OllamaLifecycleHook>();

        var ollama = new OllamaResource(name);

        var resource = builder
            .AddResource(ollama)
            .WithImage(OllamaContainerImageTags.Image, OllamaContainerImageTags.Tag)
            .WithImageRegistry(OllamaContainerImageTags.Registry)
            // targetPort 11434 is the conventional Ollama API port inside the container;
            // the host port is caller-chosen (or auto-assigned when null).
            .WithHttpEndpoint(port: port, targetPort: 11434, OllamaResource.PrimaryEndpointName)
            .ExcludeFromManifest()
            .PublishAsContainer();

        if (enableGpu)
        {
            // Pass all host GPUs through to the container runtime.
            resource = resource.WithContainerRuntimeArgs("--gpus=all");
        }

        return resource;
    }

    /// <summary>
    /// Adds a model to the Ollama resource.
    /// </summary>
    /// <example>
    /// Use in application host
    /// <code lang="csharp">
    /// var builder = DistributedApplication.CreateBuilder(args);
    ///
    /// var ollama = builder.AddOllama("ollama")
    ///     .AddModel("phi3")
    ///     .WithDataVolume("ollama");
    ///
    /// var api = builder.AddProject&lt;Projects.Api&gt;("api")
    ///                  .WithReference(ollama);
    ///
    /// builder.Build().Run();
    /// </code>
    /// </example>
    /// <param name="builder">The Ollama resource builder.</param>
    /// <param name="modelName">The name of the model.</param>
    /// <remarks>This method will attempt to pull/download the model into the Ollama instance.</remarks>
    /// <returns>A reference to the <see cref="IResourceBuilder{T}"/>.</returns>
    public static IResourceBuilder<OllamaResource> AddModel(
        this IResourceBuilder<OllamaResource> builder,
        string modelName
    )
    {
        // Only records the model name on the resource here; the actual pull is
        // deferred — presumably performed by the lifecycle hook at startup (TODO confirm).
        builder.Resource.AddModel(modelName);
        return builder;
    }

    /// <summary>
    /// Adds a named volume for the data folder to an Ollama container resource,
    /// so downloaded models survive container restarts.
    /// </summary>
    /// <param name="builder">The resource builder.</param>
    /// <param name="name">The name of the volume. Defaults to "ollama-aspire-data" when not provided.</param>
    /// <param name="isReadOnly">A flag that indicates if this is a read-only volume.</param>
    /// <returns>The <see cref="IResourceBuilder{T}"/>.</returns>
    public static IResourceBuilder<OllamaResource> WithDataVolume(
        this IResourceBuilder<OllamaResource> builder,
        string? name = null,
        bool isReadOnly = false
    ) => builder.WithVolume(name ?? "ollama-aspire-data", "/root/.ollama", isReadOnly); // /root/.ollama is where the container stores models

    /// <summary>
    /// Adds a bind mount for the data folder to an Ollama container resource.
    /// </summary>
    /// <param name="builder">The resource builder.</param>
    /// <param name="source">The source directory on the host to mount into the container.</param>
    /// <param name="isReadOnly">A flag that indicates if this is a read-only mount.</param>
    /// <returns>The <see cref="IResourceBuilder{T}"/>.</returns>
    public static IResourceBuilder<OllamaResource> WithDataBindMount(
        this IResourceBuilder<OllamaResource> builder,
        string source,
        bool isReadOnly = false
    ) => builder.WithBindMount(source, "/root/.ollama", isReadOnly);

    /// <summary>
    /// Adds an administration web UI for Ollama to the application model using Open WebUI.
    /// This version of the package defaults to the main tag of the Open WebUI container image.
    /// </summary>
    /// <example>
    /// Use in application host with an Ollama resource
    /// <code lang="csharp">
    /// var builder = DistributedApplication.CreateBuilder(args);
    ///
    /// var ollama = builder.AddOllama("ollama")
    ///     .WithOpenWebUI();
    /// var api = builder.AddProject&lt;Projects.Api&gt;("api")
    ///                  .WithReference(ollama);
    ///
    /// builder.Build().Run();
    /// </code>
    /// </example>
    /// <param name="builder">The Ollama resource builder.</param>
    /// <param name="configureContainer">Configuration callback for the Open WebUI container resource.</param>
    /// <param name="containerName">The name of the container (optional). Defaults to "&lt;ollama-name&gt;-openwebui".</param>
    /// <returns>A reference to the <see cref="IResourceBuilder{T}"/> for the Ollama resource (not the web UI resource), so fluent chaining continues on Ollama.</returns>
    /// <remarks>See https://openwebui.com for more information about Open WebUI</remarks>
    public static IResourceBuilder<T> WithOpenWebUI<T>(
        this IResourceBuilder<T> builder,
        Action<IResourceBuilder<OpenWebUIResource>>? configureContainer = null,
        string? containerName = null
    )
        where T : OllamaResource
    {
        containerName ??= $"{builder.Resource.Name}-openwebui";

        var openWebUI = new OpenWebUIResource(containerName);
        var resourceBuilder = builder
            .ApplicationBuilder.AddResource(openWebUI)
            .WithImage(OllamaContainerImageTags.OpenWebUIImage, OllamaContainerImageTags.OpenWebUITag)
            .WithImageRegistry(OllamaContainerImageTags.OpenWebUIRegistry)
            // 8080 is the port the Open WebUI server listens on inside its container.
            .WithHttpEndpoint(targetPort: 8080, name: "http")
            .WithVolume("open-webui", "/app/backend/data")
            .WithEnvironment(context => ConfigureOpenWebUIContainer(context, builder.Resource))
            .ExcludeFromManifest();

        // Caller may further customize the web UI container (ports, env, etc.).
        configureContainer?.Invoke(resourceBuilder);

        // Intentionally return the Ollama builder rather than resourceBuilder,
        // so the web UI attaches as a side resource without breaking the chain.
        return builder;
    }

    /// <summary>
    /// Sets the environment for the Open WebUI container: disables sign-up/auth/sharing
    /// (local-development defaults) and points it at the given Ollama resource endpoint.
    /// </summary>
    private static void ConfigureOpenWebUIContainer(EnvironmentCallbackContext context, OllamaResource resource)
    {
        context.EnvironmentVariables.Add("ENABLE_SIGNUP", "false");
        context.EnvironmentVariables.Add("ENABLE_COMMUNITY_SHARING", "false"); // by default don't enable sharing
        context.EnvironmentVariables.Add("WEBUI_AUTH", "false"); // https://docs.openwebui.com/#quick-start-with-docker--recommended
        context.EnvironmentVariables.Add(
            "OLLAMA_BASE_URL",
            // ContainerHost: presumably the host name reachable from inside the
            // Open WebUI container (container-to-container networking) — verify.
            $"{resource.PrimaryEndpoint.Scheme}://{resource.PrimaryEndpoint.ContainerHost}:{resource.PrimaryEndpoint.Port}"
        );
    }
}
Loading

0 comments on commit 9a1676d

Please sign in to comment.