@@ -0,0 +1,156 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Numerics;

namespace CommunityToolkit.WinUI.Helpers;

public partial class ColorPaletteSampler
{
private ref struct DBScan
{
private const int Unclassified = -1;

public static Vector3[] Cluster(Span<Vector3> points, float epsilon, int minPoints, ref float[] weights)
{
var centroids = new List<Vector3>();
var newWeights = new List<float>();

// Create context
var context = new DBScan(points, weights, epsilon, minPoints);

// Attempt to create a cluster around each point,
// skipping that point if already classified
for (int i = 0; i < points.Length; i++)
{
// Already classified, skip
if (context.PointClusterIds[i] is not Unclassified)
continue;

// Attempt to create cluster
if (context.CreateCluster(i, out var centroid, out var weight))
{
centroids.Add(centroid);
newWeights.Add(weight);
}
}

weights = newWeights.ToArray();
return centroids.ToArray();
}

private bool CreateCluster(int originIndex, out Vector3 centroid, out float weight)
{
weight = 0;
centroid = Vector3.Zero;
var seeds = GetSeeds(originIndex, out bool isCore);

// Not enough seeds to be a core point.
// Cannot create a cluster around it
if (!isCore)
{
return false;
}

ExpandCluster(seeds, out centroid, out weight);
ClusterId++;

return true;
}

private void ExpandCluster(Queue<int> seeds, out Vector3 centroid, out float weight)
{
weight = 0;
centroid = Vector3.Zero;
while (seeds.Count > 0)
{
var seedIndex = seeds.Dequeue();

// Skip duplicate seed entries
if (PointClusterIds[seedIndex] is not Unclassified)
continue;

// Assign the current cluster id to this seed
PointClusterIds[seedIndex] = ClusterId;
var w = Weights[seedIndex];
centroid += Points[seedIndex] * w;
weight += w;

// Check if this seed is a core point
var grandSeeds = GetSeeds(seedIndex, out var seedIsCore);
if (!seedIsCore)
continue;

// This seed is a core point. Enqueue all its seeds
foreach (var grandSeedIndex in grandSeeds)
if (PointClusterIds[grandSeedIndex] is Unclassified)
seeds.Enqueue(grandSeedIndex);
}

centroid /= weight;
}

private Queue<int> GetSeeds(int originIndex, out bool isCore)
{
var origin = Points[originIndex];

var seeds = new Queue<int>();
for (int i = 0; i < Points.Length; i++)
{
if (Vector3.DistanceSquared(origin, Points[i]) <= Epsilon2)
seeds.Enqueue(i);
}

// Count includes the origin point itself, so compare with strictly greater-than rather than greater-or-equal
isCore = seeds.Count > MinPoints;
return seeds;
}

private DBScan(Span<Vector3> points, Span<float> weights, double epsilon, int minPoints)
{
Points = points;
Weights = weights;
Epsilon2 = epsilon * epsilon;
MinPoints = minPoints;

ClusterId = 0;
PointClusterIds = new int[points.Length];
for (int i = 0; i < points.Length; i++)
PointClusterIds[i] = Unclassified;
}

/// <summary>
/// Gets the points being clustered.
/// </summary>
public Span<Vector3> Points { get; }

/// <summary>
/// Gets the weights of the points.
/// </summary>
public Span<float> Weights { get; }

/// <summary>
/// Gets or sets the id of the currently evaluating cluster.
/// </summary>
public int ClusterId { get; set; }

/// <summary>
/// Gets an array containing the id of the cluster each point belongs to.
/// </summary>
public int[] PointClusterIds { get; }

/// <summary>
/// Gets epsilon squared, where epsilon is the maximum distance at which two points are considered connected.
/// </summary>
/// <remarks>
/// This is cached as epsilon squared to skip a sqrt operation when comparing distances to epsilon.
/// </remarks>
public double Epsilon2 { get; }

/// <summary>
/// Gets the minimum number of points required to make a core point.
/// </summary>
public int MinPoints { get; }
}
}
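For context, a minimal sketch of how this merge pass behaves, using made-up centroids and weights (DBScan is a private nested type, so a call like this would have to live inside ColorPaletteSampler, which already imports System.Numerics; the values below are illustrative assumptions, not taken from the change):

```csharp
// Illustrative only: three hypothetical k-means centroids in RGB [0..1] space.
// The first two lie within epsilon of each other and should merge.
Vector3[] centroids =
{
    new Vector3(0.10f, 0.10f, 0.10f),
    new Vector3(0.15f, 0.12f, 0.11f),
    new Vector3(0.90f, 0.85f, 0.80f),
};

// Hypothetical cluster-size fractions for each centroid.
var weights = new float[] { 0.5f, 0.2f, 0.3f };

// epsilon = 0.12f, minPoints = 0.
// minPoints = 0 makes every point a core point, so nothing is discarded;
// centroids closer than epsilon are simply merged.
var merged = DBScan.Cluster(centroids, 0.12f, 0, ref weights);

// Expected outcome: merged.Length == 2 and weights == { 0.7f, 0.3f }.
// The first entry is the weight-averaged color (0.5 * c0 + 0.2 * c1) / 0.7;
// the isolated third centroid passes through unchanged.
```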
@@ -69,7 +69,7 @@ private static void Split(int k, int[] clusterIds)
/// <summary>
/// Calculates the centroid of each cluster, and prunes empty clusters.
/// </summary>
private static void CalculateCentroidsAndPrune(ref Span<Vector3> centroids, ref int[] counts, Span<Vector3> points, int[] clusterIds)
internal static void CalculateCentroidsAndPrune(ref Span<Vector3> centroids, ref int[] counts, Span<Vector3> points, int[] clusterIds)
{
// Clear centroids and counts before recalculation
for (int i = 0; i < centroids.Length; i++)
@@ -52,6 +52,7 @@ public async Task UpdatePaletteAsync()

const int sampleCount = 4096;
const int k = 8;
const float mergeDistance = 0.12f;

// Retrieve pixel samples from source
var samples = await SampleSourcePixelColorsAsync(sampleCount);
@@ -62,8 +63,11 @@

// Cluster samples in RGB floating-point color space
// with a squared Euclidean distance function, then construct palette data.
var clusters = KMeansCluster(samples, k, out var sizes);
var colorData = clusters.Select((vectorColor, i) => new PaletteColor(vectorColor.ToColor(), (float)sizes[i] / samples.Length));
// Merge KMeans results that are too similar, using DBScan
var kClusters = KMeansCluster(samples, k, out var counts);
var weights = counts.Select(x => (float)x / samples.Length).ToArray();
var dbCluster = DBScan.Cluster(kClusters, mergeDistance, 0, ref weights);
var colorData = dbCluster.Select((vectorColor, i) => new PaletteColor(vectorColor.ToColor(), weights[i]));

// Update palettes on the UI thread
foreach (var palette in PaletteSelectors)
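To make the new weight handling concrete with assumed numbers: if two of the eight k-means clusters cover 1000 and 600 of the 4096 samples and their centroids lie within mergeDistance (0.12) of each other, the merge step emits a single palette entry whose weight is (1000 + 600) / 4096 ≈ 0.39 and whose color is the count-weighted average of the two centroids; clusters with no nearby neighbor pass through unchanged, so the palette weights still sum to roughly 1.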