@@ -15,13 +15,12 @@
using Microsoft.CodeAnalysis.PooledObjects;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Text;
using Roslyn.LanguageServer.Protocol;
using Roslyn.Utilities;
using LSP = Roslyn.LanguageServer.Protocol;

namespace Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens
{
internal class SemanticTokensHelpers
internal static class SemanticTokensHelpers
{
internal static async Task<int[]> HandleRequestHelperAsync(
IGlobalOptionService globalOptions,
@@ -36,60 +35,82 @@ internal static async Task<int[]> HandleRequestHelperAsync(
}

var contextDocument = context.GetRequiredDocument();
var project = contextDocument.Project;
var options = globalOptions.GetClassificationOptions(project.Language);
var supportsVisualStudioExtensions = context.GetRequiredClientCapabilities().HasVisualStudioLspCapability();

using var _ = ArrayBuilder<LinePositionSpan>.GetInstance(ranges.Length, out var spans);
foreach (var range in ranges)
{
spans.Add(ProtocolConversions.RangeToLinePositionSpan(range));
}

var tokensData = await HandleRequestHelperAsync(contextDocument, spans.ToImmutable(), supportsVisualStudioExtensions, options, cancellationToken).ConfigureAwait(false);

// The above call to get semantic tokens may be inaccurate (because we use frozen partial semantics). Kick
// off a request to ensure that the OOP side gets a fully up-to-date compilation for this project. Once it does
// we can optionally choose to notify our caller to do a refresh if we computed a compilation for a new
// solution snapshot.
await semanticTokensRefreshQueue.TryEnqueueRefreshComputationAsync(project, cancellationToken).ConfigureAwait(false);
return tokensData;
}

public static async Task<int[]> HandleRequestHelperAsync(Document document, ImmutableArray<LinePositionSpan> spans, bool supportsVisualStudioExtensions, ClassificationOptions options, CancellationToken cancellationToken)
{
// If the full compilation is not yet available, we'll try getting a partial one. It may contain inaccurate
// results but will speed up how quickly we can respond to the client's request.
var document = contextDocument.WithFrozenPartialSemantics(cancellationToken);
var project = document.Project;
var options = globalOptions.GetClassificationOptions(project.Language) with { ForceFrozenPartialSemanticsForCrossProcessOperations = true };
document = document.WithFrozenPartialSemantics(cancellationToken);
options = options with { ForceFrozenPartialSemanticsForCrossProcessOperations = true };

// The results from the range handler should not be cached since we don't want to cache
// partial token results. In addition, a range request is only ever called with a whole
// document request, so caching range results is unnecessary since the whole document
// handler will cache the results anyway.
var capabilities = context.GetRequiredClientCapabilities();
var tokensData = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
capabilities,
return await ComputeSemanticTokensDataAsync(
document,
ranges,
spans,
supportsVisualStudioExtensions,
options,
cancellationToken).ConfigureAwait(false);

// The above call to get semantic tokens may be inaccurate (because we use frozen partial semantics). Kick
// off a request to ensure that the OOP side gets a fully up-to-date compilation for this project. Once it does
// we can optionally choose to notify our caller to do a refresh if we computed a compilation for a new
// solution snapshot.
await semanticTokensRefreshQueue.TryEnqueueRefreshComputationAsync(project, cancellationToken).ConfigureAwait(false);
return tokensData;
}

/// <summary>
/// Returns the semantic tokens data for a given document with an optional set of spans.
/// </summary>
/// <param name="spans">Spans to compute tokens for. If empty, the whole document will be used.</param>
public static async Task<int[]> ComputeSemanticTokensDataAsync(
ClientCapabilities capabilities,
Document document,
LSP.Range[]? ranges,
ImmutableArray<LinePositionSpan> spans,
bool supportsVisualStudioExtensions,
ClassificationOptions options,
CancellationToken cancellationToken)
{
var tokenTypesToIndex = SemanticTokensSchema.GetSchema(capabilities.HasVisualStudioLspCapability()).TokenTypeToIndex;
var tokenTypesToIndex = SemanticTokensSchema.GetSchema(supportsVisualStudioExtensions).TokenTypeToIndex;
var root = await document.GetRequiredSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
var text = await document.GetValueTextAsync(cancellationToken).ConfigureAwait(false);
using var _1 = Classifier.GetPooledList(out var classifiedSpans);
using var _2 = Classifier.GetPooledList(out var updatedClassifiedSpans);

// We either calculate the tokens for the full document span, or the user
// can pass in a range from the full document if they wish.
ranges ??= [ProtocolConversions.TextSpanToRange(root.FullSpan, text)];
using var _ = ArrayBuilder<TextSpan>.GetInstance(ranges.Length, out var textSpans);
foreach (var range in ranges)
ImmutableArray<TextSpan> textSpans;
if (spans.Length == 0)
{
textSpans = [root.FullSpan];
}
else
{
textSpans.Add(ProtocolConversions.RangeToTextSpan(range, text));
using var _ = ArrayBuilder<TextSpan>.GetInstance(spans.Length, out var textSpansBuilder);
foreach (var span in spans)
{
textSpansBuilder.Add(text.Lines.GetTextSpan(span));
}

textSpans = textSpansBuilder.ToImmutable();
}

await GetClassifiedSpansForDocumentAsync(
classifiedSpans, document, textSpans.ToImmutableArray(), options, cancellationToken).ConfigureAwait(false);
classifiedSpans, document, textSpans, options, cancellationToken).ConfigureAwait(false);

// Classified spans are not guaranteed to be returned in a certain order so we sort them to be safe.
classifiedSpans.Sort(ClassifiedSpanComparer.Instance);
@@ -100,7 +121,7 @@

// TO-DO: We should implement support for streaming if LSP adds support for it:
// https://devdiv.visualstudio.com/DevDiv/_workitems/edit/1276300
return ComputeTokens(capabilities, text.Lines, updatedClassifiedSpans, tokenTypesToIndex);
return ComputeTokens(text.Lines, updatedClassifiedSpans, supportsVisualStudioExtensions, tokenTypesToIndex);
}

private static async Task GetClassifiedSpansForDocumentAsync(
@@ -214,9 +235,9 @@ static void ConvertToSingleLineSpan(
}

private static int[] ComputeTokens(
ClientCapabilities capabilities,
TextLineCollection lines,
SegmentedList<ClassifiedSpan> classifiedSpans,
bool supportsVisualStudioExtensions,
IReadOnlyDictionary<string, int> tokenTypesToIndex)
{
using var _ = ArrayBuilder<int>.GetInstance(classifiedSpans.Count, out var data);
Expand All @@ -226,7 +247,7 @@ private static int[] ComputeTokens(
var lastLineNumber = 0;
var lastStartCharacter = 0;

var tokenTypeMap = SemanticTokensSchema.GetSchema(capabilities.HasVisualStudioLspCapability()).TokenTypeMap;
var tokenTypeMap = SemanticTokensSchema.GetSchema(supportsVisualStudioExtensions).TokenTypeMap;

for (var currentClassifiedSpanIndex = 0; currentClassifiedSpanIndex < classifiedSpans.Count; currentClassifiedSpanIndex++)
{
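For context on the output format: ComputeTokens above emits the flat LSP semantic-token encoding, in which every token contributes five integers — line delta from the previous token, start-character delta (absolute when the line changes), length, token-type index, and a modifier bit set. That is exactly what the lastLineNumber/lastStartCharacter bookkeeping maintains. The stand-alone sketch below shows the same layout on hypothetical pre-classified tokens; it is illustrative only, not the PR's implementation.

// Minimal sketch of the LSP relative encoding (hypothetical inputs, not Roslyn code).
using System.Collections.Generic;

internal static class EncodingSketch
{
    internal readonly record struct Token(int Line, int StartChar, int Length, int TypeIndex, int Modifiers);

    // Tokens must already be sorted by position, just as the handler sorts classified spans.
    internal static int[] Encode(IReadOnlyList<Token> tokens)
    {
        var data = new List<int>(tokens.Count * 5);
        var lastLine = 0;
        var lastStartChar = 0;

        foreach (var token in tokens)
        {
            var deltaLine = token.Line - lastLine;
            // The start character is relative to the previous token only when both are on the same line.
            var deltaStartChar = deltaLine == 0 ? token.StartChar - lastStartChar : token.StartChar;

            data.Add(deltaLine);
            data.Add(deltaStartChar);
            data.Add(token.Length);
            data.Add(token.TypeIndex);
            data.Add(token.Modifiers);

            lastLine = token.Line;
            lastStartChar = token.StartChar;
        }

        return data.ToArray();
    }
}

For example, a keyword at line 1, character 4, length 5, followed on the same line by an identifier at character 10, length 3, encodes as 1, 4, 5, k, 0, 0, 6, 3, i, 0 for token-type indices k and i with no modifiers.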
@@ -2,11 +2,13 @@
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Classification;
using Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens;
using Microsoft.CodeAnalysis.Text;
using Roslyn.LanguageServer.Protocol;
using Roslyn.Test.Utilities;
using Xunit;
@@ -118,13 +120,14 @@ private void __RazorDirectiveTokenHelpers__() {
markup, mutatingLspWorkspace, GetCapabilities(isVS));

var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
var ranges = new LSP.Range[2] {
new LSP.Range { Start = new Position(12, 0), End = new Position(13, 0) },
new LSP.Range { Start = new Position(29, 0), End = new Position(30, 0) }
};
ImmutableArray<LinePositionSpan> spans = [
new LinePositionSpan(new LinePosition(12, 0), new LinePosition(13, 0)),
new LinePositionSpan(new LinePosition(29, 0), new LinePosition(30, 0)),
];

Member: Hmm, feels like these tests should be calling the actual range handler..., but that's not a new problem in this PR.

var options = ClassificationOptions.Default;
var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
testLspServer.ClientCapabilities, document, ranges, options, CancellationToken.None);
document, spans, isVS, options, CancellationToken.None);

var expectedResults = new LSP.SemanticTokens();
var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
@@ -175,10 +178,10 @@ static class C { }
markup, mutatingLspWorkspace, GetCapabilities(isVS));

var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
var ranges = new[] { new LSP.Range { Start = new Position(1, 0), End = new Position(2, 0) } };
ImmutableArray<LinePositionSpan> spans = [new LinePositionSpan(new LinePosition(1, 0), new LinePosition(2, 0))];
var options = ClassificationOptions.Default;
var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
testLspServer.ClientCapabilities, document, ranges, options, CancellationToken.None);
document, spans, isVS, options, CancellationToken.None);

var expectedResults = new LSP.SemanticTokens();
var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
@@ -225,10 +228,10 @@ public async Task TestGetSemanticTokensRange_MultiLineComment_IncludeSyntacticCl
markup, mutatingLspWorkspace, GetCapabilities(isVS));

var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
var ranges = new[] { new LSP.Range { Start = new Position(0, 0), End = new Position(4, 0) } };
ImmutableArray<LinePositionSpan> spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(4, 0))];
var options = ClassificationOptions.Default;
var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
testLspServer.ClientCapabilities, document, ranges, options, CancellationToken.None);
document, spans, isVS, options, CancellationToken.None);

var expectedResults = new LSP.SemanticTokens();
var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
@@ -284,10 +287,10 @@ void M()
markup, mutatingLspWorkspace, GetCapabilities(isVS));

var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
var ranges = new[] { new LSP.Range { Start = new Position(0, 0), End = new Position(9, 0) } };
ImmutableArray<LinePositionSpan> spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(9, 0))];
var options = ClassificationOptions.Default;
var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
testLspServer.ClientCapabilities, document, ranges, options, CancellationToken.None);
document, spans, isVS, options, CancellationToken.None);

var expectedResults = new LSP.SemanticTokens();
var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
@@ -365,10 +368,10 @@ void M()
markup, mutatingLspWorkspace, GetCapabilities(isVS));

var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
var ranges = new[] { new LSP.Range { Start = new Position(0, 0), End = new Position(9, 0) } };
ImmutableArray<LinePositionSpan> spans = [new LinePositionSpan(new LinePosition(0, 0), new LinePosition(9, 0))];
var options = ClassificationOptions.Default;
var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
testLspServer.ClientCapabilities, document, ranges, options, CancellationToken.None);
document, spans, isVS, options, CancellationToken.None);

var expectedResults = new LSP.SemanticTokens();
var tokenTypeToIndex = GetTokenTypeToIndex(testLspServer);
@@ -476,7 +479,7 @@ void M()
var document = testLspServer.GetCurrentSolution().Projects.First().Documents.First();
var options = ClassificationOptions.Default;
var results = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
testLspServer.ClientCapabilities, document, ranges: null, options: options, cancellationToken: CancellationToken.None);
document, spans: [], isVS, options: options, cancellationToken: CancellationToken.None);

var expectedResults = new LSP.SemanticTokens();

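The tests above now construct LinePositionSpan values directly where they previously built LSP.Range objects. A caller that still starts from LSP ranges can use ProtocolConversions.RangeToLinePositionSpan, as the handler itself does, or copy the line/character pairs by hand. A minimal stand-alone sketch of that conversion follows; the helper name is illustrative and not part of this PR, and it assumes the Start/End line and character properties used by the original tests.

// Illustrative helper (not part of this PR): LSP.Range[] -> ImmutableArray<LinePositionSpan>.
using System.Collections.Immutable;
using Microsoft.CodeAnalysis.Text;
using LSP = Roslyn.LanguageServer.Protocol;

internal static class RangeConversionSketch
{
    internal static ImmutableArray<LinePositionSpan> ToLinePositionSpans(LSP.Range[] ranges)
    {
        var builder = ImmutableArray.CreateBuilder<LinePositionSpan>(ranges.Length);
        foreach (var range in ranges)
        {
            // Equivalent to ProtocolConversions.RangeToLinePositionSpan(range).
            builder.Add(new LinePositionSpan(
                new LinePosition(range.Start.Line, range.Start.Character),
                new LinePosition(range.End.Line, range.End.Character)));
        }

        return builder.MoveToImmutable();
    }
}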
src/Features/Lsif/Generator/Generator.cs (4 changes: 2 additions & 2 deletions)
@@ -482,9 +482,9 @@ private static async Task GenerateSemanticTokensAsync(
// include syntax tokens in the generated data.
var data = await SemanticTokensHelpers.ComputeSemanticTokensDataAsync(
// Just get the pure-lsp semantic tokens here.
new VSInternalClientCapabilities { SupportsVisualStudioExtensions = true },
document,
ranges: null,
spans: [],
supportsVisualStudioExtensions: true,
options: Classification.ClassificationOptions.Default,
cancellationToken: CancellationToken.None);

@@ -0,0 +1,37 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Classification;
using Microsoft.CodeAnalysis.LanguageServer.Handler.SemanticTokens;
using Microsoft.CodeAnalysis.Text;

namespace Microsoft.CodeAnalysis.ExternalAccess.Razor.Cohost.Handlers
{
internal static class SemanticTokensRange
{
public static Task<int[]> GetSemanticTokensAsync(
Document document,
ImmutableArray<LinePositionSpan> spans,
bool supportsVisualStudioExtensions,
CancellationToken cancellationToken)
{
var tokens = SemanticTokensHelpers.HandleRequestHelperAsync(
document,
spans,
supportsVisualStudioExtensions,
ClassificationOptions.Default,
cancellationToken);

// The above call to get semantic tokens may be inaccurate (because we use frozen partial semantics). Kick
// off a request to ensure that the OOP side gets a fully up-to-date compilation for this project. Once it does
// we can optionally choose to notify our caller to do a refresh if we computed a compilation for a new
// solution snapshot.
// TODO: await semanticTokensRefreshQueue.TryEnqueueRefreshComputationAsync(project, cancellationToken).ConfigureAwait(false);
Member: why was this commented out?

Member Author: This is on the long list of follow-ups.

This method is called in the OOP process, so the queue has no access to send a refresh notification back to the client from here. We'll have to expose the queue functionality to the actual endpoint, and handle it there. Razor also has code to intercept refreshes, and map them from the generated C# back to the Razor file, and we need to work out a way to do that (if it's still relevant? maybe not for semantic tokens).

return tokens;
}
}
}
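For context, here is a hedged sketch of how a cohost endpoint might consume the new wrapper. The caller type, the way it obtains the generated C# document and mapped spans, and the refresh placeholder are all hypothetical; only SemanticTokensRange.GetSemanticTokensAsync comes from this PR, and the host-side refresh reflects the follow-up discussed in the review thread rather than existing code.

// Hypothetical Razor cohost caller; signatures of SemanticTokensRange are taken from the new file above.
using System.Collections.Immutable;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.ExternalAccess.Razor.Cohost.Handlers;
using Microsoft.CodeAnalysis.Text;

internal static class CohostSemanticTokensUsageSketch
{
    public static async Task<int[]> GetTokensAsync(
        Document generatedCSharpDocument,             // mapped from the Razor document by the caller
        ImmutableArray<LinePositionSpan> csharpSpans, // already mapped into generated C# coordinates
        bool supportsVisualStudioExtensions,
        CancellationToken cancellationToken)
    {
        // Runs in the OOP process; results may come from frozen partial semantics.
        var tokens = await SemanticTokensRange.GetSemanticTokensAsync(
            generatedCSharpDocument, csharpSpans, supportsVisualStudioExtensions, cancellationToken).ConfigureAwait(false);

        // Per the review thread, requesting a client refresh once a full compilation exists cannot
        // happen here; the host-side endpoint would need to enqueue it (e.g. via the refresh queue's
        // TryEnqueueRefreshComputationAsync seen earlier) and map it back to the Razor document.
        return tokens;
    }
}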