Added Handler for Semantic Tokenization (#1328)
* added basic semantic token support

* removed unnecessary imports

* removed unnecessary field

* minor refactoring changes

* minor refactoring changes

* change tokenize to non async

* rename handler

* refactoring + copyright

* renamed handler file

* Delete log20200713.txt

* moved/refactored handler

* added e2e tests

* updated test

* remove pragma

* removed extra spacing

* added testing for converting from PS token to semantic tokens

* refactored the functions related to converting between tokens

* refactored ConvertSemanticToken

* fixed tests

* added more test cases

* fixed spacing

* added enum test

* fixed spacing issues

* fixed spacing, added note about token array representation

* changed name to PsesSemanticTokensHandler

* reformatted fields

* renamed file

* used Assert.Collection instead of Assert.Single

* modified yml file to fix build

* undo changes in yml file

* addressed issues in PR

* remove unused using

* Delete Untitled-1.json

Co-authored-by: Justin Chen <[email protected]>
justinytchen and Justin Chen authored Jul 30, 2020
1 parent e326c48 commit 6095ae5
Showing 5 changed files with 415 additions and 0 deletions.
1 change: 1 addition & 0 deletions src/PowerShellEditorServices/Server/PsesLanguageServer.cs
@@ -92,6 +92,7 @@ public async Task StartAsync()
.WithHandler<GetCommandHandler>()
.WithHandler<ShowHelpHandler>()
.WithHandler<ExpandAliasHandler>()
.WithHandler<PsesSemanticTokensHandler>()
.OnInitialize(
async (languageServer, request, cancellationToken) =>
{
@@ -0,0 +1,168 @@
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//

using System;
using System.Collections.Generic;
using System.Management.Automation.Language;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.PowerShell.EditorServices.Services;
using Microsoft.PowerShell.EditorServices.Services.TextDocument;
using Microsoft.PowerShell.EditorServices.Utility;
using OmniSharp.Extensions.LanguageServer.Protocol;
using OmniSharp.Extensions.LanguageServer.Protocol.Document.Proposals;
using OmniSharp.Extensions.LanguageServer.Protocol.Models;
using OmniSharp.Extensions.LanguageServer.Protocol.Models.Proposals;

namespace Microsoft.PowerShell.EditorServices.Handlers
{
internal class PsesSemanticTokensHandler : SemanticTokensHandler
{
private static readonly SemanticTokensRegistrationOptions s_registrationOptions = new SemanticTokensRegistrationOptions
{
DocumentSelector = LspUtils.PowerShellDocumentSelector,
Legend = new SemanticTokensLegend(),
DocumentProvider = new Supports<SemanticTokensDocumentProviderOptions>(
isSupported: true,
new SemanticTokensDocumentProviderOptions
{
Edits = true
}),
RangeProvider = true
};

private readonly ILogger _logger;
private readonly WorkspaceService _workspaceService;

public PsesSemanticTokensHandler(ILogger<PsesSemanticTokensHandler> logger, WorkspaceService workspaceService)
: base(s_registrationOptions)
{
_logger = logger;
_workspaceService = workspaceService;
}

protected override Task Tokenize(SemanticTokensBuilder builder, ITextDocumentIdentifierParams identifier,
CancellationToken cancellationToken)
{
ScriptFile file = _workspaceService.GetFile(identifier.TextDocument.Uri);
foreach (Token token in file.ScriptTokens)
{
PushToken(token, builder);
}
return Task.CompletedTask;
}

private static void PushToken(Token token, SemanticTokensBuilder builder)
{
foreach (SemanticToken sToken in ConvertToSemanticTokens(token))
{
builder.Push(
sToken.Line,
sToken.Column,
length: sToken.Text.Length,
sToken.Type,
tokenModifiers: sToken.TokenModifiers);
}
}

internal static IEnumerable<SemanticToken> ConvertToSemanticTokens(Token token)
{
if (token is StringExpandableToken stringExpandableToken)
{
// Try parsing tokens within the string
if (stringExpandableToken.NestedTokens != null)
{
foreach (Token t in stringExpandableToken.NestedTokens)
{
foreach (SemanticToken subToken in ConvertToSemanticTokens(t))
yield return subToken;
}
yield break;
}
}

SemanticTokenType mappedType = MapSemanticTokenType(token);
if (mappedType == null)
{
yield break;
}

// Note that both column and line numbers are 0-based
yield return new SemanticToken(
token.Text,
mappedType,
line: token.Extent.StartLineNumber - 1,
column: token.Extent.StartColumnNumber - 1,
tokenModifiers: Array.Empty<string>());
}

private static SemanticTokenType MapSemanticTokenType(Token token)
{
// First check token flags
if ((token.TokenFlags & TokenFlags.Keyword) != 0)
{
return SemanticTokenType.Keyword;
}

if ((token.TokenFlags & TokenFlags.CommandName) != 0)
{
return SemanticTokenType.Function;
}

if (token.Kind != TokenKind.Generic && (token.TokenFlags &
(TokenFlags.BinaryOperator | TokenFlags.UnaryOperator | TokenFlags.AssignmentOperator)) != 0)
{
return SemanticTokenType.Operator;
}

if ((token.TokenFlags & TokenFlags.TypeName) != 0)
{
return SemanticTokenType.Type;
}

if ((token.TokenFlags & TokenFlags.MemberName) != 0)
{
return SemanticTokenType.Member;
}

// Only check token kind after checking flags
switch (token.Kind)
{
case TokenKind.Comment:
return SemanticTokenType.Comment;

case TokenKind.Parameter:
case TokenKind.Generic when token is StringLiteralToken slt && slt.Text.StartsWith("--"):
return SemanticTokenType.Parameter;

case TokenKind.Variable:
case TokenKind.SplattedVariable:
return SemanticTokenType.Variable;

case TokenKind.StringExpandable:
case TokenKind.StringLiteral:
case TokenKind.HereStringExpandable:
case TokenKind.HereStringLiteral:
return SemanticTokenType.String;

case TokenKind.Number:
return SemanticTokenType.Number;

case TokenKind.Generic:
return SemanticTokenType.Function;
}

return null;
}

protected override Task<SemanticTokensDocument> GetSemanticTokensDocument(
ITextDocumentIdentifierParams @params,
CancellationToken cancellationToken)
{
return Task.FromResult(new SemanticTokensDocument(GetRegistrationOptions().Legend));
}
}
}
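
Aside (not part of this commit): a minimal sketch of how the ConvertToSemanticTokens helper above could be exercised directly against the PowerShell parser's token stream. The demo class name is hypothetical, and calling the internal method from outside its assembly assumes an InternalsVisibleTo arrangement like the one the test project relies on.

using System;
using System.Management.Automation.Language;
using Microsoft.PowerShell.EditorServices.Handlers;
using Microsoft.PowerShell.EditorServices.Services.TextDocument;

internal static class SemanticTokenDemo
{
    public static void Main()
    {
        // Tokenize a small script with the PowerShell parser.
        Parser.ParseInput("function Get-Foo { $x = 1 }", out Token[] tokens, out ParseError[] _);

        foreach (Token token in tokens)
        {
            // Each parser token maps to zero or more semantic tokens; nested
            // tokens inside expandable strings are flattened recursively, and
            // tokens with no semantic mapping yield nothing.
            foreach (SemanticToken sToken in PsesSemanticTokensHandler.ConvertToSemanticTokens(token))
            {
                Console.WriteLine($"{sToken.Type} '{sToken.Text}' at {sToken.Line}:{sToken.Column}");
            }
        }
    }
}
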
@@ -0,0 +1,27 @@
using System.Collections.Generic;
using OmniSharp.Extensions.LanguageServer.Protocol.Models.Proposals;

namespace Microsoft.PowerShell.EditorServices.Services.TextDocument
{
internal class SemanticToken
{
public SemanticToken(string text, SemanticTokenType type, int line, int column, IEnumerable<string> tokenModifiers)
{
Line = line;
Text = text;
Column = column;
Type = type;
TokenModifiers = tokenModifiers;
}

public string Text { get; set; }

public int Line { get; set; }

public int Column { get; set; }

public SemanticTokenType Type { get; set; }

public IEnumerable<string> TokenModifiers { get; set; }
}
}
@@ -19,6 +19,7 @@
using OmniSharp.Extensions.LanguageServer.Protocol.Document;
using OmniSharp.Extensions.LanguageServer.Protocol.Models;
using OmniSharp.Extensions.LanguageServer.Protocol.Workspace;
using OmniSharp.Extensions.LanguageServer.Protocol.Models.Proposals;
using Xunit;
using Xunit.Abstractions;
using Range = OmniSharp.Extensions.LanguageServer.Protocol.Models.Range;
@@ -1136,5 +1137,35 @@ await PsesLanguageClient

Assert.Equal("Get-ChildItem", expandAliasResult.Text);
}

[Fact]
public async Task CanSendSemanticTokenRequest()
{
string scriptContent = "function";
string scriptPath = NewTestFile(scriptContent);

SemanticTokens result =
await PsesLanguageClient
.SendRequest<SemanticTokensParams>(
"textDocument/semanticTokens",
new SemanticTokensParams
{
TextDocument = new TextDocumentIdentifier
{
Uri = new Uri(scriptPath)
}
})
.Returning<SemanticTokens>(CancellationToken.None);

// More information about how this data is generated can be found at
// https://github.com/microsoft/vscode-extension-samples/blob/5ae1f7787122812dcc84e37427ca90af5ee09f14/semantic-tokens-sample/vscode.proposed.d.ts#L71
var expectedArr = new int[5]
{
// line, index, token length, token type, token modifiers
0, 0, scriptContent.Length, 2, 0 //function token: line 0, index 0, length, type 2 = keyword, no modifiers
};

Assert.Equal(expectedArr, result.Data.ToArray());
}
}
}
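
Aside (not part of this commit): the expected array in the test above uses the LSP semantic tokens wire format, where every token is encoded as five integers (deltaLine, deltaStartChar, length, tokenType index, tokenModifiers bit set) relative to the previous token. A minimal decoding sketch, assuming the flat int[] has already been pulled out of a response:

using System.Collections.Generic;

internal static class SemanticTokenDataDecoder
{
    public static IEnumerable<(int Line, int Character, int Length, int Type, int Modifiers)> Decode(int[] data)
    {
        int line = 0;
        int character = 0;
        for (int i = 0; i < data.Length; i += 5)
        {
            // deltaLine is relative to the previous token's line.
            line += data[i];
            // deltaStartChar is relative to the previous token's start only when
            // both tokens are on the same line; otherwise it is absolute.
            character = data[i] == 0 ? character + data[i + 1] : data[i + 1];
            yield return (line, character, data[i + 2], data[i + 3], data[i + 4]);
        }
    }
}

For example, the single "function" keyword token asserted in the test decodes to (line 0, character 0, length 8, type 2, no modifiers).
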