From 5e8a8f0793cf8c380cbd88956560f43fc899cb61 Mon Sep 17 00:00:00 2001 From: Alex Carney Date: Sat, 6 Nov 2021 12:58:26 +0000 Subject: [PATCH] Fix how semantic tokens server capabilities are computed (#213) * Fix server capabilities for semantic tokens. A server can support up to three separate semantic tokens requests. Rather than have `@server.feature()` methods provide the full `SemanticTokensOptions` definition, this commit adjusts the `LSP_METHODS_MAP` to accept just the `SemanticTokensLegend`. Then in the `ServerCapabilitiesBuilder`, the `full` and `range` fields of `SemanticTokensOptions` are computed according to which features are present. Since `SemanticTokensOptions` only supports a single legend, if multiple legends are found, only the first one will be used. * Add semantic tokens example This updates the example `json-extension` to include an example `textDocument/semanticTokens/full` implementation that highlights all keys in a JSON document. * Update changelog Co-authored-by: Daniel Elero --- CHANGELOG.md | 11 +- CONTRIBUTORS.md | 1 + examples/json-extension/server/server.py | 46 +++- pygls/capabilities.py | 49 ++++- pygls/lsp/__init__.py | 6 +- tests/lsp/test_semantic_tokens.py | 264 +++++++++++++++++++++++ 6 files changed, 355 insertions(+), 22 deletions(-) create mode 100644 tests/lsp/test_semantic_tokens.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 93e2a386..7e47d0a9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,19 +11,16 @@ and this project adheres to [Semantic Versioning][semver]. 
### Changed -### Fixed - -## [0.11.3] - 09/30/2021 - -### Added - -### Changed +- Update json-example to include an example semantic tokens method ([#204]) ### Fixed - Fix example extension client not detecting debug mode appropriately ([#193]) +- Fix how the `semantic_tokens_provider` field of `ServerCapabilities` is computed ([#213]) [#193]: https://github.com/openlawlibrary/pygls/issues/193 +[#204]: https://github.com/openlawlibrary/pygls/issues/204 +[#213]: https://github.com/openlawlibrary/pygls/pull/213 ## [0.11.2] - 07/23/2021 diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md index 8f822b16..80cd884b 100644 --- a/CONTRIBUTORS.md +++ b/CONTRIBUTORS.md @@ -1,6 +1,7 @@ # Contributors (alphabetical) - [@augb](https://github.com/augb) +- [Alex Carney](https://github.com/alcarney) - [Brett Cannon](https://github.com/brettcannon/) - [Daniel Elero](https://github.com/danixeee) - [Daniel Miller](https://github.com/millerdev) diff --git a/examples/json-extension/server/server.py b/examples/json-extension/server/server.py index 53df561a..3e63a363 100644 --- a/examples/json-extension/server/server.py +++ b/examples/json-extension/server/server.py @@ -16,13 +16,15 @@ ############################################################################ import asyncio import json +import re import time import uuid from json import JSONDecodeError from typing import Optional from pygls.lsp.methods import (COMPLETION, TEXT_DOCUMENT_DID_CHANGE, - TEXT_DOCUMENT_DID_CLOSE, TEXT_DOCUMENT_DID_OPEN) + TEXT_DOCUMENT_DID_CLOSE, TEXT_DOCUMENT_DID_OPEN, + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL) from pygls.lsp.types import (CompletionItem, CompletionList, CompletionOptions, CompletionParams, ConfigurationItem, ConfigurationParams, Diagnostic, @@ -30,6 +32,7 @@ DidCloseTextDocumentParams, DidOpenTextDocumentParams, MessageType, Position, Range, Registration, RegistrationParams, + SemanticTokens, SemanticTokensLegend, SemanticTokensParams, Unregistration, UnregistrationParams) from 
pygls.lsp.types.basic_structures import (WorkDoneProgressBegin, WorkDoneProgressEnd, @@ -151,6 +154,47 @@ async def did_open(ls, params: DidOpenTextDocumentParams): _validate(ls, params) +@json_server.feature( + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL, + SemanticTokensLegend( + token_types = ["operator"], + token_modifiers = [] + ) +) +def semantic_tokens(ls: JsonLanguageServer, params: SemanticTokensParams): + """See https://microsoft.github.io/language-server-protocol/specification#textDocument_semanticTokens + for details on how semantic tokens are encoded.""" + + TOKENS = re.compile('".*"(?=:)') + + uri = params.text_document.uri + doc = ls.workspace.get_document(uri) + + last_line = 0 + last_start = 0 + + data = [] + + for lineno, line in enumerate(doc.lines): + last_start = 0 + + for match in TOKENS.finditer(line): + start, end = match.span() + data += [ + (lineno - last_line), + (start - last_start), + (end - start), + 0, + 0 + ] + + last_line = lineno + last_start = start + + return SemanticTokens(data=data) + + + @json_server.command(JsonLanguageServer.CMD_PROGRESS) async def progress(ls: JsonLanguageServer, *args): """Create and start the progress on the client.""" diff --git a/pygls/capabilities.py b/pygls/capabilities.py index c7191ddf..8540ee30 100644 --- a/pygls/capabilities.py +++ b/pygls/capabilities.py @@ -22,7 +22,9 @@ TEXT_DOCUMENT_CALL_HIERARCHY_PREPARE, TEXT_DOCUMENT_DID_CLOSE, TEXT_DOCUMENT_DID_OPEN, TEXT_DOCUMENT_DID_SAVE, TEXT_DOCUMENT_LINKED_EDITING_RANGE, TEXT_DOCUMENT_MONIKER, - TEXT_DOCUMENT_SEMANTIC_TOKENS, TEXT_DOCUMENT_WILL_SAVE, + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL, + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL_DELTA, + TEXT_DOCUMENT_SEMANTIC_TOKENS_RANGE, TEXT_DOCUMENT_WILL_SAVE, TEXT_DOCUMENT_WILL_SAVE_WAIT_UNTIL, TYPE_DEFINITION, WORKSPACE_DID_CREATE_FILES, WORKSPACE_DID_DELETE_FILES, WORKSPACE_DID_RENAME_FILES, WORKSPACE_SYMBOL, @@ -30,12 +32,12 @@ WORKSPACE_WILL_RENAME_FILES) from pygls.lsp.types import (CodeLensOptions, CompletionOptions, 
DocumentLinkOptions, ExecuteCommandOptions, ImplementationOptions, SaveOptions, + SemanticTokensOptions, SemanticTokensRegistrationOptions, + SemanticTokensRequestsFull, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncOptionsServerCapabilities, TypeDefinitionOptions, WorkspaceFileOperationsServerCapabilities, WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities) -from pygls.lsp.types.language_features.semantic_tokens import (SemanticTokensLegend, - SemanticTokensOptions) class ServerCapabilitiesBuilder: @@ -229,15 +231,40 @@ def _with_call_hierarchy(self): return self def _with_semantic_tokens(self): - value = self._provider_options(TEXT_DOCUMENT_SEMANTIC_TOKENS, - default=SemanticTokensOptions( - legend=SemanticTokensLegend( - token_types=[], - token_modifiers=[], - ), - )) - if value is not None: + + providers = [ + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL, + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL_DELTA, + TEXT_DOCUMENT_SEMANTIC_TOKENS_RANGE + ] + + for provider in providers: + value = self._provider_options(provider, None) + if value: + break + + if value is None: + return self + + if isinstance(value, SemanticTokensRegistrationOptions): self.server_cap.semantic_tokens_provider = value + return self + + full_support = ( + SemanticTokensRequestsFull(delta=True) + if TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL_DELTA in self.features + else TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL in self.features + ) + + options = SemanticTokensOptions( + legend=value, + full=full_support or None, + range=TEXT_DOCUMENT_SEMANTIC_TOKENS_RANGE in self.features or None + ) + + if options.full or options.range: + self.server_cap.semantic_tokens_provider = options + return self def _with_linked_editing_range(self): diff --git a/pygls/lsp/__init__.py b/pygls/lsp/__init__.py index 905f8302..ff50c1e8 100644 --- a/pygls/lsp/__init__.py +++ b/pygls/lsp/__init__.py @@ -108,17 +108,17 @@ Optional[List[Moniker]], ), TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL: ( - Union[SemanticTokensOptions, 
SemanticTokensRegistrationOptions], + Union[SemanticTokensLegend, SemanticTokensRegistrationOptions], SemanticTokensParams, Union[SemanticTokensPartialResult, Optional[SemanticTokens]], ), TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL_DELTA: ( - Union[SemanticTokensOptions, SemanticTokensRegistrationOptions], + Union[SemanticTokensLegend, SemanticTokensRegistrationOptions], SemanticTokensDeltaParams, Union[SemanticTokensDeltaPartialResult, Optional[Union[SemanticTokens, SemanticTokensDelta]]], ), TEXT_DOCUMENT_SEMANTIC_TOKENS_RANGE: ( - Union[SemanticTokensOptions, SemanticTokensRegistrationOptions], + Union[SemanticTokensLegend, SemanticTokensRegistrationOptions], SemanticTokensRangeParams, Union[SemanticTokensPartialResult, Optional[SemanticTokens]], diff --git a/tests/lsp/test_semantic_tokens.py b/tests/lsp/test_semantic_tokens.py new file mode 100644 index 00000000..08d53e31 --- /dev/null +++ b/tests/lsp/test_semantic_tokens.py @@ -0,0 +1,264 @@ +############################################################################ +# Copyright(c) Open Law Library. All rights reserved. # +# See ThirdPartyNotices.txt in the project root for additional notices. # +# # +# Licensed under the Apache License, Version 2.0 (the "License") # +# you may not use this file except in compliance with the License. # +# You may obtain a copy of the License at # +# # +# http: // www.apache.org/licenses/LICENSE-2.0 # +# # +# Unless required by applicable law or agreed to in writing, software # +# distributed under the License is distributed on an "AS IS" BASIS, # +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # +# See the License for the specific language governing permissions and # +# limitations under the License. 
# +############################################################################ +import unittest +from typing import Optional, Union + +from pygls.lsp.methods import (TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL, + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL_DELTA, + TEXT_DOCUMENT_SEMANTIC_TOKENS_RANGE) +from pygls.lsp.types import (Position, Range, SemanticTokens, SemanticTokensDeltaParams, + SemanticTokensLegend, SemanticTokensParams, + SemanticTokensPartialResult, SemanticTokensRequestsFull, + SemanticTokensRangeParams, TextDocumentIdentifier) + +from ..conftest import CALL_TIMEOUT, ClientServer + + +class TestSemanticTokensFullMissingLegend(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.client_server = ClientServer() + cls.client, cls.server = cls.client_server + + @cls.server.feature(TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL) + def f(params: SemanticTokensParams) -> Union[SemanticTokensPartialResult, Optional[SemanticTokens]]: + return SemanticTokens(data=[0,0,3,0,0]) + + cls.client_server.start() + + @classmethod + def tearDownClass(cls): + cls.client_server.stop() + + def test_capabilities(self): + capabilities = self.server.server_capabilities + + assert capabilities.semantic_tokens_provider is None + + +class TestSemanticTokensFull(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.client_server = ClientServer() + cls.client, cls.server = cls.client_server + + @cls.server.feature( + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL, + SemanticTokensLegend( + token_types=["keyword", "operator"], + token_modifiers=["readonly"] + ) + ) + def f(params: SemanticTokensParams) -> Optional[Union[SemanticTokensPartialResult, Optional[SemanticTokens]]]: + if params.text_document.uri == "file://return.tokens": + return SemanticTokens(data=[0,0,3,0,0]) + + cls.client_server.start() + + @classmethod + def tearDownClass(cls): + cls.client_server.stop() + + def test_capabilities(self): + capabilities = self.server.server_capabilities + + assert 
capabilities.semantic_tokens_provider.full + assert capabilities.semantic_tokens_provider.legend.token_types == ["keyword", "operator"] + assert capabilities.semantic_tokens_provider.legend.token_modifiers == ["readonly"] + + def test_semantic_tokens_full_return_tokens(self): + response = self.client.lsp.send_request( + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL, + SemanticTokensParams( + text_document=TextDocumentIdentifier(uri='file://return.tokens') + ) + ).result(timeout=CALL_TIMEOUT) + + assert response + + assert response['data'] == [0, 0, 3, 0, 0] + + + def test_semantic_tokens_full_return_none(self): + response = self.client.lsp.send_request( + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL, + SemanticTokensParams( + text_document=TextDocumentIdentifier(uri='file://return.none') + ) + ).result(timeout=CALL_TIMEOUT) + + assert response is None + + +class TestSemanticTokensFullDeltaMissingLegend(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.client_server = ClientServer() + cls.client, cls.server = cls.client_server + + @cls.server.feature(TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL_DELTA) + def f(params: SemanticTokensDeltaParams) -> Union[SemanticTokensPartialResult, Optional[SemanticTokens]]: + return SemanticTokens(data=[0,0,3,0,0]) + + cls.client_server.start() + + @classmethod + def tearDownClass(cls): + cls.client_server.stop() + + def test_capabilities(self): + capabilities = self.server.server_capabilities + + assert capabilities.semantic_tokens_provider is None + + +class TestSemanticTokensFullDelta(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.client_server = ClientServer() + cls.client, cls.server = cls.client_server + + @cls.server.feature( + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL_DELTA, + SemanticTokensLegend( + token_types=["keyword", "operator"], + token_modifiers=["readonly"] + ) + ) + def f(params: SemanticTokensDeltaParams) -> Union[SemanticTokensPartialResult, Optional[SemanticTokens]]: + if params.text_document.uri == 
'file://return.tokens': + return SemanticTokens(data=[0,0,3,0,0]) + + cls.client_server.start() + + @classmethod + def tearDownClass(cls): + cls.client_server.stop() + + def test_capabilities(self): + capabilities = self.server.server_capabilities + + assert capabilities.semantic_tokens_provider.full == SemanticTokensRequestsFull(delta=True) + assert capabilities.semantic_tokens_provider.legend.token_types == ["keyword", "operator"] + assert capabilities.semantic_tokens_provider.legend.token_modifiers == ["readonly"] + + def test_semantic_tokens_full_delta_return_tokens(self): + response = self.client.lsp.send_request( + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL_DELTA, + SemanticTokensDeltaParams( + text_document=TextDocumentIdentifier(uri='file://return.tokens'), + previous_result_id='id' + ) + ).result(timeout=CALL_TIMEOUT) + + assert response + + assert response['data'] == [0, 0, 3, 0, 0] + + def test_semantic_tokens_full_delta_return_none(self): + response = self.client.lsp.send_request( + TEXT_DOCUMENT_SEMANTIC_TOKENS_FULL_DELTA, + SemanticTokensDeltaParams( + text_document=TextDocumentIdentifier(uri='file://return.none'), + previous_result_id='id' + ) + ).result(timeout=CALL_TIMEOUT) + + assert response is None + +class TestSemanticTokensRangeMissingLegend(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.client_server = ClientServer() + cls.client, cls.server = cls.client_server + + @cls.server.feature(TEXT_DOCUMENT_SEMANTIC_TOKENS_RANGE) + def f(params: SemanticTokensParams) -> Union[SemanticTokensPartialResult, Optional[SemanticTokens]]: + return SemanticTokens(data=[0,0,3,0,0]) + + cls.client_server.start() + + @classmethod + def tearDownClass(cls): + cls.client_server.stop() + + def test_capabilities(self): + capabilities = self.server.server_capabilities + + assert capabilities.semantic_tokens_provider is None + + +class TestSemanticTokensRange(unittest.TestCase): + @classmethod + def setUpClass(cls): + cls.client_server = ClientServer() + 
cls.client, cls.server = cls.client_server + + @cls.server.feature( + TEXT_DOCUMENT_SEMANTIC_TOKENS_RANGE, + SemanticTokensLegend( + token_types=["keyword", "operator"], + token_modifiers=["readonly"] + ) + ) + def f(params: SemanticTokensRangeParams) -> Optional[Union[SemanticTokensPartialResult, Optional[SemanticTokens]]]: + if params.text_document.uri == "file://return.tokens": + return SemanticTokens(data=[0,0,3,0,0]) + + cls.client_server.start() + + @classmethod + def tearDownClass(cls): + cls.client_server.stop() + + def test_capabilities(self): + capabilities = self.server.server_capabilities + + assert capabilities.semantic_tokens_provider.range + assert capabilities.semantic_tokens_provider.legend.token_types == ["keyword", "operator"] + assert capabilities.semantic_tokens_provider.legend.token_modifiers == ["readonly"] + + def test_semantic_tokens_range_return_tokens(self): + response = self.client.lsp.send_request( + TEXT_DOCUMENT_SEMANTIC_TOKENS_RANGE, + SemanticTokensRangeParams( + text_document=TextDocumentIdentifier(uri='file://return.tokens'), + range=Range( + start=Position(line=0, character=0), + end=Position(line=10, character=80) + ) + ) + ).result(timeout=CALL_TIMEOUT) + + assert response + + assert response['data'] == [0, 0, 3, 0, 0] + + + def test_semantic_tokens_range_return_none(self): + response = self.client.lsp.send_request( + TEXT_DOCUMENT_SEMANTIC_TOKENS_RANGE, + SemanticTokensRangeParams( + text_document=TextDocumentIdentifier(uri='file://return.none'), + range=Range( + start=Position(line=0, character=0), + end=Position(line=10, character=80) + ) + ) + ).result(timeout=CALL_TIMEOUT) + + assert response is None \ No newline at end of file