Skip to content

Commit

Permalink
Update for coming token_tools
Browse files Browse the repository at this point in the history
  • Loading branch information
Moosems committed Sep 7, 2024
1 parent b55ec9e commit c44639a
Show file tree
Hide file tree
Showing 6 changed files with 59 additions and 111 deletions.
4 changes: 3 additions & 1 deletion salve/server_functions/highlight/docstring_highlight.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,9 @@ def proper_docstring_tokens(lexer: RegexLexer, full_text: str) -> list[Token]:
continue

start_pos: tuple[int, int] = (1, 0)
simple_token_type: str = get_new_token_type(str(token_type))
simple_token_type: str | None = get_new_token_type(str(token_type))
if not simple_token_type:
continue

while match:
span: tuple[int, int] = match.span()
Expand Down
4 changes: 2 additions & 2 deletions salve/server_functions/highlight/highlight.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,15 +37,15 @@ def get_highlights(
for line in split_text:
og_tokens: _LexReturnTokens = list(lex(line, lexer))
for token in og_tokens:
new_type: str = get_new_token_type(str(token[0]))
new_type: str | None = get_new_token_type(str(token[0]))
token_str: str = token[1]
token_len: int = len(token_str)

if token_str == "\n":
# Lexer adds the newline back as its own token
continue

if not token_str.strip() or new_type == "Text":
if not token_str.strip() or not new_type:
# If the token is empty or is plain Text we simply skip it because that's ultimately useless info
start_index = (start_index[0], start_index[1] + token_len)
continue
Expand Down
38 changes: 18 additions & 20 deletions salve/server_functions/highlight/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,20 +2,20 @@

from token_tools import GENERIC_TOKENS

default_tokens: list[str] = [
"Token.Text.Whitespace",
"Token.Text",
"Token.Error",
"Token.Keyword",
"Token.Name",
"Token.Literal.String",
"Token.Literal.Number",
"Token.Literal",
"Token.Operator",
"Token.Punctuation",
"Token.Comment",
"Token.Generic",
]
default_tokens: dict[str, str | None] = {
"Token.Text.Whitespace": None,
"Token.Text": None,
"Token.Error": GENERIC_TOKENS[1],
"Token.Keyword": GENERIC_TOKENS[2],
"Token.Name": GENERIC_TOKENS[0],
"Token.Literal.String": GENERIC_TOKENS[4],
"Token.Literal.Number": GENERIC_TOKENS[5],
"Token.Literal": GENERIC_TOKENS[6],
"Token.Operator": GENERIC_TOKENS[7],
"Token.Punctuation": GENERIC_TOKENS[8],
"Token.Comment": GENERIC_TOKENS[9],
"Token.Generic": None,
}


def normal_text_range(
Expand All @@ -34,11 +34,9 @@ def normal_text_range(


@cache
def get_new_token_type(old_token: str) -> str:
def get_new_token_type(token: str) -> str | None:
"""Turns pygments token types into a generic predefined Token"""
new_type: str = GENERIC_TOKENS[0]
for index, token in enumerate(default_tokens):
for old_token, new_token in default_tokens.items():
if old_token.startswith(token):
new_type = GENERIC_TOKENS[index]
break
return new_type
return new_token
return None
4 changes: 1 addition & 3 deletions salve/wrappers.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,5 @@ def get_special_tokens_request_wrapper(
) -> list[Token]:
return get_special_tokens(
request["file"], # type: ignore
normal_text_range(request["file"], request["text_range"])[ # type: ignore
1
],
normal_text_range(request["file"], request["text_range"])[1], # type: ignore
)
115 changes: 32 additions & 83 deletions tests/test_ipc.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ def test_IPC():
context.update_file(
"test", open(Path("tests/testing_file1.py"), "r+").read()
)
context.update_file("hidden_test", "https://www.google.com (​)")

context.request(
AUTOCOMPLETE,
Expand Down Expand Up @@ -49,23 +50,23 @@ def test_IPC():
(r":?.*=.*", "before"),
],
)
context.request(LINKS_AND_CHARS, file="test", text_range=(1, 18))
context.request(LINKS_AND_CHARS, file="hidden_test", text_range=(1, 18))
sleep(1)

# Check output
autocomplete_output: Response | None = context.get_response(AUTOCOMPLETE)
autocomplete_output: Response | None = context.get_response(AUTOCOMPLETE) # type: ignore
if autocomplete_output is None:
raise AssertionError("Autocomplete output is None")
autocomplete_output["id"] = 0
assert autocomplete_output == {
"id": 0,
"type": "response",
"cancelled": False,
"command": AUTOCOMPLETE,
"result": ["test", "this"],
"command": "autocomplete",
"result": ["test", "this", "type"],
}

replacements_output: Response | None = context.get_response(REPLACEMENTS)
replacements_output: Response | None = context.get_response(REPLACEMENTS) # type: ignore
if replacements_output is None:
raise AssertionError("Replacements output is None")
replacements_output["id"] = 0
Expand All @@ -77,97 +78,45 @@ def test_IPC():
"result": ["this"],
}

highlight_output: Response | None = context.get_response(HIGHLIGHT)
highlight_output: Response | None = context.get_response(HIGHLIGHT) # type: ignore
if highlight_output is None:
raise AssertionError("Highlight output is None")
highlight_output["id"] = 0

expected_output: Response = {
"id": 0,
"type": "response",
"cancelled": False,
"command": HIGHLIGHT,
"command": "highlight",
"result": [
((1, 0), 4, "Keyword"),
((1, 5), 4, "Name"),
((1, 10), 6, "Keyword"),
((1, 17), 1, "Name"),
((1, 20), 12, "Comment"),
((3, 0), 3, "Name"),
((3, 4), 1, "Operator"),
((3, 6), 3, "Name"),
((3, 11), 7, "Comment"),
((5, 0), 5, "Name"),
((5, 5), 1, "Punctuation"),
((5, 6), 3, "String"),
((5, 9), 1, "Punctuation"),
((5, 12), 16, "Comment"),
((8, 0), 5, "Keyword"),
((8, 6), 3, "Name"),
((8, 9), 1, "Punctuation"),
((8, 10), 3, "Name"),
((8, 13), 2, "Punctuation"),
((9, 4), 3, "String"),
((10, 4), 4, "String"),
((11, 4), 3, "String"),
((13, 4), 3, "Keyword"),
((13, 8), 8, "Name"),
((13, 16), 1, "Punctuation"),
((13, 17), 4, "Name"),
((13, 21), 2, "Punctuation"),
((14, 8), 4, "Keyword"),
((17, 0), 3, "Name"),
((17, 3), 2, "Punctuation"),
((18, 0), 24, "Comment"),
((1, 17), 1, "Identifier"),
((3, 0), 3, "Identifier"),
((3, 3), 1, "Punctuation"),
((3, 10), 1, "Operator"),
((4, 0), 1, "Punctuation"),
((4, 1), 3, "Identifier"),
((4, 5), 2, "Operator"),
((4, 8), 4, "Identifier"),
((4, 12), 1, "Punctuation"),
((6, 5), 2, "Punctuation"),
((9, 0), 5, "Keyword"),
((9, 9), 1, "Punctuation"),
((9, 10), 3, "Identifier"),
((9, 13), 2, "Punctuation"),
((11, 4), 4, "Identifier"),
((14, 4), 3, "Keyword"),
((14, 16), 1, "Punctuation"),
((14, 21), 2, "Punctuation"),
((15, 8), 4, "Keyword"),
((18, 0), 3, "Identifier"),
((18, 3), 2, "Punctuation"),
],
}
# Deal with Windows weirdness
if platform == "win32":
expected_output = {
"id": 0,
"type": "response",
"cancelled": False,
"command": HIGHLIGHT,
"result": [
((1, 0), 4, "Keyword"),
((1, 5), 4, "Name"),
((1, 10), 6, "Keyword"),
((1, 17), 1, "Name"),
((1, 20), 12, "Comment"),
((3, 0), 3, "Name"),
((3, 4), 1, "Operator"),
((3, 6), 3, "Name"),
((3, 11), 7, "Comment"),
((5, 0), 5, "Name"),
((5, 5), 1, "Punctuation"),
((5, 6), 5, "String"),
((5, 11), 1, "Punctuation"),
((5, 14), 16, "Comment"),
((8, 0), 5, "Keyword"),
((8, 6), 3, "Name"),
((8, 9), 1, "Punctuation"),
((8, 10), 3, "Name"),
((8, 13), 2, "Punctuation"),
((9, 4), 3, "String"),
((10, 4), 4, "String"),
((11, 4), 3, "String"),
((13, 4), 3, "Keyword"),
((13, 8), 8, "Name"),
((13, 16), 1, "Punctuation"),
((13, 17), 4, "Name"),
((13, 21), 2, "Punctuation"),
((14, 8), 4, "Keyword"),
((17, 0), 3, "Name"),
((17, 3), 2, "Punctuation"),
((18, 0), 24, "Comment"),
],
}

assert highlight_output == expected_output

links_and_hidden_chars_result: Response | None = context.get_response(
LINKS_AND_CHARS
)
) # type: ignore
if links_and_hidden_chars_result is None:
raise AssertionError("links_and_hidden_chars_result output is None")
links_and_hidden_chars_result["id"] = 0
Expand All @@ -176,15 +125,15 @@ def test_IPC():
"type": "response",
"cancelled": False,
"command": LINKS_AND_CHARS,
"result": [((18, 2), 22, "Link"), ((5, 7), 1, "Hidden_Char")],
"result": [((1, 0), 22, "Link"), ((1, 25), 1, "Hidden_Char")],
}
if platform == "win32":
expected_output = {
"id": 0,
"type": "response",
"cancelled": False,
"command": LINKS_AND_CHARS,
"result": [((18, 2), 22, "Link")],
"result": [((1, 0), 22, "Link")],
}
assert links_and_hidden_chars_result == expected_output

Expand Down
5 changes: 3 additions & 2 deletions tests/testing_file1.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
from this import s # noqa: F401

Bar = int # alias
Bar: type = int
(xyz := test)

print("​") # No width space
print()


class Foo(Bar):
Expand Down

0 comments on commit c44639a

Please sign in to comment.