add cache, try parsing using pygls
This commit is contained in:
parent c05fc0e1ba
commit a924ca5710
@@ -1,12 +1,3 @@
examlpe = nil
example = nil

(procedure function(param1 (param2 t))

param1 = 1 + 3


(call_to_other_function "arg1" t)

c_stype_call("arg")

)
example2 = example
@@ -31,3 +31,7 @@ skillls = "skillls.main:main"
line-length = 100
target-version = "py311"
include = "skillls"

[tools.ruff]
line-length = 100
include = ['ALL']
@@ -0,0 +1,37 @@
from dataclasses import dataclass, field
from typing import Any, Generic, TypeVar, TypeVarTuple, Union, Unpack

T = TypeVar("T")
L = TypeVarTuple("L")
ID = int

@dataclass
class Cache(Generic[*L, T]):
    cached: list[T] = field(default_factory=list)
    lookups: dict[type[Union[*L]], dict[Union[*L], ID]] = field(default_factory=dict)

    def __getitem__(self, key: Union[*L]) -> T:
        id = self.lookups[type(key)][key]

        return self.cached[id]

    def __setitem__(self, keys: tuple[Unpack[L]], value: T) -> None:
        print(type(keys), keys)
        id = len(self.cached)
        self.cached.append(value)

        for key in keys:
            self.lookups.setdefault(type(key), {})
            self.lookups[type(key)][key] = id


if __name__ == "__main__":
    c = Cache[int, str, str]()

    print(c)
    c[0, None] = "a"
    print(c)
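For orientation, a minimal usage sketch of the `Cache` added above (not from the commit; the key and value types here are made up): `__setitem__` appends the value once and indexes every key in the tuple to the same ID, so the value can later be fetched through any single key.

```python
from skillls.cache import Cache  # module introduced by this commit

# Hypothetical key types: look a value up by its str name or by its int position.
cache: Cache[str, int, str] = Cache()

# One value, two keys of different types; both map to the same cached ID.
cache["my_symbol", 3] = "cached completion"

assert cache["my_symbol"] == "cached completion"  # lookup by name
assert cache[3] == "cached completion"            # lookup by position
```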
@@ -1,8 +1,7 @@
skill = inline_expr+
expr = (inline_expr / nl)

inline_expr = (listraw / listc / listskill / inline_get / inline_op / inline_assign / ws)
inline_expr = (listraw / listc / listskill / inline_get / inline_op / inline_assign / ws / nl)

inline_assign = TOKEN ws* "=" ws* (inline_expr / LITERAL / TOKEN)
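The change above adds `nl` to `inline_expr`, so newlines are consumed inside the top-level `skill = inline_expr+` repetition instead of aborting the parse at the first line break. A rough stand-in grammar (simplified; the real `listraw`, `listc`, `listskill`, `inline_get` and `inline_op` rules are omitted) illustrates the effect with parsimonious:

```python
from parsimonious import Grammar

# Simplified stand-in for grammar.peg: only assignments, spaces and newlines.
grammar = Grammar(
    r"""
    skill         = inline_expr+
    inline_expr   = (inline_assign / ws / nl)
    inline_assign = TOKEN ws* "=" ws* (LITERAL / TOKEN)
    TOKEN         = ~r"[A-Za-z_][A-Za-z0-9_]*"
    LITERAL       = ~r"[0-9]+"
    ws            = ~r"[ \t]+"
    nl            = ~r"\n"
    """
)

# With nl accepted as an inline_expr, multi-line input parses all the way through
# instead of raising IncompleteParseError at the first newline.
print(grammar.parse("a = 1\nb = a\n"))
```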
@@ -1,40 +1,38 @@
from logging import INFO, basicConfig, getLogger
from pathlib import Path
from urllib.parse import unquote
from lsprotocol.types import (
    TEXT_DOCUMENT_DOCUMENT_SYMBOL,
    CompletionItem,
    Diagnostic,
    DiagnosticSeverity,
    DocumentSymbol,
    DocumentSymbolParams,
    Position,
    Range,
    SymbolKind,
)

from pygls.server import LanguageServer
from parsimonious import Grammar
from pygls.uris import urlparse

# from skillls.parsing.location import Range
from parsimonious import Grammar, IncompleteParseError

from .cache import Cache
from .parsing.tokenize import Locator, SkillVisitor


example = """
(skillist siomfpwqmqwepfomkjnbkjb
'(rawlist token)
clist(qwerfwf)
)
"""
URI = str

cache = {}
basicConfig(filename="skillls.log", level=INFO)
cache: Cache[str, CompletionItem] = Cache()

logger = getLogger(__name__)
server = LanguageServer("skillls", "v0.1")


def parse(path: Path):
    # path = Path(__file__).parent / "grammar.peg"
def parse(content: str):
    path = Path(__file__).parent / "grammar.peg"
    grammar = Grammar(path.read_text())

    locator = Locator(example)
    tree = grammar.parse(example)
    locator = Locator(content)
    tree = grammar.parse(content)

    iv = SkillVisitor(locator)
    output = iv.visit(tree)

@@ -42,42 +40,33 @@ def parse(path: Path):
    return output


def parse_and_cache(uri: str) -> list[DocumentSymbol]:
    path = Path(unquote(urlparse(uri).path))
    if not path.exists():
        logger.error("could not find %s", path)
        return []

    if not cache.get(path):
        logger.info("%s not yet cached, parsing...")
        out = parse(path)
        logger.info("%s", out)

    return []


basicConfig(filename="skillls.log", level=INFO)

logger = getLogger(__name__)
server = LanguageServer("skillls", "v0.1")


@server.feature(TEXT_DOCUMENT_DOCUMENT_SYMBOL)
def document_symbols(params: DocumentSymbolParams) -> list[DocumentSymbol]:
    logger.info("requested document symbols for %s", params.text_document.uri)
    doc = server.workspace.documents[params.text_document.uri]
    return [
        DocumentSymbol(
            "~global_scope",
            kind=SymbolKind.Namespace,
            range=Range(
                start=Position(0, 0),
                end=Position(len(doc.lines) - 1, len(doc.lines[-1])),
            ),
            selection_range=Range(Position(0, 0), Position(0, 0)),
    logger.warning("requested document symbols for %s", params.text_document.uri)
    doc = server.workspace.get_text_document(params.text_document.uri)
    try:
        logger.warning(parse(doc.source))
    except IncompleteParseError as e:
        server.publish_diagnostics(
            params.text_document.uri,
            [
                Diagnostic(
                    Range(
                        Position(e.line() - 1, e.column() - 1),
                        Position(len(doc.lines), 0),
                    ),
                    str(e),
                    severity=DiagnosticSeverity.Error,
                )
            ],
        )
    ]
    return []


def main():
    server.start_io()


if __name__ == "__main__":
    file = Path(__file__).parent.parent / "examples" / "example.il"
    parse(file.read_text())
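As a standalone look at the error-to-diagnostic mapping used in `document_symbols` above (a sketch, not part of the commit; the toy grammar is invented), parsimonious's `IncompleteParseError` exposes 1-based `line()`/`column()`, which the handler shifts to 0-based LSP positions:

```python
from lsprotocol.types import Diagnostic, DiagnosticSeverity, Position, Range
from parsimonious import Grammar, IncompleteParseError

# Toy grammar: only lowercase words, so the trailing "123" cannot be consumed.
grammar = Grammar(
    r"""
    doc  = word+
    word = ~r"[a-z]+ ?"
    """
)

source = "abc def 123"
try:
    grammar.parse(source)
except IncompleteParseError as e:
    lines = source.splitlines()
    diagnostic = Diagnostic(
        Range(
            Position(e.line() - 1, e.column() - 1),  # parsimonious is 1-based, LSP is 0-based
            Position(len(lines), 0),                 # same end point the handler uses
        ),
        str(e),
        severity=DiagnosticSeverity.Error,
    )
    print(diagnostic)
```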
@@ -6,52 +6,21 @@ from lsprotocol.types import Position, Range
from parsimonious.nodes import Node


# @total_ordering
# class Position(NamedTuple):
#     line: int
#     char: int
#
#     def __lt__(self, other: Self) -> bool:
#         return (self.line < other.line) or (
#             (self.line == other.line) and (self.char < other.char)
#         )
#
#     def __eq__(self, other: Self) -> bool:
#         return (self.line == other.line) and (self.char == other.char)
#
#
# class Range(NamedTuple):
#     start: Position
#     end: Position
#
#     def __add__(self, other: Self) -> Self:
#         start = min(self.start, other.start)
#         end = max(self.end, other.end)
#         return Range(start, end)
#
#     def contained_by(self, possibly_contained_by: Self) -> bool:
#         return (self.start >= possibly_contained_by.start) and (
#             self.end <= possibly_contained_by.end
#         )
#
#     def contains(self, possibly_contains: Self) -> bool:
#         return (self.start <= possibly_contains.start) and (
#             self.end >= possibly_contains.end
#         )


@dataclass(frozen=True)
class Locator:
    raw: str

    @cached_property
    def newlines(self) -> tuple[int, ...]:
        t = tuple(i for i, char in enumerate(self.raw) if char == "\n")
        return t
    raw: list[str]

    def _locate_pos(self, index: int) -> Position:
        line = next(i for i, char in enumerate(self.newlines) if char >= index)
        return Position(line - 1, index - (self.newlines[line - 1] if line > 0 else 0))
        counter = 0
        line = 0
        for ix, raw_line in enumerate(self.raw):
            if counter + len(raw_line) > index:
                line = ix
                break
            else:
                counter += len(raw_line)

        return Position(line, index - counter)

    @overload
    def locate(self, index: int) -> Position:
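A quick check of the reworked index-to-`Position` logic (illustrative only, not from the commit), assuming `raw` now holds the source split into lines that keep their trailing newlines:

```python
from dataclasses import dataclass

from lsprotocol.types import Position


@dataclass(frozen=True)
class LocatorSketch:
    raw: list[str]  # lines including their trailing "\n"

    def _locate_pos(self, index: int) -> Position:
        # Walk the lines, accumulating consumed characters until the line
        # containing `index` is reached; the remainder is the column.
        counter = 0
        line = 0
        for ix, raw_line in enumerate(self.raw):
            if counter + len(raw_line) > index:
                line = ix
                break
            counter += len(raw_line)
        return Position(line, index - counter)


text = "(skillist foo\n  bar\n)\n"
locator = LocatorSketch(text.splitlines(keepends=True))
print(locator._locate_pos(16))  # index 16 is the "b" in "  bar" -> line=1, character=2
```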