AcerecA 2025-11-16 14:56:14 +01:00
parent 56fb0982b4
commit 51984e297b
8 changed files with 376 additions and 357 deletions


@@ -1,22 +1,34 @@
 example = nil
 example2 = example
-; func2(g_arg1 g_arg2 ?g_args1 1 ?g_argw 2) => nil
-(procedure func2(arg1 arg2 @key (args 1) (argw 2) "ggng")
-    ; some stuff to do
-    a = some_obj->field1
-    some_obj->field2 = 2
-    args = 2
-)
-(
-    (let (some vars (default 0))
-        ; ... some wall of text
-        "))\""
-        wqdqwf = '(doqwf)
-        var = 1.3
-        vars = 231
-        qqvwv
-        cfunc()
-    )
-)
+(call qdwdq)
+;; func2(g_arg1 g_arg2 ?g_args1 1 ?g_argw 2) => nil
+(procedure Func2(arg1 arg2 @keys (args "ss") (argw 2) "ggng")
+    (let ()
+        ; some stuff to do
+        a = some_obj->field1
+        some_obj->field2 = 2
+        db_obj->help()
+        args = 2
+        args
+        (procedure Wqrqw(a1 a2 @keys (a "12") "sqd")
+        )
+    )
+)
+(let (some vars (default "sd"))
+    ; ... some wall of text
+    "))\""
+    wqdqwf = '(doqwf)
+    'wq
+    var = 1.3
+    vars = 231
+    qqvwv
+    if(expr then expr else expr)
+    cfunc()
+)
+somfunccall("somecalcfunc()")


@@ -5,7 +5,9 @@ version = "0.1.0"
 dependencies = [
     "parsimonious~=0.10.0",
     "pygls",
-    "rich"
+    "rich",
+    "tree-sitter>=0.24.0",
+    "tree-sitter-skill>=0.1.5",
 ]

 [project.optional-dependencies]
@@ -35,3 +37,6 @@ include = "skillls"
 [tools.ruff]
 line-length = 100
 include = ['ALL']
+
+[tool.uv.sources]
+tree-sitter-skill = { git = "ssh://git@git.acereca.net/acereca/tree-sitter-skill.git" }

skillls/builtins/common.py (new file)

@@ -0,0 +1,145 @@
from abc import ABC
from collections.abc import Mapping
from dataclasses import dataclass, field
from enum import Enum
from typing import ClassVar

from lsprotocol.types import SymbolKind


class SkillDataType(Enum):
    array = "a"
    """array"""
    ddUserType = "b"
    """DDPI object"""
    opfcontext = "C"
    """OPF Context"""
    dbobject = "d"
    """Cadence database object (CDBA)"""
    envobj = "e"
    """environment"""
    flonum = "f"
    """floating-point number"""
    opffile = "F"
    """OPF file ID"""
    general = "g"
    """any data type"""
    nil = "g"
    """nil (alias of general)"""
    dgbSpecIlUserType = "G"
    """gdm spec"""
    hdbobject = "h"
    """hierarchical database configuration object"""
    list = "l"
    """linked list"""
    nmpIlUserType = "m"
    """nmpll user type"""
    cdsEvalObject = "M"
    """Cadence evaluation object"""
    number = "n"
    """integer or floating-point number"""
    userType = "o"
    """user-defined type (other)"""
    port = "p"
    """I/O port"""
    gdmspecListIlUSerType = "q"
    """gdm spec list"""
    defstruct = "r"
    """defstruct"""
    rodObj = "R"
    """relative object design (ROD) object"""
    symbol = "s"
    """symbol"""
    stringSymbol = "S"
    """symbol or character string"""
    string = "t"
    """character string (text)"""
    function = "u"
    """function object, either the name of a function (symbol) or a lambda function body (list)"""
    funobj = "U"
    """function object"""
    hdbpath = "v"
    """hierarchical database path"""
    wtype = "w"
    """window type"""
    integer = "x"
    """integer type"""
    binary = "y"
    """binary function"""
    pointer = "&"
    """pointer type"""


@dataclass(frozen=True)
class Builtin(ABC):
    token: str
    kind: ClassVar[SymbolKind]


@dataclass(frozen=True)
class Variable(Builtin):
    kind: ClassVar[SymbolKind] = SymbolKind.Variable
    typ: SkillDataType
    default: str | None = None


@dataclass(frozen=True)
class AnonymousVariable(Builtin):
    kind: ClassVar[SymbolKind] = SymbolKind.Variable
    typ: SkillDataType
    default: str | None = None


@dataclass(frozen=True)
class Procedure(Builtin):
    kind: ClassVar[SymbolKind] = SymbolKind.Function
    args: Mapping[str, Variable] = field(default_factory=dict)
    rest: Variable | None = None
    kwargs: Mapping[str, Variable] | Mapping[str, AnonymousVariable] = field(
        default_factory=dict
    )
    """``Variable`` values if ``@key`` was used, ``AnonymousVariable`` values if ``@option`` was used"""
    ret: SkillDataType = SkillDataType.nil

    @property
    def has_options(self) -> bool:
        return bool(self.kwargs) and isinstance(
            next(iter(self.kwargs.values())),
            AnonymousVariable,
        )

    @property
    def has_keys(self) -> bool:
        return bool(self.kwargs) and isinstance(
            next(iter(self.kwargs.values())),
            Variable,
        )
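
A quick illustration (not part of the commit) of how this model separates "@key" from "@option" parameters; the procedure and argument names below are made up:

from skillls.builtins.common import AnonymousVariable, Procedure, SkillDataType, Variable

# hypothetical builtin declared with @key: kwargs hold Variable instances
with_key = Procedure(
    "myKeyProc",
    args={"obj": Variable("obj", SkillDataType.dbobject)},
    kwargs={"depth": Variable("depth", SkillDataType.integer, default="1")},
    ret=SkillDataType.general,
)

# hypothetical builtin declared with @option: kwargs hold AnonymousVariable instances
with_option = Procedure(
    "myOptionProc",
    kwargs={"verbose": AnonymousVariable("verbose", SkillDataType.general)},
)

assert with_key.has_keys and not with_key.has_options
assert with_option.has_options and not with_option.has_keys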


@@ -0,0 +1,6 @@
;; append
append(
l_list1
l_list2
) => l_result
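
The l_ prefixes in this declaration follow the SKILL type-letter convention encoded in SkillDataType above (l = linked list). A hedged sketch, not part of the commit, of the Procedure entry such a declaration could be loaded into; the loader that would do this parsing is not shown here:

from skillls.builtins.common import Procedure, SkillDataType, Variable

# assumed mapping of the append declaration above onto the builtins data model
APPEND = Procedure(
    "append",
    args={
        "l_list1": Variable("l_list1", SkillDataType.list),
        "l_list2": Variable("l_list2", SkillDataType.list),
    },
    ret=SkillDataType.list,
)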


@@ -0,0 +1,21 @@
from collections.abc import Mapping

from .common import Procedure, SkillDataType, Variable

NUM = SkillDataType.number
ANY = SkillDataType.general

FUNCTIONS: Mapping[str, Procedure] = {
    "plus": Procedure(
        "plus",
        ret=NUM,
        args={
            "op1": Variable("op1", NUM),
            "op2": Variable("op2", NUM),
        },
        rest=Variable("op3", NUM),
    ),
}
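
A short usage sketch, not part of the commit, of how such an entry could be rendered back into a signature string; the helper is hypothetical and could sit next to FUNCTIONS in this module:

def format_signature(proc: Procedure) -> str:
    # positional arguments first, then the @rest argument if one is declared
    parts = [f"{arg.token}: {arg.typ.name}" for arg in proc.args.values()]
    if proc.rest is not None:
        parts.append(f"@rest {proc.rest.token}")
    return f"{proc.token}({' '.join(parts)}) => {proc.ret.name}"

# format_signature(FUNCTIONS["plus"]) -> "plus(op1: number op2: number @rest op3) => number"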


@@ -1,9 +1,12 @@
-from collections.abc import Generator
+from collections.abc import Callable, Generator, Sequence
 from dataclasses import dataclass, field
+from difflib import Differ
 from itertools import chain
-from logging import INFO, basicConfig, debug, error, getLogger, info, warning
+from logging import DEBUG, INFO, basicConfig, debug, error, getLogger, info, warning
 from re import findall, finditer, fullmatch, match as rematch
+import re
 from time import time
+from cattrs import Converter
 from lsprotocol.types import (
     INLAY_HINT_RESOLVE,
     TEXT_DOCUMENT_DID_CHANGE,
@@ -28,23 +31,37 @@ from lsprotocol.types import (
     InlayHintKind,
     InlayHintParams,
     MessageType,
+    NotebookDocumentSyncOptions,
     Position,
     Range,
     SymbolKind,
+    TextDocumentContentChangeEvent,
+    TextDocumentContentChangeEvent_Type1,
+    TextDocumentSyncKind,
 )
+from pygls.protocol import LanguageServerProtocol, default_converter
+from tree_sitter_skill import language as skill_lang
+from tree_sitter import Language, Node, Parser, Query, Tree
 from pygls.server import LanguageServer
 from pygls.workspace import TextDocument
+from skillls.builtins.common import SkillDataType
+from skillls.parsing.iterative import IterativeParser, TokenParser
 from .cache import Cache

+SKILL_LANG = Language(skill_lang())
+SKILL_PARSER = Parser(SKILL_LANG)
+
 URI = str

-basicConfig(filename="skillls.log", filemode="w", level=INFO)
+basicConfig(
+    filename="skillls.log",
+    filemode="w",
+    level=DEBUG,
+    format="%(asctime)s [%(levelname)s]: %(message)s",
+)
+logger = getLogger()

 cache: Cache[str, CompletionItem] = Cache()
@@ -93,6 +110,19 @@ class LetEnvironment(Environment):
     locals: set[str] = field(default_factory=set)

+def offset_range(range: Range, lines: int, cols: int = 0) -> Range:
+    return Range(
+        Position(
+            range.start.line + lines,
+            range.start.character + cols,
+        ),
+        Position(
+            range.end.line + lines,
+            range.end.character + cols,
+        ),
+    )

 #
 # @dataclass(frozen=True)
 # class ProcEnvironment(Environment):
@@ -111,363 +141,126 @@ class LetEnvironment(Environment):
 class SkillLanguageServer(LanguageServer):
-    lets: list[DocumentSymbol] = []
-    procs: list[DocumentSymbol] = []
-    defs: list[DocumentSymbol] = []
-    globals: list[DocumentSymbol] = []
+    contents: dict[str, TextDocument]
+    trees: dict[str, Tree]

-    @property
-    def envs(self) -> tuple[DocumentSymbol, ...]:
-        return (
-            *self.procs,
-            *self.lets,
-        )
+    def __init__(
+        self,
+        name: str,
+        version: str,
+        loop=None,
+        protocol_cls: type[LanguageServerProtocol] = LanguageServerProtocol,
+        converter_factory: Callable[[], Converter] = default_converter,
+        text_document_sync_kind: TextDocumentSyncKind = TextDocumentSyncKind.Incremental,
+        notebook_document_sync: NotebookDocumentSyncOptions | None = None,
+        max_workers: int = 2,
+    ):
+        super().__init__(
+            name,
+            version,
+            loop,
+            protocol_cls,
+            converter_factory,
+            text_document_sync_kind,
+            notebook_document_sync,
+            max_workers,
+        )
+        self.trees = {}
+        self.contents = {}

-    def _diagnose_parens(self, doc: TextDocument) -> Generator[Diagnostic, None, None]:
open: list[tuple[int, int]] = []
in_str: bool = False
last = ""
for row, line in enumerate(doc.lines):
for col, char in enumerate(line):
match char:
case "(":
if not in_str:
open.append((row, col))
case ")":
if not in_str:
if len(open) > 0:
open.pop()
else:
yield (
Diagnostic(
Range(
Position(row, col),
Position(row, col),
),
"unopened ) encountered",
)
)
case '"':
if not (in_str and last == "\\"):
in_str = not in_str
case _:
last = char
last = char
if len(open) > 0:
for row, col in open:
yield (
Diagnostic(
Range(Position(row, col), Position(row, col)),
"unclosed ) encountered",
)
)
def _diagnose_cisms(self, doc: TextDocument) -> Generator[Diagnostic, None, None]:
for row, line in enumerate(doc.lines):
for m in finditer(
r"(?P<proc>procedure\s+|;.*)?([a-zA-Z_][a-zA-Z_0-9]+)\(", line
):
if not m.group("proc"):
yield Diagnostic(
Range(Position(row, m.start()), Position(row, m.end())),
f"change `{m.group(2)}(` to `( {m.group(2)}`",
DiagnosticSeverity.Hint,
)
def diagnose(self, doc: TextDocument) -> None:
diags: list[Diagnostic] = []
diags.extend(self._diagnose_parens(doc))
diags.extend(self._diagnose_cisms(doc))
self.publish_diagnostics(doc.uri, diags)
     def parse(self, doc: TextDocument) -> None:
-        self.lets = []
-        self._parse_let(doc.lines)
-        self.procs = []
-        self._parse_proc(doc.lines, doc.uri)
-        self.globals = []
-        self._parse_assigns(doc.lines)
+        parsed = SKILL_PARSER.parse(doc.source.encode("utf8"), encoding="utf8")
+        self.trees[doc.uri] = parsed
+        self.contents[doc.uri] = doc

+    def update(self, uri: str, changes: list[TextDocumentContentChangeEvent]) -> None:
+        for change in changes:
+            if isinstance(change, TextDocumentContentChangeEvent_Type1):
+                logger.debug(f"updating {change.range}")
+                change_fixed = TextDocumentContentChangeEvent_Type1(
+                    offset_range(change.range, -1),
+                    change.text,
+                    change.range_length,
+                )
+                old = self.contents[uri].lines
+                self.contents[uri].apply_change(change)
+                d = Differ()
+                logger.debug("".join(d.compare(old, self.contents[uri].lines)))
+            else:
+                pass
+        self.trees[uri] = SKILL_PARSER.parse(
+            self.contents[uri].source.encode("utf8"),
+            old_tree=self.trees[uri],
+        )

+    def _get_leaves(self, node: Node) -> list[Node]:
+        if node.children:
+            return [l for child in node.children for l in self._get_leaves(child)]
+        return [node]

+    def _diagnose_errors(self, uri: str) -> list[Diagnostic]:
+        diags: list[Diagnostic] = []
+        q = SKILL_LANG.query("(ERROR) @error")
+        nodes = (
+            q.captures(self.trees[uri].root_node)["error"]
+            if self.trees.get(uri)
+            else []
+        )
+        for node in nodes:
+            if node.type == "ERROR":
+                logger.error(node)
+                logger.error(node.range)
+                content = node.text.decode("utf8") if node.text else ""
+                range = Range(
+                    Position(*node.range.start_point), Position(*node.range.end_point)
+                )
+                if "UNEXPECTED" in str(node):
+                    msg = f"unexpected '{content}'"
+                else:
+                    msg = str()
+                diags.append(
+                    Diagnostic(
+                        range,
+                        msg,
+                        severity=DiagnosticSeverity.Error,
+                    ),
+                )
+        return diags

-    def _parse_assigns(self, lines: list[str]) -> None:
-        for row, line in enumerate(lines):
-            for found in finditer(
-                r"\b([a-zA-Z_][a-zA-Z0-9_]*)((-|~)>[a-zA-Z_][a-zA-Z0-9_]*)?\s*=\s+",
-                line,
-            ):
-                token = found.group(1)
-                token_range = Range(
-                    Position(row, found.start()),
-                    Position(row, found.start() + len(token)),
-                )
-                if any(
-                    in_range(token_range.start, ns.range)
-                    and (token in (child.name for child in (ns.children or [])))
-                    for ns in chain(self.lets, self.procs)
-                ):
-                    pass
-                else:
-                    self.globals.append(
-                        DocumentSymbol(
-                            token, SymbolKind.Variable, token_range, token_range
-                        )
-                    )

-    def _parse_let(self, lines: list[str]) -> None:
-        active_let: DocumentSymbol
-        for row, line in enumerate(lines):
-            for found in finditer(r"(\(\s*let\s+|\blet\(\s+)\((.*)\)", line):
-                start = Position(row, found.start())
-                end = find_end(start, lines)
children: list[DocumentSymbol] = []
active_let = DocumentSymbol(
"let",
SymbolKind.Namespace,
Range(start, end),
Range(start, end),
children=children,
)
self.lets.append(active_let)
offset = len(found.group(1)) + 3
for local_var in finditer(
r"([a-zA-Z_][a-zA-Z0-9_]*|\([a-zA-Z_][a-zA-Z0-9_]*\s+.+\))",
found.group(2),
):
if local_var.group(1).startswith("("):
if m := fullmatch(
r"\(([a-zA-Z_][a-zA-Z0-9_]*)\s+.+\)",
local_var.group(1),
):
children.append(
DocumentSymbol(
m.group(1),
SymbolKind.Variable,
Range(
Position(row, offset + local_var.start() + 1),
Position(
row,
offset
+ local_var.start()
+ 1
+ len(m.string),
),
),
Range(
Position(row, offset + local_var.start() + 1),
Position(
row,
offset
+ local_var.start()
+ 1
+ len(m.group(1)),
),
),
)
)
else:
assert isinstance(active_let.children, list)
active_let.children.append(
DocumentSymbol(
local_var.group(1),
SymbolKind.Variable,
Range(
Position(row, offset + local_var.start()),
Position(row, offset + local_var.end()),
),
Range(
Position(row, offset + local_var.start()),
Position(row, offset + local_var.end()),
),
)
)
def _parse_proc(self, lines: list[str], uri: str) -> None:
for row, line in enumerate(lines):
for found in finditer(
r"(\(\s*procedure|\bprocedure\()(\s+)([a-zA-Z_][a-zA-Z0-9_]*)\((.*)\)",
line,
):
start = Position(row, found.start())
end = find_end(start, lines)
if "@option" in found.group(4) and "@key" in found.group(4):
self.publish_diagnostics(
uri,
[
Diagnostic(
Range(start, Position(row, len(line))),
"`@key` and `@option` used in same definition",
severity=DiagnosticSeverity.Error,
)
],
)
return
args: list[DocumentSymbol] = []
kwargs: list[DocumentSymbol] = []
rest: list[DocumentSymbol] = []
params_start = found.end() - len(found.group(4))
warning(found.group(4))
for part in finditer(
rf"(@(option|key)(\s\(\w+\s+.+\))+|@rest \w+|\"[{''.join(dt.value for dt in SkillDataType)}]+\"|(\w+\s*))",
found.group(4),
):
info(part.group(1))
if part.group(1).startswith("@rest"):
rest_var_name = part.group(1).split()[1]
rest_var_range = Range(
Position(
row,
params_start + part.end() - len(rest_var_name),
),
Position(row, params_start + part.end()),
)
rest.append(
DocumentSymbol(
rest_var_name,
kind=SymbolKind.Variable,
range=rest_var_range,
selection_range=rest_var_range,
)
)
elif part.group(1).startswith("@"):
for kwarg in finditer(r"(\((\w+)\s+[^\)]+\))", part.group(1)):
kwargs.append(
DocumentSymbol(
kwarg.group(2),
kind=SymbolKind.Variable,
range=Range(
Position(
row,
params_start + part.start() + kwarg.start(),
),
Position(
row,
params_start + part.start() + kwarg.end(),
),
),
selection_range=Range(
Position(
row,
params_start + part.start() + kwarg.start(),
),
Position(
row,
params_start
+ part.start()
+ kwarg.start()
+ len(kwarg.group(2)),
),
),
)
)
elif fullmatch(
rf'"[{"".join(dt.value for dt in SkillDataType)}]+"',
part.group(1),
):
if not (
len(args) + len(kwargs) + len(rest)
== len(part.group(1)) - 2
):
self.publish_diagnostics(
uri,
[
Diagnostic(
Range(start, Position(row, len(line))),
"type info length mismatches number of arguments",
severity=DiagnosticSeverity.Error,
)
],
)
return
for char, arg in zip(
part.group(1)[1:-1], chain(args, rest, kwargs)
):
typ = SkillDataType(char)
arg.detail = f"{typ.value}_"
break
else:
for arg in finditer(r"(\w+)", part.group(1)):
arg_range = Range(
Position(
row,
params_start + part.start() + arg.start() - 1,
),
Position(
row,
params_start + part.start() + arg.end() - 1,
),
)
args.append(
DocumentSymbol(
arg.group(1),
kind=SymbolKind.Variable,
range=arg_range,
selection_range=arg_range,
)
)
self.procs.append(
DocumentSymbol(
found.group(3),
kind=SymbolKind.Function,
range=Range(start, end),
selection_range=Range(start, Position(row, len(line))),
children=args + rest + kwargs,
)
-                )

-    def _hint_let(self) -> Generator[InlayHint, None, None]:
-        for let in self.lets:
-            if let.children:
-                for child in let.children:
-                    yield InlayHint(child.selection_range.end, "|l")

-    def _hint_proc(self) -> Generator[InlayHint, None, None]:
-        for proc in self.procs:
-            warning(proc)
-            if proc.children:
-                for child in proc.children:
-                    yield InlayHint(child.selection_range.end, "|l")
-                    if child.detail:
-                        yield InlayHint(child.selection_range.start, child.detail)

-    def _hint_globals(self) -> Generator[InlayHint, None, None]:
-        for glbl in self.globals:
-            yield InlayHint(glbl.selection_range.end, "|g")

-    def hint(self, doc: TextDocument, area: Range) -> list[InlayHint]:
-        hints: list[InlayHint] = []
-        hints.extend(self._hint_proc())
-        hints.extend(self._hint_let())
-        hints.extend(self._hint_globals())
-        return hints
+    def diagnose(self, uri: str) -> list[Diagnostic]:
+        diags: list[Diagnostic] = []
+        diags.extend(self._diagnose_errors(uri))
+        return diags

 server = SkillLanguageServer("skillls", "v0.3")

-@server.feature(TEXT_DOCUMENT_DID_SAVE)
+# @server.feature(TEXT_DOCUMENT_DID_SAVE)
 @server.feature(TEXT_DOCUMENT_DID_OPEN)
-@server.feature(TEXT_DOCUMENT_DID_CHANGE)
 def on_open(ls: SkillLanguageServer, params: DidSaveTextDocumentParams) -> None:
     doc = server.workspace.get_text_document(params.text_document.uri)
-    if not ls.diagnose(doc):
-        ls.parse(doc)
-    ls.lsp.send_request_async(WORKSPACE_INLAY_HINT_REFRESH)
+    ls.parse(doc)
+    diags = ls.diagnose(doc.uri)
+    ls.publish_diagnostics(doc.uri, diags)

-@server.feature(TEXT_DOCUMENT_INLAY_HINT)
-def inlay_hints(ls: SkillLanguageServer, params: InlayHintParams) -> list[InlayHint]:
-    doc = server.workspace.get_text_document(params.text_document.uri)
-    return ls.hint(doc, params.range)
+@server.feature(TEXT_DOCUMENT_DID_CHANGE)
+def on_change(ls: SkillLanguageServer, params: DidChangeTextDocumentParams) -> None:
+    ls.update(params.text_document.uri, changes=params.content_changes)
+    diags = ls.diagnose(params.text_document.uri)
+    ls.publish_diagnostics(params.text_document.uri, diags)

 @server.feature(TEXT_DOCUMENT_DOCUMENT_SYMBOL)
@@ -475,7 +268,8 @@ def doc_symbols(
     ls: SkillLanguageServer,
     params: DocumentSymbolParams,
 ) -> list[DocumentSymbol]:
-    return ls.procs + ls.lets + ls.defs + ls.globals
+    # return ls.procs + ls.lets + ls.defs + ls.globals
+    return []

 def main():
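
Aside, for context on the rewrite above: the regex-based _parse_*/_diagnose_* and inlay-hint code is dropped in favour of tree-sitter. A minimal standalone sketch, not taken from the commit, of the parse, "(ERROR)" query, diagnostic pattern the new diagnose path relies on, assuming the tree-sitter-skill grammar from this commit is installed:

from tree_sitter import Language, Parser
from tree_sitter_skill import language as skill_lang

SKILL = Language(skill_lang())
parser = Parser(SKILL)

source = b"(procedure broken(a b\n"  # deliberately unbalanced input
tree = parser.parse(source)

# ERROR nodes mark the regions the grammar could not parse
for node in SKILL.query("(ERROR) @error").captures(tree.root_node).get("error", []):
    print(f"syntax error {node.start_point} -> {node.end_point}: {node.text!r}")

# incremental re-parse after an edit, mirroring SkillLanguageServer.update()
tree = parser.parse(b"(procedure broken(a b)\n)\n", old_tree=tree)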

uv.lock

@@ -1,4 +1,5 @@
 version = 1
+revision = 1
 requires-python = ">=3.12"

 [[package]]
@@ -313,6 +314,8 @@ dependencies = [
     { name = "parsimonious" },
     { name = "pygls" },
     { name = "rich" },
+    { name = "tree-sitter" },
+    { name = "tree-sitter-skill" },
 ]

 [package.optional-dependencies]
@@ -333,8 +336,41 @@ requires-dist = [
     { name = "pytest", marker = "extra == 'dev'" },
     { name = "rich" },
     { name = "ruff", marker = "extra == 'dev'" },
+    { name = "tree-sitter", specifier = ">=0.24.0" },
+    { name = "tree-sitter-skill", git = "ssh://git@git.acereca.net/acereca/tree-sitter-skill.git" },
     { name = "types-parsimonious", marker = "extra == 'dev'" },
 ]
+provides-extras = ["dev"]
[[package]]
name = "tree-sitter"
version = "0.24.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a7/a2/698b9d31d08ad5558f8bfbfe3a0781bd4b1f284e89bde3ad18e05101a892/tree-sitter-0.24.0.tar.gz", hash = "sha256:abd95af65ca2f4f7eca356343391ed669e764f37748b5352946f00f7fc78e734", size = 168304 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/57/3a590f287b5aa60c07d5545953912be3d252481bf5e178f750db75572bff/tree_sitter-0.24.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:14beeff5f11e223c37be7d5d119819880601a80d0399abe8c738ae2288804afc", size = 140788 },
{ url = "https://files.pythonhosted.org/packages/61/0b/fc289e0cba7dbe77c6655a4dd949cd23c663fd62a8b4d8f02f97e28d7fe5/tree_sitter-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26a5b130f70d5925d67b47db314da209063664585a2fd36fa69e0717738efaf4", size = 133945 },
{ url = "https://files.pythonhosted.org/packages/86/d7/80767238308a137e0b5b5c947aa243e3c1e3e430e6d0d5ae94b9a9ffd1a2/tree_sitter-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fc5c3c26d83c9d0ecb4fc4304fba35f034b7761d35286b936c1db1217558b4e", size = 564819 },
{ url = "https://files.pythonhosted.org/packages/bf/b3/6c5574f4b937b836601f5fb556b24804b0a6341f2eb42f40c0e6464339f4/tree_sitter-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:772e1bd8c0931c866b848d0369b32218ac97c24b04790ec4b0e409901945dd8e", size = 579303 },
{ url = "https://files.pythonhosted.org/packages/0a/f4/bd0ddf9abe242ea67cca18a64810f8af230fc1ea74b28bb702e838ccd874/tree_sitter-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:24a8dd03b0d6b8812425f3b84d2f4763322684e38baf74e5bb766128b5633dc7", size = 581054 },
{ url = "https://files.pythonhosted.org/packages/8c/1c/ff23fa4931b6ef1bbeac461b904ca7e49eaec7e7e5398584e3eef836ec96/tree_sitter-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:f9e8b1605ab60ed43803100f067eed71b0b0e6c1fb9860a262727dbfbbb74751", size = 120221 },
{ url = "https://files.pythonhosted.org/packages/b2/2a/9979c626f303177b7612a802237d0533155bf1e425ff6f73cc40f25453e2/tree_sitter-0.24.0-cp312-cp312-win_arm64.whl", hash = "sha256:f733a83d8355fc95561582b66bbea92ffd365c5d7a665bc9ebd25e049c2b2abb", size = 108234 },
{ url = "https://files.pythonhosted.org/packages/61/cd/2348339c85803330ce38cee1c6cbbfa78a656b34ff58606ebaf5c9e83bd0/tree_sitter-0.24.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0d4a6416ed421c4210f0ca405a4834d5ccfbb8ad6692d4d74f7773ef68f92071", size = 140781 },
{ url = "https://files.pythonhosted.org/packages/8b/a3/1ea9d8b64e8dcfcc0051028a9c84a630301290995cd6e947bf88267ef7b1/tree_sitter-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e0992d483677e71d5c5d37f30dfb2e3afec2f932a9c53eec4fca13869b788c6c", size = 133928 },
{ url = "https://files.pythonhosted.org/packages/fe/ae/55c1055609c9428a4aedf4b164400ab9adb0b1bf1538b51f4b3748a6c983/tree_sitter-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57277a12fbcefb1c8b206186068d456c600dbfbc3fd6c76968ee22614c5cd5ad", size = 564497 },
{ url = "https://files.pythonhosted.org/packages/ce/d0/f2ffcd04882c5aa28d205a787353130cbf84b2b8a977fd211bdc3b399ae3/tree_sitter-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25fa22766d63f73716c6fec1a31ee5cf904aa429484256bd5fdf5259051ed74", size = 578917 },
{ url = "https://files.pythonhosted.org/packages/af/82/aebe78ea23a2b3a79324993d4915f3093ad1af43d7c2208ee90be9273273/tree_sitter-0.24.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7d5d9537507e1c8c5fa9935b34f320bfec4114d675e028f3ad94f11cf9db37b9", size = 581148 },
{ url = "https://files.pythonhosted.org/packages/a1/b4/6b0291a590c2b0417cfdb64ccb8ea242f270a46ed429c641fbc2bfab77e0/tree_sitter-0.24.0-cp313-cp313-win_amd64.whl", hash = "sha256:f58bb4956917715ec4d5a28681829a8dad5c342cafd4aea269f9132a83ca9b34", size = 120207 },
{ url = "https://files.pythonhosted.org/packages/a8/18/542fd844b75272630229c9939b03f7db232c71a9d82aadc59c596319ea6a/tree_sitter-0.24.0-cp313-cp313-win_arm64.whl", hash = "sha256:23641bd25dcd4bb0b6fa91b8fb3f46cc9f1c9f475efe4d536d3f1f688d1b84c8", size = 108232 },
]
[[package]]
name = "tree-sitter-skill"
version = "0.1.1"
source = { git = "ssh://git@git.acereca.net/acereca/tree-sitter-skill.git#ce8634713b13f1787837fd9a7c515383ecedac07" }
dependencies = [
{ name = "tree-sitter" },
]
 [[package]]
 name = "types-parsimonious"