Compare commits

...

11 Commits

10 changed files with 1455 additions and 135 deletions

1086
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,9 +1,19 @@
[package]
name = "skill-oxide"
version = "0.1.0"
name = "srls"
version = "0.2.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
regex = "1"
glob = "*"
pest = "2.0"
pest_derive = "2.0"
tokio = { version = "1", features = ["full"] }
tower-lsp = "*"
serde = "*"
serde_json = "*"
tracing-subscriber = "*"
tracing-appender = "*"
dashmap = "5.4.0"
log = "*"
simple-logging = "2.0.2"
walkdir = "*"

55
src/cache.rs Normal file
View File

@ -0,0 +1,55 @@
use dashmap::DashMap;
use log::info;
use tower_lsp::lsp_types::CompletionItem;
use crate::parse_skill;
// Zero-based document position (line/character), mirroring an LSP position.
// NOTE(review): only referenced by `Range` below; neither is used by
// `SymbolCache` yet — presumably reserved for future symbol-location tracking.
#[derive(PartialEq, Debug)]
struct Position {
    line: u32,
    character: u16,
}
// Span between two document positions.
// NOTE(review): not referenced anywhere else in this file — TODO confirm it is
// still needed, or wire it into the cached symbols.
#[derive(PartialEq, Debug)]
struct Range {
    from: Position,
    to: Position,
}
/// Thread-safe cache mapping a file path to the completion items parsed
/// from that file (see `update`). `DashMap` allows concurrent access from
/// the async LSP handlers without an explicit lock.
#[derive(Debug)]
pub struct SymbolCache {
    pub symbols: DashMap<String, Vec<CompletionItem>>,
}
// Marker error type for cache misses.
// NOTE(review): declared but never constructed or returned in this file —
// TODO either use it as an error type for lookups or remove it.
#[derive(Debug, Clone)]
struct FileNotInCache;
impl SymbolCache {
    /// Create an empty symbol cache.
    pub fn new() -> SymbolCache {
        let symbols = DashMap::new();
        SymbolCache { symbols }
    }

    /// Re-parse the file at `path` and replace whatever completion items
    /// were previously cached for it.
    pub fn update(&self, path: &str) {
        let items = parse_skill(path);
        info!("parsed: {:?}", items);
        self.symbols.insert(path.to_owned(), items);
    }
}
// Placeholder test module: the assertions below were written against an
// earlier `documents`-based API and were left commented out when the cache
// moved to the `symbols: DashMap` field.
#[cfg(test)]
mod tests {
    // use crate::cache::SymbolCache;
    // use std::collections::HashMap;
    // use std::path::Path;

    // TODO(review): restore a real assertion against `SymbolCache::symbols`;
    // the commented code targets a `documents` field that no longer exists,
    // and `update()` now requires a path argument.
    #[test]
    fn insert() {
        // let mut d = SymbolCache::new();
        // d.update();
        // let mut comp = HashMap::new();
        // comp.insert();
        // assert_eq!(d.documents, comp)
    }
}

View File

@ -1,17 +1,179 @@
use std::{env, fs, io::Read};
use regex::Regex;
use states::Tokenizer;
mod cache;
use cache::SymbolCache;
mod states;
fn main() {
let args: Vec<String> = env::args().collect();
let re = Regex::new(r"((?P<comment>;)|(?P<word>\b\w+\b))").unwrap();
mod skill;
use skill::{parse_global_symbols, parse_skill};
let mut file = fs::File::open(args[1].as_str()).expect("msg");
let mut data = String::new();
file.read_to_string(&mut data).expect("msg");
use serde::{Deserialize, Serialize};
use serde_json::Value;
use tower_lsp::jsonrpc::{Error, ErrorCode, Result};
use tower_lsp::lsp_types::notification::Notification;
use tower_lsp::lsp_types::*;
use tower_lsp::{Client, LanguageServer, LspService, Server};
let mut tokenizer = Tokenizer::new();
tokenizer.read_in(data);
use log::{debug, info};
use walkdir::WalkDir;
extern crate glob;
extern crate pest;
#[macro_use]
extern crate pest_derive;
/// Shared state for the language server: the handle used to talk back to the
/// LSP client, plus the per-file symbol cache built from parsed `.il` files.
#[derive(Debug)]
struct Backend {
    client: Client,
    cache: SymbolCache,
}
/// Payload for the server-initiated "custom/notification" message
/// (see the `Notification` impl for `CustomNotification` below).
#[derive(Debug, Deserialize, Serialize)]
struct CustomNotificationParams {
    title: String,
    message: String,
}
impl CustomNotificationParams {
    /// Build notification params from anything convertible into `String`.
    fn new(title: impl Into<String>, message: impl Into<String>) -> Self {
        let title = title.into();
        let message = message.into();
        CustomNotificationParams { title, message }
    }
}
/// Uninhabited marker type binding the "custom/notification" method name to
/// its parameter type for `tower_lsp`'s typed notification API.
enum CustomNotification {}

impl Notification for CustomNotification {
    type Params = CustomNotificationParams;
    const METHOD: &'static str = "custom/notification";
}
#[tower_lsp::async_trait]
impl LanguageServer for Backend {
    /// Handshake: walk the workspace root, index every `.il` file into the
    /// symbol cache, then advertise completion and execute-command support.
    async fn initialize(&self, init_params: InitializeParams) -> Result<InitializeResult> {
        let root = init_params
            .root_uri
            .ok_or(Error::new(ErrorCode::InvalidParams))?;
        info!(target: "Backend", "Initializing Language Server");
        let root_dir = root.path().to_string();
        info!(target: "Backend", "Caching started in '{}'", root_dir);
        for entry in WalkDir::new(root_dir)
            .follow_links(true)
            .into_iter()
            .filter_map(|e| e.ok())
        {
            // Only SKILL sources (*.il) are parsed into the cache; anything
            // else in the tree is skipped.
            if let Some(path) = entry.path().to_str() {
                if path.ends_with(".il") {
                    info!("found '{}'", path);
                    self.cache.update(path);
                }
            }
        }
        info!(target: "Backend", "Caching finished. Found {} files.", self.cache.symbols.len());
        debug!(target: "Backend", "{:?}", self.cache.symbols);
        Ok(InitializeResult {
            server_info: None,
            capabilities: ServerCapabilities {
                workspace: Some(WorkspaceServerCapabilities {
                    workspace_folders: Some(WorkspaceFoldersServerCapabilities {
                        supported: Some(true),
                        change_notifications: Some(OneOf::Left(true)),
                    }),
                    file_operations: None,
                }),
                execute_command_provider: Some(ExecuteCommandOptions {
                    // Single source of truth for the command name; must match
                    // the comparison in `execute_command` below.
                    commands: vec![CustomNotification::METHOD.to_string()],
                    work_done_progress_options: Default::default(),
                }),
                completion_provider: Some(CompletionOptions {
                    resolve_provider: Some(false),
                    trigger_characters: Some(vec!["(".to_string()]),
                    work_done_progress_options: Default::default(),
                    all_commit_characters: None,
                    ..Default::default()
                }),
                ..ServerCapabilities::default()
            },
        })
    }

    /// Post-handshake hook: log readiness and send a demo custom notification.
    async fn initialized(&self, _: InitializedParams) {
        self.client
            .log_message(MessageType::INFO, "server initialized!")
            .await;
        self.client
            .send_notification::<CustomNotification>(CustomNotificationParams::new(
                "title", "message",
            ))
            .await;
    }

    async fn shutdown(&self) -> Result<()> {
        Ok(())
    }

    /// Execute a workspace command.
    ///
    /// BUG FIX: this previously compared against "custom.notification" while
    /// `initialize` registered "custom/notification", so the advertised
    /// command always returned `invalid_request`. Both sides now use
    /// `CustomNotification::METHOD`.
    async fn execute_command(&self, params: ExecuteCommandParams) -> Result<Option<Value>> {
        if params.command == CustomNotification::METHOD {
            self.client
                .send_notification::<CustomNotification>(CustomNotificationParams::new(
                    "Hello", "Message",
                ))
                .await;
            self.client
                .log_message(
                    MessageType::INFO,
                    format!("Command executed with params: {params:?}"),
                )
                .await;
            Ok(None)
        } else {
            Err(Error::invalid_request())
        }
    }

    /// Completion: return the cached items for the requested file.
    ///
    /// BUG FIX: previously `unwrap()`ed the cache lookup and panicked when a
    /// file had not been indexed (e.g. a non-`.il` file or one outside the
    /// workspace root); now answers `Ok(None)` for a cache miss.
    async fn completion(&self, cparams: CompletionParams) -> Result<Option<CompletionResponse>> {
        info!("triggered completion");
        let path = cparams
            .text_document_position
            .text_document
            .uri
            .path()
            .to_string();
        info!("for: {:?}", path);
        info!("with: {:?}", self.cache.symbols);
        let resp = self.cache.symbols.get(&path);
        info!("returned: {:?}", resp);
        Ok(resp.map(|items| CompletionResponse::Array(items.to_vec())))
    }

    /// Keep the cache fresh: re-parse a file every time it is saved.
    async fn did_save(&self, params: DidSaveTextDocumentParams) {
        let path = params.text_document.uri.path().to_string();
        // typo fix: "cahce" -> "cache"
        info!("updating cache for {:?}", path);
        self.cache.update(&path);
    }
}
/// Entry point: configure file-based tracing, then serve the LSP over stdio.
#[tokio::main]
async fn main() {
    // Diagnostics go to ./srls.out so stdout stays reserved for LSP traffic.
    let writer = tracing_appender::rolling::never(".", "srls.out");
    tracing_subscriber::fmt().with_writer(writer).init();
    info!(target: "main", "Starting");

    let stdin = tokio::io::stdin();
    let stdout = tokio::io::stdout();
    let make_backend = |client| Backend {
        client,
        cache: SymbolCache::new(),
    };
    let (service, socket) = LspService::new(make_backend);
    info!("Creating server instance.");
    Server::new(stdin, stdout, socket).serve(service).await;
}

28
src/skill.pest Normal file
View File

@ -0,0 +1,28 @@
// Grammar for a subset of SKILL, a Lisp dialect that also allows
// C-style calls (`name(args)`). `_` rules are silent, `@` rules are atomic.
WHITESPACE = _{ (" " | NEWLINE) }
// Identifiers: a leading letter followed by letters, digits, or underscores.
token_char = _{(NUMBER | LETTER | "_")}
token = @{ LETTER ~ token_char* }
// Double-quoted string; no escape handling — TODO confirm SKILL needs escapes.
string = @{"\"" ~ (!"\"" ~ ANY)* ~ "\""}
// Integer or decimal, with optional exponent (e.g. 1, 1.3, 1e9, 3.5e-4).
number = @{NUMBER+ ~ ("." ~ NUMBER+)? ~ ("e" ~ "-"? ~ NUMBER+)?}
// SKILL booleans: `t` (true) and `nil` (false/empty).
bool = {"t" | "nil"}
literal = _{string | number | bool}
// Line comment from ';' to end of line. Not silent: `;;;` docstrings are
// consumed in skill.rs and attached to the following assignment.
COMMENT = {";" ~ (!NEWLINE ~ ANY)* ~ NEWLINE}
// Plain list, quoted (lazy) list, and C-style call `token(...)`.
lisp_list = _{ "(" ~ expr* ~ ")" }
lazy_list = _{ "'(" ~ expr* ~ ")" }
cstyle_list = @{ token ~ lisp_list}
list = { (cstyle_list | lisp_list | lazy_list) }
// Assignment `name = value`; skill.rs turns these into completion items.
assign = {token ~ "=" ~ (inline_expr | list | token | literal)}
// Infix arithmetic, e.g. `a + b - c`.
inline_operand = _{(list | token | literal)}
inline_operator = {("-" | "+" | "/" | "*")}
inline_expr = {inline_operand ~ (inline_operator ~ inline_operand)+}
// Property access: `obj->field` or `obj~>field`.
get_operator = {("->" | "~>")}
get = {token ~ get_operator ~ (list | token)}
expr = _{(get | assign | list | token | literal)}
// A whole source file: any number of expressions between SOI and EOI.
skill = { SOI ~ expr* ~ EOI }

82
src/skill.rs Normal file
View File

@ -0,0 +1,82 @@
use log::info;
use pest::error::Error;
use pest::iterators::Pair;
use pest::Parser;
use std::fs;
use tower_lsp::lsp_types::{
lsif::{EdgeDataMultiIn, Item, ItemKind},
CompletionItem, CompletionItemKind, CompletionItemLabelDetails, Documentation, MarkupContent,
MarkupKind,
};
/// Pest parser generated from `skill.pest`; `Rule` is derived from the
/// grammar's rule names.
#[derive(Parser)]
#[grammar = "skill.pest"]
pub struct SkillParser;
/// Walk one parse-tree node, appending a completion item to `catalog` for
/// every assignment found.
///
/// `last_comment` carries the most recent `;;;` docstring forward so it can
/// be attached to the assignment that follows it; the (possibly updated)
/// carry-over is returned for the caller to thread into the next sibling.
fn recurse_pairs(
    ps: Pair<Rule>,
    catalog: &mut Vec<CompletionItem>,
    last_comment: Option<String>,
) -> Option<String> {
    let mut comment = last_comment;
    match ps.as_rule() {
        // Containers: recurse into children, threading the docstring through
        // consecutive siblings. (Merged duplicate arms for skill/list.)
        Rule::skill | Rule::list => {
            comment = None;
            for p in ps.into_inner() {
                comment = recurse_pairs(p, catalog, comment);
            }
        }
        Rule::COMMENT => {
            info!("encountered comment: {:?}", ps.as_str());
            // Only triple-semicolon comments count as docstrings; ordinary
            // comments leave any pending docstring untouched.
            if let Some(doc) = ps.as_str().strip_prefix(";;;") {
                comment = Some(doc.trim().to_owned());
                info!("encountered docstring: {:?}", comment);
            }
        }
        Rule::assign => {
            // The first inner pair of an `assign` is the assigned token,
            // i.e. the symbol name (see the `assign` rule in skill.pest).
            let k = ps.into_inner().next().unwrap();
            catalog.push(CompletionItem {
                label: k.as_str().to_owned(),
                kind: Some(CompletionItemKind::VARIABLE),
                // Attach the preceding `;;;` docstring, if any.
                detail: comment.clone(),
                label_details: Some(CompletionItemLabelDetails {
                    description: None,
                    detail: Some("global".to_owned()),
                }),
                ..Default::default()
            });
            // A docstring applies to at most one assignment.
            comment = None;
        }
        // Any other node breaks docstring/assignment adjacency.
        _ => {
            comment = None;
        }
    }
    comment
}
pub fn parse_skill(path: &str) -> Vec<CompletionItem> {
let content = fs::read_to_string(path).expect("could not read file");
let parsed = SkillParser::parse(Rule::skill, &content);
let mut ret = vec![];
let mut last_comment: Option<String> = None;
for pairs in parsed.into_iter() {
for pair in pairs.into_iter() {
last_comment = recurse_pairs(pair, &mut ret, last_comment)
}
}
ret
}
/// Stub: intended to extract globally visible symbols from a single token.
/// Currently always returns `Ok("")` and ignores its argument.
/// TODO(review): implement or remove — `main.rs` imports it but nothing
/// visible here calls it.
pub fn parse_global_symbols(token: Pair<Rule>) -> Result<&str, Error<Rule>> {
    Ok("")
}

View File

@ -1,96 +0,0 @@
// Lexer states for the hand-rolled character tokenizer.
// NOTE(review): this module is deleted in this change set, superseded by the
// pest grammar in skill.pest.
#[derive(Debug)]
pub enum CursorState {
    Comment,
    List,
    Token,
    Literal,
    // Never pushed by `Tokenizer::match_char` below.
    Operator
}
// A token: its lexical class plus nested tokens (for lists).
// NOTE(review): `content` is never populated — `Tokenizer` only tracks its
// state stack and never constructs `Token` values.
pub struct Token {
    typ: CursorState,
    content: Vec<Token>,
}
// Character-driven tokenizer: a stack of lexer states plus an (unused)
// token tree output.
// NOTE(review): field names are camelCase rather than Rust's snake_case.
pub struct Tokenizer {
    stateStack: Vec<CursorState>,
    tokenTree: Vec<Token>,
}
impl Tokenizer {
    /// Create a tokenizer with an empty state stack and token tree.
    pub fn new() -> Tokenizer {
        Tokenizer {
            stateStack: Vec::new(),
            tokenTree: Vec::new(),
        }
    }

    /// Advance the state machine by one character, pushing/popping lexer
    /// states. Panics on a top-level character that cannot start a comment,
    /// list, or symbol.
    fn match_char(&mut self, c: char) {
        match self.stateStack.last() {
            // Top level: whitespace, comment start, list start, or a bare
            // alphanumeric (consumed without entering a state).
            None => match c {
                c if c.is_whitespace() => {}
                ';' => {
                    self.stateStack.push(CursorState::Comment);
                }
                '(' => {
                    self.stateStack.push(CursorState::List);
                }
                c if c.is_alphanumeric() => {}
                _ => {
                    println!("{}", c);
                    panic!("not a comment, list or symbol ");
                }
            },
            // Comments end at a control character (e.g. newline); all other
            // characters are swallowed.
            Some(CursorState::Comment) => match c {
                c if c.is_control() => {
                    self.stateStack.pop();
                }
                c if c.is_alphanumeric() => {}
                c if c.is_whitespace() => {}
                _ => {}
            },
            // Inside a list: nested lists, tokens (leading letter), and
            // literals (digit or quote) may begin; ')' closes the list.
            Some(CursorState::List) => match c {
                '(' => {
                    self.stateStack.push(CursorState::List);
                }
                ')' => {
                    self.stateStack.pop();
                }
                c if c.is_alphabetic() => {
                    self.stateStack.push(CursorState::Token);
                }
                c if c.is_numeric() => {
                    self.stateStack.push(CursorState::Literal);
                }
                '"' => {
                    self.stateStack.push(CursorState::Literal);
                }
                _ => {}
            },
            // A token ends at the first non-alphanumeric character, which is
            // then re-dispatched in the parent state (recursive call).
            Some(CursorState::Token) => match c {
                c if !c.is_alphanumeric() => {
                    self.stateStack.pop();
                    self.match_char(c);
                }
                _ => {}
            },
            // Literals end on whitespace or a double quote.
            // NOTE(review): a number directly followed by ')' stays in the
            // Literal state — presumably a known limitation of this lexer.
            Some(CursorState::Literal) => match c {
                c if c.is_whitespace() => {
                    self.stateStack.pop();
                }
                '"' => {
                    self.stateStack.pop();
                }
                _ => {}
            },
            // Unreachable today: Operator is never pushed (see CursorState).
            _ => {}
        }
    }

    /// Feed an entire buffer through the state machine, printing each
    /// character's resulting stack depth and top state for debugging.
    pub fn read_in(&mut self, content: String) {
        for c in content.chars() {
            self.match_char(c);
            println!("{} -> {}: {:?}", c, self.stateStack.len(), self.stateStack.last().clone());
        }
    }
}

BIN
srls.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.4 KiB

8
srls.svg Normal file
View File

@ -0,0 +1,8 @@
<svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" viewBox="0 0 32 32">
<!-- <circle cx="28" cy="10" r="2" fill="#e20c2a"/> -->
<path fill="#e20c2a" d="M18 29h-7v-2h7c4.963 0 9-4.037 9-9v-4h2v4c0 6.065-4.935 11-11 11Z"/>
<!-- <path fill="#e20c2a" d="M16 21c-2.757 0-5-2.243-5-5s2.243-5 5-5s5 2.243 5 5s-2.243 5-5 5Zm0-8c-1.654 0-3 1.346-3 3s1.346 3 3 3s3-1.346 3-3s-1.346-3-3-3Z"/> -->
<!-- <circle cx="4" cy="22" r="2" fill="#e20c2a"/> -->
<path fill="#e20c2a" d="M5 18H3v-4C3 7.935 7.935 3 14 3h7v2h-7c-4.963 0-9 4.037-9 9v4Z"/>
<text x="16" y="16" dx="-5" dy="6" style="font-size: 16px; fill: #e20c2a; font-family: 'FiraCode Nerd Font'; font-weight: 500;">S</text>
</svg>

After

Width:  |  Height:  |  Size: 704 B

View File

@ -1,11 +1,24 @@
;comment with some words
(function1 arg1 arg2
(a b c)
cstyle( arg3)
1
1.3
1e9
3.5e-4
"string"
nil
t
var1 = "string"
var2 = 1
var3 = nil
var4 = t
)
token
(list with items)
'(lazy list)
cstyle(list with args)
;;; precursor magnitude as controlvector
tap_mag = arg1 + arg2 - " A" / 12
var2 = "a"
obj->op()
(let (a (b 3))
expr1
expr2
)