From 315acdaff702112b53b8353e3951d253d3fa7cf9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=9F=B3=E5=8D=9A=E6=96=87?= Date: Sat, 2 Nov 2024 14:58:05 +0800 Subject: [PATCH] feat: Add more tokens --- lib/src/parser/lexer.rs | 6 +++++ lib/src/parser/token.rs | 42 +++++++++++++++++++++++++++++++++ lsp/src/lsp.rs | 51 +++++++++++++++-------------------------- 3 files changed, 66 insertions(+), 33 deletions(-) diff --git a/lib/src/parser/lexer.rs b/lib/src/parser/lexer.rs index e227197..d2c8492 100644 --- a/lib/src/parser/lexer.rs +++ b/lib/src/parser/lexer.rs @@ -179,6 +179,12 @@ impl StLexerBuilder { TokenKind::Then, TokenKind::ElseIf, TokenKind::EndIf, + TokenKind::For, + TokenKind::EndFor, + TokenKind::By, + TokenKind::Do, + TokenKind::Continue, + TokenKind::Break, TokenKind::Function, TokenKind::EndFunction, TokenKind::Program, diff --git a/lib/src/parser/token.rs b/lib/src/parser/token.rs index 018eb54..1b63d22 100644 --- a/lib/src/parser/token.rs +++ b/lib/src/parser/token.rs @@ -117,6 +117,18 @@ pub enum TokenKind { EndIf, /// 'TO' To, + /// 'FOR' + For, + /// 'BY' + By, + /// 'END_FOR' + EndFor, + /// 'CONTINUE' + Continue, + /// 'BREAK' + Break, + /// 'DO' + Do, /// 'FUNCTION' Function, /// 'END_FUNCTION' @@ -216,6 +228,30 @@ impl TokenKind { ) } + #[inline] + pub fn is_keywords(&self) -> bool { + matches!( + *self, + TokenKind::If + | TokenKind::Else + | TokenKind::ElseIf + | TokenKind::EndIf + | TokenKind::For + | TokenKind::EndFor + | TokenKind::By + | TokenKind::Break + | TokenKind::Do + | TokenKind::Continue + | TokenKind::Program + | TokenKind::EndProgram + | TokenKind::Var + | TokenKind::VarGlobal + | TokenKind::Then + | TokenKind::Array + | TokenKind::EndVar + ) + } + pub fn kind_match(&self, rhs: &TokenKind) -> bool { match *self { TokenKind::AssignRight => matches!(rhs, TokenKind::AssignRight), @@ -365,6 +401,12 @@ impl From<&TokenKind> for String { TokenKind::Time => "TIME", TokenKind::LTime => "LTIME", TokenKind::String => "STRING", + TokenKind::For 
=> "FOR", + TokenKind::By => "BY", + TokenKind::EndFor => "END_FOR", + TokenKind::Continue => "CONTINUE", + TokenKind::Break => "BREAK", + TokenKind::Do => "DO", TokenKind::Literal(x) => { tmp_string = format!("{}", x); tmp_string.as_str() diff --git a/lsp/src/lsp.rs b/lsp/src/lsp.rs index fd1c199..f84131d 100644 --- a/lsp/src/lsp.rs +++ b/lsp/src/lsp.rs @@ -16,34 +16,28 @@ fn semantic_token_type_id(tok: &TokenKind) -> (u32, u32) { TokenKind::String => (TokenTypes::String as u32, TokenModifiers::None as u32), // operators op if op.is_operator() => (TokenTypes::Operator as u32, TokenModifiers::None as u32), - // builtin-types - TokenKind::Int => (TokenTypes::Type as u32, TokenModifiers::None as u32), // builtin-operators TokenKind::SizeOf | TokenKind::Adr => ( TokenTypes::BuiltinFunction as u32, TokenModifiers::None as u32, ), + // builtin-types + _ if tok.is_type() => (TokenTypes::Type as u32, TokenModifiers::None as u32), // keywords - TokenKind::If - | TokenKind::Then - | TokenKind::EndIf - | TokenKind::Var - | TokenKind::EndVar - | TokenKind::Program - | TokenKind::EndProgram => (TokenTypes::Keyword as u32, TokenModifiers::None as u32), + _ if tok.is_keywords() => (TokenTypes::Keyword as u32, TokenModifiers::None as u32), _ => (TokenTypes::None as u32, TokenModifiers::None as u32), } } pub struct StcLsp { - _client: Client, + client: Client, src_mgr: DashMap<Url, String>, } impl StcLsp { pub fn new(c: Client) -> Self { Self { - _client: c, + client: c, src_mgr: DashMap::new(), } } @@ -89,6 +83,10 @@ impl LanguageServer for StcLsp { } async fn shutdown(&self) -> Result<()> { + self.client + .show_message(MessageType::INFO, "shutdown") + .await; + Ok(()) } @@ -101,7 +99,7 @@ impl LanguageServer for StcLsp { } async fn did_change(&self, params: DidChangeTextDocumentParams) { - trace!("{:?}", params); + trace!("did_change: {}", params.text_document.uri); for change in params.content_changes.into_iter() { // Only full text support @@ -113,7 +111,11 @@ impl LanguageServer for 
StcLsp { } async fn did_save(&self, params: DidSaveTextDocumentParams) { - trace!("{:?}", params); + trace!("did_save: {}", params.text_document.uri); + + if let Some(content) = params.text { + self.on_file_change(&params.text_document.uri, content) + } } async fn did_close(&self, params: DidCloseTextDocumentParams) { @@ -125,26 +127,9 @@ impl LanguageServer for StcLsp { async fn document_highlight( &self, - params: DocumentHighlightParams, + _params: DocumentHighlightParams, ) -> Result<Option<Vec<DocumentHighlight>>> { - trace!("{:?}", params.text_document_position_params); - - // let mut highlights = Vec::with_capacity(64); - // highlights.push(DocumentHighlight { - // range: Range { - // start: Position { - // line: 0, - // character: 0, - // }, - // end: Position { - // line: 0, - // character: 3, - // }, - // }, - // kind: None, - // }); - - // Ok(Some(highlights)) + // trace!("{:?}", params.text_document_position_params); Ok(None) } @@ -153,7 +138,7 @@ impl LanguageServer for StcLsp { &self, params: SemanticTokensParams, ) -> Result<Option<SemanticTokensResult>> { - trace!("{:?}", params); + trace!("tokens_full: {}", params.text_document.uri); let s = self.src_mgr.get(&params.text_document.uri).unwrap(); let lexer = StLexerBuilder::new().build_iter(s.chars());