From a0f6e51410d2957840b51535a1750084a338d02b Mon Sep 17 00:00:00 2001
From: Ashar
Date: Mon, 13 Jan 2025 01:17:48 +0530
Subject: [PATCH] feat: Read import path and produce import diagnostics (#49)

---
 README.md                                     |   8 +-
 protols.toml                                  |   2 +
 ...rce_content.proto => source_context.proto} |   0
 src/config/mod.rs                             |  11 +-
 src/config/workspace.rs                       |  17 +-
 src/lsp.rs                                    |  31 ++-
 src/parser/diagnostics.rs                     |  35 ++-
 src/parser/docsymbol.rs                       |   2 +-
 ...gnostics__test__collect_parse_error-2.snap |  25 +--
 ...iagnostics__test__collect_parse_error.snap |   6 +-
 src/parser/tree.rs                            |  44 +++-
 src/server.rs                                 |   1 +
 src/state.rs                                  | 211 ++++++++++--------
 src/workspace/definition.rs                   |   9 +-
 src/workspace/hover.rs                        |   9 +-
 src/workspace/rename.rs                       |  16 +-
 16 files changed, 257 insertions(+), 170 deletions(-)
 create mode 100644 protols.toml
 rename sample/google/protobuf/{source_content.proto => source_context.proto} (100%)

diff --git a/README.md b/README.md
index 06e6fca..7c4d5c1 100644
--- a/README.md
+++ b/README.md
@@ -3,6 +3,8 @@
 [![Crates.io](https://img.shields.io/crates/v/protols.svg)](https://crates.io/crates/protols)
 [![Build and Test](https://github.com/coder3101/protols/actions/workflows/ci.yml/badge.svg)](https://github.com/coder3101/protols/actions/workflows/ci.yml)
 
+**WARNING**: The master branch is undergoing a massive refactoring; please use the last released tag instead.
+
 **Protols** is an open-source, feature-rich [Language Server Protocol (LSP)](https://microsoft.github.io/language-server-protocol/) for **Protocol Buffers (proto)** files. Powered by the efficient [tree-sitter](https://tree-sitter.github.io/tree-sitter/) parser, Protols offers intelligent code assistance for protobuf development, including features like auto-completion, diagnostics, formatting, and more.
 
 ![Protols Demo](./assets/protols.mov)
@@ -71,7 +73,7 @@ If you're using Visual Studio Code, you can install the [Protobuf Language Suppo
 
 ## ⚙️ Configuration
 
-Protols is configured using a `protols.toml` file, which you can place in any directory. **Protols** will search for the closest configuration file by recursively traversing the parent directories.
+Protols is configured using a `protols.toml` file, which you can place in any directory.
 
 ### Sample `protols.toml`
 
@@ -108,10 +110,6 @@ The `[formatter]` section allows configuration for code formatting.
 
 - `clang_format_path`: Specify the path to the `clang-format` binary.
 
-### Multiple Configuration Files
-
-You can place multiple `protols.toml` files across different directories. **Protols** will use the closest configuration file by searching up the directory tree.
-
 ---
 
 ## 🛠️ Usage
diff --git a/protols.toml b/protols.toml
new file mode 100644
index 0000000..611a64a
--- /dev/null
+++ b/protols.toml
@@ -0,0 +1,2 @@
+[config]
+include_paths = ["sample", "src/workspace/input"]
diff --git a/sample/google/protobuf/source_content.proto b/sample/google/protobuf/source_context.proto
similarity index 100%
rename from sample/google/protobuf/source_content.proto
rename to sample/google/protobuf/source_context.proto
diff --git a/src/config/mod.rs b/src/config/mod.rs
index e770af8..8c5e6f7 100644
--- a/src/config/mod.rs
+++ b/src/config/mod.rs
@@ -6,7 +6,7 @@ fn default_clang_format_path() -> String {
     "clang-format".to_string()
 }
 
-#[derive(Serialize, Deserialize, Debug, Clone)]
+#[derive(Serialize, Deserialize, Debug, Clone, Default)]
 #[serde(default)]
 pub struct ProtolsConfig {
     pub config: Config,
@@ -34,15 +34,6 @@ pub struct ExperimentalConfig {
     pub use_protoc_diagnostics: bool,
 }
 
-impl Default for ProtolsConfig {
-    fn default() -> Self {
-        Self {
-            config: Config::default(),
-            formatter: FormatterConfig::default(),
-        }
-    }
-}
-
 impl Default for FormatterConfig {
     fn default() -> Self {
         Self {
diff --git a/src/config/workspace.rs b/src/config/workspace.rs
index d88193b..b6a22b6 100644
--- a/src/config/workspace.rs
+++ b/src/config/workspace.rs
@@ -1,6 +1,6 @@
 use std::{
     collections::{HashMap, HashSet},
-    path::Path,
+    path::{Path, PathBuf},
 };
 
 use async_lsp::lsp_types::{Url, WorkspaceFolder};
@@ -61,6 +61,21 @@ impl WorkspaceProtoConfigs {
             .iter()
             .find(|&k| upath.starts_with(k.to_file_path().unwrap()))
     }
+
+    pub fn get_include_paths(&self, uri: &Url) -> Option<Vec<PathBuf>> {
+        let c = self.get_config_for_uri(uri)?;
+        let w = self.get_workspace_for_uri(uri)?.to_file_path().ok()?;
+        let mut ipath: Vec<PathBuf> = c
+            .config
+            .include_paths
+            .iter()
+            .map(PathBuf::from)
+            .map(|p| if p.is_relative() { w.join(p) } else { p })
+            .collect();
+
+        ipath.push(w.to_path_buf());
+        Some(ipath)
+    }
 }
 
 #[cfg(test)]
diff --git a/src/lsp.rs b/src/lsp.rs
index 55e0bc9..52c91af 100644
--- a/src/lsp.rs
+++ b/src/lsp.rs
@@ -1,6 +1,4 @@
 use std::ops::ControlFlow;
-use std::sync::mpsc;
-use std::thread;
 use std::{collections::HashMap, fs::read_to_string};
 
 use tracing::{error, info};
@@ -12,11 +10,12 @@ use async_lsp::lsp_types::{
     DocumentSymbolParams, DocumentSymbolResponse, FileOperationFilter, FileOperationPattern,
     FileOperationPatternKind, FileOperationRegistrationOptions, GotoDefinitionParams,
     GotoDefinitionResponse, Hover, HoverContents, HoverParams, HoverProviderCapability,
-    InitializeParams, InitializeResult, Location, OneOf, PrepareRenameResponse, ProgressParams,
-    ReferenceParams, RenameFilesParams, RenameOptions, RenameParams, ServerCapabilities,
-    ServerInfo, TextDocumentPositionParams, TextDocumentSyncCapability, TextDocumentSyncKind,
-    TextEdit, Url, WorkspaceEdit, WorkspaceFileOperationsServerCapabilities,
-    WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
+    InitializeParams, InitializeResult, Location, OneOf, PrepareRenameResponse,
+    ReferenceParams, RenameFilesParams, RenameOptions, RenameParams,
+    ServerCapabilities, ServerInfo, TextDocumentPositionParams, TextDocumentSyncCapability,
+    TextDocumentSyncKind, TextEdit, Url, WorkspaceEdit,
+    WorkspaceFileOperationsServerCapabilities, WorkspaceFoldersServerCapabilities,
+    WorkspaceServerCapabilities,
 };
 use async_lsp::{LanguageClient, LanguageServer, ResponseError};
 use futures::future::BoxFuture;
@@ -377,15 +376,19 @@ impl LanguageServer for ProtoLanguageServer {
         let uri = params.text_document.uri;
         let content = params.text_document.text;
 
-        let Some(diagnostics) = self.state.upsert_file(&uri, content) else {
+        let Some(ipath) = self.configs.get_include_paths(&uri) else {
             return ControlFlow::Continue(());
         };
 
-        let Some(ws) = self.configs.get_config_for_uri(&uri) else {
+        let Some(diagnostics) = self.state.upsert_file(&uri, content.clone(), &ipath) else {
             return ControlFlow::Continue(());
         };
 
-        if !ws.config.disable_parse_diagnostics {
+        let Some(pconf) = self.configs.get_config_for_uri(&uri) else {
+            return ControlFlow::Continue(());
+        };
+
+        if !pconf.config.disable_parse_diagnostics {
             if let Err(e) = self.client.publish_diagnostics(diagnostics) {
                 error!(error=%e, "failed to publish diagnostics")
             }
@@ -397,7 +400,11 @@ impl LanguageServer for ProtoLanguageServer {
         let uri = params.text_document.uri;
         let content = params.content_changes[0].text.clone();
 
-        let Some(diagnostics) = self.state.upsert_file(&uri, content) else {
+        let Some(ipath) = self.configs.get_include_paths(&uri) else {
+            return ControlFlow::Continue(());
+        };
+
+        let Some(diagnostics) = self.state.upsert_file(&uri, content, &ipath) else {
             return ControlFlow::Continue(());
         };
 
@@ -419,7 +426,7 @@ impl LanguageServer for ProtoLanguageServer {
             if let Ok(uri) = Url::from_file_path(&file.uri) {
                 // Safety: The uri is always a file type
                 let content = read_to_string(uri.to_file_path().unwrap()).unwrap_or_default();
-                self.state.upsert_content(&uri, content);
+                self.state.upsert_content(&uri, content, &[]);
             } else {
                 error!(uri=%file.uri, "failed parse uri");
             }
diff --git a/src/parser/diagnostics.rs b/src/parser/diagnostics.rs
index 3fa007b..d40a72b 100644
--- a/src/parser/diagnostics.rs
+++ b/src/parser/diagnostics.rs
@@ -1,13 +1,12 @@
-use async_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, PublishDiagnosticsParams, Range};
+use async_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Range};
 
 use crate::{nodekind::NodeKind, utils::ts_to_lsp_position};
 
 use super::ParsedTree;
 
 impl ParsedTree {
-    pub fn collect_parse_errors(&self) -> PublishDiagnosticsParams {
-        let diagnostics = self
-            .find_all_nodes(NodeKind::is_error)
+    pub fn collect_parse_diagnostics(&self) -> Vec<Diagnostic> {
+        self.find_all_nodes(NodeKind::is_error)
             .into_iter()
             .map(|n| Diagnostic {
                 range: Range {
@@ -19,12 +18,24 @@ impl ParsedTree {
                 message: "Syntax error".to_string(),
                 ..Default::default()
             })
-            .collect();
-        PublishDiagnosticsParams {
-            uri: self.uri.clone(),
-            diagnostics,
-            version: None,
-        }
+            .collect()
+    }
+
+    pub fn collect_import_diagnostics(
+        &self,
+        content: &[u8],
+        import: Vec<String>,
+    ) -> Vec<Diagnostic> {
+        self.get_import_path_range(content, import)
+            .into_iter()
+            .map(|r| Diagnostic {
+                range: r,
+                severity: Some(DiagnosticSeverity::ERROR),
+                source: Some(String::from("protols")),
+                message: "failed to find proto file".to_string(),
+                ..Default::default()
+            })
+            .collect()
     }
 }
 
@@ -42,12 +53,12 @@ mod test {
         let parsed = ProtoParser::new().parse(url.clone(), contents);
 
         assert!(parsed.is_some());
-        assert_yaml_snapshot!(parsed.unwrap().collect_parse_errors());
+        assert_yaml_snapshot!(parsed.unwrap().collect_parse_diagnostics());
 
         let contents = include_str!("input/test_collect_parse_error2.proto");
         let parsed = ProtoParser::new().parse(url.clone(), contents);
 
         assert!(parsed.is_some());
-        assert_yaml_snapshot!(parsed.unwrap().collect_parse_errors());
+        assert_yaml_snapshot!(parsed.unwrap().collect_parse_diagnostics());
     }
 }
diff --git a/src/parser/docsymbol.rs b/src/parser/docsymbol.rs
index 0d6bf04..9aa47b2 100644
--- a/src/parser/docsymbol.rs
+++ b/src/parser/docsymbol.rs
@@ -19,7 +19,7 @@ impl DocumentSymbolTreeBuilder {
     }
 
     pub(super) fn maybe_pop(&mut self, node: usize) {
-        let should_pop = self.stack.last().map_or(false, |(n, _)| *n == node);
+        let should_pop = self.stack.last().is_some_and(|(n, _)| *n == node);
         if should_pop {
             let (_, explored) = self.stack.pop().unwrap();
             if let Some((_, parent)) = self.stack.last_mut() {
diff --git a/src/parser/snapshots/protols__parser__diagnostics__test__collect_parse_error-2.snap b/src/parser/snapshots/protols__parser__diagnostics__test__collect_parse_error-2.snap
index 1dd7eb8..55f2810 100644
--- a/src/parser/snapshots/protols__parser__diagnostics__test__collect_parse_error-2.snap
+++ b/src/parser/snapshots/protols__parser__diagnostics__test__collect_parse_error-2.snap
@@ -1,16 +1,15 @@
 ---
 source: src/parser/diagnostics.rs
-expression: parsed.unwrap().collect_parse_errors()
+expression: parsed.unwrap().collect_parse_diagnostics()
+snapshot_kind: text
 ---
-uri: "file://foo/bar.proto"
-diagnostics:
-  - range:
-      start:
-        line: 6
-        character: 8
-      end:
-        line: 6
-        character: 19
-    severity: 1
-    source: protols
-    message: Syntax error
+- range:
+    start:
+      line: 6
+      character: 8
+    end:
+      line: 6
+      character: 19
+  severity: 1
+  source: protols
+  message: Syntax error
diff --git a/src/parser/snapshots/protols__parser__diagnostics__test__collect_parse_error.snap b/src/parser/snapshots/protols__parser__diagnostics__test__collect_parse_error.snap
index 3f41aea..d90c57d 100644
--- a/src/parser/snapshots/protols__parser__diagnostics__test__collect_parse_error.snap
+++ b/src/parser/snapshots/protols__parser__diagnostics__test__collect_parse_error.snap
@@ -1,6 +1,6 @@
 ---
 source: src/parser/diagnostics.rs
-expression: parsed.unwrap().collect_parse_errors()
+expression: parsed.unwrap().collect_parse_diagnostics()
+snapshot_kind: text
 ---
-uri: "file://foo/bar.proto"
-diagnostics: []
+[]
diff --git a/src/parser/tree.rs b/src/parser/tree.rs
index e91872b..8032065 100644
--- a/src/parser/tree.rs
+++ b/src/parser/tree.rs
@@ -1,7 +1,10 @@
-use async_lsp::lsp_types::Position;
+use async_lsp::lsp_types::{Position, Range};
 use tree_sitter::{Node, TreeCursor};
 
-use crate::{nodekind::NodeKind, utils::lsp_to_ts_point};
+use crate::{
+    nodekind::NodeKind,
+    utils::{lsp_to_ts_point, ts_to_lsp_position},
+};
 
 use super::ParsedTree;
 
@@ -133,15 +136,38 @@ impl ParsedTree {
             .first()
             .map(|n| n.utf8_text(content).expect("utf-8 parse error"))
     }
-    pub fn get_import_path<'a>(&self, content: &'a [u8]) -> Vec<&'a str> {
+
+    pub fn get_import_node(&self) -> Vec<Node> {
         self.find_all_nodes(NodeKind::is_import_path)
             .into_iter()
-            .filter_map(|n| {
-                n.child_by_field_name("path").map(|c| {
-                    c.utf8_text(content)
-                        .expect("utf-8 parse error")
-                        .trim_matches('"')
-                })
+            .filter_map(|n| n.child_by_field_name("path"))
+            .collect()
+    }
+
+    pub fn get_import_path<'a>(&self, content: &'a [u8]) -> Vec<&'a str> {
+        self.get_import_node()
+            .into_iter()
+            .map(|n| {
+                n.utf8_text(content)
+                    .expect("utf-8 parse error")
+                    .trim_matches('"')
+            })
+            .collect()
+    }
+
+    pub fn get_import_path_range(&self, content: &[u8], import: Vec<String>) -> Vec<Range> {
+        self.get_import_node()
+            .into_iter()
+            .filter(|n| {
+                let t = n
+                    .utf8_text(content)
+                    .expect("utf-8 parse error")
+                    .trim_matches('"');
+                import.iter().any(|i| i == t)
+            })
+            .map(|n| Range {
+                start: ts_to_lsp_position(&n.start_position()),
+                end: ts_to_lsp_position(&n.end_position()),
             })
             .collect()
     }
diff --git a/src/server.rs b/src/server.rs
index da69b6c..d5982fa 100644
--- a/src/server.rs
+++ b/src/server.rs
@@ -36,6 +36,7 @@ impl ProtoLanguageServer {
         ControlFlow::Continue(())
     }
 
+    #[allow(unused)]
     fn with_report_progress(&self, token: NumberOrString) -> Sender<ProgressParamsValue> {
         let (tx, rx) = mpsc::channel();
         let mut socket = self.client.clone();
diff --git a/src/state.rs b/src/state.rs
index 2a81dc9..9c076e0 100644
--- a/src/state.rs
+++ b/src/state.rs
@@ -1,18 +1,12 @@
 use std::{
     collections::HashMap,
-    fs::read_to_string,
-    sync::{mpsc::Sender, Arc, Mutex, MutexGuard, RwLock, RwLockWriteGuard},
-    thread,
+    path::PathBuf,
+    sync::{Arc, Mutex, MutexGuard, RwLock, RwLockWriteGuard},
 };
-use tracing::{error, info};
+use tracing::info;
 
-use async_lsp::lsp_types::{
-    CompletionItem, CompletionItemKind, ProgressParamsValue, PublishDiagnosticsParams, Url,
-    WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressEnd, WorkDoneProgressReport,
-    WorkspaceFolder,
-};
+use async_lsp::lsp_types::{CompletionItem, CompletionItemKind, PublishDiagnosticsParams, Url};
 use tree_sitter::Node;
-use walkdir::WalkDir;
 
 use crate::{
     nodekind::NodeKind,
@@ -87,94 +81,127 @@ impl ProtoLanguageState {
         }
     }
 
-    pub fn upsert_content(&mut self, uri: &Url, content: String) -> bool {
-        let parser = self.parser.lock().expect("poison");
-        let tree = self.trees.write().expect("poison");
-        let docs = self.documents.write().expect("poison");
-        Self::upsert_content_impl(parser, uri, content, docs, tree)
-    }
-
-    pub fn add_workspace_folder_async(
+    pub fn upsert_content(
         &mut self,
-        workspace: WorkspaceFolder,
-        tx: Sender<ProgressParamsValue>,
-    ) {
-        let parser = self.parser.clone();
-        let tree = self.trees.clone();
-        let docs = self.documents.clone();
-
-        let begin = ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(WorkDoneProgressBegin {
-            title: String::from("indexing"),
-            cancellable: Some(false),
-            percentage: Some(0),
-            ..Default::default()
-        }));
-
-        if let Err(e) = tx.send(begin) {
-            error!(error=%e, "failed to send work begin progress");
+        uri: &Url,
+        content: String,
+        ipath: &[PathBuf],
+    ) -> Vec<String> {
+        // Drop locks at end of block
+        {
+            let parser = self.parser.lock().expect("poison");
+            let tree = self.trees.write().expect("poison");
+            let docs = self.documents.write().expect("poison");
+            Self::upsert_content_impl(parser, uri, content.clone(), docs, tree);
         }
 
-        thread::spawn(move || {
-            let files: Vec<_> = WalkDir::new(workspace.uri.path())
-                .into_iter()
-                .filter_map(|e| e.ok())
-                .filter(|e| e.path().extension().is_some())
-                .filter(|e| e.path().extension().unwrap() == "proto")
-                .collect();
-
-            let total_files = files.len();
-            let mut current = 0;
-
-            for file in files.into_iter() {
-                let path = file.path();
-                if path.is_absolute() && path.is_file() {
-                    let Ok(content) = read_to_string(path) else {
-                        continue;
-                    };
-
-                    let Ok(uri) = Url::from_file_path(path) else {
-                        continue;
-                    };
-
-                    Self::upsert_content_impl(
-                        parser.lock().expect("poison"),
-                        &uri,
-                        content,
-                        docs.write().expect("poison"),
-                        tree.write().expect("poison"),
-                    );
-
-                    current += 1;
-
-                    let report = ProgressParamsValue::WorkDone(WorkDoneProgress::Report(
-                        WorkDoneProgressReport {
-                            cancellable: Some(false),
-                            message: Some(path.display().to_string()),
-                            percentage: Some((current * 100 / total_files) as u32),
-                        },
-                    ));
-
-                    if let Err(e) = tx.send(report) {
-                        error!(error=%e, "failed to send work report progress");
-                    }
-                }
-            }
-            let report =
-                ProgressParamsValue::WorkDone(WorkDoneProgress::End(WorkDoneProgressEnd {
-                    message: Some(String::from("completed")),
-                }));
-
-            info!(len = total_files, "workspace file parsing completed");
-            if let Err(e) = tx.send(report) {
-                error!(error=%e, "failed to send work completed result");
-            }
-        });
+        // After content is upserted, those imports which couldn't be located
+        // are flagged as import errors
+        self.get_tree(uri)
+            .map(|t| t.get_import_path(content.as_ref()))
+            .unwrap_or_default()
+            .into_iter()
+            .map(ToOwned::to_owned)
+            .filter(|import| !ipath.iter().any(|p| p.join(import.as_str()).exists()))
+            .collect()
     }
 
-    pub fn upsert_file(&mut self, uri: &Url, content: String) -> Option<PublishDiagnosticsParams> {
+    // #[allow(unused)]
+    // pub fn add_workspace_folder_async(
+    //     &mut self,
+    //     workspace: WorkspaceFolder,
+    //     tx: Sender<ProgressParamsValue>,
+    // ) {
+    //     let parser = self.parser.clone();
+    //     let tree = self.trees.clone();
+    //     let docs = self.documents.clone();
+    //
+    //     let begin = ProgressParamsValue::WorkDone(WorkDoneProgress::Begin(WorkDoneProgressBegin {
+    //         title: String::from("indexing"),
+    //         cancellable: Some(false),
+    //         percentage: Some(0),
+    //         ..Default::default()
+    //     }));
+    //
+    //     if let Err(e) = tx.send(begin) {
+    //         error!(error=%e, "failed to send work begin progress");
+    //     }
+    //
+    //     thread::spawn(move || {
+    //         let files: Vec<_> = WalkDir::new(workspace.uri.path())
+    //             .into_iter()
+    //             .filter_map(|e| e.ok())
+    //             .filter(|e| e.path().extension().is_some())
+    //             .filter(|e| e.path().extension().unwrap() == "proto")
+    //             .collect();
+    //
+    //         let total_files = files.len();
+    //         let mut current = 0;
+    //
+    //         for file in files.into_iter() {
+    //             let path = file.path();
+    //             if path.is_absolute() && path.is_file() {
+    //                 let Ok(content) = read_to_string(path) else {
+    //                     continue;
+    //                 };
+    //
+    //                 let Ok(uri) = Url::from_file_path(path) else {
+    //                     continue;
+    //                 };
+    //
+    //                 Self::upsert_content_impl(
+    //                     parser.lock().expect("poison"),
+    //                     &uri,
+    //                     content,
+    //                     docs.write().expect("poison"),
+    //                     tree.write().expect("poison"),
+    //                 );
+    //
+    //                 current += 1;
+    //
+    //                 let report = ProgressParamsValue::WorkDone(WorkDoneProgress::Report(
+    //                     WorkDoneProgressReport {
+    //                         cancellable: Some(false),
+    //                         message: Some(path.display().to_string()),
+    //                         percentage: Some((current * 100 / total_files) as u32),
+    //                     },
+    //                 ));
+    //
+    //                 if let Err(e) = tx.send(report) {
+    //                     error!(error=%e, "failed to send work report progress");
+    //                 }
+    //             }
+    //         }
+    //         let report =
+    //             ProgressParamsValue::WorkDone(WorkDoneProgress::End(WorkDoneProgressEnd {
+    //                 message: Some(String::from("completed")),
+    //             }));
+    //
+    //         info!(len = total_files, "workspace file parsing completed");
+    //         if let Err(e) = tx.send(report) {
+    //             error!(error=%e, "failed to send work completed result");
+    //         }
+    //     });
+    // }
+
+    pub fn upsert_file(
+        &mut self,
+        uri: &Url,
+        content: String,
+        ipath: &[PathBuf],
+    ) -> Option<PublishDiagnosticsParams> {
         info!(uri=%uri, "upserting file");
-        self.upsert_content(uri, content);
-        self.get_tree(uri).map(|tree| tree.collect_parse_errors())
+        let diag = self.upsert_content(uri, content.clone(), ipath);
+        self.get_tree(uri).map(|tree| {
+            let diag = tree.collect_import_diagnostics(content.as_ref(), diag);
+            let mut d = tree.collect_parse_diagnostics();
+            d.extend(diag);
+            PublishDiagnosticsParams {
+                uri: tree.uri.clone(),
+                diagnostics: d,
+                version: None,
+            }
+        })
     }
 
     pub fn delete_file(&mut self, uri: &Url) {
diff --git a/src/workspace/definition.rs b/src/workspace/definition.rs
index 2e723d4..020e992 100644
--- a/src/workspace/definition.rs
+++ b/src/workspace/definition.rs
@@ -19,12 +19,15 @@ impl ProtoLanguageState {
 
 #[cfg(test)]
 mod test {
+    use std::path::PathBuf;
+
     use insta::assert_yaml_snapshot;
 
     use crate::state::ProtoLanguageState;
 
     #[test]
     fn workspace_test_definition() {
+        let ipath = vec![PathBuf::from("src/workspace/input")];
         let a_uri = "file://input/a.proto".parse().unwrap();
         let b_uri = "file://input/b.proto".parse().unwrap();
         let c_uri = "file://input/c.proto".parse().unwrap();
@@ -34,9 +37,9 @@ mod test {
         let c = include_str!("input/c.proto");
 
         let mut state: ProtoLanguageState = ProtoLanguageState::new();
-        state.upsert_file(&a_uri, a.to_owned());
-        state.upsert_file(&b_uri, b.to_owned());
-        state.upsert_file(&c_uri, c.to_owned());
+        state.upsert_file(&a_uri, a.to_owned(), &ipath);
+        state.upsert_file(&b_uri, b.to_owned(), &ipath);
+        state.upsert_file(&c_uri, c.to_owned(), &ipath);
 
         assert_yaml_snapshot!(state.definition("com.workspace", "Author"));
         assert_yaml_snapshot!(state.definition("com.workspace", "Author.Address"));
diff --git a/src/workspace/hover.rs b/src/workspace/hover.rs
index 01e6064..8fa6c18 100644
--- a/src/workspace/hover.rs
+++ b/src/workspace/hover.rs
@@ -630,12 +630,15 @@ impl ProtoLanguageState {
 
 #[cfg(test)]
 mod test {
+    use std::path::PathBuf;
+
     use insta::assert_yaml_snapshot;
 
     use crate::state::ProtoLanguageState;
 
     #[test]
     fn workspace_test_hover() {
+        let ipath = vec![PathBuf::from("src/workspace/input")];
         let a_uri = "file://input/a.proto".parse().unwrap();
         let b_uri = "file://input/b.proto".parse().unwrap();
         let c_uri = "file://input/c.proto".parse().unwrap();
@@ -645,9 +648,9 @@ mod test {
         let c = include_str!("input/c.proto");
 
         let mut state: ProtoLanguageState = ProtoLanguageState::new();
-        state.upsert_file(&a_uri, a.to_owned());
-        state.upsert_file(&b_uri, b.to_owned());
-        state.upsert_file(&c_uri, c.to_owned());
+        state.upsert_file(&a_uri, a.to_owned(), &ipath);
+        state.upsert_file(&b_uri, b.to_owned(), &ipath);
+        state.upsert_file(&c_uri, c.to_owned(), &ipath);
 
         assert_yaml_snapshot!(state.hover("com.workspace", "google.protobuf.Any"));
         assert_yaml_snapshot!(state.hover("com.workspace", "Author"));
diff --git a/src/workspace/rename.rs b/src/workspace/rename.rs
index 46bd91d..d248468 100644
--- a/src/workspace/rename.rs
+++ b/src/workspace/rename.rs
@@ -61,12 +61,15 @@ impl ProtoLanguageState {
 
 #[cfg(test)]
 mod test {
+    use std::path::PathBuf;
+
     use insta::assert_yaml_snapshot;
 
     use crate::state::ProtoLanguageState;
 
     #[test]
     fn test_rename() {
+        let ipath = vec![PathBuf::from("src/workspace/input")];
         let a_uri = "file://input/a.proto".parse().unwrap();
         let b_uri = "file://input/b.proto".parse().unwrap();
         let c_uri = "file://input/c.proto".parse().unwrap();
@@ -76,9 +79,9 @@ mod test {
         let c = include_str!("input/c.proto");
 
         let mut state: ProtoLanguageState = ProtoLanguageState::new();
-        state.upsert_file(&a_uri, a.to_owned());
-        state.upsert_file(&b_uri, b.to_owned());
-        state.upsert_file(&c_uri, c.to_owned());
+        state.upsert_file(&a_uri, a.to_owned(), &ipath);
+        state.upsert_file(&b_uri, b.to_owned(), &ipath);
+        state.upsert_file(&c_uri, c.to_owned(), &ipath);
 
         assert_yaml_snapshot!(state.rename_fields("com.workspace", "Author", "Writer"));
         assert_yaml_snapshot!(state.rename_fields(
@@ -91,6 +94,7 @@ mod test {
 
     #[test]
     fn test_reference() {
+        let ipath = vec![PathBuf::from("src/workspace/input")];
         let a_uri = "file://input/a.proto".parse().unwrap();
         let b_uri = "file://input/b.proto".parse().unwrap();
         let c_uri = "file://input/c.proto".parse().unwrap();
@@ -100,9 +104,9 @@ mod test {
         let c = include_str!("input/c.proto");
 
         let mut state: ProtoLanguageState = ProtoLanguageState::new();
-        state.upsert_file(&a_uri, a.to_owned());
-        state.upsert_file(&b_uri, b.to_owned());
-        state.upsert_file(&c_uri, c.to_owned());
+        state.upsert_file(&a_uri, a.to_owned(), &ipath);
+        state.upsert_file(&b_uri, b.to_owned(), &ipath);
+        state.upsert_file(&c_uri, c.to_owned(), &ipath);
 
         assert_yaml_snapshot!(state.reference_fields("com.workspace", "Author"));
         assert_yaml_snapshot!(state.reference_fields("com.workspace", "Author.Address"));