Remove the state

Kirill Bulatov 2020-12-04 10:02:22 +02:00
parent 74c3bbacc9
commit 93bc009a59
7 changed files with 36 additions and 106 deletions

View file

@@ -4,10 +4,10 @@ use std::fmt;
 use hir::{Documentation, ModPath, Mutability};
 use ide_db::helpers::{
-    insert_use::{self, ImportScope, ImportScopePtr, MergeBehaviour},
+    insert_use::{self, ImportScope, MergeBehaviour},
     mod_path_to_ast,
 };
-use syntax::{algo, SyntaxNode, TextRange};
+use syntax::{algo, TextRange};
 use text_edit::TextEdit;

 use crate::config::SnippetCap;
@@ -275,32 +275,8 @@ pub struct ImportEdit {
     pub merge_behaviour: Option<MergeBehaviour>,
 }

-#[derive(Debug, Clone)]
-pub struct ImportEditPtr {
-    pub import_path: ModPath,
-    pub import_scope: ImportScopePtr,
-    pub merge_behaviour: Option<MergeBehaviour>,
-}
-
-impl ImportEditPtr {
-    pub fn into_import_edit(self, root: &SyntaxNode) -> Option<ImportEdit> {
-        Some(ImportEdit {
-            import_path: self.import_path,
-            import_scope: self.import_scope.into_scope(root)?,
-            merge_behaviour: self.merge_behaviour,
-        })
-    }
-}
-
 impl ImportEdit {
-    pub fn get_edit_ptr(&self) -> ImportEditPtr {
-        ImportEditPtr {
-            import_path: self.import_path.clone(),
-            import_scope: self.import_scope.get_ptr(),
-            merge_behaviour: self.merge_behaviour,
-        }
-    }
-
+    // TODO kb remove this at all now, since it's used only once?
     /// Attempts to insert the import to the given scope, producing a text edit.
     /// May return no edit in edge cases, such as scope already containing the import.
     pub fn to_text_edit(&self) -> Option<TextEdit> {

View file

@@ -18,10 +18,7 @@ use crate::{completions::Completions, context::CompletionContext, item::Completi
 pub use crate::{
     config::{CompletionConfig, CompletionResolveCapability},
-    item::{
-        CompletionItem, CompletionItemKind, CompletionScore, ImportEdit, ImportEditPtr,
-        InsertTextFormat,
-    },
+    item::{CompletionItem, CompletionItemKind, CompletionScore, ImportEdit, InsertTextFormat},
 };

 //FIXME: split the following feature into fine-grained features.

View file

@@ -81,7 +81,7 @@ pub use crate::{
 };
 pub use completion::{
     CompletionConfig, CompletionItem, CompletionItemKind, CompletionResolveCapability,
-    CompletionScore, ImportEdit, ImportEditPtr, InsertTextFormat,
+    CompletionScore, ImportEdit, InsertTextFormat,
 };
 pub use ide_db::{
     call_info::CallInfo,

View file

@@ -11,7 +11,7 @@ use syntax::{
         edit::{AstNodeEdit, IndentLevel},
         make, AstNode, PathSegmentKind, VisibilityOwner,
     },
-    AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken,
+    AstToken, InsertPosition, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxToken,
 };

 use test_utils::mark;
@@ -21,36 +21,6 @@ pub enum ImportScope {
     Module(ast::ItemList),
 }

-impl ImportScope {
-    pub fn get_ptr(&self) -> ImportScopePtr {
-        match self {
-            ImportScope::File(file) => ImportScopePtr::File(SyntaxNodePtr::new(file.syntax())),
-            ImportScope::Module(module) => {
-                ImportScopePtr::Module(SyntaxNodePtr::new(module.syntax()))
-            }
-        }
-    }
-}
-
-#[derive(Debug, Clone)]
-pub enum ImportScopePtr {
-    File(SyntaxNodePtr),
-    Module(SyntaxNodePtr),
-}
-
-impl ImportScopePtr {
-    pub fn into_scope(self, root: &SyntaxNode) -> Option<ImportScope> {
-        Some(match self {
-            ImportScopePtr::File(file_ptr) => {
-                ImportScope::File(ast::SourceFile::cast(file_ptr.to_node(root))?)
-            }
-            ImportScopePtr::Module(module_ptr) => {
-                ImportScope::File(ast::SourceFile::cast(module_ptr.to_node(root))?)
-            }
-        })
-    }
-}
-
 impl ImportScope {
     pub fn from(syntax: SyntaxNode) -> Option<Self> {
         if let Some(module) = ast::Module::cast(syntax.clone()) {

View file

@@ -7,7 +7,7 @@ use std::{sync::Arc, time::Instant};
 use crossbeam_channel::{unbounded, Receiver, Sender};
 use flycheck::FlycheckHandle;
-use ide::{Analysis, AnalysisHost, Change, FileId, ImportEditPtr};
+use ide::{Analysis, AnalysisHost, Change, FileId};
 use ide_db::base_db::{CrateId, VfsPath};
 use lsp_types::{SemanticTokens, Url};
 use parking_lot::{Mutex, RwLock};
@@ -69,7 +69,6 @@ pub(crate) struct GlobalState {
     pub(crate) config: Config,
     pub(crate) analysis_host: AnalysisHost,
     pub(crate) diagnostics: DiagnosticCollection,
-    pub(crate) completion_resolve_data: Arc<FxHashMap<usize, ImportEditPtr>>,
     pub(crate) mem_docs: FxHashMap<VfsPath, DocumentData>,
     pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
@@ -91,7 +90,6 @@ pub(crate) struct GlobalStateSnapshot {
     pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
     pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
-    pub(crate) completion_resolve_data: Arc<FxHashMap<usize, ImportEditPtr>>,
 }

 impl GlobalState {
@@ -123,7 +121,6 @@ impl GlobalState {
             config,
             analysis_host,
             diagnostics: Default::default(),
-            completion_resolve_data: Arc::new(FxHashMap::default()),
             mem_docs: FxHashMap::default(),
             semantic_tokens_cache: Arc::new(Default::default()),
             vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
@@ -194,7 +191,6 @@ impl GlobalState {
             check_fixes: Arc::clone(&self.diagnostics.check_fixes),
             mem_docs: self.mem_docs.clone(),
             semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
-            completion_resolve_data: Arc::clone(&self.completion_resolve_data),
         }
     }

View file

@@ -5,7 +5,6 @@
 use std::{
     io::Write as _,
     process::{self, Stdio},
-    sync::Arc,
 };

 use ide::{
@@ -26,7 +25,6 @@ use lsp_types::{
     SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
 };
 use project_model::TargetKind;
-use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 use serde_json::to_value;
 use stdx::{format_to, split_once};
@@ -539,11 +537,10 @@ pub(crate) fn handle_runnables(
 }

 pub(crate) fn handle_completion(
-    global_state: &mut GlobalState,
+    snap: GlobalStateSnapshot,
     params: lsp_types::CompletionParams,
 ) -> Result<Option<lsp_types::CompletionResponse>> {
     let _p = profile::span("handle_completion");
-    let snap = global_state.snapshot();
     let text_document_url = params.text_document_position.text_document.uri.clone();
     let position = from_proto::file_position(&snap, params.text_document_position)?;
     let completion_triggered_after_single_colon = {
@@ -574,7 +571,6 @@ pub(crate) fn handle_completion(
     };
     let line_index = snap.analysis.file_line_index(position.file_id)?;
     let line_endings = snap.file_line_endings(position.file_id);
-    let mut completion_resolve_data = FxHashMap::default();

     let items: Vec<CompletionItem> = items
         .into_iter()
@@ -584,16 +580,15 @@
                 to_proto::completion_item(&line_index, line_endings, item.clone());

             if snap.config.completion.resolve_additional_edits_lazily() {
+                // TODO kb add resolve data somehow here
                 if let Some(import_edit) = item.import_to_add() {
-                    completion_resolve_data.insert(item_index, import_edit.get_edit_ptr());
-                    let data = serde_json::to_value(&CompletionData {
-                        document_url: text_document_url.clone(),
-                        import_id: item_index,
-                    })
-                    .expect(&format!("Should be able to serialize usize value {}", item_index));
+                    // let data = serde_json::to_value(&CompletionData {
+                    //     document_url: text_document_url.clone(),
+                    //     import_id: item_index,
+                    // })
+                    // .expect(&format!("Should be able to serialize usize value {}", item_index));

                     for new_item in &mut new_completion_items {
-                        new_item.data = Some(data.clone());
+                        // new_item.data = Some(data.clone());
                     }
                 }
             }
@@ -602,8 +597,6 @@ pub(crate) fn handle_completion(
         })
         .collect();

-    global_state.completion_resolve_data = Arc::new(completion_resolve_data);
-
     let completion_list = lsp_types::CompletionList { is_incomplete: true, items };
     Ok(Some(completion_list.into()))
 }
@@ -624,33 +617,31 @@ pub(crate) fn handle_completion_resolve(
         return Ok(original_completion);
     }

-    let (import_edit_ptr, document_url) = match original_completion
+    let resolve_data = match original_completion
         .data
-        .as_ref()
-        .map(|data| serde_json::from_value::<CompletionData>(data.clone()))
+        .take()
+        .map(|data| serde_json::from_value::<CompletionResolveData>(data))
         .transpose()?
-        .and_then(|data| {
-            let import_edit_ptr = snap.completion_resolve_data.get(&data.import_id).cloned();
-            Some((import_edit_ptr, data.document_url))
-        }) {
+    {
         Some(data) => data,
         None => return Ok(original_completion),
     };

-    let file_id = from_proto::file_id(&snap, &document_url)?;
-    let root = snap.analysis.parse(file_id)?;
+    // TODO kb get the resolve data and somehow reparse the whole ast again?
+    // let file_id = from_proto::file_id(&snap, &document_url)?;
+    // let root = snap.analysis.parse(file_id)?;

-    if let Some(import_to_add) =
-        import_edit_ptr.and_then(|import_edit| import_edit.into_import_edit(root.syntax()))
-    {
-        // FIXME actually add all additional edits here? see `to_proto::completion_item` for more
-        append_import_edits(
-            &mut original_completion,
-            &import_to_add,
-            snap.analysis.file_line_index(file_id)?.as_ref(),
-            snap.file_line_endings(file_id),
-        );
-    }
+    // if let Some(import_to_add) =
+    //     import_edit_ptr.and_then(|import_edit| import_edit.into_import_edit(root.syntax()))
+    // {
+    //     // FIXME actually add all additional edits here? see `to_proto::completion_item` for more
+    //     append_import_edits(
+    //         &mut original_completion,
+    //         &import_to_add,
+    //         snap.analysis.file_line_index(file_id)?.as_ref(),
+    //         snap.file_line_endings(file_id),
+    //     );
+    // }

     Ok(original_completion)
 }
@@ -1614,7 +1605,7 @@ fn should_skip_target(runnable: &Runnable, cargo_spec: Option<&CargoTargetSpec>)
 }

 #[derive(Debug, Serialize, Deserialize)]
-struct CompletionData {
+struct CompletionResolveData {
     document_url: Url,
     import_id: usize,
 }
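
The `TODO kb` notes and the commented-out blocks above gesture at a stateless design: serialize the resolve data into the completion item's LSP `data` field when the completion list is built, then deserialize it inside `completionItem/resolve`, instead of looking it up in a map stored on `GlobalState`. The snippet below is only a minimal, self-contained sketch of that serde round trip under that assumption; it is not code from this commit, `ResolveData` is a stand-in for the `CompletionResolveData` struct at the end of this file, and a plain `String` replaces its `Url` field.

use serde::{Deserialize, Serialize};

// Stand-in for `CompletionResolveData`; a String replaces `lsp_types::Url`
// so the sketch needs only serde and serde_json.
#[derive(Debug, Serialize, Deserialize)]
struct ResolveData {
    document_url: String,
    import_id: usize,
}

fn main() -> Result<(), serde_json::Error> {
    // While building the completion response: store the data in the item's
    // opaque LSP `data` field instead of in server-side state.
    let data = serde_json::to_value(&ResolveData {
        document_url: "file:///project/src/lib.rs".to_string(),
        import_id: 3,
    })?;

    // Inside `completionItem/resolve`: read the value back out of the item
    // and recompute the import edit from it.
    let resolve_data: ResolveData = serde_json::from_value(data)?;
    assert_eq!(resolve_data.import_id, 3);
    Ok(())
}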

View file

@@ -436,8 +436,6 @@ impl GlobalState {
                 handlers::handle_matching_brace(s.snapshot(), p)
             })?
             .on_sync::<lsp_ext::MemoryUsage>(|s, p| handlers::handle_memory_usage(s, p))?
-            .on_sync::<lsp_types::request::Completion>(handlers::handle_completion)?
-            .on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
             .on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
             .on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
             .on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro)
@@ -455,6 +453,8 @@ impl GlobalState {
             .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)
             .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)
             .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
+            .on::<lsp_types::request::Completion>(handlers::handle_completion)
+            .on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
             .on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)
             .on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)
             .on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)
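
The last two hunks move `Completion` and `ResolveCompletionItem` from `.on_sync` to `.on`. In rust-analyzer's dispatcher the practical difference is that `on_sync` handlers run on the main loop and may take `&mut GlobalState`, while `on` handlers run on a worker thread against a read-only `GlobalStateSnapshot`; dropping the `&mut GlobalState` parameter from `handle_completion` earlier in this commit is what makes the re-registration possible. The toy dispatcher below only illustrates that signature difference; every name and type in it is invented for the illustration and does not mirror the real `RequestDispatcher`.

use std::thread;
use std::time::Duration;

// Invented stand-ins for GlobalState and GlobalStateSnapshot.
struct State {
    counter: u64,
}

#[derive(Clone)]
struct Snapshot {
    counter: u64,
}

impl State {
    fn snapshot(&self) -> Snapshot {
        Snapshot { counter: self.counter }
    }
}

struct Dispatcher {
    state: State,
}

impl Dispatcher {
    // `on_sync`-style: the handler borrows the state mutably, so it has to run
    // on the loop that owns the state.
    fn on_sync(&mut self, handler: impl FnOnce(&mut State)) -> &mut Self {
        handler(&mut self.state);
        self
    }

    // `on`-style: the handler only needs a snapshot, so it can be shipped off
    // to a worker thread and run without blocking the main loop.
    fn on(&mut self, handler: impl FnOnce(Snapshot) + Send + 'static) -> &mut Self {
        let snap = self.state.snapshot();
        thread::spawn(move || handler(snap));
        self
    }
}

fn main() {
    let mut dispatcher = Dispatcher { state: State { counter: 0 } };
    dispatcher
        .on_sync(|state| state.counter += 1)
        .on(|snap| println!("completion computed against counter {}", snap.counter));
    // Give the worker thread a moment to finish before the process exits
    // (toy example only; a real server would join or channel the result).
    thread::sleep(Duration::from_millis(50));
}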