From ee69fddf02b2c8d4b73f9412831f5fcc4fa931a1 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Mon, 15 Oct 2018 21:56:01 +0300
Subject: [PATCH] Drop dead code

---
 crates/ra_analysis/src/{db/mod.rs => db.rs} |  86 -----------
 crates/ra_analysis/src/db/imp.rs            | 153 ---------------------
 crates/ra_analysis/src/module_map.rs        | 123 -------------------
 crates/ra_analysis/src/queries.rs           |  39 ------
 4 files changed, 401 deletions(-)
 rename crates/ra_analysis/src/{db/mod.rs => db.rs} (53%)
 delete mode 100644 crates/ra_analysis/src/db/imp.rs
 delete mode 100644 crates/ra_analysis/src/queries.rs

diff --git a/crates/ra_analysis/src/db/mod.rs b/crates/ra_analysis/src/db.rs
similarity index 53%
rename from crates/ra_analysis/src/db/mod.rs
rename to crates/ra_analysis/src/db.rs
index 081510daa2b..0773edcc120 100644
--- a/crates/ra_analysis/src/db/mod.rs
+++ b/crates/ra_analysis/src/db.rs
@@ -101,89 +101,3 @@ fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<SymbolIndex> {
     let syntax = db.file_syntax(file_id);
     Arc::new(SymbolIndex::for_file(file_id, syntax))
 }
-
-// mod imp;
-
-// use std::{
-//     sync::Arc,
-// };
-// use im;
-// use salsa;
-// use {FileId, imp::FileResolverImp};
-
-// #[derive(Debug, Default, Clone)]
-// pub(crate) struct State {
-//     pub(crate) file_map: im::HashMap<FileId, Arc<String>>,
-//     pub(crate) file_resolver: FileResolverImp
-// }
-
-// #[derive(Debug)]
-// pub(crate) struct Db {
-//     imp: imp::Db,
-// }
-
-// #[derive(Clone, Copy)]
-// pub(crate) struct QueryCtx<'a> {
-//     imp: &'a salsa::QueryCtx<State, Data>,
-// }
-
-// pub(crate) struct Query<T, R>(pub(crate) u16, pub(crate) fn(QueryCtx, &T) -> R);
-
-// pub(crate) struct QueryRegistry {
-//     imp: imp::QueryRegistry,
-// }
-
-// impl Default for Db {
-//     fn default() -> Db {
-//         Db::new()
-//     }
-// }
-
-// impl Db {
-//     pub(crate) fn new() -> Db {
-//         let reg = QueryRegistry::new();
-//         Db { imp: imp::Db::new(reg.imp) }
-//     }
-//     pub(crate) fn state(&self) -> &State {
-//         self.imp.imp.ground_data()
-//     }
-//     pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
-//         Db { imp: self.imp.with_changes(new_state, changed_files, resolver_changed) }
-//     }
-//     pub(crate) fn make_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> R {
-//         let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
-//         f(ctx)
-//     }
-//     #[allow(unused)]
-//     pub(crate) fn trace_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> (R, Vec<&'static str>) {
-//         let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
-//         let res = f(ctx);
-//         let trace = self.imp.extract_trace(ctx.imp);
-//         (res, trace)
-//     }
-// }
-
-// impl<'a> QueryCtx<'a> {
-//     pub(crate) fn get<Q: imp::EvalQuery>(&self, q: Q, params: Q::Params) -> Arc<Q::Output> {
-//         q.get(self, params)
-//     }
-// }
-
-// pub(crate) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
-//     imp::file_text(ctx, file_id)
-// }
-
-// pub(crate) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
-//     imp::file_set(ctx)
-// }
-// impl QueryRegistry {
-//     fn new() -> QueryRegistry {
-//         let mut reg = QueryRegistry { imp: imp::QueryRegistry::new() };
-//         ::queries::register_queries(&mut reg);
-//         ::module_map::register_queries(&mut reg);
-//         reg
-//     }
-//     pub(crate) fn add<Q: imp::EvalQuery>(&mut self, q: Q, name: &'static str) {
-//         self.imp.add(q, name)
-//     }
-// }
diff --git a/crates/ra_analysis/src/db/imp.rs b/crates/ra_analysis/src/db/imp.rs
deleted file mode 100644
index 7669b618455..00000000000
--- a/crates/ra_analysis/src/db/imp.rs
+++ /dev/null
@@ -1,153 +0,0 @@
-use std::{
-    sync::Arc,
-    any::Any,
-    hash::{Hash, Hasher},
-    collections::hash_map::{DefaultHasher},
-    iter,
-};
-use rustc_hash::FxHashMap;
-use salsa;
-use crate::{FileId, imp::FileResolverImp};
-use super::{State, Query, QueryCtx};
-
-pub(super) type Data = Arc<dyn Any + Send + Sync + 'static>;
-
-#[derive(Debug)]
-pub(super) struct Db {
-    names: Arc<FxHashMap<salsa::QueryTypeId, &'static str>>,
-    pub(super) imp: salsa::Db<State, Data>,
-}
-
-impl Db {
-    pub(super) fn new(mut reg: QueryRegistry) -> Db {
-        let config = reg.config.take().unwrap();
-        Db {
-            names: Arc::new(reg.names),
-            imp: salsa::Db::new(config, State::default())
-        }
-    }
-    pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
-        let names = self.names.clone();
-        let mut invalidations = salsa::Invalidations::new();
-        invalidations.invalidate(FILE_TEXT, changed_files.iter().map(hash).map(salsa::InputFingerprint));
-        if resolver_changed {
-            invalidations.invalidate(FILE_SET, iter::once(salsa::InputFingerprint(hash(&()))));
-        } else {
-            invalidations.invalidate(FILE_SET, iter::empty());
-        }
-        let imp = self.imp.with_ground_data(
-            new_state,
-            invalidations,
-        );
-        Db { names, imp }
-    }
-    pub(super) fn extract_trace(&self, ctx: &salsa::QueryCtx<State, Data>) -> Vec<&'static str> {
-        ctx.trace().into_iter().map(|it| self.names[&it]).collect()
-    }
-}
-
-pub(crate) trait EvalQuery {
-    type Params;
-    type Output;
-    fn query_type(&self) -> salsa::QueryTypeId;
-    fn f(&self) -> salsa::QueryFn<State, Data>;
-    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output>;
-}
-
-impl<T, R> EvalQuery for Query<T, R>
-where
-    T: Hash + Send + Sync + 'static,
-    R: Hash + Send + Sync + 'static,
-{
-    type Params = T;
-    type Output = R;
-    fn query_type(&self) -> salsa::QueryTypeId {
-        salsa::QueryTypeId(self.0)
-    }
-    fn f(&self) -> salsa::QueryFn<State, Data> {
-        let f = self.1;
-        Box::new(move |ctx, data| {
-            let ctx = QueryCtx { imp: ctx };
-            let data: &T = data.downcast_ref().unwrap();
-            let res = f(ctx, data);
-            let h = hash(&res);
-            (Arc::new(res), salsa::OutputFingerprint(h))
-        })
-    }
-    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output> {
-        let query_id = salsa::QueryId(
-            self.query_type(),
-            salsa::InputFingerprint(hash(&params)),
-        );
-        let res = ctx.imp.get(query_id, Arc::new(params));
-        res.downcast().unwrap()
-    }
-}
-
-pub(super) struct QueryRegistry {
-    config: Option<salsa::QueryConfig<State, Data>>,
-    names: FxHashMap<salsa::QueryTypeId, &'static str>,
-}
-
-impl QueryRegistry {
-    pub(super) fn new() -> QueryRegistry {
-        let mut config = salsa::QueryConfig::<State, Data>::new();
-        config = config.with_ground_query(
-            FILE_TEXT, Box::new(|state, params| {
-                let file_id: &FileId = params.downcast_ref().unwrap();
-                let res = state.file_map[file_id].clone();
-                let fingerprint = salsa::OutputFingerprint(hash(&res));
-                (res, fingerprint)
-            })
-        );
-        config = config.with_ground_query(
-            FILE_SET, Box::new(|state, _params| {
-                let file_ids: Vec<FileId> = state.file_map.keys().cloned().collect();
-                let hash = hash(&file_ids);
-                let file_resolver = state.file_resolver.clone();
-                let res = (file_ids, file_resolver);
-                let fingerprint = salsa::OutputFingerprint(hash);
-                (Arc::new(res), fingerprint)
-            })
-        );
-        let mut names = FxHashMap::default();
-        names.insert(FILE_TEXT, "FILE_TEXT");
-        names.insert(FILE_SET, "FILE_SET");
-        QueryRegistry { config: Some(config), names }
-    }
-    pub(super) fn add<Q: EvalQuery>(&mut self, q: Q, name: &'static str) {
-        let id = q.query_type();
-        let prev = self.names.insert(id, name);
-        assert!(prev.is_none(), "duplicate query: {:?}", id);
-        let config = self.config.take().unwrap();
-        let config = config.with_query(id, q.f());
-        self.config= Some(config);
-    }
-}
-
-fn hash<T: Hash>(x: &T) -> u64 {
-    let mut hasher = DefaultHasher::new();
-    x.hash(&mut hasher);
-    hasher.finish()
-}
-
-const FILE_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(0);
-pub(super) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
-    let query_id = salsa::QueryId(
-        FILE_TEXT,
-        salsa::InputFingerprint(hash(&file_id)),
-    );
-    let res = ctx.imp.get(query_id, Arc::new(file_id));
-    res.downcast().unwrap()
-}
-
-const FILE_SET: salsa::QueryTypeId = salsa::QueryTypeId(1);
-pub(super) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
-    let query_id = salsa::QueryId(
-        FILE_SET,
-        salsa::InputFingerprint(hash(&())),
-    );
-    let res = ctx.imp.get(query_id, Arc::new(()));
-    res.downcast().unwrap()
-}
-
diff --git a/crates/ra_analysis/src/module_map.rs b/crates/ra_analysis/src/module_map.rs
index 95a770ae720..c1799e3d4a8 100644
--- a/crates/ra_analysis/src/module_map.rs
+++ b/crates/ra_analysis/src/module_map.rs
@@ -32,126 +32,3 @@ fn module_tree(db: &impl ModulesDatabase, (): ()) -> Arc<ModuleTreeDescriptor> {
     let res = ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.resolver);
     Arc::new(res)
 }
-
-// #[cfg(test)]
-// mod tests {
-//     use std::collections::HashMap;
-//     use im;
-//     use relative_path::{RelativePath, RelativePathBuf};
-//     use {
-//         db::{Db},
-//         imp::FileResolverImp,
-//         FileId, FileResolver,
-//     };
-//     use super::*;
-
-//     #[derive(Debug)]
-//     struct FileMap(im::HashMap<FileId, RelativePathBuf>);
-
-//     impl FileResolver for FileMap {
-//         fn file_stem(&self, file_id: FileId) -> String {
-//             self.0[&file_id].file_stem().unwrap().to_string()
-//         }
-//         fn resolve(&self, file_id: FileId, rel: &RelativePath) -> Option<FileId> {
-//             let path = self.0[&file_id].join(rel).normalize();
-//             self.0.iter()
-//                 .filter_map(|&(id, ref p)| Some(id).filter(|_| p == &path))
-//                 .next()
-//         }
-//     }
-
-//     struct Fixture {
-//         next_file_id: u32,
-//         fm: im::HashMap<FileId, RelativePathBuf>,
-//         db: Db,
-//     }
-
-//     impl Fixture {
-//         fn new() -> Fixture {
-//             Fixture {
-//                 next_file_id: 1,
-//                 fm: im::HashMap::new(),
-//                 db: Db::new(),
-//             }
-//         }
-//         fn add_file(&mut self, path: &str, text: &str) -> FileId {
-//             assert!(path.starts_with("/"));
-//             let file_id = FileId(self.next_file_id);
-//             self.next_file_id += 1;
-//             self.fm.insert(file_id, RelativePathBuf::from(&path[1..]));
-//             let mut new_state = self.db.state().clone();
-//             new_state.file_map.insert(file_id, Arc::new(text.to_string()));
-//             new_state.file_resolver = FileResolverImp::new(
-//                 Arc::new(FileMap(self.fm.clone()))
-//             );
-//             self.db = self.db.with_changes(new_state, &[file_id], true);
-//             file_id
-//         }
-//         fn remove_file(&mut self, file_id: FileId) {
-//             self.fm.remove(&file_id);
-//             let mut new_state = self.db.state().clone();
-//             new_state.file_map.remove(&file_id);
-//             new_state.file_resolver = FileResolverImp::new(
-//                 Arc::new(FileMap(self.fm.clone()))
-//             );
-//             self.db = self.db.with_changes(new_state, &[file_id], true);
-//         }
-//         fn change_file(&mut self, file_id: FileId, new_text: &str) {
-//             let mut new_state = self.db.state().clone();
-//             new_state.file_map.insert(file_id, Arc::new(new_text.to_string()));
-//             self.db = self.db.with_changes(new_state, &[file_id], false);
-//         }
-//         fn check_parent_modules(
-//             &self,
-//             file_id: FileId,
-//             expected: &[FileId],
-//             queries: &[(&'static str, u64)]
-//         ) {
-//             let (tree, events) = self.db.trace_query(|ctx| module_tree(ctx));
-//             let actual = tree.parent_modules(file_id)
-//                 .into_iter()
-//                 .map(|link| link.owner(&tree))
-//                 .collect::<Vec<_>>();
-//             assert_eq!(actual.as_slice(), expected);
-//             let mut counts = HashMap::new();
-//             events.into_iter()
-//                 .for_each(|event| *counts.entry(event).or_insert(0) += 1);
-//             for &(query_id, expected_count) in queries.iter() {
-//                 let actual_count = *counts.get(&query_id).unwrap_or(&0);
-//                 assert_eq!(
-//                     actual_count,
-//                     expected_count,
-//                     "counts for {} differ",
-//                     query_id,
-//                 )
-//             }
-
-//         }
-//     }
-
-//     #[test]
-//     fn test_parent_module() {
-//         let mut f = Fixture::new();
-//         let foo = f.add_file("/foo.rs", "");
-//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
-
-//         let lib = f.add_file("/lib.rs", "mod foo;");
-//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
-//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 0)]);
-
-//         f.change_file(lib, "");
-//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
-
-//         f.change_file(lib, "mod foo;");
-//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
-
-//         f.change_file(lib, "mod bar;");
-//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
-
-//         f.change_file(lib, "mod foo;");
-//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
-
-//         f.remove_file(lib);
-//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 0)]);
-//     }
-// }
diff --git a/crates/ra_analysis/src/queries.rs b/crates/ra_analysis/src/queries.rs
deleted file mode 100644
index 613bf1e6103..00000000000
--- a/crates/ra_analysis/src/queries.rs
+++ /dev/null
@@ -1,39 +0,0 @@
-use std::sync::Arc;
-use ra_syntax::File;
-use ra_editor::LineIndex;
-use crate::{
-    FileId,
-    db::{Query, QueryCtx, QueryRegistry},
-    symbol_index::SymbolIndex,
-};
-
-pub(crate) use crate::db::{file_text, file_set};
-
-pub(crate) fn file_syntax(ctx: QueryCtx, file_id: FileId) -> File {
-    (&*ctx.get(FILE_SYNTAX, file_id)).clone()
-}
-pub(crate) fn file_lines(ctx: QueryCtx, file_id: FileId) -> Arc<LineIndex> {
-    ctx.get(FILE_LINES, file_id)
-}
-pub(crate) fn file_symbols(ctx: QueryCtx, file_id: FileId) -> Arc<SymbolIndex> {
-    ctx.get(FILE_SYMBOLS, file_id)
-}
-
-const FILE_SYNTAX: Query<FileId, File> = Query(16, |ctx, file_id: &FileId| {
-    let text = file_text(ctx, *file_id);
-    File::parse(&*text)
-});
-const FILE_LINES: Query<FileId, LineIndex> = Query(17, |ctx, file_id: &FileId| {
-    let text = file_text(ctx, *file_id);
-    LineIndex::new(&*text)
-});
-const FILE_SYMBOLS: Query<FileId, SymbolIndex> = Query(18, |ctx, file_id: &FileId| {
-    let syntax = file_syntax(ctx, *file_id);
-    SymbolIndex::for_file(*file_id, syntax)
-});
-
-pub(crate) fn register_queries(reg: &mut QueryRegistry) {
-    reg.add(FILE_SYNTAX, "FILE_SYNTAX");
-    reg.add(FILE_LINES, "FILE_LINES");
-    reg.add(FILE_SYMBOLS, "FILE_SYMBOLS");
-}