Drop dead code

Aleksey Kladov 2018-10-15 21:56:01 +03:00
parent 7503c5528f
commit ee69fddf02
4 changed files with 0 additions and 401 deletions


@@ -101,89 +101,3 @@ fn file_symbols(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<SymbolIndex> {
    let syntax = db.file_syntax(file_id);
    Arc::new(SymbolIndex::for_file(file_id, syntax))
}

// mod imp;

// use std::{
//     sync::Arc,
// };
// use im;
// use salsa;
// use {FileId, imp::FileResolverImp};

// #[derive(Debug, Default, Clone)]
// pub(crate) struct State {
//     pub(crate) file_map: im::HashMap<FileId, Arc<String>>,
//     pub(crate) file_resolver: FileResolverImp
// }

// #[derive(Debug)]
// pub(crate) struct Db {
//     imp: imp::Db,
// }

// #[derive(Clone, Copy)]
// pub(crate) struct QueryCtx<'a> {
//     imp: &'a salsa::QueryCtx<State, imp::Data>,
// }

// pub(crate) struct Query<T, R>(pub(crate) u16, pub(crate) fn(QueryCtx, &T) -> R);

// pub(crate) struct QueryRegistry {
//     imp: imp::QueryRegistry,
// }

// impl Default for Db {
//     fn default() -> Db {
//         Db::new()
//     }
// }

// impl Db {
//     pub(crate) fn new() -> Db {
//         let reg = QueryRegistry::new();
//         Db { imp: imp::Db::new(reg.imp) }
//     }
//     pub(crate) fn state(&self) -> &State {
//         self.imp.imp.ground_data()
//     }
//     pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
//         Db { imp: self.imp.with_changes(new_state, changed_files, resolver_changed) }
//     }
//     pub(crate) fn make_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> R {
//         let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
//         f(ctx)
//     }
//     #[allow(unused)]
//     pub(crate) fn trace_query<F: FnOnce(QueryCtx) -> R, R>(&self, f: F) -> (R, Vec<&'static str>) {
//         let ctx = QueryCtx { imp: &self.imp.imp.query_ctx() };
//         let res = f(ctx);
//         let trace = self.imp.extract_trace(ctx.imp);
//         (res, trace)
//     }
// }

// impl<'a> QueryCtx<'a> {
//     pub(crate) fn get<Q: imp::EvalQuery>(&self, q: Q, params: Q::Params) -> Arc<Q::Output> {
//         q.get(self, params)
//     }
// }

// pub(crate) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
//     imp::file_text(ctx, file_id)
// }

// pub(crate) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
//     imp::file_set(ctx)
// }

// impl QueryRegistry {
//     fn new() -> QueryRegistry {
//         let mut reg = QueryRegistry { imp: imp::QueryRegistry::new() };
//         ::queries::register_queries(&mut reg);
//         ::module_map::register_queries(&mut reg);
//         reg
//     }
//     pub(crate) fn add<Q: imp::EvalQuery>(&mut self, q: Q, name: &'static str) {
//         self.imp.add(q, name)
//     }
// }
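The commented-out facade above is the hand-rolled query engine that salsa proper replaced (the kept context at the top of the hunk already takes db: &impl SyntaxDatabase). Its core trick is pairing a numeric query id with an eval function and erasing result types behind Arc<dyn Any>. A minimal, runnable sketch of that shape; the names QueryId and QueryTable are illustrative, not from this codebase:

use std::{any::Any, collections::HashMap, sync::Arc};

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct QueryId(u16);

// Results are type-erased, in the spirit of imp::Data in the next hunk.
type Data = Arc<dyn Any + Send + Sync>;

struct QueryTable {
    queries: HashMap<QueryId, Box<dyn Fn(&str) -> Data>>,
}

impl QueryTable {
    fn add<R: Any + Send + Sync>(&mut self, id: QueryId, f: fn(&str) -> R) {
        let prev = self
            .queries
            .insert(id, Box::new(move |input: &str| Arc::new(f(input)) as Data));
        assert!(prev.is_none(), "duplicate query: {:?}", id);
    }
    fn get<R: Any + Send + Sync>(&self, id: QueryId, input: &str) -> Arc<R> {
        // Recover the concrete type the same way the deleted code does:
        // res.downcast().unwrap().
        (self.queries[&id])(input).downcast::<R>().unwrap()
    }
}

fn main() {
    let mut table = QueryTable { queries: HashMap::new() };
    // Roughly analogous to FILE_LINES: derive a value from file text.
    table.add(QueryId(17), |text: &str| text.lines().count());
    let lines: Arc<usize> = table.get(QueryId(17), "mod foo;\nmod bar;");
    assert_eq!(*lines, 2);
}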


@@ -1,153 +0,0 @@
use std::{
    sync::Arc,
    any::Any,
    hash::{Hash, Hasher},
    collections::hash_map::{DefaultHasher},
    iter,
};

use rustc_hash::FxHashMap;
use salsa;

use crate::{FileId, imp::FileResolverImp};
use super::{State, Query, QueryCtx};

pub(super) type Data = Arc<Any + Send + Sync + 'static>;

#[derive(Debug)]
pub(super) struct Db {
    names: Arc<FxHashMap<salsa::QueryTypeId, &'static str>>,
    pub(super) imp: salsa::Db<State, Data>,
}

impl Db {
    pub(super) fn new(mut reg: QueryRegistry) -> Db {
        let config = reg.config.take().unwrap();
        Db {
            names: Arc::new(reg.names),
            imp: salsa::Db::new(config, State::default())
        }
    }
    pub(crate) fn with_changes(&self, new_state: State, changed_files: &[FileId], resolver_changed: bool) -> Db {
        let names = self.names.clone();
        let mut invalidations = salsa::Invalidations::new();
        invalidations.invalidate(FILE_TEXT, changed_files.iter().map(hash).map(salsa::InputFingerprint));
        if resolver_changed {
            invalidations.invalidate(FILE_SET, iter::once(salsa::InputFingerprint(hash(&()))));
        } else {
            invalidations.invalidate(FILE_SET, iter::empty());
        }
        let imp = self.imp.with_ground_data(
            new_state,
            invalidations,
        );
        Db { names, imp }
    }
    pub(super) fn extract_trace(&self, ctx: &salsa::QueryCtx<State, Data>) -> Vec<&'static str> {
        ctx.trace().into_iter().map(|it| self.names[&it]).collect()
    }
}
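In with_changes above, each changed FileId is hashed into the InputFingerprint that keys its FILE_TEXT entry, so exactly those entries become dirty. A runnable sketch of just that mapping, where a plain HashSet stands in for salsa::Invalidations:

use std::collections::hash_map::DefaultHasher;
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

#[derive(PartialEq, Eq, Hash, Debug)]
struct InputFingerprint(u64);

fn hash<T: Hash>(x: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    x.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    // FileIds 1 and 3 changed, as in with_changes(.., changed_files, ..).
    let changed_files: &[u32] = &[1, 3];
    let invalid: HashSet<InputFingerprint> =
        changed_files.iter().map(hash).map(InputFingerprint).collect();
    // Only FILE_TEXT entries keyed by the changed files are invalidated.
    assert!(invalid.contains(&InputFingerprint(hash(&3u32))));
    assert!(!invalid.contains(&InputFingerprint(hash(&2u32))));
}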
pub(crate) trait EvalQuery {
    type Params;
    type Output;
    fn query_type(&self) -> salsa::QueryTypeId;
    fn f(&self) -> salsa::QueryFn<State, Data>;
    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output>;
}

impl<T, R> EvalQuery for Query<T, R>
where
    T: Hash + Send + Sync + 'static,
    R: Hash + Send + Sync + 'static,
{
    type Params = T;
    type Output = R;
    fn query_type(&self) -> salsa::QueryTypeId {
        salsa::QueryTypeId(self.0)
    }
    fn f(&self) -> salsa::QueryFn<State, Data> {
        let f = self.1;
        Box::new(move |ctx, data| {
            let ctx = QueryCtx { imp: ctx };
            let data: &T = data.downcast_ref().unwrap();
            let res = f(ctx, data);
            let h = hash(&res);
            (Arc::new(res), salsa::OutputFingerprint(h))
        })
    }
    fn get(&self, ctx: &QueryCtx, params: Self::Params) -> Arc<Self::Output> {
        let query_id = salsa::QueryId(
            self.query_type(),
            salsa::InputFingerprint(hash(&params)),
        );
        let res = ctx.imp.get(query_id, Arc::new(params));
        res.downcast().unwrap()
    }
}
pub(super) struct QueryRegistry {
    config: Option<salsa::QueryConfig<State, Data>>,
    names: FxHashMap<salsa::QueryTypeId, &'static str>,
}

impl QueryRegistry {
    pub(super) fn new() -> QueryRegistry {
        let mut config = salsa::QueryConfig::<State, Data>::new();
        config = config.with_ground_query(
            FILE_TEXT, Box::new(|state, params| {
                let file_id: &FileId = params.downcast_ref().unwrap();
                let res = state.file_map[file_id].clone();
                let fingerprint = salsa::OutputFingerprint(hash(&res));
                (res, fingerprint)
            })
        );
        config = config.with_ground_query(
            FILE_SET, Box::new(|state, _params| {
                let file_ids: Vec<FileId> = state.file_map.keys().cloned().collect();
                let hash = hash(&file_ids);
                let file_resolver = state.file_resolver.clone();
                let res = (file_ids, file_resolver);
                let fingerprint = salsa::OutputFingerprint(hash);
                (Arc::new(res), fingerprint)
            })
        );
        let mut names = FxHashMap::default();
        names.insert(FILE_TEXT, "FILE_TEXT");
        names.insert(FILE_SET, "FILE_SET");
        QueryRegistry { config: Some(config), names }
    }
    pub(super) fn add<Q: EvalQuery>(&mut self, q: Q, name: &'static str) {
        let id = q.query_type();
        let prev = self.names.insert(id, name);
        assert!(prev.is_none(), "duplicate query: {:?}", id);
        let config = self.config.take().unwrap();
        let config = config.with_query(id, q.f());
        self.config = Some(config);
    }
}

fn hash<T: Hash>(x: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    x.hash(&mut hasher);
    hasher.finish()
}
const FILE_TEXT: salsa::QueryTypeId = salsa::QueryTypeId(0);
pub(super) fn file_text(ctx: QueryCtx, file_id: FileId) -> Arc<String> {
    let query_id = salsa::QueryId(
        FILE_TEXT,
        salsa::InputFingerprint(hash(&file_id)),
    );
    let res = ctx.imp.get(query_id, Arc::new(file_id));
    res.downcast().unwrap()
}

const FILE_SET: salsa::QueryTypeId = salsa::QueryTypeId(1);
pub(super) fn file_set(ctx: QueryCtx) -> Arc<(Vec<FileId>, FileResolverImp)> {
    let query_id = salsa::QueryId(
        FILE_SET,
        salsa::InputFingerprint(hash(&())),
    );
    let res = ctx.imp.get(query_id, Arc::new(()));
    res.downcast().unwrap()
}
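Both ground queries fingerprint their output with the same hash helper, and that is what buys early cutoff: if a re-run produces an output whose OutputFingerprint is unchanged, queries downstream of it can be reused as-is. A small illustration of the property, assuming equal fingerprints are treated as "no change":

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[derive(PartialEq, Eq, Clone, Copy, Debug)]
struct OutputFingerprint(u64);

fn fingerprint<T: Hash>(x: &T) -> OutputFingerprint {
    let mut hasher = DefaultHasher::new();
    x.hash(&mut hasher);
    OutputFingerprint(hasher.finish())
}

fn main() {
    // An edit that only reshuffles whitespace changes the file text but
    // not a line-count query's output, so that query's fingerprint is
    // stable and its dependents would be treated as clean.
    let before = fingerprint(&"fn main() {}".lines().count());
    let after = fingerprint(&"fn  main() {}".lines().count());
    assert_eq!(before, after);
}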


@@ -32,126 +32,3 @@ fn module_tree(db: &impl ModulesDatabase, (): ()) -> Arc<ModuleTreeDescriptor> {
    let res = ModuleTreeDescriptor::new(files.iter().map(|(file_id, descr)| (*file_id, &**descr)), &file_set.resolver);
    Arc::new(res)
}

// #[cfg(test)]
// mod tests {
//     use std::collections::HashMap;
//     use im;
//     use relative_path::{RelativePath, RelativePathBuf};
//     use {
//         db::{Db},
//         imp::FileResolverImp,
//         FileId, FileResolver,
//     };
//     use super::*;

//     #[derive(Debug)]
//     struct FileMap(im::HashMap<FileId, RelativePathBuf>);

//     impl FileResolver for FileMap {
//         fn file_stem(&self, file_id: FileId) -> String {
//             self.0[&file_id].file_stem().unwrap().to_string()
//         }
//         fn resolve(&self, file_id: FileId, rel: &RelativePath) -> Option<FileId> {
//             let path = self.0[&file_id].join(rel).normalize();
//             self.0.iter()
//                 .filter_map(|&(id, ref p)| Some(id).filter(|_| p == &path))
//                 .next()
//         }
//     }

//     struct Fixture {
//         next_file_id: u32,
//         fm: im::HashMap<FileId, RelativePathBuf>,
//         db: Db,
//     }

//     impl Fixture {
//         fn new() -> Fixture {
//             Fixture {
//                 next_file_id: 1,
//                 fm: im::HashMap::new(),
//                 db: Db::new(),
//             }
//         }
//         fn add_file(&mut self, path: &str, text: &str) -> FileId {
//             assert!(path.starts_with("/"));
//             let file_id = FileId(self.next_file_id);
//             self.next_file_id += 1;
//             self.fm.insert(file_id, RelativePathBuf::from(&path[1..]));
//             let mut new_state = self.db.state().clone();
//             new_state.file_map.insert(file_id, Arc::new(text.to_string()));
//             new_state.file_resolver = FileResolverImp::new(
//                 Arc::new(FileMap(self.fm.clone()))
//             );
//             self.db = self.db.with_changes(new_state, &[file_id], true);
//             file_id
//         }
//         fn remove_file(&mut self, file_id: FileId) {
//             self.fm.remove(&file_id);
//             let mut new_state = self.db.state().clone();
//             new_state.file_map.remove(&file_id);
//             new_state.file_resolver = FileResolverImp::new(
//                 Arc::new(FileMap(self.fm.clone()))
//             );
//             self.db = self.db.with_changes(new_state, &[file_id], true);
//         }
//         fn change_file(&mut self, file_id: FileId, new_text: &str) {
//             let mut new_state = self.db.state().clone();
//             new_state.file_map.insert(file_id, Arc::new(new_text.to_string()));
//             self.db = self.db.with_changes(new_state, &[file_id], false);
//         }
//         fn check_parent_modules(
//             &self,
//             file_id: FileId,
//             expected: &[FileId],
//             queries: &[(&'static str, u64)]
//         ) {
//             let (tree, events) = self.db.trace_query(|ctx| module_tree(ctx));
//             let actual = tree.parent_modules(file_id)
//                 .into_iter()
//                 .map(|link| link.owner(&tree))
//                 .collect::<Vec<_>>();
//             assert_eq!(actual.as_slice(), expected);
//             let mut counts = HashMap::new();
//             events.into_iter()
//                 .for_each(|event| *counts.entry(event).or_insert(0) += 1);
//             for &(query_id, expected_count) in queries.iter() {
//                 let actual_count = *counts.get(&query_id).unwrap_or(&0);
//                 assert_eq!(
//                     actual_count,
//                     expected_count,
//                     "counts for {} differ",
//                     query_id,
//                 )
//             }
//         }
//     }

//     #[test]
//     fn test_parent_module() {
//         let mut f = Fixture::new();
//         let foo = f.add_file("/foo.rs", "");
//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
//         let lib = f.add_file("/lib.rs", "mod foo;");
//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 0)]);
//         f.change_file(lib, "");
//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
//         f.change_file(lib, "mod foo;");
//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
//         f.change_file(lib, "mod bar;");
//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 1)]);
//         f.change_file(lib, "mod foo;");
//         f.check_parent_modules(foo, &[lib], &[("MODULE_DESCR", 1)]);
//         f.remove_file(lib);
//         f.check_parent_modules(foo, &[], &[("MODULE_DESCR", 0)]);
//     }
// }
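The deleted fixture is notable for asserting not just results but execution counts: check_parent_modules used trace_query to count how often MODULE_DESCR actually ran after each change. The essence of that testing style, reduced to a memo table with an execution counter (all names here are illustrative):

use std::collections::HashMap;

struct Memo {
    cache: HashMap<String, usize>,
    executions: u64,
}

impl Memo {
    fn line_count(&mut self, text: &str) -> usize {
        if let Some(&n) = self.cache.get(text) {
            return n; // cache hit: not counted as an execution
        }
        self.executions += 1; // the query actually ran
        let n = text.lines().count();
        self.cache.insert(text.to_string(), n);
        n
    }
}

fn main() {
    let mut m = Memo { cache: HashMap::new(), executions: 0 };
    m.line_count("mod foo;");
    m.line_count("mod foo;"); // second identical call is a cache hit
    assert_eq!(m.executions, 1);
    m.line_count("mod bar;");
    assert_eq!(m.executions, 2);
}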


@@ -1,39 +0,0 @@
use std::sync::Arc;

use ra_syntax::File;
use ra_editor::LineIndex;

use crate::{
    FileId,
    db::{Query, QueryCtx, QueryRegistry},
    symbol_index::SymbolIndex,
};

pub(crate) use crate::db::{file_text, file_set};

pub(crate) fn file_syntax(ctx: QueryCtx, file_id: FileId) -> File {
    (&*ctx.get(FILE_SYNTAX, file_id)).clone()
}
pub(crate) fn file_lines(ctx: QueryCtx, file_id: FileId) -> Arc<LineIndex> {
    ctx.get(FILE_LINES, file_id)
}
pub(crate) fn file_symbols(ctx: QueryCtx, file_id: FileId) -> Arc<SymbolIndex> {
    ctx.get(FILE_SYMBOLS, file_id)
}

const FILE_SYNTAX: Query<FileId, File> = Query(16, |ctx, file_id: &FileId| {
    let text = file_text(ctx, *file_id);
    File::parse(&*text)
});
const FILE_LINES: Query<FileId, LineIndex> = Query(17, |ctx, file_id: &FileId| {
    let text = file_text(ctx, *file_id);
    LineIndex::new(&*text)
});
const FILE_SYMBOLS: Query<FileId, SymbolIndex> = Query(18, |ctx, file_id: &FileId| {
    let syntax = file_syntax(ctx, *file_id);
    SymbolIndex::for_file(*file_id, syntax)
});

pub(crate) fn register_queries(reg: &mut QueryRegistry) {
    reg.add(FILE_SYNTAX, "FILE_SYNTAX");
    reg.add(FILE_LINES, "FILE_LINES");
    reg.add(FILE_SYMBOLS, "FILE_SYMBOLS");
}
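The FILE_* consts above work because Query holds a plain fn pointer and non-capturing closures coerce to fn pointers, so whole queries can live in consts. A self-contained sketch of the same shape, with Ctx and LINE_LEN as stand-ins:

// Minimal stand-alone version of the const-Query pattern: a numeric id
// plus a plain fn pointer, usable in a const item.
struct Ctx {
    text: &'static str,
}

struct Query<T, R>(u16, fn(&Ctx, &T) -> R);

// The closure captures nothing, so it coerces to a fn pointer here.
const LINE_LEN: Query<usize, Option<usize>> = Query(17, |ctx, line: &usize| {
    ctx.text.lines().nth(*line).map(str::len)
});

fn main() {
    let ctx = Ctx { text: "mod foo;\nmod bar;" };
    assert_eq!((LINE_LEN.1)(&ctx, &1), Some(8));
}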