5162: Try to reduce Semantics monomorphisations r=matklad a=lnicola



Co-authored-by: Laurențiu Nicola <lnicola@dend.ro>
bors[bot] 2020-07-01 11:40:47 +00:00 committed by GitHub
commit 248b656c21
2 changed files with 206 additions and 59 deletions
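
In outline, the refactoring below turns `Semantics<'db, DB>` into a thin facade that forwards every call to a new non-generic `SemanticsImpl<'db>` holding a `&dyn HirDatabase`, so only the one-line forwarders get monomorphised per database type while the heavy method bodies are compiled once. A minimal, hypothetical sketch of that pattern (names and bodies are simplified placeholders, not the real rust-analyzer API):

trait HirDatabase {
    fn file_text(&self, file_id: u32) -> String;
}

// Non-generic core: its methods are monomorphised a single time.
struct SemanticsImpl<'db> {
    db: &'db dyn HirDatabase,
}

impl<'db> SemanticsImpl<'db> {
    fn new(db: &'db dyn HirDatabase) -> Self {
        SemanticsImpl { db }
    }

    fn parse(&self, file_id: u32) -> String {
        // Stand-in for the real parsing and caching work.
        self.db.file_text(file_id)
    }
}

// Thin generic facade: only these forwarders are duplicated per `DB`.
struct Semantics<'db, DB> {
    db: &'db DB,
    imp: SemanticsImpl<'db>,
}

impl<'db, DB: HirDatabase> Semantics<'db, DB> {
    fn new(db: &'db DB) -> Semantics<'db, DB> {
        // `&DB` coerces to `&dyn HirDatabase` here, erasing the type parameter.
        Semantics { db, imp: SemanticsImpl::new(db) }
    }

    fn parse(&self, file_id: u32) -> String {
        self.imp.parse(file_id)
    }
}

struct MockDb;
impl HirDatabase for MockDb {
    fn file_text(&self, _file_id: u32) -> String {
        "fn main() {}".to_string()
    }
}

fn main() {
    let db = MockDb;
    let sema = Semantics::new(&db);
    assert_eq!(sema.parse(0), "fn main() {}");
}

The diff keeps `pub db: &'db DB` on the facade, so callers that reach for the concrete database directly continue to compile.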

@@ -83,6 +83,11 @@ impl PathResolution {
/// Primary API to get semantic information, like types, from syntax trees.
pub struct Semantics<'db, DB> {
pub db: &'db DB,
imp: SemanticsImpl<'db>,
}
pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>,
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
}
@@ -95,7 +100,174 @@ impl<DB> fmt::Debug for Semantics<'_, DB> {
impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn new(db: &DB) -> Semantics<DB> {
Semantics { db, s2d_cache: Default::default(), cache: Default::default() }
let impl_ = SemanticsImpl::new(db);
Semantics { db, imp: impl_ }
}
pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
self.imp.parse(file_id)
}
pub fn ast<T: AstDiagnostic + Diagnostic>(&self, d: &T) -> <T as AstDiagnostic>::AST {
let file_id = d.source().file_id;
let root = self.db.parse_or_expand(file_id).unwrap();
self.imp.cache(root, file_id);
d.ast(self.db.upcast())
}
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
self.imp.expand(macro_call)
}
pub fn expand_hypothetical(
&self,
actual_macro_call: &ast::MacroCall,
hypothetical_args: &ast::TokenTree,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
self.imp.expand_hypothetical(actual_macro_call, hypothetical_args, token_to_map)
}
pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
self.imp.descend_into_macros(token)
}
pub fn descend_node_at_offset<N: ast::AstNode>(
&self,
node: &SyntaxNode,
offset: TextSize,
) -> Option<N> {
self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
}
pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
self.imp.original_range(node)
}
pub fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange {
self.imp.diagnostics_range(diagnostics)
}
pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
self.imp.ancestors_with_macros(node)
}
pub fn ancestors_at_offset_with_macros(
&self,
node: &SyntaxNode,
offset: TextSize,
) -> impl Iterator<Item = SyntaxNode> + '_ {
self.imp.ancestors_at_offset_with_macros(node, offset)
}
/// Find an AstNode by offset inside SyntaxNode; if it is inside a *MacroFile*,
/// search up until it is of the target AstNode type.
pub fn find_node_at_offset_with_macros<N: AstNode>(
&self,
node: &SyntaxNode,
offset: TextSize,
) -> Option<N> {
self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
}
/// Find an AstNode by offset inside SyntaxNode; if it is inside a *MacroCall*,
/// descend into it and find again.
pub fn find_node_at_offset_with_descend<N: AstNode>(
&self,
node: &SyntaxNode,
offset: TextSize,
) -> Option<N> {
if let Some(it) = find_node_at_offset(&node, offset) {
return Some(it);
}
self.imp.descend_node_at_offset(node, offset).find_map(N::cast)
}
pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
self.imp.type_of_expr(expr)
}
pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> {
self.imp.type_of_pat(pat)
}
pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
self.imp.resolve_method_call(call)
}
pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
self.imp.resolve_field(field)
}
pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<(Field, Option<Local>)> {
self.imp.resolve_record_field(field)
}
pub fn resolve_record_field_pat(&self, field: &ast::RecordFieldPat) -> Option<Field> {
self.imp.resolve_record_field_pat(field)
}
pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
self.imp.resolve_macro_call(macro_call)
}
pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
self.imp.resolve_path(path)
}
pub fn resolve_variant(&self, record_lit: ast::RecordLit) -> Option<VariantId> {
self.imp.resolve_variant(record_lit)
}
pub fn lower_path(&self, path: &ast::Path) -> Option<Path> {
self.imp.lower_path(path)
}
pub fn resolve_bind_pat_to_const(&self, pat: &ast::BindPat) -> Option<ModuleDef> {
self.imp.resolve_bind_pat_to_const(pat)
}
// FIXME: use this instead?
// pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> {
self.imp.record_literal_missing_fields(literal)
}
pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
self.imp.record_pattern_missing_fields(pattern)
}
pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
let src = self.imp.find_file(src.syntax().clone()).with_value(src).cloned();
T::to_def(&self.imp, src)
}
pub fn to_module_def(&self, file: FileId) -> Option<Module> {
self.imp.to_module_def(file)
}
pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> {
self.imp.scope(node)
}
pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> {
self.imp.scope_at_offset(node, offset)
}
pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
self.imp.scope_for_def(def)
}
pub fn assert_contains_node(&self, node: &SyntaxNode) {
self.imp.assert_contains_node(node)
}
}
impl<'db> SemanticsImpl<'db> {
pub fn new(db: &'db dyn HirDatabase) -> Self {
Self { db, s2d_cache: Default::default(), cache: Default::default() }
}
pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
@@ -104,13 +276,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
tree
}
pub fn ast<T: AstDiagnostic + Diagnostic>(&self, d: &T) -> <T as AstDiagnostic>::AST {
let file_id = d.source().file_id;
let root = self.db.parse_or_expand(file_id).unwrap();
self.cache(root, file_id);
d.ast(self.db)
}
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
@@ -130,9 +295,15 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
let krate = sa.resolver.krate()?;
let macro_call_id = macro_call
.as_call_id(self.db, krate, |path| sa.resolver.resolve_path_as_macro(self.db, &path))?;
hir_expand::db::expand_hypothetical(self.db, macro_call_id, hypothetical_args, token_to_map)
let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
sa.resolver.resolve_path_as_macro(self.db.upcast(), &path)
})?;
hir_expand::db::expand_hypothetical(
self.db.upcast(),
macro_call_id,
hypothetical_args,
token_to_map,
)
}
pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
@@ -147,7 +318,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
return None;
}
let file_id = sa.expand(self.db, token.with_value(&macro_call))?;
let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?;
let token = file_id.expansion_info(self.db.upcast())?.map_token_down(token.as_ref())?;
self.cache(find_root(&token.value.parent()), token.file_id);
@@ -159,15 +330,16 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
token.value
}
pub fn descend_node_at_offset<N: ast::AstNode>(
pub fn descend_node_at_offset(
&self,
node: &SyntaxNode,
offset: TextSize,
) -> Option<N> {
) -> impl Iterator<Item = SyntaxNode> + '_ {
// Handle macro token cases
node.token_at_offset(offset)
.map(|token| self.descend_into_macros(token))
.find_map(|it| self.ancestors_with_macros(it.parent()).find_map(N::cast))
.map(|it| self.ancestors_with_macros(it.parent()))
.flatten()
}
pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
@@ -184,7 +356,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
let node = self.find_file(node);
node.ancestors_with_macros(self.db).map(|it| it.value)
node.ancestors_with_macros(self.db.upcast()).map(|it| it.value)
}
pub fn ancestors_at_offset_with_macros(
@@ -197,29 +369,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
.kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
}
/// Find an AstNode by offset inside SyntaxNode; if it is inside a *MacroFile*,
/// search up until it is of the target AstNode type.
pub fn find_node_at_offset_with_macros<N: AstNode>(
&self,
node: &SyntaxNode,
offset: TextSize,
) -> Option<N> {
self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
}
/// Find an AstNode by offset inside SyntaxNode; if it is inside a *MacroCall*,
/// descend into it and find again.
pub fn find_node_at_offset_with_descend<N: AstNode>(
&self,
node: &SyntaxNode,
offset: TextSize,
) -> Option<N> {
if let Some(it) = find_node_at_offset(&node, offset) {
return Some(it);
}
self.descend_node_at_offset(&node, offset)
}
pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
self.analyze(expr.syntax()).type_of(self.db, &expr)
}
@@ -267,9 +416,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
}
// FIXME: use this instead?
// pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> {
self.analyze(literal.syntax())
.record_literal_missing_fields(self.db, literal)
@@ -282,11 +428,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
.unwrap_or_default()
}
pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
let src = self.find_file(src.syntax().clone()).with_value(src).cloned();
T::to_def(self, src)
}
fn with_ctx<F: FnOnce(&mut SourceToDefCtx) -> T, T>(&self, f: F) -> T {
let mut cache = self.s2d_cache.borrow_mut();
let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
@@ -310,7 +451,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
}
pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
let resolver = def.id.resolver(self.db);
let resolver = def.id.resolver(self.db.upcast());
SemanticsScope { db: self.db, resolver }
}
@@ -331,17 +472,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
ChildContainer::DefWithBodyId(def) => {
return SourceAnalyzer::new_for_body(self.db, def, src, offset)
}
ChildContainer::TraitId(it) => it.resolver(self.db),
ChildContainer::ImplId(it) => it.resolver(self.db),
ChildContainer::ModuleId(it) => it.resolver(self.db),
ChildContainer::EnumId(it) => it.resolver(self.db),
ChildContainer::VariantId(it) => it.resolver(self.db),
ChildContainer::GenericDefId(it) => it.resolver(self.db),
ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
};
SourceAnalyzer::new_for_resolver(resolver, src)
}
fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
pub fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
assert!(root_node.parent().is_none());
let mut cache = self.cache.borrow_mut();
let prev = cache.insert(root_node, file_id);
@@ -357,7 +498,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
cache.get(root_node).copied()
}
fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
pub fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> {
let root_node = find_root(&node);
let file_id = self.lookup(&root_node).unwrap_or_else(|| {
panic!(
@@ -382,14 +523,14 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
pub trait ToDef: AstNode + Clone {
type Def;
fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: InFile<Self>) -> Option<Self::Def>;
fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def>;
}
macro_rules! to_def_impls {
($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
impl ToDef for $ast {
type Def = $def;
fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: InFile<Self>) -> Option<Self::Def> {
fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def> {
sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
}
}
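
The forwarding above needs a `&dyn HirDatabase`, which the second changed file provides by giving `RootDatabase` an `Upcast<dyn HirDatabase>` impl next to its existing `Upcast<dyn DefDatabase>` one. A rough, self-contained sketch of that upcast pattern, using a simplified stand-in for the `Upcast` trait and hypothetical stub types:

trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

trait DefDatabase {}
trait HirDatabase: DefDatabase {}

struct RootDatabaseStub;
impl DefDatabase for RootDatabaseStub {}
impl HirDatabase for RootDatabaseStub {}

// One impl per target trait object; `&*self` re-borrows the concrete
// database and unsized coercion turns it into `&dyn HirDatabase`.
impl Upcast<dyn HirDatabase> for RootDatabaseStub {
    fn upcast(&self) -> &(dyn HirDatabase + 'static) {
        &*self
    }
}

fn main() {
    let db = RootDatabaseStub;
    // Code that only needs `&dyn HirDatabase` no longer has to be generic
    // over the concrete database type.
    let hir_db: &dyn HirDatabase = db.upcast();
    let _ = hir_db;
}

Inside `SemanticsImpl` the conversion runs the other way as well: the stored `&dyn HirDatabase` is converted with `.upcast()` wherever an API expects one of the database super-traits, which is presumably what the `self.db.upcast()` calls added in the first file are for.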

@@ -13,7 +13,7 @@ mod wasm_shims;
use std::sync::Arc;
use hir::db::{AstDatabase, DefDatabase};
use hir::db::{AstDatabase, DefDatabase, HirDatabase};
use ra_db::{
salsa::{self, Database, Durability},
Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase,
@@ -52,6 +52,12 @@ impl Upcast<dyn DefDatabase> for RootDatabase {
}
}
impl Upcast<dyn HirDatabase> for RootDatabase {
fn upcast(&self) -> &(dyn HirDatabase + 'static) {
&*self
}
}
impl FileLoader for RootDatabase {
fn file_text(&self, file_id: FileId) -> Arc<String> {
FileLoaderDelegate(self).file_text(file_id)