2271: Force passing Source when creating a SourceAnalyzer r=matklad a=matklad



Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
This commit (d9d99369b2) was authored by bors[bot] on 2019-11-15 23:12:59 +00:00 and committed by GitHub.
Signature note: no known key found for this signature in the database (GPG key ID: 4AEE18F83AFDEB23).
10 changed files with 51 additions and 54 deletions

View file

@ -117,7 +117,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
node: &SyntaxNode,
offset: Option<TextUnit>,
) -> SourceAnalyzer {
SourceAnalyzer::new(self.db, self.frange.file_id, node, offset)
SourceAnalyzer::new(self.db, hir::Source::new(self.frange.file_id.into(), node), offset)
}
pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {

View file

@ -12,7 +12,6 @@ use hir_def::{
path::known,
};
use hir_expand::{name::AsName, Source};
use ra_db::FileId;
use ra_syntax::{
ast::{self, AstNode},
match_ast, AstPtr,
@ -30,38 +29,32 @@ use crate::{
HirFileId, Local, MacroDef, Module, Name, Path, Resolver, Static, Struct, Ty,
};
fn try_get_resolver_for_node(
db: &impl HirDatabase,
file_id: FileId,
node: &SyntaxNode,
) -> Option<Resolver> {
fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> {
match_ast! {
match node {
match (node.ast) {
ast::Module(it) => {
let src = crate::Source { file_id: file_id.into(), ast: it };
let src = node.with_ast(it);
Some(crate::Module::from_declaration(db, src)?.resolver(db))
},
ast::SourceFile(it) => {
let src =
crate::Source { file_id: file_id.into(), ast: crate::ModuleSource::SourceFile(it) };
let src = node.with_ast(crate::ModuleSource::SourceFile(it));
Some(crate::Module::from_definition(db, src)?.resolver(db))
},
ast::StructDef(it) => {
let src = crate::Source { file_id: file_id.into(), ast: it };
let src = node.with_ast(it);
Some(Struct::from_source(db, src)?.resolver(db))
},
ast::EnumDef(it) => {
let src = crate::Source { file_id: file_id.into(), ast: it };
let src = node.with_ast(it);
Some(Enum::from_source(db, src)?.resolver(db))
},
_ => {
if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
Some(def_with_body_from_child_node(db, Source::new(file_id.into(), node))?.resolver(db))
} else {
// FIXME add missing cases
None
_ => match node.ast.kind() {
FN_DEF | CONST_DEF | STATIC_DEF => {
Some(def_with_body_from_child_node(db, node)?.resolver(db))
}
},
// FIXME add missing cases
_ => None
}
}
}
}
@ -90,7 +83,6 @@ fn def_with_body_from_child_node(
/// original source files. It should not be used inside the HIR itself.
#[derive(Debug)]
pub struct SourceAnalyzer {
// FIXME: this doesn't handle macros at all
file_id: HirFileId,
resolver: Resolver,
body_owner: Option<DefWithBody>,
@ -137,20 +129,16 @@ pub struct ReferenceDescriptor {
impl SourceAnalyzer {
pub fn new(
db: &impl HirDatabase,
file_id: FileId,
node: &SyntaxNode,
node: Source<&SyntaxNode>,
offset: Option<TextUnit>,
) -> SourceAnalyzer {
let node_source = Source::new(file_id.into(), node);
let def_with_body = def_with_body_from_child_node(db, node_source);
let def_with_body = def_with_body_from_child_node(db, node);
if let Some(def) = def_with_body {
let source_map = def.body_source_map(db);
let scopes = def.expr_scopes(db);
let scope = match offset {
None => scope_for(&scopes, &source_map, node_source),
Some(offset) => {
scope_for_offset(&scopes, &source_map, Source::new(file_id.into(), offset))
}
None => scope_for(&scopes, &source_map, node),
Some(offset) => scope_for_offset(&scopes, &source_map, node.with_ast(offset)),
};
let resolver = expr::resolver_for_scope(db, def, scope);
SourceAnalyzer {
@ -159,19 +147,20 @@ impl SourceAnalyzer {
body_source_map: Some(source_map),
infer: Some(def.infer(db)),
scopes: Some(scopes),
file_id: file_id.into(),
file_id: node.file_id,
}
} else {
SourceAnalyzer {
resolver: node
.ast
.ancestors()
.find_map(|node| try_get_resolver_for_node(db, file_id, &node))
.find_map(|it| try_get_resolver_for_node(db, node.with_ast(&it)))
.unwrap_or_default(),
body_owner: None,
body_source_map: None,
infer: None,
scopes: None,
file_id: file_id.into(),
file_id: node.file_id,
}
}
}

View file

@ -1,3 +1,6 @@
mod never_type;
mod coercion;
use std::fmt::Write;
use std::sync::Arc;
@ -11,7 +14,7 @@ use ra_syntax::{
use test_utils::covers;
use crate::{
expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult,
expr::BodySourceMap, test_db::TestDB, ty::display::HirDisplay, ty::InferenceResult, Source,
SourceAnalyzer,
};
@ -19,9 +22,6 @@ use crate::{
// against snapshots of the expected results using insta. Use cargo-insta to
// update the snapshots.
mod never_type;
mod coercion;
#[test]
fn cfg_impl_block() {
let (db, pos) = TestDB::with_position(
@ -4609,7 +4609,8 @@ fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
fn type_at_pos(db: &TestDB, pos: FilePosition) -> String {
let file = db.parse(pos.file_id).ok().unwrap();
let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();
let analyzer = SourceAnalyzer::new(db, pos.file_id, expr.syntax(), Some(pos.offset));
let analyzer =
SourceAnalyzer::new(db, Source::new(pos.file_id.into(), expr.syntax()), Some(pos.offset));
let ty = analyzer.type_of(db, &expr).unwrap();
ty.display(db).to_string()
}
@ -4674,7 +4675,7 @@ fn infer(content: &str) -> String {
for node in source_file.syntax().descendants() {
if node.kind() == FN_DEF || node.kind() == CONST_DEF || node.kind() == STATIC_DEF {
let analyzer = SourceAnalyzer::new(&db, file_id, &node, None);
let analyzer = SourceAnalyzer::new(&db, Source::new(file_id.into(), &node), None);
infer_def(analyzer.inference_result(), analyzer.body_source_map());
}
}
@ -4715,7 +4716,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
let file = db.parse(pos.file_id).ok().unwrap();
let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
let events = db.log_executed(|| {
SourceAnalyzer::new(&db, pos.file_id, &node, None);
SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
});
assert!(format!("{:?}", events).contains("infer"))
}
@ -4735,7 +4736,7 @@ fn typing_whitespace_inside_a_function_should_not_invalidate_types() {
let file = db.parse(pos.file_id).ok().unwrap();
let node = file.syntax().token_at_offset(pos.offset).right_biased().unwrap().parent();
let events = db.log_executed(|| {
SourceAnalyzer::new(&db, pos.file_id, &node, None);
SourceAnalyzer::new(&db, Source::new(pos.file_id.into(), &node), None);
});
assert!(!format!("{:?}", events).contains("infer"), "{:#?}", events)
}

View file

@ -19,7 +19,11 @@ pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<Cal
let calling_node = FnCallNode::with_node(&syntax, position.offset)?;
let name_ref = calling_node.name_ref()?;
let analyzer = hir::SourceAnalyzer::new(db, position.file_id, name_ref.syntax(), None);
let analyzer = hir::SourceAnalyzer::new(
db,
hir::Source::new(position.file_id.into(), name_ref.syntax()),
None,
);
let (mut call_info, has_self) = match &calling_node {
FnCallNode::CallExpr(expr) => {
//FIXME: apply subst

View file

@ -58,8 +58,11 @@ impl<'a> CompletionContext<'a> {
);
let token =
original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
let analyzer =
hir::SourceAnalyzer::new(db, position.file_id, &token.parent(), Some(position.offset));
let analyzer = hir::SourceAnalyzer::new(
db,
hir::Source::new(position.file_id.into(), &token.parent()),
Some(position.offset),
);
let mut ctx = CompletionContext {
db,
analyzer,

View file

@ -18,7 +18,8 @@ pub(crate) fn goto_type_definition(
.find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())
})?;
let analyzer = hir::SourceAnalyzer::new(db, position.file_id, &node, None);
let analyzer =
hir::SourceAnalyzer::new(db, hir::Source::new(position.file_id.into(), &node), None);
let ty: hir::Ty = if let Some(ty) =
ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))

View file

@ -230,7 +230,8 @@ pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> {
.ancestors()
.take_while(|it| it.text_range() == leaf_node.text_range())
.find(|it| ast::Expr::cast(it.clone()).is_some() || ast::Pat::cast(it.clone()).is_some())?;
let analyzer = hir::SourceAnalyzer::new(db, frange.file_id, &node, None);
let analyzer =
hir::SourceAnalyzer::new(db, hir::Source::new(frange.file_id.into(), &node), None);
let ty = if let Some(ty) = ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e))
{
ty

View file

@ -32,6 +32,7 @@ fn get_inlay_hints(
file_id: FileId,
node: &SyntaxNode,
) -> Option<Vec<InlayHint>> {
let analyzer = SourceAnalyzer::new(db, hir::Source::new(file_id.into(), node), None);
match_ast! {
match node {
ast::LetStmt(it) => {
@ -39,11 +40,9 @@ fn get_inlay_hints(
return None;
}
let pat = it.pat()?;
let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
Some(get_pat_type_hints(db, &analyzer, pat, false))
},
ast::LambdaExpr(it) => {
let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
it.param_list().map(|param_list| {
param_list
.params()
@ -56,21 +55,17 @@ fn get_inlay_hints(
},
ast::ForExpr(it) => {
let pat = it.pat()?;
let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
Some(get_pat_type_hints(db, &analyzer, pat, false))
},
ast::IfExpr(it) => {
let pat = it.condition()?.pat()?;
let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
Some(get_pat_type_hints(db, &analyzer, pat, true))
},
ast::WhileExpr(it) => {
let pat = it.condition()?.pat()?;
let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
Some(get_pat_type_hints(db, &analyzer, pat, true))
},
ast::MatchArmList(it) => {
let analyzer = SourceAnalyzer::new(db, file_id, it.syntax(), None);
Some(
it
.arms()

View file

@ -129,7 +129,8 @@ pub(crate) fn classify_name_ref(
let _p = profile("classify_name_ref");
let parent = name_ref.syntax().parent()?;
let analyzer = SourceAnalyzer::new(db, file_id, name_ref.syntax(), None);
let analyzer =
SourceAnalyzer::new(db, hir::Source::new(file_id.into(), name_ref.syntax()), None);
if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
tested_by!(goto_definition_works_for_methods);

View file

@ -176,9 +176,11 @@ impl SourceFile {
/// ```
#[macro_export]
macro_rules! match_ast {
(match $node:ident {
(match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
(match ($node:expr) {
$( ast::$ast:ident($it:ident) => $res:block, )*
_ => $catch_all:expr,
_ => $catch_all:expr $(,)?
}) => {{
$( if let Some($it) = ast::$ast::cast($node.clone()) $res else )*
{ $catch_all }