From 9167da66acff22b4fe68d7bbe60c25ab0b56ad72 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Fri, 15 Nov 2019 14:15:04 +0300
Subject: [PATCH 1/3] Reduce visibility

---
 crates/ra_hir/src/source_binder.rs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index 59046edcc41..c1ecf18b963 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -285,7 +285,7 @@ impl SourceAnalyzer {
         self.resolve_hir_path(db, &hir_path)
     }
 
-    pub fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
+    fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
         let mut shadowed = FxHashSet::default();
         let name = name_ref.as_name();
         let source_map = self.body_source_map.as_ref()?;
@@ -309,9 +309,9 @@ impl SourceAnalyzer {
         self.resolver.process_all_names(db, f)
     }
 
+    // FIXME: we only use this in `inline_local_variable` assist, ideally, we
+    // should switch to general reference search infra there.
     pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
-        // FIXME: at least, this should work with any DefWithBody, but ideally
-        // this should be hir-based altogether
        let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
         let ptr = Either::A(AstPtr::new(&ast::Pat::from(pat.clone())));
         fn_def

From 2f6c0c314b749e25431f3fd6caaac5d3270751b6 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Fri, 15 Nov 2019 14:47:26 +0300
Subject: [PATCH 2/3] Move scope tests to hir_def

---
 crates/ra_hir/src/expr.rs                  | 189 ------------------
 crates/ra_hir/src/source_binder.rs         |  24 +--
 crates/ra_hir_def/src/body/scope.rs        | 219 +++++++++++++++++++++
 crates/ra_hir_def/src/nameres.rs           |   9 +-
 crates/ra_hir_def/src/nameres/collector.rs |   3 +-
 5 files changed, 233 insertions(+), 211 deletions(-)

diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs
index 899e0fa04ad..e4598eec074 100644
--- a/crates/ra_hir/src/expr.rs
+++ b/crates/ra_hir/src/expr.rs
@@ -42,192 +42,3 @@ pub(crate) fn resolver_for_scope(
     }
     r
 }
-
-#[cfg(test)]
-mod tests {
-    use hir_expand::Source;
-    use ra_db::{fixture::WithFixture, SourceDatabase};
-    use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
-    use test_utils::{assert_eq_text, extract_offset};
-
-    use crate::{source_binder::SourceAnalyzer, test_db::TestDB};
-
-    fn do_check(code: &str, expected: &[&str]) {
-        let (off, code) = extract_offset(code);
-        let code = {
-            let mut buf = String::new();
-            let off = u32::from(off) as usize;
-            buf.push_str(&code[..off]);
-            buf.push_str("marker");
-            buf.push_str(&code[off..]);
-            buf
-        };
-
-        let (db, file_id) = TestDB::with_single_file(&code);
-
-        let file = db.parse(file_id).ok().unwrap();
-        let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
-        let analyzer = SourceAnalyzer::new(&db, file_id, marker.syntax(), None);
-
-        let scopes = analyzer.scopes();
-        let expr_id = analyzer
-            .body_source_map()
-            .node_expr(Source { file_id: file_id.into(), ast: &marker.into() })
-            .unwrap();
-        let scope = scopes.scope_for(expr_id);
-
-        let actual = scopes
-            .scope_chain(scope)
-            .flat_map(|scope| scopes.entries(scope))
-            .map(|it| it.name().to_string())
-            .collect::<Vec<_>>()
-            .join("\n");
-        let expected = expected.join("\n");
-        assert_eq_text!(&expected, &actual);
-    }
-
-    #[test]
-    fn test_lambda_scope() {
-        do_check(
-            r"
-            fn quux(foo: i32) {
-                let f = |bar, baz: i32| {
-                    <|>
-                };
-            }",
-            &["bar", "baz", "foo"],
-        );
-    }
-
-    #[test]
-    fn test_call_scope() {
-        do_check(
-            r"
-            fn quux() {
-                f(|x| <|> );
-            }",
-            &["x"],
-        );
-    }
-
-    #[test]
-    fn test_method_call_scope() {
-        do_check(
-            r"
-            fn quux() {
-                z.f(|x| <|> );
-            }",
-            &["x"],
-        );
-    }
-
-    #[test]
-    fn test_loop_scope() {
-        do_check(
-            r"
-            fn quux() {
-                loop {
-                    let x = ();
-                    <|>
-                };
-            }",
-            &["x"],
-        );
-    }
-
-    #[test]
-    fn test_match() {
-        do_check(
-            r"
-            fn quux() {
-                match () {
-                    Some(x) => {
-                        <|>
-                    }
-                };
-            }",
-            &["x"],
-        );
-    }
-
-    #[test]
-    fn test_shadow_variable() {
-        do_check(
-            r"
-            fn foo(x: String) {
-                let x : &str = &x<|>;
-            }",
-            &["x"],
-        );
-    }
-
-    fn do_check_local_name(code: &str, expected_offset: u32) {
-        let (off, code) = extract_offset(code);
-
-        let (db, file_id) = TestDB::with_single_file(&code);
-        let file = db.parse(file_id).ok().unwrap();
-        let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
-            .expect("failed to find a name at the target offset");
-        let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
-        let analyzer = SourceAnalyzer::new(&db, file_id, name_ref.syntax(), None);
-
-        let local_name_entry = analyzer.resolve_local_name(&name_ref).unwrap();
-        let local_name =
-            local_name_entry.ptr().either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
-        assert_eq!(local_name.range(), expected_name.syntax().text_range());
-    }
-
-    #[test]
-    fn test_resolve_local_name() {
-        do_check_local_name(
-            r#"
-            fn foo(x: i32, y: u32) {
-                {
-                    let z = x * 2;
-                }
-                {
-                    let t = x<|> * 3;
-                }
-            }"#,
-            21,
-        );
-    }
-
-    #[test]
-    fn test_resolve_local_name_declaration() {
-        do_check_local_name(
-            r#"
-            fn foo(x: String) {
-                let x : &str = &x<|>;
-            }"#,
-            21,
-        );
-    }
-
-    #[test]
-    fn test_resolve_local_name_shadow() {
-        do_check_local_name(
-            r"
-            fn foo(x: String) {
-                let x : &str = &x;
-                x<|>
-            }
-            ",
-            53,
-        );
-    }
-
-    #[test]
-    fn ref_patterns_contribute_bindings() {
-        do_check_local_name(
-            r"
-            fn foo() {
-                if let Some(&from) = bar() {
-                    from<|>;
-                }
-            }
-            ",
-            53,
-        );
-    }
-}
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index c1ecf18b963..662d3f8808c 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -19,7 +19,6 @@ use ra_syntax::{
     SyntaxKind::*,
     SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
 };
-use rustc_hash::FxHashSet;
 
 use crate::{
     db::HirDatabase,
@@ -286,22 +285,14 @@ impl SourceAnalyzer {
     }
 
     fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
-        let mut shadowed = FxHashSet::default();
         let name = name_ref.as_name();
         let source_map = self.body_source_map.as_ref()?;
         let scopes = self.scopes.as_ref()?;
-        let scope = scope_for(scopes, source_map, self.file_id.into(), name_ref.syntax());
-        let ret = scopes
-            .scope_chain(scope)
-            .flat_map(|scope| scopes.entries(scope).iter())
-            .filter(|entry| shadowed.insert(entry.name()))
-            .filter(|entry| entry.name() == &name)
-            .nth(0);
-        ret.and_then(|entry| {
-            Some(ScopeEntryWithSyntax {
-                name: entry.name().clone(),
-                ptr: source_map.pat_syntax(entry.pat())?.ast,
-            })
+        let scope = scope_for(scopes, source_map, self.file_id.into(), name_ref.syntax())?;
+        let entry = scopes.resolve_name_in_scope(scope, &name)?;
+        Some(ScopeEntryWithSyntax {
+            name: entry.name().clone(),
+            ptr: source_map.pat_syntax(entry.pat())?.ast,
         })
     }
 
@@ -413,11 +404,6 @@ impl SourceAnalyzer {
     pub(crate) fn inference_result(&self) -> Arc<InferenceResult> {
         self.infer.clone().unwrap()
     }
-
-    #[cfg(test)]
-    pub(crate) fn scopes(&self) -> Arc<ExprScopes> {
-        self.scopes.clone().unwrap()
-    }
 }
 
 fn scope_for(
diff --git a/crates/ra_hir_def/src/body/scope.rs b/crates/ra_hir_def/src/body/scope.rs
index 09a39e721e6..10cb87d37bd 100644
--- a/crates/ra_hir_def/src/body/scope.rs
+++ b/crates/ra_hir_def/src/body/scope.rs
@@ -67,6 +67,11 @@ impl ExprScopes {
         std::iter::successors(scope, move |&scope| self.scopes[scope].parent)
     }
 
+    pub fn resolve_name_in_scope(&self, scope: ScopeId, name: &Name) -> Option<&ScopeEntry> {
+        self.scope_chain(Some(scope))
+            .find_map(|scope| self.entries(scope).iter().find(|it| it.name == *name))
+    }
+
     pub fn scope_for(&self, expr: ExprId) -> Option<ScopeId> {
         self.scope_by_expr.get(&expr).copied()
     }
@@ -163,3 +168,217 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
         e => e.walk_child_exprs(|e| compute_expr_scopes(e, body, scopes, scope)),
     };
 }
+
+#[cfg(test)]
+mod tests {
+    use hir_expand::{name::AsName, Source};
+    use ra_db::{fixture::WithFixture, FileId, SourceDatabase};
+    use ra_syntax::{algo::find_node_at_offset, ast, AstNode};
+    use test_utils::{assert_eq_text, extract_offset};
+
+    use crate::{db::DefDatabase2, test_db::TestDB, FunctionId, ModuleDefId};
+
+    fn find_function(db: &TestDB, file_id: FileId) -> FunctionId {
+        let krate = db.test_crate();
+        let crate_def_map = db.crate_def_map(krate);
+
+        let module = crate_def_map.modules_for_file(file_id).next().unwrap();
+        let (_, res) = crate_def_map[module].scope.entries().next().unwrap();
+        match res.def.take_values().unwrap() {
+            ModuleDefId::FunctionId(it) => it,
+            _ => panic!(),
+        }
+    }
+
+    fn do_check(code: &str, expected: &[&str]) {
+        let (off, code) = extract_offset(code);
+        let code = {
+            let mut buf = String::new();
+            let off = u32::from(off) as usize;
+            buf.push_str(&code[..off]);
+            buf.push_str("marker");
+            buf.push_str(&code[off..]);
+            buf
+        };
+
+        let (db, file_id) = TestDB::with_single_file(&code);
+
+        let file_syntax = db.parse(file_id).syntax_node();
+        let marker: ast::PathExpr = find_node_at_offset(&file_syntax, off).unwrap();
+        let function = find_function(&db, file_id);
+
+        let scopes = db.expr_scopes(function.into());
+        let (_body, source_map) = db.body_with_source_map(function.into());
+
+        let expr_id =
+            source_map.node_expr(Source { file_id: file_id.into(), ast: &marker.into() }).unwrap();
+        let scope = scopes.scope_for(expr_id);
+
+        let actual = scopes
+            .scope_chain(scope)
+            .flat_map(|scope| scopes.entries(scope))
+            .map(|it| it.name().to_string())
+            .collect::<Vec<_>>()
+            .join("\n");
+        let expected = expected.join("\n");
+        assert_eq_text!(&expected, &actual);
+    }
+
+    #[test]
+    fn test_lambda_scope() {
+        do_check(
+            r"
+            fn quux(foo: i32) {
+                let f = |bar, baz: i32| {
+                    <|>
+                };
+            }",
+            &["bar", "baz", "foo"],
+        );
+    }
+
+    #[test]
+    fn test_call_scope() {
+        do_check(
+            r"
+            fn quux() {
+                f(|x| <|> );
+            }",
+            &["x"],
+        );
+    }
+
+    #[test]
+    fn test_method_call_scope() {
+        do_check(
+            r"
+            fn quux() {
+                z.f(|x| <|> );
+            }",
+            &["x"],
+        );
+    }
+
+    #[test]
+    fn test_loop_scope() {
+        do_check(
+            r"
+            fn quux() {
+                loop {
+                    let x = ();
+                    <|>
+                };
+            }",
+            &["x"],
+        );
+    }
+
+    #[test]
+    fn test_match() {
+        do_check(
+            r"
+            fn quux() {
+                match () {
+                    Some(x) => {
+                        <|>
+                    }
+                };
+            }",
+            &["x"],
+        );
+    }
+
+    #[test]
+    fn test_shadow_variable() {
+        do_check(
+            r"
+            fn foo(x: String) {
+                let x : &str = &x<|>;
+            }",
+            &["x"],
+        );
+    }
+
+    fn do_check_local_name(code: &str, expected_offset: u32) {
+        let (off, code) = extract_offset(code);
+
+        let (db, file_id) = TestDB::with_single_file(&code);
+
+        let file = db.parse(file_id).ok().unwrap();
+        let expected_name = find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into())
+            .expect("failed to find a name at the target offset");
+        let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
+
+        let function = find_function(&db, file_id);
+
+        let scopes = db.expr_scopes(function.into());
+        let (_body, source_map) = db.body_with_source_map(function.into());
+
+        let expr_scope = {
+            let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();
+            let expr_id =
+                source_map.node_expr(Source { file_id: file_id.into(), ast: &expr_ast }).unwrap();
+            scopes.scope_for(expr_id).unwrap()
+        };
+
+        let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
+        let pat_src = source_map.pat_syntax(resolved.pat()).unwrap();
+
+        let local_name = pat_src.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
+        assert_eq!(local_name.range(), expected_name.syntax().text_range());
+    }
+
+    #[test]
+    fn test_resolve_local_name() {
+        do_check_local_name(
+            r#"
+            fn foo(x: i32, y: u32) {
+                {
+                    let z = x * 2;
+                }
+                {
+                    let t = x<|> * 3;
+                }
+            }"#,
+            21,
+        );
+    }
+
+    #[test]
+    fn test_resolve_local_name_declaration() {
+        do_check_local_name(
+            r#"
+            fn foo(x: String) {
+                let x : &str = &x<|>;
+            }"#,
+            21,
+        );
+    }
+
+    #[test]
+    fn test_resolve_local_name_shadow() {
+        do_check_local_name(
+            r"
+            fn foo(x: String) {
+                let x : &str = &x;
+                x<|>
+            }
+            ",
+            53,
+        );
+    }
+
+    #[test]
+    fn ref_patterns_contribute_bindings() {
+        do_check_local_name(
+            r"
+            fn foo() {
+                if let Some(&from) = bar() {
+                    from<|>;
+                }
+            }
+            ",
+            53,
+        );
+    }
+}
diff --git a/crates/ra_hir_def/src/nameres.rs b/crates/ra_hir_def/src/nameres.rs
index 5fc59215008..21d5f62e042 100644
--- a/crates/ra_hir_def/src/nameres.rs
+++ b/crates/ra_hir_def/src/nameres.rs
@@ -58,7 +58,7 @@ mod tests;
 
 use std::sync::Arc;
 
-use hir_expand::{diagnostics::DiagnosticSink, name::Name, MacroDefId};
+use hir_expand::{ast_id_map::FileAstId, diagnostics::DiagnosticSink, name::Name, MacroDefId};
 use once_cell::sync::Lazy;
 use ra_arena::Arena;
 use ra_db::{CrateId, Edition, FileId};
@@ -73,7 +73,7 @@ use crate::{
         diagnostics::DefDiagnostic, path_resolution::ResolveMode, per_ns::PerNs, raw::ImportId,
     },
     path::Path,
-    AstId, CrateModuleId, ModuleDefId, ModuleId, TraitId,
+    AstId, CrateModuleId, FunctionId, ModuleDefId, ModuleId, TraitId,
 };
 
 /// Contains all top-level defs from a macro-expanded crate
@@ -124,6 +124,11 @@ pub struct ModuleData {
     pub definition: Option<FileId>,
 }
 
+#[derive(Default, Debug, PartialEq, Eq, Clone)]
+pub(crate) struct Declarations {
+    fns: FxHashMap<FileAstId<ast::FnDef>, FunctionId>,
+}
+
 #[derive(Debug, Default, PartialEq, Eq, Clone)]
 pub struct ModuleScope {
     pub items: FxHashMap<Name, Resolution>,
diff --git a/crates/ra_hir_def/src/nameres/collector.rs b/crates/ra_hir_def/src/nameres/collector.rs
index 83eef821f76..5c899aff35c 100644
--- a/crates/ra_hir_def/src/nameres/collector.rs
+++ b/crates/ra_hir_def/src/nameres/collector.rs
@@ -664,7 +664,8 @@ where
         let name = def.name.clone();
         let def: PerNs = match def.kind {
             raw::DefKind::Function(ast_id) => {
-                PerNs::values(FunctionId::from_ast_id(ctx, ast_id).into())
+                let f = FunctionId::from_ast_id(ctx, ast_id);
+                PerNs::values(f.into())
             }
             raw::DefKind::Struct(ast_id) => {
                 let id = StructOrUnionId::from_ast_id(ctx, ast_id).into();

From c3f84960aa99529a3afc8f28c16e657fb071db5f Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Fri, 15 Nov 2019 14:53:09 +0300
Subject: [PATCH 3/3] Flatten expr module

---
 crates/ra_hir/src/code_model.rs      |   2 +-
 crates/ra_hir/src/expr.rs            | 132 +++++++++++++++++++++-
 crates/ra_hir/src/expr/validation.rs | 137 ---------------------------
 3 files changed, 130 insertions(+), 141 deletions(-)
 delete mode 100644 crates/ra_hir/src/expr/validation.rs

diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index dd43271f4ca..078bd86090e 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -23,7 +23,7 @@ use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner};
 use crate::{
     adt::VariantDef,
     db::{AstDatabase, DefDatabase, HirDatabase},
-    expr::{validation::ExprValidator, BindingAnnotation, Body, BodySourceMap, Pat, PatId},
+    expr::{BindingAnnotation, Body, BodySourceMap, ExprValidator, Pat, PatId},
     generics::{GenericDef, HasGenericParams},
     ids::{
         AstItemDef, ConstId, EnumId, FunctionId, MacroDefId, StaticId, StructId, TraitId,
diff --git a/crates/ra_hir/src/expr.rs b/crates/ra_hir/src/expr.rs
index e4598eec074..e3733779e9c 100644
--- a/crates/ra_hir/src/expr.rs
+++ b/crates/ra_hir/src/expr.rs
@@ -1,12 +1,19 @@
 //! FIXME: write short doc here
 
-pub(crate) mod validation;
-
 use std::sync::Arc;
 
+use hir_def::path::known;
+use hir_expand::diagnostics::DiagnosticSink;
+use ra_syntax::ast;
 use ra_syntax::AstPtr;
+use rustc_hash::FxHashSet;
 
-use crate::{db::HirDatabase, DefWithBody, HasBody, Resolver};
+use crate::{
+    db::HirDatabase,
+    diagnostics::{MissingFields, MissingOkInTailExpr},
+    ty::{ApplicationTy, InferenceResult, Ty, TypeCtor},
+    Adt, DefWithBody, Function, HasBody, Name, Path, Resolver,
+};
 
 pub use hir_def::{
     body::{
@@ -42,3 +49,122 @@ pub(crate) fn resolver_for_scope(
     }
     r
 }
+
+pub(crate) struct ExprValidator<'a, 'b: 'a> {
+    func: Function,
+    infer: Arc<InferenceResult>,
+    sink: &'a mut DiagnosticSink<'b>,
+}
+
+impl<'a, 'b> ExprValidator<'a, 'b> {
+    pub(crate) fn new(
+        func: Function,
+        infer: Arc<InferenceResult>,
+        sink: &'a mut DiagnosticSink<'b>,
+    ) -> ExprValidator<'a, 'b> {
+        ExprValidator { func, infer, sink }
+    }
+
+    pub(crate) fn validate_body(&mut self, db: &impl HirDatabase) {
+        let body = self.func.body(db);
+
+        for e in body.exprs() {
+            if let (id, Expr::RecordLit { path, fields, spread }) = e {
+                self.validate_record_literal(id, path, fields, *spread, db);
+            }
+        }
+
+        let body_expr = &body[body.body_expr()];
+        if let Expr::Block { statements: _, tail: Some(t) } = body_expr {
+            self.validate_results_in_tail_expr(body.body_expr(), *t, db);
+        }
+    }
+
+    fn validate_record_literal(
+        &mut self,
+        id: ExprId,
+        _path: &Option<Path>,
+        fields: &[RecordLitField],
+        spread: Option<ExprId>,
+        db: &impl HirDatabase,
+    ) {
+        if spread.is_some() {
+            return;
+        }
+
+        let struct_def = match self.infer[id].as_adt() {
+            Some((Adt::Struct(s), _)) => s,
+            _ => return,
+        };
+
+        let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
+        let missed_fields: Vec<Name> = struct_def
+            .fields(db)
+            .iter()
+            .filter_map(|f| {
+                let name = f.name(db);
+                if lit_fields.contains(&name) {
+                    None
+                } else {
+                    Some(name)
+                }
+            })
+            .collect();
+        if missed_fields.is_empty() {
+            return;
+        }
+        let source_map = self.func.body_source_map(db);
+
+        if let Some(source_ptr) = source_map.expr_syntax(id) {
+            if let Some(expr) = source_ptr.ast.a() {
+                let root = source_ptr.file_syntax(db);
+                if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
+                    if let Some(field_list) = record_lit.record_field_list() {
+                        self.sink.push(MissingFields {
+                            file: source_ptr.file_id,
+                            field_list: AstPtr::new(&field_list),
+                            missed_fields,
+                        })
+                    }
+                }
+            }
+        }
+    }
+
+    fn validate_results_in_tail_expr(
+        &mut self,
+        body_id: ExprId,
+        id: ExprId,
+        db: &impl HirDatabase,
+    ) {
+        // the mismatch will be on the whole block currently
+        let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
+            Some(m) => m,
+            None => return,
+        };
+
+        let std_result_path = known::std_result_result();
+
+        let resolver = self.func.resolver(db);
+        let std_result_enum = match resolver.resolve_known_enum(db, &std_result_path) {
+            Some(it) => it,
+            _ => return,
+        };
+
+        let std_result_ctor = TypeCtor::Adt(Adt::Enum(std_result_enum));
+        let params = match &mismatch.expected {
+            Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &std_result_ctor => parameters,
+            _ => return,
+        };
+
+        if params.len() == 2 && &params[0] == &mismatch.actual {
+            let source_map = self.func.body_source_map(db);
+
+            if let Some(source_ptr) = source_map.expr_syntax(id) {
+                if let Some(expr) = source_ptr.ast.a() {
+                    self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr });
+                }
+            }
+        }
+    }
+}
diff --git a/crates/ra_hir/src/expr/validation.rs b/crates/ra_hir/src/expr/validation.rs
deleted file mode 100644
index 3054f1dcedf..00000000000
--- a/crates/ra_hir/src/expr/validation.rs
+++ /dev/null
@@ -1,137 +0,0 @@
-//! FIXME: write short doc here
-
-use std::sync::Arc;
-
-use hir_def::path::known;
-use hir_expand::diagnostics::DiagnosticSink;
-use ra_syntax::ast;
-use rustc_hash::FxHashSet;
-
-use crate::{
-    db::HirDatabase,
-    diagnostics::{MissingFields, MissingOkInTailExpr},
-    expr::AstPtr,
-    ty::{ApplicationTy, InferenceResult, Ty, TypeCtor},
-    Adt, Function, Name, Path,
-};
-
-use super::{Expr, ExprId, RecordLitField};
-
-pub(crate) struct ExprValidator<'a, 'b: 'a> {
-    func: Function,
-    infer: Arc<InferenceResult>,
-    sink: &'a mut DiagnosticSink<'b>,
-}
-
-impl<'a, 'b> ExprValidator<'a, 'b> {
-    pub(crate) fn new(
-        func: Function,
-        infer: Arc<InferenceResult>,
-        sink: &'a mut DiagnosticSink<'b>,
-    ) -> ExprValidator<'a, 'b> {
-        ExprValidator { func, infer, sink }
-    }
-
-    pub(crate) fn validate_body(&mut self, db: &impl HirDatabase) {
-        let body = self.func.body(db);
-
-        for e in body.exprs() {
-            if let (id, Expr::RecordLit { path, fields, spread }) = e {
-                self.validate_record_literal(id, path, fields, *spread, db);
-            }
-        }
-
-        let body_expr = &body[body.body_expr()];
-        if let Expr::Block { statements: _, tail: Some(t) } = body_expr {
-            self.validate_results_in_tail_expr(body.body_expr(), *t, db);
-        }
-    }
-
-    fn validate_record_literal(
-        &mut self,
-        id: ExprId,
-        _path: &Option<Path>,
-        fields: &[RecordLitField],
-        spread: Option<ExprId>,
-        db: &impl HirDatabase,
-    ) {
-        if spread.is_some() {
-            return;
-        }
-
-        let struct_def = match self.infer[id].as_adt() {
-            Some((Adt::Struct(s), _)) => s,
-            _ => return,
-        };
-
-        let lit_fields: FxHashSet<_> = fields.iter().map(|f| &f.name).collect();
-        let missed_fields: Vec<Name> = struct_def
-            .fields(db)
-            .iter()
-            .filter_map(|f| {
-                let name = f.name(db);
-                if lit_fields.contains(&name) {
-                    None
-                } else {
-                    Some(name)
-                }
-            })
-            .collect();
-        if missed_fields.is_empty() {
-            return;
-        }
-        let source_map = self.func.body_source_map(db);
-
-        if let Some(source_ptr) = source_map.expr_syntax(id) {
-            if let Some(expr) = source_ptr.ast.a() {
-                let root = source_ptr.file_syntax(db);
-                if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
-                    if let Some(field_list) = record_lit.record_field_list() {
-                        self.sink.push(MissingFields {
-                            file: source_ptr.file_id,
-                            field_list: AstPtr::new(&field_list),
-                            missed_fields,
-                        })
-                    }
-                }
-            }
-        }
-    }
-
-    fn validate_results_in_tail_expr(
-        &mut self,
-        body_id: ExprId,
-        id: ExprId,
-        db: &impl HirDatabase,
-    ) {
-        // the mismatch will be on the whole block currently
-        let mismatch = match self.infer.type_mismatch_for_expr(body_id) {
-            Some(m) => m,
-            None => return,
-        };
-
-        let std_result_path = known::std_result_result();
-
-        let resolver = self.func.resolver(db);
-        let std_result_enum = match resolver.resolve_known_enum(db, &std_result_path) {
-            Some(it) => it,
-            _ => return,
-        };
-
-        let std_result_ctor = TypeCtor::Adt(Adt::Enum(std_result_enum));
-        let params = match &mismatch.expected {
-            Ty::Apply(ApplicationTy { ctor, parameters }) if ctor == &std_result_ctor => parameters,
-            _ => return,
-        };
-
-        if params.len() == 2 && &params[0] == &mismatch.actual {
-            let source_map = self.func.body_source_map(db);
-
-            if let Some(source_ptr) = source_map.expr_syntax(id) {
-                if let Some(expr) = source_ptr.ast.a() {
-                    self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr });
-                }
-            }
-        }
-    }
-}