449: switch to new rowan API r=matklad a=matklad

closes https://github.com/rust-analyzer/rust-analyzer/issues/448

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] 2019-01-08 09:05:55 +00:00
commit 3f4be81912
61 changed files with 2486 additions and 3744 deletions
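
For orientation, most of the diff below is the same mechanical change repeated: rowan 0.1's lifetime-parameterized borrowed types (SyntaxNodeRef<'a>, ast::FnDef<'a>, SourceFileNode, converted with .owned()/.borrowed()) are replaced by rowan 0.2's plain references (&SyntaxNode, &ast::FnDef) plus an owning handle, TreePtr<SourceFile>, obtained with .to_owned(). The sketch below shows that calling-convention shift with self-contained stand-in types; it is illustrative only, not the real rowan/ra_syntax API.

// Illustrative sketch (not part of this commit): stand-in types only.
use std::sync::Arc;

#[derive(Debug)]
struct SyntaxNode {
    kind: &'static str,
    children: Vec<Arc<SyntaxNode>>,
}

// Owned, reference-counted handle to a node; plays the role of `TreePtr<T>`
// in the diff. The old API converted with `.owned()`, the new one uses
// `.to_owned()`.
type TreePtr<T> = Arc<T>;

// Old style: `fn is_in_loop_body(leaf: SyntaxNodeRef<'_>) -> bool`, where
// `SyntaxNodeRef<'a>` was a Copy, lifetime-carrying view type.
// New style: a plain borrow of the single `SyntaxNode` type.
fn is_loop_like(node: &SyntaxNode) -> bool {
    matches!(node.kind, "LOOP_EXPR" | "WHILE_EXPR" | "FOR_EXPR")
}

// Functions that hand a node back out now return an owning pointer
// (`TreePtr<SyntaxNode>` in the diff, an `Arc` clone in this sketch).
fn find_loop(node: &TreePtr<SyntaxNode>) -> Option<TreePtr<SyntaxNode>> {
    if is_loop_like(node) {
        return Some(Arc::clone(node));
    }
    node.children.iter().find_map(find_loop)
}

fn main() {
    let tree: TreePtr<SyntaxNode> = Arc::new(SyntaxNode {
        kind: "FN_DEF",
        children: vec![Arc::new(SyntaxNode { kind: "LOOP_EXPR", children: vec![] })],
    });
    println!("{:?}", find_loop(&tree).map(|n| n.kind)); // Some("LOOP_EXPR")
}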

Cargo.lock (generated; 30 lines changed)

@ -221,7 +221,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.23 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -284,7 +284,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.23 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -295,7 +295,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "flexi_logger"
version = "0.10.3"
version = "0.10.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -501,7 +501,7 @@ dependencies = [
"num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.23 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -585,7 +585,7 @@ dependencies = [
"pest_meta 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.23 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -706,7 +706,7 @@ version = "0.1.0"
dependencies = [
"arrayvec 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)",
"ena 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"flexi_logger 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"flexi_logger 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_arena 0.1.0",
@ -728,7 +728,7 @@ dependencies = [
"drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"flexi_logger 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)",
"flexi_logger 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)",
"gen_lsp_server 0.1.0",
"im 12.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"languageserver-types 0.53.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -764,7 +764,7 @@ dependencies = [
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_text_edit 0.1.0",
"rowan 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"rowan 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"test_utils 0.1.0",
"text_unit 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -969,7 +969,7 @@ dependencies = [
[[package]]
name = "rowan"
version = "0.1.4"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1075,7 +1075,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.23 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -1140,7 +1140,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "syn"
version = "0.15.23"
version = "0.15.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1155,7 +1155,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.23 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -1510,7 +1510,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "795bd83d3abeb9220f257e597aa0080a508b27533824adf336529648f6abf7e2"
"checksum failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "ea1063915fd7ef4309e222a5a07cf9c319fb9c7836b1f89b85458672dbb127e1"
"checksum fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
"checksum flexi_logger 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4dda06444ccc8b0a6da19d939989b4a4e83f328710ada449eedaed48c8b903cd"
"checksum flexi_logger 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7d3681306880a7ce87740ceb3d1ce98ca92ae636ff30a629494488cbbcf85ff8"
"checksum fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "2fad85553e09a6f881f739c29f0b00b0f01357c743266d478b68951ce23285f3"
"checksum fst 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "db72126ca7dff566cdbbdd54af44668c544897d9d3862b198141f176f1238bdf"
"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
@ -1571,7 +1571,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c"
"checksum remove_dir_all 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3488ba1b9a2084d38645c4c08276a1752dcbf2c7130d74f1569681ad5d2799c5"
"checksum ron 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c48677d8a9247a4e0d1f3f9cb4b0a8e29167fdc3c04f383a5e669cd7a960ae0f"
"checksum rowan 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c218b4430ab922850b71b14fa9bca224425097f935f6155c0b6a4b1f398a54f0"
"checksum rowan 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ae9ae7dba5e703f423ceb8646d636c73e6d858a2f8c834808b4565e42ccda9e2"
"checksum rustc-demangle 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "adacaae16d02b6ec37fdc7acfcddf365978de76d1983d3ee22afc260e1ca9619"
"checksum rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8"
"checksum rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "138e3e0acb6c9fb258b19b67cb8abd63c00679d2851805ea151465464fe9030a"
@ -1593,7 +1593,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum stable_deref_trait 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dba1a27d3efae4351c8051072d619e3ade2820635c3958d826bfea39d59b54c8"
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum superslice 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b50b13d42370e0f5fc62eafdd5c2d20065eaf5458dab215ff3e20e63eea96b30"
"checksum syn 0.15.23 (registry+https://github.com/rust-lang/crates.io-index)" = "9545a6a093a3f0bd59adb472700acc08cad3776f860f16a897dfce8c88721cbc"
"checksum syn 0.15.24 (registry+https://github.com/rust-lang/crates.io-index)" = "734ecc29cd36e8123850d9bf21dfd62ef8300aaa8f879aabaa899721808be37c"
"checksum synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015"
"checksum tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7e91405c14320e5c79b3d148e1c86f40749a36e490642202a31689cb1a3452b2"
"checksum tera 0.11.20 (registry+https://github.com/rust-lang/crates.io-index)" = "4b505279e19d8f7d24b1a9dc58327c9c36174b1a2c7ebdeac70792d017cb64f3"


@ -39,9 +39,9 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
.add_to(acc)
});
fn process<'a, N: ast::FnDefOwner<'a>>(
node: N,
params: &mut FxHashMap<String, (u32, ast::Param<'a>)>,
fn process<'a, N: ast::FnDefOwner>(
node: &'a N,
params: &mut FxHashMap<String, (u32, &'a ast::Param)>,
) {
node.functions()
.filter_map(|it| it.param_list())
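
In the hunk above, the owner traits drop their lifetime parameter: where the old code wrote `N: ast::FnDefOwner<'a>` and took `node: N` by value, the new code writes `N: ast::FnDefOwner` and borrows `node: &'a N`, with the returned AST nodes borrowed for the same lifetime. A small stand-in sketch of that generic-helper shape after the change (hypothetical trait and types, not the generated ra_syntax ones):

// Illustrative sketch (not part of this commit): stand-in types only.
#[derive(Debug)]
struct FnDef {
    name: String,
}

struct SourceFile {
    fns: Vec<FnDef>,
}

// New-style owner trait: no lifetime parameter on the trait itself.
trait FnDefOwner {
    fn functions(&self) -> &[FnDef];
}

impl FnDefOwner for SourceFile {
    fn functions(&self) -> &[FnDef] {
        &self.fns
    }
}

// Generic helper in the new style: borrow the owner, hand back borrowed nodes.
// (Previously: `fn first_function<'a, N: FnDefOwner<'a>>(node: N) -> Option<FnDef<'a>>`.)
fn first_function<N: FnDefOwner>(node: &N) -> Option<&FnDef> {
    node.functions().first()
}

fn main() {
    let file = SourceFile {
        fns: vec![FnDef { name: "main".to_string() }],
    };
    println!("{:?}", first_function(&file)); // Some(FnDef { name: "main" })
}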


@ -2,7 +2,7 @@ use ra_syntax::{
algo::visit::{visitor, Visitor},
AstNode,
ast::{self, LoopBodyOwner},
SyntaxKind::*, SyntaxNodeRef,
SyntaxKind::*, SyntaxNode,
};
use crate::completion::{CompletionContext, CompletionItem, Completions, CompletionKind, CompletionItemKind};
@ -76,7 +76,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
acc.add_all(complete_return(fn_def, ctx.can_be_stmt));
}
fn is_in_loop_body(leaf: SyntaxNodeRef) -> bool {
fn is_in_loop_body(leaf: &SyntaxNode) -> bool {
for node in leaf.ancestors() {
if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
break;
@ -95,7 +95,7 @@ fn is_in_loop_body(leaf: SyntaxNodeRef) -> bool {
false
}
fn complete_return(fn_def: ast::FnDef, can_be_stmt: bool) -> Option<CompletionItem> {
fn complete_return(fn_def: &ast::FnDef, can_be_stmt: bool) -> Option<CompletionItem> {
let snip = match (can_be_stmt, fn_def.ret_type().is_some()) {
(true, true) => "return $0;",
(true, false) => "return;",


@ -1,13 +1,9 @@
use ra_editor::find_node_at_offset;
use ra_text_edit::AtomTextEdit;
use ra_syntax::{
algo::{find_leaf_at_offset, find_covering_node},
AstNode, SyntaxNode, SourceFile, TextUnit, TextRange,
ast,
AstNode,
SyntaxNodeRef,
SourceFileNode,
TextUnit,
TextRange,
algo::{find_leaf_at_offset, find_covering_node},
SyntaxKind::*,
};
use hir::source_binder;
@ -20,11 +16,11 @@ use crate::{db, FilePosition, Cancelable};
pub(super) struct CompletionContext<'a> {
pub(super) db: &'a db::RootDatabase,
pub(super) offset: TextUnit,
pub(super) leaf: SyntaxNodeRef<'a>,
pub(super) leaf: &'a SyntaxNode,
pub(super) module: Option<hir::Module>,
pub(super) function: Option<hir::Function>,
pub(super) function_syntax: Option<ast::FnDef<'a>>,
pub(super) use_item_syntax: Option<ast::UseItem<'a>>,
pub(super) function_syntax: Option<&'a ast::FnDef>,
pub(super) use_item_syntax: Option<&'a ast::UseItem>,
pub(super) is_param: bool,
/// A single-indent path, like `foo`.
pub(super) is_trivial_path: bool,
@ -36,7 +32,7 @@ pub(super) struct CompletionContext<'a> {
/// Something is typed at the "top" level, in module or impl/trait.
pub(super) is_new_item: bool,
/// The receiver if this is a field or method access, i.e. writing something.<|>
pub(super) dot_receiver: Option<ast::Expr<'a>>,
pub(super) dot_receiver: Option<&'a ast::Expr>,
/// If this is a method call in particular, i.e. the () are already there.
pub(super) is_method_call: bool,
}
@ -44,7 +40,7 @@ pub(super) struct CompletionContext<'a> {
impl<'a> CompletionContext<'a> {
pub(super) fn new(
db: &'a db::RootDatabase,
original_file: &'a SourceFileNode,
original_file: &'a SourceFile,
position: FilePosition,
) -> Cancelable<Option<CompletionContext<'a>>> {
let module = source_binder::module_from_position(db, position)?;
@ -71,7 +67,7 @@ impl<'a> CompletionContext<'a> {
Ok(Some(ctx))
}
fn fill(&mut self, original_file: &'a SourceFileNode, offset: TextUnit) {
fn fill(&mut self, original_file: &'a SourceFile, offset: TextUnit) {
// Insert a fake ident to get a valid parse tree. We will use this file
// to determine context, though the original_file will be used for
// actual completion.
@ -100,7 +96,7 @@ impl<'a> CompletionContext<'a> {
}
}
}
fn classify_name_ref(&mut self, original_file: &'a SourceFileNode, name_ref: ast::NameRef) {
fn classify_name_ref(&mut self, original_file: &'a SourceFile, name_ref: &ast::NameRef) {
let name_range = name_ref.syntax().range();
let top_node = name_ref
.syntax()
@ -197,15 +193,12 @@ impl<'a> CompletionContext<'a> {
}
}
fn find_node_with_range<'a, N: AstNode<'a>>(
syntax: SyntaxNodeRef<'a>,
range: TextRange,
) -> Option<N> {
fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Option<&N> {
let node = find_covering_node(syntax, range);
node.ancestors().find_map(N::cast)
}
fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool {
fn is_node<N: AstNode>(node: &SyntaxNode) -> bool {
match node.ancestors().filter_map(N::cast).next() {
None => false,
Some(n) => n.syntax().range() == node.range(),
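
The `find_node_with_range`/`is_node` rewrites above lean on the new `AstNode::cast` shape: it takes `&SyntaxNode` and returns `Option<&N>` instead of a lifetime-carrying wrapper by value. One way this shape can work (the exact machinery is not shown in this diff; ra_syntax generates something along these lines) is a transparent newtype over the node, sketched here with stand-in types:

// Illustrative sketch (not part of this commit): stand-in types only.
#[derive(Debug)]
struct SyntaxNode {
    kind: &'static str,
}

// A typed view over an untyped node: same layout, so a reference to the
// node can be reinterpreted as a reference to the typed wrapper.
#[repr(transparent)]
#[derive(Debug)]
struct FnDef(SyntaxNode);

impl FnDef {
    // New-style cast: `&SyntaxNode` in, `Option<&FnDef>` out, no lifetime
    // parameter on the AST type itself.
    fn cast(node: &SyntaxNode) -> Option<&FnDef> {
        if node.kind == "FN_DEF" {
            // SAFETY: `FnDef` is a `#[repr(transparent)]` wrapper around
            // `SyntaxNode`, so the pointer cast preserves layout and validity.
            Some(unsafe { &*(node as *const SyntaxNode as *const FnDef) })
        } else {
            None
        }
    }

    fn syntax(&self) -> &SyntaxNode {
        &self.0
    }
}

fn main() {
    let node = SyntaxNode { kind: "FN_DEF" };
    let fn_def = FnDef::cast(&node).expect("kind matched");
    println!("{}", fn_def.syntax().kind); // FN_DEF
}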


@ -1,6 +1,6 @@
use ra_db::SyntaxDatabase;
use ra_syntax::{
SyntaxNodeRef, AstNode, SourceFileNode,
SyntaxNode, AstNode, SourceFile,
ast, algo::find_covering_node,
};
@ -19,18 +19,18 @@ pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRang
fn extend_selection_in_macro(
_db: &RootDatabase,
source_file: &SourceFileNode,
source_file: &SourceFile,
frange: FileRange,
) -> Option<TextRange> {
let macro_call = find_macro_call(source_file.syntax(), frange.range)?;
let (off, exp) = hir::MacroDef::ast_expand(macro_call)?;
let dst_range = exp.map_range_forward(frange.range - off)?;
let dst_range = ra_editor::extend_selection(exp.syntax().borrowed(), dst_range)?;
let dst_range = ra_editor::extend_selection(&exp.syntax(), dst_range)?;
let src_range = exp.map_range_back(dst_range)? + off;
Some(src_range)
}
fn find_macro_call(node: SyntaxNodeRef, range: TextRange) -> Option<ast::MacroCall> {
fn find_macro_call(node: &SyntaxNode, range: TextRange) -> Option<&ast::MacroCall> {
find_covering_node(node, range)
.ancestors()
.find_map(ast::MacroCall::cast)


@ -23,7 +23,7 @@ pub(crate) fn goto_defenition(
pub(crate) fn reference_defenition(
db: &RootDatabase,
file_id: FileId,
name_ref: ast::NameRef,
name_ref: &ast::NameRef,
) -> Cancelable<Vec<NavigationTarget>> {
if let Some(fn_descr) =
hir::source_binder::function_from_child_node(db, file_id, name_ref.syntax())?
@ -53,7 +53,7 @@ pub(crate) fn reference_defenition(
fn name_defenition(
db: &RootDatabase,
file_id: FileId,
name: ast::Name,
name: &ast::Name,
) -> Cancelable<Option<Vec<NavigationTarget>>> {
if let Some(module) = name.syntax().parent().and_then(ast::Module::cast) {
if module.has_semi() {


@ -1,7 +1,7 @@
use ra_db::{Cancelable, SyntaxDatabase};
use ra_editor::find_node_at_offset;
use ra_syntax::{
AstNode, SyntaxNode,
AstNode, SyntaxNode, TreePtr,
ast::{self, NameOwner},
algo::{find_covering_node, find_leaf_at_offset, visit::{visitor, Visitor}},
};
@ -88,20 +88,19 @@ fn doc_text_for(db: &RootDatabase, nav: NavigationTarget) -> Cancelable<Option<S
}
impl NavigationTarget {
fn node(&self, db: &RootDatabase) -> Option<SyntaxNode> {
fn node(&self, db: &RootDatabase) -> Option<TreePtr<SyntaxNode>> {
let source_file = db.source_file(self.file_id);
let source_file = source_file.syntax();
let node = source_file
.descendants()
.find(|node| node.kind() == self.kind && node.range() == self.range)?
.owned();
.to_owned();
Some(node)
}
fn docs(&self, db: &RootDatabase) -> Option<String> {
let node = self.node(db)?;
let node = node.borrowed();
fn doc_comments<'a, N: ast::DocCommentsOwner<'a>>(node: N) -> Option<String> {
fn doc_comments<N: ast::DocCommentsOwner>(node: &N) -> Option<String> {
let comments = node.doc_comment_text();
if comments.is_empty() {
None
@ -119,7 +118,7 @@ impl NavigationTarget {
.visit(doc_comments::<ast::TypeDef>)
.visit(doc_comments::<ast::ConstDef>)
.visit(doc_comments::<ast::StaticDef>)
.accept(node)?
.accept(&node)?
}
/// Get a description of this node.
@ -128,50 +127,49 @@ impl NavigationTarget {
fn description(&self, db: &RootDatabase) -> Option<String> {
// TODO: After type inference is done, add type information to improve the output
let node = self.node(db)?;
let node = node.borrowed();
// TODO: Refactor to be have less repetition
visitor()
.visit(|node: ast::FnDef| {
.visit(|node: &ast::FnDef| {
let mut string = "fn ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: ast::StructDef| {
.visit(|node: &ast::StructDef| {
let mut string = "struct ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: ast::EnumDef| {
.visit(|node: &ast::EnumDef| {
let mut string = "enum ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: ast::TraitDef| {
.visit(|node: &ast::TraitDef| {
let mut string = "trait ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: ast::Module| {
.visit(|node: &ast::Module| {
let mut string = "mod ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: ast::TypeDef| {
.visit(|node: &ast::TypeDef| {
let mut string = "type ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: ast::ConstDef| {
.visit(|node: &ast::ConstDef| {
let mut string = "const ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.visit(|node: ast::StaticDef| {
.visit(|node: &ast::StaticDef| {
let mut string = "static ".to_string();
node.name()?.syntax().text().push_to(&mut string);
Some(string)
})
.accept(node)?
.accept(&node)?
}
}


@ -8,10 +8,9 @@ use hir::{
use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase};
use ra_editor::{self, find_node_at_offset, assists, LocalEdit, Severity};
use ra_syntax::{
ast::{self, ArgListOwner, Expr, NameOwner},
AstNode, SourceFileNode,
SyntaxNode, TextRange, TextUnit, AstNode, SourceFile,
ast::{self, ArgListOwner, NameOwner},
SyntaxKind::*,
SyntaxNodeRef, TextRange, TextUnit,
};
use crate::{
@ -113,7 +112,6 @@ impl db::RootDatabase {
None => return Ok(Vec::new()),
Some(it) => it,
};
let ast_module = ast_module.borrowed();
let name = ast_module.name().unwrap();
Ok(vec![NavigationTarget {
file_id,
@ -163,9 +161,9 @@ impl db::RootDatabase {
fn find_binding<'a>(
db: &db::RootDatabase,
source_file: &'a SourceFileNode,
source_file: &'a SourceFile,
position: FilePosition,
) -> Cancelable<Option<(ast::BindPat<'a>, hir::Function)>> {
) -> Cancelable<Option<(&'a ast::BindPat, hir::Function)>> {
let syntax = source_file.syntax();
if let Some(binding) = find_node_at_offset::<ast::BindPat>(syntax, position.offset) {
let descr = ctry!(source_binder::function_from_child_node(
@ -281,7 +279,7 @@ impl db::RootDatabase {
if symbol.ptr.kind() == FN_DEF {
let fn_file = self.source_file(symbol.file_id);
let fn_def = symbol.ptr.resolve(&fn_file);
let fn_def = ast::FnDef::cast(fn_def.borrowed()).unwrap();
let fn_def = ast::FnDef::cast(&fn_def).unwrap();
let descr = ctry!(source_binder::function_from_source(
self,
symbol.file_id,
@ -352,7 +350,7 @@ impl db::RootDatabase {
.collect::<Vec<_>>();
Ok(res)
}
pub(crate) fn index_resolve(&self, name_ref: ast::NameRef) -> Cancelable<Vec<FileSymbol>> {
pub(crate) fn index_resolve(&self, name_ref: &ast::NameRef) -> Cancelable<Vec<FileSymbol>> {
let name = name_ref.text();
let mut query = Query::new(name.to_string());
query.exact();
@ -379,12 +377,12 @@ impl SourceChange {
}
enum FnCallNode<'a> {
CallExpr(ast::CallExpr<'a>),
MethodCallExpr(ast::MethodCallExpr<'a>),
CallExpr(&'a ast::CallExpr),
MethodCallExpr(&'a ast::MethodCallExpr),
}
impl<'a> FnCallNode<'a> {
pub fn with_node(syntax: SyntaxNodeRef, offset: TextUnit) -> Option<FnCallNode> {
pub fn with_node(syntax: &'a SyntaxNode, offset: TextUnit) -> Option<FnCallNode<'a>> {
if let Some(expr) = find_node_at_offset::<ast::CallExpr>(syntax, offset) {
return Some(FnCallNode::CallExpr(expr));
}
@ -394,10 +392,10 @@ impl<'a> FnCallNode<'a> {
None
}
pub fn name_ref(&self) -> Option<ast::NameRef> {
pub fn name_ref(&self) -> Option<&'a ast::NameRef> {
match *self {
FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()? {
Expr::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?,
FnCallNode::CallExpr(call_expr) => Some(match call_expr.expr()?.kind() {
ast::ExprKind::PathExpr(path_expr) => path_expr.path()?.segment()?.name_ref()?,
_ => return None,
}),
@ -409,7 +407,7 @@ impl<'a> FnCallNode<'a> {
}
}
pub fn arg_list(&self) -> Option<ast::ArgList> {
pub fn arg_list(&self) -> Option<&'a ast::ArgList> {
match *self {
FnCallNode::CallExpr(expr) => expr.arg_list(),
FnCallNode::MethodCallExpr(expr) => expr.arg_list(),


@ -26,7 +26,7 @@ mod syntax_highlighting;
use std::{fmt, sync::Arc};
use ra_syntax::{SmolStr, SourceFileNode, SyntaxKind, TextRange, TextUnit};
use ra_syntax::{SmolStr, SourceFile, TreePtr, SyntaxKind, TextRange, TextUnit};
use ra_text_edit::TextEdit;
use rayon::prelude::*;
use relative_path::RelativePathBuf;
@ -308,7 +308,7 @@ impl Analysis {
self.db.file_text(file_id)
}
/// Gets the syntax tree of the file.
pub fn file_syntax(&self, file_id: FileId) -> SourceFileNode {
pub fn file_syntax(&self, file_id: FileId) -> TreePtr<SourceFile> {
self.db.source_file(file_id).clone()
}
/// Gets the file's `LineIndex`: data structure to convert between absolute
@ -322,7 +322,7 @@ impl Analysis {
}
/// Returns position of the mathcing brace (all types of braces are
/// supported).
pub fn matching_brace(&self, file: &SourceFileNode, offset: TextUnit) -> Option<TextUnit> {
pub fn matching_brace(&self, file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
ra_editor::matching_brace(file, offset)
}
/// Returns a syntax tree represented as `String`, for debug purposes.
@ -469,7 +469,7 @@ impl LibraryData {
files: Vec<(FileId, RelativePathBuf, Arc<String>)>,
) -> LibraryData {
let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| {
let file = SourceFileNode::parse(text);
let file = SourceFile::parse(text);
(*file_id, file)
}));
let mut root_change = RootChange::default();


@ -1,7 +1,7 @@
use itertools::Itertools;
use ra_syntax::{
TextRange, SyntaxNode,
ast::{self, AstNode, NameOwner, ModuleItemOwner},
TextRange, SyntaxNodeRef,
};
use ra_db::{Cancelable, SyntaxDatabase};
@ -30,7 +30,7 @@ pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Cancelable<Vec<Ru
Ok(res)
}
fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNodeRef) -> Option<Runnable> {
fn runnable(db: &RootDatabase, file_id: FileId, item: &SyntaxNode) -> Option<Runnable> {
if let Some(fn_def) = ast::FnDef::cast(item) {
runnable_fn(fn_def)
} else if let Some(m) = ast::Module::cast(item) {
@ -40,7 +40,7 @@ fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNodeRef) -> Option<R
}
}
fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> {
fn runnable_fn(fn_def: &ast::FnDef) -> Option<Runnable> {
let name = fn_def.name()?.text();
let kind = if name == "main" {
RunnableKind::Bin
@ -57,12 +57,12 @@ fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> {
})
}
fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> {
fn runnable_mod(db: &RootDatabase, file_id: FileId, module: &ast::Module) -> Option<Runnable> {
let has_test_function = module
.item_list()?
.items()
.filter_map(|it| match it {
ast::ModuleItem::FnDef(it) => Some(it),
.filter_map(|it| match it.kind() {
ast::ModuleItemKind::FnDef(it) => Some(it),
_ => None,
})
.any(|f| f.has_atom_attr("test"));
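
Another recurring rewrite, visible in the `runnable_mod` hunk above (and in the expression collector further down): the owned enum variants are gone, so matching on an AST enum now goes through a `kind()` accessor that borrows the underlying node, e.g. `ast::ModuleItem::FnDef(it)` becomes `it.kind()` matched against `ast::ModuleItemKind::FnDef(it)`. A small stand-in sketch of that borrow-friendly enum-projection pattern (hypothetical types, not the generated ra_syntax ones):

// Illustrative sketch (not part of this commit): stand-in types only.
#[derive(Debug)]
struct FnDef {
    has_test_attr: bool,
}

#[derive(Debug)]
struct Module;

// Untyped item plus a borrowed "kind" projection, mirroring the
// `ModuleItem::kind() -> ModuleItemKind<'_>` shape used in the diff.
enum ModuleItem {
    FnDef(FnDef),
    Module(Module),
}

enum ModuleItemKind<'a> {
    FnDef(&'a FnDef),
    Module(&'a Module),
}

impl ModuleItem {
    fn kind(&self) -> ModuleItemKind<'_> {
        match self {
            ModuleItem::FnDef(it) => ModuleItemKind::FnDef(it),
            ModuleItem::Module(it) => ModuleItemKind::Module(it),
        }
    }
}

fn has_test_function(items: &[ModuleItem]) -> bool {
    items
        .iter()
        .filter_map(|it| match it.kind() {
            ModuleItemKind::FnDef(it) => Some(it),
            _ => None,
        })
        .any(|f| f.has_test_attr)
}

fn main() {
    let items = vec![
        ModuleItem::Module(Module),
        ModuleItem::FnDef(FnDef { has_test_attr: true }),
    ];
    println!("{}", has_test_function(&items)); // true
}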


@ -27,7 +27,7 @@ use std::{
use fst::{self, Streamer};
use ra_syntax::{
SyntaxNodeRef, SourceFileNode, SmolStr,
SyntaxNode, SourceFile, SmolStr, TreePtr, AstNode,
algo::{visit::{visitor, Visitor}, find_covering_node},
SyntaxKind::{self, *},
ast::{self, NameOwner},
@ -141,7 +141,7 @@ impl SymbolIndex {
}
pub(crate) fn for_files(
files: impl ParallelIterator<Item = (FileId, SourceFileNode)>,
files: impl ParallelIterator<Item = (FileId, TreePtr<SourceFile>)>,
) -> SymbolIndex {
let symbols = files
.flat_map(|(file_id, file)| {
@ -203,8 +203,8 @@ pub(crate) struct FileSymbol {
pub(crate) ptr: LocalSyntaxPtr,
}
fn to_symbol(node: SyntaxNodeRef) -> Option<(SmolStr, LocalSyntaxPtr)> {
fn decl<'a, N: NameOwner<'a>>(node: N) -> Option<(SmolStr, LocalSyntaxPtr)> {
fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, LocalSyntaxPtr)> {
fn decl<N: NameOwner>(node: &N) -> Option<(SmolStr, LocalSyntaxPtr)> {
let name = node.name()?.text();
let ptr = LocalSyntaxPtr::new(node.syntax());
Some((name, ptr))


@ -16,7 +16,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Cancelable<Vec<Hi
.filter_map(ast::MacroCall::cast)
{
if let Some((off, exp)) = hir::MacroDef::ast_expand(macro_call) {
let mapped_ranges = ra_editor::highlight(exp.syntax().borrowed())
let mapped_ranges = ra_editor::highlight(&exp.syntax())
.into_iter()
.filter_map(|r| {
let mapped_range = exp.map_range_back(r.range)?;


@ -3,7 +3,7 @@ use std::{fs, io::Read, path::Path, time::Instant};
use clap::{App, Arg, SubCommand};
use join_to_string::join;
use ra_editor::{extend_selection, file_structure, syntax_tree};
use ra_syntax::{SourceFileNode, TextRange};
use ra_syntax::{SourceFile, TextRange, TreePtr, AstNode};
use tools::collect_tests;
type Result<T> = ::std::result::Result<T, failure::Error>;
@ -71,9 +71,9 @@ fn main() -> Result<()> {
Ok(())
}
fn file() -> Result<SourceFileNode> {
fn file() -> Result<TreePtr<SourceFile>> {
let text = read_stdin()?;
Ok(SourceFileNode::parse(&text))
Ok(SourceFile::parse(&text))
}
fn read_stdin() -> Result<String> {
@ -92,12 +92,12 @@ fn render_test(file: &Path, line: usize) -> Result<(String, String)> {
None => failure::bail!("No test found at line {} at {}", line, file.display()),
Some((_start_line, test)) => test,
};
let file = SourceFileNode::parse(&test.text);
let file = SourceFile::parse(&test.text);
let tree = syntax_tree(&file);
Ok((test.text, tree))
}
fn selections(file: &SourceFileNode, start: u32, end: u32) -> String {
fn selections(file: &SourceFile, start: u32, end: u32) -> String {
let mut ranges = Vec::new();
let mut cur = Some(TextRange::from_to((start - 1).into(), (end - 1).into()));
while let Some(r) = cur {


@ -8,7 +8,7 @@ pub mod mock;
use std::sync::Arc;
use ra_editor::LineIndex;
use ra_syntax::{TextUnit, TextRange, SourceFileNode};
use ra_syntax::{TextUnit, TextRange, SourceFile, TreePtr};
pub use crate::{
cancelation::{Canceled, Cancelable},
@ -47,7 +47,7 @@ pub trait BaseDatabase: salsa::Database {
salsa::query_group! {
pub trait SyntaxDatabase: crate::input::FilesDatabase + BaseDatabase {
fn source_file(file_id: FileId) -> SourceFileNode {
fn source_file(file_id: FileId) -> TreePtr<SourceFile> {
type SourceFileQuery;
}
fn file_lines(file_id: FileId) -> Arc<LineIndex> {
@ -56,9 +56,9 @@ salsa::query_group! {
}
}
fn source_file(db: &impl SyntaxDatabase, file_id: FileId) -> SourceFileNode {
fn source_file(db: &impl SyntaxDatabase, file_id: FileId) -> TreePtr<SourceFile> {
let text = db.file_text(file_id);
SourceFileNode::parse(&*text)
SourceFile::parse(&*text)
}
fn file_lines(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<LineIndex> {
let text = db.file_text(file_id);


@ -1,4 +1,4 @@
use ra_syntax::{SourceFileNode, SyntaxKind, SyntaxNode, SyntaxNodeRef, TextRange};
use ra_syntax::{AstNode, SourceFile, SyntaxKind, SyntaxNode, TextRange, TreePtr};
/// A pointer to a syntax node inside a file.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@ -8,18 +8,18 @@ pub struct LocalSyntaxPtr {
}
impl LocalSyntaxPtr {
pub fn new(node: SyntaxNodeRef) -> LocalSyntaxPtr {
pub fn new(node: &SyntaxNode) -> LocalSyntaxPtr {
LocalSyntaxPtr {
range: node.range(),
kind: node.kind(),
}
}
pub fn resolve(self, file: &SourceFileNode) -> SyntaxNode {
pub fn resolve(self, file: &SourceFile) -> TreePtr<SyntaxNode> {
let mut curr = file.syntax();
loop {
if curr.range() == self.range && curr.kind() == self.kind {
return curr.owned();
return curr.to_owned();
}
curr = curr
.children()
@ -40,7 +40,7 @@ impl LocalSyntaxPtr {
#[test]
fn test_local_syntax_ptr() {
use ra_syntax::{ast, AstNode};
let file = SourceFileNode::parse("struct Foo { f: u32, }");
let file = SourceFile::parse("struct Foo { f: u32, }");
let field = file
.syntax()
.descendants()
@ -48,5 +48,5 @@ fn test_local_syntax_ptr() {
.unwrap();
let ptr = LocalSyntaxPtr::new(field.syntax());
let field_syntax = ptr.resolve(&file);
assert_eq!(field.syntax(), field_syntax);
assert_eq!(field.syntax(), &*field_syntax);
}
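
The LocalSyntaxPtr change above shows the other half of the ownership story: anything that hands a node back out of a tree (here `resolve`) now returns `TreePtr<SyntaxNode>` via `to_owned()`, and callers deref (`&*field_syntax`) when they want a plain `&SyntaxNode` again. A stand-in sketch of that round trip (hypothetical types, not the real rowan/ra_syntax API):

// Illustrative sketch (not part of this commit): stand-in types only.
use std::sync::Arc;

#[derive(Debug, PartialEq, Eq)]
struct SyntaxNode {
    kind: &'static str,
    range: (u32, u32),
}

type TreePtr<T> = Arc<T>;

// New-style resolve: search the (stand-in) tree and return an owning handle,
// as `LocalSyntaxPtr::resolve` now returns `TreePtr<SyntaxNode>`.
fn resolve(
    file: &[Arc<SyntaxNode>],
    kind: &'static str,
    range: (u32, u32),
) -> Option<TreePtr<SyntaxNode>> {
    file.iter()
        .find(|n| n.kind == kind && n.range == range)
        .map(Arc::clone)
}

fn main() {
    let field = Arc::new(SyntaxNode {
        kind: "NAMED_FIELD_DEF",
        range: (13, 19),
    });
    let file = vec![Arc::clone(&field)];

    let resolved = resolve(&file, "NAMED_FIELD_DEF", (13, 19)).unwrap();
    // As in the updated test: get back to plain `&SyntaxNode` views by
    // dereferencing the owned handles before comparing.
    assert_eq!(&*field, &*resolved);
    println!("resolved a {:?} node", resolved.kind);
}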


@ -12,7 +12,7 @@ mod split_import;
use ra_text_edit::{TextEdit, TextEditBuilder};
use ra_syntax::{
Direction, SyntaxNodeRef, TextUnit, TextRange,SourceFileNode, AstNode,
Direction, SyntaxNode, TextUnit, TextRange, SourceFile, AstNode,
algo::{find_leaf_at_offset, find_covering_node, LeafAtOffset},
};
@ -28,7 +28,7 @@ pub use self::{
};
/// Return all the assists applicable at the given position.
pub fn assists(file: &SourceFileNode, range: TextRange) -> Vec<LocalEdit> {
pub fn assists(file: &SourceFile, range: TextRange) -> Vec<LocalEdit> {
let ctx = AssistCtx::new(file, range);
[
flip_comma,
@ -50,7 +50,7 @@ pub struct LocalEdit {
pub cursor_position: Option<TextUnit>,
}
fn non_trivia_sibling(node: SyntaxNodeRef, direction: Direction) -> Option<SyntaxNodeRef> {
fn non_trivia_sibling(node: &SyntaxNode, direction: Direction) -> Option<&SyntaxNode> {
node.siblings(direction)
.skip(1)
.find(|node| !node.kind().is_trivia())
@ -88,7 +88,7 @@ fn non_trivia_sibling(node: SyntaxNodeRef, direction: Direction) -> Option<Synta
/// easier to just compute the edit eagarly :-)
#[derive(Debug, Clone)]
pub struct AssistCtx<'a> {
source_file: &'a SourceFileNode,
source_file: &'a SourceFile,
range: TextRange,
should_compute_edit: bool,
}
@ -106,7 +106,7 @@ struct AssistBuilder {
}
impl<'a> AssistCtx<'a> {
pub fn new(source_file: &'a SourceFileNode, range: TextRange) -> AssistCtx {
pub fn new(source_file: &'a SourceFile, range: TextRange) -> AssistCtx {
AssistCtx {
source_file,
range,
@ -145,13 +145,13 @@ impl<'a> AssistCtx<'a> {
}))
}
pub(crate) fn leaf_at_offset(&self) -> LeafAtOffset<SyntaxNodeRef<'a>> {
pub(crate) fn leaf_at_offset(&self) -> LeafAtOffset<&'a SyntaxNode> {
find_leaf_at_offset(self.source_file.syntax(), self.range.start())
}
pub(crate) fn node_at_offset<N: AstNode<'a>>(&self) -> Option<N> {
pub(crate) fn node_at_offset<N: AstNode>(&self) -> Option<&'a N> {
find_node_at_offset(self.source_file.syntax(), self.range.start())
}
pub(crate) fn covering_node(&self) -> SyntaxNodeRef<'a> {
pub(crate) fn covering_node(&self) -> &'a SyntaxNode {
find_covering_node(self.source_file.syntax(), self.range)
}
}


@ -28,7 +28,7 @@ pub fn add_derive(ctx: AssistCtx) -> Option<Assist> {
}
// Insert `derive` after doc comments.
fn derive_insertion_offset(nominal: ast::NominalDef) -> Option<TextUnit> {
fn derive_insertion_offset(nominal: &ast::NominalDef) -> Option<TextUnit> {
let non_ws_child = nominal
.syntax()
.children()


@ -46,7 +46,7 @@ fn add_vis(ctx: AssistCtx) -> Option<Assist> {
})
}
fn change_vis(ctx: AssistCtx, vis: ast::Visibility) -> Option<Assist> {
fn change_vis(ctx: AssistCtx, vis: &ast::Visibility) -> Option<Assist> {
if vis.syntax().text() != "pub" {
return None;
}


@ -1,7 +1,7 @@
use ra_syntax::{
ast::{self, AstNode},
SyntaxKind::WHITESPACE,
SyntaxNodeRef, TextUnit,
SyntaxNode, TextUnit,
};
use crate::assists::{AssistCtx, Assist};
@ -39,7 +39,7 @@ pub fn introduce_variable<'a>(ctx: AssistCtx) -> Option<Assist> {
/// Statement or last in the block expression, which will follow
/// the freshly introduced var.
fn anchor_stmt(expr: ast::Expr) -> Option<SyntaxNodeRef> {
fn anchor_stmt(expr: &ast::Expr) -> Option<&SyntaxNode> {
expr.syntax().ancestors().find(|&node| {
if ast::Stmt::cast(node).is_some() {
return true;


@ -1,25 +1,15 @@
use itertools::Itertools;
use ra_syntax::{
Location, SourceFile, SyntaxKind, TextRange, SyntaxNode,
ast::{self, AstNode},
Location,
SourceFileNode,
SyntaxKind,
TextRange,
};
use ra_syntax::SyntaxNodeRef;
use ra_text_edit::{
TextEdit,
TextEditBuilder,
};
use crate::{
Diagnostic,
LocalEdit,
Severity,
};
use ra_text_edit::{TextEdit, TextEditBuilder};
pub fn diagnostics(file: &SourceFileNode) -> Vec<Diagnostic> {
use crate::{Diagnostic, LocalEdit, Severity};
pub fn diagnostics(file: &SourceFile) -> Vec<Diagnostic> {
fn location_to_range(location: Location) -> TextRange {
match location {
Location::Offset(offset) => TextRange::offset_len(offset, 1.into()),
@ -48,7 +38,7 @@ pub fn diagnostics(file: &SourceFileNode) -> Vec<Diagnostic> {
fn check_unnecessary_braces_in_use_statement(
acc: &mut Vec<Diagnostic>,
node: SyntaxNodeRef,
node: &SyntaxNode,
) -> Option<()> {
let use_tree_list = ast::UseTreeList::cast(node)?;
if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
@ -79,7 +69,7 @@ fn check_unnecessary_braces_in_use_statement(
}
fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(
single_use_tree: ast::UseTree,
single_use_tree: &ast::UseTree,
) -> Option<TextEdit> {
let use_tree_list_node = single_use_tree.syntax().parent()?;
if single_use_tree
@ -102,7 +92,7 @@ fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(
fn check_struct_shorthand_initialization(
acc: &mut Vec<Diagnostic>,
node: SyntaxNodeRef,
node: &SyntaxNode,
) -> Option<()> {
let struct_lit = ast::StructLit::cast(node)?;
let named_field_list = struct_lit.named_field_list()?;
@ -138,10 +128,10 @@ mod tests {
use super::*;
type DiagnosticChecker = fn(&mut Vec<Diagnostic>, SyntaxNodeRef) -> Option<()>;
type DiagnosticChecker = fn(&mut Vec<Diagnostic>, &SyntaxNode) -> Option<()>;
fn check_not_applicable(code: &str, func: DiagnosticChecker) {
let file = SourceFileNode::parse(code);
let file = SourceFile::parse(code);
let mut diagnostics = Vec::new();
for node in file.syntax().descendants() {
func(&mut diagnostics, node);
@ -150,7 +140,7 @@ mod tests {
}
fn check_apply(before: &str, after: &str, func: DiagnosticChecker) {
let file = SourceFileNode::parse(before);
let file = SourceFile::parse(before);
let mut diagnostics = Vec::new();
for node in file.syntax().descendants() {
func(&mut diagnostics, node);


@ -1,11 +1,10 @@
use ra_syntax::{
Direction, SyntaxNode, TextRange, TextUnit,
algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset},
Direction,
SyntaxKind::*,
SyntaxNodeRef, TextRange, TextUnit,
};
pub fn extend_selection(root: SyntaxNodeRef, range: TextRange) -> Option<TextRange> {
pub fn extend_selection(root: &SyntaxNode, range: TextRange) -> Option<TextRange> {
let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING];
if range.is_empty() {
let offset = range.start();
@ -40,7 +39,7 @@ pub fn extend_selection(root: SyntaxNodeRef, range: TextRange) -> Option<TextRan
}
fn extend_single_word_in_comment_or_string(
leaf: SyntaxNodeRef,
leaf: &SyntaxNode,
offset: TextUnit,
) -> Option<TextRange> {
let text: &str = leaf.leaf_text()?;
@ -66,7 +65,7 @@ fn extend_single_word_in_comment_or_string(
}
}
fn extend_ws(root: SyntaxNodeRef, ws: SyntaxNodeRef, offset: TextUnit) -> TextRange {
fn extend_ws(root: &SyntaxNode, ws: &SyntaxNode, offset: TextUnit) -> TextRange {
let ws_text = ws.leaf_text().unwrap();
let suffix = TextRange::from_to(offset, ws.range().end()) - ws.range().start();
let prefix = TextRange::from_to(ws.range().start(), offset) - ws.range().start();
@ -89,9 +88,9 @@ fn extend_ws(root: SyntaxNodeRef, ws: SyntaxNodeRef, offset: TextUnit) -> TextRa
ws.range()
}
fn pick_best<'a>(l: SyntaxNodeRef<'a>, r: SyntaxNodeRef<'a>) -> SyntaxNodeRef<'a> {
fn pick_best<'a>(l: &'a SyntaxNode, r: &'a SyntaxNode) -> &'a SyntaxNode {
return if priority(r) > priority(l) { r } else { l };
fn priority(n: SyntaxNodeRef) -> usize {
fn priority(n: &SyntaxNode) -> usize {
match n.kind() {
WHITESPACE => 0,
IDENT | SELF_KW | SUPER_KW | CRATE_KW | LIFETIME => 2,
@ -100,7 +99,7 @@ fn pick_best<'a>(l: SyntaxNodeRef<'a>, r: SyntaxNodeRef<'a>) -> SyntaxNodeRef<'a
}
}
fn extend_comments(node: SyntaxNodeRef) -> Option<TextRange> {
fn extend_comments(node: &SyntaxNode) -> Option<TextRange> {
let prev = adj_comments(node, Direction::Prev);
let next = adj_comments(node, Direction::Next);
if prev != next {
@ -110,7 +109,7 @@ fn extend_comments(node: SyntaxNodeRef) -> Option<TextRange> {
}
}
fn adj_comments(node: SyntaxNodeRef, dir: Direction) -> SyntaxNodeRef {
fn adj_comments(node: &SyntaxNode, dir: Direction) -> &SyntaxNode {
let mut res = node;
for node in node.siblings(dir) {
match node.kind() {
@ -124,13 +123,14 @@ fn adj_comments(node: SyntaxNodeRef, dir: Direction) -> SyntaxNodeRef {
#[cfg(test)]
mod tests {
use super::*;
use ra_syntax::SourceFileNode;
use ra_syntax::{SourceFile, AstNode};
use test_utils::extract_offset;
use super::*;
fn do_check(before: &str, afters: &[&str]) {
let (cursor, before) = extract_offset(before);
let file = SourceFileNode::parse(&before);
let file = SourceFile::parse(&before);
let mut range = TextRange::offset_len(cursor, 0.into());
for &after in afters {
range = extend_selection(file.syntax(), range).unwrap();


@ -1,9 +1,8 @@
use rustc_hash::FxHashSet;
use ra_syntax::{
ast, AstNode, Direction, SourceFileNode,
ast, AstNode, Direction, SourceFile, SyntaxNode, TextRange,
SyntaxKind::{self, *},
SyntaxNodeRef, TextRange,
};
#[derive(Debug, PartialEq, Eq)]
@ -19,7 +18,7 @@ pub struct Fold {
pub kind: FoldKind,
}
pub fn folding_ranges(file: &SourceFileNode) -> Vec<Fold> {
pub fn folding_ranges(file: &SourceFile) -> Vec<Fold> {
let mut res = vec![];
let mut visited_comments = FxHashSet::default();
let mut visited_imports = FxHashSet::default();
@ -69,7 +68,7 @@ fn fold_kind(kind: SyntaxKind) -> Option<FoldKind> {
}
}
fn has_newline(node: SyntaxNodeRef) -> bool {
fn has_newline(node: &SyntaxNode) -> bool {
for descendant in node.descendants() {
if let Some(ws) = ast::Whitespace::cast(descendant) {
if ws.has_newlines() {
@ -86,8 +85,8 @@ fn has_newline(node: SyntaxNodeRef) -> bool {
}
fn contiguous_range_for_group<'a>(
first: SyntaxNodeRef<'a>,
visited: &mut FxHashSet<SyntaxNodeRef<'a>>,
first: &'a SyntaxNode,
visited: &mut FxHashSet<&'a SyntaxNode>,
) -> Option<TextRange> {
visited.insert(first);
@ -124,8 +123,8 @@ fn contiguous_range_for_group<'a>(
}
fn contiguous_range_for_comment<'a>(
first: SyntaxNodeRef<'a>,
visited: &mut FxHashSet<SyntaxNodeRef<'a>>,
first: &'a SyntaxNode,
visited: &mut FxHashSet<&'a SyntaxNode>,
) -> Option<TextRange> {
visited.insert(first);
@ -174,7 +173,7 @@ mod tests {
fn do_check(text: &str, fold_kinds: &[FoldKind]) {
let (ranges, text) = extract_ranges(text, "fold");
let file = SourceFileNode::parse(&text);
let file = SourceFile::parse(&text);
let folds = folding_ranges(&file);
assert_eq!(


@ -21,11 +21,10 @@ pub use self::{
};
use ra_text_edit::TextEditBuilder;
use ra_syntax::{
algo::find_leaf_at_offset,
ast::{self, AstNode},
SourceFileNode,
SourceFile, SyntaxNode, TextRange, TextUnit, Direction,
SyntaxKind::{self, *},
SyntaxNodeRef, TextRange, TextUnit, Direction,
ast::{self, AstNode},
algo::find_leaf_at_offset,
};
use rustc_hash::FxHashSet;
@ -49,7 +48,7 @@ pub struct Diagnostic {
pub fix: Option<LocalEdit>,
}
pub fn matching_brace(file: &SourceFileNode, offset: TextUnit) -> Option<TextUnit> {
pub fn matching_brace(file: &SourceFile, offset: TextUnit) -> Option<TextUnit> {
const BRACES: &[SyntaxKind] = &[
L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE,
];
@ -67,7 +66,7 @@ pub fn matching_brace(file: &SourceFileNode, offset: TextUnit) -> Option<TextUni
Some(matching_node.range().start())
}
pub fn highlight(root: SyntaxNodeRef) -> Vec<HighlightedRange> {
pub fn highlight(root: &SyntaxNode) -> Vec<HighlightedRange> {
// Visited nodes to handle highlighting priorities
let mut highlighted = FxHashSet::default();
let mut res = Vec::new();
@ -117,26 +116,25 @@ pub fn highlight(root: SyntaxNodeRef) -> Vec<HighlightedRange> {
res
}
pub fn syntax_tree(file: &SourceFileNode) -> String {
pub fn syntax_tree(file: &SourceFile) -> String {
::ra_syntax::utils::dump_tree(file.syntax())
}
pub fn find_node_at_offset<'a, N: AstNode<'a>>(
syntax: SyntaxNodeRef<'a>,
offset: TextUnit,
) -> Option<N> {
pub fn find_node_at_offset<N: AstNode>(syntax: &SyntaxNode, offset: TextUnit) -> Option<&N> {
find_leaf_at_offset(syntax, offset).find_map(|leaf| leaf.ancestors().find_map(N::cast))
}
#[cfg(test)]
mod tests {
use ra_syntax::AstNode;
use crate::test_utils::{add_cursor, assert_eq_dbg, assert_eq_text, extract_offset};
use super::*;
#[test]
fn test_highlighting() {
let file = SourceFileNode::parse(
let file = SourceFile::parse(
r#"
// comment
fn main() {}
@ -159,7 +157,7 @@ fn main() {}
fn test_matching_brace() {
fn do_check(before: &str, after: &str) {
let (pos, before) = extract_offset(before);
let file = SourceFileNode::parse(&before);
let file = SourceFile::parse(&before);
let new_pos = match matching_brace(&file, pos) {
None => pos,
Some(pos) => pos,


@ -3,7 +3,7 @@ use crate::TextRange;
use ra_syntax::{
algo::visit::{visitor, Visitor},
ast::{self, NameOwner},
AstNode, SourceFileNode, SyntaxKind, SyntaxNodeRef, WalkEvent,
AstNode, SourceFile, SyntaxKind, SyntaxNode, WalkEvent,
};
#[derive(Debug, Clone)]
@ -15,7 +15,7 @@ pub struct StructureNode {
pub kind: SyntaxKind,
}
pub fn file_structure(file: &SourceFileNode) -> Vec<StructureNode> {
pub fn file_structure(file: &SourceFile) -> Vec<StructureNode> {
let mut res = Vec::new();
let mut stack = Vec::new();
@ -38,8 +38,8 @@ pub fn file_structure(file: &SourceFileNode) -> Vec<StructureNode> {
res
}
fn structure_node(node: SyntaxNodeRef) -> Option<StructureNode> {
fn decl<'a, N: NameOwner<'a>>(node: N) -> Option<StructureNode> {
fn structure_node(node: &SyntaxNode) -> Option<StructureNode> {
fn decl<N: NameOwner>(node: &N) -> Option<StructureNode> {
let name = node.name()?;
Some(StructureNode {
parent: None,
@ -60,7 +60,7 @@ fn structure_node(node: SyntaxNodeRef) -> Option<StructureNode> {
.visit(decl::<ast::TypeDef>)
.visit(decl::<ast::ConstDef>)
.visit(decl::<ast::StaticDef>)
.visit(|im: ast::ImplBlock| {
.visit(|im: &ast::ImplBlock| {
let target_type = im.target_type()?;
let target_trait = im.target_trait();
let label = match target_trait {
@ -91,7 +91,7 @@ mod tests {
#[test]
fn test_file_structure() {
let file = SourceFileNode::parse(
let file = SourceFile::parse(
r#"
struct Foo {
x: i32


@ -1,15 +1,15 @@
use ra_syntax::{SourceFileNode, TextRange, TextUnit};
use ra_syntax::{SourceFile, TextRange, TextUnit};
use crate::LocalEdit;
pub use test_utils::*;
pub fn check_action<F: Fn(&SourceFileNode, TextUnit) -> Option<LocalEdit>>(
pub fn check_action<F: Fn(&SourceFile, TextUnit) -> Option<LocalEdit>>(
before: &str,
after: &str,
f: F,
) {
let (before_cursor_pos, before) = extract_offset(before);
let file = SourceFileNode::parse(&before);
let file = SourceFile::parse(&before);
let result = f(&file, before_cursor_pos).expect("code action is not applicable");
let actual = result.edit.apply(&before);
let actual_cursor_pos = match result.cursor_position {
@ -20,13 +20,13 @@ pub fn check_action<F: Fn(&SourceFileNode, TextUnit) -> Option<LocalEdit>>(
assert_eq_text!(after, &actual);
}
pub fn check_action_range<F: Fn(&SourceFileNode, TextRange) -> Option<LocalEdit>>(
pub fn check_action_range<F: Fn(&SourceFile, TextRange) -> Option<LocalEdit>>(
before: &str,
after: &str,
f: F,
) {
let (range, before) = extract_range(before);
let file = SourceFileNode::parse(&before);
let file = SourceFile::parse(&before);
let result = f(&file, range).expect("code action is not applicable");
let actual = result.edit.apply(&before);
let actual_cursor_pos = match result.cursor_position {


@ -5,15 +5,15 @@ use ra_syntax::{
algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset},
ast,
text_utils::intersect,
AstNode, Direction, SourceFileNode, SyntaxKind,
AstNode, Direction, SourceFile, SyntaxKind,
SyntaxKind::*,
SyntaxNodeRef, TextRange, TextUnit,
SyntaxNode, TextRange, TextUnit,
};
use ra_text_edit::text_utils::contains_offset_nonstrict;
use crate::{find_node_at_offset, LocalEdit, TextEditBuilder};
pub fn join_lines(file: &SourceFileNode, range: TextRange) -> LocalEdit {
pub fn join_lines(file: &SourceFile, range: TextRange) -> LocalEdit {
let range = if range.is_empty() {
let syntax = file.syntax();
let text = syntax.text().slice(range.start()..);
@ -59,7 +59,7 @@ pub fn join_lines(file: &SourceFileNode, range: TextRange) -> LocalEdit {
}
}
pub fn on_enter(file: &SourceFileNode, offset: TextUnit) -> Option<LocalEdit> {
pub fn on_enter(file: &SourceFile, offset: TextUnit) -> Option<LocalEdit> {
let comment = find_leaf_at_offset(file.syntax(), offset)
.left_biased()
.and_then(ast::Comment::cast)?;
@ -85,7 +85,7 @@ pub fn on_enter(file: &SourceFileNode, offset: TextUnit) -> Option<LocalEdit> {
})
}
fn node_indent<'a>(file: &'a SourceFileNode, node: SyntaxNodeRef) -> Option<&'a str> {
fn node_indent<'a>(file: &'a SourceFile, node: &SyntaxNode) -> Option<&'a str> {
let ws = match find_leaf_at_offset(file.syntax(), node.range().start()) {
LeafAtOffset::Between(l, r) => {
assert!(r == node);
@ -105,8 +105,8 @@ fn node_indent<'a>(file: &'a SourceFileNode, node: SyntaxNodeRef) -> Option<&'a
Some(&text[pos..])
}
pub fn on_eq_typed(file: &SourceFileNode, offset: TextUnit) -> Option<LocalEdit> {
let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?;
pub fn on_eq_typed(file: &SourceFile, offset: TextUnit) -> Option<LocalEdit> {
let let_stmt: &ast::LetStmt = find_node_at_offset(file.syntax(), offset)?;
if let_stmt.has_semi() {
return None;
}
@ -136,7 +136,7 @@ pub fn on_eq_typed(file: &SourceFileNode, offset: TextUnit) -> Option<LocalEdit>
})
}
pub fn on_dot_typed(file: &SourceFileNode, offset: TextUnit) -> Option<LocalEdit> {
pub fn on_dot_typed(file: &SourceFile, offset: TextUnit) -> Option<LocalEdit> {
let before_dot_offset = offset - TextUnit::of_char('.');
let whitespace = find_leaf_at_offset(file.syntax(), before_dot_offset).left_biased()?;
@ -151,7 +151,7 @@ pub fn on_dot_typed(file: &SourceFileNode, offset: TextUnit) -> Option<LocalEdit
.skip(1)
.next()?;
ast::MethodCallExprNode::cast(method_call)?;
ast::MethodCallExpr::cast(method_call)?;
// find how much the _method call is indented
let method_chain_indent = method_call
@ -188,7 +188,7 @@ fn last_line_indent_in_whitespace(ws: &str) -> &str {
fn remove_newline(
edit: &mut TextEditBuilder,
node: SyntaxNodeRef,
node: &SyntaxNode,
node_text: &str,
offset: TextUnit,
) {
@ -266,7 +266,7 @@ fn is_trailing_comma(left: SyntaxKind, right: SyntaxKind) -> bool {
}
}
fn join_single_expr_block(edit: &mut TextEditBuilder, node: SyntaxNodeRef) -> Option<()> {
fn join_single_expr_block(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> {
let block = ast::Block::cast(node.parent()?)?;
let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?;
let expr = single_expr(block)?;
@ -277,7 +277,7 @@ fn join_single_expr_block(edit: &mut TextEditBuilder, node: SyntaxNodeRef) -> Op
Some(())
}
fn single_expr(block: ast::Block) -> Option<ast::Expr> {
fn single_expr(block: &ast::Block) -> Option<&ast::Expr> {
let mut res = None;
for child in block.syntax().children() {
if let Some(expr) = ast::Expr::cast(child) {
@ -297,7 +297,7 @@ fn single_expr(block: ast::Block) -> Option<ast::Expr> {
res
}
fn join_single_use_tree(edit: &mut TextEditBuilder, node: SyntaxNodeRef) -> Option<()> {
fn join_single_use_tree(edit: &mut TextEditBuilder, node: &SyntaxNode) -> Option<()> {
let use_tree_list = ast::UseTreeList::cast(node.parent()?)?;
let (tree,) = use_tree_list.use_trees().collect_tuple()?;
edit.replace(
@ -307,7 +307,7 @@ fn join_single_use_tree(edit: &mut TextEditBuilder, node: SyntaxNodeRef) -> Opti
Some(())
}
fn compute_ws(left: SyntaxNodeRef, right: SyntaxNodeRef) -> &'static str {
fn compute_ws(left: &SyntaxNode, right: &SyntaxNode) -> &'static str {
match left.kind() {
L_PAREN | L_BRACK => return "",
L_CURLY => {
@ -547,7 +547,7 @@ fn foo() {
fn check_join_lines_sel(before: &str, after: &str) {
let (sel, before) = extract_range(before);
let file = SourceFileNode::parse(&before);
let file = SourceFile::parse(&before);
let result = join_lines(&file, sel);
let actual = result.edit.apply(&before);
assert_eq_text!(after, &actual);
@ -626,7 +626,7 @@ pub fn handle_find_matching_brace() {
fn test_on_eq_typed() {
fn do_check(before: &str, after: &str) {
let (offset, before) = extract_offset(before);
let file = SourceFileNode::parse(&before);
let file = SourceFile::parse(&before);
let result = on_eq_typed(&file, offset).unwrap();
let actual = result.edit.apply(&before);
assert_eq_text!(after, &actual);
@ -670,7 +670,7 @@ fn foo() {
fn test_on_dot_typed() {
fn do_check(before: &str, after: &str) {
let (offset, before) = extract_offset(before);
let file = SourceFileNode::parse(&before);
let file = SourceFile::parse(&before);
if let Some(result) = on_eq_typed(&file, offset) {
let actual = result.edit.apply(&before);
assert_eq_text!(after, &actual);
@ -779,7 +779,7 @@ fn foo() {
fn test_on_enter() {
fn apply_on_enter(before: &str) -> Option<String> {
let (offset, before) = extract_offset(before);
let file = SourceFileNode::parse(&before);
let file = SourceFile::parse(&before);
let result = on_enter(&file, offset)?;
let actual = result.edit.apply(&before);
let actual = add_cursor(&actual, result.cursor_position.unwrap());


@ -42,7 +42,7 @@ pub struct StructData {
}
impl StructData {
pub(crate) fn new(struct_def: ast::StructDef) -> StructData {
pub(crate) fn new(struct_def: &ast::StructDef) -> StructData {
let name = struct_def.name().map(|n| n.as_name());
let variant_data = VariantData::new(struct_def.flavor());
let variant_data = Arc::new(variant_data);
@ -87,7 +87,7 @@ pub struct EnumData {
}
impl EnumData {
pub(crate) fn new(enum_def: ast::EnumDef) -> Self {
pub(crate) fn new(enum_def: &ast::EnumDef) -> Self {
let name = enum_def.name().map(|n| n.as_name());
let variants = if let Some(evl) = enum_def.variant_list() {
evl.variants()


@ -1,6 +1,6 @@
use relative_path::RelativePathBuf;
use ra_db::{CrateId, Cancelable, FileId};
use ra_syntax::{ast, SyntaxNode};
use ra_syntax::{ast, TreePtr, SyntaxNode};
use crate::{Name, db::HirDatabase, DefId, Path, PerNs, nameres::ModuleScope};
@ -36,8 +36,8 @@ pub struct Module {
}
pub enum ModuleSource {
SourceFile(ast::SourceFileNode),
Module(ast::ModuleNode),
SourceFile(TreePtr<ast::SourceFile>),
Module(TreePtr<ast::Module>),
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
@ -66,7 +66,7 @@ impl Module {
pub fn declaration_source(
&self,
db: &impl HirDatabase,
) -> Cancelable<Option<(FileId, ast::ModuleNode)>> {
) -> Cancelable<Option<(FileId, TreePtr<ast::Module>)>> {
self.declaration_source_impl(db)
}
@ -104,7 +104,10 @@ impl Module {
pub fn resolve_path(&self, db: &impl HirDatabase, path: &Path) -> Cancelable<PerNs<DefId>> {
self.resolve_path_impl(db, path)
}
pub fn problems(&self, db: &impl HirDatabase) -> Cancelable<Vec<(SyntaxNode, Problem)>> {
pub fn problems(
&self,
db: &impl HirDatabase,
) -> Cancelable<Vec<(TreePtr<SyntaxNode>, Problem)>> {
self.problems_impl(db)
}
}


@ -1,5 +1,5 @@
use ra_db::{Cancelable, SourceRootId, FileId};
use ra_syntax::{ast, SyntaxNode, AstNode};
use ra_syntax::{ast, SyntaxNode, AstNode, TreePtr};
use crate::{
Module, ModuleSource, Problem,
@ -43,12 +43,11 @@ impl Module {
let loc = self.def_id.loc(db);
let file_id = loc.source_item_id.file_id.as_original_file();
let syntax_node = db.file_item(loc.source_item_id);
let syntax_node = syntax_node.borrowed();
let module_source = if let Some(source_file) = ast::SourceFile::cast(syntax_node) {
ModuleSource::SourceFile(source_file.owned())
let module_source = if let Some(source_file) = ast::SourceFile::cast(&syntax_node) {
ModuleSource::SourceFile(source_file.to_owned())
} else {
let module = ast::Module::cast(syntax_node).unwrap();
ModuleSource::Module(module.owned())
let module = ast::Module::cast(&syntax_node).unwrap();
ModuleSource::Module(module.to_owned())
};
Ok((file_id, module_source))
}
@ -56,7 +55,7 @@ impl Module {
pub fn declaration_source_impl(
&self,
db: &impl HirDatabase,
) -> Cancelable<Option<(FileId, ast::ModuleNode)>> {
) -> Cancelable<Option<(FileId, TreePtr<ast::Module>)>> {
let loc = self.def_id.loc(db);
let module_tree = db.module_tree(loc.source_root_id)?;
let link = ctry!(loc.module_id.parent_link(&module_tree));
@ -146,7 +145,10 @@ impl Module {
}
Ok(curr_per_ns)
}
pub fn problems_impl(&self, db: &impl HirDatabase) -> Cancelable<Vec<(SyntaxNode, Problem)>> {
pub fn problems_impl(
&self,
db: &impl HirDatabase,
) -> Cancelable<Vec<(TreePtr<SyntaxNode>, Problem)>> {
let loc = self.def_id.loc(db);
let module_tree = db.module_tree(loc.source_root_id)?;
Ok(loc.module_id.problems(&module_tree, db))


@ -1,6 +1,6 @@
use std::sync::Arc;
use ra_syntax::{SyntaxNode, SourceFileNode};
use ra_syntax::{SyntaxNode, TreePtr, SourceFile};
use ra_db::{SourceRootId, LocationIntener, SyntaxDatabase, Cancelable};
use crate::{
@ -22,7 +22,7 @@ pub trait HirDatabase: SyntaxDatabase
+ AsRef<LocationIntener<DefLoc, DefId>>
+ AsRef<LocationIntener<MacroCallLoc, MacroCallId>>
{
fn hir_source_file(file_id: HirFileId) -> SourceFileNode {
fn hir_source_file(file_id: HirFileId) -> TreePtr<SourceFile> {
type HirSourceFileQuery;
use fn HirFileId::hir_source_file;
}
@ -66,7 +66,7 @@ pub trait HirDatabase: SyntaxDatabase
use fn query_definitions::file_items;
}
fn file_item(source_item_id: SourceItemId) -> SyntaxNode {
fn file_item(source_item_id: SourceItemId) -> TreePtr<SyntaxNode> {
type FileItemQuery;
use fn query_definitions::file_item;
}


@ -77,7 +77,7 @@ impl BodySyntaxMapping {
pub fn syntax_expr(&self, ptr: LocalSyntaxPtr) -> Option<ExprId> {
self.expr_syntax_mapping.get(&ptr).cloned()
}
pub fn node_expr(&self, node: ast::Expr) -> Option<ExprId> {
pub fn node_expr(&self, node: &ast::Expr) -> Option<ExprId> {
self.expr_syntax_mapping
.get(&LocalSyntaxPtr::new(node.syntax()))
.cloned()
@ -88,7 +88,7 @@ impl BodySyntaxMapping {
pub fn syntax_pat(&self, ptr: LocalSyntaxPtr) -> Option<PatId> {
self.pat_syntax_mapping.get(&ptr).cloned()
}
pub fn node_pat(&self, node: ast::Pat) -> Option<PatId> {
pub fn node_pat(&self, node: &ast::Pat) -> Option<PatId> {
self.pat_syntax_mapping
.get(&LocalSyntaxPtr::new(node.syntax()))
.cloned()
@ -373,10 +373,10 @@ impl ExprCollector {
self.exprs.alloc(block)
}
fn collect_expr(&mut self, expr: ast::Expr) -> ExprId {
fn collect_expr(&mut self, expr: &ast::Expr) -> ExprId {
let syntax_ptr = LocalSyntaxPtr::new(expr.syntax());
match expr {
ast::Expr::IfExpr(e) => {
match expr.kind() {
ast::ExprKind::IfExpr(e) => {
if let Some(pat) = e.condition().and_then(|c| c.pat()) {
// if let -- desugar to match
let pat = self.collect_pat(pat);
@ -419,12 +419,12 @@ impl ExprCollector {
)
}
}
ast::Expr::BlockExpr(e) => self.collect_block_opt(e.block()),
ast::Expr::LoopExpr(e) => {
ast::ExprKind::BlockExpr(e) => self.collect_block_opt(e.block()),
ast::ExprKind::LoopExpr(e) => {
let body = self.collect_block_opt(e.loop_body());
self.alloc_expr(Expr::Loop { body }, syntax_ptr)
}
ast::Expr::WhileExpr(e) => {
ast::ExprKind::WhileExpr(e) => {
let condition = if let Some(condition) = e.condition() {
if condition.pat().is_none() {
self.collect_expr_opt(condition.expr())
@ -438,7 +438,7 @@ impl ExprCollector {
let body = self.collect_block_opt(e.loop_body());
self.alloc_expr(Expr::While { condition, body }, syntax_ptr)
}
ast::Expr::ForExpr(e) => {
ast::ExprKind::ForExpr(e) => {
let iterable = self.collect_expr_opt(e.iterable());
let pat = self.collect_pat_opt(e.pat());
let body = self.collect_block_opt(e.loop_body());
@ -451,7 +451,7 @@ impl ExprCollector {
syntax_ptr,
)
}
ast::Expr::CallExpr(e) => {
ast::ExprKind::CallExpr(e) => {
let callee = self.collect_expr_opt(e.expr());
let args = if let Some(arg_list) = e.arg_list() {
arg_list.args().map(|e| self.collect_expr(e)).collect()
@ -460,7 +460,7 @@ impl ExprCollector {
};
self.alloc_expr(Expr::Call { callee, args }, syntax_ptr)
}
ast::Expr::MethodCallExpr(e) => {
ast::ExprKind::MethodCallExpr(e) => {
let receiver = self.collect_expr_opt(e.expr());
let args = if let Some(arg_list) = e.arg_list() {
arg_list.args().map(|e| self.collect_expr(e)).collect()
@ -480,7 +480,7 @@ impl ExprCollector {
syntax_ptr,
)
}
ast::Expr::MatchExpr(e) => {
ast::ExprKind::MatchExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
let arms = if let Some(match_arm_list) = e.match_arm_list() {
match_arm_list
@ -495,7 +495,7 @@ impl ExprCollector {
};
self.alloc_expr(Expr::Match { expr, arms }, syntax_ptr)
}
ast::Expr::PathExpr(e) => {
ast::ExprKind::PathExpr(e) => {
let path = e
.path()
.and_then(Path::from_ast)
@ -503,25 +503,25 @@ impl ExprCollector {
.unwrap_or(Expr::Missing);
self.alloc_expr(path, syntax_ptr)
}
ast::Expr::ContinueExpr(_e) => {
ast::ExprKind::ContinueExpr(_e) => {
// TODO: labels
self.alloc_expr(Expr::Continue, syntax_ptr)
}
ast::Expr::BreakExpr(e) => {
ast::ExprKind::BreakExpr(e) => {
let expr = e.expr().map(|e| self.collect_expr(e));
self.alloc_expr(Expr::Break { expr }, syntax_ptr)
}
ast::Expr::ParenExpr(e) => {
ast::ExprKind::ParenExpr(e) => {
let inner = self.collect_expr_opt(e.expr());
// make the paren expr point to the inner expression as well
self.expr_syntax_mapping.insert(syntax_ptr, inner);
inner
}
ast::Expr::ReturnExpr(e) => {
ast::ExprKind::ReturnExpr(e) => {
let expr = e.expr().map(|e| self.collect_expr(e));
self.alloc_expr(Expr::Return { expr }, syntax_ptr)
}
ast::Expr::StructLit(e) => {
ast::ExprKind::StructLit(e) => {
let path = e.path().and_then(Path::from_ast);
let fields = if let Some(nfl) = e.named_field_list() {
nfl.fields()
@ -558,7 +558,7 @@ impl ExprCollector {
syntax_ptr,
)
}
ast::Expr::FieldExpr(e) => {
ast::ExprKind::FieldExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
let name = e
.name_ref()
@ -566,26 +566,26 @@ impl ExprCollector {
.unwrap_or_else(Name::missing);
self.alloc_expr(Expr::Field { expr, name }, syntax_ptr)
}
ast::Expr::TryExpr(e) => {
ast::ExprKind::TryExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
self.alloc_expr(Expr::Try { expr }, syntax_ptr)
}
ast::Expr::CastExpr(e) => {
ast::ExprKind::CastExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
let type_ref = TypeRef::from_ast_opt(e.type_ref());
self.alloc_expr(Expr::Cast { expr, type_ref }, syntax_ptr)
}
ast::Expr::RefExpr(e) => {
ast::ExprKind::RefExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
let mutability = Mutability::from_mutable(e.is_mut());
self.alloc_expr(Expr::Ref { expr, mutability }, syntax_ptr)
}
ast::Expr::PrefixExpr(e) => {
ast::ExprKind::PrefixExpr(e) => {
let expr = self.collect_expr_opt(e.expr());
let op = e.op();
self.alloc_expr(Expr::UnaryOp { expr, op }, syntax_ptr)
}
ast::Expr::LambdaExpr(e) => {
ast::ExprKind::LambdaExpr(e) => {
let mut args = Vec::new();
let mut arg_types = Vec::new();
if let Some(pl) = e.param_list() {
@ -606,7 +606,7 @@ impl ExprCollector {
syntax_ptr,
)
}
ast::Expr::BinExpr(e) => {
ast::ExprKind::BinExpr(e) => {
let lhs = self.collect_expr_opt(e.lhs());
let rhs = self.collect_expr_opt(e.rhs());
let op = e.op();
@ -614,16 +614,16 @@ impl ExprCollector {
}
// TODO implement HIR for these:
ast::Expr::Label(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::Expr::IndexExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::Expr::TupleExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::Expr::ArrayExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::Expr::RangeExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::Expr::Literal(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::ExprKind::Label(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::ExprKind::IndexExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::ExprKind::TupleExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::ExprKind::ArrayExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::ExprKind::RangeExpr(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
ast::ExprKind::Literal(_e) => self.alloc_expr(Expr::Missing, syntax_ptr),
}
}
fn collect_expr_opt(&mut self, expr: Option<ast::Expr>) -> ExprId {
fn collect_expr_opt(&mut self, expr: Option<&ast::Expr>) -> ExprId {
if let Some(expr) = expr {
self.collect_expr(expr)
} else {
@ -631,11 +631,11 @@ impl ExprCollector {
}
}
fn collect_block(&mut self, block: ast::Block) -> ExprId {
fn collect_block(&mut self, block: &ast::Block) -> ExprId {
let statements = block
.statements()
.map(|s| match s {
ast::Stmt::LetStmt(stmt) => {
.map(|s| match s.kind() {
ast::StmtKind::LetStmt(stmt) => {
let pat = self.collect_pat_opt(stmt.pat());
let type_ref = stmt.type_ref().map(TypeRef::from_ast);
let initializer = stmt.initializer().map(|e| self.collect_expr(e));
@ -645,7 +645,9 @@ impl ExprCollector {
initializer,
}
}
ast::Stmt::ExprStmt(stmt) => Statement::Expr(self.collect_expr_opt(stmt.expr())),
ast::StmtKind::ExprStmt(stmt) => {
Statement::Expr(self.collect_expr_opt(stmt.expr()))
}
})
.collect();
let tail = block.expr().map(|e| self.collect_expr(e));
@ -655,7 +657,7 @@ impl ExprCollector {
)
}
fn collect_block_opt(&mut self, block: Option<ast::Block>) -> ExprId {
fn collect_block_opt(&mut self, block: Option<&ast::Block>) -> ExprId {
if let Some(block) = block {
self.collect_block(block)
} else {
@ -663,17 +665,17 @@ impl ExprCollector {
}
}
fn collect_pat(&mut self, pat: ast::Pat) -> PatId {
fn collect_pat(&mut self, pat: &ast::Pat) -> PatId {
let syntax_ptr = LocalSyntaxPtr::new(pat.syntax());
match pat {
ast::Pat::BindPat(bp) => {
match pat.kind() {
ast::PatKind::BindPat(bp) => {
let name = bp
.name()
.map(|nr| nr.as_name())
.unwrap_or_else(Name::missing);
self.alloc_pat(Pat::Bind { name }, syntax_ptr)
}
ast::Pat::TupleStructPat(p) => {
ast::PatKind::TupleStructPat(p) => {
let path = p.path().and_then(Path::from_ast);
let args = p.args().map(|p| self.collect_pat(p)).collect();
self.alloc_pat(Pat::TupleStruct { path, args }, syntax_ptr)
@ -685,7 +687,7 @@ impl ExprCollector {
}
}
fn collect_pat_opt(&mut self, pat: Option<ast::Pat>) -> PatId {
fn collect_pat_opt(&mut self, pat: Option<&ast::Pat>) -> PatId {
if let Some(pat) = pat {
self.collect_pat(pat)
} else {
@ -710,7 +712,7 @@ impl ExprCollector {
}
}
pub(crate) fn collect_fn_body_syntax(node: ast::FnDef) -> BodySyntaxMapping {
pub(crate) fn collect_fn_body_syntax(node: &ast::FnDef) -> BodySyntaxMapping {
let mut collector = ExprCollector::new();
let args = if let Some(param_list) = node.param_list() {
@ -758,9 +760,7 @@ pub(crate) fn body_syntax_mapping(
let body_syntax_mapping = match def {
Def::Function(f) => {
let node = f.syntax(db);
let node = node.borrowed();
collect_fn_body_syntax(node)
collect_fn_body_syntax(&node)
}
// TODO: consts, etc.
_ => panic!("Trying to get body for item type without body"),
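The pattern repeated through this file is the central ergonomic change: `ast::Expr` is no longer an enum you match on directly; you match on the borrowed `ExprKind` returned by `kind()`. A small sketch of the same dispatch outside the collector (the helper is hypothetical):

use ra_syntax::{ast, AstNode, SourceFile};

fn count_calls(file: &SourceFile) -> usize {
    file.syntax()
        .descendants()
        .filter_map(ast::Expr::cast)
        .filter(|expr| match expr.kind() {
            // Each variant now carries a borrowed typed node, e.g. `&ast::CallExpr`.
            ast::ExprKind::CallExpr(_) | ast::ExprKind::MethodCallExpr(_) => true,
            _ => false,
        })
        .count()
}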


@ -7,7 +7,7 @@ use std::{
use ra_db::Cancelable;
use ra_syntax::{
TextRange, TextUnit,
TextRange, TextUnit, TreePtr,
ast::{self, AstNode, DocCommentsOwner, NameOwner},
};
@ -29,11 +29,11 @@ impl Function {
self.def_id
}
pub fn syntax(&self, db: &impl HirDatabase) -> ast::FnDefNode {
pub fn syntax(&self, db: &impl HirDatabase) -> TreePtr<ast::FnDef> {
let def_loc = self.def_id.loc(db);
assert!(def_loc.kind == DefKind::Function);
let syntax = db.file_item(def_loc.source_item_id);
ast::FnDef::cast(syntax.borrowed()).unwrap().owned()
ast::FnDef::cast(&syntax).unwrap().to_owned()
}
pub fn body(&self, db: &impl HirDatabase) -> Cancelable<Arc<Body>> {
@ -59,7 +59,7 @@ impl Function {
pub fn signature_info(&self, db: &impl HirDatabase) -> Option<FnSignatureInfo> {
let syntax = self.syntax(db);
FnSignatureInfo::new(syntax.borrowed())
FnSignatureInfo::new(&syntax)
}
pub fn infer(&self, db: &impl HirDatabase) -> Cancelable<Arc<InferenceResult>> {
@ -99,8 +99,7 @@ impl FnSignature {
pub(crate) fn fn_signature(db: &impl HirDatabase, def_id: DefId) -> Arc<FnSignature> {
let func = Function::new(def_id);
let syntax = func.syntax(db);
let node = syntax.borrowed();
let node = func.syntax(db);
let mut args = Vec::new();
if let Some(param_list) = node.param_list() {
if let Some(self_param) = param_list.self_param() {
@ -144,7 +143,7 @@ pub struct FnSignatureInfo {
}
impl FnSignatureInfo {
fn new(node: ast::FnDef) -> Option<Self> {
fn new(node: &ast::FnDef) -> Option<Self> {
let name = node.name()?.text().to_string();
let mut doc = None;
@ -207,7 +206,7 @@ impl FnSignatureInfo {
})
}
fn extract_doc_comments(node: ast::FnDef) -> Option<(TextRange, String)> {
fn extract_doc_comments(node: &ast::FnDef) -> Option<(TextRange, String)> {
if node.doc_comments().count() == 0 {
return None;
}
@ -227,7 +226,7 @@ impl FnSignatureInfo {
Some((range, comment_text))
}
fn param_list(node: ast::FnDef) -> Vec<String> {
fn param_list(node: &ast::FnDef) -> Vec<String> {
let mut res = vec![];
if let Some(param_list) = node.param_list() {
if let Some(self_param) = param_list.self_param() {


@ -3,7 +3,7 @@ use std::sync::Arc;
use rustc_hash::{FxHashMap, FxHashSet};
use ra_syntax::{
AstNode, SyntaxNodeRef, TextUnit, TextRange,
AstNode, SyntaxNode, TextUnit, TextRange,
algo::generate,
ast,
};
@ -127,7 +127,7 @@ impl ScopeEntryWithSyntax {
}
impl ScopesWithSyntaxMapping {
pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator<Item = ScopeId> + 'a {
pub fn scope_chain<'a>(&'a self, node: &SyntaxNode) -> impl Iterator<Item = ScopeId> + 'a {
generate(self.scope_for(node), move |&scope| {
self.scopes.scopes[scope].parent
})
@ -178,7 +178,7 @@ impl ScopesWithSyntaxMapping {
.unwrap_or(original_scope)
}
pub fn resolve_local_name(&self, name_ref: ast::NameRef) -> Option<ScopeEntryWithSyntax> {
pub fn resolve_local_name(&self, name_ref: &ast::NameRef) -> Option<ScopeEntryWithSyntax> {
let mut shadowed = FxHashSet::default();
let name = name_ref.as_name();
let ret = self
@ -195,7 +195,7 @@ impl ScopesWithSyntaxMapping {
})
}
pub fn find_all_refs(&self, pat: ast::BindPat) -> Vec<ReferenceDescriptor> {
pub fn find_all_refs(&self, pat: &ast::BindPat) -> Vec<ReferenceDescriptor> {
let fn_def = pat.syntax().ancestors().find_map(ast::FnDef::cast).unwrap();
let name_ptr = LocalSyntaxPtr::new(pat.syntax());
fn_def
@ -213,7 +213,7 @@ impl ScopesWithSyntaxMapping {
.collect()
}
fn scope_for(&self, node: SyntaxNodeRef) -> Option<ScopeId> {
fn scope_for(&self, node: &SyntaxNode) -> Option<ScopeId> {
node.ancestors()
.map(LocalSyntaxPtr::new)
.filter_map(|ptr| self.syntax_mapping.syntax_expr(ptr))
@ -309,7 +309,7 @@ pub struct ReferenceDescriptor {
#[cfg(test)]
mod tests {
use ra_editor::find_node_at_offset;
use ra_syntax::SourceFileNode;
use ra_syntax::SourceFile;
use test_utils::{extract_offset, assert_eq_text};
use crate::expr;
@ -326,9 +326,9 @@ mod tests {
buf.push_str(&code[off..]);
buf
};
let file = SourceFileNode::parse(&code);
let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let file = SourceFile::parse(&code);
let marker: &ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let body_hir = expr::collect_fn_body_syntax(fn_def);
let scopes = FnScopes::new(Arc::clone(body_hir.body()));
let scopes = ScopesWithSyntaxMapping {
@ -422,9 +422,9 @@ mod tests {
fn do_check_local_name(code: &str, expected_offset: u32) {
let (off, code) = extract_offset(code);
let file = SourceFileNode::parse(&code);
let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
let file = SourceFile::parse(&code);
let fn_def: &ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let name_ref: &ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
let body_hir = expr::collect_fn_body_syntax(fn_def);
let scopes = FnScopes::new(Arc::clone(body_hir.body()));


@ -1,5 +1,5 @@
use ra_db::{SourceRootId, LocationIntener, Cancelable, FileId};
use ra_syntax::{SourceFileNode, SyntaxKind, SyntaxNode, SyntaxNodeRef, SourceFile, AstNode, ast};
use ra_syntax::{TreePtr, SyntaxKind, SyntaxNode, SourceFile, AstNode, ast};
use ra_arena::{Arena, RawId, impl_arena_id};
use crate::{HirDatabase, PerNs, ModuleId, Def, Function, Struct, Enum, ImplBlock, Crate};
@ -55,7 +55,10 @@ impl HirFileId {
}
}
pub(crate) fn hir_source_file(db: &impl HirDatabase, file_id: HirFileId) -> SourceFileNode {
pub(crate) fn hir_source_file(
db: &impl HirDatabase,
file_id: HirFileId,
) -> TreePtr<SourceFile> {
match file_id.0 {
HirFileIdRepr::File(file_id) => db.source_file(file_id),
HirFileIdRepr::Macro(m) => {
@ -63,7 +66,7 @@ impl HirFileId {
return exp.file();
}
// returning an empty string looks fishy...
SourceFileNode::parse("")
SourceFile::parse("")
}
}
}
@ -233,11 +236,11 @@ pub struct SourceItemId {
#[derive(Debug, PartialEq, Eq)]
pub struct SourceFileItems {
file_id: HirFileId,
arena: Arena<SourceFileItemId, SyntaxNode>,
arena: Arena<SourceFileItemId, TreePtr<SyntaxNode>>,
}
impl SourceFileItems {
pub(crate) fn new(file_id: HirFileId, source_file: SourceFile) -> SourceFileItems {
pub(crate) fn new(file_id: HirFileId, source_file: &SourceFile) -> SourceFileItems {
let mut res = SourceFileItems {
file_id,
arena: Arena::default(),
@ -246,20 +249,20 @@ impl SourceFileItems {
res
}
fn init(&mut self, source_file: SourceFile) {
fn init(&mut self, source_file: &SourceFile) {
source_file.syntax().descendants().for_each(|it| {
if let Some(module_item) = ast::ModuleItem::cast(it) {
self.alloc(module_item.syntax().owned());
self.alloc(module_item.syntax().to_owned());
} else if let Some(macro_call) = ast::MacroCall::cast(it) {
self.alloc(macro_call.syntax().owned());
self.alloc(macro_call.syntax().to_owned());
}
});
}
fn alloc(&mut self, item: SyntaxNode) -> SourceFileItemId {
fn alloc(&mut self, item: TreePtr<SyntaxNode>) -> SourceFileItemId {
self.arena.alloc(item)
}
pub(crate) fn id_of(&self, file_id: HirFileId, item: SyntaxNodeRef) -> SourceFileItemId {
pub(crate) fn id_of(&self, file_id: HirFileId, item: &SyntaxNode) -> SourceFileItemId {
assert_eq!(
self.file_id, file_id,
"SourceFileItems: wrong file, expected {:?}, got {:?}",
@ -267,8 +270,8 @@ impl SourceFileItems {
);
self.id_of_unchecked(item)
}
pub(crate) fn id_of_unchecked(&self, item: SyntaxNodeRef) -> SourceFileItemId {
if let Some((id, _)) = self.arena.iter().find(|(_id, i)| i.borrowed() == item) {
pub(crate) fn id_of_unchecked(&self, item: &SyntaxNode) -> SourceFileItemId {
if let Some((id, _)) = self.arena.iter().find(|(_id, i)| *i == item) {
return id;
}
// This should not happen. Let's try to give a sensible diagnostics.
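`SourceFileItems` can now store `TreePtr<SyntaxNode>` in its arena because the pointer co-owns the tree, unlike the old borrowed `SyntaxNodeRef`. A sketch of that owning-versus-borrowing distinction, assuming only the calls shown in this hunk (the function is illustrative):

use ra_syntax::{AstNode, SourceFile, SyntaxNode, TreePtr};

fn collect_nodes(file: &SourceFile) -> Vec<TreePtr<SyntaxNode>> {
    file.syntax()
        .descendants()
        // `to_owned()` returns a pointer that keeps the tree root alive, so the
        // collected nodes remain valid after the `&SourceFile` borrow ends.
        .map(|node| node.to_owned())
        .collect()
}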


@ -62,7 +62,7 @@ impl ImplData {
db: &impl AsRef<LocationIntener<DefLoc, DefId>>,
file_items: &SourceFileItems,
module: &Module,
node: ast::ImplBlock,
node: &ast::ImplBlock,
) -> Self {
let target_trait = node.target_type().map(TypeRef::from_ast);
let target_type = TypeRef::from_ast_opt(node.target_type());
@ -71,10 +71,10 @@ impl ImplData {
item_list
.impl_items()
.map(|item_node| {
let kind = match item_node {
ast::ImplItem::FnDef(..) => DefKind::Function,
ast::ImplItem::ConstDef(..) => DefKind::Item,
ast::ImplItem::TypeDef(..) => DefKind::Item,
let kind = match item_node.kind() {
ast::ImplItemKind::FnDef(..) => DefKind::Function,
ast::ImplItemKind::ConstDef(..) => DefKind::Item,
ast::ImplItemKind::TypeDef(..) => DefKind::Item,
};
let item_id = file_items.id_of_unchecked(item_node.syntax());
let source_item_id = SourceItemId {
@ -87,10 +87,10 @@ impl ImplData {
..module_loc
};
let def_id = def_loc.id(db);
match item_node {
ast::ImplItem::FnDef(..) => ImplItem::Method(Function::new(def_id)),
ast::ImplItem::ConstDef(..) => ImplItem::Const(def_id),
ast::ImplItem::TypeDef(..) => ImplItem::Type(def_id),
match item_node.kind() {
ast::ImplItemKind::FnDef(..) => ImplItem::Method(Function::new(def_id)),
ast::ImplItemKind::ConstDef(..) => ImplItem::Const(def_id),
ast::ImplItemKind::TypeDef(..) => ImplItem::Type(def_id),
}
})
.collect()
@ -152,8 +152,8 @@ impl ModuleImplBlocks {
fn collect(&mut self, db: &impl HirDatabase, module: Module) -> Cancelable<()> {
let (file_id, module_source) = module.defenition_source(db)?;
let node = match &module_source {
ModuleSource::SourceFile(node) => node.borrowed().syntax(),
ModuleSource::Module(node) => node.borrowed().syntax(),
ModuleSource::SourceFile(node) => node.syntax(),
ModuleSource::Module(node) => node.syntax(),
};
let source_file_items = db.file_items(file_id.into());


@ -11,7 +11,7 @@ use std::sync::Arc;
use ra_db::LocalSyntaxPtr;
use ra_syntax::{
TextRange, TextUnit, SourceFileNode, AstNode, SyntaxNode,
TextRange, TextUnit, SourceFile, AstNode, SyntaxNode, TreePtr,
ast::{self, NameOwner},
};
@ -28,14 +28,14 @@ pub enum MacroDef {
impl MacroDef {
/// Expands macro call, returning the expansion and offset to be used to
/// convert ranges between expansion and original source.
pub fn ast_expand(macro_call: ast::MacroCall) -> Option<(TextUnit, MacroExpansion)> {
pub fn ast_expand(macro_call: &ast::MacroCall) -> Option<(TextUnit, MacroExpansion)> {
let (def, input) = MacroDef::from_call(macro_call)?;
let exp = def.expand(input)?;
let off = macro_call.token_tree()?.syntax().range().start();
Some((off, exp))
}
fn from_call(macro_call: ast::MacroCall) -> Option<(MacroDef, MacroInput)> {
fn from_call(macro_call: &ast::MacroCall) -> Option<(MacroDef, MacroInput)> {
let def = {
let path = macro_call.path()?;
let name_ref = path.segment()?.name_ref()?;
@ -77,7 +77,7 @@ impl MacroDef {
}}",
input.text
);
let file = SourceFileNode::parse(&text);
let file = SourceFile::parse(&text);
let match_expr = file.syntax().descendants().find_map(ast::MatchExpr::cast)?;
let match_arg = match_expr.expr()?;
let ptr = LocalSyntaxPtr::new(match_arg.syntax());
@ -92,7 +92,7 @@ impl MacroDef {
}
fn expand_vec(self, input: MacroInput) -> Option<MacroExpansion> {
let text = format!(r"fn dummy() {{ {}; }}", input.text);
let file = SourceFileNode::parse(&text);
let file = SourceFile::parse(&text);
let array_expr = file.syntax().descendants().find_map(ast::ArrayExpr::cast)?;
let ptr = LocalSyntaxPtr::new(array_expr.syntax());
let src_range = TextRange::offset_len(0.into(), TextUnit::of_str(&input.text));
@ -116,7 +116,7 @@ impl MacroDef {
}
let src_range = TextRange::offset_len((pos as u32).into(), TextUnit::of_str(&trait_name));
let text = format!(r"trait {} {{ }}", trait_name);
let file = SourceFileNode::parse(&text);
let file = SourceFile::parse(&text);
let trait_def = file.syntax().descendants().find_map(ast::TraitDef::cast)?;
let name = trait_def.name()?;
let ptr = LocalSyntaxPtr::new(trait_def.syntax());
@ -152,11 +152,11 @@ pub struct MacroExpansion {
impl MacroExpansion {
// FIXME: does not really make sense, macro expansion is not necessarily a
// whole file. See `MacroExpansion::ptr` as well.
pub(crate) fn file(&self) -> SourceFileNode {
SourceFileNode::parse(&self.text)
pub(crate) fn file(&self) -> TreePtr<SourceFile> {
SourceFile::parse(&self.text)
}
pub fn syntax(&self) -> SyntaxNode {
pub fn syntax(&self) -> TreePtr<SyntaxNode> {
self.ptr.resolve(&self.file())
}
/// Maps range in the source code to the range in the expanded code.
@ -191,8 +191,7 @@ pub(crate) fn expand_macro_invocation(
) -> Option<Arc<MacroExpansion>> {
let loc = invoc.loc(db);
let syntax = db.file_item(loc.source_item_id);
let syntax = syntax.borrowed();
let macro_call = ast::MacroCall::cast(syntax).unwrap();
let macro_call = ast::MacroCall::cast(&syntax).unwrap();
let (def, input) = MacroDef::from_call(macro_call)?;
def.expand(input).map(Arc::new)
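`file()` re-parses the expansion text on every call, so a borrowed return value would point into a tree that is dropped immediately; returning `TreePtr` keeps that temporary tree alive. The same idea in isolation (hypothetical helper, ra_syntax API as shown in this diff):

use ra_syntax::{AstNode, SourceFile, SyntaxNode, TreePtr};

fn root_of(text: &str) -> TreePtr<SyntaxNode> {
    let file: TreePtr<SourceFile> = SourceFile::parse(text);
    // `to_owned()` co-owns the tree, so the returned node outlives `file`.
    file.syntax().to_owned()
}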


@ -5,9 +5,9 @@ use arrayvec::ArrayVec;
use relative_path::RelativePathBuf;
use ra_db::{FileId, SourceRootId, Cancelable, SourceRoot};
use ra_syntax::{
SyntaxNode, TreePtr,
algo::generate,
ast::{self, AstNode, NameOwner},
SyntaxNode,
};
use ra_arena::{Arena, RawId, impl_arena_id};
@ -19,12 +19,11 @@ impl ModuleSource {
source_item_id: SourceItemId,
) -> ModuleSource {
let module_syntax = db.file_item(source_item_id);
let module_syntax = module_syntax.borrowed();
if let Some(source_file) = ast::SourceFile::cast(module_syntax) {
ModuleSource::SourceFile(source_file.owned())
} else if let Some(module) = ast::Module::cast(module_syntax) {
if let Some(source_file) = ast::SourceFile::cast(&module_syntax) {
ModuleSource::SourceFile(source_file.to_owned())
} else if let Some(module) = ast::Module::cast(&module_syntax) {
assert!(module.item_list().is_some(), "expected inline module");
ModuleSource::Module(module.owned())
ModuleSource::Module(module.to_owned())
} else {
panic!("expected file or inline module")
}
@ -49,19 +48,18 @@ impl Submodule {
let module_source = ModuleSource::from_source_item_id(db, source);
let submodules = match module_source {
ModuleSource::SourceFile(source_file) => {
collect_submodules(file_id, &file_items, source_file.borrowed())
collect_submodules(file_id, &file_items, &*source_file)
}
ModuleSource::Module(module) => {
let module = module.borrowed();
collect_submodules(file_id, &file_items, module.item_list().unwrap())
}
};
return Ok(Arc::new(submodules));
fn collect_submodules<'a>(
fn collect_submodules(
file_id: HirFileId,
file_items: &SourceFileItems,
root: impl ast::ModuleItemOwner<'a>,
root: &impl ast::ModuleItemOwner,
) -> Vec<Submodule> {
modules(root)
.map(|(name, m)| Submodule {
@ -120,8 +118,8 @@ impl ModuleTree {
source_root: SourceRootId,
) -> Cancelable<Arc<ModuleTree>> {
db.check_canceled()?;
let res = create_module_tree(db, source_root)?;
Ok(Arc::new(res))
let res = create_module_tree(db, source_root);
Ok(Arc::new(res?))
}
pub(crate) fn modules<'a>(&'a self) -> impl Iterator<Item = ModuleId> + 'a {
@ -172,14 +170,14 @@ impl ModuleId {
self,
tree: &ModuleTree,
db: &impl HirDatabase,
) -> Vec<(SyntaxNode, Problem)> {
) -> Vec<(TreePtr<SyntaxNode>, Problem)> {
tree.mods[self]
.children
.iter()
.filter_map(|&link| {
let p = tree.links[link].problem.clone()?;
let s = link.source(tree, db);
let s = s.borrowed().name().unwrap().syntax().owned();
let s = s.name().unwrap().syntax().to_owned();
Some((s, p))
})
.collect()
@ -193,11 +191,9 @@ impl LinkId {
pub(crate) fn name(self, tree: &ModuleTree) -> &Name {
&tree.links[self].name
}
pub(crate) fn source(self, tree: &ModuleTree, db: &impl HirDatabase) -> ast::ModuleNode {
pub(crate) fn source(self, tree: &ModuleTree, db: &impl HirDatabase) -> TreePtr<ast::Module> {
let syntax_node = db.file_item(tree.links[self].source);
ast::ModuleNode::cast(syntax_node.borrowed())
.unwrap()
.owned()
ast::Module::cast(&syntax_node).unwrap().to_owned()
}
}
@ -213,12 +209,10 @@ impl ModuleTree {
}
}
fn modules<'a>(
root: impl ast::ModuleItemOwner<'a>,
) -> impl Iterator<Item = (Name, ast::Module<'a>)> {
fn modules(root: &impl ast::ModuleItemOwner) -> impl Iterator<Item = (Name, &ast::Module)> {
root.items()
.filter_map(|item| match item {
ast::ModuleItem::Module(m) => Some(m),
.filter_map(|item| match item.kind() {
ast::ModuleItemKind::Module(m) => Some(m),
_ => None,
})
.filter_map(|module| {


@ -74,13 +74,13 @@ pub(crate) trait AsName {
fn as_name(&self) -> Name;
}
impl AsName for ast::NameRef<'_> {
impl AsName for ast::NameRef {
fn as_name(&self) -> Name {
Name::new(self.text())
}
}
impl AsName for ast::Name<'_> {
impl AsName for ast::Name {
fn as_name(&self) -> Name {
Name::new(self.text())
}


@ -103,7 +103,7 @@ impl NamedImport {
item_id: Some(self.file_item_id),
};
let syntax = db.file_item(source_item_id);
let offset = syntax.borrowed().range().start();
let offset = syntax.range().start();
self.relative_range + offset
}
}
@ -215,45 +215,45 @@ impl InputModuleItems {
&mut self,
file_id: HirFileId,
file_items: &SourceFileItems,
item: ast::ModuleItem,
item: &ast::ModuleItem,
) -> Option<()> {
match item {
ast::ModuleItem::StructDef(it) => {
match item.kind() {
ast::ModuleItemKind::StructDef(it) => {
self.items.push(ModuleItem::new(file_id, file_items, it)?)
}
ast::ModuleItem::EnumDef(it) => {
ast::ModuleItemKind::EnumDef(it) => {
self.items.push(ModuleItem::new(file_id, file_items, it)?)
}
ast::ModuleItem::FnDef(it) => {
ast::ModuleItemKind::FnDef(it) => {
self.items.push(ModuleItem::new(file_id, file_items, it)?)
}
ast::ModuleItem::TraitDef(it) => {
ast::ModuleItemKind::TraitDef(it) => {
self.items.push(ModuleItem::new(file_id, file_items, it)?)
}
ast::ModuleItem::TypeDef(it) => {
ast::ModuleItemKind::TypeDef(it) => {
self.items.push(ModuleItem::new(file_id, file_items, it)?)
}
ast::ModuleItem::ImplBlock(_) => {
ast::ModuleItemKind::ImplBlock(_) => {
// impls don't define items
}
ast::ModuleItem::UseItem(it) => self.add_use_item(file_items, it),
ast::ModuleItem::ExternCrateItem(_) => {
ast::ModuleItemKind::UseItem(it) => self.add_use_item(file_items, it),
ast::ModuleItemKind::ExternCrateItem(_) => {
// TODO
}
ast::ModuleItem::ConstDef(it) => {
ast::ModuleItemKind::ConstDef(it) => {
self.items.push(ModuleItem::new(file_id, file_items, it)?)
}
ast::ModuleItem::StaticDef(it) => {
ast::ModuleItemKind::StaticDef(it) => {
self.items.push(ModuleItem::new(file_id, file_items, it)?)
}
ast::ModuleItem::Module(it) => {
ast::ModuleItemKind::Module(it) => {
self.items.push(ModuleItem::new(file_id, file_items, it)?)
}
}
Some(())
}
fn add_use_item(&mut self, file_items: &SourceFileItems, item: ast::UseItem) {
fn add_use_item(&mut self, file_items: &SourceFileItems, item: &ast::UseItem) {
let file_item_id = file_items.id_of_unchecked(item.syntax());
let start_offset = item.syntax().range().start();
Path::expand_use_item(item, |path, range| {
@ -270,10 +270,10 @@ impl InputModuleItems {
}
impl ModuleItem {
fn new<'a>(
fn new(
file_id: HirFileId,
file_items: &SourceFileItems,
item: impl ast::NameOwner<'a>,
item: &impl ast::NameOwner,
) -> Option<ModuleItem> {
let name = item.name()?.as_name();
let kind = item.syntax().kind();
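With the lifetime parameter gone from the AST traits, generic helpers simply borrow the node. A sketch against the `NameOwner` bound used above (the function name is illustrative):

use ra_syntax::ast::NameOwner;

fn name_text(item: &impl NameOwner) -> Option<String> {
    // `name()` now hands out a `&ast::Name` borrowed from the node.
    Some(item.name()?.text().to_string())
}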


@ -18,14 +18,14 @@ pub enum PathKind {
impl Path {
/// Calls `cb` with all paths, represented by this use item.
pub fn expand_use_item(item: ast::UseItem, mut cb: impl FnMut(Path, Option<TextRange>)) {
pub fn expand_use_item(item: &ast::UseItem, mut cb: impl FnMut(Path, Option<TextRange>)) {
if let Some(tree) = item.use_tree() {
expand_use_tree(None, tree, &mut cb);
}
}
/// Converts an `ast::Path` to `Path`. Works with use trees.
pub fn from_ast(mut path: ast::Path) -> Option<Path> {
pub fn from_ast(mut path: &ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain;
let mut segments = Vec::new();
loop {
@ -53,7 +53,7 @@ impl Path {
segments.reverse();
return Some(Path { kind, segments });
fn qualifier(path: ast::Path) -> Option<ast::Path> {
fn qualifier(path: &ast::Path) -> Option<&ast::Path> {
if let Some(q) = path.qualifier() {
return Some(q);
}
@ -66,7 +66,7 @@ impl Path {
}
/// Converts an `ast::NameRef` into a single-identifier `Path`.
pub fn from_name_ref(name_ref: ast::NameRef) -> Path {
pub fn from_name_ref(name_ref: &ast::NameRef) -> Path {
name_ref.as_name().into()
}
@ -100,7 +100,7 @@ impl From<Name> for Path {
fn expand_use_tree(
prefix: Option<Path>,
tree: ast::UseTree,
tree: &ast::UseTree,
cb: &mut impl FnMut(Path, Option<TextRange>),
) {
if let Some(use_tree_list) = tree.use_tree_list() {
@ -146,7 +146,7 @@ fn expand_use_tree(
}
}
fn convert_path(prefix: Option<Path>, path: ast::Path) -> Option<Path> {
fn convert_path(prefix: Option<Path>, path: &ast::Path) -> Option<Path> {
let prefix = if let Some(qual) = path.qualifier() {
Some(convert_path(prefix, qual)?)
} else {


@ -5,7 +5,7 @@ use std::{
use rustc_hash::FxHashMap;
use ra_syntax::{
AstNode, SyntaxNode,
AstNode, SyntaxNode, TreePtr,
ast::{self, ModuleItemOwner}
};
use ra_db::{SourceRootId, Cancelable,};
@ -31,30 +31,34 @@ pub(super) fn struct_data(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Ar
assert!(def_loc.kind == DefKind::Struct);
let syntax = db.file_item(def_loc.source_item_id);
let struct_def =
ast::StructDef::cast(syntax.borrowed()).expect("struct def should point to StructDef node");
Ok(Arc::new(StructData::new(struct_def.borrowed())))
ast::StructDef::cast(&syntax).expect("struct def should point to StructDef node");
Ok(Arc::new(StructData::new(struct_def)))
}
pub(super) fn enum_data(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Arc<EnumData>> {
let def_loc = def_id.loc(db);
assert!(def_loc.kind == DefKind::Enum);
let syntax = db.file_item(def_loc.source_item_id);
let enum_def =
ast::EnumDef::cast(syntax.borrowed()).expect("enum def should point to EnumDef node");
Ok(Arc::new(EnumData::new(enum_def.borrowed())))
let enum_def = ast::EnumDef::cast(&syntax).expect("enum def should point to EnumDef node");
Ok(Arc::new(EnumData::new(enum_def)))
}
pub(super) fn file_items(db: &impl HirDatabase, file_id: HirFileId) -> Arc<SourceFileItems> {
let source_file = db.hir_source_file(file_id);
let source_file = source_file.borrowed();
let res = SourceFileItems::new(file_id, source_file);
let res = SourceFileItems::new(file_id, &source_file);
Arc::new(res)
}
pub(super) fn file_item(db: &impl HirDatabase, source_item_id: SourceItemId) -> SyntaxNode {
pub(super) fn file_item(
db: &impl HirDatabase,
source_item_id: SourceItemId,
) -> TreePtr<SyntaxNode> {
match source_item_id.item_id {
Some(id) => db.file_items(source_item_id.file_id)[id].clone(),
None => db.hir_source_file(source_item_id.file_id).syntax().owned(),
Some(id) => db.file_items(source_item_id.file_id)[id].to_owned(),
None => db
.hir_source_file(source_item_id.file_id)
.syntax()
.to_owned(),
}
}
@ -88,7 +92,7 @@ pub(super) fn input_module_items(
let file_id = HirFileId::from(id);
let file_items = db.file_items(file_id);
//FIXME: expand recursively
for item in db.hir_source_file(file_id).borrowed().items() {
for item in db.hir_source_file(file_id).items() {
acc.add_item(file_id, &file_items, item);
}
}
@ -98,9 +102,9 @@ pub(super) fn input_module_items(
let mut res = InputModuleItems::default();
match source {
ModuleSource::SourceFile(it) => fill(&mut res, &mut it.borrowed().items_with_macros()),
ModuleSource::SourceFile(it) => fill(&mut res, &mut it.items_with_macros()),
ModuleSource::Module(it) => {
if let Some(item_list) = it.borrowed().item_list() {
if let Some(item_list) = it.item_list() {
fill(&mut res, &mut item_list.items_with_macros())
}
}


@ -8,7 +8,7 @@
use ra_db::{FileId, FilePosition, Cancelable};
use ra_editor::find_node_at_offset;
use ra_syntax::{
SmolStr, TextRange, SyntaxNodeRef,
SmolStr, TextRange, SyntaxNode,
ast::{self, AstNode, NameOwner},
};
@ -30,7 +30,7 @@ pub fn module_from_file_id(db: &impl HirDatabase, file_id: FileId) -> Cancelable
pub fn module_from_declaration(
db: &impl HirDatabase,
file_id: FileId,
decl: ast::Module,
decl: &ast::Module,
) -> Cancelable<Option<Module>> {
let parent_module = module_from_file_id(db, file_id)?;
let child_name = decl.name();
@ -60,7 +60,7 @@ pub fn module_from_position(
fn module_from_inline(
db: &impl HirDatabase,
file_id: FileId,
module: ast::Module,
module: &ast::Module,
) -> Cancelable<Option<Module>> {
assert!(!module.has_semi());
let file_id = file_id.into();
@ -77,7 +77,7 @@ fn module_from_inline(
pub fn module_from_child_node(
db: &impl HirDatabase,
file_id: FileId,
child: SyntaxNodeRef,
child: &SyntaxNode,
) -> Cancelable<Option<Module>> {
if let Some(m) = child
.ancestors()
@ -112,7 +112,7 @@ pub fn function_from_position(
pub fn function_from_source(
db: &impl HirDatabase,
file_id: FileId,
fn_def: ast::FnDef,
fn_def: &ast::FnDef,
) -> Cancelable<Option<Function>> {
let module = ctry!(module_from_child_node(db, file_id, fn_def.syntax())?);
let res = function_from_module(db, &module, fn_def);
@ -122,7 +122,7 @@ pub fn function_from_source(
pub fn function_from_module(
db: &impl HirDatabase,
module: &Module,
fn_def: ast::FnDef,
fn_def: &ast::FnDef,
) -> Function {
let loc = module.def_id.loc(db);
let file_id = loc.source_item_id.file_id;
@ -144,7 +144,7 @@ pub fn function_from_module(
pub fn function_from_child_node(
db: &impl HirDatabase,
file_id: FileId,
node: SyntaxNodeRef,
node: &SyntaxNode,
) -> Cancelable<Option<Function>> {
let fn_def = ctry!(node.ancestors().find_map(ast::FnDef::cast));
function_from_source(db, file_id, fn_def)
@ -170,8 +170,7 @@ pub fn macro_symbols(
if let Some(exp) = db.expand_macro_invocation(macro_call_id) {
let loc = macro_call_id.loc(db);
let syntax = db.file_item(loc.source_item_id);
let syntax = syntax.borrowed();
let macro_call = ast::MacroCall::cast(syntax).unwrap();
let macro_call = ast::MacroCall::cast(&syntax).unwrap();
let off = macro_call.token_tree().unwrap().syntax().range().start();
let file = exp.file();
for trait_def in file.syntax().descendants().filter_map(ast::TraitDef::cast) {
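`ancestors()` now yields `&SyntaxNode`s borrowed from the same tree, so an upward search like the one above can return the typed node with the caller's lifetime. The pattern on its own (hypothetical helper name):

use ra_syntax::{ast, AstNode, SyntaxNode};

fn enclosing_fn(node: &SyntaxNode) -> Option<&ast::FnDef> {
    // `cast` borrows from the untyped node, so the result lives as long as `node`.
    node.ancestors().find_map(ast::FnDef::cast)
}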


@ -56,9 +56,9 @@ pub enum TypeRef {
impl TypeRef {
/// Converts an `ast::TypeRef` to a `hir::TypeRef`.
pub(crate) fn from_ast(node: ast::TypeRef) -> Self {
use ra_syntax::ast::TypeRef::*;
match node {
pub(crate) fn from_ast(node: &ast::TypeRef) -> Self {
use ra_syntax::ast::TypeRefKind::*;
match node.kind() {
ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()),
TupleType(inner) => TypeRef::Tuple(inner.fields().map(TypeRef::from_ast).collect()),
NeverType(..) => TypeRef::Never,
@ -100,7 +100,7 @@ impl TypeRef {
}
}
pub(crate) fn from_ast_opt(node: Option<ast::TypeRef>) -> Self {
pub(crate) fn from_ast_opt(node: Option<&ast::TypeRef>) -> Self {
if let Some(node) = node {
TypeRef::from_ast(node)
} else {


@ -11,7 +11,7 @@ use languageserver_types::{
use ra_analysis::{
FileId, FilePosition, FileRange, FoldKind, Query, RunnableKind, Severity, SourceChange,
};
use ra_syntax::{text_utils::intersect, TextUnit};
use ra_syntax::{text_utils::intersect, TextUnit, AstNode};
use ra_text_edit::text_utils::contains_offset_nonstrict;
use rustc_hash::FxHashMap;
use serde_json::to_value;


@ -13,7 +13,7 @@ unicode-xid = "0.1.0"
itertools = "0.8.0"
drop_bomb = "0.1.4"
parking_lot = "0.7.0"
rowan = "0.1.2"
rowan = "0.2.0"
text_unit = "0.1.5"
ra_text_edit = { path = "../ra_text_edit" }


@ -1,19 +1,23 @@
pub mod visit;
use crate::{SyntaxNode, SyntaxNodeRef, TextRange, TextUnit};
use rowan::TransparentNewType;
use crate::{SyntaxNode, TextRange, TextUnit};
pub use rowan::LeafAtOffset;
pub fn find_leaf_at_offset(node: SyntaxNodeRef, offset: TextUnit) -> LeafAtOffset<SyntaxNodeRef> {
pub fn find_leaf_at_offset(node: &SyntaxNode, offset: TextUnit) -> LeafAtOffset<&SyntaxNode> {
match node.0.leaf_at_offset(offset) {
LeafAtOffset::None => LeafAtOffset::None,
LeafAtOffset::Single(n) => LeafAtOffset::Single(SyntaxNode(n)),
LeafAtOffset::Between(l, r) => LeafAtOffset::Between(SyntaxNode(l), SyntaxNode(r)),
LeafAtOffset::Single(n) => LeafAtOffset::Single(SyntaxNode::from_repr(n)),
LeafAtOffset::Between(l, r) => {
LeafAtOffset::Between(SyntaxNode::from_repr(l), SyntaxNode::from_repr(r))
}
}
}
pub fn find_covering_node(root: SyntaxNodeRef, range: TextRange) -> SyntaxNodeRef {
SyntaxNode(root.0.covering_node(range))
pub fn find_covering_node(root: &SyntaxNode, range: TextRange) -> &SyntaxNode {
SyntaxNode::from_repr(root.0.covering_node(range))
}
pub fn generate<T>(seed: Option<T>, step: impl Fn(&T) -> Option<T>) -> impl Iterator<Item = T> {
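Both helpers above now take and return plain borrows. A sketch of calling `find_covering_node` through the public `algo` module, using only the signatures in this hunk (the wrapper is hypothetical):

use ra_syntax::{algo, AstNode, SourceFile, SyntaxNode, TextRange};

fn covering_node(file: &SourceFile, range: TextRange) -> &SyntaxNode {
    // Borrow in, borrow out: the returned node lives as long as `file`.
    algo::find_covering_node(file.syntax(), range)
}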


@ -1,4 +1,4 @@
use crate::{AstNode, SyntaxNodeRef};
use crate::{AstNode, SyntaxNode};
use std::marker::PhantomData;
@ -15,11 +15,11 @@ pub fn visitor_ctx<'a, T, C>(ctx: C) -> impl VisitorCtx<'a, Output = T, Ctx = C>
pub trait Visitor<'a>: Sized {
type Output;
fn accept(self, node: SyntaxNodeRef<'a>) -> Option<Self::Output>;
fn accept(self, node: &'a SyntaxNode) -> Option<Self::Output>;
fn visit<N, F>(self, f: F) -> Vis<Self, N, F>
where
N: AstNode<'a>,
F: FnOnce(N) -> Self::Output,
N: AstNode + 'a,
F: FnOnce(&'a N) -> Self::Output,
{
Vis {
inner: self,
@ -32,11 +32,11 @@ pub trait Visitor<'a>: Sized {
pub trait VisitorCtx<'a>: Sized {
type Output;
type Ctx;
fn accept(self, node: SyntaxNodeRef<'a>) -> Result<Self::Output, Self::Ctx>;
fn accept(self, node: &'a SyntaxNode) -> Result<Self::Output, Self::Ctx>;
fn visit<N, F>(self, f: F) -> VisCtx<Self, N, F>
where
N: AstNode<'a>,
F: FnOnce(N, Self::Ctx) -> Self::Output,
N: AstNode + 'a,
F: FnOnce(&'a N, Self::Ctx) -> Self::Output,
{
VisCtx {
inner: self,
@ -54,7 +54,7 @@ struct EmptyVisitor<T> {
impl<'a, T> Visitor<'a> for EmptyVisitor<T> {
type Output = T;
fn accept(self, _node: SyntaxNodeRef<'a>) -> Option<T> {
fn accept(self, _node: &'a SyntaxNode) -> Option<T> {
None
}
}
@ -69,7 +69,7 @@ impl<'a, T, C> VisitorCtx<'a> for EmptyVisitorCtx<T, C> {
type Output = T;
type Ctx = C;
fn accept(self, _node: SyntaxNodeRef<'a>) -> Result<T, C> {
fn accept(self, _node: &'a SyntaxNode) -> Result<T, C> {
Err(self.ctx)
}
}
@ -84,12 +84,12 @@ pub struct Vis<V, N, F> {
impl<'a, V, N, F> Visitor<'a> for Vis<V, N, F>
where
V: Visitor<'a>,
N: AstNode<'a>,
F: FnOnce(N) -> <V as Visitor<'a>>::Output,
N: AstNode + 'a,
F: FnOnce(&'a N) -> <V as Visitor<'a>>::Output,
{
type Output = <V as Visitor<'a>>::Output;
fn accept(self, node: SyntaxNodeRef<'a>) -> Option<Self::Output> {
fn accept(self, node: &'a SyntaxNode) -> Option<Self::Output> {
let Vis { inner, f, .. } = self;
inner.accept(node).or_else(|| N::cast(node).map(f))
}
@ -105,13 +105,13 @@ pub struct VisCtx<V, N, F> {
impl<'a, V, N, F> VisitorCtx<'a> for VisCtx<V, N, F>
where
V: VisitorCtx<'a>,
N: AstNode<'a>,
F: FnOnce(N, <V as VisitorCtx<'a>>::Ctx) -> <V as VisitorCtx<'a>>::Output,
N: AstNode + 'a,
F: FnOnce(&'a N, <V as VisitorCtx<'a>>::Ctx) -> <V as VisitorCtx<'a>>::Output,
{
type Output = <V as VisitorCtx<'a>>::Output;
type Ctx = <V as VisitorCtx<'a>>::Ctx;
fn accept(self, node: SyntaxNodeRef<'a>) -> Result<Self::Output, Self::Ctx> {
fn accept(self, node: &'a SyntaxNode) -> Result<Self::Output, Self::Ctx> {
let VisCtx { inner, f, .. } = self;
inner.accept(node).or_else(|ctx| match N::cast(node) {
None => Err(ctx),
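The visitor combinators keep their shape but now accept `&SyntaxNode` and closures over `&N`. A sketch of typical use, assuming the `visitor()` constructor that lives alongside `visitor_ctx` in this module (it is not shown in the hunk):

use ra_syntax::{algo::visit::{visitor, Visitor}, ast, SyntaxNode};

fn describe(node: &SyntaxNode) -> Option<&'static str> {
    visitor()
        .visit::<ast::FnDef, _>(|_| "a function")
        .visit::<ast::StructDef, _>(|_| "a struct")
        .accept(node)
}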


@ -1,119 +1,115 @@
mod generated;
use std::marker::PhantomData;
use std::string::String as RustString;
use itertools::Itertools;
pub use self::generated::*;
use crate::{
yellow::{RefRoot, SyntaxNodeChildren},
yellow::{SyntaxNode, SyntaxNodeChildren, TreePtr, RaTypes},
SmolStr,
SyntaxKind::*,
SyntaxNodeRef,
};
/// The main trait to go from untyped `SyntaxNode` to a typed ast. The
/// conversion itself has zero runtime cost: ast and syntax nodes have exactly
/// the same representation: a pointer to the tree root and a pointer to the
/// node itself.
pub trait AstNode<'a>: Clone + Copy + 'a {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self>
pub trait AstNode: rowan::TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>> {
fn cast(syntax: &SyntaxNode) -> Option<&Self>
where
Self: Sized;
fn syntax(self) -> SyntaxNodeRef<'a>;
fn syntax(&self) -> &SyntaxNode;
fn to_owned(&self) -> TreePtr<Self>;
}
pub trait NameOwner<'a>: AstNode<'a> {
fn name(self) -> Option<Name<'a>> {
pub trait NameOwner: AstNode {
fn name(&self) -> Option<&Name> {
child_opt(self)
}
}
pub trait VisibilityOwner<'a>: AstNode<'a> {
fn visibility(self) -> Option<Visibility<'a>> {
pub trait VisibilityOwner: AstNode {
fn visibility(&self) -> Option<&Visibility> {
child_opt(self)
}
}
pub trait LoopBodyOwner<'a>: AstNode<'a> {
fn loop_body(self) -> Option<Block<'a>> {
pub trait LoopBodyOwner: AstNode {
fn loop_body(&self) -> Option<&Block> {
child_opt(self)
}
}
pub trait ArgListOwner<'a>: AstNode<'a> {
fn arg_list(self) -> Option<ArgList<'a>> {
pub trait ArgListOwner: AstNode {
fn arg_list(&self) -> Option<&ArgList> {
child_opt(self)
}
}
pub trait FnDefOwner<'a>: AstNode<'a> {
fn functions(self) -> AstChildren<'a, FnDef<'a>> {
pub trait FnDefOwner: AstNode {
fn functions(&self) -> AstChildren<FnDef> {
children(self)
}
}
// ModuleItem
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ItemOrMacro<'a> {
Item(ModuleItem<'a>),
Macro(MacroCall<'a>),
Item(&'a ModuleItem),
Macro(&'a MacroCall),
}
impl<'a> AstNode<'a> for ItemOrMacro<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
let res = if let Some(item) = ModuleItem::cast(syntax) {
ItemOrMacro::Item(item)
} else if let Some(macro_call) = MacroCall::cast(syntax) {
ItemOrMacro::Macro(macro_call)
} else {
return None;
};
Some(res)
pub trait ModuleItemOwner: AstNode {
fn items(&self) -> AstChildren<ModuleItem> {
children(self)
}
fn syntax(self) -> SyntaxNodeRef<'a> {
match self {
ItemOrMacro::Item(it) => it.syntax(),
ItemOrMacro::Macro(it) => it.syntax(),
fn items_with_macros(&self) -> ItemOrMacroIter {
ItemOrMacroIter(self.syntax().children())
}
}
#[derive(Debug)]
pub struct ItemOrMacroIter<'a>(SyntaxNodeChildren<'a>);
impl<'a> Iterator for ItemOrMacroIter<'a> {
type Item = ItemOrMacro<'a>;
fn next(&mut self) -> Option<ItemOrMacro<'a>> {
loop {
let n = self.0.next()?;
if let Some(item) = ModuleItem::cast(n) {
return Some(ItemOrMacro::Item(item));
}
if let Some(call) = MacroCall::cast(n) {
return Some(ItemOrMacro::Macro(call));
}
}
}
}
pub trait ModuleItemOwner<'a>: AstNode<'a> {
fn items(self) -> AstChildren<'a, ModuleItem<'a>> {
children(self)
}
fn items_with_macros(self) -> AstChildren<'a, ItemOrMacro<'a>> {
children(self)
}
}
pub trait TypeParamsOwner<'a>: AstNode<'a> {
fn type_param_list(self) -> Option<TypeParamList<'a>> {
pub trait TypeParamsOwner: AstNode {
fn type_param_list(&self) -> Option<&TypeParamList> {
child_opt(self)
}
fn where_clause(self) -> Option<WhereClause<'a>> {
fn where_clause(&self) -> Option<&WhereClause> {
child_opt(self)
}
}
pub trait AttrsOwner<'a>: AstNode<'a> {
fn attrs(self) -> AstChildren<'a, Attr<'a>> {
pub trait AttrsOwner: AstNode {
fn attrs(&self) -> AstChildren<Attr> {
children(self)
}
}
pub trait DocCommentsOwner<'a>: AstNode<'a> {
fn doc_comments(self) -> AstChildren<'a, Comment<'a>> {
pub trait DocCommentsOwner: AstNode {
fn doc_comments(&self) -> AstChildren<Comment> {
children(self)
}
/// Returns the textual content of a doc comment block as a single string.
/// That is, strips leading `///` and joins lines
fn doc_comment_text(self) -> RustString {
fn doc_comment_text(&self) -> std::string::String {
self.doc_comments()
.filter(|comment| comment.is_doc_comment())
.map(|comment| {
@ -130,13 +126,13 @@ pub trait DocCommentsOwner<'a>: AstNode<'a> {
}
}
impl<'a> FnDef<'a> {
impl FnDef {
pub fn has_atom_attr(&self, atom: &str) -> bool {
self.attrs().filter_map(|x| x.as_atom()).any(|x| x == atom)
}
}
impl<'a> Attr<'a> {
impl Attr {
pub fn as_atom(&self) -> Option<SmolStr> {
let tt = self.value()?;
let (_bra, attr, _ket) = tt.syntax().children().collect_tuple()?;
@ -147,7 +143,7 @@ impl<'a> Attr<'a> {
}
}
pub fn as_call(&self) -> Option<(SmolStr, TokenTree<'a>)> {
pub fn as_call(&self) -> Option<(SmolStr, &TokenTree)> {
let tt = self.value()?;
let (_bra, attr, args, _ket) = tt.syntax().children().collect_tuple()?;
let args = TokenTree::cast(args)?;
@ -159,37 +155,37 @@ impl<'a> Attr<'a> {
}
}
impl<'a> Lifetime<'a> {
impl Lifetime {
pub fn text(&self) -> SmolStr {
self.syntax().leaf_text().unwrap().clone()
}
}
impl<'a> Char<'a> {
impl Char {
pub fn text(&self) -> &SmolStr {
&self.syntax().leaf_text().unwrap()
}
}
impl<'a> Byte<'a> {
impl Byte {
pub fn text(&self) -> &SmolStr {
&self.syntax().leaf_text().unwrap()
}
}
impl<'a> ByteString<'a> {
impl ByteString {
pub fn text(&self) -> &SmolStr {
&self.syntax().leaf_text().unwrap()
}
}
impl<'a> String<'a> {
impl String {
pub fn text(&self) -> &SmolStr {
&self.syntax().leaf_text().unwrap()
}
}
impl<'a> Comment<'a> {
impl Comment {
pub fn text(&self) -> &SmolStr {
self.syntax().leaf_text().unwrap()
}
@ -251,7 +247,7 @@ impl CommentFlavor {
}
}
impl<'a> Whitespace<'a> {
impl Whitespace {
pub fn text(&self) -> &SmolStr {
&self.syntax().leaf_text().unwrap()
}
@ -265,36 +261,36 @@ impl<'a> Whitespace<'a> {
}
}
impl<'a> Name<'a> {
impl Name {
pub fn text(&self) -> SmolStr {
let ident = self.syntax().first_child().unwrap();
ident.leaf_text().unwrap().clone()
}
}
impl<'a> NameRef<'a> {
impl NameRef {
pub fn text(&self) -> SmolStr {
let ident = self.syntax().first_child().unwrap();
ident.leaf_text().unwrap().clone()
}
}
impl<'a> ImplBlock<'a> {
pub fn target_type(self) -> Option<TypeRef<'a>> {
impl ImplBlock {
pub fn target_type(&self) -> Option<&TypeRef> {
match self.target() {
(Some(t), None) | (_, Some(t)) => Some(t),
_ => None,
}
}
pub fn target_trait(self) -> Option<TypeRef<'a>> {
pub fn target_trait(&self) -> Option<&TypeRef> {
match self.target() {
(Some(t), Some(_)) => Some(t),
_ => None,
}
}
fn target(self) -> (Option<TypeRef<'a>>, Option<TypeRef<'a>>) {
fn target(&self) -> (Option<&TypeRef>, Option<&TypeRef>) {
let mut types = children(self);
let first = types.next();
let second = types.next();
@ -302,8 +298,8 @@ impl<'a> ImplBlock<'a> {
}
}
impl<'a> Module<'a> {
pub fn has_semi(self) -> bool {
impl Module {
pub fn has_semi(&self) -> bool {
match self.syntax().last_child() {
None => false,
Some(node) => node.kind() == SEMI,
@ -311,8 +307,8 @@ impl<'a> Module<'a> {
}
}
impl<'a> LetStmt<'a> {
pub fn has_semi(self) -> bool {
impl LetStmt {
pub fn has_semi(&self) -> bool {
match self.syntax().last_child() {
None => false,
Some(node) => node.kind() == SEMI,
@ -320,35 +316,35 @@ impl<'a> LetStmt<'a> {
}
}
impl<'a> IfExpr<'a> {
pub fn then_branch(self) -> Option<Block<'a>> {
impl IfExpr {
pub fn then_branch(&self) -> Option<&Block> {
self.blocks().nth(0)
}
pub fn else_branch(self) -> Option<Block<'a>> {
pub fn else_branch(&self) -> Option<&Block> {
self.blocks().nth(1)
}
fn blocks(self) -> AstChildren<'a, Block<'a>> {
fn blocks(&self) -> AstChildren<Block> {
children(self)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PathSegmentKind<'a> {
Name(NameRef<'a>),
Name(&'a NameRef),
SelfKw,
SuperKw,
CrateKw,
}
impl<'a> PathSegment<'a> {
pub fn parent_path(self) -> Path<'a> {
impl PathSegment {
pub fn parent_path(&self) -> &Path {
self.syntax()
.parent()
.and_then(Path::cast)
.expect("segments are always nested in paths")
}
pub fn kind(self) -> Option<PathSegmentKind<'a>> {
pub fn kind(&self) -> Option<PathSegmentKind> {
let res = if let Some(name_ref) = self.name_ref() {
PathSegmentKind::Name(name_ref)
} else {
@ -363,20 +359,20 @@ impl<'a> PathSegment<'a> {
}
}
impl<'a> Path<'a> {
pub fn parent_path(self) -> Option<Path<'a>> {
impl Path {
pub fn parent_path(&self) -> Option<&Path> {
self.syntax().parent().and_then(Path::cast)
}
}
impl<'a> UseTree<'a> {
pub fn has_star(self) -> bool {
impl UseTree {
pub fn has_star(&self) -> bool {
self.syntax().children().any(|it| it.kind() == STAR)
}
}
impl<'a> UseTreeList<'a> {
pub fn parent_use_tree(self) -> UseTree<'a> {
impl UseTreeList {
pub fn parent_use_tree(&self) -> &UseTree {
self.syntax()
.parent()
.and_then(UseTree::cast)
@ -384,22 +380,22 @@ impl<'a> UseTreeList<'a> {
}
}
fn child_opt<'a, P: AstNode<'a>, C: AstNode<'a>>(parent: P) -> Option<C> {
fn child_opt<P: AstNode, C: AstNode>(parent: &P) -> Option<&C> {
children(parent).next()
}
fn children<'a, P: AstNode<'a>, C: AstNode<'a>>(parent: P) -> AstChildren<'a, C> {
fn children<P: AstNode, C: AstNode>(parent: &P) -> AstChildren<C> {
AstChildren::new(parent.syntax())
}
#[derive(Debug)]
pub struct AstChildren<'a, N> {
inner: SyntaxNodeChildren<RefRoot<'a>>,
inner: SyntaxNodeChildren<'a>,
ph: PhantomData<N>,
}
impl<'a, N> AstChildren<'a, N> {
fn new(parent: SyntaxNodeRef<'a>) -> Self {
fn new(parent: &'a SyntaxNode) -> Self {
AstChildren {
inner: parent.children(),
ph: PhantomData,
@ -407,9 +403,9 @@ impl<'a, N> AstChildren<'a, N> {
}
}
impl<'a, N: AstNode<'a>> Iterator for AstChildren<'a, N> {
type Item = N;
fn next(&mut self) -> Option<N> {
impl<'a, N: AstNode + 'a> Iterator for AstChildren<'a, N> {
type Item = &'a N;
fn next(&mut self) -> Option<&'a N> {
loop {
if let Some(n) = N::cast(self.inner.next()?) {
return Some(n);
@ -420,13 +416,13 @@ impl<'a, N: AstNode<'a>> Iterator for AstChildren<'a, N> {
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum StructFlavor<'a> {
Tuple(PosFieldList<'a>),
Named(NamedFieldDefList<'a>),
Tuple(&'a PosFieldList),
Named(&'a NamedFieldDefList),
Unit,
}
impl<'a> StructFlavor<'a> {
fn from_node<N: AstNode<'a>>(node: N) -> StructFlavor<'a> {
impl StructFlavor<'_> {
fn from_node<N: AstNode>(node: &N) -> StructFlavor {
if let Some(nfdl) = child_opt::<_, NamedFieldDefList>(node) {
StructFlavor::Named(nfdl)
} else if let Some(pfl) = child_opt::<_, PosFieldList>(node) {
@ -437,31 +433,31 @@ impl<'a> StructFlavor<'a> {
}
}
impl<'a> StructDef<'a> {
pub fn flavor(self) -> StructFlavor<'a> {
impl StructDef {
pub fn flavor(&self) -> StructFlavor {
StructFlavor::from_node(self)
}
}
impl<'a> EnumVariant<'a> {
pub fn flavor(self) -> StructFlavor<'a> {
impl EnumVariant {
pub fn flavor(&self) -> StructFlavor {
StructFlavor::from_node(self)
}
}
impl<'a> PointerType<'a> {
impl PointerType {
pub fn is_mut(&self) -> bool {
self.syntax().children().any(|n| n.kind() == MUT_KW)
}
}
impl<'a> ReferenceType<'a> {
impl ReferenceType {
pub fn is_mut(&self) -> bool {
self.syntax().children().any(|n| n.kind() == MUT_KW)
}
}
impl<'a> RefExpr<'a> {
impl RefExpr {
pub fn is_mut(&self) -> bool {
self.syntax().children().any(|n| n.kind() == MUT_KW)
}
@ -477,7 +473,7 @@ pub enum PrefixOp {
Neg,
}
impl<'a> PrefixExpr<'a> {
impl PrefixExpr {
pub fn op(&self) -> Option<PrefixOp> {
match self.syntax().first_child()?.kind() {
STAR => Some(PrefixOp::Deref),
@ -552,7 +548,7 @@ pub enum BinOp {
BitXorAssign,
}
impl<'a> BinExpr<'a> {
impl BinExpr {
pub fn op(&self) -> Option<BinOp> {
self.syntax()
.children()
@ -592,15 +588,15 @@ impl<'a> BinExpr<'a> {
.next()
}
pub fn lhs(self) -> Option<Expr<'a>> {
pub fn lhs(&self) -> Option<&Expr> {
children(self).nth(0)
}
pub fn rhs(self) -> Option<Expr<'a>> {
pub fn rhs(&self) -> Option<&Expr> {
children(self).nth(1)
}
pub fn sub_exprs(self) -> (Option<Expr<'a>>, Option<Expr<'a>>) {
pub fn sub_exprs(&self) -> (Option<&Expr>, Option<&Expr>) {
let mut children = children(self);
let first = children.next();
let second = children.next();
@ -618,7 +614,7 @@ pub enum SelfParamFlavor {
MutRef,
}
impl<'a> SelfParam<'a> {
impl SelfParam {
pub fn flavor(&self) -> SelfParamFlavor {
let borrowed = self.syntax().children().any(|n| n.kind() == AMP);
if borrowed {
@ -641,7 +637,7 @@ impl<'a> SelfParam<'a> {
#[test]
fn test_doc_comment_of_items() {
let file = SourceFileNode::parse(
let file = SourceFile::parse(
r#"
//! doc
// non-doc
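The rewritten `AstNode` trait above is the heart of this migration: typed nodes are `#[repr(transparent)]` wrappers over `rowan::SyntaxNode`, so `cast` returns `&Self` borrowed straight from the untyped node and `to_owned` upgrades that borrow to a `TreePtr`. A sketch of consuming code under that contract (the function is illustrative, not from the diff):

use ra_syntax::{ast::{self, NameOwner}, AstNode, SourceFile, TreePtr};

fn struct_names(text: &str) -> Vec<String> {
    // `parse` hands back an owning pointer to the typed root.
    let file: TreePtr<SourceFile> = SourceFile::parse(text);
    file.syntax()
        .descendants()
        // `cast` is zero-cost: `&SyntaxNode` and `&ast::StructDef` share a representation.
        .filter_map(ast::StructDef::cast)
        .filter_map(|s| s.name())
        .map(|name| name.text().to_string())
        .collect()
}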

(diff suppressed: file too large to display)


@ -11,89 +11,92 @@ the below applies to the result of this template
#![cfg_attr(rustfmt, rustfmt_skip)]
use std::hash::{Hash, Hasher};
use rowan::TransparentNewType;
use crate::{
ast,
SyntaxNode, SyntaxNodeRef, AstNode,
yellow::{TreeRoot, RaTypes, OwnedRoot, RefRoot},
SyntaxKind::*,
SyntaxNode, SyntaxKind::*,
yellow::{RaTypes, TreePtr},
ast::{self, AstNode},
};
{% for node, methods in ast %}
// {{ node }}
{%- if methods.enum %}
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct {{ node }} {
pub(crate) syntax: SyntaxNode,
}
unsafe impl TransparentNewType for {{ node }} {
type Repr = rowan::SyntaxNode<RaTypes>;
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum {{ node }}<'a> {
pub enum {{ node }}Kind<'a> {
{%- for kind in methods.enum %}
{{ kind }}({{ kind }}<'a>),
{{ kind }}(&'a {{ kind }}),
{%- endfor %}
}
impl<'a> AstNode<'a> for {{ node }}<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
impl AstNode for {{ node }} {
fn cast(syntax: &SyntaxNode) -> Option<&Self> {
match syntax.kind() {
{%- for kind in methods.enum %}
{{ kind | SCREAM }} => Some({{ node }}::{{ kind }}({{ kind }} { syntax })),
{%- endfor %}
{%- for kind in methods.enum %}
| {{ kind | SCREAM }}
{%- endfor %} => Some({{ node }}::from_repr(syntax.into_repr())),
_ => None,
}
}
fn syntax(self) -> SyntaxNodeRef<'a> {
match self {
{%- for kind in methods.enum %}
{{ node }}::{{ kind }}(inner) => inner.syntax(),
{%- endfor %}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
fn to_owned(&self) -> TreePtr<{{ node }}> { TreePtr::cast(self.syntax.to_owned()) }
}
impl {{ node }} {
pub fn kind(&self) -> {{ node }}Kind {
match self.syntax.kind() {
{%- for kind in methods.enum %}
{{ kind | SCREAM }} => {{ node }}Kind::{{ kind }}({{ kind }}::cast(&self.syntax).unwrap()),
{%- endfor %}
_ => unreachable!(),
}
}
}
{% else %}
#[derive(Debug, Clone, Copy,)]
pub struct {{ node }}Node<R: TreeRoot<RaTypes> = OwnedRoot> {
pub(crate) syntax: SyntaxNode<R>,
#[derive(Debug, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct {{ node }} {
pub(crate) syntax: SyntaxNode,
}
pub type {{ node }}<'a> = {{ node }}Node<RefRoot<'a>>;
impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<{{node}}Node<R1>> for {{node}}Node<R2> {
fn eq(&self, other: &{{node}}Node<R1>) -> bool { self.syntax == other.syntax }
}
impl<R: TreeRoot<RaTypes>> Eq for {{node}}Node<R> {}
impl<R: TreeRoot<RaTypes>> Hash for {{node}}Node<R> {
fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
unsafe impl TransparentNewType for {{ node }} {
type Repr = rowan::SyntaxNode<RaTypes>;
}
impl<'a> AstNode<'a> for {{ node }}<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
impl AstNode for {{ node }} {
fn cast(syntax: &SyntaxNode) -> Option<&Self> {
match syntax.kind() {
{{ node | SCREAM }} => Some({{ node }} { syntax }),
{{ node | SCREAM }} => Some({{ node }}::from_repr(syntax.into_repr())),
_ => None,
}
}
fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
}
impl<R: TreeRoot<RaTypes>> {{ node }}Node<R> {
pub fn borrowed(&self) -> {{ node }} {
{{ node }}Node { syntax: self.syntax.borrowed() }
}
pub fn owned(&self) -> {{ node }}Node {
{{ node }}Node { syntax: self.syntax.owned() }
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
fn to_owned(&self) -> TreePtr<{{ node }}> { TreePtr::cast(self.syntax.to_owned()) }
}
{% endif %}
{% if methods.traits -%}
{%- for t in methods.traits -%}
impl<'a> ast::{{ t }}<'a> for {{ node }}<'a> {}
impl ast::{{ t }} for {{ node }} {}
{% endfor -%}
{%- endif -%}
impl<'a> {{ node }}<'a> {
impl {{ node }} {
{%- if methods.collections -%}
{%- for m in methods.collections -%}
{%- set method_name = m.0 -%}
{%- set ChildName = m.1 %}
pub fn {{ method_name }}(self) -> impl Iterator<Item = {{ ChildName }}<'a>> + 'a {
pub fn {{ method_name }}(&self) -> impl Iterator<Item = &{{ ChildName }}> {
super::children(self)
}
{% endfor -%}
@ -109,7 +112,7 @@ impl<'a> {{ node }}<'a> {
{%- set method_name = m.0 -%}
{%- set ChildName = m.1 %}
{%- endif %}
pub fn {{ method_name }}(self) -> Option<{{ ChildName }}<'a>> {
pub fn {{ method_name }}(&self) -> Option<&{{ ChildName }}> {
super::child_opt(self)
}
{% endfor -%}


@ -42,52 +42,42 @@ pub use crate::{
ast::AstNode,
lexer::{tokenize, Token},
syntax_kinds::SyntaxKind,
yellow::{
Direction, OwnedRoot, RefRoot, SyntaxError, SyntaxNode, SyntaxNodeRef, TreeRoot, WalkEvent, Location,
},
yellow::{Direction, SyntaxError, SyntaxNode, WalkEvent, Location, TreePtr},
};
use ra_text_edit::AtomTextEdit;
use crate::yellow::GreenNode;
/// `SourceFileNode` represents a parse tree for a single Rust file.
pub use crate::ast::{SourceFile, SourceFileNode};
/// `SourceFile` represents a parse tree for a single Rust file.
pub use crate::ast::SourceFile;
impl SourceFileNode {
fn new(green: GreenNode, errors: Vec<SyntaxError>) -> SourceFileNode {
impl SourceFile {
fn new(green: GreenNode, errors: Vec<SyntaxError>) -> TreePtr<SourceFile> {
let root = SyntaxNode::new(green, errors);
if cfg!(debug_assertions) {
utils::validate_block_structure(root.borrowed());
utils::validate_block_structure(&root);
}
assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
ast::SourceFileNode { syntax: root }
TreePtr::cast(root)
}
pub fn parse(text: &str) -> SourceFileNode {
pub fn parse(text: &str) -> TreePtr<SourceFile> {
let tokens = tokenize(&text);
let (green, errors) =
parser_impl::parse_with(yellow::GreenBuilder::new(), text, &tokens, grammar::root);
SourceFileNode::new(green, errors)
SourceFile::new(green, errors)
}
pub fn reparse(&self, edit: &AtomTextEdit) -> SourceFileNode {
pub fn reparse(&self, edit: &AtomTextEdit) -> TreePtr<SourceFile> {
self.incremental_reparse(edit)
.unwrap_or_else(|| self.full_reparse(edit))
}
pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<SourceFileNode> {
pub fn incremental_reparse(&self, edit: &AtomTextEdit) -> Option<TreePtr<SourceFile>> {
reparsing::incremental_reparse(self.syntax(), edit, self.errors())
.map(|(green_node, errors)| SourceFileNode::new(green_node, errors))
.map(|(green_node, errors)| SourceFile::new(green_node, errors))
}
fn full_reparse(&self, edit: &AtomTextEdit) -> SourceFileNode {
fn full_reparse(&self, edit: &AtomTextEdit) -> TreePtr<SourceFile> {
let text =
text_utils::replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
SourceFileNode::parse(&text)
}
/// Typed AST representation of the parse tree.
pub fn ast(&self) -> ast::SourceFile {
self.borrowed()
}
/// Untyped homogeneous representation of the parse tree.
pub fn syntax(&self) -> SyntaxNodeRef {
self.syntax.borrowed()
SourceFile::parse(&text)
}
pub fn errors(&self) -> Vec<SyntaxError> {
let mut errors = self.syntax.root_data().clone();

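With the TreePtr re-export above, the new entry point looks roughly like this from a consumer's point of view (a minimal sketch, assuming the ra_syntax API as it appears after this change):

use ra_syntax::{AstNode, SourceFile, SyntaxKind, TreePtr};

fn main() {
    // `parse` now returns an owning pointer to the typed root...
    let file: TreePtr<SourceFile> = SourceFile::parse("fn main() {}");
    // ...which derefs to `&SourceFile`, so the old `.ast()` / `.borrowed()`
    // dance is gone.
    assert_eq!(file.syntax().kind(), SyntaxKind::SOURCE_FILE);
    assert!(file.errors().is_empty());
}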

@ -4,12 +4,12 @@ use crate::lexer::{tokenize, Token};
use crate::parser_api::Parser;
use crate::parser_impl;
use crate::text_utils::replace_range;
use crate::yellow::{self, GreenNode, SyntaxError, SyntaxNodeRef};
use crate::yellow::{self, GreenNode, SyntaxError, SyntaxNode};
use crate::{SyntaxKind::*, TextRange, TextUnit};
use ra_text_edit::AtomTextEdit;
pub(crate) fn incremental_reparse(
node: SyntaxNodeRef,
node: &SyntaxNode,
edit: &AtomTextEdit,
errors: Vec<SyntaxError>,
) -> Option<(GreenNode, Vec<SyntaxError>)> {
@ -21,9 +21,9 @@ pub(crate) fn incremental_reparse(
}
fn reparse_leaf<'node>(
node: SyntaxNodeRef<'node>,
node: &'node SyntaxNode,
edit: &AtomTextEdit,
) -> Option<(SyntaxNodeRef<'node>, GreenNode, Vec<SyntaxError>)> {
) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> {
let node = algo::find_covering_node(node, edit.delete);
match node.kind() {
WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
@ -47,9 +47,9 @@ fn reparse_leaf<'node>(
}
fn reparse_block<'node>(
node: SyntaxNodeRef<'node>,
node: &'node SyntaxNode,
edit: &AtomTextEdit,
) -> Option<(SyntaxNodeRef<'node>, GreenNode, Vec<SyntaxError>)> {
) -> Option<(&'node SyntaxNode, GreenNode, Vec<SyntaxError>)> {
let (node, reparser) = find_reparsable_node(node, edit.delete)?;
let text = get_text_after_edit(node, &edit);
let tokens = tokenize(&text);
@ -61,7 +61,7 @@ fn reparse_block<'node>(
Some((node, green, new_errors))
}
fn get_text_after_edit(node: SyntaxNodeRef, edit: &AtomTextEdit) -> String {
fn get_text_after_edit(node: &SyntaxNode, edit: &AtomTextEdit) -> String {
replace_range(
node.text().to_string(),
edit.delete - node.range().start(),
@ -77,17 +77,14 @@ fn is_contextual_kw(text: &str) -> bool {
}
type ParseFn = fn(&mut Parser);
fn find_reparsable_node(
node: SyntaxNodeRef<'_>,
range: TextRange,
) -> Option<(SyntaxNodeRef<'_>, ParseFn)> {
fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(&SyntaxNode, ParseFn)> {
let node = algo::find_covering_node(node, range);
return node
.ancestors()
.filter_map(|node| reparser(node).map(|r| (node, r)))
.next();
fn reparser(node: SyntaxNodeRef) -> Option<ParseFn> {
fn reparser(node: &SyntaxNode) -> Option<ParseFn> {
let res = match node.kind() {
BLOCK => grammar::block,
NAMED_FIELD_DEF_LIST => grammar::named_field_def_list,
@ -138,7 +135,7 @@ fn is_balanced(tokens: &[Token]) -> bool {
fn merge_errors(
old_errors: Vec<SyntaxError>,
new_errors: Vec<SyntaxError>,
old_node: SyntaxNodeRef,
old_node: &SyntaxNode,
edit: &AtomTextEdit,
) -> Vec<SyntaxError> {
let mut res = Vec::new();
@ -159,22 +156,22 @@ fn merge_errors(
mod tests {
use test_utils::{extract_range, assert_eq_text};
use crate::{SourceFileNode, text_utils::replace_range, utils::dump_tree };
use crate::{SourceFile, AstNode, text_utils::replace_range, utils::dump_tree};
use super::*;
fn do_check<F>(before: &str, replace_with: &str, reparser: F)
where
for<'a> F: Fn(
SyntaxNodeRef<'a>,
&'a SyntaxNode,
&AtomTextEdit,
) -> Option<(SyntaxNodeRef<'a>, GreenNode, Vec<SyntaxError>)>,
) -> Option<(&'a SyntaxNode, GreenNode, Vec<SyntaxError>)>,
{
let (range, before) = extract_range(before);
let after = replace_range(before.clone(), range, replace_with);
let fully_reparsed = SourceFileNode::parse(&after);
let fully_reparsed = SourceFile::parse(&after);
let incrementally_reparsed = {
let f = SourceFileNode::parse(&before);
let f = SourceFile::parse(&before);
let edit = AtomTextEdit {
delete: range,
insert: replace_with.to_string(),
@ -183,7 +180,7 @@ mod tests {
reparser(f.syntax(), &edit).expect("cannot incrementally reparse");
let green_root = node.replace_with(green);
let errors = super::merge_errors(f.errors(), new_errors, node, &edit);
SourceFileNode::new(green_root, errors)
SourceFile::new(green_root, errors)
};
assert_eq_text!(

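A sketch of the edit-and-reparse flow with the updated signatures; the edit below degenerately replaces the whole file, which exercises the full-reparse fallback (behavior and inputs are illustrative, assuming the API shown above):

use ra_syntax::{AstNode, SourceFile};
use ra_text_edit::AtomTextEdit;

fn main() {
    let file = SourceFile::parse("fn main() { 1 + 1; }");
    // Replace the entire file; `incremental_reparse` will likely bail out and
    // `reparse` falls back to a full reparse.
    let edit = AtomTextEdit {
        delete: file.syntax().range(),
        insert: "fn main() { 2 * 3; }".to_string(),
    };
    let new_file = file.reparse(&edit);
    assert!(new_file.errors().is_empty());
}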

@ -1,11 +1,11 @@
use crate::{SourceFileNode, SyntaxKind, SyntaxNodeRef, WalkEvent, AstNode};
use std::fmt::Write;
use std::str;
use std::{str, fmt::Write};
use crate::{SourceFile, SyntaxKind, WalkEvent, AstNode, SyntaxNode};
/// Parse a file and create a string representation of the resulting parse tree.
pub fn dump_tree(syntax: SyntaxNodeRef) -> String {
let mut errors: Vec<_> = match syntax.ancestors().find_map(SourceFileNode::cast) {
Some(file) => file.owned().errors(),
pub fn dump_tree(syntax: &SyntaxNode) -> String {
let mut errors: Vec<_> = match syntax.ancestors().find_map(SourceFile::cast) {
Some(file) => file.errors(),
None => syntax.root_data().to_vec(),
};
errors.sort_by_key(|e| e.offset());
@ -48,14 +48,13 @@ pub fn dump_tree(syntax: SyntaxNodeRef) -> String {
}
pub fn check_fuzz_invariants(text: &str) {
let file = SourceFileNode::parse(text);
let file = SourceFile::parse(text);
let root = file.syntax();
validate_block_structure(root);
let _ = file.ast();
let _ = file.errors();
}
pub(crate) fn validate_block_structure(root: SyntaxNodeRef) {
pub(crate) fn validate_block_structure(root: &SyntaxNode) {
let mut stack = Vec::new();
for node in root.descendants() {
match node.kind() {

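Since dump_tree now takes a plain &SyntaxNode, dumping a parse tree is just the following (a small sketch, using only items that appear in this patch):

use ra_syntax::{
    utils::dump_tree,
    AstNode, SourceFile,
};

fn main() {
    let file = SourceFile::parse("fn main() { 92; }");
    // `dump_tree` takes `&SyntaxNode` directly; no `.borrowed()` required.
    let tree: String = dump_tree(file.syntax());
    println!("{}", tree);
}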

@ -1,16 +1,15 @@
use crate::{
algo::visit::{visitor_ctx, VisitorCtx},
ast,
SourceFileNode,
yellow::SyntaxError,
};
mod byte;
mod byte_string;
mod char;
mod string;
pub(crate) fn validate(file: &SourceFileNode) -> Vec<SyntaxError> {
use crate::{
SourceFile, yellow::SyntaxError, AstNode,
ast,
algo::visit::{visitor_ctx, VisitorCtx},
};
pub(crate) fn validate(file: &SourceFile) -> Vec<SyntaxError> {
let mut errors = Vec::new();
for node in file.syntax().descendants() {
let _ = visitor_ctx(&mut errors)


@ -11,7 +11,7 @@ use crate::{
},
};
pub(super) fn validate_byte_node(node: ast::Byte, errors: &mut Vec<SyntaxError>) {
pub(super) fn validate_byte_node(node: &ast::Byte, errors: &mut Vec<SyntaxError>) {
let literal_text = node.text();
let literal_range = node.syntax().range();
let mut components = string_lexing::parse_byte_literal(literal_text);
@ -106,11 +106,11 @@ fn validate_byte_code_escape(text: &str, range: TextRange, errors: &mut Vec<Synt
#[cfg(test)]
mod test {
use crate::SourceFileNode;
use crate::{SourceFile, TreePtr};
fn build_file(literal: &str) -> SourceFileNode {
fn build_file(literal: &str) -> TreePtr<SourceFile> {
let src = format!("const C: u8 = b'{}';", literal);
SourceFileNode::parse(&src)
SourceFile::parse(&src)
}
fn assert_valid_byte(literal: &str) {


@ -9,7 +9,7 @@ use crate::{
use super::byte;
pub(crate) fn validate_byte_string_node(node: ast::ByteString, errors: &mut Vec<SyntaxError>) {
pub(crate) fn validate_byte_string_node(node: &ast::ByteString, errors: &mut Vec<SyntaxError>) {
let literal_text = node.text();
let literal_range = node.syntax().range();
let mut components = string_lexing::parse_byte_string_literal(literal_text);
@ -43,12 +43,12 @@ pub(crate) fn validate_byte_string_node(node: ast::ByteString, errors: &mut Vec<
#[cfg(test)]
mod test {
use crate::SourceFileNode;
use crate::{SourceFile, TreePtr};
fn build_file(literal: &str) -> SourceFileNode {
fn build_file(literal: &str) -> TreePtr<SourceFile> {
let src = format!(r#"const S: &'static [u8] = b"{}";"#, literal);
println!("Source: {}", src);
SourceFileNode::parse(&src)
SourceFile::parse(&src)
}
fn assert_valid_str(literal: &str) {


@ -14,7 +14,7 @@ use crate::{
},
};
pub(super) fn validate_char_node(node: ast::Char, errors: &mut Vec<SyntaxError>) {
pub(super) fn validate_char_node(node: &ast::Char, errors: &mut Vec<SyntaxError>) {
let literal_text = node.text();
let literal_range = node.syntax().range();
let mut components = string_lexing::parse_char_literal(literal_text);
@ -175,11 +175,11 @@ fn validate_unicode_escape(text: &str, range: TextRange, errors: &mut Vec<Syntax
#[cfg(test)]
mod test {
use crate::SourceFileNode;
use crate::{SourceFile, TreePtr};
fn build_file(literal: &str) -> SourceFileNode {
fn build_file(literal: &str) -> TreePtr<SourceFile> {
let src = format!("const C: char = '{}';", literal);
SourceFileNode::parse(&src)
SourceFile::parse(&src)
}
fn assert_valid_char(literal: &str) {


@ -9,7 +9,7 @@ use crate::{
use super::char;
pub(crate) fn validate_string_node(node: ast::String, errors: &mut Vec<SyntaxError>) {
pub(crate) fn validate_string_node(node: &ast::String, errors: &mut Vec<SyntaxError>) {
let literal_text = node.text();
let literal_range = node.syntax().range();
let mut components = string_lexing::parse_string_literal(literal_text);
@ -38,12 +38,12 @@ pub(crate) fn validate_string_node(node: ast::String, errors: &mut Vec<SyntaxErr
#[cfg(test)]
mod test {
use crate::SourceFileNode;
use crate::{SourceFile, TreePtr};
fn build_file(literal: &str) -> SourceFileNode {
fn build_file(literal: &str) -> TreePtr<SourceFile> {
let src = format!(r#"const S: &'static str = "{}";"#, literal);
println!("Source: {}", src);
SourceFileNode::parse(&src)
SourceFile::parse(&src)
}
fn assert_valid_str(literal: &str) {

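The four literal-validation test modules above all share the same shape; a hedged sketch of how such a test might look after the change (only build_file mirrors the diff; the body of assert_valid_str is an assumption):

// Inside one of the validation test modules (crate-internal sketch).
use crate::{SourceFile, TreePtr};

fn build_file(literal: &str) -> TreePtr<SourceFile> {
    let src = format!(r#"const S: &'static str = "{}";"#, literal);
    SourceFile::parse(&src)
}

fn assert_valid_str(literal: &str) {
    // Assumption: a well-formed literal should produce no errors at all.
    let file = build_file(literal);
    assert!(
        file.errors().is_empty(),
        "errors for {:?}: {:?}",
        literal,
        file.errors()
    );
}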

@ -4,15 +4,12 @@ mod syntax_text;
use self::syntax_text::SyntaxText;
use crate::{SmolStr, SyntaxKind, TextRange};
use rowan::Types;
use std::{
fmt,
hash::{Hash, Hasher},
};
use rowan::{Types, TransparentNewType};
use std::fmt;
pub(crate) use self::builder::GreenBuilder;
pub use self::syntax_error::{SyntaxError, SyntaxErrorKind, Location};
pub use rowan::{TreeRoot, WalkEvent};
pub use rowan::WalkEvent;
#[derive(Debug, Clone, Copy)]
pub enum RaTypes {}
@ -21,35 +18,76 @@ impl Types for RaTypes {
type RootData = Vec<SyntaxError>;
}
pub type OwnedRoot = ::rowan::OwnedRoot<RaTypes>;
pub type RefRoot<'a> = ::rowan::RefRoot<'a, RaTypes>;
pub type GreenNode = rowan::GreenNode<RaTypes>;
pub type GreenNode = ::rowan::GreenNode<RaTypes>;
#[derive(PartialEq, Eq, Hash)]
pub struct TreePtr<T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>>(
pub(crate) rowan::TreePtr<RaTypes, T>,
);
#[derive(Clone, Copy)]
pub struct SyntaxNode<R: TreeRoot<RaTypes> = OwnedRoot>(pub(crate) ::rowan::SyntaxNode<RaTypes, R>);
pub type SyntaxNodeRef<'a> = SyntaxNode<RefRoot<'a>>;
impl<R1, R2> PartialEq<SyntaxNode<R1>> for SyntaxNode<R2>
impl<T> TreePtr<T>
where
R1: TreeRoot<RaTypes>,
R2: TreeRoot<RaTypes>,
T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>,
{
fn eq(&self, other: &SyntaxNode<R1>) -> bool {
self.0 == other.0
pub(crate) fn cast<U>(this: TreePtr<T>) -> TreePtr<U>
where
U: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>,
{
TreePtr(rowan::TreePtr::cast(this.0))
}
}
impl<R: TreeRoot<RaTypes>> Eq for SyntaxNode<R> {}
impl<R: TreeRoot<RaTypes>> Hash for SyntaxNode<R> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.0.hash(state)
impl<T> std::ops::Deref for TreePtr<T>
where
T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>,
{
type Target = T;
fn deref(&self) -> &T {
self.0.deref()
}
}
impl<T> PartialEq<T> for TreePtr<T>
where
T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>,
T: PartialEq<T>,
{
fn eq(&self, other: &T) -> bool {
let t: &T = self;
t == other
}
}
impl<T> Clone for TreePtr<T>
where
T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>,
{
fn clone(&self) -> TreePtr<T> {
TreePtr(self.0.clone())
}
}
impl<T> fmt::Debug for TreePtr<T>
where
T: TransparentNewType<Repr = rowan::SyntaxNode<RaTypes>>,
T: fmt::Debug,
{
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(&self.0, fmt)
}
}
#[derive(PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct SyntaxNode(pub(crate) rowan::SyntaxNode<RaTypes>);
unsafe impl TransparentNewType for SyntaxNode {
type Repr = rowan::SyntaxNode<RaTypes>;
}
impl SyntaxNode {
pub(crate) fn new(green: GreenNode, errors: Vec<SyntaxError>) -> SyntaxNode {
SyntaxNode(::rowan::SyntaxNode::new(green, errors))
pub(crate) fn new(green: GreenNode, errors: Vec<SyntaxError>) -> TreePtr<SyntaxNode> {
let ptr = TreePtr(rowan::SyntaxNode::new(green, errors));
TreePtr::cast(ptr)
}
}
@ -59,45 +97,43 @@ pub enum Direction {
Prev,
}
impl<'a> SyntaxNodeRef<'a> {
pub fn leaf_text(self) -> Option<&'a SmolStr> {
impl SyntaxNode {
pub fn leaf_text(&self) -> Option<&SmolStr> {
self.0.leaf_text()
}
pub fn ancestors(self) -> impl Iterator<Item = SyntaxNodeRef<'a>> {
pub fn ancestors(&self) -> impl Iterator<Item = &SyntaxNode> {
crate::algo::generate(Some(self), |&node| node.parent())
}
pub fn descendants(self) -> impl Iterator<Item = SyntaxNodeRef<'a>> {
pub fn descendants(&self) -> impl Iterator<Item = &SyntaxNode> {
self.preorder().filter_map(|event| match event {
WalkEvent::Enter(node) => Some(node),
WalkEvent::Leave(_) => None,
})
}
pub fn siblings(self, direction: Direction) -> impl Iterator<Item = SyntaxNodeRef<'a>> {
pub fn siblings(&self, direction: Direction) -> impl Iterator<Item = &SyntaxNode> {
crate::algo::generate(Some(self), move |&node| match direction {
Direction::Next => node.next_sibling(),
Direction::Prev => node.prev_sibling(),
})
}
pub fn preorder(self) -> impl Iterator<Item = WalkEvent<SyntaxNodeRef<'a>>> {
pub fn preorder(&self) -> impl Iterator<Item = WalkEvent<&SyntaxNode>> {
self.0.preorder().map(|event| match event {
WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode(n)),
WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode(n)),
WalkEvent::Enter(n) => WalkEvent::Enter(SyntaxNode::from_repr(n)),
WalkEvent::Leave(n) => WalkEvent::Leave(SyntaxNode::from_repr(n)),
})
}
}
impl<R: TreeRoot<RaTypes>> SyntaxNode<R> {
impl SyntaxNode {
pub(crate) fn root_data(&self) -> &Vec<SyntaxError> {
self.0.root_data()
}
pub(crate) fn replace_with(&self, replacement: GreenNode) -> GreenNode {
self.0.replace_with(replacement)
self.0.replace_self(replacement)
}
pub fn borrowed<'a>(&'a self) -> SyntaxNode<RefRoot<'a>> {
SyntaxNode(self.0.borrowed())
}
pub fn owned(&self) -> SyntaxNode<OwnedRoot> {
SyntaxNode(self.0.owned())
pub fn to_owned(&self) -> TreePtr<SyntaxNode> {
let ptr = TreePtr(self.0.to_owned());
TreePtr::cast(ptr)
}
pub fn kind(&self) -> SyntaxKind {
self.0.kind()
@ -106,32 +142,32 @@ impl<R: TreeRoot<RaTypes>> SyntaxNode<R> {
self.0.range()
}
pub fn text(&self) -> SyntaxText {
SyntaxText::new(self.borrowed())
SyntaxText::new(self)
}
pub fn is_leaf(&self) -> bool {
self.0.is_leaf()
}
pub fn parent(&self) -> Option<SyntaxNode<R>> {
self.0.parent().map(SyntaxNode)
pub fn parent(&self) -> Option<&SyntaxNode> {
self.0.parent().map(SyntaxNode::from_repr)
}
pub fn first_child(&self) -> Option<SyntaxNode<R>> {
self.0.first_child().map(SyntaxNode)
pub fn first_child(&self) -> Option<&SyntaxNode> {
self.0.first_child().map(SyntaxNode::from_repr)
}
pub fn last_child(&self) -> Option<SyntaxNode<R>> {
self.0.last_child().map(SyntaxNode)
pub fn last_child(&self) -> Option<&SyntaxNode> {
self.0.last_child().map(SyntaxNode::from_repr)
}
pub fn next_sibling(&self) -> Option<SyntaxNode<R>> {
self.0.next_sibling().map(SyntaxNode)
pub fn next_sibling(&self) -> Option<&SyntaxNode> {
self.0.next_sibling().map(SyntaxNode::from_repr)
}
pub fn prev_sibling(&self) -> Option<SyntaxNode<R>> {
self.0.prev_sibling().map(SyntaxNode)
pub fn prev_sibling(&self) -> Option<&SyntaxNode> {
self.0.prev_sibling().map(SyntaxNode::from_repr)
}
pub fn children(&self) -> SyntaxNodeChildren<R> {
pub fn children(&self) -> SyntaxNodeChildren {
SyntaxNodeChildren(self.0.children())
}
}
impl<R: TreeRoot<RaTypes>> fmt::Debug for SyntaxNode<R> {
impl fmt::Debug for SyntaxNode {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
write!(fmt, "{:?}@{:?}", self.kind(), self.range())?;
if has_short_text(self.kind()) {
@ -142,13 +178,13 @@ impl<R: TreeRoot<RaTypes>> fmt::Debug for SyntaxNode<R> {
}
#[derive(Debug)]
pub struct SyntaxNodeChildren<R: TreeRoot<RaTypes>>(::rowan::SyntaxNodeChildren<RaTypes, R>);
pub struct SyntaxNodeChildren<'a>(rowan::SyntaxNodeChildren<'a, RaTypes>);
impl<R: TreeRoot<RaTypes>> Iterator for SyntaxNodeChildren<R> {
type Item = SyntaxNode<R>;
impl<'a> Iterator for SyntaxNodeChildren<'a> {
type Item = &'a SyntaxNode;
fn next(&mut self) -> Option<SyntaxNode<R>> {
self.0.next().map(SyntaxNode)
fn next(&mut self) -> Option<&'a SyntaxNode> {
self.0.next().map(SyntaxNode::from_repr)
}
}

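In short, TreePtr<T> replaces the old OwnedRoot/RefRoot split: borrowed nodes are now plain &SyntaxNode, and ownership is reintroduced on demand via to_owned. A minimal sketch, assuming the re-exports shown in the lib.rs hunk above:

use ra_syntax::{AstNode, SourceFile, SyntaxNode, TreePtr, WalkEvent};

fn main() {
    let file: TreePtr<SourceFile> = SourceFile::parse("fn main() { let x = 92; }");

    // Navigation hands out `&SyntaxNode` tied to the tree's lifetime.
    let root: &SyntaxNode = file.syntax();
    for event in root.preorder() {
        if let WalkEvent::Enter(node) = event {
            let _ = (node.kind(), node.range(), node.parent().is_some());
        }
    }

    // A borrowed node can be promoted back to an owning pointer, which keeps
    // the underlying tree alive independently of `file`.
    let owned: TreePtr<SyntaxNode> = root.to_owned();
    drop(file);
    let _ = owned.kind();
}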

@ -3,17 +3,17 @@ use std::{fmt, ops};
use ra_text_edit::text_utils::contains_offset_nonstrict;
use crate::{
text_utils::intersect,
SyntaxNodeRef, TextRange, TextUnit,
SyntaxNode, TextRange, TextUnit,
};
#[derive(Clone)]
pub struct SyntaxText<'a> {
node: SyntaxNodeRef<'a>,
node: &'a SyntaxNode,
range: TextRange,
}
impl<'a> SyntaxText<'a> {
pub(crate) fn new(node: SyntaxNodeRef<'a>) -> SyntaxText<'a> {
pub(crate) fn new(node: &'a SyntaxNode) -> SyntaxText<'a> {
SyntaxText {
node,
range: node.range(),

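SyntaxText now borrows a &SyntaxNode directly; a common pattern in this patch is materializing it into a String (small sketch):

use ra_syntax::{AstNode, SourceFile};

fn main() {
    let file = SourceFile::parse("fn main() {}");
    // `text()` returns a `SyntaxText` borrowing the node; `to_string()` copies
    // it out when an owned `String` is needed.
    let text = file.syntax().text().to_string();
    assert_eq!(text, "fn main() {}");
}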

@ -9,8 +9,8 @@ use std::{
use test_utils::{project_dir, dir_tests, read_text, collect_tests};
use ra_syntax::{
SourceFile, AstNode,
utils::{check_fuzz_invariants, dump_tree},
SourceFileNode,
};
#[test]
@ -27,7 +27,7 @@ fn parser_tests() {
&test_data_dir(),
&["parser/inline/ok", "parser/ok"],
|text, path| {
let file = SourceFileNode::parse(text);
let file = SourceFile::parse(text);
let errors = file.errors();
assert_eq!(
&*errors,
@ -42,7 +42,7 @@ fn parser_tests() {
&test_data_dir(),
&["parser/err", "parser/inline/err"],
|text, path| {
let file = SourceFileNode::parse(text);
let file = SourceFile::parse(text);
let errors = file.errors();
assert_ne!(
&*errors,
@ -85,7 +85,7 @@ fn self_hosting_parsing() {
{
count += 1;
let text = read_text(entry.path());
let node = SourceFileNode::parse(&text);
let node = SourceFile::parse(&text);
let errors = node.errors();
assert_eq!(
&*errors,