Use deref coercions
parent 5b4986fa57
commit 791003ad3c
7 changed files with 20 additions and 20 deletions
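The pattern applied throughout the diff below: `P<T>`, `Box<T>`, and `String` all implement `Deref`, so at a coercion site a plain `&x` borrow is adjusted automatically to the `&T` (or `&str`) that the callee expects, which makes the explicit reborrow spelling `&*x` unnecessary. A minimal, self-contained sketch of the mechanism; the `Wrapper` type and `takes_ref` function are illustrative stand-ins, not part of this change:

use std::ops::Deref;

// Illustrative stand-in for an owning pointer such as P<T> or Box<T>.
struct Wrapper<T>(Box<T>);

impl<T> Deref for Wrapper<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.0
    }
}

fn takes_ref(n: &i32) -> i32 {
    *n + 1
}

fn main() {
    let w = Wrapper(Box::new(41));
    // Old spelling: dereference through the wrapper explicitly, then reborrow.
    assert_eq!(takes_ref(&*w), 42);
    // With deref coercion, `&Wrapper<i32>` is adjusted to `&i32` automatically.
    assert_eq!(takes_ref(&w), 42);
}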
@@ -86,7 +86,7 @@ fn filter_foreign_item<F>(cx: &mut Context<F>,
                           -> Option<P<ast::ForeignItem>> where
     F: FnMut(&[ast::Attribute]) -> bool
 {
-    if foreign_item_in_cfg(cx, &*item) {
+    if foreign_item_in_cfg(cx, &item) {
         Some(item)
     } else {
         None
@@ -109,7 +109,7 @@ fn fold_foreign_mod<F>(cx: &mut Context<F>,
 fn fold_item<F>(cx: &mut Context<F>, item: P<ast::Item>) -> SmallVector<P<ast::Item>> where
     F: FnMut(&[ast::Attribute]) -> bool
 {
-    if item_in_cfg(cx, &*item) {
+    if item_in_cfg(cx, &item) {
         SmallVector::one(item.map(|i| cx.fold_item_simple(i)))
     } else {
         SmallVector::zero()
@@ -189,7 +189,7 @@ fn retain_stmt<F>(cx: &mut Context<F>, stmt: &ast::Stmt) -> bool where
         ast::StmtDecl(ref decl, _) => {
             match decl.node {
                 ast::DeclItem(ref item) => {
-                    item_in_cfg(cx, &**item)
+                    item_in_cfg(cx, item)
                 }
                 _ => true
             }
@@ -203,7 +203,7 @@ fn fold_block<F>(cx: &mut Context<F>, b: P<ast::Block>) -> P<ast::Block> where
 {
     b.map(|ast::Block {id, stmts, expr, rules, span}| {
         let resulting_stmts: Vec<P<ast::Stmt>> =
-            stmts.into_iter().filter(|a| retain_stmt(cx, &**a)).collect();
+            stmts.into_iter().filter(|a| retain_stmt(cx, a)).collect();
         let resulting_stmts = resulting_stmts.into_iter()
             .flat_map(|stmt| cx.fold_stmt(stmt).into_iter())
             .collect();
@@ -263,7 +263,7 @@ fn in_cfg(diagnostic: &SpanHandler, cfg: &[P<ast::MetaItem>], attrs: &[ast::Attr
             return true;
         }

-        attr::cfg_matches(diagnostic, cfg, &*mis[0],
+        attr::cfg_matches(diagnostic, cfg, &mis[0],
                           feature_gated_cfgs)
     })
 }
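A note on the variant seen in the hunks above: when the binding is already a reference to a smart pointer (for example `ref item`, which yields a `&P<ast::Item>`), deref coercion applies to that existing reference, so `&**item` collapses to just `item`. A hedged sketch with an assumed callee; `name_len` is illustrative, standing in for `item_in_cfg`:

// `name_len` is an illustrative callee that takes a plain reference,
// in the way item_in_cfg takes `&ast::Item`.
fn name_len(name: &String) -> usize {
    name.len()
}

fn main() {
    let boxed: Box<String> = Box::new(String::from("foo"));
    let item: &Box<String> = &boxed;
    // Old spelling: dereference through the Box, then reborrow the String.
    assert_eq!(name_len(&**item), 3);
    // Deref coercion adjusts `&Box<String>` to `&String` directly.
    assert_eq!(name_len(item), 3);
}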
@@ -33,7 +33,7 @@ pub fn expand_cfg<'cx>(cx: &mut ExtCtxt,
         return DummyResult::expr(sp);
     }

-    let matches_cfg = attr::cfg_matches(&cx.parse_sess.span_diagnostic, &cx.cfg, &*cfg,
+    let matches_cfg = attr::cfg_matches(&cx.parse_sess.span_diagnostic, &cx.cfg, &cfg,
                                         cx.feature_gated_cfgs);
     MacEager::expr(cx.expr_bool(sp, matches_cfg))
 }
@@ -547,7 +547,7 @@ fn expand_non_macro_stmt(Spanned {node, span: stmt_span}: Stmt, fld: &mut MacroE
             // names, as well... but that should be okay, as long as
             // the new names are gensyms for the old ones.
             // generate fresh names, push them to a new pending list
-            let idents = pattern_bindings(&*expanded_pat);
+            let idents = pattern_bindings(&expanded_pat);
             let mut new_pending_renames =
                 idents.iter().map(|ident| (*ident, fresh_name(*ident))).collect();
             // rewrite the pattern using the new names (the old
@@ -634,7 +634,7 @@ fn rename_in_scope<X, F>(pats: Vec<P<ast::Pat>>,
 {
     // all of the pats must have the same set of bindings, so use the
     // first one to extract them and generate new names:
-    let idents = pattern_bindings(&*pats[0]);
+    let idents = pattern_bindings(&pats[0]);
     let new_renames = idents.into_iter().map(|id| (id, fresh_name(id))).collect();
     // apply the renaming, but only to the PatIdents:
     let mut rename_pats_fld = PatIdentRenamer{renames:&new_renames};
@@ -659,7 +659,7 @@ impl<'v> Visitor<'v> for PatIdentFinder {
                 self.ident_accumulator.push(path1.node);
                 // visit optional subpattern of PatIdent:
                 if let Some(ref subpat) = *inner {
-                    self.visit_pat(&**subpat)
+                    self.visit_pat(subpat)
                 }
             }
             // use the default traversal for non-PatIdents
@@ -679,7 +679,7 @@ fn pattern_bindings(pat: &ast::Pat) -> Vec<ast::Ident> {
 fn fn_decl_arg_bindings(fn_decl: &ast::FnDecl) -> Vec<ast::Ident> {
     let mut pat_idents = PatIdentFinder{ident_accumulator:Vec::new()};
     for arg in &fn_decl.inputs {
-        pat_idents.visit_pat(&*arg.pat);
+        pat_idents.visit_pat(&arg.pat);
     }
     pat_idents.ident_accumulator
 }
@@ -1078,7 +1078,7 @@ fn expand_and_rename_fn_decl_and_block(fn_decl: P<ast::FnDecl>, block: P<ast::Bl
                                        fld: &mut MacroExpander)
                                        -> (P<ast::FnDecl>, P<ast::Block>) {
     let expanded_decl = fld.fold_fn_decl(fn_decl);
-    let idents = fn_decl_arg_bindings(&*expanded_decl);
+    let idents = fn_decl_arg_bindings(&expanded_decl);
     let renames =
         idents.iter().map(|id| (*id,fresh_name(*id))).collect();
     // first, a renamer for the PatIdents, for the fn_decl:
@@ -1807,7 +1807,7 @@ foo_module!();
     fn pat_idents(){
         let pat = string_to_pat(
             "(a,Foo{x:c @ (b,9),y:Bar(4,d)})".to_string());
-        let idents = pattern_bindings(&*pat);
+        let idents = pattern_bindings(&pat);
         assert_eq!(idents, strs_to_idents(vec!("a","c","b","d")));
     }

@@ -289,7 +289,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
     };

     for lhs in &lhses {
-        check_lhs_nt_follows(cx, &**lhs, def.span);
+        check_lhs_nt_follows(cx, lhs, def.span);
     }

     let rhses = match **argument_map.get(&rhs_nm.name).unwrap() {
@@ -673,7 +673,7 @@ pub fn noop_fold_interpolated<T: Folder>(nt: token::Nonterminal, fld: &mut T)
             token::NtIdent(Box::new(fld.fold_ident(*id)), is_mod_name),
         token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)),
         token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))),
-        token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&*tt))),
+        token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))),
         token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)),
         token::NtImplItem(arm) =>
             token::NtImplItem(fld.fold_impl_item(arm)
@@ -53,7 +53,7 @@ pub fn stmt_ends_with_semi(stmt: &ast::Stmt_) -> bool {
                 ast::DeclItem(_) => false
             }
         }
-        ast::StmtExpr(ref e, _) => { expr_requires_semi_to_be_stmt(&**e) }
+        ast::StmtExpr(ref e, _) => { expr_requires_semi_to_be_stmt(e) }
         ast::StmtSemi(..) => { false }
         ast::StmtMac(..) => { false }
     }
@@ -446,10 +446,10 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
         Some(suf) => {
             if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
                 // if it looks like a width, lets try to be helpful.
-                sd.span_err(sp, &*format!("invalid width `{}` for float literal", &suf[1..]));
+                sd.span_err(sp, &format!("invalid width `{}` for float literal", &suf[1..]));
                 sd.fileline_help(sp, "valid widths are 32 and 64");
             } else {
-                sd.span_err(sp, &*format!("invalid suffix `{}` for float literal", suf));
+                sd.span_err(sp, &format!("invalid suffix `{}` for float literal", suf));
                 sd.fileline_help(sp, "valid suffixes are `f32` and `f64`");
             }

@@ -619,11 +619,11 @@ pub fn integer_lit(s: &str,
                 // i<digits> and u<digits> look like widths, so lets
                 // give an error message along those lines
                 if looks_like_width_suffix(&['i', 'u'], suf) {
-                    sd.span_err(sp, &*format!("invalid width `{}` for integer literal",
+                    sd.span_err(sp, &format!("invalid width `{}` for integer literal",
                                               &suf[1..]));
                     sd.fileline_help(sp, "valid widths are 8, 16, 32 and 64");
                 } else {
-                    sd.span_err(sp, &*format!("invalid suffix `{}` for numeric literal", suf));
+                    sd.span_err(sp, &format!("invalid suffix `{}` for numeric literal", suf));
                     sd.fileline_help(sp, "the suffix must be one of the integral types \
                                           (`u32`, `isize`, etc)");
                 }
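The last two hunks rely on the same mechanism for strings: `format!` produces a `String`, and `&String` deref-coerces to the `&str` that the diagnostic methods accept, so the manual `&*format!(...)` reborrow can be dropped. A minimal sketch; `report` is an illustrative stand-in for the compiler's `span_err`/`fileline_help` helpers:

// `report` is an illustrative stand-in for diagnostic helpers that take `&str`.
fn report(msg: &str) {
    eprintln!("error: {}", msg);
}

fn main() {
    let suf = "f16";
    // `format!` returns a `String`; `&String` deref-coerces to `&str` at the
    // call site, so the explicit `&*format!(...)` reborrow is not needed.
    report(&format!("invalid width `{}` for float literal", &suf[1..]));
}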