libsyntax: "import" -> "use"

Patrick Walton 2012-09-04 11:37:29 -07:00
parent 3f92cf2659
commit 8ff18acc82
44 changed files with 203 additions and 208 deletions
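In short, this commit mechanically renames the old "import" declaration keyword to "use" throughout libsyntax; in most hunks only the keyword changes, while a few hunks also drop imports that are no longer needed. A representative before/after, taken from the hunks below:

-import codemap::span;
+use codemap::span;
-import std::map::hashmap;
+use std::map::hashmap;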

View file

@ -1,7 +1,7 @@
// The Rust abstract syntax tree.
-import codemap::{span, filename};
-import std::serialization::{serializer,
+use codemap::{span, filename};
+use std::serialization::{serializer,
deserializer,
serialize_Option,
deserialize_Option,
@ -17,7 +17,7 @@ import std::serialization::{serializer,
deserialize_str,
serialize_bool,
deserialize_bool};
-import parse::token;
+use parse::token;
/* Note #1972 -- spans are serialized but not deserialized */
fn serialize_span<S>(_s: S, _v: span) {

View file

@ -1,10 +1,10 @@
-import std::map;
-import std::map::hashmap;
-import ast::*;
-import print::pprust;
-import ast_util::{path_to_ident, stmt_id};
-import diagnostic::span_handler;
-import parse::token::ident_interner;
+use std::map;
+use std::map::hashmap;
+use ast::*;
+use print::pprust;
+use ast_util::{path_to_ident, stmt_id};
+use diagnostic::span_handler;
+use parse::token::ident_interner;
enum path_elt {
path_mod(ident),

View file

@ -1,5 +1,5 @@
-import codemap::span;
-import ast::*;
+use codemap::span;
+use ast::*;
pure fn spanned<T>(lo: uint, hi: uint, +t: T) -> spanned<T> {
respan(mk_sp(lo, hi), t)

View file

@ -1,11 +1,11 @@
// Functions dealing with attributes and meta_items
-import std::map;
-import std::map::hashmap;
-import either::Either;
-import diagnostic::span_handler;
-import ast_util::{spanned, dummy_spanned};
-import parse::comments::{doc_comment_style, strip_doc_comment_decoration};
+use std::map;
+use std::map::hashmap;
+use either::Either;
+use diagnostic::span_handler;
+use ast_util::{spanned, dummy_spanned};
+use parse::comments::{doc_comment_style, strip_doc_comment_decoration};
// Constructors
export mk_name_value_item_str;

View file

@ -1,4 +1,4 @@
-import dvec::DVec;
+use dvec::DVec;
export filename;
export filemap;

View file

@ -1,6 +1,6 @@
-import std::term;
-import io::WriterUtil;
-import codemap::span;
+use std::term;
+use io::WriterUtil;
+use codemap::span;
export emitter, emit;
export level, fatal, error, warning, note;

View file

@ -69,10 +69,10 @@ into the tree. This is intended to prevent us from inserting the same
node twice.
*/
-import base::*;
-import codemap::span;
-import std::map;
-import std::map::hashmap;
+use base::*;
+use codemap::span;
+use std::map;
+use std::map::hashmap;
export expand;

View file

@ -1,8 +1,8 @@
-import std::map::hashmap;
-import parse::parser;
-import diagnostic::span_handler;
-import codemap::{codemap, span, expn_info, expanded_from};
-import std::map::str_hash;
+use std::map::hashmap;
+use parse::parser;
+use diagnostic::span_handler;
+use codemap::{codemap, span, expn_info, expanded_from};
+use std::map::str_hash;
// obsolete old-style #macro code:
//

View file

@ -1,5 +1,5 @@
-import codemap::span;
-import base::ext_ctxt;
+use codemap::span;
+use base::ext_ctxt;
fn mk_expr(cx: ext_ctxt, sp: codemap::span, expr: ast::expr_) ->
@ast::expr {

View file

@ -1,4 +1,4 @@
-import base::*;
+use base::*;
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {

View file

@ -4,8 +4,8 @@
* should all get sucked into either the compiler syntax extension plugin
* interface.
*/
-import base::*;
-import build::mk_uniq_str;
+use base::*;
+use build::mk_uniq_str;
export expand_syntax_ext;
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,

View file

@ -1,14 +1,14 @@
-import std::map::hashmap;
+use std::map::hashmap;
-import ast::{crate, expr_, expr_mac, mac_invoc, mac_invoc_tt,
+use ast::{crate, expr_, expr_mac, mac_invoc, mac_invoc_tt,
tt_delim, tt_tok, item_mac};
-import fold::*;
-import ext::base::*;
-import ext::qquote::{qq_helper};
-import parse::{parser, parse_expr_from_source_str, new_parser_from_tt};
+use fold::*;
+use ext::base::*;
+use ext::qquote::{qq_helper};
+use parse::{parser, parse_expr_from_source_str, new_parser_from_tt};
-import codemap::{span, expanded_from};
+use codemap::{span, expanded_from};
fn expand_expr(exts: hashmap<~str, syntax_extension>, cx: ext_ctxt,
e: expr_, s: span, fld: ast_fold,

View file

@ -5,10 +5,10 @@
* should all get sucked into either the standard library extfmt module or the
* compiler syntax extension plugin interface.
*/
-import extfmt::ct::*;
-import base::*;
-import codemap::span;
-import ext::build::*;
+use extfmt::ct::*;
+use base::*;
+use codemap::span;
+use ext::build::*;
export expand_syntax_ext;
fn expand_syntax_ext(cx: ext_ctxt, sp: span, arg: ast::mac_arg,

View file

@ -1,6 +1,5 @@
-import base::*;
-import build::mk_uniq_str;
-import option;
+use base::*;
+use build::mk_uniq_str;
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, arg: ast::mac_arg,
_body: ast::mac_body) -> @ast::expr {

View file

@ -1,5 +1,5 @@
-import base::*;
-import io::WriterUtil;
+use base::*;
+use io::WriterUtil;
fn expand_syntax_ext(cx: ext_ctxt, sp: codemap::span, tt: ~[ast::token_tree])
-> base::mac_result {

View file

@ -33,16 +33,16 @@ FIXME (#3072) - This is still incomplete.
*/
-import codemap::span;
-import ext::base::ext_ctxt;
-import ast::tt_delim;
-import parse::lexer::{new_tt_reader, reader};
-import parse::parser::{parser, SOURCE_FILE};
-import parse::common::parser_common;
+use codemap::span;
+use ext::base::ext_ctxt;
+use ast::tt_delim;
+use parse::lexer::{new_tt_reader, reader};
+use parse::parser::{parser, SOURCE_FILE};
+use parse::common::parser_common;
-import pipes::parse_proto::proto_parser;
+use pipes::parse_proto::proto_parser;
-import pipes::proto::{visit, protocol};
+use pipes::proto::{visit, protocol};
fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident,
tt: ~[ast::token_tree]) -> base::mac_result

View file

@ -3,10 +3,10 @@
// To start with, it will be use dummy spans, but it might someday do
// something smarter.
-import ast::{ident, node_id};
-import ast_util::respan;
-import codemap::span;
-import ext::base::mk_ctxt;
+use ast::{ident, node_id};
+use ast_util::respan;
+use codemap::span;
+use ext::base::mk_ctxt;
// Transitional reexports so qquote can find the paths it is looking for
mod syntax {

View file

@ -19,10 +19,10 @@ that.
*/
-import ext::base::ext_ctxt;
+use ext::base::ext_ctxt;
-import proto::{state, protocol, next_state};
-import ast_builder::empty_span;
+use proto::{state, protocol, next_state};
+use ast_builder::empty_span;
impl ext_ctxt: proto::visitor<(), (), ()> {
fn visit_proto(_proto: protocol,

View file

@ -27,9 +27,9 @@ updating the states using rule (2) until there are no changes.
*/
-import std::bitv::{Bitv};
+use std::bitv::{Bitv};
-import ast_builder::empty_span;
+use ast_builder::empty_span;
fn analyze(proto: protocol, _cx: ext_ctxt) {
debug!("initializing colive analysis");

View file

@ -1,9 +1,9 @@
// Parsing pipes protocols from token trees.
-import parse::parser;
-import parse::token;
+use parse::parser;
+use parse::token;
-import pipec::*;
+use pipec::*;
trait proto_parser {
fn parse_proto(id: ~str) -> protocol;

View file

@ -1,19 +1,18 @@
// A protocol compiler for Rust.
-import to_str::ToStr;
+use to_str::ToStr;
-import dvec::DVec;
+use dvec::DVec;
-import ast::ident;
-import util::interner;
-import print::pprust;
-import pprust::{item_to_str, ty_to_str};
-import ext::base::{mk_ctxt, ext_ctxt};
-import parse;
-import parse::*;
-import proto::*;
+use ast::ident;
+use util::interner;
+use print::pprust;
+use pprust::{item_to_str, ty_to_str};
+use ext::base::{mk_ctxt, ext_ctxt};
+use parse::*;
+use proto::*;
-import ast_builder::{append_types, path, empty_span};
+use ast_builder::{append_types, path, empty_span};
// Transitional reexports so qquote can find the paths it is looking for
mod syntax {

View file

@ -1,7 +1,7 @@
-import to_str::ToStr;
-import dvec::DVec;
+use to_str::ToStr;
+use dvec::DVec;
-import ast_builder::{path, append_types};
+use ast_builder::{path, append_types};
enum direction { send, recv }

View file

@ -1,18 +1,18 @@
-import ast::{crate, expr_, mac_invoc,
+use ast::{crate, expr_, mac_invoc,
mac_aq, mac_var};
-import parse::parser;
-import parse::parser::parse_from_source_str;
-import dvec::DVec;
-import parse::token::ident_interner;
+use parse::parser;
+use parse::parser::parse_from_source_str;
+use dvec::DVec;
+use parse::token::ident_interner;
-import fold::*;
-import visit::*;
-import ext::base::*;
-import ext::build::*;
-import print::*;
-import io::*;
+use fold::*;
+use visit::*;
+use ext::base::*;
+use ext::build::*;
+use print::*;
+use io::*;
-import codemap::span;
+use codemap::span;
struct gather_item {
lo: uint;

View file

@ -1,12 +1,12 @@
-import codemap::span;
-import std::map::{hashmap, str_hash, uint_hash};
-import dvec::DVec;
+use codemap::span;
+use std::map::{hashmap, str_hash, uint_hash};
+use dvec::DVec;
-import base::*;
+use base::*;
-import fold::*;
-import ast_util::respan;
-import ast::{ident, path, ty, blk_, expr, expr_path,
+use fold::*;
+use ast_util::respan;
+use ast::{ident, path, ty, blk_, expr, expr_path,
expr_vec, expr_mac, mac_invoc, node_id, expr_index};
export add_new_extension;

View file

@ -1,8 +1,7 @@
-import base::*;
-import ast;
-import codemap::span;
-import print::pprust;
-import build::{mk_base_vec_e,mk_uint,mk_u8,mk_uniq_str};
+use base::*;
+use codemap::span;
+use print::pprust;
+use build::{mk_base_vec_e,mk_uint,mk_u8,mk_uniq_str};
export expand_line;
export expand_col;

View file

@ -1,9 +1,9 @@
-import codemap::span;
-import ext::base::ext_ctxt;
-import ast::tt_delim;
-import parse::lexer::{new_tt_reader, reader};
-import parse::parser::{parser, SOURCE_FILE};
-import parse::common::parser_common;
+use codemap::span;
+use ext::base::ext_ctxt;
+use ast::tt_delim;
+use parse::lexer::{new_tt_reader, reader};
+use parse::parser::{parser, SOURCE_FILE};
+use parse::common::parser_common;
fn expand_trace_macros(cx: ext_ctxt, sp: span,
tt: ~[ast::token_tree]) -> base::mac_result

View file

@ -1,16 +1,16 @@
// Earley-like parser for macros.
-import parse::token;
-import parse::token::{token, EOF, to_str, nonterminal};
-import parse::lexer::*; //resolve bug?
+use parse::token;
+use parse::token::{token, EOF, to_str, nonterminal};
+use parse::lexer::*; //resolve bug?
//import parse::lexer::{reader, tt_reader, tt_reader_as_reader};
-import parse::parser::{parser,SOURCE_FILE};
+use parse::parser::{parser,SOURCE_FILE};
//import parse::common::parser_common;
-import parse::common::*; //resolve bug?
-import parse::parse_sess;
-import dvec::DVec;
-import ast::{matcher, match_tok, match_seq, match_nonterminal, ident};
-import ast_util::mk_sp;
-import std::map::{hashmap, uint_hash};
+use parse::common::*; //resolve bug?
+use parse::parse_sess;
+use dvec::DVec;
+use ast::{matcher, match_tok, match_seq, match_nonterminal, ident};
+use ast_util::mk_sp;
+use std::map::{hashmap, uint_hash};
/* This is an Earley-like parser, without support for in-grammar nonterminals,
only by calling out to the main rust parser for named nonterminals (which it

View file

@ -1,14 +1,14 @@
-import base::{ext_ctxt, mac_result, mr_expr, mr_def, expr_tt};
-import codemap::span;
-import ast::{ident, matcher_, matcher, match_tok,
+use base::{ext_ctxt, mac_result, mr_expr, mr_def, expr_tt};
+use codemap::span;
+use ast::{ident, matcher_, matcher, match_tok,
match_nonterminal, match_seq, tt_delim};
-import parse::lexer::{new_tt_reader, reader};
-import parse::token::{FAT_ARROW, SEMI, LBRACE, RBRACE, nt_matchers, nt_tt};
-import parse::parser::{parser, SOURCE_FILE};
-import macro_parser::{parse, parse_or_else, success, failure, named_match,
+use parse::lexer::{new_tt_reader, reader};
+use parse::token::{FAT_ARROW, SEMI, LBRACE, RBRACE, nt_matchers, nt_tt};
+use parse::parser::{parser, SOURCE_FILE};
+use macro_parser::{parse, parse_or_else, success, failure, named_match,
matched_seq, matched_nonterminal, error};
-import std::map::hashmap;
-import parse::token::special_idents;
+use std::map::hashmap;
+use parse::token::special_idents;
fn add_new_extension(cx: ext_ctxt, sp: span, name: ident,
arg: ~[ast::token_tree]) -> base::mac_result {

View file

@ -1,10 +1,10 @@
-import diagnostic::span_handler;
-import ast::{token_tree, tt_delim, tt_tok, tt_seq, tt_nonterminal,ident};
-import macro_parser::{named_match, matched_seq, matched_nonterminal};
-import codemap::span;
-import parse::token::{EOF, INTERPOLATED, IDENT, token, nt_ident,
+use diagnostic::span_handler;
+use ast::{token_tree, tt_delim, tt_tok, tt_seq, tt_nonterminal,ident};
+use macro_parser::{named_match, matched_seq, matched_nonterminal};
+use codemap::span;
+use parse::token::{EOF, INTERPOLATED, IDENT, token, nt_ident,
ident_interner};
-import std::map::{hashmap, box_str_hash};
+use std::map::{hashmap, box_str_hash};
export tt_reader, new_tt_reader, dup_tt_reader, tt_next_token;

View file

@ -1,5 +1,5 @@
-import codemap::span;
-import ast::*;
+use codemap::span;
+use ast::*;
export ast_fold_precursor;
export ast_fold;

View file

@ -12,14 +12,14 @@ export parse_expr_from_source_str, parse_item_from_source_str;
export parse_stmt_from_source_str;
export parse_from_source_str;
-import parser::parser;
-import attr::parser_attr;
-import common::parser_common;
-import ast::node_id;
-import util::interner;
-import diagnostic::{span_handler, mk_span_handler, mk_handler, emitter};
-import lexer::{reader, string_reader};
-import parse::token::{ident_interner, mk_ident_interner};
+use parser::parser;
+use attr::parser_attr;
+use common::parser_common;
+use ast::node_id;
+use util::interner;
+use diagnostic::{span_handler, mk_span_handler, mk_handler, emitter};
+use lexer::{reader, string_reader};
+use parse::token::{ident_interner, mk_ident_interner};
type parse_sess = @{
cm: codemap::codemap,

View file

@ -1,6 +1,6 @@
-import either::{Either, Left, Right};
-import ast_util::spanned;
-import common::*; //resolve bug?
+use either::{Either, Left, Right};
+use ast_util::spanned;
+use common::*; //resolve bug?
export attr_or_ext;
export parser_attr;

View file

@ -2,7 +2,7 @@
Predicates on exprs and stmts that the pretty-printer and parser use
*/
-import ast_util::operator_prec;
+use ast_util::operator_prec;
fn expr_requires_semi_to_be_stmt(e: @ast::expr) -> bool {
match e.node {

View file

@ -1,6 +1,6 @@
-import io::println;//XXXXXXXXxxx
-import util::interner;
-import lexer::{string_reader, bump, is_eof, nextch,
+use io::println;//XXXXXXXXxxx
+use util::interner;
+use lexer::{string_reader, bump, is_eof, nextch,
is_whitespace, get_str_from, reader};
export cmnt;

View file

@ -1,7 +1,7 @@
-import std::map::{hashmap};
-import ast_util::spanned;
-import parser::parser;
-import lexer::reader;
+use std::map::{hashmap};
+use ast_util::spanned;
+use parser::parser;
+use lexer::reader;
type seq_sep = {
sep: Option<token::token>,

View file

@ -1,5 +1,5 @@
-import parser::{parser, SOURCE_FILE};
-import attr::parser_attr;
+use parser::{parser, SOURCE_FILE};
+use attr::parser_attr;
export eval_crate_directives_to_mod;

View file

@ -1,6 +1,6 @@
-import diagnostic::span_handler;
-import codemap::span;
-import ext::tt::transcribe::{tt_reader, new_tt_reader, dup_tt_reader,
+use diagnostic::span_handler;
+use codemap::span;
+use ext::tt::transcribe::{tt_reader, new_tt_reader, dup_tt_reader,
tt_next_token};
export reader, string_reader, new_string_reader, is_whitespace;

View file

@ -1,21 +1,21 @@
-import print::pprust::expr_to_str;
+use print::pprust::expr_to_str;
-import result::Result;
-import either::{Either, Left, Right};
-import std::map::{hashmap, str_hash};
-import token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident,
+use result::Result;
+use either::{Either, Left, Right};
+use std::map::{hashmap, str_hash};
+use token::{can_begin_expr, is_ident, is_ident_or_path, is_plain_ident,
INTERPOLATED};
-import codemap::{span,fss_none};
-import util::interner::interner;
-import ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
-import lexer::reader;
-import prec::{as_prec, token_to_binop};
-import attr::parser_attr;
-import common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed,
+use codemap::{span,fss_none};
+use util::interner::interner;
+use ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
+use lexer::reader;
+use prec::{as_prec, token_to_binop};
+use attr::parser_attr;
+use common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed,
seq_sep_none, token_to_str};
-import dvec::DVec;
-import vec::{push};
-import ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute,
+use dvec::DVec;
+use vec::{push};
+use ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute,
bind_by_ref, bind_by_implicit_ref, bind_by_value, bind_by_move,
bitand, bitor, bitxor, blk, blk_check_mode, bound_const,
bound_copy, bound_send, bound_trait, bound_owned, box, by_copy,
@ -69,7 +69,7 @@ export SOURCE_FILE;
// defined in `parse` Fixing this will be easier when we have export
// decls on individual items -- then parse can export this publicly, and
// everything else crate-visibly.
-import parse_from_source_str;
+use parse::parse_from_source_str;
export parse_from_source_str;
export item_or_view_item, iovi_none, iovi_view_item, iovi_item;

View file

@ -2,9 +2,9 @@ export as_prec;
export unop_prec;
export token_to_binop;
-import token::*;
-import token::token;
-import ast::*;
+use token::*;
+use token::token;
+use ast::*;
/// Unary operators have higher precedence than binary
const unop_prec: uint = 100u;

View file

@ -1,7 +1,7 @@
-import util::interner;
-import util::interner::interner;
-import std::map::{hashmap, str_hash};
-import std::serialization::{serializer,
+use util::interner;
+use util::interner::interner;
+use std::map::{hashmap, str_hash};
+use std::serialization::{serializer,
deserializer,
serialize_uint,
deserialize_uint,

View file

@ -1,5 +1,5 @@
-import io::WriterUtil;
-import dvec::DVec;
+use io::WriterUtil;
+use dvec::DVec;
/*
* This pretty-printer is a direct reimplementation of Philip Karlton's

View file

@ -1,14 +1,12 @@
-import parse::{comments, lexer, token};
-import codemap::codemap;
-import pp::{break_offset, word, printer,
-space, zerobreak, hardbreak, breaks, consistent,
-inconsistent, eof};
-import diagnostic;
-import ast::{required, provided};
-import ast_util::{operator_prec};
-import dvec::DVec;
-import parse::classify::*;
-import parse::token::ident_interner;
+use parse::{comments, lexer, token};
+use codemap::codemap;
+use pp::{break_offset, word, printer, space, zerobreak, hardbreak, breaks};
+use pp::{consistent, inconsistent, eof};
+use ast::{required, provided};
+use ast_util::{operator_prec};
+use dvec::DVec;
+use parse::classify::*;
+use parse::token::ident_interner;
// The ps is stored here to prevent recursive type.
enum ann_node {

View file

@ -1,9 +1,9 @@
// An "interner" is a data structure that associates values with uint tags and
// allows bidirectional lookup; i.e. given a value, one can easily find the
// type, and vice versa.
-import std::map;
-import std::map::{hashmap, hashfn, eqfn};
-import dvec::DVec;
+use std::map;
+use std::map::{hashmap, hashfn, eqfn};
+use dvec::DVec;
type hash_interner<T: const> =
{map: hashmap<T, uint>,

View file

@ -1,6 +1,6 @@
-import ast::*;
-import codemap::span;
+use ast::*;
+use codemap::span;
// Context-passing AST walker. Each overridden visit method has full control
// over what happens with its node, it can do its own traversal of the node's