Merge remote-tracking branch 'mozilla/master'

Conflicts:
	src/libextra/test.rs
	src/libstd/rt/global_heap.rs
	src/libstd/unstable/lang.rs
	src/libstd/vec.rs
Brian Anderson 2013-07-08 16:20:38 -07:00
commit fae3336769
247 changed files with 5152 additions and 5749 deletions

View file

@ -139,11 +139,11 @@ endif
# version-string calculation
CFG_GIT_DIR := $(CFG_SRC_DIR).git
CFG_RELEASE = 0.7
CFG_RELEASE = 0.8-pre
CFG_VERSION = $(CFG_RELEASE)
# windows exe's need numeric versions - don't use anything but
# numbers and dots here
CFG_VERSION_WIN = 0.7
CFG_VERSION_WIN = 0.8
ifneq ($(wildcard $(CFG_GIT)),)
ifneq ($(wildcard $(CFG_GIT_DIR)),)

configure vendored (2 lines changed)
View file

@ -402,7 +402,7 @@ fi
step_msg "looking for build programs"
probe_need CFG_PERL perl
probe_need CFG_CURL curl
probe_need CFG_CURLORWGET curl wget
probe_need CFG_PYTHON python2.7 python2.6 python2 python
python_version=$($CFG_PYTHON -V 2>&1)

View file

@ -207,7 +207,7 @@ The keywords are the following strings:
as
break
copy
do drop
do
else enum extern
false fn for
if impl
@ -1107,11 +1107,11 @@ The derived types are borrowed pointers with the `'static` lifetime,
fixed-size arrays, tuples, and structs.
~~~~
static bit1: uint = 1 << 0;
static bit2: uint = 1 << 1;
static BIT1: uint = 1 << 0;
static BIT2: uint = 1 << 1;
static bits: [uint, ..2] = [bit1, bit2];
static string: &'static str = "bitstring";
static BITS: [uint, ..2] = [BIT1, BIT2];
static STRING: &'static str = "bitstring";
struct BitsNStrings<'self> {
mybits: [uint, ..2],
@ -1119,8 +1119,8 @@ struct BitsNStrings<'self> {
}
static bits_n_strings: BitsNStrings<'static> = BitsNStrings {
mybits: bits,
mystring: string
mybits: BITS,
mystring: STRING
};
~~~~
@ -2869,9 +2869,6 @@ The kinds are:
: Types of this kind can be safely sent between tasks.
This kind includes scalars, owning pointers, owned closures, and
structural types containing only other owned types. All `Send` types are `Static`.
`Static`
: Types of this kind do not contain any borrowed pointers;
this can be a useful guarantee for code that breaks borrowing assumptions using [`unsafe` operations](#unsafe-functions).
`Copy`
: This kind includes all types that can be copied. All types with
sendable kind are copyable, as are managed boxes, managed closures,
@ -2879,14 +2876,12 @@ The kinds are:
Types with destructors (types that implement `Drop`) can not implement `Copy`.
`Drop`
: This is not strictly a kind, but its presence interacts with kinds: the `Drop`
trait provides a single method `finalize` that takes no parameters, and is run
trait provides a single method `drop` that takes no parameters, and is run
when values of the type are dropped. Such a method is called a "destructor",
and is always executed in "top-down" order: a value is completely destroyed
before any of the values it owns run their destructors. Only `Send` types
that do not implement `Copy` can implement `Drop`.
> **Note:** The `finalize` method may be renamed in future versions of Rust.
_Default_
: Types with destructors, closure environments,
and various other _non-first-class_ types,
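
For reference, a minimal sketch of the renamed destructor hook described above, in the 0.8-pre syntax this commit targets; the `Noisy` type and its output are invented for illustration:

~~~ {.rust}
struct Noisy { id: int }

impl Drop for Noisy {
    // `drop` (formerly `finalize`) runs when a value of the type is dropped.
    fn drop(&self) {
        println(fmt!("dropping Noisy %d", self.id));
    }
}

fn main() {
    let _a = Noisy { id: 1 };
    let _b = Noisy { id: 2 };
    // Destructors run in "top-down" order as the values go out of scope.
}
~~~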

View file

@ -108,7 +108,7 @@ impl Iterator<int> for ZeroStream {
## Container iterators
Containers implement iteration over the contained elements by returning an
iterator object. For example, vectors have four iterators available:
iterator object. For example, vector slices have four iterators available:
* `vector.iter()`, for immutable references to the elements
* `vector.mut_iter()`, for mutable references to the elements
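
A small sketch of the two slice iterators listed above, using the `for ... advance` protocol that appears elsewhere in this commit (0.8-pre-era syntax, assumed here for illustration):

~~~ {.rust}
fn main() {
    let mut xs = ~[1, 2, 3];
    // Immutable references to the elements.
    for xs.iter().advance |x| {
        println(fmt!("%d", *x));
    }
    // Mutable references to the elements.
    for xs.mut_iter().advance |x| {
        *x += 1;
    }
    assert_eq!(xs, ~[2, 3, 4]);
}
~~~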

View file

@ -237,8 +237,8 @@ can specify a variable's type by following it with a colon, then the type
name. Static items, on the other hand, always require a type annotation.
~~~~
static monster_factor: float = 57.8;
let monster_size = monster_factor * 10.0;
static MONSTER_FACTOR: float = 57.8;
let monster_size = MONSTER_FACTOR * 10.0;
let monster_size: int = 50;
~~~~

View file

@ -323,9 +323,9 @@ AR_i686-pc-mingw32=$(AR)
CFG_LIB_NAME_i686-pc-mingw32=$(1).dll
CFG_LIB_GLOB_i686-pc-mingw32=$(1)-*.dll
CFG_LIB_DSYM_GLOB_i686-pc-mingw32=$(1)-*.dylib.dSYM
CFG_GCCISH_CFLAGS_i686-pc-mingw32 := -Wall -Werror -g -march=i686
CFG_GCCISH_CFLAGS_i686-pc-mingw32 := -Wall -Werror -g -m32 -march=i686 -D_WIN32_WINNT=0x0600
CFG_GCCISH_CXXFLAGS_i686-pc-mingw32 := -fno-rtti
CFG_GCCISH_LINK_FLAGS_i686-pc-mingw32 := -shared -fPIC -g
CFG_GCCISH_LINK_FLAGS_i686-pc-mingw32 := -shared -fPIC -g -m32
CFG_GCCISH_DEF_FLAG_i686-pc-mingw32 :=
CFG_GCCISH_PRE_LIB_FLAGS_i686-pc-mingw32 :=
CFG_GCCISH_POST_LIB_FLAGS_i686-pc-mingw32 :=
@ -367,6 +367,31 @@ CFG_LDPATH_i586-mingw32msvc :=
CFG_RUN_i586-mingw32msvc=
CFG_RUN_TARG_i586-mingw32msvc=
# x86_64-w64-mingw32 configuration
CC_x86_64-w64-mingw32=$(CC)
CXX_x86_64-w64-mingw32=$(CXX)
CPP_x86_64-w64-mingw32=$(CPP)
AR_x86_64-w64-mingw32=$(AR)
CFG_LIB_NAME_x86_64-w64-mingw32=$(1).dll
CFG_LIB_GLOB_x86_64-w64-mingw32=$(1)-*.dll
CFG_LIB_DSYM_GLOB_x86_64-w64-mingw32=$(1)-*.dylib.dSYM
CFG_GCCISH_CFLAGS_x86_64-w64-mingw32 := -Wall -Werror -g -m64 -D_WIN32_WINNT=0x0600
CFG_GCCISH_CXXFLAGS_x86_64-w64-mingw32 := -fno-rtti
CFG_GCCISH_LINK_FLAGS_x86_64-w64-mingw32 := -shared -fPIC -g -m64
CFG_GCCISH_DEF_FLAG_x86_64-w64-mingw32 :=
CFG_GCCISH_PRE_LIB_FLAGS_x86_64-w64-mingw32 :=
CFG_GCCISH_POST_LIB_FLAGS_x86_64-w64-mingw32 :=
CFG_DEF_SUFFIX_x86_64-w64-mingw32 := .mingw32.def
CFG_INSTALL_NAME_x86_64-w64-mingw32 =
CFG_LIBUV_LINK_FLAGS_x86_64-w64-mingw32 := -lWs2_32 -lpsapi -liphlpapi
CFG_EXE_SUFFIX_x86_64-w64-mingw32 := .exe
CFG_WINDOWSY_x86_64-w64-mingw32 := 1
CFG_UNIXY_x86_64-w64-mingw32 :=
CFG_PATH_MUNGE_x86_64-w64-mingw32 :=
CFG_LDPATH_x86_64-w64-mingw32 :=$(CFG_LDPATH_x86_64-w64-mingw32):$(PATH)
CFG_RUN_x86_64-w64-mingw32=PATH="$(CFG_LDPATH_x86_64-w64-mingw32):$(1)" $(2)
CFG_RUN_TARG_x86_64-w64-mingw32=$(call CFG_RUN_x86_64-w64-mingw32,$(HLIB$(1)_H_$(CFG_BUILD_TRIPLE)),$(2))
# x86_64-unknown-freebsd configuration
CC_x86_64-unknown-freebsd=$(CC)
CXX_x86_64-unknown-freebsd=$(CXX)

View file

@ -15,7 +15,7 @@
# The names of crates that must be tested
TEST_TARGET_CRATES = std extra
TEST_HOST_CRATES = syntax rustc rustdoc rusti rust rustpkg
TEST_HOST_CRATES = syntax rustc rustdoc rust rustpkg rusti
TEST_CRATES = $(TEST_TARGET_CRATES) $(TEST_HOST_CRATES)
# Markdown files under doc/ that should have their code extracted and run
@ -157,6 +157,7 @@ check-test: cleantestlibs cleantmptestlogs all check-stage2-rfail
check-lite: cleantestlibs cleantmptestlogs \
check-stage2-std check-stage2-extra check-stage2-rpass \
check-stage2-rustpkg check-stage2-rusti \
check-stage2-rfail check-stage2-cfail
$(Q)$(CFG_PYTHON) $(S)src/etc/check-summary.py tmp/*.log

View file

@ -50,7 +50,7 @@ $$(TLIB$(1)_T_$(4)_H_$(3))/$(CFG_LIBRUSTPKG_$(4)): \
$$(TLIB$(1)_T_$(4)_H_$(3))/$(CFG_EXTRALIB_$(4)) \
$$(TLIB$(1)_T_$(4)_H_$(3))/$(CFG_LIBRUSTC_$(4))
@$$(call E, compile_and_link: $$@)
$$(STAGE$(1)_T_$(4)_H_$(3)) -o $$@ $$< && touch $$@
$$(STAGE$(1)_T_$(4)_H_$(3)) $$(WFLAGS_ST$(1)) -o $$@ $$< && touch $$@
$$(TBIN$(1)_T_$(4)_H_$(3))/rustpkg$$(X_$(4)): \
$$(DRIVER_CRATE) \

View file

@ -8,8 +8,6 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::prelude::*;
#[deriving(Eq)]
pub enum mode {
mode_compile_fail,

View file

@ -11,21 +11,16 @@
#[crate_type = "bin"];
#[allow(non_camel_case_types)];
#[allow(unrecognized_lint)]; // NOTE: remove after snapshot
#[deny(warnings)];
#[no_core]; // XXX: Remove after snapshot
#[no_std];
extern mod extra;
extern mod core(name = "std", vers = "0.7");
extern mod extra(name = "extra", vers = "0.7");
use core::prelude::*;
use core::*;
use std::os;
use extra::getopts;
use extra::test;
use core::result::{Ok, Err};
use common::config;
use common::mode_run_pass;
use common::mode_run_fail;
@ -42,13 +37,6 @@ pub mod runtest;
pub mod common;
pub mod errors;
mod std {
pub use core::cmp;
pub use core::str;
pub use core::sys;
pub use core::unstable;
}
pub fn main() {
let args = os::args();
let config = parse_config(args);
@ -98,8 +86,8 @@ pub fn parse_config(args: ~[~str]) -> config {
run_ignored: getopts::opt_present(matches, "ignored"),
filter:
if !matches.free.is_empty() {
option::Some(copy matches.free[0])
} else { option::None },
Some(copy matches.free[0])
} else { None },
logfile: getopts::opt_maybe_str(matches, "logfile").map(|s| Path(*s)),
runtool: getopts::opt_maybe_str(matches, "runtool"),
rustcflags: getopts::opt_maybe_str(matches, "rustcflags"),
@ -148,8 +136,8 @@ pub fn log_config(config: &config) {
pub fn opt_str<'a>(maybestr: &'a Option<~str>) -> &'a str {
match *maybestr {
option::None => "(none)",
option::Some(ref s) => {
None => "(none)",
Some(ref s) => {
let s: &'a str = *s;
s
}
@ -161,7 +149,7 @@ pub fn opt_str2(maybestr: Option<~str>) -> ~str {
}
pub fn str_opt(maybestr: ~str) -> Option<~str> {
if maybestr != ~"(none)" { option::Some(maybestr) } else { option::None }
if maybestr != ~"(none)" { Some(maybestr) } else { None }
}
pub fn str_mode(s: ~str) -> mode {
@ -199,8 +187,8 @@ pub fn test_opts(config: &config) -> test::TestOpts {
logfile: copy config.logfile,
run_tests: true,
run_benchmarks: false,
save_results: option::None,
compare_results: option::None
save_results: None,
compare_results: None
}
}
@ -268,7 +256,7 @@ pub fn make_test_name(config: &config, testfile: &Path) -> test::TestName {
}
pub fn make_test_closure(config: &config, testfile: &Path) -> test::TestFn {
use core::cell::Cell;
use std::cell::Cell;
let config = Cell::new(copy *config);
let testfile = Cell::new(testfile.to_str());
test::DynTestFn(|| { runtest::run(config.take(), testfile.take()) })

View file

@ -8,9 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::prelude::*;
use core::io;
use std::io;
pub struct ExpectedError { line: uint, kind: ~str, msg: ~str }

View file

@ -8,13 +8,11 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::prelude::*;
use common::config;
use common;
use core::io;
use core::os;
use std::io;
use std::os;
pub struct TestProps {
// Lines that should be expected, in order, on standard out

View file

@ -8,11 +8,9 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::prelude::*;
use core::os;
use core::run;
use core::str;
use std::os;
use std::run;
use std::str;
#[cfg(target_os = "win32")]
fn target_env(lib_path: &str, prog: &str) -> ~[(~str,~str)] {

View file

@ -8,8 +8,6 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::prelude::*;
use common::mode_run_pass;
use common::mode_run_fail;
use common::mode_compile_fail;
@ -22,10 +20,10 @@ use procsrv;
use util;
use util::logv;
use core::io;
use core::os;
use core::uint;
use core::vec;
use std::io;
use std::os;
use std::uint;
use std::vec;
pub fn run(config: config, testfile: ~str) {
if config.verbose {
@ -79,8 +77,8 @@ fn run_rfail_test(config: &config, props: &TestProps, testfile: &Path) {
};
// The value our Makefile configures valgrind to return on failure
static valgrind_err: int = 100;
if ProcRes.status == valgrind_err {
static VALGRIND_ERR: int = 100;
if ProcRes.status == VALGRIND_ERR {
fatal_ProcRes(~"run-fail test isn't valgrind-clean!", &ProcRes);
}
@ -102,8 +100,8 @@ fn run_rfail_test(config: &config, props: &TestProps, testfile: &Path) {
fn check_correct_failure_status(ProcRes: &ProcRes) {
// The value the rust runtime returns on failure
static rust_err: int = 101;
if ProcRes.status != rust_err {
static RUST_ERR: int = 101;
if ProcRes.status != RUST_ERR {
fatal_ProcRes(
fmt!("failure produced the wrong error code: %d",
ProcRes.status),
@ -601,9 +599,8 @@ fn make_run_args(config: &config, _props: &TestProps, testfile: &Path) ->
ProcArgs {
// If we've got another tool to run under (valgrind),
// then split apart its command
let toolargs = split_maybe_args(&config.runtool);
let mut args = toolargs + [make_exe_name(config, testfile).to_str()];
let mut args = split_maybe_args(&config.runtool);
args.push(make_exe_name(config, testfile).to_str());
let prog = args.shift();
return ProcArgs {prog: prog, args: args};
}

View file

@ -8,12 +8,10 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::prelude::*;
use common::config;
use core::io;
use core::os::getenv;
use std::io;
use std::os::getenv;
pub fn make_new_path(path: &str) -> ~str {

View file

@ -8,11 +8,6 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[no_core];
#[no_std];
extern mod core(name = "std", vers = "0.7");
#[cfg(rustpkg)]
extern mod this(name = "rustpkg");

View file

@ -7,7 +7,7 @@
<!ENTITY rustIdent "[a-zA-Z_][a-zA-Z_0-9]*">
<!ENTITY rustIntSuf "([iu](8|16|32|64)?)?">
]>
<language name="Rust" version="0.7" kateversion="2.4" section="Sources" extensions="*.rs;*.rc" mimetype="text/x-rust" priority="15">
<language name="Rust" version="0.8-pre" kateversion="2.4" section="Sources" extensions="*.rs;*.rc" mimetype="text/x-rust" priority="15">
<highlighting>
<list name="fn">
<item> fn </item>

View file

@ -43,7 +43,7 @@ fi
cp ${PREFIX}/bin/rustc ${TARG_DIR}/stage0/bin/
cp ${PREFIX}/lib/rustc/${TARG_DIR}/${LIBDIR}/* ${TARG_DIR}/stage0/${LIBDIR}/
cp ${PREFIX}/lib/libextra*${LIB_SUF} ${TARG_DIR}/stage0/${LIBDIR}/
cp ${PREFIX}/lib/librust*${LIB_SUF} ${TARG_DIR}/stage0/${LIBDIR}/
cp ${PREFIX}/lib/libcore*${LIB_SUF} ${TARG_DIR}/stage0/${LIBDIR}/
cp ${PREFIX}/lib/libstd*${LIB_SUF} ${TARG_DIR}/stage0/${LIBDIR}/
cp ${PREFIX}/lib/libsyntax*${LIB_SUF} ${TARG_DIR}/stage0/${LIBDIR}/

View file

@ -1,6 +1,6 @@
# xfail-license
import re, os, sys, glob, tarfile, shutil, subprocess, tempfile
import re, os, sys, glob, tarfile, shutil, subprocess, tempfile, distutils.spawn
try:
import hashlib
@ -132,7 +132,13 @@ def local_rev_committer_date():
def get_url_to_file(u,f):
# no security issue, just to stop partial download leaving a stale file
tmpf = f + '.tmp'
returncode = subprocess.call(["curl", "-o", tmpf, u])
returncode = -1
if distutils.spawn.find_executable("curl"):
returncode = subprocess.call(["curl", "-o", tmpf, u])
elif distutils.spawn.find_executable("wget"):
returncode = subprocess.call(["wget", "-O", tmpf, u])
if returncode != 0:
os.unlink(tmpf)
raise

View file

@ -250,6 +250,7 @@ rf.write('''// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGH
// The following code was generated by "src/etc/unicode.py"
#[allow(missing_doc)];
#[allow(non_uppercase_statics)];
''')

View file

@ -95,7 +95,7 @@ syn region rustDeriving start="deriving(" end=")" contains=rustTrait
" Number literals
syn match rustNumber display "\<[0-9][0-9_]*\>"
syn match rustNumber display "\<[0-9][0-9_]*\(u\|u8\|u16\|u32\|u64\)\>"
syn match rustNumber display "\<[0-9][0-9_]*\(i8\|i16\|i32\|i64\)\>"
syn match rustNumber display "\<[0-9][0-9_]*\(i\|i8\|i16\|i32\|i64\)\>"
syn match rustHexNumber display "\<0x[a-fA-F0-9_]\+\>"
syn match rustHexNumber display "\<0x[a-fA-F0-9_]\+\(u\|u8\|u16\|u32\|u64\)\>"

View file

@ -10,17 +10,37 @@
//! Base64 binary-to-text encoding
use std::vec;
/// A trait for converting a value to base64 encoding.
pub trait ToBase64 {
/// Converts the value of `self` to a base64 value, returning the owned
/// string
fn to_base64(&self) -> ~str;
/// Available encoding character sets
pub enum CharacterSet {
/// The standard character set (uses '+' and '/')
Standard,
/// The URL safe character set (uses '-' and '_')
UrlSafe
}
static CHARS: [char, ..64] = [
/// Contains configuration parameters for to_base64
pub struct Config {
/// Character set to use
char_set: CharacterSet,
/// True to pad output with '=' characters
pad: bool,
/// Some(len) to wrap lines at len, None to disable line wrapping
line_length: Option<uint>
}
/// Configuration for RFC 4648 standard base64 encoding
pub static STANDARD: Config =
Config {char_set: Standard, pad: true, line_length: None};
/// Configuration for RFC 4648 base64url encoding
pub static URL_SAFE: Config =
Config {char_set: UrlSafe, pad: false, line_length: None};
/// Configuration for RFC 2045 MIME base64 encoding
pub static MIME: Config =
Config {char_set: Standard, pad: true, line_length: Some(76)};
static STANDARD_CHARS: [char, ..64] = [
'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
@ -28,6 +48,21 @@ static CHARS: [char, ..64] = [
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/'
];
static URLSAFE_CHARS: [char, ..64] = [
'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '-', '_'
];
/// A trait for converting a value to base64 encoding.
pub trait ToBase64 {
/// Converts the value of `self` to a base64 value following the specified
/// format configuration, returning the owned string.
fn to_base64(&self, config: Config) -> ~str;
}
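
A short usage sketch of the reworked API, combining the `STANDARD` config above with the `Result`-returning `from_base64` defined later in this file (written against the 0.8-pre API shown in this diff; treat it as illustrative rather than canonical):

~~~ {.rust}
extern mod extra;

use extra::base64::{ToBase64, FromBase64, STANDARD};
use std::str;

fn main() {
    // Encode with the RFC 4648 standard alphabet and '=' padding.
    let encoded = "hello world".to_base64(STANDARD);
    println(fmt!("%s", encoded));           // aGVsbG8gd29ybGQ=
    // Decoding now returns a Result, so invalid input no longer fail!()s.
    match encoded.from_base64() {
        Ok(bytes) => println(fmt!("%s", str::from_bytes(bytes))),
        Err(msg)  => println(fmt!("decode error: %s", msg))
    }
}
~~~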
impl<'self> ToBase64 for &'self [u8] {
/**
* Turn a vector of `u8` bytes into a base64 string.
@ -36,55 +71,81 @@ impl<'self> ToBase64 for &'self [u8] {
*
* ~~~ {.rust}
* extern mod extra;
* use extra::base64::ToBase64;
* use extra::base64::{ToBase64, standard};
*
* fn main () {
* let str = [52,32].to_base64();
* let str = [52,32].to_base64(standard);
* println(fmt!("%s", str));
* }
* ~~~
*/
fn to_base64(&self) -> ~str {
fn to_base64(&self, config: Config) -> ~str {
let chars = match config.char_set {
Standard => STANDARD_CHARS,
UrlSafe => URLSAFE_CHARS
};
let mut s = ~"";
let mut i = 0;
let mut cur_length = 0;
let len = self.len();
s.reserve(((len + 3u) / 4u) * 3u);
while i < len - (len % 3) {
match config.line_length {
Some(line_length) =>
if cur_length >= line_length {
s.push_str("\r\n");
cur_length = 0;
},
None => ()
}
let mut i = 0u;
while i < len - (len % 3u) {
let n = (self[i] as uint) << 16u |
(self[i + 1u] as uint) << 8u |
(self[i + 2u] as uint);
let n = (self[i] as u32) << 16 |
(self[i + 1] as u32) << 8 |
(self[i + 2] as u32);
// This 24-bit number gets separated into four 6-bit numbers.
s.push_char(CHARS[(n >> 18u) & 63u]);
s.push_char(CHARS[(n >> 12u) & 63u]);
s.push_char(CHARS[(n >> 6u) & 63u]);
s.push_char(CHARS[n & 63u]);
s.push_char(chars[(n >> 18) & 63]);
s.push_char(chars[(n >> 12) & 63]);
s.push_char(chars[(n >> 6 ) & 63]);
s.push_char(chars[n & 63]);
i += 3u;
cur_length += 4;
i += 3;
}
if len % 3 != 0 {
match config.line_length {
Some(line_length) =>
if cur_length >= line_length {
s.push_str("\r\n");
},
None => ()
}
}
// Heh, would be cool if we knew this was exhaustive
// (the dream of bounded integer types)
match len % 3 {
0 => (),
1 => {
let n = (self[i] as uint) << 16u;
s.push_char(CHARS[(n >> 18u) & 63u]);
s.push_char(CHARS[(n >> 12u) & 63u]);
s.push_char('=');
s.push_char('=');
}
2 => {
let n = (self[i] as uint) << 16u |
(self[i + 1u] as uint) << 8u;
s.push_char(CHARS[(n >> 18u) & 63u]);
s.push_char(CHARS[(n >> 12u) & 63u]);
s.push_char(CHARS[(n >> 6u) & 63u]);
s.push_char('=');
}
_ => fail!("Algebra is broken, please alert the math police")
0 => (),
1 => {
let n = (self[i] as u32) << 16;
s.push_char(chars[(n >> 18) & 63]);
s.push_char(chars[(n >> 12) & 63]);
if config.pad {
s.push_str("==");
}
}
2 => {
let n = (self[i] as u32) << 16 |
(self[i + 1u] as u32) << 8;
s.push_char(chars[(n >> 18) & 63]);
s.push_char(chars[(n >> 12) & 63]);
s.push_char(chars[(n >> 6 ) & 63]);
if config.pad {
s.push_char('=');
}
}
_ => fail!("Algebra is broken, please alert the math police")
}
s
}
@ -99,23 +160,25 @@ impl<'self> ToBase64 for &'self str {
*
* ~~~ {.rust}
* extern mod extra;
* use extra::base64::ToBase64;
* use extra::base64::{ToBase64, standard};
*
* fn main () {
* let str = "Hello, World".to_base64();
* let str = "Hello, World".to_base64(standard);
* println(fmt!("%s",str));
* }
* ~~~
*
*/
fn to_base64(&self) -> ~str {
self.as_bytes().to_base64()
fn to_base64(&self, config: Config) -> ~str {
self.as_bytes().to_base64(config)
}
}
#[allow(missing_doc)]
/// A trait for converting from base64 encoded values.
pub trait FromBase64 {
fn from_base64(&self) -> ~[u8];
/// Converts the value of `self`, interpreted as base64 encoded data, into
/// an owned vector of bytes, returning the vector.
fn from_base64(&self) -> Result<~[u8], ~str>;
}
impl<'self> FromBase64 for &'self [u8] {
@ -127,69 +190,64 @@ impl<'self> FromBase64 for &'self [u8] {
*
* ~~~ {.rust}
* extern mod extra;
* use extra::base64::ToBase64;
* use extra::base64::FromBase64;
* use extra::base64::{ToBase64, FromBase64, standard};
*
* fn main () {
* let str = [52,32].to_base64();
* let str = [52,32].to_base64(standard);
* println(fmt!("%s", str));
* let bytes = str.from_base64();
* println(fmt!("%?",bytes));
* }
* ~~~
*/
fn from_base64(&self) -> ~[u8] {
if self.len() % 4u != 0u { fail!("invalid base64 length"); }
fn from_base64(&self) -> Result<~[u8], ~str> {
let mut r = ~[];
let mut buf: u32 = 0;
let mut modulus = 0;
let len = self.len();
let mut padding = 0u;
let mut it = self.iter();
for it.advance |&byte| {
let ch = byte as char;
let val = byte as u32;
if len != 0u {
if self[len - 1u] == '=' as u8 { padding += 1u; }
if self[len - 2u] == '=' as u8 { padding += 1u; }
match ch {
'A'..'Z' => buf |= val - 0x41,
'a'..'z' => buf |= val - 0x47,
'0'..'9' => buf |= val + 0x04,
'+'|'-' => buf |= 0x3E,
'/'|'_' => buf |= 0x3F,
'\r'|'\n' => loop,
'=' => break,
_ => return Err(~"Invalid Base64 character")
}
buf <<= 6;
modulus += 1;
if modulus == 4 {
modulus = 0;
r.push((buf >> 22) as u8);
r.push((buf >> 14) as u8);
r.push((buf >> 6 ) as u8);
}
}
let mut r = vec::with_capacity((len / 4u) * 3u - padding);
let mut i = 0u;
while i < len {
let mut n = 0u;
for 4u.times {
let ch = self[i] as char;
n <<= 6u;
match ch {
'A'..'Z' => n |= (ch as uint) - 0x41,
'a'..'z' => n |= (ch as uint) - 0x47,
'0'..'9' => n |= (ch as uint) + 0x04,
'+' => n |= 0x3E,
'/' => n |= 0x3F,
'=' => {
match len - i {
1u => {
r.push(((n >> 16u) & 0xFFu) as u8);
r.push(((n >> 8u ) & 0xFFu) as u8);
return copy r;
}
2u => {
r.push(((n >> 10u) & 0xFFu) as u8);
return copy r;
}
_ => fail!("invalid base64 padding")
}
}
_ => fail!("invalid base64 character")
}
i += 1u;
};
r.push(((n >> 16u) & 0xFFu) as u8);
r.push(((n >> 8u ) & 0xFFu) as u8);
r.push(((n ) & 0xFFu) as u8);
if !it.all(|&byte| {byte as char == '='}) {
return Err(~"Invalid Base64 character");
}
r
match modulus {
2 => {
r.push((buf >> 10) as u8);
}
3 => {
r.push((buf >> 16) as u8);
r.push((buf >> 8 ) as u8);
}
0 => (),
_ => return Err(~"Invalid Base64 length")
}
Ok(r)
}
}
@ -199,7 +257,8 @@ impl<'self> FromBase64 for &'self str {
* to the byte values it encodes.
*
* You can use the `from_bytes` function in `std::str`
* to turn a `[u8]` into a string with characters corresponding to those values.
* to turn a `[u8]` into a string with characters corresponding to those
* values.
*
* # Example
*
@ -207,12 +266,11 @@ impl<'self> FromBase64 for &'self str {
*
* ~~~ {.rust}
* extern mod extra;
* use extra::base64::ToBase64;
* use extra::base64::FromBase64;
* use extra::base64::{ToBase64, FromBase64, standard};
* use std::str;
*
* fn main () {
* let hello_str = "Hello, World".to_base64();
* let hello_str = "Hello, World".to_base64(standard);
* println(fmt!("%s",hello_str));
* let bytes = hello_str.from_base64();
* println(fmt!("%?",bytes));
@ -221,32 +279,86 @@ impl<'self> FromBase64 for &'self str {
* }
* ~~~
*/
fn from_base64(&self) -> ~[u8] {
fn from_base64(&self) -> Result<~[u8], ~str> {
self.as_bytes().from_base64()
}
}
#[cfg(test)]
mod tests {
#[test]
fn test_to_base64() {
assert_eq!("".to_base64(), ~"");
assert_eq!("f".to_base64(), ~"Zg==");
assert_eq!("fo".to_base64(), ~"Zm8=");
assert_eq!("foo".to_base64(), ~"Zm9v");
assert_eq!("foob".to_base64(), ~"Zm9vYg==");
assert_eq!("fooba".to_base64(), ~"Zm9vYmE=");
assert_eq!("foobar".to_base64(), ~"Zm9vYmFy");
}
#[test]
fn test_to_base64_basic() {
assert_eq!("".to_base64(STANDARD), ~"");
assert_eq!("f".to_base64(STANDARD), ~"Zg==");
assert_eq!("fo".to_base64(STANDARD), ~"Zm8=");
assert_eq!("foo".to_base64(STANDARD), ~"Zm9v");
assert_eq!("foob".to_base64(STANDARD), ~"Zm9vYg==");
assert_eq!("fooba".to_base64(STANDARD), ~"Zm9vYmE=");
assert_eq!("foobar".to_base64(STANDARD), ~"Zm9vYmFy");
}
#[test]
fn test_from_base64() {
assert_eq!("".from_base64(), "".as_bytes().to_owned());
assert_eq!("Zg==".from_base64(), "f".as_bytes().to_owned());
assert_eq!("Zm8=".from_base64(), "fo".as_bytes().to_owned());
assert_eq!("Zm9v".from_base64(), "foo".as_bytes().to_owned());
assert_eq!("Zm9vYg==".from_base64(), "foob".as_bytes().to_owned());
assert_eq!("Zm9vYmE=".from_base64(), "fooba".as_bytes().to_owned());
assert_eq!("Zm9vYmFy".from_base64(), "foobar".as_bytes().to_owned());
#[test]
fn test_to_base64_line_break() {
assert!(![0u8, 1000].to_base64(Config {line_length: None, ..STANDARD})
.contains("\r\n"));
assert_eq!("foobar".to_base64(Config {line_length: Some(4), ..STANDARD}),
~"Zm9v\r\nYmFy");
}
#[test]
fn test_to_base64_padding() {
assert_eq!("f".to_base64(Config {pad: false, ..STANDARD}), ~"Zg");
assert_eq!("fo".to_base64(Config {pad: false, ..STANDARD}), ~"Zm8");
}
#[test]
fn test_to_base64_url_safe() {
assert_eq!([251, 255].to_base64(URL_SAFE), ~"-_8");
assert_eq!([251, 255].to_base64(STANDARD), ~"+/8=");
}
#[test]
fn test_from_base64_basic() {
assert_eq!("".from_base64().get(), "".as_bytes().to_owned());
assert_eq!("Zg==".from_base64().get(), "f".as_bytes().to_owned());
assert_eq!("Zm8=".from_base64().get(), "fo".as_bytes().to_owned());
assert_eq!("Zm9v".from_base64().get(), "foo".as_bytes().to_owned());
assert_eq!("Zm9vYg==".from_base64().get(), "foob".as_bytes().to_owned());
assert_eq!("Zm9vYmE=".from_base64().get(), "fooba".as_bytes().to_owned());
assert_eq!("Zm9vYmFy".from_base64().get(), "foobar".as_bytes().to_owned());
}
#[test]
fn test_from_base64_newlines() {
assert_eq!("Zm9v\r\nYmFy".from_base64().get(),
"foobar".as_bytes().to_owned());
}
#[test]
fn test_from_base64_urlsafe() {
assert_eq!("-_8".from_base64().get(), "+/8=".from_base64().get());
}
#[test]
fn test_from_base64_invalid_char() {
assert!("Zm$=".from_base64().is_err())
assert!("Zg==$".from_base64().is_err());
}
#[test]
fn test_from_base64_invalid_padding() {
assert!("Z===".from_base64().is_err());
}
#[test]
fn test_base64_random() {
use std::rand::{task_rng, random, RngUtil};
use std::vec;
for 1000.times {
let v: ~[u8] = do vec::build |push| {
for task_rng().gen_uint_range(1, 100).times {
push(random());
}
};
assert_eq!(v.to_base64(STANDARD).from_base64().get(), v);
}
}

View file

@ -872,7 +872,7 @@ mod tests {
use std::rand;
use std::rand::Rng;
static bench_bits : uint = 1 << 14;
static BENCH_BITS : uint = 1 << 14;
#[test]
fn test_to_str() {
@ -1452,19 +1452,19 @@ mod tests {
fn bench_big_bitv_big(b: &mut BenchHarness) {
let mut r = rng();
let mut storage = ~[];
storage.grow(bench_bits / uint::bits, &0);
storage.grow(BENCH_BITS / uint::bits, &0);
let mut bitv = BigBitv::new(storage);
do b.iter {
bitv.set((r.next() as uint) % bench_bits, true);
bitv.set((r.next() as uint) % BENCH_BITS, true);
}
}
#[bench]
fn bench_bitv_big(b: &mut BenchHarness) {
let mut r = rng();
let mut bitv = Bitv::new(bench_bits, false);
let mut bitv = Bitv::new(BENCH_BITS, false);
do b.iter {
bitv.set((r.next() as uint) % bench_bits, true);
bitv.set((r.next() as uint) % BENCH_BITS, true);
}
}
@ -1491,14 +1491,14 @@ mod tests {
let mut r = rng();
let mut bitv = BitvSet::new();
do b.iter {
bitv.insert((r.next() as uint) % bench_bits);
bitv.insert((r.next() as uint) % BENCH_BITS);
}
}
#[bench]
fn bench_bitv_big_union(b: &mut BenchHarness) {
let mut b1 = Bitv::new(bench_bits, false);
let b2 = Bitv::new(bench_bits, false);
let mut b1 = Bitv::new(BENCH_BITS, false);
let b2 = Bitv::new(BENCH_BITS, false);
do b.iter {
b1.union(&b2);
}

View file

@ -240,7 +240,6 @@ impl Digest for Sha1 {
#[cfg(test)]
mod tests {
use std::vec;
use digest::{Digest, DigestUtil};
use sha1::Sha1;
@ -337,7 +336,7 @@ mod tests {
for tests.iter().advance |t| {
(*sh).input_str(t.input);
sh.result(out);
assert!(vec::eq(t.output, out));
assert!(t.output.as_slice() == out);
let out_str = (*sh).result_str();
assert_eq!(out_str.len(), 40);
@ -357,7 +356,7 @@ mod tests {
left = left - take;
}
sh.result(out);
assert!(vec::eq(t.output, out));
assert!(t.output.as_slice() == out);
let out_str = (*sh).result_str();
assert_eq!(out_str.len(), 40);

View file

@ -11,17 +11,17 @@
//! A double-ended queue implemented as a circular buffer
use std::uint;
use std::util::replace;
use std::vec;
use std::cast::transmute;
use std::iterator::FromIterator;
static initial_capacity: uint = 32u; // 2^5
static INITIAL_CAPACITY: uint = 8u; // 2^3
static MINIMUM_CAPACITY: uint = 2u;
#[allow(missing_doc)]
#[deriving(Clone)]
pub struct Deque<T> {
priv nelts: uint,
priv lo: uint,
priv hi: uint,
priv elts: ~[Option<T>]
}
@ -39,26 +39,36 @@ impl<T> Mutable for Deque<T> {
for self.elts.mut_iter().advance |x| { *x = None }
self.nelts = 0;
self.lo = 0;
self.hi = 0;
}
}
impl<T> Deque<T> {
/// Create an empty Deque
pub fn new() -> Deque<T> {
Deque{nelts: 0, lo: 0, hi: 0,
elts: vec::from_fn(initial_capacity, |_| None)}
Deque::with_capacity(INITIAL_CAPACITY)
}
/// Create an empty Deque with space for at least `n` elements.
pub fn with_capacity(n: uint) -> Deque<T> {
Deque{nelts: 0, lo: 0,
elts: vec::from_fn(uint::max(MINIMUM_CAPACITY, n), |_| None)}
}
/// Return a reference to the first element in the deque
///
/// Fails if the deque is empty
pub fn peek_front<'a>(&'a self) -> &'a T { get(self.elts, self.lo) }
pub fn peek_front<'a>(&'a self) -> &'a T { get(self.elts, self.raw_index(0)) }
/// Return a reference to the last element in the deque
///
/// Fails if the deque is empty
pub fn peek_back<'a>(&'a self) -> &'a T { get(self.elts, self.hi - 1u) }
pub fn peek_back<'a>(&'a self) -> &'a T {
if self.nelts > 0 {
get(self.elts, self.raw_index(self.nelts - 1))
} else {
fail!("peek_back: empty deque");
}
}
/// Retrieve an element in the deque by index
///
@ -68,16 +78,6 @@ impl<T> Deque<T> {
get(self.elts, idx)
}
/// Iterate over the elements in the deque
pub fn each(&self, f: &fn(&T) -> bool) -> bool {
self.eachi(|_i, e| f(e))
}
/// Iterate over the elements in the deque by index
pub fn eachi(&self, f: &fn(uint, &T) -> bool) -> bool {
uint::range(0, self.nelts, |i| f(i, self.get(i as int)))
}
/// Remove and return the first element in the deque
///
/// Fails if the deque is empty
@ -88,43 +88,39 @@ impl<T> Deque<T> {
result
}
/// Return index in underlying vec for a given logical element index
fn raw_index(&self, idx: uint) -> uint {
raw_index(self.lo, self.elts.len(), idx)
}
/// Remove and return the last element in the deque
///
/// Fails if the deque is empty
pub fn pop_back(&mut self) -> T {
if self.hi == 0u {
self.hi = self.elts.len() - 1u;
} else { self.hi -= 1u; }
let result = self.elts[self.hi].swap_unwrap();
self.elts[self.hi] = None;
self.nelts -= 1u;
result
self.nelts -= 1;
let hi = self.raw_index(self.nelts);
self.elts[hi].swap_unwrap()
}
/// Prepend an element to the deque
pub fn add_front(&mut self, t: T) {
let oldlo = self.lo;
if self.nelts == self.elts.len() {
grow(self.nelts, &mut self.lo, &mut self.elts);
}
if self.lo == 0u {
self.lo = self.elts.len() - 1u;
} else { self.lo -= 1u; }
if self.lo == self.hi {
self.elts = grow(self.nelts, oldlo, self.elts);
self.lo = self.elts.len() - 1u;
self.hi = self.nelts;
}
self.elts[self.lo] = Some(t);
self.nelts += 1u;
}
/// Append an element to the deque
pub fn add_back(&mut self, t: T) {
if self.lo == self.hi && self.nelts != 0u {
self.elts = grow(self.nelts, self.lo, self.elts);
self.lo = 0u;
self.hi = self.nelts;
if self.nelts == self.elts.len() {
grow(self.nelts, &mut self.lo, &mut self.elts);
}
self.elts[self.hi] = Some(t);
self.hi = (self.hi + 1u) % self.elts.len();
let hi = self.raw_index(self.nelts);
self.elts[hi] = Some(t);
self.nelts += 1u;
}
@ -155,42 +151,39 @@ impl<T> Deque<T> {
/// Front-to-back iterator.
pub fn iter<'a>(&'a self) -> DequeIterator<'a, T> {
DequeIterator { idx: self.lo, nelts: self.nelts, used: 0, vec: self.elts }
DequeIterator{index: 0, nelts: self.nelts, elts: self.elts, lo: self.lo}
}
/// Front-to-back iterator which returns mutable values.
pub fn mut_iter<'a>(&'a mut self) -> DequeMutIterator<'a, T> {
DequeMutIterator { idx: self.lo, nelts: self.nelts, used: 0, vec: self.elts }
DequeMutIterator{index: 0, nelts: self.nelts, elts: self.elts, lo: self.lo}
}
/// Back-to-front iterator.
pub fn rev_iter<'a>(&'a self) -> DequeRevIterator<'a, T> {
DequeRevIterator { idx: self.hi - 1u, nelts: self.nelts, used: 0, vec: self.elts }
DequeRevIterator{index: self.nelts-1, nelts: self.nelts, elts: self.elts,
lo: self.lo}
}
/// Back-to-front iterator which returns mutable values.
pub fn mut_rev_iter<'a>(&'a mut self) -> DequeMutRevIterator<'a, T> {
DequeMutRevIterator { idx: self.hi - 1u, nelts: self.nelts, used: 0, vec: self.elts }
DequeMutRevIterator{index: self.nelts-1, nelts: self.nelts, elts: self.elts,
lo: self.lo}
}
}
macro_rules! iterator {
(impl $name:ident -> $elem:ty, $step:expr) => {
(impl $name:ident -> $elem:ty, $getter:ident, $step:expr) => {
impl<'self, T> Iterator<$elem> for $name<'self, T> {
#[inline]
fn next(&mut self) -> Option<$elem> {
if self.used >= self.nelts {
if self.nelts == 0 {
return None;
}
let ret = unsafe {
match self.vec[self.idx % self.vec.len()] {
Some(ref e) => Some(transmute(e)),
None => None
}
};
self.idx += $step;
self.used += 1;
ret
let raw_index = raw_index(self.lo, self.elts.len(), self.index);
self.index += $step;
self.nelts -= 1;
Some(self.elts[raw_index]. $getter ())
}
}
}
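
Since the rewritten macro is easiest to read through a concrete instantiation, here is roughly what `iterator!{impl DequeIterator -> &'self T, get_ref, 1}` expands to (a sketch by hand, not compiler output):

~~~ {.rust}
impl<'self, T> Iterator<&'self T> for DequeIterator<'self, T> {
    #[inline]
    fn next(&mut self) -> Option<&'self T> {
        if self.nelts == 0 {
            return None;
        }
        // Translate the logical index into a position in the circular buffer,
        // then step forward and shrink the remaining-element count.
        let raw_index = raw_index(self.lo, self.elts.len(), self.index);
        self.index += 1;
        self.nelts -= 1;
        Some(self.elts[raw_index].get_ref())
    }
}
~~~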
@ -198,64 +191,115 @@ macro_rules! iterator {
/// Deque iterator
pub struct DequeIterator<'self, T> {
priv idx: uint,
priv lo: uint,
priv nelts: uint,
priv used: uint,
priv vec: &'self [Option<T>]
priv index: uint,
priv elts: &'self [Option<T>],
}
iterator!{impl DequeIterator -> &'self T, 1}
iterator!{impl DequeIterator -> &'self T, get_ref, 1}
/// Deque reverse iterator
pub struct DequeRevIterator<'self, T> {
priv idx: uint,
priv lo: uint,
priv nelts: uint,
priv used: uint,
priv vec: &'self [Option<T>]
priv index: uint,
priv elts: &'self [Option<T>],
}
iterator!{impl DequeRevIterator -> &'self T, -1}
iterator!{impl DequeRevIterator -> &'self T, get_ref, -1}
/// Deque mutable iterator
pub struct DequeMutIterator<'self, T> {
priv idx: uint,
priv lo: uint,
priv nelts: uint,
priv used: uint,
priv vec: &'self mut [Option<T>]
priv index: uint,
priv elts: &'self mut [Option<T>],
}
iterator!{impl DequeMutIterator -> &'self mut T, 1}
iterator!{impl DequeMutIterator -> &'self mut T, get_mut_ref, 1}
/// Deque mutable reverse iterator
pub struct DequeMutRevIterator<'self, T> {
priv idx: uint,
priv lo: uint,
priv nelts: uint,
priv used: uint,
priv vec: &'self mut [Option<T>]
priv index: uint,
priv elts: &'self mut [Option<T>],
}
iterator!{impl DequeMutRevIterator -> &'self mut T, -1}
iterator!{impl DequeMutRevIterator -> &'self mut T, get_mut_ref, -1}
/// Grow is only called on full elts, so nelts is also len(elts), unlike
/// elsewhere.
fn grow<T>(nelts: uint, lo: uint, elts: &mut [Option<T>]) -> ~[Option<T>] {
fn grow<T>(nelts: uint, loptr: &mut uint, elts: &mut ~[Option<T>]) {
assert_eq!(nelts, elts.len());
let mut rv = ~[];
let lo = *loptr;
let newlen = nelts * 2;
elts.reserve(newlen);
do rv.grow_fn(nelts + 1) |i| {
replace(&mut elts[(lo + i) % nelts], None)
/* fill with None */
for uint::range(elts.len(), elts.capacity()) |_| {
elts.push(None);
}
rv
/*
Move the shortest half into the newly reserved area.
lo ---->|
nelts ----------->|
[o o o|o o o o o]
A [. . .|o o o o o o o o|. . . . .]
B [o o o|. . . . . . . .|o o o o o]
*/
assert!(newlen - nelts/2 >= nelts);
if lo <= (nelts - lo) { // A
for uint::range(0, lo) |i| {
elts.swap(i, nelts + i);
}
} else { // B
for uint::range(lo, nelts) |i| {
elts.swap(i, newlen - nelts + i);
}
*loptr += newlen - nelts;
}
}
fn get<'r, T>(elts: &'r [Option<T>], i: uint) -> &'r T {
match elts[i] { Some(ref t) => t, _ => fail!() }
}
/// Return index in underlying vec for a given logical element index
fn raw_index(lo: uint, len: uint, index: uint) -> uint {
if lo >= len - index {
lo + index - len
} else {
lo + index
}
}
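
To make the wrap-around arithmetic concrete, a worked example with invented values (lo = 6 in a backing vector of length 8), as it would read inside this module:

~~~ {.rust}
// Logical index 3 wraps past the end of the buffer:
//   lo >= len - index  =>  6 >= 8 - 3  =>  true
//   raw index = lo + index - len = 6 + 3 - 8 = 1
assert_eq!(raw_index(6, 8, 3), 1);
// Logical index 1 stays within the tail of the buffer:
//   6 >= 8 - 1 is false, so raw index = lo + index = 7
assert_eq!(raw_index(6, 8, 1), 7);
~~~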
impl<A: Eq> Eq for Deque<A> {
fn eq(&self, other: &Deque<A>) -> bool {
self.nelts == other.nelts &&
self.iter().zip(other.iter()).all(|(a, b)| a.eq(b))
}
fn ne(&self, other: &Deque<A>) -> bool {
!self.eq(other)
}
}
impl<A, T: Iterator<A>> FromIterator<A, T> for Deque<A> {
fn from_iterator(iterator: &mut T) -> Deque<A> {
let mut deq = Deque::new();
for iterator.advance |elt| {
deq.add_back(elt);
}
deq
}
}
#[cfg(test)]
mod tests {
use super::*;
use std::cmp::Eq;
use std::kinds::Copy;
use std::int;
use std::{int, uint};
use extra::test;
#[test]
fn test_simple() {
@ -369,6 +413,61 @@ mod tests {
assert_eq!(copy *deq.get(3), copy d);
}
#[test]
fn test_add_front_grow() {
let mut deq = Deque::new();
for int::range(0, 66) |i| {
deq.add_front(i);
}
assert_eq!(deq.len(), 66);
for int::range(0, 66) |i| {
assert_eq!(*deq.get(i), 65 - i);
}
let mut deq = Deque::new();
for int::range(0, 66) |i| {
deq.add_back(i);
}
for int::range(0, 66) |i| {
assert_eq!(*deq.get(i), i);
}
}
#[bench]
fn bench_new(b: &mut test::BenchHarness) {
do b.iter {
let _ = Deque::new::<u64>();
}
}
#[bench]
fn bench_add_back(b: &mut test::BenchHarness) {
let mut deq = Deque::new();
do b.iter {
deq.add_back(0);
}
}
#[bench]
fn bench_add_front(b: &mut test::BenchHarness) {
let mut deq = Deque::new();
do b.iter {
deq.add_front(0);
}
}
#[bench]
fn bench_grow(b: &mut test::BenchHarness) {
let mut deq = Deque::new();
do b.iter {
for 65.times {
deq.add_front(1);
}
}
}
#[deriving(Eq)]
enum Taggy { One(int), Two(int, int), Three(int, int, int), }
@ -417,22 +516,13 @@ mod tests {
}
#[test]
fn test_eachi() {
let mut deq = Deque::new();
deq.add_back(1);
deq.add_back(2);
deq.add_back(3);
for deq.eachi |i, e| {
assert_eq!(*e, i + 1);
}
deq.pop_front();
for deq.eachi |i, e| {
assert_eq!(*e, i + 2);
}
fn test_with_capacity() {
let mut d = Deque::with_capacity(0);
d.add_back(1);
assert_eq!(d.len(), 1);
let mut d = Deque::with_capacity(50);
d.add_back(1);
assert_eq!(d.len(), 1);
}
#[test]
@ -462,6 +552,8 @@ mod tests {
#[test]
fn test_iter() {
let mut d = Deque::new();
assert_eq!(d.iter().next(), None);
for int::range(0,5) |i| {
d.add_back(i);
}
@ -476,6 +568,8 @@ mod tests {
#[test]
fn test_rev_iter() {
let mut d = Deque::new();
assert_eq!(d.rev_iter().next(), None);
for int::range(0,5) |i| {
d.add_back(i);
}
@ -486,4 +580,104 @@ mod tests {
}
assert_eq!(d.rev_iter().collect::<~[&int]>(), ~[&4,&3,&2,&1,&0,&6,&7,&8]);
}
#[test]
fn test_mut_iter() {
let mut d = Deque::new();
assert!(d.mut_iter().next().is_none());
for uint::range(0,3) |i| {
d.add_front(i);
}
for d.mut_iter().enumerate().advance |(i, elt)| {
assert_eq!(*elt, 2 - i);
*elt = i;
}
{
let mut it = d.mut_iter();
assert_eq!(*it.next().unwrap(), 0);
assert_eq!(*it.next().unwrap(), 1);
assert_eq!(*it.next().unwrap(), 2);
assert!(it.next().is_none());
}
}
#[test]
fn test_mut_rev_iter() {
let mut d = Deque::new();
assert!(d.mut_rev_iter().next().is_none());
for uint::range(0,3) |i| {
d.add_front(i);
}
for d.mut_rev_iter().enumerate().advance |(i, elt)| {
assert_eq!(*elt, i);
*elt = i;
}
{
let mut it = d.mut_rev_iter();
assert_eq!(*it.next().unwrap(), 0);
assert_eq!(*it.next().unwrap(), 1);
assert_eq!(*it.next().unwrap(), 2);
assert!(it.next().is_none());
}
}
#[test]
fn test_from_iterator() {
use std::iterator;
let v = ~[1,2,3,4,5,6,7];
let deq: Deque<int> = v.iter().transform(|&x| x).collect();
let u: ~[int] = deq.iter().transform(|&x| x).collect();
assert_eq!(u, v);
let mut seq = iterator::Counter::new(0u, 2).take_(256);
let deq: Deque<uint> = seq.collect();
for deq.iter().enumerate().advance |(i, &x)| {
assert_eq!(2*i, x);
}
assert_eq!(deq.len(), 256);
}
#[test]
fn test_clone() {
let mut d = Deque::new();
d.add_front(17);
d.add_front(42);
d.add_back(137);
d.add_back(137);
assert_eq!(d.len(), 4u);
let mut e = d.clone();
assert_eq!(e.len(), 4u);
while !d.is_empty() {
assert_eq!(d.pop_back(), e.pop_back());
}
assert_eq!(d.len(), 0u);
assert_eq!(e.len(), 0u);
}
#[test]
fn test_eq() {
let mut d = Deque::new();
assert_eq!(&d, &Deque::with_capacity(0));
d.add_front(137);
d.add_front(17);
d.add_front(42);
d.add_back(137);
let mut e = Deque::with_capacity(0);
e.add_back(42);
e.add_back(17);
e.add_back(137);
e.add_back(137);
assert_eq!(&e, &d);
e.pop_back();
e.add_back(0);
assert!(e != d);
e.clear();
assert_eq!(e, Deque::new());
}
}

View file

@ -748,7 +748,7 @@ pub mod writer {
// Set to true to generate more debugging in EBML code.
// Totally lame approach.
static debug: bool = true;
static DEBUG: bool = true;
impl Encoder {
// used internally to emit things like the vector length and so on
@ -764,7 +764,7 @@ pub mod writer {
// efficiency. When debugging, though, we can emit such
// labels and then they will be checked by decoder to
// try and check failures more quickly.
if debug { self.wr_tagged_str(EsLabel as uint, label) }
if DEBUG { self.wr_tagged_str(EsLabel as uint, label) }
}
}

View file

@ -21,7 +21,7 @@ Rust extras are part of the standard Rust distribution.
*/
#[link(name = "extra",
vers = "0.7",
vers = "0.8-pre",
uuid = "122bed0b-c19b-4b82-b0b7-7ae8aead7297",
url = "https://github.com/mozilla/rust/tree/master/src/libextra")];
@ -40,10 +40,8 @@ pub mod uv_ll;
// General io and system-services modules
#[path = "net/mod.rs"]
pub mod net;
pub mod net_ip;
pub mod net_tcp;
pub mod net_url;
// libuv modules
pub mod uv;
@ -74,7 +72,6 @@ pub mod deque;
pub mod fun_treemap;
pub mod list;
pub mod priority_queue;
pub mod rope;
pub mod smallintmap;
pub mod sort;

View file

@ -39,20 +39,20 @@ pub mod rustrt {
}
}
static lz_none : c_int = 0x0; // Huffman-coding only.
static lz_fast : c_int = 0x1; // LZ with only one probe
static lz_norm : c_int = 0x80; // LZ with 128 probes, "normal"
static lz_best : c_int = 0xfff; // LZ with 4095 probes, "best"
static LZ_NONE : c_int = 0x0; // Huffman-coding only.
static LZ_FAST : c_int = 0x1; // LZ with only one probe
static LZ_NORM : c_int = 0x80; // LZ with 128 probes, "normal"
static LZ_BEST : c_int = 0xfff; // LZ with 4095 probes, "best"
pub fn deflate_bytes(bytes: &[u8]) -> ~[u8] {
do vec::as_imm_buf(bytes) |b, len| {
do bytes.as_imm_buf |b, len| {
unsafe {
let mut outsz : size_t = 0;
let res =
rustrt::tdefl_compress_mem_to_heap(b as *c_void,
len as size_t,
&mut outsz,
lz_norm);
LZ_NORM);
assert!(res as int != 0);
let out = vec::raw::from_buf_raw(res as *u8,
outsz as uint);
@ -63,7 +63,7 @@ pub fn deflate_bytes(bytes: &[u8]) -> ~[u8] {
}
pub fn inflate_bytes(bytes: &[u8]) -> ~[u8] {
do vec::as_imm_buf(bytes) |b, len| {
do bytes.as_imm_buf |b, len| {
unsafe {
let mut outsz : size_t = 0;
let res =

View file

@ -55,7 +55,6 @@ use std::io;
use std::comm::GenericChan;
use std::comm::GenericPort;
use std::sys::size_of;
use std::vec;
/**
A FlatPort, consisting of a `BytePort` that receives byte vectors,
@ -274,7 +273,7 @@ impl<T,U:Unflattener<T>,P:BytePort> GenericPort<T> for FlatPort<T, U, P> {
}
};
if vec::eq(command, CONTINUE) {
if CONTINUE.as_slice() == command {
let msg_len = match self.byte_port.try_recv(size_of::<u64>()) {
Some(bytes) => {
io::u64_from_be_bytes(bytes, 0, size_of::<u64>())
@ -931,7 +930,7 @@ mod test {
fn test_try_recv_none3<P:BytePort>(loader: PortLoader<P>) {
static CONTINUE: [u8, ..4] = [0xAA, 0xBB, 0xCC, 0xDD];
// The control word is followed by garbage
let bytes = CONTINUE.to_owned() + [0];
let bytes = CONTINUE.to_owned() + &[0u8];
let port = loader(bytes);
let res: Option<int> = port.try_recv();
assert!(res.is_none());
@ -955,7 +954,7 @@ mod test {
1, sys::size_of::<u64>()) |len_bytes| {
len_bytes.to_owned()
};
let bytes = CONTINUE.to_owned() + len_bytes + [0, 0, 0, 0];
let bytes = CONTINUE.to_owned() + len_bytes + &[0u8, 0, 0, 0];
let port = loader(bytes);

View file

@ -176,7 +176,7 @@ fn name_str(nm: &Name) -> ~str {
}
fn find_opt(opts: &[Opt], nm: Name) -> Option<uint> {
opts.iter().position_(|opt| opt.name == nm)
opts.iter().position(|opt| opt.name == nm)
}
/**

src/libextra/iter.rs Normal file (328 lines added)
View file

@ -0,0 +1,328 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*! Composable internal iterators
Internal iterators are functions implementing the protocol used by the `for` loop.
An internal iterator takes `fn(...) -> bool` as a parameter, with returning `false` used to signal
breaking out of iteration. The adaptors in the module work with any such iterator, not just ones
tied to specific traits. For example:
~~~ {.rust}
println(iter::to_vec(|f| uint::range(0, 20, f)).to_str());
~~~
An external iterator object implementing the interface in the `iterator` module can be used as an
internal iterator by calling the `advance` method. For example:
~~~ {.rust}
let xs = [0u, 1, 2, 3, 4, 5];
let ys = [30, 40, 50, 60];
let mut it = xs.iter().chain(ys.iter());
for it.advance |&x: &uint| {
println(x.to_str());
}
~~~
Internal iterators provide a subset of the functionality of an external iterator. It's not possible
to interleave them to implement algorithms like `zip`, `union` and `merge`. However, they're often
much easier to implement.
*/
use std::vec;
use std::cmp::Ord;
use std::option::{Option, Some, None};
use std::num::{One, Zero};
use std::ops::{Add, Mul};
#[allow(missing_doc)]
pub trait FromIter<T> {
/// Build a container with elements from an internal iterator.
///
/// # Example:
///
/// ~~~ {.rust}
/// let xs = ~[1, 2, 3];
/// let ys: ~[int] = do FromIter::from_iter |f| { xs.iter().advance(|x| f(*x)) };
/// assert_eq!(xs, ys);
/// ~~~
pub fn from_iter(iter: &fn(f: &fn(T) -> bool) -> bool) -> Self;
}
/**
* Return true if `predicate` is true for any values yielded by an internal iterator.
*
* Example:
*
* ~~~ {.rust}
* let xs = ~[1u, 2, 3, 4, 5];
* assert!(any(|&x: &uint| x > 2, |f| xs.iter().advance(f)));
* assert!(!any(|&x: &uint| x > 5, |f| xs.iter().advance(f)));
* ~~~
*/
#[inline]
pub fn any<T>(predicate: &fn(T) -> bool,
iter: &fn(f: &fn(T) -> bool) -> bool) -> bool {
for iter |x| {
if predicate(x) {
return true;
}
}
return false;
}
/**
* Return true if `predicate` is true for all values yielded by an internal iterator.
*
* # Example:
*
* ~~~ {.rust}
* assert!(all(|x: uint| x < 6, |f| uint::range(1, 6, f)));
* assert!(!all(|x: uint| x < 5, |f| uint::range(1, 6, f)));
* ~~~
*/
#[inline]
pub fn all<T>(predicate: &fn(T) -> bool,
iter: &fn(f: &fn(T) -> bool) -> bool) -> bool {
// If we ever break, iter will return false, so this will only return true
// if predicate returns true for everything.
iter(|x| predicate(x))
}
/**
* Return the first element where `predicate` returns `true`. Return `None` if no element is found.
*
* # Example:
*
* ~~~ {.rust}
* let xs = ~[1u, 2, 3, 4, 5, 6];
* assert_eq!(*find(|& &x: & &uint| x > 3, |f| xs.iter().advance(f)).unwrap(), 4);
* ~~~
*/
#[inline]
pub fn find<T>(predicate: &fn(&T) -> bool,
iter: &fn(f: &fn(T) -> bool) -> bool) -> Option<T> {
for iter |x| {
if predicate(&x) {
return Some(x);
}
}
None
}
/**
* Return the largest item yielded by an iterator. Return `None` if the iterator is empty.
*
* # Example:
*
* ~~~ {.rust}
* let xs = ~[8, 2, 3, 1, -5, 9, 11, 15];
* assert_eq!(max(|f| xs.iter().advance(f)).unwrap(), &15);
* ~~~
*/
#[inline]
pub fn max<T: Ord>(iter: &fn(f: &fn(T) -> bool) -> bool) -> Option<T> {
let mut result = None;
for iter |x| {
match result {
Some(ref mut y) => {
if x > *y {
*y = x;
}
}
None => result = Some(x)
}
}
result
}
/**
* Return the smallest item yielded by an iterator. Return `None` if the iterator is empty.
*
* # Example:
*
* ~~~ {.rust}
* let xs = ~[8, 2, 3, 1, -5, 9, 11, 15];
* assert_eq!(min(|f| xs.iter().advance(f)).unwrap(), &-5);
* ~~~
*/
#[inline]
pub fn min<T: Ord>(iter: &fn(f: &fn(T) -> bool) -> bool) -> Option<T> {
let mut result = None;
for iter |x| {
match result {
Some(ref mut y) => {
if x < *y {
*y = x;
}
}
None => result = Some(x)
}
}
result
}
/**
* Reduce an iterator to an accumulated value.
*
* # Example:
*
* ~~~ {.rust}
* assert_eq!(fold(0i, |f| int::range(1, 5, f), |a, x| *a += x), 10);
* ~~~
*/
#[inline]
pub fn fold<T, U>(start: T, iter: &fn(f: &fn(U) -> bool) -> bool, f: &fn(&mut T, U)) -> T {
let mut result = start;
for iter |x| {
f(&mut result, x);
}
result
}
/**
* Reduce an iterator to an accumulated value.
*
* `fold_ref` is usable in some generic functions where `fold` is too lenient to type-check, but it
* forces the iterator to yield borrowed pointers.
*
* # Example:
*
* ~~~ {.rust}
* fn product<T: One + Mul<T, T>>(iter: &fn(f: &fn(&T) -> bool) -> bool) -> T {
* fold_ref(One::one::<T>(), iter, |a, x| *a = a.mul(x))
* }
* ~~~
*/
#[inline]
pub fn fold_ref<T, U>(start: T, iter: &fn(f: &fn(&U) -> bool) -> bool, f: &fn(&mut T, &U)) -> T {
let mut result = start;
for iter |x| {
f(&mut result, x);
}
result
}
/**
* Return the sum of the items yielded by an iterator.
*
* # Example:
*
* ~~~ {.rust}
* let xs: ~[int] = ~[1, 2, 3, 4];
* assert_eq!(do sum |f| { xs.iter().advance(f) }, 10);
* ~~~
*/
#[inline]
pub fn sum<T: Zero + Add<T, T>>(iter: &fn(f: &fn(&T) -> bool) -> bool) -> T {
fold_ref(Zero::zero::<T>(), iter, |a, x| *a = a.add(x))
}
/**
* Return the product of the items yielded by an iterator.
*
* # Example:
*
* ~~~ {.rust}
* let xs: ~[int] = ~[1, 2, 3, 4];
* assert_eq!(do product |f| { xs.iter().advance(f) }, 24);
* ~~~
*/
#[inline]
pub fn product<T: One + Mul<T, T>>(iter: &fn(f: &fn(&T) -> bool) -> bool) -> T {
fold_ref(One::one::<T>(), iter, |a, x| *a = a.mul(x))
}
impl<T> FromIter<T> for ~[T]{
#[inline]
pub fn from_iter(iter: &fn(f: &fn(T) -> bool) -> bool) -> ~[T] {
let mut v = ~[];
for iter |x| { v.push(x) }
v
}
}
#[cfg(test)]
mod tests {
use super::*;
use prelude::*;
use int;
use uint;
#[test]
fn test_from_iter() {
let xs = ~[1, 2, 3];
let ys: ~[int] = do FromIter::from_iter |f| { xs.iter().advance(|x| f(*x)) };
assert_eq!(xs, ys);
}
#[test]
fn test_any() {
let xs = ~[1u, 2, 3, 4, 5];
assert!(any(|&x: &uint| x > 2, |f| xs.iter().advance(f)));
assert!(!any(|&x: &uint| x > 5, |f| xs.iter().advance(f)));
}
#[test]
fn test_all() {
assert!(all(|x: uint| x < 6, |f| uint::range(1, 6, f)));
assert!(!all(|x: uint| x < 5, |f| uint::range(1, 6, f)));
}
#[test]
fn test_find() {
let xs = ~[1u, 2, 3, 4, 5, 6];
assert_eq!(*find(|& &x: & &uint| x > 3, |f| xs.iter().advance(f)).unwrap(), 4);
}
#[test]
fn test_max() {
let xs = ~[8, 2, 3, 1, -5, 9, 11, 15];
assert_eq!(max(|f| xs.iter().advance(f)).unwrap(), &15);
}
#[test]
fn test_min() {
let xs = ~[8, 2, 3, 1, -5, 9, 11, 15];
assert_eq!(min(|f| xs.iter().advance(f)).unwrap(), &-5);
}
#[test]
fn test_fold() {
assert_eq!(fold(0i, |f| int::range(1, 5, f), |a, x| *a += x), 10);
}
#[test]
fn test_sum() {
let xs: ~[int] = ~[1, 2, 3, 4];
assert_eq!(do sum |f| { xs.iter().advance(f) }, 10);
}
#[test]
fn test_empty_sum() {
let xs: ~[int] = ~[];
assert_eq!(do sum |f| { xs.iter().advance(f) }, 0);
}
#[test]
fn test_product() {
let xs: ~[int] = ~[1, 2, 3, 4];
assert_eq!(do product |f| { xs.iter().advance(f) }, 24);
}
#[test]
fn test_empty_product() {
let xs: ~[int] = ~[];
assert_eq!(do product |f| { xs.iter().advance(f) }, 1);
}
}

View file

@ -24,7 +24,6 @@ use std::io::{WriterUtil, ReaderUtil};
use std::io;
use std::str;
use std::to_str;
use std::vec;
use serialize::Encodable;
use serialize;
@ -482,9 +481,13 @@ pub fn to_pretty_str(json: &Json) -> ~str {
io::with_str_writer(|wr| to_pretty_writer(wr, json))
}
static BUF_SIZE : uint = 64000;
#[allow(missing_doc)]
pub struct Parser {
priv rdr: @io::Reader,
priv buf: ~[char],
priv buf_idx: uint,
priv ch: char,
priv line: uint,
priv col: uint,
@ -492,12 +495,16 @@ pub struct Parser {
/// Decode a json value from an io::reader
pub fn Parser(rdr: @io::Reader) -> Parser {
Parser {
let mut p = Parser {
rdr: rdr,
ch: rdr.read_char(),
buf: rdr.read_chars(BUF_SIZE),
buf_idx: 0,
ch: 0 as char,
line: 1,
col: 1,
}
col: 0,
};
p.bump();
p
}
impl Parser {
@ -522,13 +529,26 @@ impl Parser {
fn eof(&self) -> bool { self.ch == -1 as char }
fn bump(&mut self) {
self.ch = self.rdr.read_char();
if self.eof() {
return;
}
self.col += 1u;
if self.buf_idx >= self.buf.len() {
self.buf = self.rdr.read_chars(BUF_SIZE);
if self.buf.len() == 0 {
self.ch = -1 as char;
return;
}
self.buf_idx = 0;
}
self.ch = self.buf[self.buf_idx];
self.buf_idx += 1;
if self.ch == '\n' {
self.line += 1u;
self.col = 1u;
} else {
self.col += 1u;
}
}
@ -941,7 +961,7 @@ impl serialize::Decoder for Decoder {
let name = match self.stack.pop() {
String(s) => s,
List(list) => {
do vec::consume_reverse(list) |_i, v| {
for list.consume_rev_iter().advance |v| {
self.stack.push(v);
}
match self.stack.pop() {
@ -951,7 +971,7 @@ impl serialize::Decoder for Decoder {
}
ref json => fail!("invalid variant: %?", *json),
};
let idx = match names.iter().position_(|n| str::eq_slice(*n, name)) {
let idx = match names.iter().position(|n| str::eq_slice(*n, name)) {
Some(idx) => idx,
None => fail!("Unknown variant name: %?", name),
};
@ -1059,7 +1079,7 @@ impl serialize::Decoder for Decoder {
let len = match self.stack.pop() {
List(list) => {
let len = list.len();
do vec::consume_reverse(list) |_i, v| {
for list.consume_rev_iter().advance |v| {
self.stack.push(v);
}
len

View file

@ -22,20 +22,20 @@ use std::str;
use iotask = uv::iotask::IoTask;
use interact = uv::iotask::interact;
use sockaddr_in = super::uv_ll::sockaddr_in;
use sockaddr_in6 = super::uv_ll::sockaddr_in6;
use addrinfo = super::uv_ll::addrinfo;
use uv_getaddrinfo_t = super::uv_ll::uv_getaddrinfo_t;
use uv_ip4_name = super::uv_ll::ip4_name;
use uv_ip4_port = super::uv_ll::ip4_port;
use uv_ip6_name = super::uv_ll::ip6_name;
use uv_ip6_port = super::uv_ll::ip6_port;
use uv_getaddrinfo = super::uv_ll::getaddrinfo;
use uv_freeaddrinfo = super::uv_ll::freeaddrinfo;
use create_uv_getaddrinfo_t = super::uv_ll::getaddrinfo_t;
use set_data_for_req = super::uv_ll::set_data_for_req;
use get_data_for_req = super::uv_ll::get_data_for_req;
use ll = super::uv_ll;
use sockaddr_in = uv_ll::sockaddr_in;
use sockaddr_in6 = uv_ll::sockaddr_in6;
use addrinfo = uv_ll::addrinfo;
use uv_getaddrinfo_t = uv_ll::uv_getaddrinfo_t;
use uv_ip4_name = uv_ll::ip4_name;
use uv_ip4_port = uv_ll::ip4_port;
use uv_ip6_name = uv_ll::ip6_name;
use uv_ip6_port = uv_ll::ip6_port;
use uv_getaddrinfo = uv_ll::getaddrinfo;
use uv_freeaddrinfo = uv_ll::freeaddrinfo;
use create_uv_getaddrinfo_t = uv_ll::getaddrinfo_t;
use set_data_for_req = uv_ll::set_data_for_req;
use get_data_for_req = uv_ll::get_data_for_req;
use ll = uv_ll;
/// An IP address
pub enum IpAddr {
@ -203,7 +203,7 @@ pub mod v4 {
}).collect();
if parts.len() != 4 {
Err(fmt!("'%s' doesn't have 4 parts", ip))
} else if parts.iter().any_(|x| *x == 256u) {
} else if parts.iter().any(|x| *x == 256u) {
Err(fmt!("invalid octal in addr '%s'", ip))
} else {
Ok(Ipv4Rep {
@ -363,9 +363,9 @@ extern fn get_addr_cb(handle: *uv_getaddrinfo_t,
#[cfg(test)]
mod test {
use net_ip::*;
use net_ip::v4;
use net_ip::v6;
use net::ip::*;
use net::ip::v4;
use net::ip::v6;
use uv;
use std::result;

View file

@ -13,13 +13,13 @@ Top-level module for network-related functionality.
Basically, including this module gives you:
* `net_tcp`
* `net_ip`
* `net_url`
* `tcp`
* `ip`
* `url`
See each of those three modules for documentation on what they do.
*/
pub use tcp = net_tcp;
pub use ip = net_ip;
pub use url = net_url;
pub mod tcp;
pub mod ip;
pub mod url;
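
For client code, the practical effect is a path change: the old crate-root modules (`extra::net_ip`, `extra::net_tcp`, `extra::net_url`) are gone and the same functionality is now reached through `extra::net`. A hypothetical before/after sketch; the `parse_addr` and `format_addr` calls are assumed from the pre-existing `net_ip` API and shown only for illustration:

~~~ {.rust}
extern mod extra;

// Previously: use extra::net_ip::v4;
use extra::net::ip;

fn main() {
    // Assumed API: v4::parse_addr fails on malformed input, so a
    // well-formed literal is used here.
    let loopback = ip::v4::parse_addr("127.0.0.1");
    println(fmt!("%s", ip::format_addr(&loopback)));
}
~~~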

View file

@ -16,7 +16,7 @@
use future;
use future_spawn = future::spawn;
use ip = net_ip;
use ip = net::ip;
use uv;
use uv::iotask;
use uv::iotask::IoTask;

View file

@ -522,7 +522,7 @@ fn get_authority(rawurl: &str) ->
let host_is_end_plus_one: &fn() -> bool = || {
let xs = ['?', '#', '/'];
end+1 == len
&& !xs.iter().any_(|x| *x == (rawurl[end] as char))
&& !xs.iter().any(|x| *x == (rawurl[end] as char))
};
// finish up
@ -800,7 +800,7 @@ fn test_get_path() {
#[cfg(test)]
mod tests {
use net_url::*;
use net::url::*;
use std::hashmap::HashMap;

View file

@ -17,6 +17,7 @@ A BigInt is a combination of BigUint and Sign.
*/
#[allow(missing_doc)];
#[allow(non_uppercase_statics)];
use std::cmp::{Eq, Ord, TotalEq, TotalOrd, Ordering, Less, Equal, Greater};
use std::int;
@ -206,7 +207,7 @@ impl Add<BigUint, BigUint> for BigUint {
let new_len = uint::max(self.data.len(), other.data.len());
let mut carry = 0;
let sum = do vec::from_fn(new_len) |i| {
let mut sum = do vec::from_fn(new_len) |i| {
let ai = if i < self.data.len() { self.data[i] } else { 0 };
let bi = if i < other.data.len() { other.data[i] } else { 0 };
let (hi, lo) = BigDigit::from_uint(
@ -215,8 +216,8 @@ impl Add<BigUint, BigUint> for BigUint {
carry = hi;
lo
};
if carry == 0 { return BigUint::new(sum) };
return BigUint::new(sum + [carry]);
if carry != 0 { sum.push(carry); }
return BigUint::new(sum);
}
}
@ -283,15 +284,15 @@ impl Mul<BigUint, BigUint> for BigUint {
if n == 1 { return copy *a; }
let mut carry = 0;
let prod = do a.data.iter().transform |ai| {
let mut prod = do a.data.iter().transform |ai| {
let (hi, lo) = BigDigit::from_uint(
(*ai as uint) * (n as uint) + (carry as uint)
);
carry = hi;
lo
}.collect::<~[BigDigit]>();
if carry == 0 { return BigUint::new(prod) };
return BigUint::new(prod + [carry]);
if carry != 0 { prod.push(carry); }
return BigUint::new(prod);
}
@ -519,10 +520,12 @@ impl ToStrRadix for BigUint {
fn fill_concat(v: &[BigDigit], radix: uint, l: uint) -> ~str {
if v.is_empty() { return ~"0" }
let s = vec::reversed(v).map(|n| {
let s = uint::to_str_radix(*n as uint, radix);
str::from_chars(vec::from_elem(l - s.len(), '0')) + s
}).concat();
let mut s = str::with_capacity(v.len() * l);
for v.rev_iter().advance |n| {
let ss = uint::to_str_radix(*n as uint, radix);
s.push_str("0".repeat(l - ss.len()));
s.push_str(ss);
}
s.trim_left_chars(&'0').to_owned()
}
}
@ -618,15 +621,15 @@ impl BigUint {
if n_bits == 0 || self.is_zero() { return copy *self; }
let mut carry = 0;
let shifted = do self.data.iter().transform |elem| {
let mut shifted = do self.data.iter().transform |elem| {
let (hi, lo) = BigDigit::from_uint(
(*elem as uint) << n_bits | (carry as uint)
);
carry = hi;
lo
}.collect::<~[BigDigit]>();
if carry == 0 { return BigUint::new(shifted); }
return BigUint::new(shifted + [carry]);
if carry != 0 { shifted.push(carry); }
return BigUint::new(shifted);
}
@ -1628,7 +1631,6 @@ mod bigint_tests {
use std::int;
use std::num::{IntConvertible, Zero, One, FromStrRadix};
use std::uint;
use std::vec;
#[test]
fn test_from_biguint() {
@ -1645,9 +1647,11 @@ mod bigint_tests {
#[test]
fn test_cmp() {
let vs = [ &[2], &[1, 1], &[2, 1], &[1, 1, 1] ];
let mut nums = vec::reversed(vs)
.map(|s| BigInt::from_slice(Minus, *s));
let vs = [ &[2 as BigDigit], &[1, 1], &[2, 1], &[1, 1, 1] ];
let mut nums = ~[];
for vs.rev_iter().advance |s| {
nums.push(BigInt::from_slice(Minus, *s));
}
nums.push(Zero::zero());
nums.push_all_move(vs.map(|s| BigInt::from_slice(Plus, *s)));

View file

@ -191,6 +191,8 @@ impl<T: ToStrRadix + Num + Ord> ToStrRadix for Cmplx<T> {
#[cfg(test)]
mod test {
#[allow(non_uppercase_statics)];
use super::*;
use std::num::{Zero,One,Real};

View file

@ -20,10 +20,10 @@ use future_spawn = future::spawn;
* The maximum number of tasks this module will spawn for a single
* operation.
*/
static max_tasks : uint = 32u;
static MAX_TASKS : uint = 32u;
/// The minimum number of elements each task will process.
static min_granularity : uint = 1024u;
static MIN_GRANULARITY : uint = 1024u;
/**
* An internal helper to map a function over a large vector and
@ -38,13 +38,13 @@ fn map_slices<A:Copy + Send,B:Copy + Send>(
-> ~[B] {
let len = xs.len();
if len < min_granularity {
if len < MIN_GRANULARITY {
info!("small slice");
// This is a small vector, fall back on the normal map.
~[f()(0u, xs)]
}
else {
let num_tasks = uint::min(max_tasks, len / min_granularity);
let num_tasks = uint::min(MAX_TASKS, len / MIN_GRANULARITY);
let items_per_task = len / num_tasks;
@ -53,7 +53,7 @@ fn map_slices<A:Copy + Send,B:Copy + Send>(
info!("spawning tasks");
while base < len {
let end = uint::min(len, base + items_per_task);
do vec::as_imm_buf(xs) |p, _len| {
do xs.as_imm_buf |p, _len| {
let f = f();
let base = base;
let f = do future_spawn() || {
@ -78,11 +78,10 @@ fn map_slices<A:Copy + Send,B:Copy + Send>(
info!("num_tasks: %?", (num_tasks, futures.len()));
assert_eq!(num_tasks, futures.len());
let r = do vec::map_consume(futures) |ys| {
do futures.consume_iter().transform |ys| {
let mut ys = ys;
ys.get()
};
r
}.collect()
}
}
@ -137,8 +136,8 @@ pub fn any<A:Copy + Send>(
fn_factory: &fn() -> ~fn(&A) -> bool) -> bool {
let mapped = map_slices(xs, || {
let f = fn_factory();
let result: ~fn(uint, &[A]) -> bool = |_, slice| slice.iter().any_(f);
let result: ~fn(uint, &[A]) -> bool = |_, slice| slice.iter().any(f);
result
});
mapped.iter().any_(|&x| x)
mapped.iter().any(|&x| x)
}

View file

@ -186,6 +186,9 @@ pub struct PriorityQueueIterator <'self, T> {
impl<'self, T> Iterator<&'self T> for PriorityQueueIterator<'self, T> {
#[inline]
fn next(&mut self) -> Option<(&'self T)> { self.iter.next() }
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) { self.iter.size_hint() }
}
#[cfg(test)]

File diff suppressed because it is too large

View file

@ -682,7 +682,7 @@ impl<
> Encodable<S> for Deque<T> {
fn encode(&self, s: &mut S) {
do s.emit_seq(self.len()) |s| {
for self.eachi |i, e| {
for self.iter().enumerate().advance |(i, e)| {
s.emit_seq_elt(i, |s| e.encode(s));
}
}

View file

@ -8,32 +8,135 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[allow(missing_doc)];
use std::f64;
use std::cmp;
use std::num;
use std::vec;
use sort;
use std::cmp;
use std::io;
use std::num;
use std::f64;
use std::vec;
// NB: this can probably be rewritten in terms of num::Num
// to be less f64-specific.
/// Trait that provides simple descriptive statistics on a univariate set of numeric samples.
pub trait Stats {
/// Sum of the samples.
fn sum(self) -> f64;
/// Minimum value of the samples.
fn min(self) -> f64;
/// Maximum value of the samples.
fn max(self) -> f64;
/// Arithmetic mean (average) of the samples: sum divided by sample-count.
///
/// See: https://en.wikipedia.org/wiki/Arithmetic_mean
fn mean(self) -> f64;
/// Median of the samples: value separating the lower half of the samples from the higher half.
/// Equal to `self.percentile(50.0)`.
///
/// See: https://en.wikipedia.org/wiki/Median
fn median(self) -> f64;
/// Variance of the samples: bias-corrected mean of the squares of the differences of each
/// sample from the sample mean. Note that this calculates the _sample variance_ rather than the
/// population variance, which is assumed to be unknown. It therefore corrects the `(n-1)/n`
/// bias that would appear if we calculated a population variance, by dividing by `(n-1)` rather
/// than `n`.
///
/// See: https://en.wikipedia.org/wiki/Variance
fn var(self) -> f64;
/// Standard deviation: the square root of the sample variance.
///
/// Note: this is not a robust statistic for non-normal distributions. Prefer the
/// `median_abs_dev` for unknown distributions.
///
/// See: https://en.wikipedia.org/wiki/Standard_deviation
fn std_dev(self) -> f64;
/// Standard deviation as a percent of the mean value. See `std_dev` and `mean`.
///
/// Note: this is not a robust statistic for non-normal distributions. Prefer the
/// `median_abs_dev_pct` for unknown distributions.
fn std_dev_pct(self) -> f64;
/// Scaled median of the absolute deviations of each sample from the sample median. This is a
/// robust (distribution-agnostic) estimator of sample variability. Use this in preference to
/// `std_dev` if you cannot assume your sample is normally distributed. Note that this is scaled
/// by the constant `1.4826` to allow its use as a consistent estimator for the standard
/// deviation.
///
/// See: http://en.wikipedia.org/wiki/Median_absolute_deviation
fn median_abs_dev(self) -> f64;
/// Median absolute deviation as a percent of the median. See `median_abs_dev` and `median`.
fn median_abs_dev_pct(self) -> f64;
/// Percentile: the value below which `pct` percent of the values in `self` fall. For example,
/// percentile(95.0) will return the value `v` such that 95% of the samples `s` in `self`
/// satisfy `s <= v`.
///
/// Calculated by linear interpolation between closest ranks.
///
/// See: http://en.wikipedia.org/wiki/Percentile
fn percentile(self, pct: f64) -> f64;
/// Quartiles of the sample: three values that divide the sample into four equal groups, each
/// with 1/4 of the data. The middle value is the median. See `median` and `percentile`. This
/// function may calculate the 3 quartiles more efficiently than 3 calls to `percentile`, but
/// is otherwise equivalent.
///
/// See also: https://en.wikipedia.org/wiki/Quartile
fn quartiles(self) -> (f64,f64,f64);
/// Inter-quartile range: the difference between the 25th percentile (1st quartile) and the 75th
/// percentile (3rd quartile). See `quartiles`.
///
/// See also: https://en.wikipedia.org/wiki/Interquartile_range
fn iqr(self) -> f64;
}
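// A small worked example of the statistics above (illustrative numbers, not taken from the
// R-generated test vectors below): for the samples 1.0, 2.0, 3.0 and 4.0, `sum` is 10.0,
// `mean` is 2.5, `median` is 2.5 (the interpolated 50th percentile), the sample variance is
// (2.25 + 0.25 + 0.25 + 2.25) / 3, about 1.667, and `std_dev` is its square root, about 1.291.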
/// Extracted collection of all the summary statistics of a sample set.
struct Summary {
sum: f64,
min: f64,
max: f64,
mean: f64,
median: f64,
var: f64,
std_dev: f64,
std_dev_pct: f64,
median_abs_dev: f64,
median_abs_dev_pct: f64,
quartiles: (f64,f64,f64),
iqr: f64,
}
impl Summary {
fn new(samples: &[f64]) -> Summary {
Summary {
sum: samples.sum(),
min: samples.min(),
max: samples.max(),
mean: samples.mean(),
median: samples.median(),
var: samples.var(),
std_dev: samples.std_dev(),
std_dev_pct: samples.std_dev_pct(),
median_abs_dev: samples.median_abs_dev(),
median_abs_dev_pct: samples.median_abs_dev_pct(),
quartiles: samples.quartiles(),
iqr: samples.iqr()
}
}
}
impl<'self> Stats for &'self [f64] {
fn sum(self) -> f64 {
self.iter().fold(0.0, |p,q| p + *q)
}
@ -54,19 +157,11 @@ impl<'self> Stats for &'self [f64] {
}
fn median(self) -> f64 {
assert!(self.len() != 0);
let mut tmp = vec::to_owned(self);
sort::tim_sort(tmp);
if tmp.len() & 1 == 0 {
let m = tmp.len() / 2;
(tmp[m] + tmp[m-1]) / 2.0
} else {
tmp[tmp.len() / 2]
}
self.percentile(50.0)
}
fn var(self) -> f64 {
if self.len() == 0 {
if self.len() < 2 {
0.0
} else {
let mean = self.mean();
@ -75,7 +170,10 @@ impl<'self> Stats for &'self [f64] {
let x = *s - mean;
v += x*x;
}
v/(self.len() as f64)
// NB: this is _supposed to be_ len-1, not len. If you
// change it back to len, you will be calculating a
// population variance, not a sample variance.
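// For example, for the two samples 924.0 and 958.0 (the `test_norm2` vector below) the
// squared deviations from the mean 941.0 sum to 578.0, so dividing by n-1 = 1 gives the
// sample variance 578.0, whereas dividing by n = 2 would give 289.0.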
v/((self.len()-1) as f64)
}
}
@ -89,11 +187,725 @@ impl<'self> Stats for &'self [f64] {
fn median_abs_dev(self) -> f64 {
let med = self.median();
let abs_devs = self.map(|v| num::abs(med - *v));
abs_devs.median()
let abs_devs = self.map(|&v| num::abs(med - v));
// This constant is derived by smarter statistics brains than me, but it is
// consistent with how R and other packages treat the MAD.
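// (1.4826 is approximately 1 / 0.6745, the reciprocal of the 75th percentile of the
// standard normal distribution; scaling by it makes the MAD a consistent estimator of
// the standard deviation when the samples are normally distributed.)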
abs_devs.median() * 1.4826
}
fn median_abs_dev_pct(self) -> f64 {
(self.median_abs_dev() / self.median()) * 100.0
}
fn percentile(self, pct: f64) -> f64 {
let mut tmp = vec::to_owned(self);
sort::tim_sort(tmp);
percentile_of_sorted(tmp, pct)
}
fn quartiles(self) -> (f64,f64,f64) {
let mut tmp = vec::to_owned(self);
sort::tim_sort(tmp);
let a = percentile_of_sorted(tmp, 25.0);
let b = percentile_of_sorted(tmp, 50.0);
let c = percentile_of_sorted(tmp, 75.0);
(a,b,c)
}
fn iqr(self) -> f64 {
let (a,_,c) = self.quartiles();
c - a
}
}
// Helper function: extract a value representing the `pct` percentile of a sorted sample-set, using
// linear interpolation. If the samples are not sorted, the result is nonsensical.
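// For example, for the sorted samples [1.0, 2.0, 3.0, 4.0] and pct = 25.0, the rank is
// 0.25 * 3 = 0.75, so the result is interpolated between samples[0] and samples[1]:
// 1.0 + (2.0 - 1.0) * 0.75 = 1.75.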
priv fn percentile_of_sorted(sorted_samples: &[f64],
pct: f64) -> f64 {
assert!(sorted_samples.len() != 0);
if sorted_samples.len() == 1 {
return sorted_samples[0];
}
assert!(0.0 <= pct);
assert!(pct <= 100.0);
if pct == 100.0 {
return sorted_samples[sorted_samples.len() - 1];
}
let rank = (pct / 100.0) * ((sorted_samples.len() - 1) as f64);
let lrank = rank.floor();
let d = rank - lrank;
let n = lrank as uint;
let lo = sorted_samples[n];
let hi = sorted_samples[n+1];
lo + (hi - lo) * d
}
/// Winsorize a set of samples, replacing values above the `100-pct` percentile and below the `pct`
/// percentile with those percentiles themselves. This is a way of minimizing the effect of
/// outliers, at the cost of biasing the sample. It differs from trimming in that it does not
/// change the number of samples, just changes the values of those that are outliers.
///
/// See: http://en.wikipedia.org/wiki/Winsorising
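///
/// For example (illustrative numbers): winsorizing the samples `[1.0, 2.0, 3.0, 4.0, 100.0]`
/// with `pct = 10.0` clamps every value to the interpolated 10th and 90th percentiles,
/// 1.4 and 61.6, giving `[1.4, 2.0, 3.0, 4.0, 61.6]`.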
pub fn winsorize(samples: &mut [f64], pct: f64) {
let mut tmp = vec::to_owned(samples);
sort::tim_sort(tmp);
let lo = percentile_of_sorted(tmp, pct);
let hi = percentile_of_sorted(tmp, 100.0-pct);
for samples.mut_iter().advance |samp| {
if *samp > hi {
*samp = hi
} else if *samp < lo {
*samp = lo
}
}
}
/// Write the min, max and quartiles (the 5-number summary) of the provided `Summary` to the provided `Writer`.
pub fn write_5_number_summary(w: @io::Writer, s: &Summary) {
let (q1,q2,q3) = s.quartiles;
w.write_str(fmt!("(min=%f, q1=%f, med=%f, q3=%f, max=%f)",
s.min as float,
q1 as float,
q2 as float,
q3 as float,
s.max as float));
}
/// Render a boxplot to the provided writer. The boxplot shows the min, max and quartiles of the
/// provided `Summary` (thus includes the median) and is scaled to display within the range of the
/// nearest multiple-of-a-power-of-ten above and below the min and max of possible values, and
/// target `width_hint` characters of display (though it will be wider if necessary).
///
/// As an example, the summary with 5-number-summary `(min=15, q1=17, med=20, q3=24, max=31)` might
/// display as:
///
/// ~~~~
/// 10 | [--****#******----------] | 40
/// ~~~~
pub fn write_boxplot(w: @io::Writer, s: &Summary, width_hint: uint) {
let (q1,q2,q3) = s.quartiles;
let lomag = (10.0_f64).pow(&s.min.log10().floor());
let himag = (10.0_f64).pow(&(s.max.log10().floor()));
let lo = (s.min / lomag).floor() * lomag;
let hi = (s.max / himag).ceil() * himag;
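// For example, with min = 15.0 and max = 31.0 (the 5-number summary used in the doc
// comment above), lomag and himag are both 10.0, so the plot is scaled from lo = 10.0
// up to hi = 40.0.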
let range = hi - lo;
let lostr = lo.to_str();
let histr = hi.to_str();
let overhead_width = lostr.len() + histr.len() + 4;
let range_width = width_hint - overhead_width;
let char_step = range / (range_width as f64);
w.write_str(lostr);
w.write_char(' ');
w.write_char('|');
let mut c = 0;
let mut v = lo;
while c < range_width && v < s.min {
w.write_char(' ');
v += char_step;
c += 1;
}
w.write_char('[');
c += 1;
while c < range_width && v < q1 {
w.write_char('-');
v += char_step;
c += 1;
}
while c < range_width && v < q2 {
w.write_char('*');
v += char_step;
c += 1;
}
w.write_char('#');
c += 1;
while c < range_width && v < q3 {
w.write_char('*');
v += char_step;
c += 1;
}
while c < range_width && v < s.max {
w.write_char('-');
v += char_step;
c += 1;
}
w.write_char(']');
while c < range_width {
w.write_char(' ');
v += char_step;
c += 1;
}
w.write_char('|');
w.write_char(' ');
w.write_str(histr);
}
// Test vectors generated from R, using the script src/etc/stat-test-vectors.r.
#[cfg(test)]
mod tests {
use stats::Stats;
use stats::Summary;
use stats::write_5_number_summary;
use stats::write_boxplot;
use std::io;
fn check(samples: &[f64], summ: &Summary) {
let summ2 = Summary::new(samples);
let w = io::stdout();
w.write_char('\n');
write_5_number_summary(w, &summ2);
w.write_char('\n');
write_boxplot(w, &summ2, 50);
w.write_char('\n');
assert_eq!(summ.sum, summ2.sum);
assert_eq!(summ.min, summ2.min);
assert_eq!(summ.max, summ2.max);
assert_eq!(summ.mean, summ2.mean);
assert_eq!(summ.median, summ2.median);
// We needed a few more digits to get exact equality on these
// but they're within float epsilon, which is 1.0e-6.
assert_approx_eq!(summ.var, summ2.var);
assert_approx_eq!(summ.std_dev, summ2.std_dev);
assert_approx_eq!(summ.std_dev_pct, summ2.std_dev_pct);
assert_approx_eq!(summ.median_abs_dev, summ2.median_abs_dev);
assert_approx_eq!(summ.median_abs_dev_pct, summ2.median_abs_dev_pct);
assert_eq!(summ.quartiles, summ2.quartiles);
assert_eq!(summ.iqr, summ2.iqr);
}
#[test]
fn test_norm2() {
let val = &[
958.0000000000,
924.0000000000,
];
let summ = &Summary {
sum: 1882.0000000000,
min: 924.0000000000,
max: 958.0000000000,
mean: 941.0000000000,
median: 941.0000000000,
var: 578.0000000000,
std_dev: 24.0416305603,
std_dev_pct: 2.5549022912,
median_abs_dev: 25.2042000000,
median_abs_dev_pct: 2.6784484591,
quartiles: (932.5000000000,941.0000000000,949.5000000000),
iqr: 17.0000000000,
};
check(val, summ);
}
#[test]
fn test_norm10narrow() {
let val = &[
966.0000000000,
985.0000000000,
1110.0000000000,
848.0000000000,
821.0000000000,
975.0000000000,
962.0000000000,
1157.0000000000,
1217.0000000000,
955.0000000000,
];
let summ = &Summary {
sum: 9996.0000000000,
min: 821.0000000000,
max: 1217.0000000000,
mean: 999.6000000000,
median: 970.5000000000,
var: 16050.7111111111,
std_dev: 126.6914010938,
std_dev_pct: 12.6742097933,
median_abs_dev: 102.2994000000,
median_abs_dev_pct: 10.5408964451,
quartiles: (956.7500000000,970.5000000000,1078.7500000000),
iqr: 122.0000000000,
};
check(val, summ);
}
#[test]
fn test_norm10medium() {
let val = &[
954.0000000000,
1064.0000000000,
855.0000000000,
1000.0000000000,
743.0000000000,
1084.0000000000,
704.0000000000,
1023.0000000000,
357.0000000000,
869.0000000000,
];
let summ = &Summary {
sum: 8653.0000000000,
min: 357.0000000000,
max: 1084.0000000000,
mean: 865.3000000000,
median: 911.5000000000,
var: 48628.4555555556,
std_dev: 220.5186059170,
std_dev_pct: 25.4846418487,
median_abs_dev: 195.7032000000,
median_abs_dev_pct: 21.4704552935,
quartiles: (771.0000000000,911.5000000000,1017.2500000000),
iqr: 246.2500000000,
};
check(val, summ);
}
#[test]
fn test_norm10wide() {
let val = &[
505.0000000000,
497.0000000000,
1591.0000000000,
887.0000000000,
1026.0000000000,
136.0000000000,
1580.0000000000,
940.0000000000,
754.0000000000,
1433.0000000000,
];
let summ = &Summary {
sum: 9349.0000000000,
min: 136.0000000000,
max: 1591.0000000000,
mean: 934.9000000000,
median: 913.5000000000,
var: 239208.9888888889,
std_dev: 489.0899599142,
std_dev_pct: 52.3146817750,
median_abs_dev: 611.5725000000,
median_abs_dev_pct: 66.9482758621,
quartiles: (567.2500000000,913.5000000000,1331.2500000000),
iqr: 764.0000000000,
};
check(val, summ);
}
#[test]
fn test_norm25verynarrow() {
let val = &[
991.0000000000,
1018.0000000000,
998.0000000000,
1013.0000000000,
974.0000000000,
1007.0000000000,
1014.0000000000,
999.0000000000,
1011.0000000000,
978.0000000000,
985.0000000000,
999.0000000000,
983.0000000000,
982.0000000000,
1015.0000000000,
1002.0000000000,
977.0000000000,
948.0000000000,
1040.0000000000,
974.0000000000,
996.0000000000,
989.0000000000,
1015.0000000000,
994.0000000000,
1024.0000000000,
];
let summ = &Summary {
sum: 24926.0000000000,
min: 948.0000000000,
max: 1040.0000000000,
mean: 997.0400000000,
median: 998.0000000000,
var: 393.2066666667,
std_dev: 19.8294393937,
std_dev_pct: 1.9888308788,
median_abs_dev: 22.2390000000,
median_abs_dev_pct: 2.2283567134,
quartiles: (983.0000000000,998.0000000000,1013.0000000000),
iqr: 30.0000000000,
};
check(val, summ);
}
#[test]
fn test_exp10a() {
let val = &[
23.0000000000,
11.0000000000,
2.0000000000,
57.0000000000,
4.0000000000,
12.0000000000,
5.0000000000,
29.0000000000,
3.0000000000,
21.0000000000,
];
let summ = &Summary {
sum: 167.0000000000,
min: 2.0000000000,
max: 57.0000000000,
mean: 16.7000000000,
median: 11.5000000000,
var: 287.7888888889,
std_dev: 16.9643416875,
std_dev_pct: 101.5828843560,
median_abs_dev: 13.3434000000,
median_abs_dev_pct: 116.0295652174,
quartiles: (4.2500000000,11.5000000000,22.5000000000),
iqr: 18.2500000000,
};
check(val, summ);
}
#[test]
fn test_exp10b() {
let val = &[
24.0000000000,
17.0000000000,
6.0000000000,
38.0000000000,
25.0000000000,
7.0000000000,
51.0000000000,
2.0000000000,
61.0000000000,
32.0000000000,
];
let summ = &Summary {
sum: 263.0000000000,
min: 2.0000000000,
max: 61.0000000000,
mean: 26.3000000000,
median: 24.5000000000,
var: 383.5666666667,
std_dev: 19.5848580967,
std_dev_pct: 74.4671410520,
median_abs_dev: 22.9803000000,
median_abs_dev_pct: 93.7971428571,
quartiles: (9.5000000000,24.5000000000,36.5000000000),
iqr: 27.0000000000,
};
check(val, summ);
}
#[test]
fn test_exp10c() {
let val = &[
71.0000000000,
2.0000000000,
32.0000000000,
1.0000000000,
6.0000000000,
28.0000000000,
13.0000000000,
37.0000000000,
16.0000000000,
36.0000000000,
];
let summ = &Summary {
sum: 242.0000000000,
min: 1.0000000000,
max: 71.0000000000,
mean: 24.2000000000,
median: 22.0000000000,
var: 458.1777777778,
std_dev: 21.4050876611,
std_dev_pct: 88.4507754589,
median_abs_dev: 21.4977000000,
median_abs_dev_pct: 97.7168181818,
quartiles: (7.7500000000,22.0000000000,35.0000000000),
iqr: 27.2500000000,
};
check(val, summ);
}
#[test]
fn test_exp25() {
let val = &[
3.0000000000,
24.0000000000,
1.0000000000,
19.0000000000,
7.0000000000,
5.0000000000,
30.0000000000,
39.0000000000,
31.0000000000,
13.0000000000,
25.0000000000,
48.0000000000,
1.0000000000,
6.0000000000,
42.0000000000,
63.0000000000,
2.0000000000,
12.0000000000,
108.0000000000,
26.0000000000,
1.0000000000,
7.0000000000,
44.0000000000,
25.0000000000,
11.0000000000,
];
let summ = &Summary {
sum: 593.0000000000,
min: 1.0000000000,
max: 108.0000000000,
mean: 23.7200000000,
median: 19.0000000000,
var: 601.0433333333,
std_dev: 24.5161851301,
std_dev_pct: 103.3565983562,
median_abs_dev: 19.2738000000,
median_abs_dev_pct: 101.4410526316,
quartiles: (6.0000000000,19.0000000000,31.0000000000),
iqr: 25.0000000000,
};
check(val, summ);
}
#[test]
fn test_binom25() {
let val = &[
18.0000000000,
17.0000000000,
27.0000000000,
15.0000000000,
21.0000000000,
25.0000000000,
17.0000000000,
24.0000000000,
25.0000000000,
24.0000000000,
26.0000000000,
26.0000000000,
23.0000000000,
15.0000000000,
23.0000000000,
17.0000000000,
18.0000000000,
18.0000000000,
21.0000000000,
16.0000000000,
15.0000000000,
31.0000000000,
20.0000000000,
17.0000000000,
15.0000000000,
];
let summ = &Summary {
sum: 514.0000000000,
min: 15.0000000000,
max: 31.0000000000,
mean: 20.5600000000,
median: 20.0000000000,
var: 20.8400000000,
std_dev: 4.5650848842,
std_dev_pct: 22.2037202539,
median_abs_dev: 5.9304000000,
median_abs_dev_pct: 29.6520000000,
quartiles: (17.0000000000,20.0000000000,24.0000000000),
iqr: 7.0000000000,
};
check(val, summ);
}
#[test]
fn test_pois25lambda30() {
let val = &[
27.0000000000,
33.0000000000,
34.0000000000,
34.0000000000,
24.0000000000,
39.0000000000,
28.0000000000,
27.0000000000,
31.0000000000,
28.0000000000,
38.0000000000,
21.0000000000,
33.0000000000,
36.0000000000,
29.0000000000,
37.0000000000,
32.0000000000,
34.0000000000,
31.0000000000,
39.0000000000,
25.0000000000,
31.0000000000,
32.0000000000,
40.0000000000,
24.0000000000,
];
let summ = &Summary {
sum: 787.0000000000,
min: 21.0000000000,
max: 40.0000000000,
mean: 31.4800000000,
median: 32.0000000000,
var: 26.5933333333,
std_dev: 5.1568724372,
std_dev_pct: 16.3814245145,
median_abs_dev: 5.9304000000,
median_abs_dev_pct: 18.5325000000,
quartiles: (28.0000000000,32.0000000000,34.0000000000),
iqr: 6.0000000000,
};
check(val, summ);
}
#[test]
fn test_pois25lambda40() {
let val = &[
42.0000000000,
50.0000000000,
42.0000000000,
46.0000000000,
34.0000000000,
45.0000000000,
34.0000000000,
49.0000000000,
39.0000000000,
28.0000000000,
40.0000000000,
35.0000000000,
37.0000000000,
39.0000000000,
46.0000000000,
44.0000000000,
32.0000000000,
45.0000000000,
42.0000000000,
37.0000000000,
48.0000000000,
42.0000000000,
33.0000000000,
42.0000000000,
48.0000000000,
];
let summ = &Summary {
sum: 1019.0000000000,
min: 28.0000000000,
max: 50.0000000000,
mean: 40.7600000000,
median: 42.0000000000,
var: 34.4400000000,
std_dev: 5.8685603004,
std_dev_pct: 14.3978417577,
median_abs_dev: 5.9304000000,
median_abs_dev_pct: 14.1200000000,
quartiles: (37.0000000000,42.0000000000,45.0000000000),
iqr: 8.0000000000,
};
check(val, summ);
}
#[test]
fn test_pois25lambda50() {
let val = &[
45.0000000000,
43.0000000000,
44.0000000000,
61.0000000000,
51.0000000000,
53.0000000000,
59.0000000000,
52.0000000000,
49.0000000000,
51.0000000000,
51.0000000000,
50.0000000000,
49.0000000000,
56.0000000000,
42.0000000000,
52.0000000000,
51.0000000000,
43.0000000000,
48.0000000000,
48.0000000000,
50.0000000000,
42.0000000000,
43.0000000000,
42.0000000000,
60.0000000000,
];
let summ = &Summary {
sum: 1235.0000000000,
min: 42.0000000000,
max: 61.0000000000,
mean: 49.4000000000,
median: 50.0000000000,
var: 31.6666666667,
std_dev: 5.6273143387,
std_dev_pct: 11.3913245723,
median_abs_dev: 4.4478000000,
median_abs_dev_pct: 8.8956000000,
quartiles: (44.0000000000,50.0000000000,52.0000000000),
iqr: 8.0000000000,
};
check(val, summ);
}
#[test]
fn test_unif25() {
let val = &[
99.0000000000,
55.0000000000,
92.0000000000,
79.0000000000,
14.0000000000,
2.0000000000,
33.0000000000,
49.0000000000,
3.0000000000,
32.0000000000,
84.0000000000,
59.0000000000,
22.0000000000,
86.0000000000,
76.0000000000,
31.0000000000,
29.0000000000,
11.0000000000,
41.0000000000,
53.0000000000,
45.0000000000,
44.0000000000,
98.0000000000,
98.0000000000,
7.0000000000,
];
let summ = &Summary {
sum: 1242.0000000000,
min: 2.0000000000,
max: 99.0000000000,
mean: 49.6800000000,
median: 45.0000000000,
var: 1015.6433333333,
std_dev: 31.8691595957,
std_dev_pct: 64.1488719719,
median_abs_dev: 45.9606000000,
median_abs_dev_pct: 102.1346666667,
quartiles: (29.0000000000,45.0000000000,79.0000000000),
iqr: 50.0000000000,
};
check(val, summ);
}
}

View file

@ -26,23 +26,23 @@ use std::io;
pub mod color {
pub type Color = u16;
pub static black: Color = 0u16;
pub static red: Color = 1u16;
pub static green: Color = 2u16;
pub static yellow: Color = 3u16;
pub static blue: Color = 4u16;
pub static magenta: Color = 5u16;
pub static cyan: Color = 6u16;
pub static white: Color = 7u16;
pub static BLACK: Color = 0u16;
pub static RED: Color = 1u16;
pub static GREEN: Color = 2u16;
pub static YELLOW: Color = 3u16;
pub static BLUE: Color = 4u16;
pub static MAGENTA: Color = 5u16;
pub static CYAN: Color = 6u16;
pub static WHITE: Color = 7u16;
pub static bright_black: Color = 8u16;
pub static bright_red: Color = 9u16;
pub static bright_green: Color = 10u16;
pub static bright_yellow: Color = 11u16;
pub static bright_blue: Color = 12u16;
pub static bright_magenta: Color = 13u16;
pub static bright_cyan: Color = 14u16;
pub static bright_white: Color = 15u16;
pub static BRIGHT_BLACK: Color = 8u16;
pub static BRIGHT_RED: Color = 9u16;
pub static BRIGHT_GREEN: Color = 10u16;
pub static BRIGHT_YELLOW: Color = 11u16;
pub static BRIGHT_BLUE: Color = 12u16;
pub static BRIGHT_MAGENTA: Color = 13u16;
pub static BRIGHT_CYAN: Color = 14u16;
pub static BRIGHT_WHITE: Color = 15u16;
}
#[cfg(not(target_os = "win32"))]

View file

@ -8,6 +8,8 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[allow(non_uppercase_statics)];
/// ncurses-compatible compiled terminfo format parsing (term(5))
@ -292,7 +294,7 @@ pub fn parse(file: @Reader, longnames: bool) -> Result<~TermInfo, ~str> {
// Find the offset of the NUL we want to go to
let nulpos = string_table.slice(offset as uint, string_table_bytes as uint)
.iter().position_(|&b| b == 0);
.iter().position(|&b| b == 0);
match nulpos {
Some(len) => {
string_map.insert(name.to_owned(),

View file

@ -323,19 +323,19 @@ pub fn run_tests_console(opts: &TestOpts,
}
fn write_ok(out: @io::Writer, use_color: bool) {
write_pretty(out, "ok", term::color::green, use_color);
write_pretty(out, "ok", term::color::GREEN, use_color);
}
fn write_failed(out: @io::Writer, use_color: bool) {
write_pretty(out, "FAILED", term::color::red, use_color);
write_pretty(out, "FAILED", term::color::RED, use_color);
}
fn write_ignored(out: @io::Writer, use_color: bool) {
write_pretty(out, "ignored", term::color::yellow, use_color);
write_pretty(out, "ignored", term::color::YELLOW, use_color);
}
fn write_bench(out: @io::Writer, use_color: bool) {
write_pretty(out, "bench", term::color::cyan, use_color);
write_pretty(out, "bench", term::color::CYAN, use_color);
}
fn write_pretty(out: @io::Writer,
@ -469,7 +469,7 @@ fn run_tests(opts: &TestOpts,
}
// All benchmarks run at the end, in serial.
do vec::consume(filtered_benchs) |_, b| {
for filtered_benchs.consume_iter().advance |b| {
callback(TeWait(copy b.desc));
run_test(!opts.run_benchmarks, b, ch.clone());
let (test, result) = p.recv();
@ -479,16 +479,16 @@ fn run_tests(opts: &TestOpts,
// Windows tends to dislike being overloaded with threads.
#[cfg(windows)]
static sched_overcommit : uint = 1;
static SCHED_OVERCOMMIT : uint = 1;
#[cfg(unix)]
static sched_overcommit : uint = 4u;
static SCHED_OVERCOMMIT : uint = 4u;
fn get_concurrency() -> uint {
use std::rt;
let threads = rt::util::default_sched_threads();
if threads == 1 { 1 }
else { threads * sched_overcommit }
else { threads * SCHED_OVERCOMMIT }
}
#[allow(non_implicitly_copyable_typarams)]
@ -514,7 +514,7 @@ pub fn filter_tests(
} else { return option::None; }
}
vec::filter_map(filtered, |x| filter_fn(x, filter_str))
filtered.consume_iter().filter_map(|x| filter_fn(x, filter_str)).collect()
};
// Maybe pull out the ignored test and unignore them
@ -532,7 +532,7 @@ pub fn filter_tests(
None
}
};
vec::filter_map(filtered, |x| filter(x))
filtered.consume_iter().filter_map(|x| filter(x)).collect()
};
// Sort the tests alphabetically
@ -711,9 +711,9 @@ impl BenchHarness {
// Eliminate outliers
let med = samples.median();
let mad = samples.median_abs_dev();
let samples = do vec::filter(samples) |f| {
let samples = do samples.consume_iter().filter |f| {
num::abs(*f - med) <= 3.0 * mad
};
}.collect::<~[f64]>();
debug!("%u samples, median %f, MAD=%f, %u survived filter",
n_samples, med as float, mad as float,

View file

@ -868,20 +868,20 @@ mod tests {
use std::str;
fn test_get_time() {
static some_recent_date: i64 = 1325376000i64; // 2012-01-01T00:00:00Z
static some_future_date: i64 = 1577836800i64; // 2020-01-01T00:00:00Z
static SOME_RECENT_DATE: i64 = 1325376000i64; // 2012-01-01T00:00:00Z
static SOME_FUTURE_DATE: i64 = 1577836800i64; // 2020-01-01T00:00:00Z
let tv1 = get_time();
debug!("tv1=%? sec + %? nsec", tv1.sec as uint, tv1.nsec as uint);
assert!(tv1.sec > some_recent_date);
assert!(tv1.sec > SOME_RECENT_DATE);
assert!(tv1.nsec < 1000000000i32);
let tv2 = get_time();
debug!("tv2=%? sec + %? nsec", tv2.sec as uint, tv2.nsec as uint);
assert!(tv2.sec >= tv1.sec);
assert!(tv2.sec < some_future_date);
assert!(tv2.sec < SOME_FUTURE_DATE);
assert!(tv2.nsec < 1000000000i32);
if tv2.sec == tv1.sec {
assert!(tv2.nsec >= tv1.nsec);

View file

@ -57,23 +57,25 @@ impl<K: Eq + TotalOrd, V: Eq> Eq for TreeMap<K, V> {
}
// Lexicographical comparison
fn lt<K: Ord + TotalOrd, V>(a: &TreeMap<K, V>,
fn lt<K: Ord + TotalOrd, V: Ord>(a: &TreeMap<K, V>,
b: &TreeMap<K, V>) -> bool {
let mut x = a.iter();
let mut y = b.iter();
let (a_len, b_len) = (a.len(), b.len());
for uint::min(a_len, b_len).times {
let (key_a,_) = x.next().unwrap();
let (key_b,_) = y.next().unwrap();
let (key_a, value_a) = x.next().unwrap();
let (key_b, value_b) = y.next().unwrap();
if *key_a < *key_b { return true; }
if *key_a > *key_b { return false; }
};
if *value_a < *value_b { return true; }
if *value_a > *value_b { return false; }
}
a_len < b_len
}
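// With the added `V: Ord` bound the ordering is lexicographic over (key, value) pairs
// rather than over keys alone: for example, a map holding only (0, 5) now compares as
// strictly less than one holding only (0, 7), where the key-only comparison left them
// unordered.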
impl<K: Ord + TotalOrd, V> Ord for TreeMap<K, V> {
impl<K: Ord + TotalOrd, V: Ord> Ord for TreeMap<K, V> {
#[inline]
fn lt(&self, other: &TreeMap<K, V>) -> bool { lt(self, other) }
#[inline]
@ -196,14 +198,15 @@ impl<K: TotalOrd, V> TreeMap<K, V> {
/// Get a lazy iterator over the key-value pairs in the map.
/// Requires that it be frozen (immutable).
pub fn iter<'a>(&'a self) -> TreeMapIterator<'a, K, V> {
TreeMapIterator{stack: ~[], node: &self.root}
TreeMapIterator{stack: ~[], node: &self.root, remaining: self.length}
}
}
/// Lazy forward iterator over a map
pub struct TreeMapIterator<'self, K, V> {
priv stack: ~[&'self ~TreeNode<K, V>],
priv node: &'self Option<~TreeNode<K, V>>
priv node: &'self Option<~TreeNode<K, V>>,
priv remaining: uint
}
impl<'self, K, V> Iterator<(&'self K, &'self V)> for TreeMapIterator<'self, K, V> {
@ -220,12 +223,18 @@ impl<'self, K, V> Iterator<(&'self K, &'self V)> for TreeMapIterator<'self, K, V
None => {
let res = self.stack.pop();
self.node = &res.right;
self.remaining -= 1;
return Some((&res.key, &res.value));
}
}
}
None
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
(self.remaining, Some(self.remaining))
}
}
impl<'self, T> Iterator<&'self T> for TreeSetIterator<'self, T> {
@ -835,7 +844,7 @@ mod test_treemap {
for 90.times {
let k = rng.gen();
let v = rng.gen();
if !ctrl.iter().any_(|x| x == &(k, v)) {
if !ctrl.iter().any(|x| x == &(k, v)) {
assert!(map.insert(k, v));
ctrl.push((k, v));
check_structure(&map);
@ -935,7 +944,7 @@ mod test_treemap {
assert!(b.insert(0, 5));
assert!(a < b);
assert!(a.insert(0, 7));
assert!(!(a < b) && !(b < a));
assert!(!(a < b) && b < a);
assert!(b.insert(-2, 0));
assert!(b < a);
assert!(a.insert(-5, 2));

View file

@ -1046,7 +1046,7 @@ pub unsafe fn ip4_name(src: &sockaddr_in) -> ~str {
// ipv4 addr max size: 15 + 1 trailing null byte
let dst: ~[u8] = ~[0u8,0u8,0u8,0u8,0u8,0u8,0u8,0u8,
0u8,0u8,0u8,0u8,0u8,0u8,0u8,0u8];
do vec::as_imm_buf(dst) |dst_buf, size| {
do dst.as_imm_buf |dst_buf, size| {
rust_uv_ip4_name(to_unsafe_ptr(src),
dst_buf, size as libc::size_t);
// seems that checking the result of uv_ip4_name
@ -1066,7 +1066,7 @@ pub unsafe fn ip6_name(src: &sockaddr_in6) -> ~str {
0u8,0u8,0u8,0u8,0u8,0u8,0u8,0u8,
0u8,0u8,0u8,0u8,0u8,0u8,0u8,0u8,
0u8,0u8,0u8,0u8,0u8,0u8];
do vec::as_imm_buf(dst) |dst_buf, size| {
do dst.as_imm_buf |dst_buf, size| {
let src_unsafe_ptr = to_unsafe_ptr(src);
let result = rust_uv_ip6_name(src_unsafe_ptr,
dst_buf, size as libc::size_t);

View file

@ -13,7 +13,7 @@
// FIXME #2238 Make run only accept source that emits an executable
#[link(name = "rust",
vers = "0.7",
vers = "0.8-pre",
uuid = "4a24da33-5cc8-4037-9352-2cbe9bd9d27c",
url = "https://github.com/mozilla/rust/tree/master/src/rust")];
@ -60,7 +60,7 @@ struct Command<'self> {
usage_full: UsageSource<'self>,
}
static commands: &'static [Command<'static>] = &[
static COMMANDS: &'static [Command<'static>] = &[
Command{
cmd: "build",
action: CallMain("rustc", rustc::main),
@ -122,7 +122,7 @@ fn rustc_help() {
}
fn find_cmd(command_string: &str) -> Option<Command> {
do commands.iter().find_ |command| {
do COMMANDS.iter().find_ |command| {
command.cmd == command_string
}.map_consume(|x| copy *x)
}
@ -197,7 +197,7 @@ fn do_command(command: &Command, args: &[~str]) -> ValidUsage {
}
fn usage() {
static indent: uint = 8;
static INDENT: uint = 8;
io::print(
"The rust tool is a convenience for managing rust source code.\n\
@ -209,8 +209,8 @@ fn usage() {
\n"
);
for commands.iter().advance |command| {
let padding = " ".repeat(indent - command.cmd.len());
for COMMANDS.iter().advance |command| {
let padding = " ".repeat(INDENT - command.cmd.len());
io::println(fmt!(" %s%s%s",
command.cmd, padding, command.usage_line));
}

View file

@ -893,7 +893,7 @@ pub fn link_args(sess: Session,
// Add all the link args for external crates.
do cstore::iter_crate_data(cstore) |crate_num, _| {
let link_args = csearch::get_link_args_for_crate(cstore, crate_num);
do vec::consume(link_args) |_, link_arg| {
for link_args.consume_iter().advance |link_arg| {
args.push(link_arg);
}
}

View file

@ -123,10 +123,10 @@ pub fn build_configuration(sess: Session, argv0: @str, input: &input) ->
// Convert strings provided as --cfg [cfgspec] into a crate_cfg
fn parse_cfgspecs(cfgspecs: ~[~str],
demitter: diagnostic::Emitter) -> ast::crate_cfg {
do vec::map_consume(cfgspecs) |s| {
do cfgspecs.consume_iter().transform |s| {
let sess = parse::new_parse_sess(Some(demitter));
parse::parse_meta_from_source_str(@"cfgspec", s.to_managed(), ~[], sess)
}
}.collect()
}
pub enum input {

View file

@ -10,10 +10,9 @@
use std::option;
use std::vec;
use syntax::{ast, fold, attr};
type in_cfg_pred = @fn(attrs: ~[ast::attribute]) -> bool;
type in_cfg_pred = @fn(attrs: &[ast::attribute]) -> bool;
struct Context {
in_cfg: in_cfg_pred
@ -51,8 +50,7 @@ fn filter_item(cx: @Context, item: @ast::item) ->
if item_in_cfg(cx, item) { option::Some(item) } else { option::None }
}
fn filter_view_item(cx: @Context, view_item: @ast::view_item
)-> Option<@ast::view_item> {
fn filter_view_item<'r>(cx: @Context, view_item: &'r ast::view_item)-> Option<&'r ast::view_item> {
if view_item_in_cfg(cx, view_item) {
option::Some(view_item)
} else {
@ -61,13 +59,15 @@ fn filter_view_item(cx: @Context, view_item: @ast::view_item
}
fn fold_mod(cx: @Context, m: &ast::_mod, fld: @fold::ast_fold) -> ast::_mod {
let filtered_items =
m.items.filter_mapped(|a| filter_item(cx, *a));
let filtered_view_items =
m.view_items.filter_mapped(|a| filter_view_item(cx, *a));
let filtered_items = do m.items.iter().filter_map |a| {
filter_item(cx, *a).chain(|x| fld.fold_item(x))
}.collect();
let filtered_view_items = do m.view_items.iter().filter_map |a| {
filter_view_item(cx, a).map(|&x| fld.fold_view_item(x))
}.collect();
ast::_mod {
view_items: filtered_view_items.map(|x| fld.fold_view_item(*x)),
items: vec::filter_map(filtered_items, |x| fld.fold_item(x))
view_items: filtered_view_items,
items: filtered_items
}
}
@ -83,14 +83,14 @@ fn fold_foreign_mod(
nm: &ast::foreign_mod,
fld: @fold::ast_fold
) -> ast::foreign_mod {
let filtered_items =
nm.items.filter_mapped(|a| filter_foreign_item(cx, *a));
let filtered_view_items =
nm.view_items.filter_mapped(|a| filter_view_item(cx, *a));
let filtered_items = nm.items.iter().filter_map(|a| filter_foreign_item(cx, *a)).collect();
let filtered_view_items = do nm.view_items.iter().filter_map |a| {
filter_view_item(cx, a).map(|&x| fld.fold_view_item(x))
}.collect();
ast::foreign_mod {
sort: nm.sort,
abis: nm.abis,
view_items: filtered_view_items.iter().transform(|x| fld.fold_view_item(*x)).collect(),
view_items: filtered_view_items,
items: filtered_items
}
}
@ -98,12 +98,14 @@ fn fold_foreign_mod(
fn fold_item_underscore(cx: @Context, item: &ast::item_,
fld: @fold::ast_fold) -> ast::item_ {
let item = match *item {
ast::item_impl(ref a, b, c, ref methods) => {
let methods = methods.filtered(|m| method_in_cfg(cx, *m) );
ast::item_impl(/*bad*/ copy *a, b, c, methods)
ast::item_impl(ref a, ref b, ref c, ref methods) => {
let methods = methods.iter().filter(|m| method_in_cfg(cx, **m))
.transform(|x| *x).collect();
ast::item_impl(/*bad*/ copy *a, /*bad*/ copy *b, /*bad*/ copy *c, methods)
}
ast::item_trait(ref a, ref b, ref methods) => {
let methods = methods.filtered(|m| trait_method_in_cfg(cx, m) );
let methods = methods.iter().filter(|m| trait_method_in_cfg(cx, *m) )
.transform(|x| /* bad */copy *x).collect();
ast::item_trait(/*bad*/copy *a, /*bad*/copy *b, methods)
}
ref item => /*bad*/ copy *item
@ -134,19 +136,12 @@ fn fold_block(
b: &ast::blk_,
fld: @fold::ast_fold
) -> ast::blk_ {
let filtered_stmts =
b.stmts.filter_mapped(|a| filter_stmt(cx, *a));
let filtered_view_items =
b.view_items.filter_mapped(|a| filter_view_item(cx, *a));
let filtered_view_items =
filtered_view_items.map(|x| fld.fold_view_item(*x));
let mut resulting_stmts = ~[];
for filtered_stmts.iter().advance |stmt| {
match fld.fold_stmt(*stmt) {
None => {}
Some(stmt) => resulting_stmts.push(stmt),
}
}
let resulting_stmts = do b.stmts.iter().filter_map |a| {
filter_stmt(cx, *a).chain(|stmt| fld.fold_stmt(stmt))
}.collect();
let filtered_view_items = do b.view_items.iter().filter_map |a| {
filter_view_item(cx, a).map(|&x| fld.fold_view_item(x))
}.collect();
ast::blk_ {
view_items: filtered_view_items,
stmts: resulting_stmts,
@ -164,8 +159,8 @@ fn foreign_item_in_cfg(cx: @Context, item: @ast::foreign_item) -> bool {
return (cx.in_cfg)(/*bad*/copy item.attrs);
}
fn view_item_in_cfg(cx: @Context, item: @ast::view_item) -> bool {
return (cx.in_cfg)(/*bad*/copy item.attrs);
fn view_item_in_cfg(cx: @Context, item: &ast::view_item) -> bool {
return (cx.in_cfg)(item.attrs);
}
fn method_in_cfg(cx: @Context, meth: @ast::method) -> bool {
@ -193,11 +188,13 @@ pub fn metas_in_cfg(cfg: &[@ast::meta_item],
// Pull the inner meta_items from the #[cfg(meta_item, ...)] attributes,
// so we can match against them. This is the list of configurations for
// which the item is valid
let cfg_metas = vec::filter_map(cfg_metas, |i| attr::get_meta_item_list(i));
let cfg_metas = cfg_metas.consume_iter()
.filter_map(|i| attr::get_meta_item_list(i))
.collect::<~[~[@ast::meta_item]]>();
if cfg_metas.iter().all(|c| c.is_empty()) { return true; }
cfg_metas.iter().any_(|cfg_meta| {
cfg_metas.iter().any(|cfg_meta| {
cfg_meta.iter().all(|cfg_mi| {
match cfg_mi.node {
ast::meta_list(s, ref it) if "not" == s

View file

@ -18,7 +18,7 @@ use syntax::codemap::dummy_sp;
use syntax::codemap;
use syntax::fold;
static STD_VERSION: &'static str = "0.7";
static STD_VERSION: &'static str = "0.8-pre";
pub fn maybe_inject_libstd_ref(sess: Session, crate: @ast::crate)
-> @ast::crate {
@ -41,7 +41,7 @@ fn inject_libstd_ref(sess: Session, crate: &ast::crate) -> @ast::crate {
let precursor = @fold::AstFoldFns {
fold_crate: |crate, span, fld| {
let n1 = sess.next_node_id();
let vi1 = @ast::view_item {
let vi1 = ast::view_item {
node: ast::view_item_extern_mod(
sess.ident_of("std"), ~[], n1),
attrs: ~[
@ -75,7 +75,7 @@ fn inject_libstd_ref(sess: Session, crate: &ast::crate) -> @ast::crate {
fold_mod: |module, fld| {
let n2 = sess.next_node_id();
let prelude_path = @ast::Path {
let prelude_path = ast::Path {
span: dummy_sp(),
global: false,
idents: ~[
@ -87,7 +87,7 @@ fn inject_libstd_ref(sess: Session, crate: &ast::crate) -> @ast::crate {
};
let vp = @spanned(ast::view_path_glob(prelude_path, n2));
let vi2 = @ast::view_item { node: ast::view_item_use(~[vp]),
let vi2 = ast::view_item { node: ast::view_item_use(~[vp]),
attrs: ~[],
vis: ast::private,
span: dummy_sp() };

View file

@ -17,7 +17,7 @@ use front::config;
use std::vec;
use syntax::ast_util::*;
use syntax::attr;
use syntax::codemap::{dummy_sp, span, ExpandedFrom, CallInfo, NameAndSpan};
use syntax::codemap::{dummy_sp, span, ExpnInfo, NameAndSpan};
use syntax::codemap;
use syntax::ext::base::ExtCtxt;
use syntax::fold;
@ -72,13 +72,13 @@ fn generate_test_harness(sess: session::Session,
};
let ext_cx = cx.ext_cx;
ext_cx.bt_push(ExpandedFrom(CallInfo {
ext_cx.bt_push(ExpnInfo {
call_site: dummy_sp(),
callee: NameAndSpan {
name: @"test",
span: None
}
}));
});
let precursor = @fold::AstFoldFns {
fold_crate: fold::wrap(|a,b| fold_crate(cx, a, b) ),
@ -109,9 +109,11 @@ fn fold_mod(cx: @mut TestCtxt,
fn nomain(cx: @mut TestCtxt, item: @ast::item) -> @ast::item {
if !*cx.sess.building_library {
@ast::item{attrs: item.attrs.filtered(|attr| {
"main" != attr::get_attr_name(attr)
}),.. copy *item}
@ast::item{
attrs: do item.attrs.iter().filter_map |attr| {
if "main" != attr::get_attr_name(attr) {Some(*attr)} else {None}
}.collect(),
.. copy *item}
} else { item }
}
@ -229,10 +231,10 @@ fn is_ignored(cx: @mut TestCtxt, i: @ast::item) -> bool {
let ignoreattrs = attr::find_attrs_by_name(i.attrs, "ignore");
let ignoreitems = attr::attr_metas(ignoreattrs);
return if !ignoreitems.is_empty() {
let cfg_metas =
vec::concat(
vec::filter_map(ignoreitems,
|i| attr::get_meta_item_list(i)));
let cfg_metas = ignoreitems.consume_iter()
.filter_map(|i| attr::get_meta_item_list(i))
.collect::<~[~[@ast::meta_item]]>()
.concat_vec();
config::metas_in_cfg(/*bad*/copy cx.crate.node.config, cfg_metas)
} else {
false
@ -270,8 +272,8 @@ mod __test {
*/
fn mk_std(cx: &TestCtxt) -> @ast::view_item {
let vers = ast::lit_str(@"0.7");
fn mk_std(cx: &TestCtxt) -> ast::view_item {
let vers = ast::lit_str(@"0.8-pre");
let vers = nospan(vers);
let mi = ast::meta_name_value(@"vers", vers);
let mi = nospan(mi);
@ -285,13 +287,12 @@ fn mk_std(cx: &TestCtxt) -> @ast::view_item {
ast::view_item_extern_mod(id_std, ~[@mi],
cx.sess.next_node_id())
};
let vi = ast::view_item {
ast::view_item {
node: vi,
attrs: ~[],
vis: ast::public,
span: dummy_sp()
};
return @vi;
}
}
fn mk_test_module(cx: &TestCtxt) -> @ast::item {
@ -308,7 +309,7 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::item {
let mainfn = (quote_item!(
pub fn main() {
#[main];
extra::test::test_main_static(::std::os::args(), tests);
extra::test::test_main_static(::std::os::args(), TESTS);
}
)).get();
@ -341,16 +342,16 @@ fn nospan<T:Copy>(t: T) -> codemap::spanned<T> {
codemap::spanned { node: t, span: dummy_sp() }
}
fn path_node(ids: ~[ast::ident]) -> @ast::Path {
@ast::Path { span: dummy_sp(),
fn path_node(ids: ~[ast::ident]) -> ast::Path {
ast::Path { span: dummy_sp(),
global: false,
idents: ids,
rp: None,
types: ~[] }
}
fn path_node_global(ids: ~[ast::ident]) -> @ast::Path {
@ast::Path { span: dummy_sp(),
fn path_node_global(ids: ~[ast::ident]) -> ast::Path {
ast::Path { span: dummy_sp(),
global: true,
idents: ids,
rp: None,
@ -365,7 +366,7 @@ fn mk_tests(cx: &TestCtxt) -> @ast::item {
let test_descs = mk_test_descs(cx);
(quote_item!(
pub static tests : &'static [self::extra::test::TestDescAndFn] =
pub static TESTS : &'static [self::extra::test::TestDescAndFn] =
$test_descs
;
)).get()

View file

@ -136,7 +136,7 @@ fn visit_crate(e: &Env, c: &ast::crate) {
}
}
fn visit_view_item(e: @mut Env, i: @ast::view_item) {
fn visit_view_item(e: @mut Env, i: &ast::view_item) {
match i.node {
ast::view_item_extern_mod(ident, ref meta_items, id) => {
debug!("resolving extern mod stmt. ident: %?, meta: %?",

View file

@ -102,7 +102,7 @@ pub fn get_used_crate_files(cstore: &CStore) -> ~[Path] {
pub fn add_used_library(cstore: &mut CStore, lib: @str) -> bool {
assert!(!lib.is_empty());
if cstore.used_libraries.iter().any_(|x| x == &lib) { return false; }
if cstore.used_libraries.iter().any(|x| x == &lib) { return false; }
cstore.used_libraries.push(lib);
true
}

View file

@ -1141,7 +1141,7 @@ fn list_crate_attributes(intr: @ident_interner, md: ebml::Doc, hash: &str,
let r = get_attributes(md);
for r.iter().advance |attr| {
out.write_str(fmt!("%s\n", pprust::attribute_to_str(*attr, intr)));
out.write_str(fmt!("%s\n", pprust::attribute_to_str(attr, intr)));
}
out.write_str("\n\n");

View file

@ -1003,7 +1003,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
index);
}
}
item_impl(ref generics, opt_trait, ty, ref methods) => {
item_impl(ref generics, ref opt_trait, ref ty, ref methods) => {
add_to_index();
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(item.id));
@ -1014,7 +1014,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
encode_name(ecx, ebml_w, item.ident);
encode_attributes(ebml_w, item.attrs);
match ty.node {
ast::ty_path(path, bounds, _) if path.idents.len() == 1 => {
ast::ty_path(ref path, ref bounds, _) if path.idents.len() == 1 => {
assert!(bounds.is_none());
encode_impl_type_basename(ecx, ebml_w,
ast_util::path_to_ident(path));

View file

@ -291,16 +291,16 @@ fn encode_ast(ebml_w: &mut writer::Encoder, item: ast::inlined_item) {
// inlined items.
fn simplify_ast(ii: &ast::inlined_item) -> ast::inlined_item {
fn drop_nested_items(blk: &ast::blk_, fld: @fold::ast_fold) -> ast::blk_ {
let stmts_sans_items = do blk.stmts.filtered |stmt| {
let stmts_sans_items = do blk.stmts.iter().filter_map |stmt| {
match stmt.node {
ast::stmt_expr(_, _) | ast::stmt_semi(_, _) |
ast::stmt_decl(@codemap::spanned { node: ast::decl_local(_),
span: _}, _) => true,
ast::stmt_decl(@codemap::spanned { node: ast::decl_item(_),
span: _}, _) => false,
ast::stmt_decl(@codemap::spanned { node: ast::decl_local(_), span: _}, _)
=> Some(*stmt),
ast::stmt_decl(@codemap::spanned { node: ast::decl_item(_), span: _}, _)
=> None,
ast::stmt_mac(*) => fail!("unexpanded macro in astencode")
}
};
}.collect();
let blk_sans_items = ast::blk_ {
view_items: ~[], // I don't know if we need the view_items here,
// but it doesn't break tests!

View file

@ -538,12 +538,13 @@ impl BorrowckCtxt {
move_data::MoveExpr(expr) => {
let expr_ty = ty::expr_ty_adjusted(self.tcx, expr);
let suggestion = move_suggestion(self.tcx, expr_ty,
"moved by default (use `copy` to override)");
self.tcx.sess.span_note(
expr.span,
fmt!("`%s` moved here because it has type `%s`, \
which is moved by default (use `copy` to override)",
fmt!("`%s` moved here because it has type `%s`, which is %s",
self.loan_path_to_str(moved_lp),
expr_ty.user_string(self.tcx)));
expr_ty.user_string(self.tcx), suggestion));
}
move_data::MovePat(pat) => {
@ -557,12 +558,28 @@ impl BorrowckCtxt {
}
move_data::Captured(expr) => {
let expr_ty = ty::expr_ty_adjusted(self.tcx, expr);
let suggestion = move_suggestion(self.tcx, expr_ty,
"moved by default (make a copy and \
capture that instead to override)");
self.tcx.sess.span_note(
expr.span,
fmt!("`%s` moved into closure environment here \
because its type is moved by default \
(make a copy and capture that instead to override)",
self.loan_path_to_str(moved_lp)));
fmt!("`%s` moved into closure environment here because it \
has type `%s`, which is %s",
self.loan_path_to_str(moved_lp),
expr_ty.user_string(self.tcx), suggestion));
}
}
fn move_suggestion(tcx: ty::ctxt, ty: ty::t, default_msg: &'static str)
-> &'static str {
match ty::get(ty).sty {
ty::ty_closure(ref cty) if cty.sigil == ast::BorrowedSigil =>
"a non-copyable stack closure (capture it in a new closure, \
e.g. `|x| f(x)`, to override)",
_ if !ty::type_is_copyable(tcx, ty) =>
"non-copyable (perhaps you meant to use clone()?)",
_ => default_msg,
}
}
}

View file

@ -506,7 +506,7 @@ impl FlowedMoveData {
for self.dfcx_moves.each_bit_on_entry_frozen(id) |index| {
let move = &self.move_data.moves[index];
let moved_path = move.path;
if base_indices.iter().any_(|x| x == &moved_path) {
if base_indices.iter().any(|x| x == &moved_path) {
// Scenario 1 or 2: `loan_path` or some base path of
// `loan_path` was moved.
if !f(move, self.move_data.path(moved_path).loan_path) {
@ -535,7 +535,7 @@ impl FlowedMoveData {
-> bool {
//! True if `id` is the id of the LHS of an assignment
self.move_data.assignee_ids.iter().any_(|x| x == &id)
self.move_data.assignee_ids.iter().any(|x| x == &id)
}
pub fn each_assignment_of(&self,

View file

@ -112,7 +112,7 @@ pub fn check_expr(sess: Session,
"` in a constant expression");
}
}
expr_path(pth) => {
expr_path(ref pth) => {
// NB: In the future you might wish to relax this slightly
// to handle on-demand instantiation of functions via
// foo::<bar> in a const. Currently that is only done on
@ -224,7 +224,7 @@ pub fn check_item_recursion(sess: Session,
(visitor.visit_item)(it, (env, visitor));
fn visit_item(it: @item, (env, v): (env, visit::vt<env>)) {
if env.idstack.iter().any_(|x| x == &(it.id)) {
if env.idstack.iter().any(|x| x == &(it.id)) {
env.sess.span_fatal(env.root_it.span, "recursive constant");
}
env.idstack.push(it.id);

View file

@ -95,7 +95,7 @@ pub fn check_expr(cx: @MatchCheckCtxt, ex: @expr, (s, v): ((), visit::vt<()>)) {
}
_ => { /* We assume only enum types can be uninhabited */ }
}
let arms = vec::concat(arms.filter_mapped(unguarded_pat));
let arms = arms.iter().filter_map(unguarded_pat).collect::<~[~[@pat]]>().concat_vec();
if arms.is_empty() {
cx.tcx.sess.span_err(ex.span, "non-exhaustive patterns");
} else {
@ -265,7 +265,7 @@ pub fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@pat]) -> useful {
}
Some(ref ctor) => {
match is_useful(cx,
&m.filter_mapped(|r| default(cx, *r)),
&m.iter().filter_map(|r| default(cx, *r)).collect::<matrix>(),
v.tail()) {
useful_ => useful(left_ty, /*bad*/copy *ctor),
ref u => (/*bad*/copy *u)
@ -287,7 +287,7 @@ pub fn is_useful_specialized(cx: &MatchCheckCtxt,
arity: uint,
lty: ty::t)
-> useful {
let ms = m.filter_mapped(|r| specialize(cx, *r, &ctor, arity, lty));
let ms = m.iter().filter_map(|r| specialize(cx, *r, &ctor, arity, lty)).collect::<matrix>();
let could_be_useful = is_useful(
cx, &ms, specialize(cx, v, &ctor, arity, lty).get());
match could_be_useful {
@ -371,7 +371,7 @@ pub fn missing_ctor(cx: &MatchCheckCtxt,
let variants = ty::enum_variants(cx.tcx, eid);
if found.len() != (*variants).len() {
for (*variants).iter().advance |v| {
if !found.iter().any_(|x| x == &(variant(v.id))) {
if !found.iter().any(|x| x == &(variant(v.id))) {
return Some(variant(v.id));
}
}
@ -397,14 +397,14 @@ pub fn missing_ctor(cx: &MatchCheckCtxt,
ty::ty_unboxed_vec(*) | ty::ty_evec(*) => {
// Find the lengths and slices of all vector patterns.
let vec_pat_lens = do m.filter_mapped |r| {
let vec_pat_lens = do m.iter().filter_map |r| {
match r[0].node {
pat_vec(ref before, ref slice, ref after) => {
Some((before.len() + after.len(), slice.is_some()))
}
_ => None
}
};
}.collect::<~[(uint, bool)]>();
// Sort them by length such that for patterns of the same length,
// those with a destructured slice come first.
@ -805,13 +805,13 @@ pub fn is_refutable(cx: &MatchCheckCtxt, pat: &pat) -> bool {
}
pat_lit(_) | pat_range(_, _) => { true }
pat_struct(_, ref fields, _) => {
fields.iter().any_(|f| is_refutable(cx, f.pat))
fields.iter().any(|f| is_refutable(cx, f.pat))
}
pat_tup(ref elts) => {
elts.iter().any_(|elt| is_refutable(cx, *elt))
elts.iter().any(|elt| is_refutable(cx, *elt))
}
pat_enum(_, Some(ref args)) => {
args.iter().any_(|a| is_refutable(cx, *a))
args.iter().any(|a| is_refutable(cx, *a))
}
pat_enum(_,_) => { false }
pat_vec(*) => { true }

View file

@ -341,14 +341,14 @@ impl<O:DataFlowOperator+Copy+'static> DataFlowContext<O> {
let entry_str = bits_to_str(on_entry);
let gens = self.gens.slice(start, end);
let gens_str = if gens.iter().any_(|&u| u != 0) {
let gens_str = if gens.iter().any(|&u| u != 0) {
fmt!(" gen: %s", bits_to_str(gens))
} else {
~""
};
let kills = self.kills.slice(start, end);
let kills_str = if kills.iter().any_(|&u| u != 0) {
let kills_str = if kills.iter().any(|&u| u != 0) {
fmt!(" kill: %s", bits_to_str(kills))
} else {
~""
@ -643,7 +643,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
self.walk_opt_expr(o_e, in_out, loop_scopes);
// is this a return from a `for`-loop closure?
match loop_scopes.iter().position_(|s| s.loop_kind == ForLoop) {
match loop_scopes.iter().position(|s| s.loop_kind == ForLoop) {
Some(i) => {
// if so, add the in_out bits to the state
// upon exit. Remember that we cannot count
@ -916,7 +916,7 @@ impl<'self, O:DataFlowOperator> PropagationContext<'self, O> {
Some(_) => {
match self.tcx().def_map.find(&expr.id) {
Some(&ast::def_label(loop_id)) => {
match loop_scopes.iter().position_(|l| l.loop_id == loop_id) {
match loop_scopes.iter().position(|l| l.loop_id == loop_id) {
Some(i) => i,
None => {
self.tcx().sess.span_bug(

View file

@ -117,7 +117,7 @@ fn check_item(item: @item, (cx, visitor): (Context, visit::vt<Context>)) {
// If this is a destructor, check kinds.
if !attrs_contains_name(item.attrs, "unsafe_destructor") {
match item.node {
item_impl(_, Some(trait_ref), self_type, _) => {
item_impl(_, Some(ref trait_ref), ref self_type, _) => {
match cx.tcx.def_map.find(&trait_ref.ref_id) {
None => cx.tcx.sess.bug("trait ref not in def map!"),
Some(&trait_def) => {
@ -125,7 +125,7 @@ fn check_item(item: @item, (cx, visitor): (Context, visit::vt<Context>)) {
if cx.tcx.lang_items.drop_trait() == trait_def_id {
// Yes, it's a destructor.
match self_type.node {
ty_path(_, bounds, path_node_id) => {
ty_path(_, ref bounds, path_node_id) => {
assert!(bounds.is_none());
let struct_def = cx.tcx.def_map.get_copy(
&path_node_id);
@ -198,8 +198,9 @@ fn with_appropriate_checker(cx: Context, id: node_id,
fn check_for_bare(cx: Context, fv: @freevar_entry) {
cx.tcx.sess.span_err(
fv.span,
"attempted dynamic environment capture");
}
"can't capture dynamic environment in a fn item; \
use the || { ... } closure form instead");
} // same check is done in resolve.rs, but shouldn't be done
let fty = ty::node_id_to_type(cx.tcx, id);
match ty::get(fty).sty {
@ -320,7 +321,7 @@ pub fn check_expr(e: @expr, (cx, v): (Context, visit::vt<Context>)) {
visit::visit_expr(e, (cx, v));
}
fn check_ty(aty: @Ty, (cx, v): (Context, visit::vt<Context>)) {
fn check_ty(aty: &Ty, (cx, v): (Context, visit::vt<Context>)) {
match aty.node {
ty_path(_, _, id) => {
let r = cx.tcx.node_type_substs.find(&id);
@ -535,7 +536,7 @@ pub fn check_cast_for_escaping_regions(
// Check, based on the region associated with the trait, whether it can
// possibly escape the enclosing fn item (note that all type parameters
// must have been declared on the enclosing fn item).
if target_regions.iter().any_(|r| is_re_scope(*r)) {
if target_regions.iter().any(|r| is_re_scope(*r)) {
return; /* case (1) */
}
@ -550,7 +551,7 @@ pub fn check_cast_for_escaping_regions(
|_r| {
// FIXME(#5723) --- turn this check on once &Objects are usable
//
// if !target_regions.iter().any_(|t_r| is_subregion_of(cx, *t_r, r)) {
// if !target_regions.iter().any(|t_r| is_subregion_of(cx, *t_r, r)) {
// cx.tcx.sess.span_err(
// source.span,
// fmt!("source contains borrowed pointer with lifetime \
@ -564,7 +565,7 @@ pub fn check_cast_for_escaping_regions(
|ty| {
match ty::get(ty).sty {
ty::ty_param(source_param) => {
if target_params.iter().any_(|x| x == &source_param) {
if target_params.iter().any(|x| x == &source_param) {
/* case (2) */
} else {
check_durable(cx.tcx, ty, source.span); /* case (3) */

View file

@ -63,33 +63,34 @@ pub enum LangItem {
FailFnLangItem, // 24
FailBoundsCheckFnLangItem, // 25
ExchangeMallocFnLangItem, // 26
ClosureExchangeMallocFnLangItem, // 27
ExchangeFreeFnLangItem, // 28
MallocFnLangItem, // 29
FreeFnLangItem, // 30
BorrowAsImmFnLangItem, // 31
BorrowAsMutFnLangItem, // 32
ReturnToMutFnLangItem, // 33
CheckNotBorrowedFnLangItem, // 34
StrDupUniqFnLangItem, // 35
RecordBorrowFnLangItem, // 36
UnrecordBorrowFnLangItem, // 37
VectorExchangeMallocFnLangItem, // 27
ClosureExchangeMallocFnLangItem, // 28
ExchangeFreeFnLangItem, // 29
MallocFnLangItem, // 30
FreeFnLangItem, // 31
BorrowAsImmFnLangItem, // 32
BorrowAsMutFnLangItem, // 33
ReturnToMutFnLangItem, // 34
CheckNotBorrowedFnLangItem, // 35
StrDupUniqFnLangItem, // 36
RecordBorrowFnLangItem, // 37
UnrecordBorrowFnLangItem, // 38
StartFnLangItem, // 38
StartFnLangItem, // 39
TyDescStructLangItem, // 39
TyVisitorTraitLangItem, // 40
OpaqueStructLangItem, // 41
TyDescStructLangItem, // 40
TyVisitorTraitLangItem, // 41
OpaqueStructLangItem, // 42
}
pub struct LanguageItems {
items: [Option<def_id>, ..42]
items: [Option<def_id>, ..43]
}
impl LanguageItems {
pub fn new() -> LanguageItems {
LanguageItems {
items: [ None, ..42 ]
items: [ None, ..43 ]
}
}
@ -129,23 +130,24 @@ impl LanguageItems {
24 => "fail_",
25 => "fail_bounds_check",
26 => "exchange_malloc",
27 => "closure_exchange_malloc",
28 => "exchange_free",
29 => "malloc",
30 => "free",
31 => "borrow_as_imm",
32 => "borrow_as_mut",
33 => "return_to_mut",
34 => "check_not_borrowed",
35 => "strdup_uniq",
36 => "record_borrow",
37 => "unrecord_borrow",
27 => "vector_exchange_malloc",
28 => "closure_exchange_malloc",
29 => "exchange_free",
30 => "malloc",
31 => "free",
32 => "borrow_as_imm",
33 => "borrow_as_mut",
34 => "return_to_mut",
35 => "check_not_borrowed",
36 => "strdup_uniq",
37 => "record_borrow",
38 => "unrecord_borrow",
38 => "start",
39 => "start",
39 => "ty_desc",
40 => "ty_visitor",
41 => "opaque",
40 => "ty_desc",
41 => "ty_visitor",
42 => "opaque",
_ => "???"
}
@ -238,6 +240,9 @@ impl LanguageItems {
pub fn exchange_malloc_fn(&self) -> def_id {
self.items[ExchangeMallocFnLangItem as uint].get()
}
pub fn vector_exchange_malloc_fn(&self) -> def_id {
self.items[VectorExchangeMallocFnLangItem as uint].get()
}
pub fn closure_exchange_malloc_fn(&self) -> def_id {
self.items[ClosureExchangeMallocFnLangItem as uint].get()
}
@ -331,6 +336,7 @@ impl<'self> LanguageItemCollector<'self> {
item_refs.insert(@"fail_bounds_check",
FailBoundsCheckFnLangItem as uint);
item_refs.insert(@"exchange_malloc", ExchangeMallocFnLangItem as uint);
item_refs.insert(@"vector_exchange_malloc", VectorExchangeMallocFnLangItem as uint);
item_refs.insert(@"closure_exchange_malloc", ClosureExchangeMallocFnLangItem as uint);
item_refs.insert(@"exchange_free", ExchangeFreeFnLangItem as uint);
item_refs.insert(@"malloc", MallocFnLangItem as uint);

View file

@ -14,7 +14,6 @@ use middle::ty;
use middle::pat_util;
use util::ppaux::{ty_to_str};
use std::char;
use std::cmp;
use std::hashmap::HashMap;
use std::i16;
@ -25,7 +24,6 @@ use std::u16;
use std::u32;
use std::u64;
use std::u8;
use std::vec;
use extra::smallintmap::SmallIntMap;
use syntax::attr;
use syntax::codemap::span;
@ -80,6 +78,7 @@ pub enum lint {
non_implicitly_copyable_typarams,
deprecated_pattern,
non_camel_case_types,
non_uppercase_statics,
type_limits,
default_methods,
unused_unsafe,
@ -198,6 +197,13 @@ static lint_table: &'static [(&'static str, LintSpec)] = &[
default: allow
}),
("non_uppercase_statics",
LintSpec {
lint: non_uppercase_statics,
desc: "static constants should have uppercase identifiers",
default: warn
}),
("managed_heap_memory",
LintSpec {
lint: managed_heap_memory,
@ -741,9 +747,9 @@ fn check_item_ctypes(cx: &Context, it: &ast::item) {
fn check_foreign_fn(cx: &Context, decl: &ast::fn_decl) {
for decl.inputs.iter().advance |in| {
check_ty(cx, in.ty);
check_ty(cx, &in.ty);
}
check_ty(cx, decl.output)
check_ty(cx, &decl.output)
}
match it.node {
@ -753,7 +759,7 @@ fn check_item_ctypes(cx: &Context, it: &ast::item) {
ast::foreign_item_fn(ref decl, _, _) => {
check_foreign_fn(cx, decl);
}
ast::foreign_item_static(t, _) => { check_ty(cx, t); }
ast::foreign_item_static(ref t, _) => { check_ty(cx, t); }
}
}
}
@ -854,7 +860,10 @@ fn check_item_non_camel_case_types(cx: &Context, it: &ast::item) {
let ident = cx.sess.str_of(ident);
assert!(!ident.is_empty());
let ident = ident.trim_chars(&'_');
char::is_uppercase(ident.char_at(0)) &&
// start with a non-lowercase letter rather than non-uppercase
// ones (some scripts don't have a concept of upper/lowercase)
!ident.char_at(0).is_lowercase() &&
!ident.contains_char('_')
}
@ -881,6 +890,23 @@ fn check_item_non_camel_case_types(cx: &Context, it: &ast::item) {
}
}
fn check_item_non_uppercase_statics(cx: &Context, it: &ast::item) {
match it.node {
// only check static constants
ast::item_static(_, ast::m_imm, _) => {
let s = cx.tcx.sess.str_of(it.ident);
// check for lowercase letters rather than non-uppercase
// ones (some scripts don't have a concept of
// upper/lowercase)
if s.iter().any(|c| c.is_lowercase()) {
cx.span_lint(non_uppercase_statics, it.span,
"static constant should have an uppercase identifier");
}
}
_ => {}
}
}
fn lint_unused_unsafe() -> visit::vt<@mut Context> {
visit::mk_vt(@visit::Visitor {
visit_expr: |e, (cx, vt): (@mut Context, visit::vt<@mut Context>)| {
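The new non_uppercase_statics lint warns only when the identifier actually contains lowercase letters, so identifiers in scripts with no case distinction pass untouched. A standalone sketch of that predicate in present-day Rust (not the compiler's lint API); a name like lint_table above contains lowercase letters and would be flagged:

    fn has_lowercase(ident: &str) -> bool {
        // Flag real lowercase letters only; caseless scripts yield false.
        ident.chars().any(|c| c.is_lowercase())
    }

    fn main() {
        assert!(has_lowercase("lint_table"));   // would be warned about
        assert!(!has_lowercase("MAX_DEPTH"));   // accepted
        assert!(!has_lowercase("数值"));         // no upper/lower concept, accepted
    }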
@ -940,10 +966,6 @@ fn lint_unused_mut() -> visit::vt<@mut Context> {
visit_fn_decl(cx, &tm.decl);
visit::visit_ty_method(tm, (cx, vt));
},
visit_struct_method: |sm, (cx, vt)| {
visit_fn_decl(cx, &sm.decl);
visit::visit_struct_method(sm, (cx, vt));
},
visit_trait_method: |tm, (cx, vt)| {
match *tm {
ast::required(ref tm) => visit_fn_decl(cx, &tm.decl),
@ -960,7 +982,7 @@ fn lint_session() -> visit::vt<@mut Context> {
match cx.tcx.sess.lints.pop(&id) {
None => {},
Some(l) => {
do vec::consume(l) |_, (lint, span, msg)| {
for l.consume_iter().advance |(lint, span, msg)| {
cx.span_lint(lint, span, msg)
}
}
@ -1016,21 +1038,13 @@ fn lint_missing_doc() -> visit::vt<@mut Context> {
// If we have doc(hidden), nothing to do
if cx.doc_hidden { return }
// If we're documented, nothing to do
if attrs.iter().any_(|a| a.node.is_sugared_doc) { return }
if attrs.iter().any(|a| a.node.is_sugared_doc) { return }
// otherwise, warn!
cx.span_lint(missing_doc, sp, msg);
}
visit::mk_vt(@visit::Visitor {
visit_struct_method: |m, (cx, vt)| {
if m.vis == ast::public {
check_attrs(cx, m.attrs, m.span,
"missing documentation for a method");
}
visit::visit_struct_method(m, (cx, vt));
},
visit_ty_method: |m, (cx, vt)| {
// All ty_method objects are linted about because they're part of a
// trait (no visibility)
@ -1143,6 +1157,7 @@ pub fn check_crate(tcx: ty::ctxt, crate: @ast::crate) {
}
check_item_ctypes(cx, it);
check_item_non_camel_case_types(cx, it);
check_item_non_uppercase_statics(cx, it);
check_item_default_methods(cx, it);
check_item_heap(cx, it);

View file

@ -390,8 +390,8 @@ impl VisitContext {
// any fields which (1) were not explicitly
// specified and (2) have a type that
// moves-by-default:
let consume_with = with_fields.iter().any_(|tf| {
!fields.iter().any_(|f| f.node.ident == tf.ident) &&
let consume_with = with_fields.iter().any(|tf| {
!fields.iter().any(|f| f.node.ident == tf.ident) &&
ty::type_moves_by_default(self.tcx, tf.mt.ty)
});
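The check above asks, for a struct-update expression, whether the ..base part must be consumed: it must, when some field that was not written out explicitly has a type that moves by default. Roughly the same situation in present-day Rust, purely as an illustration with invented types:

    struct Config {
        name: String, // moves by default
        retries: u32, // Copy
    }

    fn main() {
        let base = Config { name: String::from("default"), retries: 3 };
        // `name` is not respecified and String is a move type,
        // so `..base` moves it out of `base` here.
        let custom = Config { retries: 5, ..base };
        println!("{} x{}", custom.name, custom.retries);
    }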

View file

@ -71,10 +71,10 @@ pub fn pat_is_binding_or_wild(dm: resolve::DefMap, pat: @pat) -> bool {
}
pub fn pat_bindings(dm: resolve::DefMap, pat: @pat,
it: &fn(binding_mode, node_id, span, @Path)) {
it: &fn(binding_mode, node_id, span, &Path)) {
for walk_pat(pat) |p| {
match p.node {
pat_ident(binding_mode, pth, _) if pat_is_binding(dm, p) => {
pat_ident(binding_mode, ref pth, _) if pat_is_binding(dm, p) => {
it(binding_mode, p.id, p.span, pth);
}
_ => {}

View file

@ -245,13 +245,21 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
method_id: def_id,
name: &ident) =
|span, method_id, name| {
// If the method is a default method, we need to use the def_id of
// the default implementation.
// Having to do this is really unfortunate.
let method_id = match tcx.provided_method_sources.find(&method_id) {
None => method_id,
Some(source) => source.method_id
};
if method_id.crate == local_crate {
let is_private = method_is_private(span, method_id.node);
let container_id = local_method_container_id(span,
method_id.node);
if is_private &&
(container_id.crate != local_crate ||
!privileged_items.iter().any_(|x| x == &(container_id.node))) {
!privileged_items.iter().any(|x| x == &(container_id.node))) {
tcx.sess.span_err(span,
fmt!("method `%s` is private",
token::ident_to_str(name)));
@ -268,7 +276,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
};
// Checks that a private path is in scope.
let check_path: @fn(span: span, def: def, path: @Path) =
let check_path: @fn(span: span, def: def, path: &Path) =
|span, def, path| {
debug!("checking path");
match def {
@ -279,7 +287,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
def_fn(def_id, _) => {
if def_id.crate == local_crate {
if local_item_is_private(span, def_id.node) &&
!privileged_items.iter().any_(|x| x == &def_id.node) {
!privileged_items.iter().any(|x| x == &def_id.node) {
tcx.sess.span_err(span,
fmt!("function `%s` is private",
token::ident_to_str(path.idents.last())));
@ -324,7 +332,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
provided(method)
if method.vis == private &&
!privileged_items.iter()
.any_(|x| x == &(trait_id.node)) => {
.any(|x| x == &(trait_id.node)) => {
tcx.sess.span_err(span,
fmt!("method `%s` is private",
token::ident_to_str(&method
@ -409,7 +417,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
base))).sty {
ty_struct(id, _)
if id.crate != local_crate || !privileged_items.iter()
.any_(|x| x == &(id.node)) => {
.any(|x| x == &(id.node)) => {
debug!("(privacy checking) checking field access");
check_field(expr.span, id, ident);
}
@ -422,7 +430,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
base))).sty {
ty_struct(id, _)
if id.crate != local_crate ||
!privileged_items.iter().any_(|x| x == &(id.node)) => {
!privileged_items.iter().any(|x| x == &(id.node)) => {
match method_map.find(&expr.id) {
None => {
tcx.sess.span_bug(expr.span,
@ -441,14 +449,14 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
_ => {}
}
}
expr_path(path) => {
expr_path(ref path) => {
check_path(expr.span, tcx.def_map.get_copy(&expr.id), path);
}
expr_struct(_, ref fields, _) => {
match ty::get(ty::expr_ty(tcx, expr)).sty {
ty_struct(id, _) => {
if id.crate != local_crate ||
!privileged_items.iter().any_(|x| x == &(id.node)) {
!privileged_items.iter().any(|x| x == &(id.node)) {
for (*fields).iter().advance |field| {
debug!("(privacy checking) checking \
field in struct literal");
@ -459,7 +467,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
}
ty_enum(id, _) => {
if id.crate != local_crate ||
!privileged_items.iter().any_(|x| x == &(id.node)) {
!privileged_items.iter().any(|x| x == &(id.node)) {
match tcx.def_map.get_copy(&expr.id) {
def_variant(_, variant_id) => {
for (*fields).iter().advance |field| {
@ -496,7 +504,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
match ty::get(ty::expr_ty(tcx, operand)).sty {
ty_enum(id, _) => {
if id.crate != local_crate ||
!privileged_items.iter().any_(|x| x == &(id.node)) {
!privileged_items.iter().any(|x| x == &(id.node)) {
check_variant(expr.span, id);
}
}
@ -514,7 +522,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
match ty::get(ty::pat_ty(tcx, pattern)).sty {
ty_struct(id, _) => {
if id.crate != local_crate ||
!privileged_items.iter().any_(|x| x == &(id.node)) {
!privileged_items.iter().any(|x| x == &(id.node)) {
for fields.iter().advance |field| {
debug!("(privacy checking) checking \
struct pattern");
@ -525,7 +533,7 @@ pub fn check_crate<'mm>(tcx: ty::ctxt,
}
ty_enum(enum_id, _) => {
if enum_id.crate != local_crate ||
!privileged_items.iter().any_(|x| x == &enum_id.node) {
!privileged_items.iter().any(|x| x == &enum_id.node) {
match tcx.def_map.find(&pattern.id) {
Some(&def_variant(_, variant_id)) => {
for fields.iter().advance |field| {

View file

@ -141,7 +141,7 @@ impl ReachableContext {
}
}
}
item_impl(ref generics, trait_ref, _, ref methods) => {
item_impl(ref generics, ref trait_ref, _, ref methods) => {
// XXX(pcwalton): We conservatively assume any methods
// on a trait implementation are reachable, when this
// is not the case. We could be more precise by only

View file

@ -78,7 +78,7 @@ impl RegionMaps {
pub fn relate_free_regions(&mut self, sub: FreeRegion, sup: FreeRegion) {
match self.free_region_map.find_mut(&sub) {
Some(sups) => {
if !sups.iter().any_(|x| x == &sup) {
if !sups.iter().any(|x| x == &sup) {
sups.push(sup);
}
return;
@ -202,7 +202,7 @@ impl RegionMaps {
return true;
}
if !queue.iter().any_(|x| x == parent) {
if !queue.iter().any(|x| x == parent) {
queue.push(*parent);
}
}
@ -612,7 +612,7 @@ impl DetermineRpCtxt {
ambient_variance: self.ambient_variance,
id: self.item_id
};
if !vec.iter().any_(|x| x == &dep) { vec.push(dep); }
if !vec.iter().any(|x| x == &dep) { vec.push(dep); }
}
// Determines whether a reference to a region that appears in the
@ -651,18 +651,18 @@ impl DetermineRpCtxt {
// with &self type, &self is also bound. We detect those last two
// cases via flags (anon_implies_rp and self_implies_rp) that are
// true when the anon or self region implies RP.
pub fn region_is_relevant(&self, r: Option<@ast::Lifetime>) -> bool {
pub fn region_is_relevant(&self, r: &Option<ast::Lifetime>) -> bool {
match r {
None => {
&None => {
self.anon_implies_rp
}
Some(ref l) if l.ident == special_idents::statik => {
&Some(ref l) if l.ident == special_idents::statik => {
false
}
Some(ref l) if l.ident == special_idents::self_ => {
&Some(ref l) if l.ident == special_idents::self_ => {
true
}
Some(_) => {
&Some(_) => {
false
}
}
@ -713,10 +713,10 @@ fn determine_rp_in_fn(fk: &visit::fn_kind,
do cx.with(cx.item_id, false) {
do cx.with_ambient_variance(rv_contravariant) {
for decl.inputs.iter().advance |a| {
(visitor.visit_ty)(a.ty, (cx, visitor));
(visitor.visit_ty)(&a.ty, (cx, visitor));
}
}
(visitor.visit_ty)(decl.output, (cx, visitor));
(visitor.visit_ty)(&decl.output, (cx, visitor));
let generics = visit::generics_of_fn(fk);
(visitor.visit_generics)(&generics, (cx, visitor));
(visitor.visit_block)(body, (cx, visitor));
@ -731,7 +731,7 @@ fn determine_rp_in_ty_method(ty_m: &ast::ty_method,
}
}
fn determine_rp_in_ty(ty: @ast::Ty,
fn determine_rp_in_ty(ty: &ast::Ty,
(cx, visitor): (@mut DetermineRpCtxt,
visit::vt<@mut DetermineRpCtxt>)) {
// we are only interested in types that will require an item to
@ -747,7 +747,7 @@ fn determine_rp_in_ty(ty: @ast::Ty,
// locations)
let sess = cx.sess;
match ty.node {
ast::ty_rptr(r, _) => {
ast::ty_rptr(ref r, _) => {
debug!("referenced rptr type %s",
pprust::ty_to_str(ty, sess.intr()));
@ -762,7 +762,7 @@ fn determine_rp_in_ty(ty: @ast::Ty,
pprust::ty_to_str(ty, sess.intr()));
match f.region {
Some(_) => {
if cx.region_is_relevant(f.region) {
if cx.region_is_relevant(&f.region) {
let rv = cx.add_variance(rv_contravariant);
cx.add_rp(cx.item_id, rv)
}
@ -784,13 +784,13 @@ fn determine_rp_in_ty(ty: @ast::Ty,
// then check whether it is region-parameterized and consider
// that as a direct dependency.
match ty.node {
ast::ty_path(path, _bounds, id) => {
ast::ty_path(ref path, _, id) => {
match cx.def_map.find(&id) {
Some(&ast::def_ty(did)) |
Some(&ast::def_trait(did)) |
Some(&ast::def_struct(did)) => {
if did.crate == ast::local_crate {
if cx.region_is_relevant(path.rp) {
if cx.region_is_relevant(&path.rp) {
cx.add_dep(did.node);
}
} else {
@ -800,7 +800,7 @@ fn determine_rp_in_ty(ty: @ast::Ty,
Some(variance) => {
debug!("reference to external, rp'd type %s",
pprust::ty_to_str(ty, sess.intr()));
if cx.region_is_relevant(path.rp) {
if cx.region_is_relevant(&path.rp) {
let rv = cx.add_variance(variance);
cx.add_rp(cx.item_id, rv)
}
@ -815,16 +815,16 @@ fn determine_rp_in_ty(ty: @ast::Ty,
}
match ty.node {
ast::ty_box(mt) | ast::ty_uniq(mt) | ast::ty_vec(mt) |
ast::ty_rptr(_, mt) | ast::ty_ptr(mt) => {
ast::ty_box(ref mt) | ast::ty_uniq(ref mt) | ast::ty_vec(ref mt) |
ast::ty_rptr(_, ref mt) | ast::ty_ptr(ref mt) => {
visit_mt(mt, (cx, visitor));
}
ast::ty_path(path, _bounds, _) => {
ast::ty_path(ref path, _, _) => {
// type parameters are---for now, anyway---always invariant
do cx.with_ambient_variance(rv_invariant) {
for path.types.iter().advance |tp| {
(visitor.visit_ty)(*tp, (cx, visitor));
(visitor.visit_ty)(tp, (cx, visitor));
}
}
}
@ -837,10 +837,10 @@ fn determine_rp_in_ty(ty: @ast::Ty,
// parameters are contravariant
do cx.with_ambient_variance(rv_contravariant) {
for decl.inputs.iter().advance |a| {
(visitor.visit_ty)(a.ty, (cx, visitor));
(visitor.visit_ty)(&a.ty, (cx, visitor));
}
}
(visitor.visit_ty)(decl.output, (cx, visitor));
(visitor.visit_ty)(&decl.output, (cx, visitor));
}
}
@ -849,7 +849,7 @@ fn determine_rp_in_ty(ty: @ast::Ty,
}
}
fn visit_mt(mt: ast::mt,
fn visit_mt(mt: &ast::mt,
(cx, visitor): (@mut DetermineRpCtxt,
visit::vt<@mut DetermineRpCtxt>)) {
// mutability is invariant

View file

@ -41,7 +41,6 @@ use syntax::opt_vec::OptVec;
use std::str;
use std::uint;
use std::vec;
use std::hashmap::{HashMap, HashSet};
use std::util;
@ -511,6 +510,13 @@ pub struct NameBindings {
value_def: Option<ValueNsDef>, //< Meaning in value namespace.
}
/// Ways in which a trait can be referenced
enum TraitReferenceType {
TraitImplementation, // impl SomeTrait for T { ... }
TraitDerivation, // trait T : SomeTrait { ... }
TraitBoundingTypeParameter, // fn f<T:SomeTrait>() { ... }
}
impl NameBindings {
/// Creates a new module in this set of name bindings.
pub fn define_module(@mut self,
@ -1227,7 +1233,7 @@ impl Resolver {
visit_item(item, (new_parent, visitor));
}
item_impl(_, None, ty, ref methods) => {
item_impl(_, None, ref ty, ref methods) => {
// If this implements an anonymous trait, then add all the
// methods within to a new module, if the type was defined
// within this module.
@ -1237,9 +1243,9 @@ impl Resolver {
// the same module that declared the type.
// Create the module and add all methods.
match *ty {
Ty {
node: ty_path(path, _, _),
match ty {
&Ty {
node: ty_path(ref path, _, _),
_
} if path.idents.len() == 1 => {
let name = path_to_ident(path);
@ -1307,7 +1313,7 @@ impl Resolver {
visit_item(item, (parent, visitor));
}
item_impl(_, Some(_), _ty, ref _methods) => {
item_impl(_, Some(_), _, _) => {
visit_item(item, (parent, visitor));
}
@ -1426,7 +1432,7 @@ impl Resolver {
/// Constructs the reduced graph for one 'view item'. View items consist
/// of imports and use directives.
pub fn build_reduced_graph_for_view_item(@mut self,
view_item: @view_item,
view_item: &view_item,
(parent, _):
(ReducedGraphParent,
vt<ReducedGraphParent>)) {
@ -1440,7 +1446,7 @@ impl Resolver {
let mut module_path = ~[];
match view_path.node {
view_path_simple(_, full_path, _) => {
view_path_simple(_, ref full_path, _) => {
let path_len = full_path.idents.len();
assert!(path_len != 0);
@ -1451,8 +1457,8 @@ impl Resolver {
}
}
view_path_glob(module_ident_path, _) |
view_path_list(module_ident_path, _, _) => {
view_path_glob(ref module_ident_path, _) |
view_path_list(ref module_ident_path, _, _) => {
for module_ident_path.idents.iter().advance |ident| {
module_path.push(*ident);
}
@ -1462,7 +1468,7 @@ impl Resolver {
// Build up the import directives.
let module_ = self.get_module_from_parent(parent);
match view_path.node {
view_path_simple(binding, full_path, id) => {
view_path_simple(binding, ref full_path, id) => {
let source_ident = *full_path.idents.last();
let subclass = @SingleImport(binding,
source_ident);
@ -3382,7 +3388,8 @@ impl Resolver {
self.session.span_err(
span,
"attempted dynamic environment-capture");
"can't capture dynamic environment in a fn item; \
use the || { ... } closure form instead");
} else {
// This was an attempt to use a type parameter outside
// its scope.
@ -3404,7 +3411,8 @@ impl Resolver {
self.session.span_err(
span,
"attempted dynamic environment-capture");
"can't capture dynamic environment in a fn item; \
use the || { ... } closure form instead");
} else {
// This was an attempt to use a type parameter outside
// its scope.
@ -3525,8 +3533,8 @@ impl Resolver {
}
item_impl(ref generics,
implemented_traits,
self_type,
ref implemented_traits,
ref self_type,
ref methods) => {
self.resolve_implementation(item.id,
generics,
@ -3553,23 +3561,7 @@ impl Resolver {
// Resolve derived traits.
for traits.iter().advance |trt| {
match self.resolve_path(trt.path, TypeNS, true,
visitor) {
None =>
self.session.span_err(trt.path.span,
"attempt to derive a \
nonexistent trait"),
Some(def) => {
// Write a mapping from the trait ID to the
// definition of the trait into the definition
// map.
debug!("(resolving trait) found trait def: \
%?", def);
self.record_def(trt.ref_id, def);
}
}
self.resolve_trait_reference(trt, visitor, TraitDerivation);
}
for (*methods).iter().advance |method| {
@ -3593,10 +3585,10 @@ impl Resolver {
visitor);
for ty_m.decl.inputs.iter().advance |argument| {
self.resolve_type(argument.ty, visitor);
self.resolve_type(&argument.ty, visitor);
}
self.resolve_type(ty_m.decl.output, visitor);
self.resolve_type(&ty_m.decl.output, visitor);
}
}
provided(m) => {
@ -3786,12 +3778,12 @@ impl Resolver {
None,
visitor);
self.resolve_type(argument.ty, visitor);
self.resolve_type(&argument.ty, visitor);
debug!("(resolving function) recorded argument");
}
self.resolve_type(declaration.output, visitor);
self.resolve_type(&declaration.output, visitor);
}
}
@ -3819,8 +3811,8 @@ impl Resolver {
type_parameter_bound: &TyParamBound,
visitor: ResolveVisitor) {
match *type_parameter_bound {
TraitTyParamBound(tref) => {
self.resolve_trait_reference(tref, visitor)
TraitTyParamBound(ref tref) => {
self.resolve_trait_reference(tref, visitor, TraitBoundingTypeParameter)
}
RegionTyParamBound => {}
}
@ -3828,14 +3820,23 @@ impl Resolver {
pub fn resolve_trait_reference(@mut self,
trait_reference: &trait_ref,
visitor: ResolveVisitor) {
match self.resolve_path(trait_reference.path, TypeNS, true, visitor) {
visitor: ResolveVisitor,
reference_type: TraitReferenceType) {
match self.resolve_path(&trait_reference.path, TypeNS, true, visitor) {
None => {
self.session.span_err(trait_reference.path.span,
"attempt to implement an \
unknown trait");
let path_str = self.idents_to_str(trait_reference.path.idents);
let usage_str = match reference_type {
TraitBoundingTypeParameter => "bound type parameter with",
TraitImplementation => "implement",
TraitDerivation => "derive"
};
let msg = fmt!("attempt to %s a nonexistent trait `%s`", usage_str, path_str);
self.session.span_err(trait_reference.path.span, msg);
}
Some(def) => {
debug!("(resolving trait) found trait def: %?", def);
self.record_def(trait_reference.ref_id, def);
}
}
@ -3877,7 +3878,7 @@ impl Resolver {
// Resolve fields.
for fields.iter().advance |field| {
self.resolve_type(field.node.ty, visitor);
self.resolve_type(&field.node.ty, visitor);
}
}
}
@ -3912,8 +3913,8 @@ impl Resolver {
pub fn resolve_implementation(@mut self,
id: node_id,
generics: &Generics,
opt_trait_reference: Option<@trait_ref>,
self_type: @Ty,
opt_trait_reference: &Option<trait_ref>,
self_type: &Ty,
methods: &[@method],
visitor: ResolveVisitor) {
// If applicable, create a rib for the type parameters.
@ -3928,8 +3929,8 @@ impl Resolver {
// Resolve the trait reference, if necessary.
let original_trait_refs;
match opt_trait_reference {
Some(trait_reference) => {
self.resolve_trait_reference(trait_reference, visitor);
&Some(ref trait_reference) => {
self.resolve_trait_reference(trait_reference, visitor, TraitImplementation);
// Record the current set of trait references.
let mut new_trait_refs = ~[];
@ -3943,7 +3944,7 @@ impl Resolver {
&mut self.current_trait_refs,
Some(new_trait_refs)));
}
None => {
&None => {
original_trait_refs = None;
}
}
@ -4000,7 +4001,7 @@ impl Resolver {
let mutability = if local.node.is_mutbl {Mutable} else {Immutable};
// Resolve the type.
self.resolve_type(local.node.ty, visitor);
self.resolve_type(&local.node.ty, visitor);
// Resolve the initializer, if necessary.
match local.node.init {
@ -4111,12 +4112,12 @@ impl Resolver {
debug!("(resolving block) leaving block");
}
pub fn resolve_type(@mut self, ty: @Ty, visitor: ResolveVisitor) {
pub fn resolve_type(@mut self, ty: &Ty, visitor: ResolveVisitor) {
match ty.node {
// Like path expressions, the interpretation of path types depends
// on whether the path has multiple elements in it or not.
ty_path(path, bounds, path_id) => {
ty_path(ref path, ref bounds, path_id) => {
// This is a path in the type namespace. Walk through scopes
// looking for it.
let mut result_def = None;
@ -4210,7 +4211,7 @@ impl Resolver {
let pat_id = pattern.id;
for walk_pat(pattern) |pattern| {
match pattern.node {
pat_ident(binding_mode, path, _)
pat_ident(binding_mode, ref path, _)
if !path.global && path.idents.len() == 1 => {
// The meaning of pat_ident with no type parameters
@ -4333,11 +4334,11 @@ impl Resolver {
// Check the types in the path pattern.
for path.types.iter().advance |ty| {
self.resolve_type(*ty, visitor);
self.resolve_type(ty, visitor);
}
}
pat_ident(binding_mode, path, _) => {
pat_ident(binding_mode, ref path, _) => {
// This must be an enum variant, struct, or constant.
match self.resolve_path(path, ValueNS, false, visitor) {
Some(def @ def_variant(*)) |
@ -4366,11 +4367,11 @@ impl Resolver {
// Check the types in the path pattern.
for path.types.iter().advance |ty| {
self.resolve_type(*ty, visitor);
self.resolve_type(ty, visitor);
}
}
pat_enum(path, _) => {
pat_enum(ref path, _) => {
// This must be an enum variant, struct or const.
match self.resolve_path(path, ValueNS, false, visitor) {
Some(def @ def_fn(*)) |
@ -4395,7 +4396,7 @@ impl Resolver {
// Check the types in the path pattern.
for path.types.iter().advance |ty| {
self.resolve_type(*ty, visitor);
self.resolve_type(ty, visitor);
}
}
@ -4408,7 +4409,7 @@ impl Resolver {
self.resolve_expr(last_expr, visitor);
}
pat_struct(path, _, _) => {
pat_struct(ref path, _, _) => {
match self.resolve_path(path, TypeNS, false, visitor) {
Some(def_ty(class_id))
if self.structs.contains(&class_id) => {
@ -4483,14 +4484,14 @@ impl Resolver {
/// If `check_ribs` is true, checks the local definitions first; i.e.
/// doesn't skip straight to the containing module.
pub fn resolve_path(@mut self,
path: @Path,
path: &Path,
namespace: Namespace,
check_ribs: bool,
visitor: ResolveVisitor)
-> Option<def> {
// First, resolve the types.
for path.types.iter().advance |ty| {
self.resolve_type(*ty, visitor);
self.resolve_type(ty, visitor);
}
if path.global {
@ -4609,7 +4610,7 @@ impl Resolver {
return NoNameDefinition;
}
pub fn intern_module_part_of_path(@mut self, path: @Path) -> ~[ident] {
pub fn intern_module_part_of_path(@mut self, path: &Path) -> ~[ident] {
let mut module_path_idents = ~[];
for path.idents.iter().enumerate().advance |(index, ident)| {
if index == path.idents.len() - 1 {
@ -4623,7 +4624,7 @@ impl Resolver {
}
pub fn resolve_module_relative_path(@mut self,
path: @Path,
path: &Path,
xray: XrayFlag,
namespace: Namespace)
-> Option<def> {
@ -4689,7 +4690,7 @@ impl Resolver {
/// Invariant: This must be called only during main resolution, not during
/// import resolution.
pub fn resolve_crate_relative_path(@mut self,
path: @Path,
path: &Path,
xray: XrayFlag,
namespace: Namespace)
-> Option<def> {
@ -4915,7 +4916,7 @@ impl Resolver {
// The interpretation of paths depends on whether the path has
// multiple elements in it or not.
expr_path(path) => {
expr_path(ref path) => {
// This is a local path in the value namespace. Walk through
// scopes looking for it.
@ -4984,7 +4985,7 @@ impl Resolver {
visitor);
}
expr_struct(path, _, _) => {
expr_struct(ref path, _, _) => {
// Resolve the path to the structure it goes to.
match self.resolve_path(path, TypeNS, false, visitor) {
Some(def_ty(class_id)) | Some(def_struct(class_id))
@ -5294,7 +5295,7 @@ impl Resolver {
visit_crate(self.crate, ((), vt));
}
pub fn check_for_item_unused_imports(&mut self, vi: @view_item) {
pub fn check_for_item_unused_imports(&mut self, vi: &view_item) {
// Ignore public import statements because there's no way to be sure
// whether they're used or not. Also ignore imports with a dummy span
// because this means that they were generated in some fashion by the
@ -5360,7 +5361,7 @@ impl Resolver {
if idents.len() == 0 {
return ~"???";
}
return self.idents_to_str(vec::reversed(idents));
return self.idents_to_str(idents.consume_rev_iter().collect::<~[ast::ident]>());
}
pub fn dump_module(@mut self, module_: @mut Module) {

View file

@ -385,7 +385,7 @@ pub fn expand_nested_bindings<'r>(bcx: block,
do m.map |br| {
match br.pats[col].node {
ast::pat_ident(_, path, Some(inner)) => {
ast::pat_ident(_, ref path, Some(inner)) => {
let pats = vec::append(
br.pats.slice(0u, col).to_owned(),
vec::append(~[inner],
@ -441,7 +441,7 @@ pub fn enter_match<'r>(bcx: block,
let this = br.pats[col];
match this.node {
ast::pat_ident(_, path, None) => {
ast::pat_ident(_, ref path, None) => {
if pat_is_binding(dm, this) {
let binding_info =
br.data.bindings_map.get(
@ -796,7 +796,7 @@ pub fn enter_region<'r>(bcx: block,
pub fn get_options(bcx: block, m: &[@Match], col: uint) -> ~[Opt] {
let ccx = bcx.ccx();
fn add_to_set(tcx: ty::ctxt, set: &mut ~[Opt], val: Opt) {
if set.iter().any_(|l| opt_eq(tcx, l, &val)) {return;}
if set.iter().any(|l| opt_eq(tcx, l, &val)) {return;}
set.push(val);
}
@ -963,7 +963,7 @@ pub fn collect_record_or_struct_fields(bcx: block,
fn extend(idents: &mut ~[ast::ident], field_pats: &[ast::field_pat]) {
for field_pats.iter().advance |field_pat| {
let field_ident = field_pat.ident;
if !idents.iter().any_(|x| *x == field_ident) {
if !idents.iter().any(|x| *x == field_ident) {
idents.push(field_ident);
}
}
@ -974,7 +974,7 @@ pub fn pats_require_rooting(bcx: block,
m: &[@Match],
col: uint)
-> bool {
do m.iter().any_ |br| {
do m.iter().any |br| {
let pat_id = br.pats[col].id;
let key = root_map_key {id: pat_id, derefs: 0u };
bcx.ccx().maps.root_map.contains_key(&key)
@ -1003,7 +1003,7 @@ pub fn root_pats_as_necessary(mut bcx: block,
// matches may be wildcards like _ or identifiers).
macro_rules! any_pat (
($m:expr, $pattern:pat) => (
do ($m).iter().any_ |br| {
do ($m).iter().any |br| {
match br.pats[col].node {
$pattern => true,
_ => false
@ -1029,7 +1029,7 @@ pub fn any_tup_pat(m: &[@Match], col: uint) -> bool {
}
pub fn any_tuple_struct_pat(bcx: block, m: &[@Match], col: uint) -> bool {
do m.iter().any_ |br| {
do m.iter().any |br| {
let pat = br.pats[col];
match pat.node {
ast::pat_enum(_, Some(_)) => {
@ -1095,26 +1095,20 @@ pub fn compare_values(cx: block,
match ty::get(rhs_t).sty {
ty::ty_estr(ty::vstore_uniq) => {
let scratch_result = scratch_datum(cx, ty::mk_bool(), false);
let scratch_lhs = alloca(cx, val_ty(lhs));
Store(cx, lhs, scratch_lhs);
let scratch_rhs = alloca(cx, val_ty(rhs));
Store(cx, rhs, scratch_rhs);
let did = cx.tcx().lang_items.uniq_str_eq_fn();
let bcx = callee::trans_lang_call(cx, did, [scratch_lhs, scratch_rhs],
expr::SaveIn(scratch_result.val));
let result = scratch_result.to_result(bcx);
let result = callee::trans_lang_call(cx, did, [scratch_lhs, scratch_rhs], None);
Result {
bcx: result.bcx,
val: bool_to_i1(result.bcx, result.val)
}
}
ty::ty_estr(_) => {
let scratch_result = scratch_datum(cx, ty::mk_bool(), false);
let did = cx.tcx().lang_items.str_eq_fn();
let bcx = callee::trans_lang_call(cx, did, [lhs, rhs],
expr::SaveIn(scratch_result.val));
let result = scratch_result.to_result(bcx);
let result = callee::trans_lang_call(cx, did, [lhs, rhs], None);
Result {
bcx: result.bcx,
val: bool_to_i1(result.bcx, result.val)
@ -1395,8 +1389,12 @@ pub fn compile_submatch(bcx: block,
}
if any_uniq_pat(m, col) {
let pat_ty = node_id_type(bcx, pat_id);
let llbox = Load(bcx, val);
let unboxed = GEPi(bcx, llbox, [0u, abi::box_field_body]);
let unboxed = match ty::get(pat_ty).sty {
ty::ty_uniq(*) if !ty::type_contents(bcx.tcx(), pat_ty).contains_managed() => llbox,
_ => GEPi(bcx, llbox, [0u, abi::box_field_body])
};
compile_submatch(bcx, enter_uniq(bcx, dm, m, col, val),
vec::append(~[unboxed], vals_left), chk);
return;
@ -1868,8 +1866,12 @@ pub fn bind_irrefutable_pat(bcx: block,
}
}
ast::pat_box(inner) | ast::pat_uniq(inner) => {
let pat_ty = node_id_type(bcx, pat.id);
let llbox = Load(bcx, val);
let unboxed = GEPi(bcx, llbox, [0u, abi::box_field_body]);
let unboxed = match ty::get(pat_ty).sty {
ty::ty_uniq(*) if !ty::type_contents(bcx.tcx(), pat_ty).contains_managed() => llbox,
_ => GEPi(bcx, llbox, [0u, abi::box_field_body])
};
bcx = bind_irrefutable_pat(bcx,
inner,
unboxed,

View file

@ -131,13 +131,13 @@ fn represent_type_uncached(cx: &mut CrateContext, t: ty::t) -> Repr {
}
ty::ty_struct(def_id, ref substs) => {
let fields = ty::lookup_struct_fields(cx.tcx, def_id);
let ftys = do fields.map |field| {
let mut ftys = do fields.map |field| {
ty::lookup_field_type(cx.tcx, def_id, field.id, substs)
};
let packed = ty::lookup_packed(cx.tcx, def_id);
let dtor = ty::ty_dtor(cx.tcx, def_id).has_drop_flag();
let ftys =
if dtor { ftys + [ty::mk_bool()] } else { ftys };
if dtor { ftys.push(ty::mk_bool()); }
return Univariant(mk_struct(cx, ftys, packed), dtor)
}
ty::ty_enum(def_id, ref substs) => {
@ -147,7 +147,7 @@ fn represent_type_uncached(cx: &mut CrateContext, t: ty::t) -> Repr {
mk_struct(cx, self.tys, false).size == 0
}
fn find_ptr(&self) -> Option<uint> {
self.tys.iter().position_(|&ty| mono_data_classify(ty) == MonoNonNull)
self.tys.iter().position(|&ty| mono_data_classify(ty) == MonoNonNull)
}
}
@ -263,7 +263,7 @@ fn generic_fields_of(cx: &mut CrateContext, r: &Repr, sizing: bool) -> ~[Type] {
let padding = largest_size - most_aligned.size;
struct_llfields(cx, most_aligned, sizing)
+ [Type::array(&Type::i8(), padding)]
+ &[Type::array(&Type::i8(), padding)]
}
}
}
@ -512,7 +512,7 @@ pub fn trans_const(ccx: &mut CrateContext, r: &Repr, discr: int,
let discr_ty = C_int(ccx, discr);
let contents = build_const_struct(ccx, case,
~[discr_ty] + vals);
C_struct(contents + [padding(max_sz - case.size)])
C_struct(contents + &[padding(max_sz - case.size)])
}
NullablePointer{ nonnull: ref nonnull, nndiscr, ptrfield, _ } => {
if discr == nndiscr {

View file

@ -72,6 +72,7 @@ use std::uint;
use std::vec;
use std::local_data;
use extra::time;
use extra::sort;
use syntax::ast::ident;
use syntax::ast_map::{path, path_elt_to_str, path_name};
use syntax::ast_util::{local_def, path_to_ident};
@ -141,6 +142,48 @@ fn fcx_has_nonzero_span(fcx: fn_ctxt) -> bool {
}
}
struct StatRecorder<'self> {
ccx: @mut CrateContext,
name: &'self str,
start: u64,
istart: uint,
}
impl<'self> StatRecorder<'self> {
pub fn new(ccx: @mut CrateContext,
name: &'self str) -> StatRecorder<'self> {
let start = if ccx.sess.trans_stats() {
time::precise_time_ns()
} else {
0
};
let istart = ccx.stats.n_llvm_insns;
StatRecorder {
ccx: ccx,
name: name,
start: start,
istart: istart,
}
}
}
#[unsafe_destructor]
impl<'self> Drop for StatRecorder<'self> {
pub fn drop(&self) {
if self.ccx.sess.trans_stats() {
let end = time::precise_time_ns();
let elapsed = ((end - self.start) / 1_000_000) as uint;
let iend = self.ccx.stats.n_llvm_insns;
self.ccx.stats.fn_stats.push((self.name.to_owned(),
elapsed,
iend - self.istart));
self.ccx.stats.n_fns += 1;
// Reset LLVM insn count to avoid compound costs.
self.ccx.stats.n_llvm_insns = self.istart;
}
}
}
pub fn decl_fn(llmod: ModuleRef, name: &str, cc: lib::llvm::CallConv, ty: Type) -> ValueRef {
let llfn: ValueRef = do name.as_c_str |buf| {
unsafe {
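StatRecorder above leans on the destructor running at scope exit: construction notes the start time and instruction count, and drop records the per-function totals when trans_fn returns. The same RAII shape in present-day Rust, with invented names, as a rough sketch rather than the compiler's actual types:

    use std::time::Instant;

    struct ScopeTimer<'a> {
        name: &'a str,
        start: Instant,
    }

    impl<'a> ScopeTimer<'a> {
        fn new(name: &'a str) -> ScopeTimer<'a> {
            ScopeTimer { name, start: Instant::now() }
        }
    }

    impl<'a> Drop for ScopeTimer<'a> {
        // Runs automatically when the value goes out of scope,
        // the way StatRecorder's drop fires at the end of trans_fn.
        fn drop(&mut self) {
            println!("{}: {} ms", self.name, self.start.elapsed().as_millis());
        }
    }

    fn main() {
        let _t = ScopeTimer::new("translate fn");
        // ... the work being measured ...
    }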
@ -246,35 +289,48 @@ pub fn malloc_raw_dyn(bcx: block,
let _icx = push_ctxt("malloc_raw");
let ccx = bcx.ccx();
let (mk_fn, langcall) = match heap {
heap_managed | heap_managed_unique => {
(ty::mk_imm_box, bcx.tcx().lang_items.malloc_fn())
}
heap_exchange => {
(ty::mk_imm_uniq, bcx.tcx().lang_items.exchange_malloc_fn())
}
heap_exchange_closure => {
(ty::mk_imm_uniq, bcx.tcx().lang_items.closure_exchange_malloc_fn())
}
};
if heap == heap_exchange {
let llty_value = type_of::type_of(ccx, t);
let llalign = llalign_of_min(ccx, llty_value);
// Allocate space:
let r = callee::trans_lang_call(
bcx,
bcx.tcx().lang_items.exchange_malloc_fn(),
[C_i32(llalign as i32), size],
None);
rslt(r.bcx, PointerCast(r.bcx, r.val, llty_value.ptr_to()))
} else if heap == heap_exchange_vector {
// Grab the TypeRef type of box_ptr_ty.
let box_ptr_ty = mk_fn(bcx.tcx(), t);
let element_type = match ty::get(t).sty {
ty::ty_unboxed_vec(e) => e,
_ => fail!("not a vector body")
};
let box_ptr_ty = ty::mk_evec(bcx.tcx(), element_type, ty::vstore_uniq);
let llty = type_of(ccx, box_ptr_ty);
let llty_value = type_of::type_of(ccx, t);
let llalign = llalign_of_min(ccx, llty_value);
// Allocate space:
let rval = alloca(bcx, Type::i8p());
let bcx = callee::trans_lang_call(
let r = callee::trans_lang_call(
bcx,
langcall,
bcx.tcx().lang_items.vector_exchange_malloc_fn(),
[C_i32(llalign as i32), size],
expr::SaveIn(rval));
rslt(bcx, PointerCast(bcx, Load(bcx, rval), llty))
None);
rslt(r.bcx, PointerCast(r.bcx, r.val, llty))
} else {
// we treat ~fn, @fn and @[] as @ here, which isn't ideal
let (mk_fn, langcall) = match heap {
heap_managed | heap_managed_unique => {
(ty::mk_imm_box, bcx.tcx().lang_items.malloc_fn())
}
heap_exchange_closure => {
(ty::mk_imm_box, bcx.tcx().lang_items.closure_exchange_malloc_fn())
}
_ => fail!("heap_exchange/heap_exchange_vector already handled")
};
// Grab the TypeRef type of box_ptr_ty.
let box_ptr_ty = mk_fn(bcx.tcx(), t);
let llty = type_of(ccx, box_ptr_ty);
@ -285,13 +341,12 @@ pub fn malloc_raw_dyn(bcx: block,
// Allocate space:
let tydesc = PointerCast(bcx, static_ti.tydesc, Type::i8p());
let rval = alloca(bcx, Type::i8p());
let bcx = callee::trans_lang_call(
let r = callee::trans_lang_call(
bcx,
langcall,
[tydesc, size],
expr::SaveIn(rval));
let r = rslt(bcx, PointerCast(bcx, Load(bcx, rval), llty));
None);
let r = rslt(r.bcx, PointerCast(r.bcx, r.val, llty));
maybe_set_managed_unique_rc(r.bcx, r.val, heap);
r
}
@ -316,6 +371,7 @@ pub struct MallocResult {
// and pulls out the body
pub fn malloc_general_dyn(bcx: block, t: ty::t, heap: heap, size: ValueRef)
-> MallocResult {
assert!(heap != heap_exchange);
let _icx = push_ctxt("malloc_general");
let Result {bcx: bcx, val: llbox} = malloc_raw_dyn(bcx, t, heap, size);
let body = GEPi(bcx, llbox, [0u, abi::box_field_body]);
@ -323,9 +379,9 @@ pub fn malloc_general_dyn(bcx: block, t: ty::t, heap: heap, size: ValueRef)
MallocResult { bcx: bcx, box: llbox, body: body }
}
pub fn malloc_general(bcx: block, t: ty::t, heap: heap)
-> MallocResult {
let ty = type_of(bcx.ccx(), t);
pub fn malloc_general(bcx: block, t: ty::t, heap: heap) -> MallocResult {
let ty = type_of(bcx.ccx(), t);
assert!(heap != heap_exchange);
malloc_general_dyn(bcx, t, heap, llsize_of(bcx.ccx(), ty))
}
pub fn malloc_boxed(bcx: block, t: ty::t)
@ -342,6 +398,7 @@ pub fn heap_for_unique(bcx: block, t: ty::t) -> heap {
}
pub fn maybe_set_managed_unique_rc(bcx: block, bx: ValueRef, heap: heap) {
assert!(heap != heap_exchange);
if heap == heap_managed_unique {
// In cases where we are looking at a unique-typed allocation in the
// managed heap (thus have refcount 1 from the managed allocator),
@ -353,11 +410,6 @@ pub fn maybe_set_managed_unique_rc(bcx: block, bx: ValueRef, heap: heap) {
}
}
pub fn malloc_unique(bcx: block, t: ty::t)
-> MallocResult {
malloc_general(bcx, t, heap_for_unique(bcx, t))
}
// Type descriptor and type glue stuff
pub fn get_tydesc_simple(ccx: &mut CrateContext, t: ty::t) -> ValueRef {
@ -863,10 +915,10 @@ pub fn need_invoke(bcx: block) -> bool {
// Walk the scopes to look for cleanups
let mut cur = bcx;
let mut cur_scope = cur.scope;
loop {
match cur.kind {
block_scope(inf) => {
let inf = &mut *inf; // FIXME(#5074) workaround old borrowck
cur_scope = match cur_scope {
Some(inf) => {
for inf.cleanups.iter().advance |cleanup| {
match *cleanup {
clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) => {
@ -876,12 +928,15 @@ pub fn need_invoke(bcx: block) -> bool {
}
}
}
inf.parent
}
None => {
cur = match cur.parent {
Some(next) => next,
None => return false
};
cur.scope
}
_ => ()
}
cur = match cur.parent {
Some(next) => next,
None => return false
}
}
}
@ -899,23 +954,21 @@ pub fn have_cached_lpad(bcx: block) -> bool {
pub fn in_lpad_scope_cx(bcx: block, f: &fn(si: &mut scope_info)) {
let mut bcx = bcx;
let mut cur_scope = bcx.scope;
loop {
{
match bcx.kind {
block_scope(inf) => {
let len = { // FIXME(#5074) workaround old borrowck
let inf = &mut *inf;
inf.cleanups.len()
};
if len > 0u || bcx.parent.is_none() {
f(inf);
return;
}
cur_scope = match cur_scope {
Some(inf) => {
if !inf.empty_cleanups() || (inf.parent.is_none() && bcx.parent.is_none()) {
f(inf);
return;
}
_ => ()
inf.parent
}
None => {
bcx = block_parent(bcx);
bcx.scope
}
}
bcx = block_parent(bcx);
}
}
@ -972,27 +1025,31 @@ pub fn get_landing_pad(bcx: block) -> BasicBlockRef {
pub fn find_bcx_for_scope(bcx: block, scope_id: ast::node_id) -> block {
let mut bcx_sid = bcx;
let mut cur_scope = bcx_sid.scope;
loop {
bcx_sid = match bcx_sid.node_info {
Some(NodeInfo { id, _ }) if id == scope_id => {
return bcx_sid
}
// FIXME(#6268, #6248) hacky cleanup for nested method calls
Some(NodeInfo { callee_id: Some(id), _ }) if id == scope_id => {
return bcx_sid
}
_ => {
match bcx_sid.parent {
None => bcx.tcx().sess.bug(
fmt!("no enclosing scope with id %d", scope_id)),
Some(bcx_par) => bcx_par
cur_scope = match cur_scope {
Some(inf) => {
match inf.node_info {
Some(NodeInfo { id, _ }) if id == scope_id => {
return bcx_sid
}
// FIXME(#6268, #6248) hacky cleanup for nested method calls
Some(NodeInfo { callee_id: Some(id), _ }) if id == scope_id => {
return bcx_sid
}
_ => inf.parent
}
}
None => {
bcx_sid = match bcx_sid.parent {
None => bcx.tcx().sess.bug(fmt!("no enclosing scope with id %d", scope_id)),
Some(bcx_par) => bcx_par
};
bcx_sid.scope
}
}
}
}
pub fn do_spill(bcx: block, v: ValueRef, t: ty::t) -> ValueRef {
@ -1014,13 +1071,13 @@ pub fn do_spill_noroot(cx: block, v: ValueRef) -> ValueRef {
pub fn spill_if_immediate(cx: block, v: ValueRef, t: ty::t) -> ValueRef {
let _icx = push_ctxt("spill_if_immediate");
if ty::type_is_immediate(t) { return do_spill(cx, v, t); }
if ty::type_is_immediate(cx.tcx(), t) { return do_spill(cx, v, t); }
return v;
}
pub fn load_if_immediate(cx: block, v: ValueRef, t: ty::t) -> ValueRef {
let _icx = push_ctxt("load_if_immediate");
if ty::type_is_immediate(t) { return Load(cx, v); }
if ty::type_is_immediate(cx.tcx(), t) { return Load(cx, v); }
return v;
}
@ -1145,7 +1202,7 @@ pub fn trans_stmt(cx: block, s: &ast::stmt) -> block {
// You probably don't want to use this one. See the
// next three functions instead.
pub fn new_block(cx: fn_ctxt, parent: Option<block>, kind: block_kind,
pub fn new_block(cx: fn_ctxt, parent: Option<block>, scope: Option<@mut scope_info>,
is_lpad: bool, name: &str, opt_node_info: Option<NodeInfo>)
-> block {
@ -1155,10 +1212,10 @@ pub fn new_block(cx: fn_ctxt, parent: Option<block>, kind: block_kind,
};
let bcx = mk_block(llbb,
parent,
kind,
is_lpad,
opt_node_info,
cx);
bcx.scope = scope;
for parent.iter().advance |cx| {
if cx.unreachable {
Unreachable(bcx);
@ -1169,27 +1226,30 @@ pub fn new_block(cx: fn_ctxt, parent: Option<block>, kind: block_kind,
}
}
pub fn simple_block_scope() -> block_kind {
block_scope(@mut scope_info {
pub fn simple_block_scope(parent: Option<@mut scope_info>,
node_info: Option<NodeInfo>) -> @mut scope_info {
@mut scope_info {
parent: parent,
loop_break: None,
loop_label: None,
cleanups: ~[],
cleanup_paths: ~[],
landing_pad: None
})
landing_pad: None,
node_info: node_info,
}
}
// Use this when you're at the top block of a function or the like.
pub fn top_scope_block(fcx: fn_ctxt, opt_node_info: Option<NodeInfo>)
-> block {
return new_block(fcx, None, simple_block_scope(), false,
return new_block(fcx, None, Some(simple_block_scope(None, opt_node_info)), false,
"function top level", opt_node_info);
}
pub fn scope_block(bcx: block,
opt_node_info: Option<NodeInfo>,
n: &str) -> block {
return new_block(bcx.fcx, Some(bcx), simple_block_scope(), bcx.is_lpad,
return new_block(bcx.fcx, Some(bcx), Some(simple_block_scope(None, opt_node_info)), bcx.is_lpad,
n, opt_node_info);
}
@ -1198,27 +1258,29 @@ pub fn loop_scope_block(bcx: block,
loop_label: Option<ident>,
n: &str,
opt_node_info: Option<NodeInfo>) -> block {
return new_block(bcx.fcx, Some(bcx), block_scope(@mut scope_info {
return new_block(bcx.fcx, Some(bcx), Some(@mut scope_info {
parent: None,
loop_break: Some(loop_break),
loop_label: loop_label,
cleanups: ~[],
cleanup_paths: ~[],
landing_pad: None
landing_pad: None,
node_info: opt_node_info,
}), bcx.is_lpad, n, opt_node_info);
}
// Use this when creating a block for the inside of a landing pad.
pub fn lpad_block(bcx: block, n: &str) -> block {
new_block(bcx.fcx, Some(bcx), block_non_scope, true, n, None)
new_block(bcx.fcx, Some(bcx), None, true, n, None)
}
// Use this when you're making a general CFG BB within a scope.
pub fn sub_block(bcx: block, n: &str) -> block {
new_block(bcx.fcx, Some(bcx), block_non_scope, bcx.is_lpad, n, None)
new_block(bcx.fcx, Some(bcx), None, bcx.is_lpad, n, None)
}
pub fn raw_block(fcx: fn_ctxt, is_lpad: bool, llbb: BasicBlockRef) -> block {
mk_block(llbb, None, block_non_scope, is_lpad, None, fcx)
mk_block(llbb, None, is_lpad, None, fcx)
}
@ -1277,42 +1339,47 @@ pub fn cleanup_and_leave(bcx: block,
(fmt!("cleanup_and_leave(%s)", cur.to_str())).to_managed());
}
match cur.kind {
block_scope(inf) if !inf.empty_cleanups() => {
let (sub_cx, dest, inf_cleanups) = {
let inf = &mut *inf;
let mut skip = 0;
let mut dest = None;
{
let r = (*inf).cleanup_paths.rev_iter().find_(|cp| cp.target == leave);
for r.iter().advance |cp| {
if cp.size == inf.cleanups.len() {
Br(bcx, cp.dest);
return;
}
let mut cur_scope = cur.scope;
loop {
cur_scope = match cur_scope {
Some (inf) if !inf.empty_cleanups() => {
let (sub_cx, dest, inf_cleanups) = {
let inf = &mut *inf;
let mut skip = 0;
let mut dest = None;
{
let r = (*inf).cleanup_paths.rev_iter().find_(|cp| cp.target == leave);
for r.iter().advance |cp| {
if cp.size == inf.cleanups.len() {
Br(bcx, cp.dest);
return;
}
skip = cp.size;
dest = Some(cp.dest);
skip = cp.size;
dest = Some(cp.dest);
}
}
let sub_cx = sub_block(bcx, "cleanup");
Br(bcx, sub_cx.llbb);
inf.cleanup_paths.push(cleanup_path {
target: leave,
size: inf.cleanups.len(),
dest: sub_cx.llbb
});
(sub_cx, dest, inf.cleanups.tailn(skip).to_owned())
};
bcx = trans_block_cleanups_(sub_cx,
inf_cleanups,
is_lpad);
for dest.iter().advance |&dest| {
Br(bcx, dest);
return;
}
let sub_cx = sub_block(bcx, "cleanup");
Br(bcx, sub_cx.llbb);
inf.cleanup_paths.push(cleanup_path {
target: leave,
size: inf.cleanups.len(),
dest: sub_cx.llbb
});
(sub_cx, dest, inf.cleanups.tailn(skip).to_owned())
};
bcx = trans_block_cleanups_(sub_cx,
inf_cleanups,
is_lpad);
for dest.iter().advance |&dest| {
Br(bcx, dest);
return;
inf.parent
}
Some(inf) => inf.parent,
None => break
}
_ => ()
}
match upto {
@ -1353,9 +1420,12 @@ pub fn with_scope(bcx: block,
bcx.to_str(), opt_node_info, name);
let _indenter = indenter();
let scope_cx = scope_block(bcx, opt_node_info, name);
Br(bcx, scope_cx.llbb);
leave_block(f(scope_cx), scope_cx)
let scope = simple_block_scope(bcx.scope, opt_node_info);
bcx.scope = Some(scope);
let ret = f(bcx);
let ret = trans_block_cleanups_(ret, /*bad*/copy scope.cleanups, false);
bcx.scope = scope.parent;
ret
}
pub fn with_scope_result(bcx: block,
@ -1363,10 +1433,14 @@ pub fn with_scope_result(bcx: block,
name: &str,
f: &fn(block) -> Result) -> Result {
let _icx = push_ctxt("with_scope_result");
let scope_cx = scope_block(bcx, opt_node_info, name);
Br(bcx, scope_cx.llbb);
let Result {bcx, val} = f(scope_cx);
rslt(leave_block(bcx, scope_cx), val)
let scope = simple_block_scope(bcx.scope, opt_node_info);
bcx.scope = Some(scope);
let Result { bcx: out_bcx, val } = f(bcx);
let out_bcx = trans_block_cleanups_(out_bcx, /*bad*/copy scope.cleanups, false);
bcx.scope = scope.parent;
rslt(out_bcx, val)
}
pub fn with_scope_datumblock(bcx: block, opt_node_info: Option<NodeInfo>,
@ -1399,7 +1473,7 @@ pub fn alloc_local(cx: block, local: &ast::local) -> block {
let _icx = push_ctxt("alloc_local");
let t = node_id_type(cx, local.node.id);
let simple_name = match local.node.pat.node {
ast::pat_ident(_, pth, None) => Some(path_to_ident(pth)),
ast::pat_ident(_, ref pth, None) => Some(path_to_ident(pth)),
_ => None
};
let val = alloc_ty(cx, t);
@ -1545,7 +1619,7 @@ pub fn mk_standard_basic_blocks(llfn: ValueRef) -> BasicBlocks {
// slot where the return value of the function must go.
pub fn make_return_pointer(fcx: fn_ctxt, output_type: ty::t) -> ValueRef {
unsafe {
if !ty::type_is_immediate(output_type) {
if !ty::type_is_immediate(fcx.ccx.tcx, output_type) {
llvm::LLVMGetParam(fcx.llfn, 0)
} else {
let lloutputtype = type_of::type_of(fcx.ccx, output_type);
@ -1584,7 +1658,7 @@ pub fn new_fn_ctxt_w_id(ccx: @mut CrateContext,
ty::subst_tps(ccx.tcx, substs.tys, substs.self_ty, output_type)
}
};
let is_immediate = ty::type_is_immediate(substd_output_type);
let is_immediate = ty::type_is_immediate(ccx.tcx, substd_output_type);
let fcx = @mut fn_ctxt_ {
llfn: llfndecl,
llenv: unsafe {
@ -1690,7 +1764,7 @@ pub fn copy_args_to_allocas(fcx: fn_ctxt,
match fcx.llself {
Some(slf) => {
let self_val = if slf.is_copy
&& datum::appropriate_mode(slf.t).is_by_value() {
&& datum::appropriate_mode(bcx.tcx(), slf.t).is_by_value() {
let tmp = BitCast(bcx, slf.v, type_of(bcx.ccx(), slf.t));
let alloc = alloc_ty(bcx, slf.t);
Store(bcx, tmp, alloc);
@ -1718,7 +1792,7 @@ pub fn copy_args_to_allocas(fcx: fn_ctxt,
// This alloca should be optimized away by LLVM's mem-to-reg pass in
// the event it's not truly needed.
// only by value if immediate:
let llarg = if datum::appropriate_mode(arg_ty).is_by_value() {
let llarg = if datum::appropriate_mode(bcx.tcx(), arg_ty).is_by_value() {
let alloc = alloc_ty(bcx, arg_ty);
Store(bcx, raw_llarg, alloc);
alloc
@ -1737,7 +1811,7 @@ pub fn copy_args_to_allocas(fcx: fn_ctxt,
fcx.llargs.insert(arg_id, llarg);
if fcx.ccx.sess.opts.extra_debuginfo && fcx_has_nonzero_span(fcx) {
debuginfo::create_arg(bcx, args[arg_n], args[arg_n].ty.span);
debuginfo::create_arg(bcx, &args[arg_n], args[arg_n].ty.span);
}
}
@ -1866,18 +1940,16 @@ pub fn trans_fn(ccx: @mut CrateContext,
param_substs: Option<@param_substs>,
id: ast::node_id,
attrs: &[ast::attribute]) {
let do_time = ccx.sess.trans_stats();
let start = if do_time { time::get_time() }
else { time::Timespec::new(0, 0) };
let the_path_str = path_str(ccx.sess, path);
let _s = StatRecorder::new(ccx, the_path_str);
debug!("trans_fn(self_arg=%?, param_substs=%s)",
self_arg,
param_substs.repr(ccx.tcx));
let _icx = push_ctxt("trans_fn");
ccx.stats.n_fns += 1;
let the_path_str = path_str(ccx.sess, path);
let output_type = ty::ty_fn_ret(ty::node_id_to_type(ccx.tcx, id));
trans_closure(ccx,
path,
copy path,
decl,
body,
llfndecl,
@ -1893,10 +1965,6 @@ pub fn trans_fn(ccx: @mut CrateContext,
}
},
|_bcx| { });
if do_time {
let end = time::get_time();
ccx.log_fn_time(the_path_str, start, end);
}
}
pub fn trans_enum_variant(ccx: @mut CrateContext,
@ -1911,7 +1979,7 @@ pub fn trans_enum_variant(ccx: @mut CrateContext,
let fn_args = do args.map |varg| {
ast::arg {
is_mutbl: false,
ty: varg.ty,
ty: copy varg.ty,
pat: ast_util::ident_to_pat(
ccx.tcx.sess.next_node_id(),
codemap::dummy_sp(),
@ -1985,7 +2053,7 @@ pub fn trans_tuple_struct(ccx: @mut CrateContext,
let fn_args = do fields.map |field| {
ast::arg {
is_mutbl: false,
ty: field.node.ty,
ty: copy field.node.ty,
pat: ast_util::ident_to_pat(ccx.tcx.sess.next_node_id(),
codemap::dummy_sp(),
special_idents::arg),
@ -2961,8 +3029,14 @@ pub fn trans_crate(sess: session::Session,
io::println(fmt!("n_monos: %u", ccx.stats.n_monos));
io::println(fmt!("n_inlines: %u", ccx.stats.n_inlines));
io::println(fmt!("n_closures: %u", ccx.stats.n_closures));
io::println("fn stats:");
do sort::quick_sort(ccx.stats.fn_stats) |&(_, _, insns_a), &(_, _, insns_b)| {
insns_a > insns_b
}
for ccx.stats.fn_stats.iter().advance |&(name, ms, insns)| {
io::println(fmt!("%u insns, %u ms, %s", insns, ms, name));
}
}
if ccx.sess.count_llvm_insns() {
for ccx.stats.llvm_insns.iter().advance |(&k, &v)| {
io::println(fmt!("%-7u %s", v, k));

View file

@ -46,6 +46,9 @@ pub fn B(cx: block) -> BuilderRef {
}
pub fn count_insn(cx: block, category: &str) {
if cx.ccx().sess.trans_stats() {
cx.ccx().stats.n_llvm_insns += 1;
}
do base::with_insn_ctxt |v| {
let h = &mut cx.ccx().stats.llvm_insns;
@ -565,7 +568,7 @@ pub fn LoadRangeAssert(cx: block, PointerVal: ValueRef, lo: c_ulonglong,
let min = llvm::LLVMConstInt(t, lo, signed);
let max = llvm::LLVMConstInt(t, hi, signed);
do vec::as_imm_buf([min, max]) |ptr, len| {
do [min, max].as_imm_buf |ptr, len| {
llvm::LLVMSetMetadata(value, lib::llvm::MD_range as c_uint,
llvm::LLVMMDNodeInContext(cx.fcx.ccx.llcx,
ptr, len as c_uint));
@ -942,7 +945,7 @@ pub fn Call(cx: block, Fn: ValueRef, Args: &[ValueRef]) -> ValueRef {
cx.val_to_str(Fn),
Args.map(|arg| cx.val_to_str(*arg)));
do vec::as_imm_buf(Args) |ptr, len| {
do Args.as_imm_buf |ptr, len| {
llvm::LLVMBuildCall(B(cx), Fn, ptr, len as c_uint, noname())
}
}

View file

@ -446,8 +446,8 @@ pub fn trans_call(in_cx: block,
node_id_type(in_cx, id),
|cx| trans(cx, f),
args,
dest,
DontAutorefArg)
Some(dest),
DontAutorefArg).bcx
}
pub fn trans_method_call(in_cx: block,
@ -484,15 +484,15 @@ pub fn trans_method_call(in_cx: block,
}
},
args,
dest,
DontAutorefArg)
Some(dest),
DontAutorefArg).bcx
}
pub fn trans_lang_call(bcx: block,
did: ast::def_id,
args: &[ValueRef],
dest: expr::Dest)
-> block {
dest: Option<expr::Dest>)
-> Result {
let fty = if did.crate == ast::local_crate {
ty::node_id_to_type(bcx.ccx().tcx, did.node)
} else {
@ -552,7 +552,7 @@ pub fn trans_lang_call_with_type_params(bcx: block,
}
Callee { bcx: callee.bcx, data: Fn(FnData { llfn: new_llval }) }
},
ArgVals(args), dest, DontAutorefArg);
ArgVals(args), Some(dest), DontAutorefArg).bcx;
}
pub fn body_contains_ret(body: &ast::blk) -> bool {
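trans_lang_call now hands its result back to the caller as a Result pairing the updated block with the value, and takes an optional destination instead of always storing into one; a later hunk consumes it with unpack_result!. The same "thread the context through and return it with the value" shape, reduced to a toy in present-day Rust with invented names:

    struct Ctx {
        insns: usize, // stand-in for the block being threaded through
    }

    struct Res<T> {
        ctx: Ctx,
        val: T,
    }

    fn emit_call(mut ctx: Ctx, arg: u32) -> Res<u32> {
        ctx.insns += 1; // pretend one instruction was emitted
        Res { ctx, val: arg * 2 }
    }

    fn main() {
        let ctx = Ctx { insns: 0 };
        // Rebind the context and keep the value, like unpack_result!(bcx, ...).
        let Res { ctx, val } = emit_call(ctx, 21);
        println!("val = {}, insns = {}", val, ctx.insns);
    }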
@ -579,10 +579,10 @@ pub fn trans_call_inner(in_cx: block,
ret_ty: ty::t,
get_callee: &fn(block) -> Callee,
args: CallArgs,
dest: expr::Dest,
dest: Option<expr::Dest>,
autoref_arg: AutorefArg)
-> block {
do base::with_scope(in_cx, call_info, "call") |cx| {
-> Result {
do base::with_scope_result(in_cx, call_info, "call") |cx| {
let ret_in_loop = match args {
ArgExprs(args) => {
args.len() > 0u && match args.last().node {
@ -633,7 +633,7 @@ pub fn trans_call_inner(in_cx: block,
let mut llargs = ~[];
if !ty::type_is_immediate(ret_ty) {
if !ty::type_is_immediate(bcx.tcx(), ret_ty) {
llargs.push(llretslot);
}
@ -669,18 +669,12 @@ pub fn trans_call_inner(in_cx: block,
bcx = new_bcx;
match dest {
expr::Ignore => {
None => { assert!(ty::type_is_immediate(bcx.tcx(), ret_ty)) }
Some(expr::Ignore) => {
// drop the value if it is not being saved.
unsafe {
if llvm::LLVMIsUndef(llretslot) != lib::llvm::True {
if ty::type_is_nil(ret_ty) {
// When implementing the for-loop sugar syntax, the
// type of the for-loop is nil, but the function
// it's invoking returns a bool. This is a special
// case to ignore instead of invoking the Store
// below into a scratch pointer of a mismatched
// type.
} else if ty::type_is_immediate(ret_ty) {
if ty::type_needs_drop(bcx.tcx(), ret_ty) {
if ty::type_is_immediate(bcx.tcx(), ret_ty) {
let llscratchptr = alloc_ty(bcx, ret_ty);
Store(bcx, llresult, llscratchptr);
bcx = glue::drop_ty(bcx, llscratchptr, ret_ty);
@ -690,11 +684,11 @@ pub fn trans_call_inner(in_cx: block,
}
}
}
expr::SaveIn(lldest) => {
Some(expr::SaveIn(lldest)) => {
// If this is an immediate, store into the result location.
// (If this was not an immediate, the result will already be
// directly written into the output slot.)
if ty::type_is_immediate(ret_ty) {
if ty::type_is_immediate(bcx.tcx(), ret_ty) {
Store(bcx, llresult, lldest);
}
}
@ -717,7 +711,7 @@ pub fn trans_call_inner(in_cx: block,
bcx
}
}
bcx
rslt(bcx, llresult)
}
}
@ -727,14 +721,14 @@ pub enum CallArgs<'self> {
ArgVals(&'self [ValueRef])
}
pub fn trans_ret_slot(bcx: block, fn_ty: ty::t, dest: expr::Dest)
pub fn trans_ret_slot(bcx: block, fn_ty: ty::t, dest: Option<expr::Dest>)
-> ValueRef {
let retty = ty::ty_fn_ret(fn_ty);
match dest {
expr::SaveIn(dst) => dst,
expr::Ignore => {
if ty::type_is_nil(retty) {
Some(expr::SaveIn(dst)) => dst,
_ => {
if ty::type_is_immediate(bcx.tcx(), retty) {
unsafe {
llvm::LLVMGetUndef(Type::nil().ptr_to().to_ref())
}
@ -898,7 +892,7 @@ pub fn trans_arg_expr(bcx: block,
}
ty::ByCopy => {
if ty::type_needs_drop(bcx.tcx(), arg_datum.ty) ||
arg_datum.appropriate_mode().is_by_ref() {
arg_datum.appropriate_mode(bcx.tcx()).is_by_ref() {
debug!("by copy arg with type %s, storing to scratch",
bcx.ty_to_str(arg_datum.ty));
let scratch = scratch_datum(bcx, arg_datum.ty, false);
@ -914,7 +908,7 @@ pub fn trans_arg_expr(bcx: block,
scratch.add_clean(bcx);
temp_cleanups.push(scratch.val);
match scratch.appropriate_mode() {
match scratch.appropriate_mode(bcx.tcx()) {
ByValue => val = Load(bcx, scratch.val),
ByRef(_) => val = scratch.val,
}
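
The hunks above thread an `Option<expr::Dest>` through `trans_call_inner`/`trans_lang_call` and return a `Result` carrying both the updated block and the call's immediate LLVM value, so a caller that only wants the raw value can pass `None` instead of going through a scratch slot. A minimal sketch of that calling convention, assuming invented stand-ins (`Dest`, `Ctx`, `CallResult`, `emit_call`) rather than the compiler's real types:

~~~~
// Hypothetical stand-ins for the trans types: a destination for a call's
// result, plus a pair of (updated context, immediate value).
#[derive(Clone, Copy)]
enum Dest {
    Ignore,        // evaluate for effect only
    SaveIn(usize), // store into a caller-provided slot (index into `memory`)
}

struct Ctx {
    memory: Vec<i64>, // stands in for stack slots / allocas
}

struct CallResult {
    ctx: Ctx,
    value: i64, // the call's immediate return value
}

// `dest == None` means "just hand me the immediate value"; `Some(..)` means
// this function is responsible for storing or discarding it.
fn emit_call(mut ctx: Ctx, callee: fn() -> i64, dest: Option<Dest>) -> CallResult {
    let value = callee();
    match dest {
        None | Some(Dest::Ignore) => {}
        Some(Dest::SaveIn(slot)) => ctx.memory[slot] = value,
    }
    CallResult { ctx, value }
}

fn main() {
    let ctx = Ctx { memory: vec![0; 4] };
    // Caller wants the value directly, no scratch slot needed.
    let r = emit_call(ctx, || 7, None);
    assert_eq!(r.value, 7);
    // Caller wants the value written into slot 2.
    let r = emit_call(r.ctx, || 42, Some(Dest::SaveIn(2)));
    assert_eq!(r.ctx.memory[2], 42);
    // Caller discards the value.
    let r = emit_call(r.ctx, || 0, Some(Dest::Ignore));
    assert_eq!(r.value, 0);
    println!("ok");
}
~~~~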

View file

@ -531,13 +531,13 @@ pub fn make_opaque_cbox_take_glue(
// Allocate memory, update original ptr, and copy existing data
let opaque_tydesc = PointerCast(bcx, tydesc, Type::i8p());
let rval = alloca(bcx, Type::i8p());
let bcx = callee::trans_lang_call(
let mut bcx = bcx;
let llresult = unpack_result!(bcx, callee::trans_lang_call(
bcx,
bcx.tcx().lang_items.closure_exchange_malloc_fn(),
[opaque_tydesc, sz],
expr::SaveIn(rval));
let cbox_out = PointerCast(bcx, Load(bcx, rval), llopaquecboxty);
None));
let cbox_out = PointerCast(bcx, llresult, llopaquecboxty);
call_memcpy(bcx, cbox_out, cbox_in, sz, 1);
Store(bcx, cbox_out, cboxptr);
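
`unpack_result!(bcx, ...)` above rebinds the block variable and evaluates to the returned value in one step. A toy version of such a macro, assuming a simple `(ctx, val)` pair named `Res` instead of rustc's `Result`:

~~~~
// A pair of (updated context, value), standing in for the trans `Result`.
struct Res<C, V> { ctx: C, val: V }

// Toy `unpack_result!`: evaluate an expression returning `Res`, overwrite the
// named context binding, and yield the value.
macro_rules! unpack_result {
    ($ctx:ident, $e:expr) => {{
        let r = $e;
        $ctx = r.ctx;
        r.val
    }};
}

fn step(ctx: u32) -> Res<u32, &'static str> {
    Res { ctx: ctx + 1, val: "value" }
}

fn main() {
    let mut ctx = 0u32;
    let v = unpack_result!(ctx, step(ctx));
    assert_eq!((ctx, v), (1, "value"));
    println!("ok");
}
~~~~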

View file

@ -34,9 +34,6 @@ use std::cast::transmute;
use std::cast;
use std::hashmap::{HashMap};
use std::libc::{c_uint, c_longlong, c_ulonglong};
use std::to_bytes;
use std::str;
use std::vec::raw::to_ptr;
use std::vec;
use syntax::ast::ident;
use syntax::ast_map::{path, path_elt};
@ -96,8 +93,10 @@ pub struct Stats {
n_monos: uint,
n_inlines: uint,
n_closures: uint,
n_llvm_insns: uint,
llvm_insn_ctxt: ~[~str],
llvm_insns: HashMap<~str, uint>,
fn_times: ~[(~str, int)] // (ident, time)
fn_stats: ~[(~str, uint, uint)] // (ident, time-in-ms, llvm-instructions)
}
pub struct BuilderRef_res {
@ -275,6 +274,7 @@ pub enum heap {
heap_managed,
heap_managed_unique,
heap_exchange,
heap_exchange_vector,
heap_exchange_closure
}
@ -321,7 +321,7 @@ pub fn add_clean(bcx: block, val: ValueRef, t: ty::t) {
debug!("add_clean(%s, %s, %s)", bcx.to_str(), bcx.val_to_str(val), t.repr(bcx.tcx()));
let cleanup_type = cleanup_type(bcx.tcx(), t);
do in_scope_cx(bcx) |scope_info| {
do in_scope_cx(bcx, None) |scope_info| {
scope_info.cleanups.push(clean(|a| glue::drop_ty(a, val, t), cleanup_type));
grow_scope_clean(scope_info);
}
@ -333,25 +333,36 @@ pub fn add_clean_temp_immediate(cx: block, val: ValueRef, ty: ty::t) {
cx.to_str(), cx.val_to_str(val),
ty.repr(cx.tcx()));
let cleanup_type = cleanup_type(cx.tcx(), ty);
do in_scope_cx(cx) |scope_info| {
do in_scope_cx(cx, None) |scope_info| {
scope_info.cleanups.push(
clean_temp(val, |a| glue::drop_ty_immediate(a, val, ty),
cleanup_type));
grow_scope_clean(scope_info);
}
}
pub fn add_clean_temp_mem(bcx: block, val: ValueRef, t: ty::t) {
add_clean_temp_mem_in_scope_(bcx, None, val, t);
}
pub fn add_clean_temp_mem_in_scope(bcx: block, scope_id: ast::node_id, val: ValueRef, t: ty::t) {
add_clean_temp_mem_in_scope_(bcx, Some(scope_id), val, t);
}
pub fn add_clean_temp_mem_in_scope_(bcx: block, scope_id: Option<ast::node_id>,
val: ValueRef, t: ty::t) {
if !ty::type_needs_drop(bcx.tcx(), t) { return; }
debug!("add_clean_temp_mem(%s, %s, %s)",
bcx.to_str(), bcx.val_to_str(val),
t.repr(bcx.tcx()));
let cleanup_type = cleanup_type(bcx.tcx(), t);
do in_scope_cx(bcx) |scope_info| {
do in_scope_cx(bcx, scope_id) |scope_info| {
scope_info.cleanups.push(clean_temp(val, |a| glue::drop_ty(a, val, t), cleanup_type));
grow_scope_clean(scope_info);
}
}
pub fn add_clean_return_to_mut(bcx: block,
scope_id: ast::node_id,
root_key: root_map_key,
frozen_val_ref: ValueRef,
bits_val_ref: ValueRef,
@ -369,7 +380,7 @@ pub fn add_clean_return_to_mut(bcx: block,
bcx.to_str(),
bcx.val_to_str(frozen_val_ref),
bcx.val_to_str(bits_val_ref));
do in_scope_cx(bcx) |scope_info| {
do in_scope_cx(bcx, Some(scope_id)) |scope_info| {
scope_info.cleanups.push(
clean_temp(
frozen_val_ref,
@ -385,12 +396,12 @@ pub fn add_clean_free(cx: block, ptr: ValueRef, heap: heap) {
let f: @fn(block) -> block = |a| glue::trans_free(a, ptr);
f
}
heap_exchange | heap_exchange_closure => {
heap_exchange | heap_exchange_vector | heap_exchange_closure => {
let f: @fn(block) -> block = |a| glue::trans_exchange_free(a, ptr);
f
}
};
do in_scope_cx(cx) |scope_info| {
do in_scope_cx(cx, None) |scope_info| {
scope_info.cleanups.push(clean_temp(ptr, free_fn,
normal_exit_and_unwind));
grow_scope_clean(scope_info);
@ -402,8 +413,8 @@ pub fn add_clean_free(cx: block, ptr: ValueRef, heap: heap) {
// this will be more involved. For now, we simply zero out the local, and the
// drop glue checks whether it is zero.
pub fn revoke_clean(cx: block, val: ValueRef) {
do in_scope_cx(cx) |scope_info| {
let cleanup_pos = scope_info.cleanups.iter().position_(
do in_scope_cx(cx, None) |scope_info| {
let cleanup_pos = scope_info.cleanups.iter().position(
|cu| match *cu {
clean_temp(v, _, _) if v == val => true,
_ => false
@ -419,27 +430,14 @@ pub fn revoke_clean(cx: block, val: ValueRef) {
}
pub fn block_cleanups(bcx: block) -> ~[cleanup] {
match bcx.kind {
block_non_scope => ~[],
block_scope(inf) => /*bad*/copy inf.cleanups
match bcx.scope {
None => ~[],
Some(inf) => /*bad*/copy inf.cleanups
}
}
pub enum block_kind {
// A scope at the end of which temporary values created inside of it are
// cleaned up. May correspond to an actual block in the language, but also
// to an implicit scope, for example, calls introduce an implicit scope in
// which the arguments are evaluated and cleaned up.
block_scope(@mut scope_info),
// A non-scope block is a basic block created as a translation artifact
// from translating code that expresses conditional logic rather than by
// explicit { ... } block structure in the source language. It's called a
// non-scope block because it doesn't introduce a new variable scope.
block_non_scope,
}
pub struct scope_info {
parent: Option<@mut scope_info>,
loop_break: Option<block>,
loop_label: Option<ident>,
    // A list of functions that must be run when leaving this
@ -451,6 +449,8 @@ pub struct scope_info {
cleanup_paths: ~[cleanup_path],
// Unwinding landing pad. Also cleared when cleanups change.
landing_pad: Option<BasicBlockRef>,
// info about the AST node this scope originated from, if any
node_info: Option<NodeInfo>,
}
impl scope_info {
@ -506,8 +506,8 @@ pub struct block_ {
terminated: bool,
unreachable: bool,
parent: Option<block>,
// The 'kind' of basic block this is.
kind: block_kind,
// The current scope within this basic block
scope: Option<@mut scope_info>,
// Is this block part of a landing pad?
is_lpad: bool,
// info about the AST node this block originated from, if any
@ -517,7 +517,7 @@ pub struct block_ {
fcx: fn_ctxt
}
pub fn block_(llbb: BasicBlockRef, parent: Option<block>, kind: block_kind,
pub fn block_(llbb: BasicBlockRef, parent: Option<block>,
is_lpad: bool, node_info: Option<NodeInfo>, fcx: fn_ctxt)
-> block_ {
@ -526,7 +526,7 @@ pub fn block_(llbb: BasicBlockRef, parent: Option<block>, kind: block_kind,
terminated: false,
unreachable: false,
parent: parent,
kind: kind,
scope: None,
is_lpad: is_lpad,
node_info: node_info,
fcx: fcx
@ -535,10 +535,10 @@ pub fn block_(llbb: BasicBlockRef, parent: Option<block>, kind: block_kind,
pub type block = @mut block_;
pub fn mk_block(llbb: BasicBlockRef, parent: Option<block>, kind: block_kind,
pub fn mk_block(llbb: BasicBlockRef, parent: Option<block>,
is_lpad: bool, node_info: Option<NodeInfo>, fcx: fn_ctxt)
-> block {
@mut block_(llbb, parent, kind, is_lpad, node_info, fcx)
@mut block_(llbb, parent, is_lpad, node_info, fcx)
}
pub struct Result {
@ -563,19 +563,33 @@ pub fn val_ty(v: ValueRef) -> Type {
}
}
pub fn in_scope_cx(cx: block, f: &fn(si: &mut scope_info)) {
pub fn in_scope_cx(cx: block, scope_id: Option<ast::node_id>, f: &fn(si: &mut scope_info)) {
let mut cur = cx;
let mut cur_scope = cur.scope;
loop {
match cur.kind {
block_scope(inf) => {
debug!("in_scope_cx: selected cur=%s (cx=%s)",
cur.to_str(), cx.to_str());
f(inf);
return;
cur_scope = match cur_scope {
Some(inf) => match scope_id {
Some(wanted) => match inf.node_info {
Some(NodeInfo { id: actual, _ }) if wanted == actual => {
debug!("in_scope_cx: selected cur=%s (cx=%s)",
cur.to_str(), cx.to_str());
f(inf);
return;
},
_ => inf.parent,
},
None => {
debug!("in_scope_cx: selected cur=%s (cx=%s)",
cur.to_str(), cx.to_str());
f(inf);
return;
}
},
None => {
cur = block_parent(cur);
cur.scope
}
_ => ()
}
cur = block_parent(cur);
}
}
@ -774,7 +788,7 @@ pub fn C_zero_byte_arr(size: uint) -> ValueRef {
pub fn C_struct(elts: &[ValueRef]) -> ValueRef {
unsafe {
do vec::as_imm_buf(elts) |ptr, len| {
do elts.as_imm_buf |ptr, len| {
llvm::LLVMConstStructInContext(base::task_llcx(), ptr, len as c_uint, False)
}
}
@ -782,7 +796,7 @@ pub fn C_struct(elts: &[ValueRef]) -> ValueRef {
pub fn C_packed_struct(elts: &[ValueRef]) -> ValueRef {
unsafe {
do vec::as_imm_buf(elts) |ptr, len| {
do elts.as_imm_buf |ptr, len| {
llvm::LLVMConstStructInContext(base::task_llcx(), ptr, len as c_uint, True)
}
}
@ -790,7 +804,7 @@ pub fn C_packed_struct(elts: &[ValueRef]) -> ValueRef {
pub fn C_named_struct(T: Type, elts: &[ValueRef]) -> ValueRef {
unsafe {
do vec::as_imm_buf(elts) |ptr, len| {
do elts.as_imm_buf |ptr, len| {
llvm::LLVMConstNamedStruct(T.to_ref(), ptr, len as c_uint)
}
}
@ -826,7 +840,7 @@ pub fn get_param(fndecl: ValueRef, param: uint) -> ValueRef {
pub fn const_get_elt(cx: &CrateContext, v: ValueRef, us: &[c_uint])
-> ValueRef {
unsafe {
let r = do vec::as_imm_buf(us) |p, len| {
let r = do us.as_imm_buf |p, len| {
llvm::LLVMConstExtractValue(v, p, len as c_uint)
};
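
The `common.rs` changes above drop `block_kind`: each block now carries an optional `scope_info` with a `parent` link and optional `NodeInfo`, and `in_scope_cx` walks that chain, stopping either at the innermost scope or at the scope whose node id was requested. A stand-alone model of the lookup, with the block/scope distinction collapsed into one parent chain and all names (`Scope`, `in_scope`) invented:

~~~~
use std::cell::RefCell;
use std::rc::Rc;

// Hypothetical stand-in for `scope_info`: cleanups plus a parent link and an
// optional AST node id identifying the scope.
struct Scope {
    node_id: Option<u32>,
    cleanups: RefCell<Vec<&'static str>>,
    parent: Option<Rc<Scope>>,
}

// Walk the scope chain. With `wanted == None` the innermost scope wins; with
// `Some(id)` keep following `parent` links until the ids match.
fn in_scope<'a>(mut cur: &'a Rc<Scope>, wanted: Option<u32>) -> &'a Rc<Scope> {
    loop {
        match wanted {
            None => return cur,
            Some(id) if cur.node_id == Some(id) => return cur,
            _ => match cur.parent {
                Some(ref p) => cur = p,
                None => panic!("no enclosing scope with id {:?}", wanted),
            },
        }
    }
}

fn main() {
    let outer = Rc::new(Scope {
        node_id: Some(1), cleanups: RefCell::new(vec![]), parent: None,
    });
    let inner = Rc::new(Scope {
        node_id: Some(2), cleanups: RefCell::new(vec![]), parent: Some(outer.clone()),
    });

    // Default: the cleanup lands in the innermost scope.
    in_scope(&inner, None).cleanups.borrow_mut().push("drop temp");
    // Targeted: the cleanup lands in the scope for node id 1 (the outer one).
    in_scope(&inner, Some(1)).cleanups.borrow_mut().push("drop rooted box");

    assert_eq!(inner.cleanups.borrow().len(), 1);
    assert_eq!(outer.cleanups.borrow().len(), 1);
    println!("ok");
}
~~~~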

View file

@ -91,7 +91,7 @@ pub fn const_vec(cx: @mut CrateContext, e: &ast::expr, es: &[@ast::expr])
let sz = llvm::LLVMConstMul(C_uint(cx, es.len()), unit_sz);
let vs = es.map(|e| const_expr(cx, *e));
// If the vector contains enums, an LLVM array won't work.
let v = if vs.iter().any_(|vi| val_ty(*vi) != llunitty) {
let v = if vs.iter().any(|vi| val_ty(*vi) != llunitty) {
C_struct(vs)
} else {
C_array(llunitty, vs)
@ -525,7 +525,7 @@ fn const_expr_unadjusted(cx: @mut CrateContext, e: &ast::expr) -> ValueRef {
_ => cx.sess.span_bug(e.span, "bad const-slice expr")
}
}
ast::expr_path(pth) => {
ast::expr_path(ref pth) => {
assert_eq!(pth.types.len(), 0);
let tcx = cx.tcx;
match tcx.def_map.find(&e.id) {

View file

@ -210,8 +210,10 @@ impl CrateContext {
n_monos: 0u,
n_inlines: 0u,
n_closures: 0u,
n_llvm_insns: 0u,
llvm_insn_ctxt: ~[],
llvm_insns: HashMap::new(),
fn_times: ~[]
fn_stats: ~[]
},
upcalls: upcall::declare_upcalls(targ_cfg, llmod),
tydesc_type: tydesc_type,
@ -226,12 +228,6 @@ impl CrateContext {
}
}
}
pub fn log_fn_time(&mut self, name: ~str, start: time::Timespec, end: time::Timespec) {
let elapsed = 1000 * ((end.sec - start.sec) as int) +
((end.nsec as int) - (start.nsec as int)) / 1000000;
self.stats.fn_times.push((name, elapsed));
}
}
#[unsafe_destructor]

View file

@ -26,7 +26,6 @@ use util::ppaux;
use middle::trans::type_::Type;
use std::str;
use std::vec;
use syntax::ast;
use syntax::ast::ident;
use syntax::ast_map::path_mod;
@ -190,9 +189,13 @@ pub fn trans_log(log_ex: &ast::expr,
let (modpath, modname) = {
let path = &mut bcx.fcx.path;
let modpath = vec::append(
~[path_mod(ccx.sess.ident_of(ccx.link_meta.name))],
path.filtered(|e| match *e { path_mod(_) => true, _ => false }));
let mut modpath = ~[path_mod(ccx.sess.ident_of(ccx.link_meta.name))];
for path.iter().advance |e| {
match *e {
path_mod(_) => { modpath.push(*e) }
_ => {}
}
}
let modname = path_str(ccx.sess, modpath);
(modpath, modname)
};
@ -246,42 +249,48 @@ pub fn trans_break_cont(bcx: block,
let _icx = push_ctxt("trans_break_cont");
// Locate closest loop block, outputting cleanup as we go.
let mut unwind = bcx;
let mut target;
let mut cur_scope = unwind.scope;
let mut target = unwind;
let mut quit = false;
loop {
match unwind.kind {
block_scope(@scope_info {
loop_break: Some(brk),
loop_label: l,
_
}) => {
// If we're looking for a labeled loop, check the label...
target = if to_end {
brk
} else {
unwind
};
match opt_label {
Some(desired) => match l {
Some(actual) if actual == desired => break,
// If it doesn't match the one we want,
// don't break
_ => ()
},
None => break
}
}
_ => ()
cur_scope = match cur_scope {
Some(@scope_info {
loop_break: Some(brk),
loop_label: l,
parent,
_
}) => {
// If we're looking for a labeled loop, check the label...
target = if to_end {
brk
} else {
unwind
};
match opt_label {
Some(desired) => match l {
Some(actual) if actual == desired => break,
// If it doesn't match the one we want,
// don't break
_ => parent,
},
None => break,
}
}
Some(inf) => inf.parent,
None => {
unwind = match unwind.parent {
Some(bcx) => bcx,
// This is a return from a loop body block
None => {
Store(bcx, C_bool(!to_end), bcx.fcx.llretptr.get());
cleanup_and_leave(bcx, None, Some(bcx.fcx.llreturn));
Unreachable(bcx);
return bcx;
}
};
unwind.scope
}
}
unwind = match unwind.parent {
Some(bcx) => bcx,
// This is a return from a loop body block
None => {
Store(bcx, C_bool(!to_end), bcx.fcx.llretptr.get());
cleanup_and_leave(bcx, None, Some(bcx.fcx.llreturn));
Unreachable(bcx);
return bcx;
}
};
}
cleanup_and_Br(bcx, unwind, target.llbb);
Unreachable(bcx);
@ -386,7 +395,7 @@ fn trans_fail_value(bcx: block,
let V_filename = PointerCast(bcx, V_filename, Type::i8p());
let args = ~[V_str, V_filename, C_int(ccx, V_line)];
let bcx = callee::trans_lang_call(
bcx, bcx.tcx().lang_items.fail_fn(), args, expr::Ignore);
bcx, bcx.tcx().lang_items.fail_fn(), args, Some(expr::Ignore)).bcx;
Unreachable(bcx);
return bcx;
}
@ -397,7 +406,7 @@ pub fn trans_fail_bounds_check(bcx: block, sp: span,
let (filename, line) = filename_and_line_num_from_span(bcx, sp);
let args = ~[filename, line, index, len];
let bcx = callee::trans_lang_call(
bcx, bcx.tcx().lang_items.fail_bounds_check_fn(), args, expr::Ignore);
bcx, bcx.tcx().lang_items.fail_bounds_check_fn(), args, Some(expr::Ignore)).bcx;
Unreachable(bcx);
return bcx;
}
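
`trans_break_cont` above now searches the scope chain (and then the enclosing blocks) for the nearest loop whose label matches the requested one, if any. The search behaviour, reduced to an innermost-first list with invented names; the real code additionally handles the "return from a loop body block" case:

~~~~
// Hypothetical loop-scope record: an optional label plus the index of the
// block to jump to when breaking out of it.
struct LoopScope {
    label: Option<&'static str>,
    break_target: usize,
}

// Innermost-first list of enclosing loops, standing in for walking the
// `scope.parent` / `block.parent` links of the real code.
fn find_break_target(scopes: &[LoopScope], wanted: Option<&str>) -> Option<usize> {
    scopes.iter().find(|s| match wanted {
        None => true,                  // unlabeled `break`: innermost loop wins
        Some(l) => s.label == Some(l), // labeled `break 'l`: labels must match
    }).map(|s| s.break_target)
}

fn main() {
    let scopes = [
        LoopScope { label: None, break_target: 30 },          // innermost
        LoopScope { label: Some("outer"), break_target: 10 }, // outermost
    ];
    assert_eq!(find_break_target(&scopes, None), Some(30));
    assert_eq!(find_break_target(&scopes, Some("outer")), Some(10));
    assert_eq!(find_break_target(&scopes, Some("missing")), None);
    println!("ok");
}
~~~~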

View file

@ -100,6 +100,7 @@ use middle::trans::glue;
use middle::trans::tvec;
use middle::trans::type_of;
use middle::trans::write_guard;
use middle::trans::type_::Type;
use middle::ty;
use util::common::indenter;
use util::ppaux::ty_to_str;
@ -188,7 +189,7 @@ pub fn scratch_datum(bcx: block, ty: ty::t, zero: bool) -> Datum {
Datum { val: scratch, ty: ty, mode: ByRef(RevokeClean) }
}
pub fn appropriate_mode(ty: ty::t) -> DatumMode {
pub fn appropriate_mode(tcx: ty::ctxt, ty: ty::t) -> DatumMode {
/*!
*
* Indicates the "appropriate" mode for this value,
@ -197,7 +198,7 @@ pub fn appropriate_mode(ty: ty::t) -> DatumMode {
if ty::type_is_nil(ty) || ty::type_is_bot(ty) {
ByValue
} else if ty::type_is_immediate(ty) {
} else if ty::type_is_immediate(tcx, ty) {
ByValue
} else {
ByRef(RevokeClean)
@ -508,10 +509,10 @@ impl Datum {
}
}
pub fn appropriate_mode(&self) -> DatumMode {
pub fn appropriate_mode(&self, tcx: ty::ctxt) -> DatumMode {
/*! See the `appropriate_mode()` function */
appropriate_mode(self.ty)
appropriate_mode(tcx, self.ty)
}
pub fn to_appropriate_llval(&self, bcx: block) -> ValueRef {
@ -519,7 +520,7 @@ impl Datum {
*
* Yields an llvalue with the `appropriate_mode()`. */
match self.appropriate_mode() {
match self.appropriate_mode(bcx.tcx()) {
ByValue => self.to_value_llval(bcx),
ByRef(_) => self.to_ref_llval(bcx)
}
@ -530,7 +531,7 @@ impl Datum {
*
* Yields a datum with the `appropriate_mode()`. */
match self.appropriate_mode() {
match self.appropriate_mode(bcx.tcx()) {
ByValue => self.to_value_datum(bcx),
ByRef(_) => self.to_ref_datum(bcx)
}
@ -567,8 +568,14 @@ impl Datum {
* This datum must represent an @T or ~T box. Returns a new
* by-ref datum of type T, pointing at the contents. */
let content_ty = match ty::get(self.ty).sty {
ty::ty_box(mt) | ty::ty_uniq(mt) => mt.ty,
let (content_ty, header) = match ty::get(self.ty).sty {
ty::ty_box(mt) => (mt.ty, true),
ty::ty_uniq(mt) => (mt.ty, false),
ty::ty_evec(_, ty::vstore_uniq) | ty::ty_estr(ty::vstore_uniq) => {
let unit_ty = ty::sequence_element_type(bcx.tcx(), self.ty);
let unboxed_vec_ty = ty::mk_mut_unboxed_vec(bcx.tcx(), unit_ty);
(unboxed_vec_ty, true)
}
_ => {
bcx.tcx().sess.bug(fmt!(
"box_body() invoked on non-box type %s",
@ -576,9 +583,16 @@ impl Datum {
}
};
let ptr = self.to_value_llval(bcx);
let body = opaque_box_body(bcx, content_ty, ptr);
Datum {val: body, ty: content_ty, mode: ByRef(ZeroMem)}
if !header && !ty::type_contents(bcx.tcx(), content_ty).contains_managed() {
let ptr = self.to_value_llval(bcx);
let ty = type_of(bcx.ccx(), content_ty);
let body = PointerCast(bcx, ptr, ty.ptr_to());
Datum {val: body, ty: content_ty, mode: ByRef(ZeroMem)}
} else { // has a header
let ptr = self.to_value_llval(bcx);
let body = opaque_box_body(bcx, content_ty, ptr);
Datum {val: body, ty: content_ty, mode: ByRef(ZeroMem)}
}
}
pub fn to_rptr(&self, bcx: block) -> Datum {
@ -657,13 +671,7 @@ impl Datum {
ByValue => {
// Actually, this case cannot happen right
// now, because enums are never immediate.
// But in principle newtype'd immediate
// values should be immediate, and in that
// case the * would be a no-op except for
// changing the type, so I am putting this
// code in place here to do the right
// thing if this change ever goes through.
assert!(ty::type_is_immediate(ty));
assert!(ty::type_is_immediate(bcx.tcx(), ty));
(Some(Datum {ty: ty, ..*self}), bcx)
}
};
@ -695,15 +703,15 @@ impl Datum {
)
}
ByValue => {
// Actually, this case cannot happen right now,
// because structs are never immediate. But in
// principle, newtype'd immediate values should be
// immediate, and in that case the * would be a no-op
// except for changing the type, so I am putting this
// code in place here to do the right thing if this
// change ever goes through.
assert!(ty::type_is_immediate(ty));
(Some(Datum {ty: ty, ..*self}), bcx)
assert!(ty::type_is_immediate(bcx.tcx(), ty));
(
Some(Datum {
val: ExtractValue(bcx, self.val, 0),
ty: ty,
mode: ByValue
}),
bcx
)
}
}
}

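`appropriate_mode` now takes the type context because deciding whether a value is "immediate" (register-sized, passed around by value) can require looking at type definitions. The by-value versus by-ref decision itself, over a toy type enum (nothing here is rustc's code):

~~~~
#[derive(Debug, PartialEq)]
enum Mode { ByValue, ByRef }

// Toy type description; just enough to decide "does this fit in a register?".
enum Ty { Int, Ptr, Struct(Vec<Ty>) }

fn is_immediate(t: &Ty) -> bool {
    matches!(t, Ty::Int | Ty::Ptr)
}

// The mode a freshly created datum should use: immediates travel by value,
// everything else by reference to a stack slot.
fn appropriate_mode(t: &Ty) -> Mode {
    if is_immediate(t) { Mode::ByValue } else { Mode::ByRef }
}

fn main() {
    assert_eq!(appropriate_mode(&Ty::Int), Mode::ByValue);
    assert_eq!(appropriate_mode(&Ty::Struct(vec![Ty::Int, Ty::Ptr])), Mode::ByRef);
    println!("ok");
}
~~~~
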
View file

@ -133,7 +133,7 @@ pub fn create_local_var(bcx: block, local: @ast::local) -> DIVariable {
let cx = bcx.ccx();
let ident = match local.node.pat.node {
ast::pat_ident(_, pth, _) => ast_util::path_to_ident(pth),
ast::pat_ident(_, ref pth, _) => ast_util::path_to_ident(pth),
// FIXME this should be handled (#2533)
_ => {
bcx.sess().span_note(local.span, "debuginfo for pattern bindings NYI");
@ -182,7 +182,7 @@ pub fn create_local_var(bcx: block, local: @ast::local) -> DIVariable {
///
/// Adds the created metadata nodes directly to the crate's IR.
/// The return value should be ignored if called from outside of the debuginfo module.
pub fn create_arg(bcx: block, arg: ast::arg, span: span) -> Option<DIVariable> {
pub fn create_arg(bcx: block, arg: &ast::arg, span: span) -> Option<DIVariable> {
debug!("create_arg");
if true {
// XXX create_arg disabled for now because "node_id_type(bcx, arg.id)" below blows
@ -204,7 +204,7 @@ pub fn create_arg(bcx: block, arg: ast::arg, span: span) -> Option<DIVariable> {
let context = create_function(fcx);
match arg.pat.node {
ast::pat_ident(_, path, _) => {
ast::pat_ident(_, ref path, _) => {
// XXX: This is wrong; it should work for multiple bindings.
let ident = path.idents.last();
let name: &str = cx.sess.str_of(*ident);
@ -259,23 +259,25 @@ pub fn create_function(fcx: fn_ctxt) -> DISubprogram {
let fcx = &mut *fcx;
let span = fcx.span.get();
let (ident, ret_ty, id) = match cx.tcx.items.get_copy(&fcx.id) {
ast_map::node_item(item, _) => {
let fnitem = cx.tcx.items.get_copy(&fcx.id);
let (ident, ret_ty, id) = match fnitem {
ast_map::node_item(ref item, _) => {
match item.node {
ast::item_fn(ref decl, _, _, _, _) => {
(item.ident, decl.output, item.id)
ast::item_fn(ast::fn_decl { output: ref ty, _}, _, _, _, _) => {
(item.ident, ty, item.id)
}
_ => fcx.ccx.sess.span_bug(item.span, "create_function: item bound to non-function")
}
}
ast_map::node_method(method, _, _) => {
(method.ident, method.decl.output, method.id)
ast_map::node_method(@ast::method { decl: ast::fn_decl { output: ref ty, _ },
id: id, ident: ident, _}, _, _) => {
(ident, ty, id)
}
ast_map::node_expr(expr) => {
ast_map::node_expr(ref expr) => {
match expr.node {
ast::expr_fn_block(ref decl, _) => {
let name = gensym_name("fn");
(name, decl.output, expr.id)
(name, &decl.output, expr.id)
}
_ => fcx.ccx.sess.span_bug(expr.span,
"create_function: expected an expr_fn_block here")

View file

@ -150,6 +150,7 @@ use middle::ty::{AutoPtr, AutoBorrowVec, AutoBorrowVecRef, AutoBorrowFn,
use middle::ty;
use util::common::indenter;
use util::ppaux::Repr;
use middle::trans::machine::llsize_of;
use middle::trans::type_::Type;
@ -291,7 +292,7 @@ pub fn trans_to_datum(bcx: block, expr: @ast::expr) -> DatumBlock {
debug!("add_env(closure_ty=%s)", closure_ty.repr(tcx));
let scratch = scratch_datum(bcx, closure_ty, false);
let llfn = GEPi(bcx, scratch.val, [0u, abi::fn_field_code]);
assert_eq!(datum.appropriate_mode(), ByValue);
assert_eq!(datum.appropriate_mode(tcx), ByValue);
Store(bcx, datum.to_appropriate_llval(bcx), llfn);
let llenv = GEPi(bcx, scratch.val, [0u, abi::fn_field_box]);
Store(bcx, base::null_env_ptr(bcx), llenv);
@ -464,7 +465,7 @@ fn trans_rvalue_datum_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
expr, contents);
}
ast::expr_vstore(contents, ast::expr_vstore_uniq) => {
let heap = heap_for_unique(bcx, expr_ty(bcx, contents));
let heap = tvec::heap_for_unique_vector(bcx, expr_ty(bcx, contents));
return tvec::trans_uniq_or_managed_vstore(bcx, heap,
expr, contents);
}
@ -1147,7 +1148,7 @@ fn trans_rec_or_struct(bcx: block,
let mut need_base = vec::from_elem(field_tys.len(), true);
let numbered_fields = do fields.map |field| {
let opt_pos = field_tys.iter().position_(|field_ty| field_ty.ident == field.node.ident);
let opt_pos = field_tys.iter().position(|field_ty| field_ty.ident == field.node.ident);
match opt_pos {
Some(i) => {
need_base[i] = false;
@ -1171,7 +1172,7 @@ fn trans_rec_or_struct(bcx: block,
fields: leftovers })
}
None => {
if need_base.iter().any_(|b| *b) {
if need_base.iter().any(|b| *b) {
tcx.sess.span_bug(expr_span, "missing fields and no base expr")
}
None
@ -1329,12 +1330,23 @@ fn trans_unary_datum(bcx: block,
contents_ty: ty::t,
heap: heap) -> DatumBlock {
let _icx = push_ctxt("trans_boxed_expr");
let base::MallocResult { bcx, box: bx, body } =
base::malloc_general(bcx, contents_ty, heap);
add_clean_free(bcx, bx, heap);
let bcx = trans_into(bcx, contents, SaveIn(body));
revoke_clean(bcx, bx);
return immediate_rvalue_bcx(bcx, bx, box_ty);
if heap == heap_exchange {
let llty = type_of(bcx.ccx(), contents_ty);
let size = llsize_of(bcx.ccx(), llty);
let Result { bcx: bcx, val: val } = malloc_raw_dyn(bcx, contents_ty,
heap_exchange, size);
add_clean_free(bcx, val, heap_exchange);
let bcx = trans_into(bcx, contents, SaveIn(val));
revoke_clean(bcx, val);
return immediate_rvalue_bcx(bcx, val, box_ty);
} else {
let base::MallocResult { bcx, box: bx, body } =
base::malloc_general(bcx, contents_ty, heap);
add_clean_free(bcx, bx, heap);
let bcx = trans_into(bcx, contents, SaveIn(body));
revoke_clean(bcx, bx);
return immediate_rvalue_bcx(bcx, bx, box_ty);
}
}
}
@ -1539,8 +1551,8 @@ fn trans_overloaded_op(bcx: block,
origin)
},
callee::ArgExprs(args),
dest,
DoAutorefArg)
Some(dest),
DoAutorefArg).bcx
}
fn int_cast(bcx: block, lldsttype: Type, llsrctype: Type,
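
For `heap_exchange` boxes, the hunk above computes the payload size, allocates through `malloc_raw_dyn`, registers a cleanup so the allocation is freed if translating the contents fails, then revokes the cleanup once the store succeeds. The same allocate/guard/initialize/disarm shape, sketched against today's allocator API with invented names; it illustrates the pattern only, not the translator's code:

~~~~
use std::alloc::{alloc, dealloc, Layout};

struct FreeOnDrop {
    ptr: *mut u8,
    layout: Layout,
}

impl Drop for FreeOnDrop {
    fn drop(&mut self) {
        // Runs only if initialization is abandoned before the guard is disarmed.
        unsafe { dealloc(self.ptr, self.layout) }
    }
}

fn alloc_box<T>(init: impl FnOnce() -> T) -> *mut T {
    let layout = Layout::new::<T>();
    let ptr = unsafe { alloc(layout) };
    assert!(!ptr.is_null(), "allocation failed");
    // "add_clean_free": if anything below panics, the memory is released.
    let guard = FreeOnDrop { ptr, layout };
    unsafe { (ptr as *mut T).write(init()) };
    // "revoke_clean": initialization succeeded, keep the allocation.
    std::mem::forget(guard);
    ptr as *mut T
}

fn main() {
    let p = alloc_box(|| (1u32, 2u32));
    unsafe {
        assert_eq!(*p, (1, 2));
        drop(Box::from_raw(p)); // hand the allocation back
    }
    println!("ok");
}
~~~~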

View file

@ -103,7 +103,7 @@ fn foreign_signature(ccx: &mut CrateContext, fn_sig: &ty::FnSig)
LlvmSignature {
llarg_tys: llarg_tys,
llret_ty: llret_ty,
sret: !ty::type_is_immediate(fn_sig.output),
sret: !ty::type_is_immediate(ccx.tcx, fn_sig.output),
}
}
@ -113,7 +113,7 @@ fn shim_types(ccx: @mut CrateContext, id: ast::node_id) -> ShimTypes {
_ => ccx.sess.bug("c_arg_and_ret_lltys called on non-function type")
};
let llsig = foreign_signature(ccx, &fn_sig);
let bundle_ty = Type::struct_(llsig.llarg_tys + [llsig.llret_ty.ptr_to()], false);
let bundle_ty = Type::struct_(llsig.llarg_tys + &[llsig.llret_ty.ptr_to()], false);
let ret_def = !ty::type_is_bot(fn_sig.output) &&
!ty::type_is_nil(fn_sig.output);
let fn_ty = abi_info(ccx).compute_info(llsig.llarg_tys, llsig.llret_ty, ret_def);
@ -192,7 +192,7 @@ fn build_wrap_fn_(ccx: @mut CrateContext,
// Patch up the return type if it's not immediate and we're returning via
// the C ABI.
if needs_c_return && !ty::type_is_immediate(tys.fn_sig.output) {
if needs_c_return && !ty::type_is_immediate(ccx.tcx, tys.fn_sig.output) {
let lloutputtype = type_of::type_of(fcx.ccx, tys.fn_sig.output);
fcx.llretptr = Some(alloca(raw_block(fcx, false, fcx.llstaticallocas),
lloutputtype));
@ -648,7 +648,7 @@ pub fn trans_intrinsic(ccx: @mut CrateContext,
// intrinsics, there are no argument cleanups to
// concern ourselves with.
let tp_ty = substs.tys[0];
let mode = appropriate_mode(tp_ty);
let mode = appropriate_mode(ccx.tcx, tp_ty);
let src = Datum {val: get_param(decl, first_real_arg + 1u),
ty: tp_ty, mode: mode};
bcx = src.move_to(bcx, DROP_EXISTING,
@ -657,7 +657,7 @@ pub fn trans_intrinsic(ccx: @mut CrateContext,
"move_val_init" => {
// See comments for `"move_val"`.
let tp_ty = substs.tys[0];
let mode = appropriate_mode(tp_ty);
let mode = appropriate_mode(ccx.tcx, tp_ty);
let src = Datum {val: get_param(decl, first_real_arg + 1u),
ty: tp_ty, mode: mode};
bcx = src.move_to(bcx, INIT, get_param(decl, first_real_arg));
@ -731,7 +731,7 @@ pub fn trans_intrinsic(ccx: @mut CrateContext,
let lldestptr = PointerCast(bcx, lldestptr, Type::i8p());
let llsrcval = get_param(decl, first_real_arg);
let llsrcptr = if ty::type_is_immediate(in_type) {
let llsrcptr = if ty::type_is_immediate(ccx.tcx, in_type) {
let llsrcptr = alloca(bcx, llintype);
Store(bcx, llsrcval, llsrcptr);
llsrcptr
@ -789,7 +789,7 @@ pub fn trans_intrinsic(ccx: @mut CrateContext,
bcx = trans_call_inner(
bcx, None, fty, ty::mk_nil(),
|bcx| Callee {bcx: bcx, data: Closure(datum)},
ArgVals(arg_vals), Ignore, DontAutorefArg);
ArgVals(arg_vals), Some(Ignore), DontAutorefArg).bcx;
}
"morestack_addr" => {
// XXX This is a hack to grab the address of this particular
@ -1221,7 +1221,7 @@ pub fn trans_foreign_fn(ccx: @mut CrateContext,
let mut i = 0u;
let n = tys.fn_sig.inputs.len();
if !ty::type_is_immediate(tys.fn_sig.output) {
if !ty::type_is_immediate(bcx.tcx(), tys.fn_sig.output) {
let llretptr = load_inbounds(bcx, llargbundle, [0u, n]);
llargvals.push(llretptr);
}
@ -1247,7 +1247,8 @@ pub fn trans_foreign_fn(ccx: @mut CrateContext,
shim_types: &ShimTypes,
llargbundle: ValueRef,
llretval: ValueRef) {
if bcx.fcx.llretptr.is_some() && ty::type_is_immediate(shim_types.fn_sig.output) {
if bcx.fcx.llretptr.is_some() &&
ty::type_is_immediate(bcx.tcx(), shim_types.fn_sig.output) {
// Write the value into the argument bundle.
let arg_count = shim_types.fn_sig.inputs.len();
let llretptr = load_inbounds(bcx,

View file

@ -47,7 +47,7 @@ pub fn trans_free(cx: block, v: ValueRef) -> block {
callee::trans_lang_call(cx,
cx.tcx().lang_items.free_fn(),
[PointerCast(cx, v, Type::i8p())],
expr::Ignore)
Some(expr::Ignore)).bcx
}
pub fn trans_exchange_free(cx: block, v: ValueRef) -> block {
@ -55,7 +55,7 @@ pub fn trans_exchange_free(cx: block, v: ValueRef) -> block {
callee::trans_lang_call(cx,
cx.tcx().lang_items.exchange_free_fn(),
[PointerCast(cx, v, Type::i8p())],
expr::Ignore)
Some(expr::Ignore)).bcx
}
pub fn take_ty(cx: block, v: ValueRef, t: ty::t) -> block {
@ -386,7 +386,9 @@ pub fn make_free_glue(bcx: block, v: ValueRef, t: ty::t) {
ty::ty_uniq(*) => {
uniq::make_free_glue(bcx, v, t)
}
ty::ty_evec(_, ty::vstore_uniq) | ty::ty_estr(ty::vstore_uniq) |
ty::ty_evec(_, ty::vstore_uniq) | ty::ty_estr(ty::vstore_uniq) => {
tvec::make_uniq_free_glue(bcx, v, t)
}
ty::ty_evec(_, ty::vstore_box) | ty::ty_estr(ty::vstore_box) => {
make_free_glue(bcx, v,
tvec::expand_boxed_vec_ty(bcx.tcx(), t));
@ -738,15 +740,9 @@ pub fn make_generic_glue(ccx: @mut CrateContext,
name: &str)
-> ValueRef {
let _icx = push_ctxt("make_generic_glue");
if !ccx.sess.trans_stats() {
return make_generic_glue_inner(ccx, t, llfn, helper);
}
let start = time::get_time();
let llval = make_generic_glue_inner(ccx, t, llfn, helper);
let end = time::get_time();
ccx.log_fn_time(fmt!("glue %s %s", name, ty_to_short_str(ccx.tcx, t)), start, end);
return llval;
let glue_name = fmt!("glue %s %s", name, ty_to_short_str(ccx.tcx, t));
let _s = StatRecorder::new(ccx, glue_name);
make_generic_glue_inner(ccx, t, llfn, helper)
}
pub fn emit_tydescs(ccx: &mut CrateContext) {
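
The timing change above replaces explicit start/end timestamps around glue generation with a `StatRecorder` guard whose destructor records the elapsed time (and, per the `Stats` change earlier, an instruction count). A toy RAII recorder in the same spirit; only the pattern matches the diff, none of the fields do:

~~~~
use std::time::Instant;

struct StatRecorder<'a> {
    stats: &'a mut Vec<(String, u128)>, // (name, elapsed microseconds)
    name: String,
    start: Instant,
}

impl<'a> StatRecorder<'a> {
    fn new(stats: &'a mut Vec<(String, u128)>, name: &str) -> StatRecorder<'a> {
        StatRecorder { stats, name: name.to_string(), start: Instant::now() }
    }
}

impl<'a> Drop for StatRecorder<'a> {
    fn drop(&mut self) {
        // Record the timing on every exit path, panics included.
        let elapsed = self.start.elapsed().as_micros();
        self.stats.push((std::mem::take(&mut self.name), elapsed));
    }
}

fn make_glue(stats: &mut Vec<(String, u128)>) {
    let _s = StatRecorder::new(stats, "glue drop int");
    // ... generate code here; no explicit end-of-function bookkeeping needed ...
    std::thread::sleep(std::time::Duration::from_millis(1));
}

fn main() {
    let mut stats = Vec::new();
    make_glue(&mut stats);
    for (name, us) in &stats {
        println!("{name}: {us} us");
    }
}
~~~~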

View file

@ -14,9 +14,6 @@ use lib::llvm::{ValueRef};
use lib::llvm::False;
use lib::llvm::llvm;
use middle::trans::common::*;
use middle::trans::type_of;
use middle::ty;
use util::ppaux::ty_to_str;
use middle::trans::type_::Type;
@ -116,42 +113,3 @@ pub fn llalign_of(cx: &CrateContext, ty: Type) -> ValueRef {
llvm::LLVMAlignOf(ty.to_ref()), cx.int_type.to_ref(), False);
}
}
// Computes the size of the data part of an enum.
pub fn static_size_of_enum(cx: &mut CrateContext, t: ty::t) -> uint {
if cx.enum_sizes.contains_key(&t) {
return cx.enum_sizes.get_copy(&t);
}
debug!("static_size_of_enum %s", ty_to_str(cx.tcx, t));
match ty::get(t).sty {
ty::ty_enum(tid, ref substs) => {
// Compute max(variant sizes).
let mut max_size = 0;
let variants = ty::enum_variants(cx.tcx, tid);
for variants.iter().advance |variant| {
if variant.args.len() == 0 {
loop;
}
let lltypes = variant.args.map(|&variant_arg| {
let substituted = ty::subst(cx.tcx, substs, variant_arg);
type_of::sizing_type_of(cx, substituted)
});
debug!("static_size_of_enum: variant %s type %s",
cx.tcx.sess.str_of(variant.name),
cx.tn.type_to_str(Type::struct_(lltypes, false)));
let this_size = llsize_of_real(cx, Type::struct_(lltypes, false));
if max_size < this_size {
max_size = this_size;
}
}
cx.enum_sizes.insert(t, max_size);
return max_size;
}
_ => cx.sess.bug("static_size_of_enum called on non-enum")
}
}

View file

@ -577,6 +577,10 @@ pub fn trans_trait_callee_from_llval(bcx: block,
}
ast::sty_region(*) => {
match store {
ty::UniqTraitStore
if !ty::type_contents(bcx.tcx(), callee_ty).contains_managed() => {
llself = llbox;
}
ty::BoxTraitStore |
ty::UniqTraitStore => {
llself = GEPi(bcx, llbox, [0u, abi::box_field_body]);
@ -641,16 +645,18 @@ pub fn vtable_id(ccx: @mut CrateContext,
-> mono_id {
match origin {
&typeck::vtable_static(impl_id, ref substs, sub_vtables) => {
let psubsts = param_substs {
tys: copy *substs,
vtables: Some(sub_vtables),
self_ty: None,
self_vtable: None
};
monomorphize::make_mono_id(
ccx,
impl_id,
*substs,
if sub_vtables.is_empty() {
None
} else {
Some(sub_vtables)
},
None,
&psubsts,
None)
}

View file

@ -50,11 +50,13 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
fn_id=%s, \
real_substs=%s, \
vtables=%s, \
self_vtable=%s, \
impl_did_opt=%s, \
ref_id=%?)",
fn_id.repr(ccx.tcx),
real_substs.repr(ccx.tcx),
vtables.repr(ccx.tcx),
self_vtable.repr(ccx.tcx),
impl_did_opt.repr(ccx.tcx),
ref_id);
@ -71,21 +73,28 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
for real_substs.tps.iter().advance |s| { assert!(!ty::type_has_params(*s)); }
for substs.iter().advance |s| { assert!(!ty::type_has_params(*s)); }
let param_uses = type_use::type_uses_for(ccx, fn_id, substs.len());
let hash_id = make_mono_id(ccx, fn_id, substs, vtables, impl_did_opt,
let psubsts = @param_substs {
tys: substs,
vtables: vtables,
self_ty: real_substs.self_ty,
self_vtable: self_vtable
};
let hash_id = make_mono_id(ccx, fn_id, impl_did_opt,
&*psubsts,
Some(param_uses));
if hash_id.params.iter().any_(
if hash_id.params.iter().any(
|p| match *p { mono_precise(_, _) => false, _ => true }) {
must_cast = true;
}
debug!("monomorphic_fn(\
fn_id=%s, \
vtables=%s, \
substs=%s, \
psubsts=%s, \
hash_id=%?)",
fn_id.repr(ccx.tcx),
vtables.repr(ccx.tcx),
substs.repr(ccx.tcx),
psubsts.repr(ccx.tcx),
hash_id);
match ccx.monomorphized.find(&hash_id) {
@ -142,8 +151,8 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
ast_map::node_struct_ctor(_, i, pt) => (pt, i.ident, i.span)
};
let mono_ty = ty::subst_tps(ccx.tcx, substs,
real_substs.self_ty, llitem_ty);
let mono_ty = ty::subst_tps(ccx.tcx, psubsts.tys,
psubsts.self_ty, llitem_ty);
let llfty = type_of_fn_from_ty(ccx, mono_ty);
ccx.stats.n_monos += 1;
@ -172,13 +181,6 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
lldecl
};
let psubsts = Some(@param_substs {
tys: substs,
vtables: vtables,
self_ty: real_substs.self_ty,
self_vtable: self_vtable
});
let lldecl = match map_node {
ast_map::node_item(i@@ast::item {
node: ast::item_fn(ref decl, _, _, _, ref body),
@ -192,7 +194,7 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
body,
d,
no_self,
psubsts,
Some(psubsts),
fn_id.node,
[]);
d
@ -202,7 +204,7 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
}
ast_map::node_foreign_item(i, _, _, _) => {
let d = mk_lldecl();
foreign::trans_intrinsic(ccx, d, i, pt, psubsts.get(), i.attrs,
foreign::trans_intrinsic(ccx, d, i, pt, psubsts, i.attrs,
ref_id);
d
}
@ -214,7 +216,7 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
match v.node.kind {
ast::tuple_variant_kind(ref args) => {
trans_enum_variant(ccx, enum_item.id, v, /*bad*/copy *args,
this_tv.disr_val, psubsts, d);
this_tv.disr_val, Some(psubsts), d);
}
ast::struct_variant_kind(_) =>
ccx.tcx.sess.bug("can't monomorphize struct variants"),
@ -225,13 +227,13 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
// XXX: What should the self type be here?
let d = mk_lldecl();
set_inline_hint_if_appr(/*bad*/copy mth.attrs, d);
meth::trans_method(ccx, pt, mth, psubsts, d);
meth::trans_method(ccx, pt, mth, Some(psubsts), d);
d
}
ast_map::node_trait_method(@ast::provided(mth), _, pt) => {
let d = mk_lldecl();
set_inline_hint_if_appr(/*bad*/copy mth.attrs, d);
meth::trans_method(ccx, /*bad*/copy *pt, mth, psubsts, d);
meth::trans_method(ccx, /*bad*/copy *pt, mth, Some(psubsts), d);
d
}
ast_map::node_struct_ctor(struct_def, _, _) => {
@ -241,7 +243,7 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
/*bad*/copy struct_def.fields,
struct_def.ctor_id.expect("ast-mapped tuple struct \
didn't have a ctor id"),
psubsts,
Some(psubsts),
d);
d
}
@ -320,26 +322,36 @@ pub fn normalize_for_monomorphization(tcx: ty::ctxt,
pub fn make_mono_id(ccx: @mut CrateContext,
item: ast::def_id,
substs: &[ty::t],
vtables: Option<typeck::vtable_res>,
impl_did_opt: Option<ast::def_id>,
substs: &param_substs,
param_uses: Option<@~[type_use::type_uses]>) -> mono_id {
// FIXME (possibly #5801): Need a lot of type hints to get
// .collect() to work.
let precise_param_ids: ~[(ty::t, Option<@~[mono_id]>)] = match vtables {
let substs_iter = substs.self_ty.iter().chain_(substs.tys.iter());
let precise_param_ids: ~[(ty::t, Option<@~[mono_id]>)] = match substs.vtables {
Some(vts) => {
debug!("make_mono_id vtables=%s substs=%s",
vts.repr(ccx.tcx), substs.repr(ccx.tcx));
vts.iter().zip(substs.iter()).transform(|(vtable, subst)| {
vts.repr(ccx.tcx), substs.tys.repr(ccx.tcx));
let self_vtables = substs.self_vtable.map(|vtbl| @~[copy *vtbl]);
let vts_iter = self_vtables.iter().chain_(vts.iter());
vts_iter.zip(substs_iter).transform(|(vtable, subst)| {
let v = vtable.map(|vt| meth::vtable_id(ccx, vt));
(*subst, if !v.is_empty() { Some(@v) } else { None })
}).collect()
}
None => substs.iter().transform(|subst| (*subst, None::<@~[mono_id]>)).collect()
None => substs_iter.transform(|subst| (*subst, None::<@~[mono_id]>)).collect()
};
let param_ids = match param_uses {
Some(ref uses) => {
precise_param_ids.iter().zip(uses.iter()).transform(|(id, uses)| {
// param_uses doesn't include a use for the self type.
// We just say it is fully used.
let self_use =
substs.self_ty.map(|_| type_use::use_repr|type_use::use_tydesc);
let uses_iter = self_use.iter().chain_(uses.iter());
precise_param_ids.iter().zip(uses_iter).transform(|(id, uses)| {
if ccx.sess.no_monomorphic_collapse() {
match copy *id {
(a, b) => mono_precise(a, b)
@ -356,7 +368,7 @@ pub fn make_mono_id(ccx: @mut CrateContext,
let llty = type_of::type_of(ccx, subst);
let size = machine::llbitsize_of_real(ccx, llty);
let align = machine::llalign_of_min(ccx, llty);
let mode = datum::appropriate_mode(subst);
let mode = datum::appropriate_mode(ccx.tcx, subst);
let data_class = mono_data_classify(subst);
debug!("make_mono_id: type %s -> size %u align %u mode %? class %?",

View file

@ -17,7 +17,6 @@ use middle::trans::callee::{ArgVals, DontAutorefArg};
use middle::trans::callee;
use middle::trans::common::*;
use middle::trans::datum::*;
use middle::trans::expr::SaveIn;
use middle::trans::glue;
use middle::trans::machine;
use middle::trans::meth;
@ -96,14 +95,13 @@ impl Reflector {
ty::mk_bare_fn(tcx, copy self.visitor_methods[mth_idx].fty);
let v = self.visitor_val;
debug!("passing %u args:", args.len());
let bcx = self.bcx;
let mut bcx = self.bcx;
for args.iter().enumerate().advance |(i, a)| {
debug!("arg %u: %s", i, bcx.val_to_str(*a));
}
let bool_ty = ty::mk_bool();
let scratch = scratch_datum(bcx, bool_ty, false);
// XXX: Should not be BoxTraitStore!
let bcx = callee::trans_call_inner(
let result = unpack_result!(bcx, callee::trans_call_inner(
self.bcx, None, mth_ty, bool_ty,
|bcx| meth::trans_trait_callee_from_llval(bcx,
mth_ty,
@ -113,8 +111,7 @@ impl Reflector {
ast::sty_region(
None,
ast::m_imm)),
ArgVals(args), SaveIn(scratch.val), DontAutorefArg);
let result = scratch.to_value_llval(bcx);
ArgVals(args), None, DontAutorefArg));
let result = bool_to_i1(bcx, result);
let next_bcx = sub_block(bcx, "next");
CondBr(bcx, result, next_bcx.llbb, self.final_bcx.llbb);
@ -194,7 +191,11 @@ impl Reflector {
}
ty::ty_uniq(ref mt) => {
let extra = self.c_mt(mt);
self.visit("uniq", extra)
if ty::type_contents(bcx.tcx(), t).contains_managed() {
self.visit("uniq_managed", extra)
} else {
self.visit("uniq", extra)
}
}
ty::ty_ptr(ref mt) => {
let extra = self.c_mt(mt);
@ -278,7 +279,7 @@ impl Reflector {
let opaqueptrty = ty::mk_ptr(ccx.tcx, ty::mt { ty: opaquety, mutbl: ast::m_imm });
let make_get_disr = || {
let sub_path = bcx.fcx.path + [path_name(special_idents::anon)];
let sub_path = bcx.fcx.path + &[path_name(special_idents::anon)];
let sym = mangle_internal_name_by_path_and_seq(ccx,
sub_path,
"get_disr");

View file

@ -33,6 +33,23 @@ use std::option::None;
use syntax::ast;
use syntax::codemap;
pub fn make_uniq_free_glue(bcx: block, vptrptr: ValueRef, box_ty: ty::t)
-> block {
let box_datum = immediate_rvalue(Load(bcx, vptrptr), box_ty);
let not_null = IsNotNull(bcx, box_datum.val);
do with_cond(bcx, not_null) |bcx| {
let body_datum = box_datum.box_body(bcx);
let bcx = glue::drop_ty(bcx, body_datum.to_ref_llval(bcx),
body_datum.ty);
if ty::type_contents(bcx.tcx(), box_ty).contains_managed() {
glue::trans_free(bcx, box_datum.val)
} else {
glue::trans_exchange_free(bcx, box_datum.val)
}
}
}
// Boxed vector types are in some sense currently a "shorthand" for a box
// containing an unboxed vector. This expands a boxed vector type into such an
// expanded type. It doesn't respect mutability, but that doesn't matter at
@ -42,7 +59,7 @@ pub fn expand_boxed_vec_ty(tcx: ty::ctxt, t: ty::t) -> ty::t {
let unboxed_vec_ty = ty::mk_mut_unboxed_vec(tcx, unit_ty);
match ty::get(t).sty {
ty::ty_estr(ty::vstore_uniq) | ty::ty_evec(_, ty::vstore_uniq) => {
ty::mk_imm_uniq(tcx, unboxed_vec_ty)
fail!("cannot treat vectors/strings as exchange allocations yet");
}
ty::ty_estr(ty::vstore_box) | ty::ty_evec(_, ty::vstore_box) => {
ty::mk_imm_box(tcx, unboxed_vec_ty)
@ -95,9 +112,17 @@ pub fn alloc_raw(bcx: block, unit_ty: ty::t,
return rslt(bcx, bx);
}
pub fn heap_for_unique_vector(bcx: block, t: ty::t) -> heap {
if ty::type_contents(bcx.tcx(), t).contains_managed() {
heap_managed_unique
} else {
heap_exchange_vector
}
}
pub fn alloc_uniq_raw(bcx: block, unit_ty: ty::t,
fill: ValueRef, alloc: ValueRef) -> Result {
alloc_raw(bcx, unit_ty, fill, alloc, base::heap_for_unique(bcx, unit_ty))
alloc_raw(bcx, unit_ty, fill, alloc, heap_for_unique_vector(bcx, unit_ty))
}
pub fn alloc_vec(bcx: block,
@ -298,7 +323,7 @@ pub fn trans_uniq_or_managed_vstore(bcx: block, heap: heap, vstore_expr: @ast::e
// Handle ~"".
match heap {
heap_exchange => {
heap_exchange_vector => {
match content_expr.node {
ast::expr_lit(@codemap::spanned {
node: ast::lit_str(s), _
@ -312,7 +337,7 @@ pub fn trans_uniq_or_managed_vstore(bcx: block, heap: heap, vstore_expr: @ast::e
bcx,
bcx.tcx().lang_items.strdup_uniq_fn(),
[ llptrval, llsizeval ],
expr::SaveIn(lldestval.to_ref_llval(bcx)));
Some(expr::SaveIn(lldestval.to_ref_llval(bcx)))).bcx;
return DatumBlock {
bcx: bcx,
datum: lldestval
@ -321,7 +346,7 @@ pub fn trans_uniq_or_managed_vstore(bcx: block, heap: heap, vstore_expr: @ast::e
_ => {}
}
}
heap_exchange_closure => fail!("vectors are not allocated with closure_exchange_alloc"),
heap_exchange | heap_exchange_closure => fail!("vectors use vector_exchange_alloc"),
heap_managed | heap_managed_unique => {}
}
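
Unique vectors and strings above are now routed to the new `heap_exchange_vector` heap unless their contents still involve @-managed data, in which case they stay on the managed-unique heap. A toy model of that decision; `Heap`, `Ty` and `contains_managed` are illustrative definitions, not rustc's:

~~~~
#[derive(Debug, PartialEq)]
enum Heap { Exchange, ManagedUnique }

enum Ty { Int, ManagedBox(Box<Ty>), Vec(Box<Ty>) }

// Does the type own any @-managed data anywhere inside it?
fn contains_managed(t: &Ty) -> bool {
    match t {
        Ty::Int => false,
        Ty::ManagedBox(_) => true,
        Ty::Vec(elem) => contains_managed(elem),
    }
}

fn heap_for_unique_vector(t: &Ty) -> Heap {
    if contains_managed(t) { Heap::ManagedUnique } else { Heap::Exchange }
}

fn main() {
    assert_eq!(heap_for_unique_vector(&Ty::Vec(Box::new(Ty::Int))),
               Heap::Exchange);
    assert_eq!(heap_for_unique_vector(&Ty::Vec(Box::new(Ty::ManagedBox(Box::new(Ty::Int))))),
               Heap::ManagedUnique);
    println!("ok");
}
~~~~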

View file

@ -245,7 +245,7 @@ impl Type {
}
pub fn box(ctx: &CrateContext, ty: &Type) -> Type {
Type::struct_(Type::box_header_fields(ctx) + [*ty], false)
Type::struct_(Type::box_header_fields(ctx) + &[*ty], false)
}
pub fn opaque_box(ctx: &CrateContext) -> Type {

View file

@ -18,8 +18,8 @@ use middle::trans::type_::Type;
use syntax::ast;
pub fn arg_is_indirect(_: &CrateContext, arg_ty: &ty::t) -> bool {
!ty::type_is_immediate(*arg_ty)
pub fn arg_is_indirect(ccx: &CrateContext, arg_ty: &ty::t) -> bool {
!ty::type_is_immediate(ccx.tcx, *arg_ty)
}
pub fn type_of_explicit_arg(ccx: &mut CrateContext, arg_ty: &ty::t) -> Type {
@ -41,7 +41,7 @@ pub fn type_of_fn(cx: &mut CrateContext, inputs: &[ty::t], output: ty::t) -> Typ
// Arg 0: Output pointer.
// (if the output type is non-immediate)
let output_is_immediate = ty::type_is_immediate(output);
let output_is_immediate = ty::type_is_immediate(cx.tcx, output);
let lloutputtype = type_of(cx, output);
if !output_is_immediate {
atys.push(lloutputtype.ptr_to());
@ -72,29 +72,6 @@ pub fn type_of_fn_from_ty(cx: &mut CrateContext, fty: ty::t) -> Type {
}
}
pub fn type_of_non_gc_box(cx: &mut CrateContext, t: ty::t) -> Type {
assert!(!ty::type_needs_infer(t));
let t_norm = ty::normalize_ty(cx.tcx, t);
if t != t_norm {
type_of_non_gc_box(cx, t_norm)
} else {
match ty::get(t).sty {
ty::ty_box(mt) => {
let ty = type_of(cx, mt.ty);
Type::box(cx, &ty).ptr_to()
}
ty::ty_uniq(mt) => {
let ty = type_of(cx, mt.ty);
Type::unique(cx, &ty).ptr_to()
}
_ => {
cx.sess.bug("non-box in type_of_non_gc_box");
}
}
}
}
// A "sizing type" is an LLVM type, the size and alignment of which are
// guaranteed to be equivalent to what you would get out of `type_of()`. It's
// useful because:
@ -104,8 +81,7 @@ pub fn type_of_non_gc_box(cx: &mut CrateContext, t: ty::t) -> Type {
//
// (2) It won't make any recursive calls to determine the structure of the
// type behind pointers. This can help prevent infinite loops for
// recursive types. For example, `static_size_of_enum()` relies on this
// behavior.
// recursive types. For example, enum types rely on this behavior.
pub fn sizing_type_of(cx: &mut CrateContext, t: ty::t) -> Type {
match cx.llsizingtypes.find_copy(&t) {
@ -232,7 +208,11 @@ pub fn type_of(cx: &mut CrateContext, t: ty::t) -> Type {
ty::ty_opaque_box => Type::opaque_box(cx).ptr_to(),
ty::ty_uniq(ref mt) => {
let ty = type_of(cx, mt.ty);
Type::unique(cx, &ty).ptr_to()
if ty::type_contents(cx.tcx, mt.ty).contains_managed() {
Type::unique(cx, &ty).ptr_to()
} else {
ty.ptr_to()
}
}
ty::ty_evec(ref mt, ty::vstore_uniq) => {
let ty = type_of(cx, mt.ty);
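
The `ty_uniq` case above now omits the managed-box header whenever the pointee owns no @-managed data, so `~T` lowers to a plain pointer to `T`. Roughly how the two pointee layouts compare; the header fields here are assumptions for illustration only:

~~~~
#![allow(dead_code)]

#[repr(C)]
struct BoxHeader {
    ref_count: usize,
    type_desc: usize,
    prev: usize,
    next: usize,
}

// Layout kept while the allocation can be traced by the @-heap.
#[repr(C)]
struct WithHeader<T> {
    header: BoxHeader,
    payload: T,
}

// Header-free case: the unique pointer points straight at the payload.
type Bare<T> = T;

fn main() {
    use std::mem::size_of;
    println!("with header: {} bytes", size_of::<WithHeader<u64>>());
    println!("bare payload: {} bytes", size_of::<Bare<u64>>());
    assert!(size_of::<WithHeader<u64>>() > size_of::<Bare<u64>>());
}
~~~~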

View file

@ -9,7 +9,6 @@
// except according to those terms.
use back;
use lib::llvm::ValueRef;
use middle::trans::base::*;
use middle::trans::build::*;
@ -18,6 +17,9 @@ use middle::trans::datum::immediate_rvalue;
use middle::trans::datum;
use middle::trans::glue;
use middle::ty;
use middle::trans::machine::llsize_of;
use middle::trans::type_of;
use middle::trans::type_of::*;
pub fn make_free_glue(bcx: block, vptrptr: ValueRef, box_ty: ty::t)
-> block {
@ -45,12 +47,21 @@ pub fn duplicate(bcx: block, src_box: ValueRef, src_ty: ty::t) -> Result {
let body_datum = src_datum.box_body(bcx);
// Malloc space in exchange heap and copy src into it
let MallocResult {
bcx: bcx,
box: dst_box,
body: dst_body
} = malloc_unique(bcx, body_datum.ty);
body_datum.copy_to(bcx, datum::INIT, dst_body);
if ty::type_contents(bcx.tcx(), src_ty).contains_managed() {
let MallocResult {
bcx: bcx,
box: dst_box,
body: dst_body
} = malloc_general(bcx, body_datum.ty, heap_managed_unique);
body_datum.copy_to(bcx, datum::INIT, dst_body);
rslt(bcx, dst_box)
rslt(bcx, dst_box)
} else {
let body_datum = body_datum.to_value_datum(bcx);
let llty = type_of(bcx.ccx(), body_datum.ty);
let size = llsize_of(bcx.ccx(), llty);
let Result { bcx: bcx, val: val } = malloc_raw_dyn(bcx, body_datum.ty, heap_exchange, size);
body_datum.copy_to(bcx, datum::INIT, val);
Result { bcx: bcx, val: val }
}
}

View file

@ -81,7 +81,7 @@ pub fn return_to_mut(mut bcx: block,
filename_val,
line_val
],
expr::Ignore);
Some(expr::Ignore)).bcx;
}
callee::trans_lang_call(
@ -93,8 +93,8 @@ pub fn return_to_mut(mut bcx: block,
filename_val,
line_val
],
expr::Ignore
)
Some(expr::Ignore)
).bcx
}
fn root(datum: &Datum,
@ -123,7 +123,7 @@ fn root(datum: &Datum,
let scratch = scratch_datum(bcx, datum.ty, true);
datum.copy_to_datum(bcx, INIT, scratch);
let cleanup_bcx = find_bcx_for_scope(bcx, root_info.scope);
add_clean_temp_mem(cleanup_bcx, scratch.val, scratch.ty);
add_clean_temp_mem_in_scope(cleanup_bcx, root_info.scope, scratch.val, scratch.ty);
// Now, consider also freezing it.
match root_info.freeze {
@ -144,7 +144,7 @@ fn root(datum: &Datum,
let box_ptr = Load(bcx, PointerCast(bcx, scratch.val, Type::i8p().ptr_to()));
bcx = callee::trans_lang_call(
let llresult = unpack_result!(bcx, callee::trans_lang_call(
bcx,
freeze_did,
[
@ -152,7 +152,7 @@ fn root(datum: &Datum,
filename,
line
],
expr::SaveIn(scratch_bits.val));
Some(expr::SaveIn(scratch_bits.val))));
if bcx.tcx().sess.debug_borrows() {
bcx = callee::trans_lang_call(
@ -160,15 +160,15 @@ fn root(datum: &Datum,
bcx.tcx().lang_items.record_borrow_fn(),
[
box_ptr,
Load(bcx, scratch_bits.val),
llresult,
filename,
line
],
expr::Ignore);
Some(expr::Ignore)).bcx;
}
add_clean_return_to_mut(
cleanup_bcx, root_key, scratch.val, scratch_bits.val,
cleanup_bcx, root_info.scope, root_key, scratch.val, scratch_bits.val,
filename, line);
}
}
@ -188,5 +188,5 @@ fn perform_write_guard(datum: &Datum,
bcx,
bcx.tcx().lang_items.check_not_borrowed_fn(),
[PointerCast(bcx, llval, Type::i8p()), filename, line],
expr::Ignore)
Some(expr::Ignore)).bcx
}

View file

@ -29,7 +29,6 @@ use util::enum_set::{EnumSet, CLike};
use std::cast;
use std::cmp;
use std::hashmap::{HashMap, HashSet};
use std::iter;
use std::ops;
use std::ptr::to_unsafe_ptr;
use std::to_bytes;
@ -1486,8 +1485,8 @@ pub fn type_needs_subst(ty: t) -> bool {
}
pub fn trait_ref_contains_error(tref: &ty::TraitRef) -> bool {
tref.substs.self_ty.iter().any_(|&t| type_is_error(t)) ||
tref.substs.tps.iter().any_(|&t| type_is_error(t))
tref.substs.self_ty.iter().any(|&t| type_is_error(t)) ||
tref.substs.tps.iter().any(|&t| type_is_error(t))
}
pub fn type_is_ty_var(ty: t) -> bool {
@ -1647,9 +1646,22 @@ pub fn type_is_scalar(ty: t) -> bool {
}
}
pub fn type_is_immediate(ty: t) -> bool {
fn type_is_newtype_immediate(cx: ctxt, ty: t) -> bool {
match get(ty).sty {
ty_struct(def_id, ref substs) => {
let fields = struct_fields(cx, def_id, substs);
fields.len() == 1 &&
fields[0].ident == token::special_idents::unnamed_field &&
type_is_immediate(cx, fields[0].mt.ty)
}
_ => false
}
}
pub fn type_is_immediate(cx: ctxt, ty: t) -> bool {
return type_is_scalar(ty) || type_is_boxed(ty) ||
type_is_unique(ty) || type_is_region_ptr(ty);
type_is_unique(ty) || type_is_region_ptr(ty) ||
type_is_newtype_immediate(cx, ty);
}
pub fn type_needs_drop(cx: ctxt, ty: t) -> bool {
@ -1752,7 +1764,7 @@ pub struct TypeContents {
impl TypeContents {
pub fn meets_bounds(&self, cx: ctxt, bbs: BuiltinBounds) -> bool {
iter::all(|bb| self.meets_bound(cx, bb), |f| bbs.each(f))
bbs.iter().all(|bb| self.meets_bound(cx, bb))
}
pub fn meets_bound(&self, cx: ctxt, bb: BuiltinBound) -> bool {
@ -2330,13 +2342,13 @@ pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
ty_struct(did, ref substs) => {
seen.push(did);
let fields = struct_fields(cx, did, substs);
let r = fields.iter().any_(|f| type_requires(cx, seen, r_ty, f.mt.ty));
let r = fields.iter().any(|f| type_requires(cx, seen, r_ty, f.mt.ty));
seen.pop();
r
}
ty_tup(ref ts) => {
ts.iter().any_(|t| type_requires(cx, seen, r_ty, *t))
ts.iter().any(|t| type_requires(cx, seen, r_ty, *t))
}
ty_enum(ref did, _) if seen.contains(did) => {
@ -2347,7 +2359,7 @@ pub fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
seen.push(did);
let vs = enum_variants(cx, did);
let r = !vs.is_empty() && do vs.iter().all |variant| {
do variant.args.iter().any_ |aty| {
do variant.args.iter().any |aty| {
let sty = subst(cx, substs, *aty);
type_requires(cx, seen, r_ty, sty)
}
@ -3148,7 +3160,7 @@ pub fn expr_kind(tcx: ctxt,
ast::expr_cast(*) => {
match tcx.node_types.find(&(expr.id as uint)) {
Some(&t) => {
if ty::type_is_immediate(t) {
if ty::type_is_immediate(tcx, t) {
RvalueDatumExpr
} else {
RvalueDpsExpr
@ -3229,7 +3241,7 @@ pub fn field_idx_strict(tcx: ty::ctxt, id: ast::ident, fields: &[field])
}
pub fn method_idx(id: ast::ident, meths: &[@Method]) -> Option<uint> {
meths.iter().position_(|m| m.ident == id)
meths.iter().position(|m| m.ident == id)
}
/// Returns a vector containing the indices of all type parameters that appear
@ -3612,12 +3624,12 @@ pub fn impl_trait_ref(cx: ctxt, id: ast::def_id) -> Option<@TraitRef> {
debug!("(impl_trait_ref) searching for trait impl %?", id);
match cx.items.find(&id.node) {
Some(&ast_map::node_item(@ast::item {
node: ast::item_impl(_, opt_trait, _, _),
node: ast::item_impl(_, ref opt_trait, _, _),
_},
_)) => {
match opt_trait {
Some(t) => Some(ty::node_id_to_trait_ref(cx, t.ref_id)),
None => None
&Some(ref t) => Some(ty::node_id_to_trait_ref(cx, t.ref_id)),
&None => None
}
}
_ => None
@ -3816,41 +3828,62 @@ pub fn enum_variants(cx: ctxt, id: ast::def_id) -> @~[VariantInfo] {
}, _) => {
let mut disr_val = -1;
@enum_definition.variants.iter().transform(|variant| {
let ctor_ty = node_id_to_type(cx, variant.node.id);
match variant.node.kind {
ast::tuple_variant_kind(ref args) => {
let ctor_ty = node_id_to_type(cx, variant.node.id);
let arg_tys = {
if args.len() > 0u {
ty_fn_args(ctor_ty).map(|a| *a)
} else {
let arg_tys = if args.len() > 0u {
ty_fn_args(ctor_ty).map(|a| *a) }
else {
~[]
}
};
};
match variant.node.disr_expr {
Some (ex) => {
disr_val = match const_eval::eval_const_expr(cx,
ex) {
const_eval::const_int(val) => val as int,
_ => cx.sess.bug("tag_variants: bad disr expr")
_ => cx.sess.bug("enum_variants: bad disr expr")
}
}
_ => disr_val += 1
}
@VariantInfo_{args: arg_tys,
ctor_ty: ctor_ty,
name: variant.node.name,
id: ast_util::local_def(variant.node.id),
disr_val: disr_val,
vis: variant.node.vis
@VariantInfo_{
args: arg_tys,
ctor_ty: ctor_ty,
name: variant.node.name,
id: ast_util::local_def(variant.node.id),
disr_val: disr_val,
vis: variant.node.vis
}
}
ast::struct_variant_kind(_) => {
fail!("struct variant kinds unimpl in enum_variants")
},
ast::struct_variant_kind(struct_def) => {
let arg_tys =
// Is this check needed for structs too, or are they always guaranteed
// to have a valid constructor function?
if struct_def.fields.len() > 0 {
ty_fn_args(ctor_ty).map(|a| *a)
} else {
~[]
};
assert!(variant.node.disr_expr.is_none());
disr_val += 1;
@VariantInfo_{
args: arg_tys,
ctor_ty: ctor_ty,
name: variant.node.name,
id: ast_util::local_def(variant.node.id),
disr_val: disr_val,
vis: variant.node.vis
}
}
}
}).collect()
}
_ => cx.sess.bug("tag_variants: id not bound to an enum")
_ => cx.sess.bug("enum_variants: id not bound to an enum")
}
};
cx.enum_var_cache.insert(id, result);
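
`type_is_immediate` above gains a `ctxt` argument so it can also classify a newtype struct (a single unnamed field wrapping an immediate type) as immediate. An imitation of that check against a toy type table; the `Tcx` map merely stands in for the real type context:

~~~~
use std::collections::HashMap;

enum Ty {
    Int,
    Ptr,
    Struct(&'static str), // looked up in the "tcx" below
    Tuple(Vec<Ty>),
}

type Tcx = HashMap<&'static str, Vec<Ty>>; // struct name -> field types

fn type_is_immediate(tcx: &Tcx, t: &Ty) -> bool {
    match t {
        Ty::Int | Ty::Ptr => true,
        Ty::Tuple(_) => false,
        Ty::Struct(name) => {
            let fields = &tcx[name];
            // the newtype case: exactly one field whose type is immediate
            fields.len() == 1 && type_is_immediate(tcx, &fields[0])
        }
    }
}

fn main() {
    let mut tcx: Tcx = HashMap::new();
    tcx.insert("Fd", vec![Ty::Int]);            // struct Fd(int);
    tcx.insert("Pair", vec![Ty::Int, Ty::Int]); // struct Pair(int, int);
    assert!(type_is_immediate(&tcx, &Ty::Struct("Fd")));
    assert!(!type_is_immediate(&tcx, &Ty::Struct("Pair")));
    assert!(!type_is_immediate(&tcx, &Ty::Tuple(vec![Ty::Int, Ty::Ptr])));
    println!("ok");
}
~~~~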

View file

@ -85,15 +85,15 @@ pub trait AstConv {
pub fn get_region_reporting_err(
tcx: ty::ctxt,
span: span,
a_r: Option<@ast::Lifetime>,
a_r: &Option<ast::Lifetime>,
res: Result<ty::Region, RegionError>) -> ty::Region
{
match res {
result::Ok(r) => r,
result::Err(ref e) => {
let descr = match a_r {
None => ~"anonymous lifetime",
Some(a) => fmt!("lifetime %s",
&None => ~"anonymous lifetime",
&Some(ref a) => fmt!("lifetime %s",
lifetime_to_str(a, tcx.sess.intr()))
};
tcx.sess.span_err(
@ -109,19 +109,19 @@ pub fn ast_region_to_region<AC:AstConv,RS:region_scope + Copy + 'static>(
this: &AC,
rscope: &RS,
default_span: span,
opt_lifetime: Option<@ast::Lifetime>) -> ty::Region
opt_lifetime: &Option<ast::Lifetime>) -> ty::Region
{
let (span, res) = match opt_lifetime {
None => {
&None => {
(default_span, rscope.anon_region(default_span))
}
Some(ref lifetime) if lifetime.ident == special_idents::statik => {
&Some(ref lifetime) if lifetime.ident == special_idents::statik => {
(lifetime.span, Ok(ty::re_static))
}
Some(ref lifetime) if lifetime.ident == special_idents::self_ => {
&Some(ref lifetime) if lifetime.ident == special_idents::self_ => {
(lifetime.span, rscope.self_region(lifetime.span))
}
Some(ref lifetime) => {
&Some(ref lifetime) => {
(lifetime.span, rscope.named_region(lifetime.span,
lifetime.ident))
}
@ -136,7 +136,7 @@ fn ast_path_substs<AC:AstConv,RS:region_scope + Copy + 'static>(
def_id: ast::def_id,
decl_generics: &ty::Generics,
self_ty: Option<ty::t>,
path: @ast::Path) -> ty::substs
path: &ast::Path) -> ty::substs
{
/*!
*
@ -164,11 +164,11 @@ fn ast_path_substs<AC:AstConv,RS:region_scope + Copy + 'static>(
}
(&Some(_), &None) => {
let res = rscope.anon_region(path.span);
let r = get_region_reporting_err(this.tcx(), path.span, None, res);
let r = get_region_reporting_err(this.tcx(), path.span, &None, res);
Some(r)
}
(&Some(_), &Some(_)) => {
Some(ast_region_to_region(this, rscope, path.span, path.rp))
Some(ast_region_to_region(this, rscope, path.span, &path.rp))
}
};
@ -179,7 +179,7 @@ fn ast_path_substs<AC:AstConv,RS:region_scope + Copy + 'static>(
fmt!("wrong number of type arguments: expected %u but found %u",
decl_generics.type_param_defs.len(), path.types.len()));
}
let tps = path.types.map(|a_t| ast_ty_to_ty(this, rscope, *a_t));
let tps = path.types.map(|a_t| ast_ty_to_ty(this, rscope, a_t));
substs {self_r:self_r, self_ty:self_ty, tps:tps}
}
@ -188,7 +188,7 @@ pub fn ast_path_to_substs_and_ty<AC:AstConv,RS:region_scope + Copy + 'static>(
this: &AC,
rscope: &RS,
did: ast::def_id,
path: @ast::Path) -> ty_param_substs_and_ty
path: &ast::Path) -> ty_param_substs_and_ty
{
let tcx = this.tcx();
let ty::ty_param_bounds_and_ty {
@ -206,7 +206,7 @@ pub fn ast_path_to_trait_ref<AC:AstConv,RS:region_scope + Copy + 'static>(
rscope: &RS,
trait_def_id: ast::def_id,
self_ty: Option<ty::t>,
path: @ast::Path) -> @ty::TraitRef
path: &ast::Path) -> @ty::TraitRef
{
let trait_def =
this.get_trait_def(trait_def_id);
@ -228,7 +228,7 @@ pub fn ast_path_to_ty<AC:AstConv,RS:region_scope + Copy + 'static>(
this: &AC,
rscope: &RS,
did: ast::def_id,
path: @ast::Path)
path: &ast::Path)
-> ty_param_substs_and_ty
{
// Look up the polytype of the item and then substitute the provided types
@ -276,7 +276,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:region_scope + Copy + 'static>(
}
return ty::mk_evec(tcx, mt, vst);
}
ast::ty_path(path, bounds, id) => {
ast::ty_path(ref path, ref bounds, id) => {
// Note that the "bounds must be empty if path is not a trait"
// restriction is enforced in the below case for ty_path, which
// will run after this as long as the path isn't a trait.
@ -321,7 +321,7 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:region_scope + Copy + 'static>(
}
fn check_path_args(tcx: ty::ctxt,
path: @ast::Path,
path: &ast::Path,
flags: uint) {
if (flags & NO_TPS) != 0u {
if path.types.len() > 0u {
@ -371,13 +371,13 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:region_scope + Copy + 'static>(
ast::ty_ptr(ref mt) => {
ty::mk_ptr(tcx, ast_mt_to_mt(this, rscope, mt))
}
ast::ty_rptr(region, ref mt) => {
ast::ty_rptr(ref region, ref mt) => {
let r = ast_region_to_region(this, rscope, ast_ty.span, region);
mk_pointer(this, rscope, mt, ty::vstore_slice(r),
|tmt| ty::mk_rptr(tcx, r, tmt))
}
ast::ty_tup(ref fields) => {
let flds = fields.map(|t| ast_ty_to_ty(this, rscope, *t));
let flds = fields.map(|t| ast_ty_to_ty(this, rscope, t));
ty::mk_tup(tcx, flds)
}
ast::ty_bare_fn(ref bf) => {
@ -398,14 +398,14 @@ pub fn ast_ty_to_ty<AC:AstConv, RS:region_scope + Copy + 'static>(
f.purity,
f.onceness,
bounds,
f.region,
&f.region,
&f.decl,
None,
&f.lifetimes,
ast_ty.span);
ty::mk_closure(tcx, fn_decl)
}
ast::ty_path(path, bounds, id) => {
ast::ty_path(ref path, ref bounds, id) => {
let a_def = match tcx.def_map.find(&id) {
None => tcx.sess.span_fatal(
ast_ty.span, fmt!("unbound path %s",
@ -525,13 +525,13 @@ pub fn ty_of_arg<AC:AstConv,
RS:region_scope + Copy + 'static>(
this: &AC,
rscope: &RS,
a: ast::arg,
a: &ast::arg,
expected_ty: Option<ty::t>)
-> ty::t {
match a.ty.node {
ast::ty_infer if expected_ty.is_some() => expected_ty.get(),
ast::ty_infer => this.ty_infer(a.ty.span),
_ => ast_ty_to_ty(this, rscope, a.ty),
_ => ast_ty_to_ty(this, rscope, &a.ty),
}
}
@ -553,7 +553,7 @@ pub fn bound_lifetimes<AC:AstConv>(
let special_idents = [special_idents::statik, special_idents::self_];
let mut bound_lifetime_names = opt_vec::Empty;
ast_lifetimes.map_to_vec(|ast_lifetime| {
if special_idents.iter().any_(|&i| i == ast_lifetime.ident) {
if special_idents.iter().any(|&i| i == ast_lifetime.ident) {
this.tcx().sess.span_err(
ast_lifetime.span,
fmt!("illegal lifetime parameter name: `%s`",
@ -621,11 +621,11 @@ fn ty_of_method_or_bare_fn<AC:AstConv,RS:region_scope + Copy + 'static>(
transform_self_ty(this, &rb, self_info)
});
let input_tys = decl.inputs.map(|a| ty_of_arg(this, &rb, *a, None));
let input_tys = decl.inputs.map(|a| ty_of_arg(this, &rb, a, None));
let output_ty = match decl.output.node {
ast::ty_infer => this.ty_infer(decl.output.span),
_ => ast_ty_to_ty(this, &rb, decl.output)
_ => ast_ty_to_ty(this, &rb, &decl.output)
};
return (opt_transformed_self_ty,
@ -647,7 +647,7 @@ fn ty_of_method_or_bare_fn<AC:AstConv,RS:region_scope + Copy + 'static>(
ast::sty_value => {
Some(self_info.untransformed_self_ty)
}
ast::sty_region(lifetime, mutability) => {
ast::sty_region(ref lifetime, mutability) => {
let region =
ast_region_to_region(this, rscope,
self_info.explicit_self.span,
@ -677,7 +677,7 @@ pub fn ty_of_closure<AC:AstConv,RS:region_scope + Copy + 'static>(
purity: ast::purity,
onceness: ast::Onceness,
bounds: ty::BuiltinBounds,
opt_lifetime: Option<@ast::Lifetime>,
opt_lifetime: &Option<ast::Lifetime>,
decl: &ast::fn_decl,
expected_sig: Option<ty::FnSig>,
lifetimes: &OptVec<ast::Lifetime>,
@ -695,10 +695,10 @@ pub fn ty_of_closure<AC:AstConv,RS:region_scope + Copy + 'static>(
// resolve the function bound region in the original region
// scope `rscope`, not the scope of the function parameters
let bound_region = match opt_lifetime {
Some(_) => {
&Some(_) => {
ast_region_to_region(this, rscope, span, opt_lifetime)
}
None => {
&None => {
match sigil {
ast::OwnedSigil | ast::ManagedSigil => {
// @fn(), ~fn() default to static as the bound
@ -724,14 +724,14 @@ pub fn ty_of_closure<AC:AstConv,RS:region_scope + Copy + 'static>(
// were supplied
if i < e.inputs.len() {Some(e.inputs[i])} else {None}
};
ty_of_arg(this, &rb, *a, expected_arg_ty)
ty_of_arg(this, &rb, a, expected_arg_ty)
}.collect();
let expected_ret_ty = expected_sig.map(|e| e.output);
let output_ty = match decl.output.node {
ast::ty_infer if expected_ret_ty.is_some() => expected_ret_ty.get(),
ast::ty_infer => this.ty_infer(decl.output.span),
_ => ast_ty_to_ty(this, &rb, decl.output)
_ => ast_ty_to_ty(this, &rb, &decl.output)
};
ty::ClosureTy {
@ -764,7 +764,7 @@ fn conv_builtin_bounds(tcx: ty::ctxt, ast_bounds: &Option<OptVec<ast::TyParamBou
let mut builtin_bounds = ty::EmptyBuiltinBounds();
for bound_vec.iter().advance |ast_bound| {
match *ast_bound {
ast::TraitTyParamBound(b) => {
ast::TraitTyParamBound(ref b) => {
match lookup_def_tcx(tcx, b.path.span, b.ref_id) {
ast::def_trait(trait_did) => {
if try_add_builtin_trait(tcx,

Some files were not shown because too many files have changed in this diff.