auto merge of #6286 : nikomatsakis/rust/issue-5910-dyna-freeze, r=nikomatsakis

This rather sprawling branch refactors the borrow checker and much of the region code, addressing a number of outstanding issues. I will close them manually after validating that there are test cases for each one, but here is a (probably partial) list:

  - #4903: Flow sensitivity
  - #3387: Moves in overloaded operators
  - #3850: Region granularity
  - #4666: Odd loaning errors
  - #6021: borrow check errors with hashmaps
  - #5910: @mut broken

cc #5047

(take 5)
This commit is contained in:
bors 2013-05-06 17:52:52 -07:00
commit 05460fcd5a
238 changed files with 14376 additions and 5194 deletions

View file

@ -110,6 +110,9 @@ endif
ifdef SAVE_TEMPS
CFG_RUSTC_FLAGS += --save-temps
endif
ifdef ASM_COMMENTS
CFG_RUSTC_FLAGS += -z asm-comments
endif
ifdef TIME_PASSES
CFG_RUSTC_FLAGS += -Z time-passes
endif

View file

@ -42,7 +42,7 @@ pub fn empty_cell<T>() -> Cell<T> {
pub impl<T> Cell<T> {
/// Yields the value, failing if the cell is empty.
fn take(&self) -> T {
let mut self = unsafe { transmute_mut(self) };
let self = unsafe { transmute_mut(self) };
if self.is_empty() {
fail!(~"attempt to take an empty cell");
}
@ -54,7 +54,7 @@ pub impl<T> Cell<T> {
/// Returns the value, failing if the cell is full.
fn put_back(&self, value: T) {
let mut self = unsafe { transmute_mut(self) };
let self = unsafe { transmute_mut(self) };
if !self.is_empty() {
fail!(~"attempt to put a value back into a full cell");
}

View file

@ -15,6 +15,7 @@ use ptr::mut_null;
use repr::BoxRepr;
use sys::TypeDesc;
use cast::transmute;
#[cfg(notest)] use unstable::lang::clear_task_borrow_list;
#[cfg(notest)] use ptr::to_unsafe_ptr;
@ -179,6 +180,10 @@ pub unsafe fn annihilate() {
n_bytes_freed: 0
};
// Quick hack: we need to free this list upon task exit, and this
// is a convenient place to do it.
clear_task_borrow_list();
// Pass 1: Make all boxes immortal.
//
// In this pass, nothing gets freed, so it does not matter whether

View file

@ -205,8 +205,8 @@ impl<T: Owned> Selectable for Port<T> {
fn header(&self) -> *PacketHeader {
unsafe {
match self.endp {
Some(ref endp) => endp.header(),
None => fail!(~"peeking empty stream")
Some(ref endp) => endp.header(),
None => fail!(~"peeking empty stream")
}
}
}

View file

@ -28,13 +28,13 @@ pub mod rustrt {
pub extern {
unsafe fn tdefl_compress_mem_to_heap(psrc_buf: *const c_void,
src_buf_len: size_t,
pout_len: *size_t,
pout_len: *mut size_t,
flags: c_int)
-> *c_void;
unsafe fn tinfl_decompress_mem_to_heap(psrc_buf: *const c_void,
src_buf_len: size_t,
pout_len: *size_t,
pout_len: *mut size_t,
flags: c_int)
-> *c_void;
}
@ -52,11 +52,11 @@ pub fn deflate_bytes(bytes: &const [u8]) -> ~[u8] {
let res =
rustrt::tdefl_compress_mem_to_heap(b as *c_void,
len as size_t,
&outsz,
&mut outsz,
lz_norm);
assert!(res as int != 0);
let out = vec::raw::from_buf_raw(res as *u8,
outsz as uint);
outsz as uint);
libc::free(res);
out
}
@ -66,11 +66,11 @@ pub fn deflate_bytes(bytes: &const [u8]) -> ~[u8] {
pub fn inflate_bytes(bytes: &const [u8]) -> ~[u8] {
do vec::as_const_buf(bytes) |b, len| {
unsafe {
let outsz : size_t = 0;
let mut outsz : size_t = 0;
let res =
rustrt::tinfl_decompress_mem_to_heap(b as *c_void,
len as size_t,
&outsz,
&mut outsz,
0);
assert!(res as int != 0);
let out = vec::raw::from_buf_raw(res as *u8,

View file

@ -25,6 +25,7 @@ use rand;
use uint;
use vec;
use util::unreachable;
use kinds::Copy;
static INITIAL_CAPACITY: uint = 32u; // 2^5
@ -529,6 +530,18 @@ pub impl<K: Hash + Eq, V> HashMap<K, V> {
}
}
// Convenience accessors available only when the value type is `Copy`:
// they hand back an owned copy instead of a borrowed pointer, so the
// caller does not need to keep a loan on the map alive.
pub impl<K: Hash + Eq, V: Copy> HashMap<K, V> {
/// Like `find`, but returns a copy of the value.
fn find_copy(&self, k: &K) -> Option<V> {
self.find(k).map_consume(|v| copy *v)
}
/// Like `get`, but returns a copy of the value.
/// NOTE(review): presumably fails if `k` is absent, mirroring `get` -- confirm.
fn get_copy(&self, k: &K) -> V {
copy *self.get(k)
}
}
impl<K:Hash + Eq,V:Eq> Eq for HashMap<K, V> {
fn eq(&self, other: &HashMap<K, V>) -> bool {
if self.len() != other.len() { return false; }

View file

@ -1022,7 +1022,7 @@ pub enum WriterType { Screen, File }
pub trait Writer {
/// Write all of the given bytes.
fn write(&self, v: &const [u8]);
fn write(&self, v: &[u8]);
/// Move the current position within the stream. The second parameter
/// determines the position that the first parameter is relative to.
@ -1039,7 +1039,7 @@ pub trait Writer {
}
impl Writer for @Writer {
fn write(&self, v: &const [u8]) { self.write(v) }
fn write(&self, v: &[u8]) { self.write(v) }
fn seek(&self, a: int, b: SeekStyle) { self.seek(a, b) }
fn tell(&self) -> uint { self.tell() }
fn flush(&self) -> int { self.flush() }
@ -1047,7 +1047,7 @@ impl Writer for @Writer {
}
impl<W:Writer,C> Writer for Wrapper<W, C> {
fn write(&self, bs: &const [u8]) { self.base.write(bs); }
fn write(&self, bs: &[u8]) { self.base.write(bs); }
fn seek(&self, off: int, style: SeekStyle) { self.base.seek(off, style); }
fn tell(&self) -> uint { self.base.tell() }
fn flush(&self) -> int { self.base.flush() }
@ -1055,7 +1055,7 @@ impl<W:Writer,C> Writer for Wrapper<W, C> {
}
impl Writer for *libc::FILE {
fn write(&self, v: &const [u8]) {
fn write(&self, v: &[u8]) {
unsafe {
do vec::as_const_buf(v) |vbuf, len| {
let nout = libc::fwrite(vbuf as *c_void,
@ -1105,7 +1105,7 @@ pub fn FILE_writer(f: *libc::FILE, cleanup: bool) -> @Writer {
}
impl Writer for fd_t {
fn write(&self, v: &const [u8]) {
fn write(&self, v: &[u8]) {
unsafe {
let mut count = 0u;
do vec::as_const_buf(v) |vbuf, len| {
@ -1262,7 +1262,7 @@ pub fn u64_to_be_bytes<T>(n: u64, size: uint,
}
}
pub fn u64_from_be_bytes(data: &const [u8],
pub fn u64_from_be_bytes(data: &[u8],
start: uint,
size: uint)
-> u64 {
@ -1497,7 +1497,7 @@ pub struct BytesWriter {
}
impl Writer for BytesWriter {
fn write(&self, v: &const [u8]) {
fn write(&self, v: &[u8]) {
let v_len = v.len();
let bytes_len = vec::uniq_len(&const self.bytes);

View file

@ -268,8 +268,7 @@ pub mod types {
pub type ssize_t = i32;
}
pub mod posix01 {
use libc::types::os::arch::c95::{c_int, c_short, c_long,
time_t};
use libc::types::os::arch::c95::{c_short, c_long, time_t};
use libc::types::os::arch::posix88::{dev_t, gid_t, ino_t};
use libc::types::os::arch::posix88::{mode_t, off_t};
use libc::types::os::arch::posix88::{uid_t};

View file

@ -352,13 +352,13 @@ pub fn fsync_fd(fd: c_int, _l: io::fsync::Level) -> c_int {
}
}
pub struct Pipe { mut in: c_int, mut out: c_int }
pub struct Pipe { in: c_int, out: c_int }
#[cfg(unix)]
pub fn pipe() -> Pipe {
unsafe {
let mut fds = Pipe {in: 0 as c_int,
out: 0 as c_int };
out: 0 as c_int };
assert!((libc::pipe(&mut fds.in) == (0 as c_int)));
return Pipe {in: fds.in, out: fds.out};
}
@ -1025,10 +1025,10 @@ pub fn last_os_error() -> ~str {
#[cfg(target_os = "macos")]
#[cfg(target_os = "android")]
#[cfg(target_os = "freebsd")]
fn strerror_r(errnum: c_int, buf: *c_char, buflen: size_t) -> c_int {
fn strerror_r(errnum: c_int, buf: *mut c_char, buflen: size_t) -> c_int {
#[nolink]
extern {
unsafe fn strerror_r(errnum: c_int, buf: *c_char,
unsafe fn strerror_r(errnum: c_int, buf: *mut c_char,
buflen: size_t) -> c_int;
}
unsafe {
@ -1040,10 +1040,10 @@ pub fn last_os_error() -> ~str {
// and requires macros to instead use the POSIX compliant variant.
// So we just use __xpg_strerror_r which is always POSIX compliant
#[cfg(target_os = "linux")]
fn strerror_r(errnum: c_int, buf: *c_char, buflen: size_t) -> c_int {
fn strerror_r(errnum: c_int, buf: *mut c_char, buflen: size_t) -> c_int {
#[nolink]
extern {
unsafe fn __xpg_strerror_r(errnum: c_int, buf: *c_char,
unsafe fn __xpg_strerror_r(errnum: c_int, buf: *mut c_char,
buflen: size_t) -> c_int;
}
unsafe {
@ -1053,7 +1053,7 @@ pub fn last_os_error() -> ~str {
let mut buf = [0 as c_char, ..TMPBUF_SZ];
unsafe {
let err = strerror_r(errno() as c_int, &buf[0],
let err = strerror_r(errno() as c_int, &mut buf[0],
TMPBUF_SZ as size_t);
if err < 0 {
fail!(~"strerror_r failure");

View file

@ -296,34 +296,34 @@ impl<T> Ord for *const T {
// Equality for region pointers
#[cfg(notest)]
impl<'self,T:Eq> Eq for &'self const T {
impl<'self,T:Eq> Eq for &'self T {
#[inline(always)]
fn eq(&self, other: & &'self const T) -> bool {
fn eq(&self, other: & &'self T) -> bool {
return *(*self) == *(*other);
}
#[inline(always)]
fn ne(&self, other: & &'self const T) -> bool {
fn ne(&self, other: & &'self T) -> bool {
return *(*self) != *(*other);
}
}
// Comparison for region pointers
#[cfg(notest)]
impl<'self,T:Ord> Ord for &'self const T {
impl<'self,T:Ord> Ord for &'self T {
#[inline(always)]
fn lt(&self, other: & &'self const T) -> bool {
fn lt(&self, other: & &'self T) -> bool {
*(*self) < *(*other)
}
#[inline(always)]
fn le(&self, other: & &'self const T) -> bool {
fn le(&self, other: & &'self T) -> bool {
*(*self) <= *(*other)
}
#[inline(always)]
fn ge(&self, other: & &'self const T) -> bool {
fn ge(&self, other: & &'self T) -> bool {
*(*self) >= *(*other)
}
#[inline(always)]
fn gt(&self, other: & &'self const T) -> bool {
fn gt(&self, other: & &'self T) -> bool {
*(*self) > *(*other)
}
}

View file

@ -31,8 +31,10 @@ pub struct Environment {
argc: c_int,
/// The argv value passed to main
argv: **c_char,
/// Print GC debugging info
debug_mem: bool
/// Print GC debugging info (true if env var RUST_DEBUG_MEM is set)
debug_mem: bool,
/// Print GC debugging info (true if env var RUST_DEBUG_BORROW is set)
debug_borrow: bool,
}
/// Get the global environment settings

View file

@ -137,7 +137,6 @@ pub impl Scheduler {
/// Called by a running task to end execution, after which it will
/// be recycled by the scheduler for reuse in a new task.
fn terminate_current_task(~self) {
let mut self = self;
assert!(self.in_task_context());
rtdebug!("ending running task");
@ -153,7 +152,6 @@ pub impl Scheduler {
}
fn schedule_new_task(~self, task: ~Task) {
let mut self = self;
assert!(self.in_task_context());
do self.switch_running_tasks_and_then(task) |last_task| {
@ -305,7 +303,7 @@ pub impl Scheduler {
unsafe {
let last_task = transmute::<Option<&Task>, Option<&mut Task>>(last_task);
let last_task_context = match last_task {
Some(ref t) => Some(&mut t.saved_context), None => None
Some(t) => Some(&mut t.saved_context), None => None
};
let next_task_context = match self.current_task {
Some(ref mut t) => Some(&mut t.saved_context), None => None

View file

@ -2472,9 +2472,6 @@ pub trait StrSlice<'self> {
fn any(&self, it: &fn(char) -> bool) -> bool;
fn contains<'a>(&self, needle: &'a str) -> bool;
fn contains_char(&self, needle: char) -> bool;
#[cfg(stage1)]
#[cfg(stage2)]
#[cfg(stage3)]
fn char_iter(&self) -> StrCharIterator<'self>;
fn each(&self, it: &fn(u8) -> bool);
fn eachi(&self, it: &fn(uint, u8) -> bool);
@ -2536,9 +2533,6 @@ impl<'self> StrSlice<'self> for &'self str {
contains_char(*self, needle)
}
#[cfg(stage1)]
#[cfg(stage2)]
#[cfg(stage3)]
#[inline]
fn char_iter(&self) -> StrCharIterator<'self> {
StrCharIterator {
@ -2732,17 +2726,11 @@ impl Clone for ~str {
}
}
#[cfg(stage1)]
#[cfg(stage2)]
#[cfg(stage3)]
pub struct StrCharIterator<'self> {
priv index: uint,
priv string: &'self str,
}
#[cfg(stage1)]
#[cfg(stage2)]
#[cfg(stage3)]
impl<'self> Iterator<char> for StrCharIterator<'self> {
#[inline]
fn next(&mut self) -> Option<char> {

View file

@ -19,7 +19,7 @@ use io::Writer;
use option::{None, Option, Some};
use str;
pub type Cb<'self> = &'self fn(buf: &const [u8]) -> bool;
pub type Cb<'self> = &'self fn(buf: &[u8]) -> bool;
/**
* A trait to implement in order to make a type hashable;

View file

@ -501,7 +501,7 @@ pub mod rt {
pub fn conv_int(cv: Conv, i: int, buf: &mut ~str) {
let radix = 10;
let prec = get_int_precision(cv);
let mut s : ~str = uint_to_str_prec(int::abs(i) as uint, radix, prec);
let s : ~str = uint_to_str_prec(int::abs(i) as uint, radix, prec);
let head = if i >= 0 {
if have_flag(cv.flags, flag_sign_always) {
@ -516,7 +516,7 @@ pub mod rt {
}
pub fn conv_uint(cv: Conv, u: uint, buf: &mut ~str) {
let prec = get_int_precision(cv);
let mut rs =
let rs =
match cv.ty {
TyDefault => uint_to_str_prec(u, 10, prec),
TyHexLower => uint_to_str_prec(u, 16, prec),
@ -559,7 +559,7 @@ pub mod rt {
CountIs(c) => (float::to_str_exact, c as uint),
CountImplied => (float::to_str_digits, 6u)
};
let mut s = to_str(f, digits);
let s = to_str(f, digits);
let head = if 0.0 <= f {
if have_flag(cv.flags, flag_sign_always) {
Some('+')

View file

@ -10,8 +10,9 @@
//! Runtime calls emitted by the compiler.
use uint;
use cast::transmute;
use libc::{c_char, c_uchar, c_void, size_t, uintptr_t, c_int};
use libc::{c_char, c_uchar, c_void, size_t, uintptr_t, c_int, STDERR_FILENO};
use managed::raw::BoxRepr;
use str;
use sys;
@ -19,17 +20,19 @@ use unstable::exchange_alloc;
use cast::transmute;
use rt::{context, OldTaskContext};
use rt::local_services::borrow_local_services;
use option::{Option, Some, None};
use io;
#[allow(non_camel_case_types)]
pub type rust_task = c_void;
#[cfg(target_word_size = "32")]
pub static FROZEN_BIT: uint = 0x80000000;
#[cfg(target_word_size = "64")]
pub static FROZEN_BIT: uint = 0x8000000000000000;
pub static FROZEN_BIT: uint = 1 << (uint::bits - 1);
pub static MUT_BIT: uint = 1 << (uint::bits - 2);
static ALL_BITS: uint = FROZEN_BIT | MUT_BIT;
pub mod rustrt {
use libc::{c_char, uintptr_t};
use unstable::lang::rust_task;
use libc::{c_void, c_char, uintptr_t};
pub extern {
#[rust_stack]
@ -45,6 +48,17 @@ pub mod rustrt {
#[fast_ffi]
unsafe fn rust_upcall_free_noswitch(ptr: *c_char);
#[rust_stack]
fn rust_take_task_borrow_list(task: *rust_task) -> *c_void;
#[rust_stack]
fn rust_set_task_borrow_list(task: *rust_task, map: *c_void);
#[rust_stack]
fn rust_try_get_task() -> *rust_task;
fn rust_dbg_breakpoint();
}
}
@ -55,7 +69,7 @@ pub fn fail_(expr: *c_char, file: *c_char, line: size_t) -> ! {
#[lang="fail_bounds_check"]
pub fn fail_bounds_check(file: *c_char, line: size_t,
index: size_t, len: size_t) {
index: size_t, len: size_t) {
let msg = fmt!("index out of bounds: the len is %d but the index is %d",
len as int, index as int);
do str::as_buf(msg) |p, _len| {
@ -63,11 +77,74 @@ pub fn fail_bounds_check(file: *c_char, line: size_t,
}
}
pub fn fail_borrowed() {
let msg = "borrowed";
do str::as_buf(msg) |msg_p, _| {
do str::as_buf("???") |file_p, _| {
fail_(msg_p as *c_char, file_p as *c_char, 0);
// One outstanding dynamic borrow of a managed box. Records are pushed by
// `record_borrow` and popped by `unrecord_borrow`; on a borrow failure,
// `fail_borrowed` scans them to report where the conflicting loan began.
#[deriving(Eq)]
struct BorrowRecord {
// The box that was borrowed.
box: *mut BoxRepr,
// Source file of the borrow site, as a C string supplied by the caller.
file: *c_char,
// Source line of the borrow site.
line: size_t
}
// Removes the current task's borrow list and returns ownership of it.
// Returns `None` when there is no current task or no list has ever been
// stored for it (i.e. borrows are not being recorded).
fn try_take_task_borrow_list() -> Option<~[BorrowRecord]> {
unsafe {
let cur_task: *rust_task = rustrt::rust_try_get_task();
if cur_task.is_not_null() {
let ptr = rustrt::rust_take_task_borrow_list(cur_task);
if ptr.is_null() {
None
} else {
// Reclaim the owned vector previously stashed in the task
// by `swap_task_borrow_list` via rust_set_task_borrow_list.
let v: ~[BorrowRecord] = transmute(ptr);
Some(v)
}
} else {
None
}
}
}
// Applies `f` to the current task's borrow list and stores the result back
// into the task. An empty list is created if none exists yet. Does nothing
// when there is no current task.
fn swap_task_borrow_list(f: &fn(~[BorrowRecord]) -> ~[BorrowRecord]) {
unsafe {
let cur_task: *rust_task = rustrt::rust_try_get_task();
if cur_task.is_not_null() {
// Take ownership of the stored list (or start fresh) ...
let mut borrow_list: ~[BorrowRecord] = {
let ptr = rustrt::rust_take_task_borrow_list(cur_task);
if ptr.is_null() { ~[] } else { transmute(ptr) }
};
borrow_list = f(borrow_list);
// ... then give ownership back to the task runtime.
rustrt::rust_set_task_borrow_list(cur_task, transmute(borrow_list));
}
}
}
// Drops the task's borrow list, freeing it. Called during task teardown so
// the list's allocation does not outlive the task.
pub unsafe fn clear_task_borrow_list() {
// pub because it is used by the box annihilator.
let _ = try_take_task_borrow_list();
}
// Aborts the task with a "borrowed" failure for `box`. When borrow records
// are being kept, the message also lists every recorded borrow site for this
// box (most recent first, via each_reverse) to aid debugging.
unsafe fn fail_borrowed(box: *mut BoxRepr, file: *c_char, line: size_t) {
debug_borrow("fail_borrowed: ", box, 0, 0, file, line);
match try_take_task_borrow_list() {
None => { // not recording borrows
let msg = "borrowed";
do str::as_buf(msg) |msg_p, _| {
fail_(msg_p as *c_char, file, line);
}
}
Some(borrow_list) => { // recording borrows
let mut msg = ~"borrowed";
let mut sep = " at ";
for borrow_list.each_reverse |entry| {
if entry.box == box {
// Append "<file>:<line>" for each loan of this same box.
str::push_str(&mut msg, sep);
let filename = str::raw::from_c_str(entry.file);
str::push_str(&mut msg, filename);
str::push_str(&mut msg, fmt!(":%u", entry.line as uint));
sep = " and at ";
}
}
do str::as_buf(msg) |msg_p, _| {
fail_(msg_p as *c_char, file, line)
}
}
}
}
@ -79,6 +156,77 @@ pub unsafe fn exchange_malloc(td: *c_char, size: uintptr_t) -> *c_char {
transmute(exchange_alloc::malloc(transmute(td), transmute(size)))
}
/// Because this code is so perf. sensitive, use a static constant so that
/// debug printouts are compiled out most of the time.
static ENABLE_DEBUG: bool = false;

// Trace helper for the borrow lang items: prints tag, box pointer, old/new
// refcount bits, and the borrow site. Compiled to a no-op unless
// ENABLE_DEBUG is flipped on AND the runtime's debug_borrow flag is set.
#[inline]
unsafe fn debug_borrow<T>(tag: &'static str,
p: *const T,
old_bits: uint,
new_bits: uint,
filename: *c_char,
line: size_t) {
//! A useful debugging function that prints a pointer + tag + newline
//! without allocating memory.
if ENABLE_DEBUG && ::rt::env::get().debug_borrow {
debug_borrow_slow(tag, p, old_bits, new_bits, filename, line);
}

// Out-of-line slow path: writes directly to stderr via raw fd so that
// no allocation happens (safe to call even during failure handling).
unsafe fn debug_borrow_slow<T>(tag: &'static str,
p: *const T,
old_bits: uint,
new_bits: uint,
filename: *c_char,
line: size_t) {
let dbg = STDERR_FILENO as io::fd_t;
dbg.write_str(tag);
dbg.write_hex(p as uint);
dbg.write_str(" ");
dbg.write_hex(old_bits);
dbg.write_str(" ");
dbg.write_hex(new_bits);
dbg.write_str(" ");
dbg.write_cstr(filename);
dbg.write_str(":");
dbg.write_hex(line as uint);
dbg.write_str("\n");
}
}
// Minimal, allocation-free printing primitives used only by debug_borrow's
// slow path; implemented directly on a raw file descriptor.
trait DebugPrints {
fn write_hex(&self, val: uint);
unsafe fn write_cstr(&self, str: *c_char);
}

impl DebugPrints for io::fd_t {
// Writes `i` as fixed-width lowercase hex (one nibble per digit,
// zero-padded to the full word size) without allocating.
fn write_hex(&self, mut i: uint) {
let letters = ['0', '1', '2', '3', '4', '5', '6', '7', '8',
'9', 'a', 'b', 'c', 'd', 'e', 'f'];
// Number of hex digits in a uint: bytes * 2.
static uint_nibbles: uint = ::uint::bytes << 1;
let mut buffer = [0_u8, ..uint_nibbles+1];
let mut c = uint_nibbles;
// Fill the buffer from the least significant nibble backwards.
while c > 0 {
c -= 1;
buffer[c] = letters[i & 0xF] as u8;
i >>= 4;
}
self.write(buffer.slice(0, uint_nibbles));
}

// Writes a NUL-terminated C string. Unsafe: `p` must point to a valid
// NUL-terminated buffer (measured with strlen).
unsafe fn write_cstr(&self, p: *c_char) {
use libc::strlen;
use vec;
let len = strlen(p);
let p: *u8 = transmute(p);
do vec::raw::buf_as_slice(p, len as uint) |s| {
self.write(s);
}
}
}
// NB: Calls to free CANNOT be allowed to fail, as throwing an exception from
// inside a landing pad may corrupt the state of the exception handler. If a
// problem occurs, call exit instead.
@ -121,6 +269,7 @@ pub unsafe fn local_free(ptr: *c_char) {
}
}
#[cfg(stage0)]
#[lang="borrow_as_imm"]
#[inline(always)]
pub unsafe fn borrow_as_imm(a: *u8) {
@ -128,6 +277,86 @@ pub unsafe fn borrow_as_imm(a: *u8) {
(*a).header.ref_count |= FROZEN_BIT;
}
// Language item: the compiler emits calls to this when a managed box is
// dynamically borrowed immutably. Fails if the box is currently borrowed
// mutably (MUT_BIT set); otherwise sets FROZEN_BIT and returns the previous
// refcount word so the caller can later restore it via return_to_mut.
#[cfg(not(stage0))]
#[lang="borrow_as_imm"]
#[inline(always)]
pub unsafe fn borrow_as_imm(a: *u8, file: *c_char, line: size_t) -> uint {
let a: *mut BoxRepr = transmute(a);
let old_ref_count = (*a).header.ref_count;
let new_ref_count = old_ref_count | FROZEN_BIT;

debug_borrow("borrow_as_imm:", a, old_ref_count, new_ref_count, file, line);

// A mutable loan is already outstanding: dynamic freeze violation.
if (old_ref_count & MUT_BIT) != 0 {
fail_borrowed(a, file, line);
}

(*a).header.ref_count = new_ref_count;

old_ref_count
}
// Language item: the compiler emits calls to this when a managed box is
// dynamically borrowed mutably. Fails if ANY loan (mutable or frozen) is
// outstanding; otherwise sets both MUT_BIT and FROZEN_BIT and returns the
// previous refcount word for later restoration by return_to_mut.
#[cfg(not(stage0))]
#[lang="borrow_as_mut"]
#[inline(always)]
pub unsafe fn borrow_as_mut(a: *u8, file: *c_char, line: size_t) -> uint {
let a: *mut BoxRepr = transmute(a);
let old_ref_count = (*a).header.ref_count;
let new_ref_count = old_ref_count | MUT_BIT | FROZEN_BIT;

debug_borrow("borrow_as_mut:", a, old_ref_count, new_ref_count, file, line);

// Any existing loan conflicts with a mutable borrow.
if (old_ref_count & (MUT_BIT|FROZEN_BIT)) != 0 {
fail_borrowed(a, file, line);
}

(*a).header.ref_count = new_ref_count;

old_ref_count
}
// Language item: records the source location of a new borrow in the task's
// borrow list so that a later conflicting borrow can report it. Only the
// FIRST borrow of a box is recorded: if either borrow bit was already set
// in `old_ref_count`, the box was already on the list.
#[cfg(not(stage0))]
#[lang="record_borrow"]
pub unsafe fn record_borrow(a: *u8, old_ref_count: uint,
file: *c_char, line: size_t) {
if (old_ref_count & ALL_BITS) == 0 {
// was not borrowed before
let a: *mut BoxRepr = transmute(a);
debug_borrow("record_borrow:", a, old_ref_count, 0, file, line);
do swap_task_borrow_list |borrow_list| {
let mut borrow_list = borrow_list;
borrow_list.push(BorrowRecord {box: a, file: file, line: line});
borrow_list
}
}
}
// Language item: inverse of record_borrow. Pops the matching BorrowRecord
// off the task's borrow list when the last borrow of a box ends. Borrows
// nest like a stack, so the record being removed must be the most recently
// pushed one; anything else indicates a runtime bookkeeping bug and fails.
#[cfg(not(stage0))]
#[lang="unrecord_borrow"]
pub unsafe fn unrecord_borrow(a: *u8, old_ref_count: uint,
file: *c_char, line: size_t) {
if (old_ref_count & ALL_BITS) == 0 {
// was not borrowed before, so we should find the record at
// the end of the list
let a: *mut BoxRepr = transmute(a);
debug_borrow("unrecord_borrow:", a, old_ref_count, 0, file, line);
do swap_task_borrow_list |borrow_list| {
let mut borrow_list = borrow_list;
assert!(!borrow_list.is_empty());
let br = borrow_list.pop();
if br.box != a || br.file != file || br.line != line {
// Mismatched push/pop: report which record was found instead.
let err = fmt!("wrong borrow found, br=%?", br);
do str::as_buf(err) |msg_p, _| {
fail_(msg_p as *c_char, file, line)
}
}
borrow_list
}
}
}
#[cfg(stage0)]
#[lang="return_to_mut"]
#[inline(always)]
pub unsafe fn return_to_mut(a: *u8) {
@ -139,12 +368,49 @@ pub unsafe fn return_to_mut(a: *u8) {
}
}
// Language item: ends a dynamic borrow by restoring the borrow-flag bits
// (MUT_BIT/FROZEN_BIT) that borrow_as_imm/borrow_as_mut returned in
// `orig_ref_count`, while keeping the current refcount's non-flag bits.
#[cfg(not(stage0))]
#[lang="return_to_mut"]
#[inline(always)]
pub unsafe fn return_to_mut(a: *u8, orig_ref_count: uint,
file: *c_char, line: size_t) {
// Sometimes the box is null, if it is conditionally frozen.
// See e.g. #4904.
if !a.is_null() {
let a: *mut BoxRepr = transmute(a);
let old_ref_count = (*a).header.ref_count;
// Splice: non-flag bits from the current count, flag bits from the
// count saved when the borrow began.
let new_ref_count =
(old_ref_count & !ALL_BITS) | (orig_ref_count & ALL_BITS);

debug_borrow("return_to_mut:",
a, old_ref_count, new_ref_count, file, line);

(*a).header.ref_count = new_ref_count;
}
}
#[cfg(stage0)]
#[lang="check_not_borrowed"]
#[inline(always)]
pub unsafe fn check_not_borrowed(a: *u8) {
let a: *mut BoxRepr = transmute(a);
if ((*a).header.ref_count & FROZEN_BIT) != 0 {
fail_borrowed();
do str::as_buf("XXX") |file_p, _| {
fail_borrowed(a, file_p as *c_char, 0);
}
}
}
// Language item: asserts that a box is not currently frozen (no immutable
// loans outstanding), failing with the recorded borrow sites otherwise.
// Read-only: does not modify the refcount word.
#[cfg(not(stage0))]
#[lang="check_not_borrowed"]
#[inline(always)]
pub unsafe fn check_not_borrowed(a: *u8,
file: *c_char,
line: size_t) {
let a: *mut BoxRepr = transmute(a);
let ref_count = (*a).header.ref_count;
debug_borrow("check_not_borrowed:", a, ref_count, 0, file, line);
if (ref_count & FROZEN_BIT) != 0 {
fail_borrowed(a, file, line);
}
}

View file

@ -26,19 +26,20 @@ pub fn ignore<T>(_x: T) { }
/// Sets `*ptr` to `new_value`, invokes `op()`, and then restores the
/// original value of `*ptr`.
///
/// NB: This function accepts `@mut T` and not `&mut T` to avoid
/// an obvious borrowck hazard. Typically passing in `&mut T` will
/// cause borrow check errors because it freezes whatever location
/// that `&mut T` is stored in (either statically or dynamically).
#[inline(always)]
pub fn with<T:Copy,R>(
ptr: &mut T,
new_value: T,
pub fn with<T,R>(
ptr: @mut T,
mut value: T,
op: &fn() -> R) -> R
{
// NDM: if swap operator were defined somewhat differently,
// we wouldn't need to copy...
let old_value = *ptr;
*ptr = new_value;
value <-> *ptr;
let result = op();
*ptr = old_value;
*ptr = value;
return result;
}

View file

@ -1821,17 +1821,14 @@ pub trait CopyableVector<T> {
}
/// Extension methods for vectors
impl<'self,T:Copy> CopyableVector<T> for &'self const [T] {
impl<'self,T:Copy> CopyableVector<T> for &'self [T] {
/// Returns a copy of `v`.
#[inline]
fn to_owned(&self) -> ~[T] {
let mut result = ~[];
// FIXME: #4568
unsafe {
reserve(&mut result, self.len());
for self.each |e| {
result.push(copy *e);
}
reserve(&mut result, self.len());
for self.each |e| {
result.push(copy *e);
}
result

View file

@ -788,7 +788,7 @@ pub fn link_binary(sess: Session,
};
debug!("output: %s", output.to_str());
let mut cc_args = link_args(sess, obj_filename, out_filename, lm);
let cc_args = link_args(sess, obj_filename, out_filename, lm);
debug!("%s link args: %s", cc_prog, str::connect(cc_args, ~" "));
// We run 'cc' here
let prog = run::program_output(cc_prog, cc_args);

View file

@ -262,7 +262,7 @@ pub fn compile_rest(sess: Session,
middle::check_loop::check_crate(ty_cx, crate));
let middle::moves::MoveMaps {moves_map, variable_moves_map,
capture_map} =
moved_variables_set, capture_map} =
time(time_passes, ~"compute moves", ||
middle::moves::compute_moves(ty_cx, method_map, crate));
@ -270,20 +270,19 @@ pub fn compile_rest(sess: Session,
middle::check_match::check_crate(ty_cx, method_map,
moves_map, crate));
let last_use_map =
time(time_passes, ~"liveness checking", ||
middle::liveness::check_crate(ty_cx, method_map,
variable_moves_map,
capture_map, crate));
time(time_passes, ~"liveness checking", ||
middle::liveness::check_crate(ty_cx, method_map,
variable_moves_map,
capture_map, crate));
let (root_map, mutbl_map, write_guard_map) =
let (root_map, write_guard_map) =
time(time_passes, ~"borrow checking", ||
middle::borrowck::check_crate(ty_cx, method_map,
moves_map, capture_map,
crate));
moves_map, moved_variables_set,
capture_map, crate));
time(time_passes, ~"kind checking", ||
kind::check_crate(ty_cx, method_map, last_use_map, crate));
kind::check_crate(ty_cx, method_map, crate));
time(time_passes, ~"lint checking", ||
lint::check_crate(ty_cx, crate));
@ -291,9 +290,7 @@ pub fn compile_rest(sess: Session,
if upto == cu_no_trans { return (crate, Some(ty_cx)); }
let maps = astencode::Maps {
mutbl_map: mutbl_map,
root_map: root_map,
last_use_map: last_use_map,
method_map: method_map,
vtable_map: vtable_map,
write_guard_map: write_guard_map,
@ -607,11 +604,6 @@ pub fn build_session_options(binary: @~str,
let target_opt = getopts::opt_maybe_str(matches, ~"target");
let target_feature_opt = getopts::opt_maybe_str(matches, ~"target-feature");
let save_temps = getopts::opt_present(matches, ~"save-temps");
match output_type {
// unless we're emitting huamn-readable assembly, omit comments.
link::output_type_llvm_assembly | link::output_type_assembly => (),
_ => debugging_opts |= session::no_asm_comments
}
let opt_level = {
if (debugging_opts & session::no_opt) != 0 {
No

View file

@ -45,7 +45,7 @@ pub static time_passes: uint = 1 << 1;
pub static count_llvm_insns: uint = 1 << 2;
pub static time_llvm_passes: uint = 1 << 3;
pub static trans_stats: uint = 1 << 4;
pub static no_asm_comments: uint = 1 << 5;
pub static asm_comments: uint = 1 << 5;
pub static no_verify: uint = 1 << 6;
pub static trace: uint = 1 << 7;
pub static coherence: uint = 1 << 8;
@ -73,7 +73,7 @@ pub fn debugging_opts_map() -> ~[(~str, ~str, uint)] {
(~"time-llvm-passes", ~"measure time of each LLVM pass",
time_llvm_passes),
(~"trans-stats", ~"gather trans statistics", trans_stats),
(~"no-asm-comments", ~"omit comments when using -S", no_asm_comments),
(~"asm-comments", ~"generate comments into the assembly (may change behavior)", asm_comments),
(~"no-verify", ~"skip LLVM verification", no_verify),
(~"trace", ~"emit trace logs", trace),
(~"coherence", ~"perform coherence checking", coherence),
@ -188,6 +188,9 @@ pub impl Session_ {
fn err(@self, msg: &str) {
self.span_diagnostic.handler().err(msg)
}
fn err_count(@self) -> uint {
self.span_diagnostic.handler().err_count()
}
fn has_errors(@self) -> bool {
self.span_diagnostic.handler().has_errors()
}
@ -263,7 +266,7 @@ pub impl Session_ {
}
fn trans_stats(@self) -> bool { self.debugging_opt(trans_stats) }
fn meta_stats(@self) -> bool { self.debugging_opt(meta_stats) }
fn no_asm_comments(@self) -> bool { self.debugging_opt(no_asm_comments) }
fn asm_comments(@self) -> bool { self.debugging_opt(asm_comments) }
fn no_verify(@self) -> bool { self.debugging_opt(no_verify) }
fn trace(@self) -> bool { self.debugging_opt(trace) }
fn coherence(@self) -> bool { self.debugging_opt(coherence) }

View file

@ -69,7 +69,8 @@ fn generate_test_harness(sess: session::Session,
testfns: ~[]
};
cx.ext_cx.bt_push(ExpandedFrom(CallInfo {
let ext_cx = cx.ext_cx;
ext_cx.bt_push(ExpandedFrom(CallInfo {
call_site: dummy_sp(),
callee: NameAndSpan {
name: ~"test",
@ -84,7 +85,7 @@ fn generate_test_harness(sess: session::Session,
let fold = fold::make_fold(precursor);
let res = @fold.fold_crate(&*crate);
cx.ext_cx.bt_pop();
ext_cx.bt_pop();
return res;
}

View file

@ -244,8 +244,8 @@ fn doc_transformed_self_ty(doc: ebml::Doc,
}
}
pub fn item_type(_: ast::def_id, item: ebml::Doc, tcx: ty::ctxt, cdata: cmd)
-> ty::t {
pub fn item_type(_item_id: ast::def_id, item: ebml::Doc,
tcx: ty::ctxt, cdata: cmd) -> ty::t {
doc_type(item, tcx, cdata)
}

View file

@ -198,7 +198,7 @@ fn encode_type_param_bounds(ebml_w: &mut writer::Encoder,
ecx: @EncodeContext,
params: &OptVec<TyParam>) {
let ty_param_defs =
@params.map_to_vec(|param| *ecx.tcx.ty_param_defs.get(&param.id));
@params.map_to_vec(|param| ecx.tcx.ty_param_defs.get_copy(&param.id));
encode_ty_type_param_defs(ebml_w, ecx, ty_param_defs,
tag_items_data_item_ty_param_bounds);
}
@ -288,7 +288,7 @@ fn encode_discriminant(ecx: @EncodeContext,
ebml_w: &mut writer::Encoder,
id: node_id) {
ebml_w.start_tag(tag_items_data_item_symbol);
ebml_w.writer.write(str::to_bytes(**ecx.discrim_symbols.get(&id)));
ebml_w.writer.write(str::to_bytes(*ecx.discrim_symbols.get_copy(&id)));
ebml_w.end_tag();
}
@ -1036,7 +1036,7 @@ fn encode_info_for_items(ecx: @EncodeContext,
let ebml_w = copy *ebml_w;
|i, cx, v| {
visit::visit_item(i, cx, v);
match *ecx.tcx.items.get(&i.id) {
match ecx.tcx.items.get_copy(&i.id) {
ast_map::node_item(_, pt) => {
let mut ebml_w = copy ebml_w;
encode_info_for_item(ecx, &mut ebml_w, i, index, *pt);
@ -1049,7 +1049,7 @@ fn encode_info_for_items(ecx: @EncodeContext,
let ebml_w = copy *ebml_w;
|ni, cx, v| {
visit::visit_foreign_item(ni, cx, v);
match *ecx.tcx.items.get(&ni.id) {
match ecx.tcx.items.get_copy(&ni.id) {
ast_map::node_foreign_item(_, abi, _, pt) => {
let mut ebml_w = copy ebml_w;
encode_info_for_foreign_item(ecx,

View file

@ -245,6 +245,9 @@ fn parse_region(st: @mut PState) -> ty::Region {
't' => {
ty::re_static
}
'e' => {
ty::re_static
}
_ => fail!(~"parse_region: bad input")
}
}

View file

@ -71,30 +71,29 @@ pub fn enc_ty(w: @io::Writer, cx: @ctxt, t: ty::t) {
w.write_str(result_str);
}
ac_use_abbrevs(abbrevs) => {
match abbrevs.find(&t) {
Some(a) => { w.write_str(*a.s); return; }
None => {
let pos = w.tell();
enc_sty(w, cx, /*bad*/copy ty::get(t).sty);
let end = w.tell();
let len = end - pos;
fn estimate_sz(u: uint) -> uint {
let mut n = u;
let mut len = 0u;
while n != 0u { len += 1u; n = n >> 4u; }
return len;
}
let abbrev_len = 3u + estimate_sz(pos) + estimate_sz(len);
if abbrev_len < len {
// I.e. it's actually an abbreviation.
let s = ~"#" + uint::to_str_radix(pos, 16u) + ~":" +
uint::to_str_radix(len, 16u) + ~"#";
let a = ty_abbrev { pos: pos, len: len, s: @s };
abbrevs.insert(t, a);
}
return;
match abbrevs.find(&t) {
Some(a) => { w.write_str(*a.s); return; }
None => {}
}
}
let pos = w.tell();
enc_sty(w, cx, /*bad*/copy ty::get(t).sty);
let end = w.tell();
let len = end - pos;
fn estimate_sz(u: uint) -> uint {
let mut n = u;
let mut len = 0u;
while n != 0u { len += 1u; n = n >> 4u; }
return len;
}
let abbrev_len = 3u + estimate_sz(pos) + estimate_sz(len);
if abbrev_len < len {
// I.e. it's actually an abbreviation.
let s = ~"#" + uint::to_str_radix(pos, 16u) + ~":" +
uint::to_str_radix(len, 16u) + ~"#";
let a = ty_abbrev { pos: pos, len: len, s: @s };
abbrevs.insert(t, a);
}
return;
}
}
}
@ -152,6 +151,9 @@ fn enc_region(w: @io::Writer, cx: @ctxt, r: ty::Region) {
ty::re_static => {
w.write_char('t');
}
ty::re_empty => {
w.write_char('e');
}
ty::re_infer(_) => {
// these should not crop up after typeck
cx.diag.handler().bug(~"Cannot encode region variables");

View file

@ -44,9 +44,7 @@ use writer = std::ebml::writer;
// Auxiliary maps of things to be encoded
pub struct Maps {
mutbl_map: middle::borrowck::mutbl_map,
root_map: middle::borrowck::root_map,
last_use_map: middle::liveness::last_use_map,
method_map: middle::typeck::method_map,
vtable_map: middle::typeck::vtable_map,
write_guard_map: middle::borrowck::write_guard_map,
@ -152,7 +150,7 @@ pub fn decode_inlined_item(cdata: @cstore::crate_metadata,
fn reserve_id_range(sess: Session,
from_id_range: ast_util::id_range) -> ast_util::id_range {
// Handle the case of an empty range:
if ast_util::empty(from_id_range) { return from_id_range; }
if from_id_range.empty() { return from_id_range; }
let cnt = from_id_range.max - from_id_range.min;
let to_id_min = sess.parse_sess.next_id;
let to_id_max = sess.parse_sess.next_id + cnt;
@ -163,7 +161,6 @@ fn reserve_id_range(sess: Session,
pub impl ExtendedDecodeContext {
fn tr_id(&self, id: ast::node_id) -> ast::node_id {
/*!
*
* Translates an internal id, meaning a node id that is known
* to refer to some part of the item currently being inlined,
* such as a local variable or argument. All naked node-ids
@ -174,12 +171,11 @@ pub impl ExtendedDecodeContext {
*/
// from_id_range should be non-empty
assert!(!ast_util::empty(self.from_id_range));
assert!(!self.from_id_range.empty());
(id - self.from_id_range.min + self.to_id_range.min)
}
fn tr_def_id(&self, did: ast::def_id) -> ast::def_id {
/*!
*
* Translates an EXTERNAL def-id, converting the crate number
* from the one used in the encoded data to the current crate
* numbers.. By external, I mean that it be translated to a
@ -204,7 +200,6 @@ pub impl ExtendedDecodeContext {
}
fn tr_intern_def_id(&self, did: ast::def_id) -> ast::def_id {
/*!
*
* Translates an INTERNAL def-id, meaning a def-id that is
* known to refer to some part of the item currently being
* inlined. In that case, we want to convert the def-id to
@ -435,11 +430,7 @@ impl tr for ty::AutoAdjustment {
impl tr for ty::AutoRef {
fn tr(&self, xcx: @ExtendedDecodeContext) -> ty::AutoRef {
ty::AutoRef {
kind: self.kind,
region: self.region.tr(xcx),
mutbl: self.mutbl,
}
self.map_region(|r| r.tr(xcx))
}
}
@ -448,7 +439,7 @@ impl tr for ty::Region {
match *self {
ty::re_bound(br) => ty::re_bound(br.tr(xcx)),
ty::re_scope(id) => ty::re_scope(xcx.tr_id(id)),
ty::re_static | ty::re_infer(*) => *self,
ty::re_empty | ty::re_static | ty::re_infer(*) => *self,
ty::re_free(ref fr) => {
ty::re_free(ty::FreeRegion {scope_id: xcx.tr_id(fr.scope_id),
bound_region: fr.bound_region.tr(xcx)})
@ -724,7 +715,7 @@ trait ebml_writer_helpers {
fn emit_arg(&mut self, ecx: @e::EncodeContext, arg: ty::arg);
fn emit_ty(&mut self, ecx: @e::EncodeContext, ty: ty::t);
fn emit_vstore(&mut self, ecx: @e::EncodeContext, vstore: ty::vstore);
fn emit_tys(&mut self, ecx: @e::EncodeContext, tys: ~[ty::t]);
fn emit_tys(&mut self, ecx: @e::EncodeContext, tys: &[ty::t]);
fn emit_type_param_def(&mut self,
ecx: @e::EncodeContext,
type_param_def: &ty::TypeParameterDef);
@ -752,7 +743,7 @@ impl ebml_writer_helpers for writer::Encoder {
}
}
fn emit_tys(&mut self, ecx: @e::EncodeContext, tys: ~[ty::t]) {
fn emit_tys(&mut self, ecx: @e::EncodeContext, tys: &[ty::t]) {
do self.emit_from_vec(tys) |this, ty| {
this.emit_ty(ecx, *ty)
}
@ -859,9 +850,7 @@ fn encode_side_tables_for_id(ecx: @e::EncodeContext,
do ebml_w.tag(c::tag_table_node_type_subst) |ebml_w| {
ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| {
// FIXME(#5562): removing this copy causes a segfault
// before stage2
ebml_w.emit_tys(ecx, /*bad*/copy **tys)
ebml_w.emit_tys(ecx, **tys)
}
}
}
@ -896,23 +885,6 @@ fn encode_side_tables_for_id(ecx: @e::EncodeContext,
}
}
if maps.mutbl_map.contains(&id) {
do ebml_w.tag(c::tag_table_mutbl) |ebml_w| {
ebml_w.id(id);
}
}
for maps.last_use_map.find(&id).each |&m| {
do ebml_w.tag(c::tag_table_last_use) |ebml_w| {
ebml_w.id(id);
do ebml_w.tag(c::tag_table_val) |ebml_w| {
do ebml_w.emit_from_vec(/*bad*/ copy **m) |ebml_w, id| {
id.encode(ebml_w);
}
}
}
}
for maps.method_map.find(&id).each |&mme| {
do ebml_w.tag(c::tag_table_method_map) |ebml_w| {
ebml_w.id(id);
@ -1113,9 +1085,7 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
found for id %d (orig %d)",
tag, id, id0);
if tag == (c::tag_table_mutbl as uint) {
dcx.maps.mutbl_map.insert(id);
} else if tag == (c::tag_table_moves_map as uint) {
if tag == (c::tag_table_moves_map as uint) {
dcx.maps.moves_map.insert(id);
} else {
let val_doc = entry_doc.get(c::tag_table_val as uint);
@ -1144,11 +1114,6 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
} else if tag == (c::tag_table_param_defs as uint) {
let bounds = val_dsr.read_type_param_def(xcx);
dcx.tcx.ty_param_defs.insert(id, bounds);
} else if tag == (c::tag_table_last_use as uint) {
let ids = val_dsr.read_to_vec(|val_dsr| {
xcx.tr_id(val_dsr.read_int())
});
dcx.maps.last_use_map.insert(id, @mut ids);
} else if tag == (c::tag_table_method_map as uint) {
dcx.maps.method_map.insert(
id,

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,750 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
# The Borrow Checker
This pass has the job of enforcing memory safety. This is a subtle
topic. The only way I know how to explain it is terms of a formal
model, so that's what I'll do.
# Formal model
Let's consider a simple subset of Rust in which you can only borrow
from lvalues like so:
LV = x | LV.f | *LV
Here `x` represents some variable, `LV.f` is a field reference,
and `*LV` is a pointer dereference. There is no auto-deref or other
niceties. This means that if you have a type like:
struct S { f: uint }
and a variable `a: ~S`, then the rust expression `a.f` would correspond
to an `LV` of `(*a).f`.
Here is the formal grammar for the types we'll consider:
TY = () | S<'LT...> | ~TY | & 'LT MQ TY | @ MQ TY
MQ = mut | imm | const
Most of these types should be pretty self explanatory. Here `S` is a
struct name and we assume structs are declared like so:
SD = struct S<'LT...> { (f: TY)... }
# An intuitive explanation
## Issuing loans
Now, imagine we had a program like this:
struct Foo { f: uint, g: uint }
...
'a: {
let mut x: ~Foo = ...;
let y = &mut (*x).f;
x = ...;
}
This is of course dangerous because mutating `x` will free the old
value and hence invalidate `y`. The borrow checker aims to prevent
this sort of thing.
### Loans
The way the borrow checker works is that it analyzes each borrow
expression (in our simple model, that's stuff like `&LV`, though in
real life there are a few other cases to consider). For each borrow
expression, it computes a vector of loans:
LOAN = (LV, LT, PT, LK)
PT = Partial | Total
LK = MQ | RESERVE
Each `LOAN` tuple indicates some sort of restriction on what can be
done to the lvalue `LV`; `LV` will always be a path owned by the
current stack frame. These restrictions are called "loans" because
they are always the result of a borrow expression.
Every loan has a lifetime `LT` during which those restrictions are in
effect. The indicator `PT` distinguishes between *total* loans, in
which the LV itself was borrowed, and *partial* loans, which means
that some content owned by LV was borrowed.
The final element in the loan tuple is the *loan kind* `LK`. There
are four kinds: mutable, immutable, const, and reserve:
- A "mutable" loan means that LV may be written to through an alias, and
thus LV cannot be written to directly or immutably aliased (remember
that we preserve the invariant that any given value can only be
written to through one path at a time; hence if there is a mutable
alias to LV, then LV cannot be written directly until this alias is
out of scope).
- An "immutable" loan means that LV must remain immutable. Hence it
cannot be written, but other immutable aliases are permitted.
- A "const" loan means that an alias to LV exists. LV may still be
written or frozen.
- A "reserve" loan is the strongest case. It prevents both mutation
and aliasing of any kind, including `&const` loans. Reserve loans
are a side-effect of borrowing an `&mut` loan.
In addition to affecting mutability, a loan of any kind implies that
LV cannot be moved.
### Example
To give you a better feeling for what a loan is, let's look at three
loans that would be issued as a result of the borrow `&(*x).f` in the
example above:
((*x).f, Total, mut, 'a)
(*x, Partial, mut, 'a)
(x, Partial, mut, 'a)
The first loan states that the expression `(*x).f` has been loaned
totally as mutable for the lifetime `'a`. This first loan would
prevent an assignment `(*x).f = ...` from occurring during the
lifetime `'a`.
Now let's look at the second loan. You may have expected that each
borrow would result in only one loan. But this is not the case.
Instead, there will be loans for every path where mutation might
affect the validity of the borrowed pointer that is created (in some
cases, there can even be multiple loans per path, see the section on
"Borrowing in Calls" below for the gory details). The reason for this
is to prevent actions that would indirectly affect the borrowed path.
In this case, we wish to ensure that `(*x).f` is not mutated except
through the mutable alias `y`. Therefore, we must not only prevent an
assignment to `(*x).f` but also an assignment like `*x = Foo {...}`,
as this would also mutate the field `f`. To do so, we issue a
*partial* mutable loan for `*x` (the loan is partial because `*x`
itself was not borrowed). This partial loan will cause any attempt to
assign to `*x` to be flagged as an error.
Because both partial and total loans prevent assignments, you may
wonder why we bother to distinguish between them. The reason for this
distinction has to do with preventing double borrows. In particular,
it is legal to borrow both `&mut x.f` and `&mut x.g` simultaneously,
but it is not legal to borrow `&mut x.f` twice. In the borrow checker,
the first case would result in two *partial* mutable loans of `x`
(along with one total mutable loan of `x.f` and one of `x.g`) whereas
the second would result in two *total* mutable loans of `x.f` (along
with two partial mutable loans of `x`). Multiple *total mutable* loans
for the same path are not permitted, but multiple *partial* loans (of
any mutability) are permitted.
Finally, we come to the third loan. This loan is a partial mutable
loan of `x`. This loan prevents us from reassigning `x`, which would
be bad for two reasons. First, it would change the value of `(*x).f`
but, even worse, it would cause the pointer `y` to become a dangling
pointer. Bad all around.
## Checking for illegal assignments, moves, and reborrows
Once we have computed the loans introduced by each borrow, the borrow
checker will determine the full set of loans in scope at each
expression and use that to decide whether that expression is legal.
Remember that the scope of loan is defined by its lifetime LT. We
sometimes say that a loan which is in-scope at a particular point is
an "outstanding loan".
The kinds of expressions which in-scope loans can render illegal are
*assignments*, *moves*, and *borrows*.
An assignment to an lvalue LV is illegal if there is an in-scope mutable
or immutable loan for LV. Assignment with an outstanding mutable loan
is illegal because then the `&mut` pointer is supposed to be the only
way to mutate the value. Assignment with an outstanding immutable
loan is illegal because the value is supposed to be immutable at that
point.
A move from an lvalue LV is illegal if there is any sort of
outstanding loan.
A borrow expression may be illegal if any of the loans which it
produces conflict with other outstanding loans. Two loans are
considered compatible if one of the following conditions holds:
- At least one loan is a const loan.
- Both loans are partial loans.
- Both loans are immutable.
Any other combination of loans is illegal.
# The set of loans that results from a borrow expression
Here we'll define four functions---MUTATE, FREEZE, ALIAS, and
TAKE---which are all used to compute the set of LOANs that result
from a borrow expression. The first three functions each have
a similar type signature:
MUTATE(LV, LT, PT) -> LOANS
FREEZE(LV, LT, PT) -> LOANS
ALIAS(LV, LT, PT) -> LOANS
MUTATE, FREEZE, and ALIAS are used when computing the loans result
from mutable, immutable, and const loans respectively. For example,
the loans resulting from an expression like `&mut (*x).f` would be
computed by `MUTATE((*x).f, LT, Total)`, where `LT` is the lifetime of
the resulting pointer. Similarly the loans for `&(*x).f` and `&const
(*x).f` would be computed by `FREEZE((*x).f, LT, Total)` and
`ALIAS((*x).f, LT, Total)` respectively. (Actually this is a slight
simplification; see the section below on Borrows in Calls for the full
gory details)
The names MUTATE, FREEZE, and ALIAS are intended to suggest the
semantics of `&mut`, `&`, and `&const` borrows respectively. `&mut`,
for example, creates a mutable alias of LV. `&` causes the borrowed
value to be frozen (immutable). `&const` does neither but does
introduce an alias to be the borrowed value.
Each of these three functions is only defined for some inputs. That
is, it may occur that some particular borrow is not legal. For
example, it is illegal to make an `&mut` loan of immutable data. In
that case, the MUTATE() function is simply not defined (in the code,
it returns a Result<> condition to indicate when a loan would be
illegal).
The final function, RESERVE, is used as part of borrowing an `&mut`
pointer. Due to the fact that it is used for one very particular
purpose, it has a rather simpler signature than the others:
RESERVE(LV, LT) -> LOANS
It is explained when we come to that case.
## The function MUTATE()
Here we use [inference rules][ir] to define the MUTATE() function.
We will go case by case for the various kinds of lvalues that
can be borrowed.
[ir]: http://en.wikipedia.org/wiki/Rule_of_inference
### Mutating local variables
The rule for mutating local variables is as follows:
Mutate-Variable:
LT <= Scope(x)
Mut(x) = Mut
--------------------------------------------------
MUTATE(x, LT, PT) = (x, LT, PT, mut)
Here `Scope(x)` is the lifetime of the block in which `x` was declared
and `Mut(x)` indicates the mutability with which `x` was declared.
This rule simply states that you can only create a mutable alias
to a variable if it is mutable, and that alias cannot outlive the
stack frame in which the variable is declared.
### Mutating fields and owned pointers
As it turns out, the rules for mutating fields and mutating owned
pointers turn out to be quite similar. The reason is that the
expressions `LV.f` and `*LV` are both owned by their base expression
`LV`. So basically the result of mutating `LV.f` or `*LV` is computed
by adding a loan for `LV.f` or `*LV` and then the loans for a partial
take of `LV`:
Mutate-Field:
MUTATE(LV, LT, Partial) = LOANS
------------------------------------------------------------
MUTATE(LV.f, LT, PT) = LOANS, (LV.F, LT, PT, mut)
Mutate-Owned-Ptr:
Type(LV) = ~Ty
MUTATE(LV, LT, Partial) = LOANS
------------------------------------------------------------
MUTATE(*LV, LT, PT) = LOANS, (*LV, LT, PT, mut)
Note that while our micro-language only has fields, the slight
variations on the `Mutate-Field` rule are used for any interior content
that appears in the full Rust language, such as the contents of a
tuple, fields in a struct, or elements of a fixed-length vector.
### Mutating dereferenced borrowed pointers
The rule for borrowed pointers is by far the most complicated:
Mutate-Mut-Borrowed-Ptr:
Type(LV) = &LT_P mut Ty // (1)
LT <= LT_P // (2)
RESERVE(LV, LT) = LOANS // (3)
------------------------------------------------------------
MUTATE(*LV, LT, PT) = LOANS, (*LV, LT, PT, Mut)
Condition (1) states that only a mutable borrowed pointer can be
taken. Condition (2) states that the lifetime of the alias must be
less than the lifetime of the borrowed pointer being taken.
Condition (3) is where things get interesting. The intended
semantics of the borrow is that the new `&mut` pointer is the only one
which has the right to modify the data; the original `&mut` pointer
must not be used for mutation. Because borrowed pointers do not own
their content nor inherit mutability, we must be particularly cautious
of aliases, which could permit the original borrowed pointer to be
reached from another path and thus circumvent our loans.
Here is one example of what could go wrong if we ignore condition (3):
let x: &mut T;
...
let y = &mut *x; // Only *y should be able to mutate...
let z = &const x;
**z = ...; // ...but here **z is still able to mutate!
Another possible error could occur with moves:
let x: &mut T;
...
let y = &mut *x; // Issues loan: (*x, LT, Total, Mut)
let z = x; // moves from x
*z = ...; // Mutates *y indirectly! Bad.
In both of these cases, the problem is that when creating the alias
`y` we would only issue a loan preventing assignment through `*x`.
But this loan can be easily circumvented by moving from `x` or
aliasing it. Note that, in the first example, the alias of `x` was
created using `&const`, which is a particularly weak form of alias.
The danger of aliases can also occur when the `&mut` pointer itself
is already located in an alias location, as here:
let x: @mut &mut T; // or &mut &mut T, &&mut T,
... // &const &mut T, @&mut T, etc
let y = &mut **x; // Only *y should be able to mutate...
let z = x;
**z = ...; // ...but here **z is still able to mutate!
When we cover the rules for RESERVE, we will see that it would
disallow this case, because MUTATE can only be applied to canonical
lvalues which are owned by the current stack frame.
It might be the case that if `&const` and `@const` pointers were
removed, we could do away with RESERVE and simply use MUTATE instead.
But we have to be careful about the final example in particular, since
dynamic freezing would not be sufficient to prevent this example.
Perhaps a combination of MUTATE with a predicate OWNED(LV).
One final detail: unlike every other case, when we calculate the loans
using RESERVE we do not use the original lifetime `LT` but rather
`GLB(Scope(LV), LT)`. What this says is that the reserve loans need only
remain in force while the variable `LV` itself is in scope, since the
reserved paths cannot be reached once `LV` goes out of scope.
### Mutating dereferenced managed pointers
Because the correctness of managed pointer loans is checked dynamically,
the rule is quite simple:
Mutate-Mut-Managed-Ptr:
Type(LV) = @mut Ty
Add ROOT-FREEZE annotation for *LV with lifetime LT
------------------------------------------------------------
MUTATE(*LV, LT, Total) = []
No loans are issued. Instead, we add a side annotation that causes
`*LV` to be rooted and frozen on entry to LV. You could rephrase
these rules as having multiple return values, or rephrase this as a
kind of loan, but whatever.
One interesting point is that *partial takes* of `@mut` are forbidden.
This is not for any soundness reason but just because it is clearer
for users when `@mut` values are either lent completely or not at all.
## The function FREEZE
The rules for FREEZE are pretty similar to MUTATE. The first four
cases I'll just present without discussion, as the reasoning is
quite analogous to the MUTATE case:
Freeze-Variable:
LT <= Scope(x)
--------------------------------------------------
FREEZE(x, LT, PT) = (x, LT, PT, imm)
Freeze-Field:
FREEZE(LV, LT, Partial) = LOANS
------------------------------------------------------------
FREEZE(LV.f, LT, PT) = LOANS, (LV.F, LT, PT, imm)
Freeze-Owned-Ptr:
Type(LV) = ~Ty
FREEZE(LV, LT, Partial) = LOANS
------------------------------------------------------------
FREEZE(*LV, LT, PT) = LOANS, (*LV, LT, PT, imm)
Freeze-Mut-Borrowed-Ptr:
Type(LV) = &LT_P mut Ty
LT <= LT_P
RESERVE(LV, LT) = LOANS
------------------------------------------------------------
FREEZE(*LV, LT, PT) = LOANS, (*LV, LT, PT, Imm)
Freeze-Mut-Managed-Ptr:
Type(LV) = @mut Ty
Add ROOT-FREEZE annotation for *LV with lifetime LT
------------------------------------------------------------
Freeze(*LV, LT, Total) = []
The rule to "freeze" an immutable borrowed pointer is quite
simple, since the content is already immutable:
Freeze-Imm-Borrowed-Ptr:
Type(LV) = &LT_P Ty // (1)
LT <= LT_P // (2)
------------------------------------------------------------
FREEZE(*LV, LT, PT) = []
The final two rules pertain to borrows of `@Ty`. There is a bit of
subtlety here. The main problem is that we must guarantee that the
managed box remains live for the entire borrow. We can either do this
dynamically, by rooting it, or (better) statically, and hence there
are two rules:
Freeze-Imm-Managed-Ptr-1:
Type(LV) = @Ty
Add ROOT annotation for *LV
------------------------------------------------------------
FREEZE(*LV, LT, PT) = []
Freeze-Imm-Managed-Ptr-2:
Type(LV) = @Ty
LT <= Scope(LV)
Mut(LV) = imm
LV is not moved
------------------------------------------------------------
FREEZE(*LV, LT, PT) = []
The intention of the second rule is to avoid an extra root if LV
serves as a root. In that case, LV must (1) outlive the borrow; (2)
be immutable; and (3) not be moved.
## The ALIAS function
The function ALIAS is used for `&const` loans but also to handle one
corner case concerning function arguments (covered in the section
"Borrows in Calls" below). It computes the loans that result from
observing that there is a pointer to `LV` and thus that pointer must
remain valid.
The first two rules are simple:
Alias-Variable:
LT <= Scope(x)
--------------------------------------------------
ALIAS(x, LT, PT) = (x, LT, PT, Const)
Alias-Field:
ALIAS(LV, LT, Partial) = LOANS
------------------------------------------------------------
ALIAS(LV.f, LT, PT) = LOANS, (LV.F, LT, PT, Const)
### Aliasing owned pointers
The rule for owned pointers is somewhat interesting:
Alias-Owned-Ptr:
Type(LV) = ~Ty
FREEZE(LV, LT, Partial) = LOANS
------------------------------------------------------------
ALIAS(*LV, LT, PT) = LOANS, (*LV, LT, PT, Const)
Here we *freeze* the base `LV`. The reason is that if an owned
pointer is mutated it frees its content, which means that the alias to
`*LV` would become a dangling pointer.
### Aliasing borrowed pointers
The rule for borrowed pointers is quite simple, because borrowed
pointers do not own their content and thus do not play a role in
keeping it live:
Alias-Borrowed-Ptr:
Type(LV) = &LT_P MQ Ty
LT <= LT_P
------------------------------------------------------------
ALIAS(*LV, LT, PT) = []
Basically, the existence of a borrowed pointer to some memory with
lifetime LT_P is proof that the memory can safely be aliased for any
lifetime LT <= LT_P.
### Aliasing managed pointers
The rules for aliasing managed pointers are similar to those
used with FREEZE, except that they apply to all managed pointers
regardless of mutability:
Alias-Managed-Ptr-1:
Type(LV) = @MQ Ty
Add ROOT annotation for *LV
------------------------------------------------------------
ALIAS(*LV, LT, PT) = []
Alias-Managed-Ptr-2:
Type(LV) = @MQ Ty
LT <= Scope(LV)
Mut(LV) = imm
LV is not moved
------------------------------------------------------------
ALIAS(*LV, LT, PT) = []
## The RESERVE function
The final function, RESERVE, is used for loans of `&mut` pointers. As
discussed in the section on the function MUTATE, we must be quite
careful when "re-borrowing" an `&mut` pointer to ensure that the original
`&mut` pointer can no longer be used to mutate.
There are a couple of dangers to be aware of:
- `&mut` pointers do not inherit mutability. Therefore, if you have
an lvalue LV with type `&mut T` and you freeze `LV`, you do *not*
freeze `*LV`. This is quite different from an `LV` with type `~T`.
- Also, because they do not inherit mutability, if the `&mut` pointer
lives in an aliased location, then *any alias* can be used to write!
As a consequence of these two rules, RESERVE can only be successfully
invoked on an lvalue LV that is *owned by the current stack frame*.
This ensures that there are no aliases that are not visible from the
outside. Moreover, Reserve loans are incompatible with all other
loans, even Const loans. This prevents any aliases from being created
within the current function.
### Reserving local variables
The rule for reserving a variable is generally straightforward but
with one interesting twist:
Reserve-Variable:
--------------------------------------------------
RESERVE(x, LT) = (x, LT, Total, Reserve)
The twist here is that the incoming lifetime is not required to
be a subset of the incoming variable, unlike every other case. To
see the reason for this, imagine the following function:
struct Foo { count: uint }
fn count_field(x: &'a mut Foo) -> &'a mut uint {
&mut (*x).count
}
This function consumes one `&mut` pointer and returns another with the
same lifetime pointing at a particular field. The borrow for the
`&mut` expression will result in a call to `RESERVE(x, 'a)`, which is
intended to guarantee that `*x` is not later aliased or used to
mutate. But the lifetime of `x` is limited to the current function,
which is a sublifetime of the parameter `'a`, so the rules used for
MUTATE, FREEZE, and ALIAS (which require that the lifetime of the loan
not exceed the lifetime of the variable) would result in an error.
Nonetheless this function is perfectly legitimate. After all, the
caller has moved in an `&mut` pointer with lifetime `'a`, and thus has
given up their right to mutate the value for the remainder of `'a`.
So it is fine for us to return a pointer with the same lifetime.
The reason that RESERVE differs from the other functions is that
RESERVE is not responsible for guaranteeing that the pointed-to data
will outlive the borrowed pointer being created. After all, `&mut`
values do not own the data they point at.
### Reserving owned content
The rules for fields and owned pointers are very straightforward:
Reserve-Field:
RESERVE(LV, LT) = LOANS
------------------------------------------------------------
RESERVE(LV.f, LT) = LOANS, (LV.F, LT, Total, Reserve)
Reserve-Owned-Ptr:
Type(LV) = ~Ty
RESERVE(LV, LT) = LOANS
------------------------------------------------------------
RESERVE(*LV, LT) = LOANS, (*LV, LT, Total, Reserve)
### Reserving `&mut` borrowed pointers
Unlike other borrowed pointers, `&mut` pointers are unaliasable,
so we can reserve them like everything else:
Reserve-Mut-Borrowed-Ptr:
Type(LV) = &LT_P mut Ty
RESERVE(LV, LT) = LOANS
------------------------------------------------------------
RESERVE(*LV, LT) = LOANS, (*LV, LT, Total, Reserve)
## Borrows in calls
Earlier we said that the MUTATE, FREEZE, and ALIAS functions were used
to compute the loans resulting from a borrow expression. But this is
not strictly correct, there is a slight complication that occurs with
calls by which additional loans may be necessary. We will explain
that here and give the full details.
Imagine a call expression `'a: E1(E2, E3)`, where `Ei` are some
expressions. If we break this down to something a bit lower-level, it
is kind of short for:
'a: {
'a_arg1: let temp1: ... = E1;
'a_arg2: let temp2: ... = E2;
'a_arg3: let temp3: ... = E3;
'a_call: temp1(temp2, temp3)
}
Here the lifetime labels indicate the various lifetimes. As you can
see there are in fact four relevant lifetimes (only one of which was
named by the user): `'a` corresponds to the expression `E1(E2, E3)` as
a whole. `'a_arg1`, `'a_arg2`, and `'a_arg3` correspond to the
evaluations of `E1`, `E2`, and `E3` respectively. Finally, `'a_call`
corresponds to the *actual call*, which is the point where the values
of the parameters will be used.
Now, let's look at a (contrived, but representative) example to see
why all this matters:
struct Foo { f: uint, g: uint }
...
fn add(p: &mut uint, v: uint) {
*p += v;
}
...
fn inc(p: &mut uint) -> uint {
*p += 1; *p
}
fn weird() {
let mut x: ~Foo = ~Foo { ... };
'a: add(&mut (*x).f,
'b: inc(&mut (*x).f)) // (*)
}
The important part is the line marked `(*)` which contains a call to
`add()`. The first argument is a mutable borrow of the field `f`.
The second argument *always borrows* the field `f`. Now, if these two
borrows overlapped in time, this would be illegal, because there would
be two `&mut` pointers pointing at `f`. And, in a way, they *do*
overlap in time, since the first argument will be evaluated first,
meaning that the pointer will exist when the second argument executes.
But in another important way they do not overlap in time. Let's
expand out that final call to `add()` as we did before:
'a: {
'a_arg1: let a_temp1: ... = add;
'a_arg2: let a_temp2: &'a_call mut uint = &'a_call mut (*x).f;
'a_arg3_: let a_temp3: uint = {
let b_temp1: ... = inc;
let b_temp2: &'b_call = &'b_call mut (*x).f;
'b_call: b_temp1(b_temp2)
};
'a_call: a_temp1(a_temp2, a_temp3)
}
When it's written this way, we can see that although there are two
borrows, the first has lifetime `'a_call` and the second has lifetime
`'b_call` and in fact these lifetimes do not overlap. So everything
is fine.
But this does not mean that there isn't reason for caution! Imagine a
devious program like *this* one:
struct Foo { f: uint, g: uint }
...
fn add(p: &mut uint, v: uint) {
*p += v;
}
...
fn consume(x: ~Foo) -> uint {
x.f + x.g
}
fn weird() {
let mut x: ~Foo = ~Foo { ... };
'a: add(&mut (*x).f, consume(x)) // (*)
}
In this case, there is only one borrow, but the second argument is
`consume(x)` instead of a second borrow. Because `consume()` is
declared to take a `~Foo`, it will in fact free the pointer `x` when
it has finished executing. If it is not obvious why this is
troublesome, consider this expanded version of that call:
'a: {
'a_arg1: let a_temp1: ... = add;
'a_arg2: let a_temp2: &'a_call mut uint = &'a_call mut (*x).f;
'a_arg3_: let a_temp3: uint = {
let b_temp1: ... = consume;
let b_temp2: ~Foo = x;
'b_call: b_temp1(b_temp2)
};
'a_call: a_temp1(a_temp2, a_temp3)
}
In this example, we will have borrowed the first argument before `x`
is freed and then free `x` during evaluation of the second
argument. This causes `a_temp2` to be invalidated.
Of course the loans computed from the borrow expression are supposed
to prevent this situation. But if we just considered the loans from
`MUTATE((*x).f, 'a_call, Total)`, the resulting loans would be:
((*x).f, 'a_call, Total, Mut)
(*x, 'a_call, Partial, Mut)
(x, 'a_call, Partial, Mut)
Because these loans are only in scope for `'a_call`, they do nothing
to prevent the move that occurs evaluating the second argument.
The way that we solve this is to say that if you have a borrow
expression `&'LT_P mut LV` which itself occurs in the lifetime
`'LT_B`, then the resulting loans are:
MUTATE(LV, LT_P, Total) + ALIAS(LV, LUB(LT_P, LT_B), Total)
The call to MUTATE is what we've seen so far. The second part
expresses the idea that the expression LV will be evaluated starting
at LT_B until the end of LT_P. Now, in the normal case, LT_P >= LT_B,
and so the second set of loans that result from a ALIAS are basically
a no-op. However, in the case of an argument where the evaluation of
the borrow occurs before the interval where the resulting pointer will
be used, this ALIAS is important.
In the case of our example, it would produce a set of loans like:
((*x).f, 'a, Total, Const)
(*x, 'a, Total, Const)
(x, 'a, Total, Imm)
The scope of these loans is `'a = LUB('a_arg2, 'a_call)`, and so they
encompass all subsequent arguments. The first set of loans are Const
loans, which basically just prevent moves. However, when we cross
over the dereference of the owned pointer `x`, the rule for ALIAS
specifies that `x` must be frozen, and hence the final loan is an Imm
loan. In any case the troublesome second argument would be flagged
as an error.
# Maps that are created
Borrowck results in two maps.
- `root_map`: identifies those expressions or patterns whose result
needs to be rooted. Conceptually the root_map maps from an
expression or pattern node to a `node_id` identifying the scope for
which the expression must be rooted (this `node_id` should identify
a block or call). The actual key to the map is not an expression id,
however, but a `root_map_key`, which combines an expression id with a
deref count and is used to cope with auto-deref.
*/

View file

@ -1,641 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ----------------------------------------------------------------------
// Gathering loans
//
// The borrow check proceeds in two phases. In phase one, we gather the full
// set of loans that are required at any point. These are sorted according to
// their associated scopes. In phase two, checking loans, we will then make
// sure that all of these loans are honored.
use middle::borrowck::preserve::{PreserveCondition, PcOk, PcIfPure};
use middle::borrowck::{Loan, bckerr, bckres, BorrowckCtxt, err_mutbl};
use middle::borrowck::{LoanKind, TotalFreeze, PartialFreeze,
TotalTake, PartialTake, Immobile};
use middle::borrowck::ReqMaps;
use middle::borrowck::loan;
use middle::mem_categorization::{cmt, mem_categorization_ctxt};
use middle::pat_util;
use middle::ty::{ty_region};
use middle::ty;
use util::common::indenter;
use util::ppaux::{Repr, region_to_str};
use core::hashmap::{HashSet, HashMap};
use syntax::ast::{m_const, m_imm, m_mutbl};
use syntax::ast;
use syntax::codemap::span;
use syntax::print::pprust;
use syntax::visit;
/// Context used while gathering loans:
///
/// - `bccx`: the borrow check context
/// - `req_maps`: the maps computed by `gather_loans()`, see def'n of the
/// struct `ReqMaps` for more info
/// - `item_ub`: the id of the block for the enclosing fn/method item
/// - `root_ub`: the id of the outermost block for which we can root
/// an `@T`. This is the id of the innermost enclosing
/// loop or function body.
///
/// The role of `root_ub` is to prevent us from having to accumulate
/// vectors of rooted items at runtime. Consider this case:
///
/// fn foo(...) -> int {
/// let mut ptr: &int;
/// while some_cond {
/// let x: @int = ...;
/// ptr = &*x;
/// }
/// *ptr
/// }
///
/// If we are not careful here, we would infer the scope of the borrow `&*x`
/// to be the body of the function `foo()` as a whole. We would then
/// have to root each `@int` that is produced, which is an unbounded number.
/// No good. Instead what will happen is that `root_ub` will be set to the
/// body of the while loop and we will refuse to root the pointer `&*x`
/// because it would have to be rooted for a region greater than `root_ub`.
/// Mutable state threaded through the loan-gathering visitor.
/// (Field roles are described in the `///` block above this struct.)
struct GatherLoanCtxt {
    bccx: @BorrowckCtxt,            // shared borrow-check context
    req_maps: ReqMaps,              // output: required loans + purity constraints
    item_ub: ast::node_id,          // body block id of the enclosing fn/method item
    root_ub: ast::node_id,          // innermost enclosing loop/fn body (rooting bound)
    ignore_adjustments: HashSet<ast::node_id> // exprs whose adjustments are skipped (#3387 hack)
}
/// Entry point for phase one of borrow checking: walks the whole crate
/// and returns the `ReqMaps` (required loans per scope + purity map)
/// accumulated by the visitor callbacks below.
pub fn gather_loans(bccx: @BorrowckCtxt, crate: @ast::crate) -> ReqMaps {
    let glcx = @mut GatherLoanCtxt {
        bccx: bccx,
        req_maps: ReqMaps { req_loan_map: HashMap::new(),
                            pure_map: HashMap::new() },
        item_ub: 0,
        root_ub: 0,
        ignore_adjustments: HashSet::new()
    };
    let v = visit::mk_vt(@visit::Visitor {visit_expr: req_loans_in_expr,
                                          visit_fn: req_loans_in_fn,
                                          visit_stmt: add_stmt_to_map,
                                          .. *visit::default_visitor()});
    visit::visit_crate(crate, glcx, v);
    // destructure the @-box to move the maps out of the visitor context
    let @GatherLoanCtxt{req_maps, _} = glcx;
    return req_maps;
}
/// Visitor callback for fn items/methods/closures: saves and restores
/// the rooting upper bounds (`item_ub`, `root_ub`) around the body.
fn req_loans_in_fn(fk: &visit::fn_kind,
                   decl: &ast::fn_decl,
                   body: &ast::blk,
                   sp: span,
                   id: ast::node_id,
                   self: @mut GatherLoanCtxt,
                   v: visit::vt<@mut GatherLoanCtxt>) {
    // see explanation attached to the `root_ub` field:
    let old_item_id = self.item_ub;
    let old_root_ub = self.root_ub;
    self.root_ub = body.node.id;

    match *fk {
        // closures keep the enclosing item's `item_ub`
        visit::fk_anon(*) | visit::fk_fn_block(*) => {}
        // a new fn/method item starts a fresh `item_ub`
        visit::fk_item_fn(*) | visit::fk_method(*) => {
            self.item_ub = body.node.id;
        }
    }

    visit::visit_fn(fk, decl, body, sp, id, self, v);
    self.root_ub = old_root_ub;
    self.item_ub = old_item_id;
}
/// Visitor callback for expressions: gathers loans implied by borrows
/// (`&e`), match bindings, overloaded operators / method calls, and
/// adjusts `root_ub` when entering loop conditions and bodies.
fn req_loans_in_expr(ex: @ast::expr,
                     self: @mut GatherLoanCtxt,
                     vt: visit::vt<@mut GatherLoanCtxt>) {
    let bccx = self.bccx;
    let tcx = bccx.tcx;
    let old_root_ub = self.root_ub;

    debug!("req_loans_in_expr(expr=%?/%s)",
           ex.id, pprust::expr_to_str(ex, tcx.sess.intr()));

    // If this expression is borrowed, have to ensure it remains valid:
    {
        let mut this = &mut *self;
        if !this.ignore_adjustments.contains(&ex.id) {
            for tcx.adjustments.find(&ex.id).each |&adjustments| {
                this.guarantee_adjustments(ex, *adjustments);
            }
        }
    }

    // Special checks for various kinds of expressions:
    match ex.node {
      ast::expr_addr_of(mutbl, base) => {
        let base_cmt = self.bccx.cat_expr(base);

        // make sure that the thing we are pointing out stays valid
        // for the lifetime `scope_r` of the resulting ptr:
        let scope_r = ty_region(tcx, ex.span, tcx.ty(ex));
        self.guarantee_valid(base_cmt, mutbl, scope_r);
        visit::visit_expr(ex, self, vt);
      }

      ast::expr_match(ex_v, ref arms) => {
        let cmt = self.bccx.cat_expr(ex_v);
        // each pattern in each arm may induce borrows (e.g. `ref x`)
        for (*arms).each |arm| {
            for arm.pats.each |pat| {
                self.gather_pat(cmt, *pat, arm.body.node.id, ex.id);
            }
        }
        visit::visit_expr(ex, self, vt);
      }

      ast::expr_index(rcvr, _) |
      ast::expr_binary(_, rcvr, _) |
      ast::expr_unary(_, rcvr) |
      ast::expr_assign_op(_, rcvr, _)
      if self.bccx.method_map.contains_key(&ex.id) => {
        // Receivers in method calls are always passed by ref.
        //
        // Here, in an overloaded operator, the call is this expression,
        // and hence the scope of the borrow is this call.
        //
        // FIX? / NOT REALLY---technically we should check the other
        // argument and consider the argument mode. But how annoying.
        // And this problem goes away when argument modes are
        // phased out. So I elect to leave this undone.
        let scope_r = ty::re_scope(ex.id);
        let rcvr_cmt = self.bccx.cat_expr(rcvr);
        self.guarantee_valid(rcvr_cmt, m_imm, scope_r);

        // FIXME (#3387): Total hack: Ignore adjustments for the left-hand
        // side. Their regions will be inferred to be too large.
        self.ignore_adjustments.insert(rcvr.id);

        visit::visit_expr(ex, self, vt);
      }

      // FIXME--#3387
      // ast::expr_binary(_, lhs, rhs) => {
      //     // Universal comparison operators like ==, >=, etc
      //     // take their arguments by reference.
      //     let lhs_ty = ty::expr_ty(self.tcx(), lhs);
      //     if !ty::type_is_scalar(lhs_ty) {
      //         let scope_r = ty::re_scope(ex.id);
      //         let lhs_cmt = self.bccx.cat_expr(lhs);
      //         self.guarantee_valid(lhs_cmt, m_imm, scope_r);
      //         let rhs_cmt = self.bccx.cat_expr(rhs);
      //         self.guarantee_valid(rhs_cmt, m_imm, scope_r);
      //     }
      //     visit::visit_expr(ex, self, vt);
      // }

      ast::expr_field(rcvr, _, _)
      if self.bccx.method_map.contains_key(&ex.id) => {
        // Receivers in method calls are always passed by ref.
        //
        // Here, the field a.b is in fact a closure. Eventually, this
        // should be an &fn, but for now it's an @fn. In any case,
        // the enclosing scope is either the call where it is a rcvr
        // (if used like `a.b(...)`), the call where it's an argument
        // (if used like `x(a.b)`), or the block (if used like `let x
        // = a.b`).
        let scope_r = self.tcx().region_maps.encl_region(ex.id);
        let rcvr_cmt = self.bccx.cat_expr(rcvr);
        self.guarantee_valid(rcvr_cmt, m_imm, scope_r);
        visit::visit_expr(ex, self, vt);
      }

      // see explanation attached to the `root_ub` field:
      ast::expr_while(cond, ref body) => {
        // during the condition, can only root for the condition
        self.root_ub = cond.id;
        (vt.visit_expr)(cond, self, vt);

        // during body, can only root for the body
        self.root_ub = body.node.id;
        (vt.visit_block)(body, self, vt);
      }

      // see explanation attached to the `root_ub` field:
      ast::expr_loop(ref body, _) => {
        self.root_ub = body.node.id;
        visit::visit_expr(ex, self, vt);
      }

      _ => {
        visit::visit_expr(ex, self, vt);
      }
    }

    // Check any contained expressions:
    self.root_ub = old_root_ub;
}
pub impl GatherLoanCtxt {
    /// Convenience accessor for the type context.
    fn tcx(&mut self) -> ty::ctxt { self.bccx.tcx }

    /// Gathers loans for any autoref implied by an autoderef/autoref
    /// adjustment recorded on `expr`.
    fn guarantee_adjustments(&mut self,
                             expr: @ast::expr,
                             adjustment: &ty::AutoAdjustment) {
        debug!("guarantee_adjustments(expr=%s, adjustment=%?)",
               expr.repr(self.tcx()), adjustment);
        let _i = indenter();

        match *adjustment {
            ty::AutoAddEnv(*) => {
                debug!("autoaddenv -- no autoref");
                return;
            }

            ty::AutoDerefRef(
                ty::AutoDerefRef {
                    autoref: None, _ }) => {
                debug!("no autoref");
                return;
            }

            ty::AutoDerefRef(
                ty::AutoDerefRef {
                    autoref: Some(ref autoref),
                    autoderefs: autoderefs}) => {
                // categorize the expression after the implicit derefs,
                // then guarantee validity of whatever is autoref'd
                let mcx = &mem_categorization_ctxt {
                    tcx: self.tcx(),
                    method_map: self.bccx.method_map};
                let cmt = mcx.cat_expr_autoderefd(expr, autoderefs);
                debug!("after autoderef, cmt=%s", self.bccx.cmt_to_repr(cmt));

                match autoref.kind {
                    ty::AutoPtr => {
                        self.guarantee_valid(cmt,
                                             autoref.mutbl,
                                             autoref.region)
                    }
                    ty::AutoBorrowVec | ty::AutoBorrowVecRef => {
                        let cmt_index = mcx.cat_index(expr, cmt);
                        self.guarantee_valid(cmt_index,
                                             autoref.mutbl,
                                             autoref.region)
                    }
                    ty::AutoBorrowFn => {
                        let cmt_deref = mcx.cat_deref_fn(expr, cmt, 0);
                        self.guarantee_valid(cmt_deref,
                                             autoref.mutbl,
                                             autoref.region)
                    }
                }
            }
        }
    }

    // guarantees that addr_of(cmt) will be valid for the duration of
    // `static_scope_r`, or reports an error. This may entail taking
    // out loans, which will be added to the `req_loan_map`. This can
    // also entail "rooting" GC'd pointers, which means ensuring
    // dynamically that they are not freed.
    fn guarantee_valid(&mut self,
                       cmt: cmt,
                       req_mutbl: ast::mutability,
                       scope_r: ty::Region)
    {
        // map the requested mutability onto the loan-kind lattice
        let loan_kind = match req_mutbl {
            m_mutbl => TotalTake,
            m_imm => TotalFreeze,
            m_const => Immobile
        };

        self.bccx.stats.guaranteed_paths += 1;

        debug!("guarantee_valid(cmt=%s, req_mutbl=%?, \
                loan_kind=%?, scope_r=%s)",
               self.bccx.cmt_to_repr(cmt),
               req_mutbl,
               loan_kind,
               region_to_str(self.tcx(), scope_r));
        let _i = indenter();

        match cmt.lp {
          // If this expression is a loanable path, we MUST take out a
          // loan. This is somewhat non-obvious. You might think,
          // for example, that if we have an immutable local variable
          // `x` whose value is being borrowed, we could rely on `x`
          // not to change. This is not so, however, because even
          // immutable locals can be moved. So we take out a loan on
          // `x`, guaranteeing that it remains immutable for the
          // duration of the reference: if there is an attempt to move
          // it within that scope, the loan will be detected and an
          // error will be reported.
          Some(_) => {
              match loan::loan(self.bccx, cmt, scope_r, loan_kind) {
                  Err(ref e) => { self.bccx.report((*e)); }
                  Ok(loans) => {
                      self.add_loans(cmt, loan_kind, scope_r, loans);
                  }
              }
          }

          // The path is not loanable: in that case, we must try and
          // preserve it dynamically (or see that it is preserved by
          // virtue of being rooted in some immutable path). We must
          // also check that the mutability of the desired pointer
          // matches with the actual mutability (but if an immutable
          // pointer is desired, that is ok as long as we are pure)
          None => {
            let result: bckres<PreserveCondition> = {
                do self.check_mutbl(loan_kind, cmt).chain |pc1| {
                    do self.bccx.preserve(cmt, scope_r,
                                          self.item_ub,
                                          self.root_ub).chain |pc2| {
                        Ok(pc1.combine(pc2))
                    }
                }
            };

            match result {
                Ok(PcOk) => {
                    debug!("result of preserve: PcOk");

                    // we were able to guarantee the validity of the ptr,
                    // perhaps by rooting or because it is immutably
                    // rooted. good.
                    self.bccx.stats.stable_paths += 1;
                }
                Ok(PcIfPure(ref e)) => {
                    debug!("result of preserve: %?", PcIfPure((*e)));

                    // we are only able to guarantee the validity if
                    // the scope is pure
                    match scope_r {
                        ty::re_scope(pure_id) => {
                            // if the scope is some block/expr in the
                            // fn, then just require that this scope
                            // be pure
                            self.req_maps.pure_map.insert(pure_id, *e);
                            self.bccx.stats.req_pure_paths += 1;

                            debug!("requiring purity for scope %?",
                                   scope_r);

                            if self.tcx().sess.borrowck_note_pure() {
                                self.bccx.span_note(
                                    cmt.span,
                                    fmt!("purity required"));
                            }
                        }
                        _ => {
                            // otherwise, we can't enforce purity for
                            // that scope, so give up and report an
                            // error
                            self.bccx.report((*e));
                        }
                    }
                }
                Err(ref e) => {
                    // we cannot guarantee the validity of this pointer
                    debug!("result of preserve: error");
                    self.bccx.report((*e));
                }
            }
          }
        }
    }

    // Check that the pat `cmt` is compatible with the required
    // mutability, presuming that it can be preserved to stay alive
    // long enough.
    //
    // For example, if you have an expression like `&x.f` where `x`
    // has type `@mut{f:int}`, this check might fail because `&x.f`
    // requires an immutable pointer, but `f` lives in (aliased)
    // mutable memory.
    fn check_mutbl(&mut self,
                   loan_kind: LoanKind,
                   cmt: cmt)
                -> bckres<PreserveCondition> {
        debug!("check_mutbl(loan_kind=%?, cmt.mutbl=%?)",
               loan_kind, cmt.mutbl);

        match loan_kind {
            Immobile => Ok(PcOk),

            TotalTake | PartialTake => {
                if cmt.mutbl.is_mutable() {
                    Ok(PcOk)
                } else {
                    Err(bckerr { cmt: cmt, code: err_mutbl(loan_kind) })
                }
            }

            TotalFreeze | PartialFreeze => {
                if cmt.mutbl.is_immutable() {
                    Ok(PcOk)
                } else if cmt.cat.is_mutable_box() {
                    Ok(PcOk)
                } else {
                    // Eventually:
                    let e = bckerr {cmt: cmt,
                                    code: err_mutbl(loan_kind)};
                    Ok(PcIfPure(e))
                }
            }
        }
    }

    /// Records `loans` under the scope id derived from `scope_r` and
    /// updates the loan statistics.
    fn add_loans(&mut self,
                 cmt: cmt,
                 loan_kind: LoanKind,
                 scope_r: ty::Region,
                 loans: ~[Loan]) {
        if loans.len() == 0 {
            return;
        }

        // Normally we wouldn't allow `re_free` here. However, in this case
        // it should be sound. Below is nmatsakis' reasoning:
        //
        // Perhaps [this permits] a function kind of like this one here, which
        // consumes one mut pointer and returns a narrower one:
        //
        //     struct Foo { f: int }
        //     fn foo(p: &'v mut Foo) -> &'v mut int { &mut p.f }
        //
        // I think this should work fine but there is more subtlety to it than
        // I at first imagined. Unfortunately it's a very important use case,
        // I think, so it really ought to work. The changes you [pcwalton]
        // made to permit re_free() do permit this case, I think, but I'm not
        // sure what else they permit. I have to think that over a bit.
        //
        // Ordinarily, a loan with scope re_free wouldn't make sense, because
        // you couldn't enforce it. But in this case, your function signature
        // informs the caller that you demand exclusive access to p and its
        // contents for the lifetime v. Since borrowed pointers are
        // non-copyable, they must have (a) made a borrow which will enforce
        // those conditions and then (b) given you the resulting pointer.
        // Therefore, they should be respecting the loan. So it actually seems
        // that it's ok in this case to have a loan with re_free, so long as
        // the scope of the loan is no greater than the region pointer on
        // which it is based. Neat but not something I had previously
        // considered all the way through. (Note that we already rely on
        // similar reasoning to permit you to return borrowed pointers into
        // immutable structures, this is just the converse I suppose)

        let scope_id = match scope_r {
            ty::re_scope(scope_id) |
            ty::re_free(ty::FreeRegion {scope_id, _}) => {
                scope_id
            }
            _ => {
                // NOTE(review): this diagnostic text reads "scope is
                // scope_region is" — the message string itself is garbled,
                // but it is runtime output and left untouched here.
                self.bccx.tcx.sess.span_bug(
                    cmt.span,
                    fmt!("loans required but scope is scope_region is %s \
                          (%?)",
                         region_to_str(self.tcx(), scope_r),
                         scope_r));
            }
        };

        self.add_loans_to_scope_id(scope_id, loans);

        if loan_kind.is_freeze() && !cmt.mutbl.is_immutable() {
            self.bccx.stats.loaned_paths_imm += 1;

            if self.tcx().sess.borrowck_note_loan() {
                self.bccx.span_note(
                    cmt.span,
                    fmt!("immutable loan required"));
            }
        } else {
            self.bccx.stats.loaned_paths_same += 1;
        }
    }

    /// Appends `loans` to the per-scope loan list, creating the list
    /// on first use.
    fn add_loans_to_scope_id(&mut self,
                             scope_id: ast::node_id,
                             loans: ~[Loan]) {
        debug!("adding %u loans to scope_id %?: %s",
               loans.len(), scope_id,
               str::connect(loans.map(|l| self.bccx.loan_to_repr(l)), ", "));
        match self.req_maps.req_loan_map.find(&scope_id) {
            Some(req_loans) => {
                req_loans.push_all(loans);
                return;
            }
            None => {}
        }
        self.req_maps.req_loan_map.insert(scope_id, @mut loans);
    }

    /// Walks a match pattern gathering loans induced by `ref` bindings
    /// and vector slice patterns.
    fn gather_pat(@mut self,
                  discr_cmt: cmt,
                  root_pat: @ast::pat,
                  arm_id: ast::node_id,
                  match_id: ast::node_id) {
        do self.bccx.cat_pattern(discr_cmt, root_pat) |cmt, pat| {
            match pat.node {
              ast::pat_ident(bm, _, _) if self.pat_is_binding(pat) => {
                match bm {
                  ast::bind_by_ref(mutbl) => {
                    // ref x or ref x @ p --- creates a ptr which must
                    // remain valid for the scope of the match

                    // find the region of the resulting pointer (note that
                    // the type of such a pattern will *always* be a
                    // region pointer)
                    let scope_r = ty_region(self.tcx(), pat.span,
                                            self.tcx().ty(pat));

                    // if the scope of the region ptr turns out to be
                    // specific to this arm, wrap the categorization with
                    // a cat_discr() node.  There is a detailed discussion
                    // of the function of this node in method preserve():
                    let arm_scope = ty::re_scope(arm_id);
                    if self.bccx.is_subregion_of(scope_r, arm_scope) {
                        let cmt_discr = self.bccx.cat_discr(cmt, match_id);
                        self.guarantee_valid(cmt_discr, mutbl, scope_r);
                    } else {
                        self.guarantee_valid(cmt, mutbl, scope_r);
                    }
                  }
                  ast::bind_by_copy | ast::bind_infer => {
                    // Nothing to do here; neither copies nor moves induce
                    // borrows.
                  }
                }
              }

              ast::pat_vec(_, Some(slice_pat), _) => {
                  // The `slice_pat` here creates a slice into the
                  // original vector.  This is effectively a borrow of
                  // the elements of the vector being matched.

                  let slice_ty = self.tcx().ty(slice_pat);
                  let (slice_mutbl, slice_r) =
                      self.vec_slice_info(slice_pat, slice_ty);
                  let mcx = self.bccx.mc_ctxt();
                  let cmt_index = mcx.cat_index(slice_pat, cmt);
                  self.guarantee_valid(cmt_index, slice_mutbl, slice_r);
              }

              _ => {}
            }
        }
    }

    fn vec_slice_info(@mut self,
                      pat: @ast::pat,
                      slice_ty: ty::t) -> (ast::mutability, ty::Region) {
        /*!
         *
         * In a pattern like [a, b, ..c], normally `c` has slice type,
         * but if you have [a, b, ..ref c], then the type of `ref c`
         * will be `&&[]`, so to extract the slice details we have
         * to recurse through rptrs.
         */

        match ty::get(slice_ty).sty {
            ty::ty_evec(slice_mt, ty::vstore_slice(slice_r)) => {
                (slice_mt.mutbl, slice_r)
            }

            ty::ty_rptr(_, ref mt) => {
                self.vec_slice_info(pat, mt.ty)
            }

            _ => {
                self.tcx().sess.span_bug(
                    pat.span,
                    fmt!("Type of slice pattern is not a slice"));
            }
        }
    }

    /// True if `pat` refers to an enum variant or struct constructor.
    fn pat_is_variant_or_struct(@mut self, pat: @ast::pat) -> bool {
        pat_util::pat_is_variant_or_struct(self.bccx.tcx.def_map, pat)
    }

    /// True if `pat` introduces a binding (as opposed to a constant/path).
    fn pat_is_binding(@mut self, pat: @ast::pat) -> bool {
        pat_util::pat_is_binding(self.bccx.tcx.def_map, pat)
    }
}
// Setting up info that preserve needs.
// This is just the most convenient place to do it.
/// Visitor callback that records expression-statement ids in
/// `stmt_map`; info later consumed by the preserve pass.
fn add_stmt_to_map(stmt: @ast::stmt,
                   self: @mut GatherLoanCtxt,
                   vt: visit::vt<@mut GatherLoanCtxt>) {
    match stmt.node {
        ast::stmt_expr(_, id) | ast::stmt_semi(_, id) => {
            self.bccx.stmt_map.insert(id);
        }
        _ => ()
    }
    visit::visit_stmt(stmt, self, vt);
}

View file

@ -0,0 +1,347 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This module implements the check that the lifetime of a borrow
//! does not exceed the lifetime of the value being borrowed.
use core::prelude::*;
use middle::borrowck::*;
use mc = middle::mem_categorization;
use middle::ty;
use syntax::ast::{m_const, m_imm, m_mutbl};
use syntax::ast;
use syntax::codemap::span;
use util::ppaux::{note_and_explain_region};
/// Checks that a borrow of `cmt` for `loan_region` does not outlive the
/// borrowed value, reporting errors (or registering dynamic roots)
/// through the context as needed.
pub fn guarantee_lifetime(bccx: @BorrowckCtxt,
                          item_scope_id: ast::node_id,
                          root_scope_id: ast::node_id,
                          span: span,
                          cmt: mc::cmt,
                          loan_region: ty::Region,
                          loan_mutbl: ast::mutability) {
    debug!("guarantee_lifetime(cmt=%s, loan_region=%s)",
           cmt.repr(bccx.tcx), loan_region.repr(bccx.tcx));
    let ctxt = GuaranteeLifetimeContext {bccx: bccx,
                                         item_scope_id: item_scope_id,
                                         span: span,
                                         loan_region: loan_region,
                                         loan_mutbl: loan_mutbl,
                                         cmt_original: cmt,
                                         root_scope_id: root_scope_id};
    ctxt.check(cmt, None);
}
///////////////////////////////////////////////////////////////////////////
// Private
/// Immutable parameters for one lifetime-guarantee check.
struct GuaranteeLifetimeContext {
    bccx: @BorrowckCtxt,

    // the node id of the function body for the enclosing item
    item_scope_id: ast::node_id,

    // the node id of the innermost loop / function body; this is the
    // longest scope for which we can root managed boxes
    root_scope_id: ast::node_id,

    span: span,                  // where the borrow occurs (for diagnostics)
    loan_region: ty::Region,     // how long the borrow must remain valid
    loan_mutbl: ast::mutability, // mutability requested by the borrow
    cmt_original: mc::cmt        // the cmt the check started from
}
impl GuaranteeLifetimeContext {
    /// Convenience accessor for the type context.
    fn tcx(&self) -> ty::ctxt {
        self.bccx.tcx
    }

    fn check(&self, cmt: mc::cmt, discr_scope: Option<ast::node_id>) {
        //! Main routine. Walks down `cmt` until we find the "guarantor".

        match cmt.cat {
            mc::cat_rvalue |
            mc::cat_implicit_self |
            mc::cat_copied_upvar(*) |
            mc::cat_local(*) |
            mc::cat_arg(*) |
            mc::cat_self(*) |
            mc::cat_deref(_, _, mc::region_ptr(*)) |
            mc::cat_deref(_, _, mc::unsafe_ptr) => {
                // these categories have a fixed maximal scope: just
                // verify the loan fits inside it
                let scope = self.scope(cmt);
                self.check_scope(scope)
            }

            mc::cat_stack_upvar(cmt) => {
                self.check(cmt, discr_scope)
            }

            mc::cat_static_item => {
                // static items live forever; nothing to check
            }

            mc::cat_deref(base, derefs, mc::gc_ptr(ptr_mutbl)) => {
                let base_scope = self.scope(base);

                // See rule Freeze-Imm-Managed-Ptr-2 in doc.rs
                let omit_root = (
                    ptr_mutbl == m_imm &&
                    self.bccx.is_subregion_of(self.loan_region, base_scope) &&
                    base.mutbl.is_immutable() &&
                    !self.is_moved(base)
                );

                if !omit_root {
                    self.check_root(cmt, base, derefs, ptr_mutbl, discr_scope);
                } else {
                    debug!("omitting root, base=%s, base_scope=%?",
                           base.repr(self.tcx()), base_scope);
                }
            }

            mc::cat_deref(base, _, mc::uniq_ptr(*)) |
            mc::cat_interior(base, _) => {
                // owned pointers/interior paths inherit validity from
                // their base
                self.check(base, discr_scope)
            }

            mc::cat_discr(base, new_discr_scope) => {
                // Subtle: in a match, we must ensure that each binding
                // variable remains valid for the duration of the arm in
                // which it appears, presuming that this arm is taken.
                // But it is inconvenient in trans to root something just
                // for one arm.  Therefore, we insert a cat_discr(),
                // basically a special kind of category that says "if this
                // value must be dynamically rooted, root it for the scope
                // `match_id`.
                //
                // As an example, consider this scenario:
                //
                //    let mut x = @Some(3);
                //    match *x { Some(y) {...} None {...} }
                //
                // Technically, the value `x` need only be rooted
                // in the `some` arm.  However, we evaluate `x` in trans
                // before we know what arm will be taken, so we just
                // always root it for the duration of the match.
                //
                // As a second example, consider *this* scenario:
                //
                //    let x = @mut @Some(3);
                //    match x { @@Some(y) {...} @@None {...} }
                //
                // Here again, `x` need only be rooted in the `some` arm.
                // In this case, the value which needs to be rooted is
                // found only when checking which pattern matches: but
                // this check is done before entering the arm.  Therefore,
                // even in this case we just choose to keep the value
                // rooted for the entire match.  This means the value will be
                // rooted even if the none arm is taken.  Oh well.
                //
                // At first, I tried to optimize the second case to only
                // root in one arm, but the result was suboptimal: first,
                // it interfered with the construction of phi nodes in the
                // arm, as we were adding code to root values before the
                // phi nodes were added.  This could have been addressed
                // with a second basic block.  However, the naive approach
                // also yielded suboptimal results for patterns like:
                //
                //    let x = @mut @...;
                //    match x { @@some_variant(y) | @@some_other_variant(y) =>
                //
                // The reason is that we would root the value once for
                // each pattern and not once per arm.  This is also easily
                // fixed, but it's yet more code for what is really quite
                // the corner case.
                //
                // Nonetheless, if you decide to optimize this case in the
                // future, you need only adjust where the cat_discr()
                // node appears to draw the line between what will be rooted
                // in the *arm* vs the *match*.

                self.check(base, Some(new_discr_scope))
            }
        }
    }

    /// Registers a dynamic root (and possibly a dynamic freeze) for a
    /// managed-box deref, or reports an error if the loan outlives the
    /// maximum rootable scope.
    fn check_root(&self,
                  cmt_deref: mc::cmt,
                  cmt_base: mc::cmt,
                  derefs: uint,
                  ptr_mutbl: ast::mutability,
                  discr_scope: Option<ast::node_id>) {
        debug!("check_root(cmt_deref=%s, cmt_base=%s, derefs=%?, ptr_mutbl=%?, \
                discr_scope=%?)",
               cmt_deref.repr(self.tcx()),
               cmt_base.repr(self.tcx()),
               derefs,
               ptr_mutbl,
               discr_scope);

        // Make sure that the loan does not exceed the maximum time
        // that we can root the value, dynamically.
        let root_region = ty::re_scope(self.root_scope_id);
        if !self.bccx.is_subregion_of(self.loan_region, root_region) {
            self.report_error(
                err_out_of_root_scope(root_region, self.loan_region));
            return;
        }

        // Extract the scope id that indicates how long the rooting is required
        let root_scope = match self.loan_region {
            ty::re_scope(id) => id,
            _ => {
                // the check above should fail for anything that is not re_scope
                self.bccx.tcx.sess.span_bug(
                    cmt_base.span,
                    fmt!("Cannot issue root for scope region: %?",
                         self.loan_region));
            }
        };

        // If inside of a match arm, expand the rooting to the entire
        // match. See the detailed discussion in `check()` above.
        let mut root_scope = match discr_scope {
            None => root_scope,
            Some(id) => {
                if self.bccx.is_subscope_of(root_scope, id) {
                    id
                } else {
                    root_scope
                }
            }
        };

        // If we are borrowing the inside of an `@mut` box,
        // we need to dynamically mark it to prevent incompatible
        // borrows from happening later.
        let opt_dyna = match ptr_mutbl {
            m_imm | m_const => None,
            m_mutbl => {
                match self.loan_mutbl {
                    m_mutbl => Some(DynaMut),
                    m_imm | m_const => Some(DynaImm)
                }
            }
        };

        // FIXME(#3511) grow to the nearest cleanup scope---this can
        // cause observable errors if freezing!
        if !self.bccx.tcx.region_maps.is_cleanup_scope(root_scope) {
            debug!("%? is not a cleanup scope, adjusting", root_scope);

            let cleanup_scope =
                self.bccx.tcx.region_maps.cleanup_scope(root_scope);

            if opt_dyna.is_some() {
                self.tcx().sess.span_warn(
                    self.span,
                    fmt!("Dynamic freeze scope artifically extended \
                          (see Issue #6248)"));
                note_and_explain_region(
                    self.bccx.tcx,
                    "managed value only needs to be frozen for ",
                    ty::re_scope(root_scope),
                    "...");
                note_and_explain_region(
                    self.bccx.tcx,
                    "...but due to Issue #6248, it will be frozen for ",
                    ty::re_scope(cleanup_scope),
                    "");
            }

            root_scope = cleanup_scope;
        }

        // Add a record of what is required
        let rm_key = root_map_key {id: cmt_deref.id, derefs: derefs};
        let root_info = RootInfo {scope: root_scope, freeze: opt_dyna};

        self.bccx.root_map.insert(rm_key, root_info);

        debug!("root_key: %? root_info: %?", rm_key, root_info);
    }

    fn check_scope(&self, max_scope: ty::Region) {
        //! Reports an error if `loan_region` is larger than `valid_scope`

        if !self.bccx.is_subregion_of(self.loan_region, max_scope) {
            self.report_error(err_out_of_scope(max_scope, self.loan_region));
        }
    }

    fn is_moved(&self, cmt: mc::cmt) -> bool {
        //! True if `cmt` is something that is potentially moved
        //! out of the current stack frame.

        match cmt.guarantor().cat {
            mc::cat_local(id) |
            mc::cat_self(id) |
            mc::cat_arg(id) => {
                self.bccx.moved_variables_set.contains(&id)
            }
            mc::cat_rvalue |
            mc::cat_static_item |
            mc::cat_implicit_self |
            mc::cat_copied_upvar(*) |
            mc::cat_deref(*) => {
                false
            }
            r @ mc::cat_interior(*) |
            r @ mc::cat_stack_upvar(*) |
            r @ mc::cat_discr(*) => {
                // `guarantor()` should never return one of these
                self.tcx().sess.span_bug(
                    cmt.span,
                    fmt!("illegal guarantor category: %?", r));
            }
        }
    }

    fn scope(&self, cmt: mc::cmt) -> ty::Region {
        //! Returns the maximal region scope for which the
        //! lvalue `cmt` is guaranteed to be valid without any
        //! rooting etc, and presuming `cmt` is not mutated.

        match cmt.cat {
            mc::cat_rvalue => {
                ty::re_scope(self.bccx.tcx.region_maps.cleanup_scope(cmt.id))
            }
            mc::cat_implicit_self |
            mc::cat_copied_upvar(_) => {
                ty::re_scope(self.item_scope_id)
            }
            mc::cat_static_item => {
                ty::re_static
            }
            mc::cat_local(local_id) |
            mc::cat_arg(local_id) |
            mc::cat_self(local_id) => {
                self.bccx.tcx.region_maps.encl_region(local_id)
            }
            mc::cat_deref(_, _, mc::unsafe_ptr(*)) => {
                ty::re_static
            }
            mc::cat_deref(_, _, mc::region_ptr(_, r)) => {
                r
            }
            mc::cat_deref(cmt, _, mc::uniq_ptr(*)) |
            mc::cat_deref(cmt, _, mc::gc_ptr(*)) |
            mc::cat_interior(cmt, _) |
            mc::cat_stack_upvar(cmt) |
            mc::cat_discr(cmt, _) => {
                // derived paths are valid as long as their base is
                self.scope(cmt)
            }
        }
    }

    /// Emits a borrowck error against the cmt the check started from.
    fn report_error(&self, code: bckerr_code) {
        self.bccx.report(BckError {
            cmt: self.cmt_original,
            span: self.span,
            code: code
        });
    }
}

View file

@ -0,0 +1,636 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ----------------------------------------------------------------------
// Gathering loans
//
// The borrow check proceeds in two phases. In phase one, we gather the full
// set of loans that are required at any point. These are sorted according to
// their associated scopes. In phase two, checking loans, we will then make
// sure that all of these loans are honored.
use core::prelude::*;
use middle::borrowck::*;
use mc = middle::mem_categorization;
use middle::pat_util;
use middle::ty::{ty_region};
use middle::ty;
use util::common::indenter;
use util::ppaux::{Repr};
use syntax::ast::{m_const, m_imm, m_mutbl};
use syntax::ast;
use syntax::ast_util::id_range;
use syntax::codemap::span;
use syntax::print::pprust;
use syntax::visit;
mod lifetime;
mod restrictions;
/// Context used while gathering loans:
///
/// - `bccx`: the borrow check context
/// - `item_ub`: the id of the block for the enclosing fn/method item
/// - `root_ub`: the id of the outermost block for which we can root
/// an `@T`. This is the id of the innermost enclosing
/// loop or function body.
///
/// The role of `root_ub` is to prevent us from having to accumulate
/// vectors of rooted items at runtime. Consider this case:
///
/// fn foo(...) -> int {
/// let mut ptr: &int;
/// while some_cond {
/// let x: @int = ...;
/// ptr = &*x;
/// }
/// *ptr
/// }
///
/// If we are not careful here, we would infer the scope of the borrow `&*x`
/// to be the body of the function `foo()` as a whole. We would then
/// have to root each `@int` that is produced, which is an unbounded number.
/// No good. Instead what will happen is that `root_ub` will be set to the
/// body of the while loop and we will refuse to root the pointer `&*x`
/// because it would have to be rooted for a region greater than `root_ub`.
/// Mutable state threaded through the loan-gathering visitor.
/// (Field roles are described in the `///` block above this struct.)
struct GatherLoanCtxt {
    bccx: @BorrowckCtxt,          // shared borrow-check context
    id_range: id_range,           // range of node ids seen while walking
    all_loans: @mut ~[Loan],      // output: every loan gathered so far
    item_ub: ast::node_id,        // body block id of the enclosing item
    repeating_ids: ~[ast::node_id] // stack of loop/closure body ids (innermost last)
}
/// Entry point for phase one of borrow checking: walks a single fn
/// `body` and returns the node-id range seen plus all gathered loans.
pub fn gather_loans(bccx: @BorrowckCtxt,
                    body: &ast::blk) -> (id_range, @mut ~[Loan]) {
    let glcx = @mut GatherLoanCtxt {
        bccx: bccx,
        id_range: id_range::max(),
        all_loans: @mut ~[],
        item_ub: body.node.id,
        repeating_ids: ~[body.node.id]
    };
    let v = visit::mk_vt(@visit::Visitor {visit_expr: gather_loans_in_expr,
                                          visit_block: gather_loans_in_block,
                                          visit_fn: gather_loans_in_fn,
                                          visit_stmt: add_stmt_to_map,
                                          visit_pat: add_pat_to_id_range,
                                          .. *visit::default_visitor()});
    (v.visit_block)(body, glcx, v);
    return (glcx.id_range, glcx.all_loans);
}
fn add_pat_to_id_range(p: @ast::pat,
                       self: @mut GatherLoanCtxt,
                       v: visit::vt<@mut GatherLoanCtxt>) {
    // NB: This visitor function just adds the pat ids into the id
    // range. We gather loans that occur in patterns using the
    // `gather_pat()` method below. Eventually these two should be
    // brought together.
    self.id_range.add(p.id);
    visit::visit_pat(p, self, v);
}
/// Visitor callback for nested fns: skips items (handled by the outer
/// borrowck loop) and visits closures within the enclosing item,
/// pushing their body onto the repeating-id stack.
fn gather_loans_in_fn(fk: &visit::fn_kind,
                      decl: &ast::fn_decl,
                      body: &ast::blk,
                      sp: span,
                      id: ast::node_id,
                      self: @mut GatherLoanCtxt,
                      v: visit::vt<@mut GatherLoanCtxt>) {
    match fk {
        // Do not visit items here, the outer loop in borrowck/mod
        // will visit them for us in turn.
        &visit::fk_item_fn(*) | &visit::fk_method(*) => {
            return;
        }

        // Visit closures as part of the containing item.
        &visit::fk_anon(*) | &visit::fk_fn_block(*) => {
            self.push_repeating_id(body.node.id);
            visit::visit_fn(fk, decl, body, sp, id, self, v);
            self.pop_repeating_id(body.node.id);
        }
    }
}
/// Visitor callback for blocks: records the block id and recurses.
fn gather_loans_in_block(blk: &ast::blk,
                         self: @mut GatherLoanCtxt,
                         vt: visit::vt<@mut GatherLoanCtxt>) {
    self.id_range.add(blk.node.id);
    visit::visit_block(blk, self, vt);
}
/// Visitor callback for expressions: gathers loans implied by borrows
/// (`&e`), adjustments, match bindings, and overloaded-operator method
/// calls, and maintains the repeating-id stack across loops.
fn gather_loans_in_expr(ex: @ast::expr,
                        self: @mut GatherLoanCtxt,
                        vt: visit::vt<@mut GatherLoanCtxt>) {
    let bccx = self.bccx;
    let tcx = bccx.tcx;

    debug!("gather_loans_in_expr(expr=%?/%s)",
           ex.id, pprust::expr_to_str(ex, tcx.sess.intr()));

    self.id_range.add(ex.id);
    self.id_range.add(ex.callee_id);

    // If this expression is borrowed, have to ensure it remains valid:
    for tcx.adjustments.find(&ex.id).each |&adjustments| {
        self.guarantee_adjustments(ex, *adjustments);
    }

    // Special checks for various kinds of expressions:
    match ex.node {
      ast::expr_addr_of(mutbl, base) => {
        let base_cmt = self.bccx.cat_expr(base);

        // make sure that the thing we are pointing out stays valid
        // for the lifetime `scope_r` of the resulting ptr:
        let scope_r = ty_region(tcx, ex.span, ty::expr_ty(tcx, ex));
        self.guarantee_valid(ex.id, ex.span, base_cmt, mutbl, scope_r);
        visit::visit_expr(ex, self, vt);
      }

      ast::expr_match(ex_v, ref arms) => {
        let cmt = self.bccx.cat_expr(ex_v);
        // each pattern in each arm may induce borrows (e.g. `ref x`)
        for arms.each |arm| {
            for arm.pats.each |pat| {
                self.gather_pat(cmt, *pat, arm.body.node.id, ex.id);
            }
        }
        visit::visit_expr(ex, self, vt);
      }

      ast::expr_index(_, arg) |
      ast::expr_binary(_, _, arg)
      if self.bccx.method_map.contains_key(&ex.id) => {
        // Arguments in method calls are always passed by ref.
        //
        // Currently these do not use adjustments, so we have to
        // hardcode this check here (note that the receiver DOES use
        // adjustments).
        let scope_r = ty::re_scope(ex.id);
        let arg_cmt = self.bccx.cat_expr(arg);
        self.guarantee_valid(arg.id, arg.span, arg_cmt, m_imm, scope_r);
        visit::visit_expr(ex, self, vt);
      }

      // see explanation attached to the `root_ub` field:
      ast::expr_while(cond, ref body) => {
        // during the condition, can only root for the condition
        self.push_repeating_id(cond.id);
        (vt.visit_expr)(cond, self, vt);
        self.pop_repeating_id(cond.id);

        // during body, can only root for the body
        self.push_repeating_id(body.node.id);
        (vt.visit_block)(body, self, vt);
        self.pop_repeating_id(body.node.id);
      }

      // see explanation attached to the `root_ub` field:
      ast::expr_loop(ref body, _) => {
        self.push_repeating_id(body.node.id);
        visit::visit_expr(ex, self, vt);
        self.pop_repeating_id(body.node.id);
      }

      _ => {
        visit::visit_expr(ex, self, vt);
      }
    }
}
pub impl GatherLoanCtxt {
    fn tcx(&self) -> ty::ctxt { self.bccx.tcx }

    // Enter a "repeating scope" (loop body, while cond, closure body).
    // The innermost repeating id bounds how long @-boxes may be rooted
    // (see `root_ub` below).
    fn push_repeating_id(&mut self, id: ast::node_id) {
        self.repeating_ids.push(id);
    }

    // Leave a repeating scope; `id` must match the innermost push.
    fn pop_repeating_id(&mut self, id: ast::node_id) {
        let popped = self.repeating_ids.pop();
        assert!(id == popped);
    }

    // Handles implicit borrows created by vtable/autoref adjustments
    // recorded by typeck: each autoref is translated into a
    // `guarantee_valid` obligation on the (auto-dereferenced) base.
    fn guarantee_adjustments(&mut self,
                             expr: @ast::expr,
                             adjustment: &ty::AutoAdjustment) {
        debug!("guarantee_adjustments(expr=%s, adjustment=%?)",
               expr.repr(self.tcx()), adjustment);
        let _i = indenter();

        match *adjustment {
            ty::AutoAddEnv(*) => {
                debug!("autoaddenv -- no autoref");
                return;
            }

            ty::AutoDerefRef(
                ty::AutoDerefRef {
                    autoref: None, _ }) => {
                // Pure autoderefs create no new pointers, hence no loans.
                debug!("no autoref");
                return;
            }

            ty::AutoDerefRef(
                ty::AutoDerefRef {
                    autoref: Some(ref autoref),
                    autoderefs: autoderefs}) => {
                let mcx = &mc::mem_categorization_ctxt {
                    tcx: self.tcx(),
                    method_map: self.bccx.method_map};
                // Categorize the value *after* the autoderefs, since
                // that is what the autoref actually points at.
                let cmt = mcx.cat_expr_autoderefd(expr, autoderefs);
                debug!("after autoderef, cmt=%s", cmt.repr(self.tcx()));

                match *autoref {
                    ty::AutoPtr(r, m) => {
                        self.guarantee_valid(expr.id,
                                             expr.span,
                                             cmt,
                                             m,
                                             r)
                    }
                    ty::AutoBorrowVec(r, m) | ty::AutoBorrowVecRef(r, m) => {
                        // Slicing a vector borrows its elements, so the
                        // obligation is placed on the indexed content.
                        let cmt_index = mcx.cat_index(expr, cmt);
                        self.guarantee_valid(expr.id,
                                             expr.span,
                                             cmt_index,
                                             m,
                                             r)
                    }
                    ty::AutoBorrowFn(r) => {
                        let cmt_deref = mcx.cat_deref_fn(expr, cmt, 0);
                        self.guarantee_valid(expr.id,
                                             expr.span,
                                             cmt_deref,
                                             m_imm,
                                             r)
                    }
                    ty::AutoUnsafe(_) => {}
                }
            }
        }
    }

    // Guarantees that addr_of(cmt) will be valid for the duration of
    // `loan_region`, or reports an error. This may entail taking
    // out loans, which will be added to `self.all_loans`. This can
    // also entail "rooting" GC'd pointers, which means ensuring
    // dynamically that they are not freed.
    fn guarantee_valid(&mut self,
                       borrow_id: ast::node_id,
                       borrow_span: span,
                       cmt: mc::cmt,
                       req_mutbl: ast::mutability,
                       loan_region: ty::Region)
    {
        debug!("guarantee_valid(borrow_id=%?, cmt=%s, \
                req_mutbl=%?, loan_region=%?)",
               borrow_id,
               cmt.repr(self.tcx()),
               req_mutbl,
               loan_region);

        // a loan for the empty region can never be dereferenced, so
        // it is always safe
        if loan_region == ty::re_empty {
            return;
        }

        // upper bound for rooting @-boxes: the innermost repeating scope
        let root_ub = { *self.repeating_ids.last() }; // FIXME(#5074)

        // Check that the lifetime of the borrow does not exceed
        // the lifetime of the data being borrowed.
        lifetime::guarantee_lifetime(self.bccx, self.item_ub, root_ub,
                                     borrow_span, cmt, loan_region, req_mutbl);

        // Check that we don't allow mutable borrows of non-mutable data.
        check_mutability(self.bccx, borrow_span, cmt, req_mutbl);

        // Compute the restrictions that are required to enforce the
        // loan is safe.
        let restr = restrictions::compute_restrictions(
            self.bccx, borrow_span,
            cmt, self.restriction_set(req_mutbl));

        // Create the loan record (if needed).
        let loan = match restr {
            restrictions::Safe => {
                // No restrictions---no loan record necessary
                return;
            }

            restrictions::SafeIf(loan_path, restrictions) => {
                // Translate the loan region into the scope (node id)
                // for which the restrictions must be enforced.
                let loan_scope = match loan_region {
                    ty::re_scope(id) => id,
                    ty::re_free(ref fr) => fr.scope_id,

                    ty::re_static => {
                        // If we get here, an error must have been
                        // reported in
                        // `lifetime::guarantee_lifetime()`, because
                        // the only legal ways to have a borrow with a
                        // static lifetime should not require
                        // restrictions. To avoid reporting derived
                        // errors, we just return here without adding
                        // any loans.
                        return;
                    }

                    ty::re_empty |
                    ty::re_bound(*) |
                    ty::re_infer(*) => {
                        self.tcx().sess.span_bug(
                            cmt.span,
                            fmt!("Invalid borrow lifetime: %?", loan_region));
                    }
                };
                debug!("loan_scope = %?", loan_scope);

                let gen_scope = self.compute_gen_scope(borrow_id, loan_scope);
                debug!("gen_scope = %?", gen_scope);

                let kill_scope = self.compute_kill_scope(loan_scope, loan_path);
                debug!("kill_scope = %?", kill_scope);

                if req_mutbl == m_mutbl {
                    self.mark_loan_path_as_mutated(loan_path);
                }

                let all_loans = &mut *self.all_loans; // FIXME(#5074)
                Loan {
                    index: all_loans.len(),
                    loan_path: loan_path,
                    cmt: cmt,
                    mutbl: req_mutbl,
                    gen_scope: gen_scope,
                    kill_scope: kill_scope,
                    span: borrow_span,
                    restrictions: restrictions
                }
            }
        };

        debug!("guarantee_valid(borrow_id=%?), loan=%s",
               borrow_id, loan.repr(self.tcx()));

        // let loan_path = loan.loan_path;
        // let loan_gen_scope = loan.gen_scope;
        // let loan_kill_scope = loan.kill_scope;
        self.all_loans.push(loan);

        // if loan_gen_scope != borrow_id {
            // FIXME(#6268) Nested method calls
            //
            // Typically, the scope of the loan includes the point at
            // which the loan is originated. This
            // This is a subtle case. See the test case
            // <compile-fail/borrowck-bad-nested-calls-free.rs>
            // to see what we are guarding against.

            //let restr = restrictions::compute_restrictions(
            //    self.bccx, borrow_span, cmt, RESTR_EMPTY);
            //let loan = {
            //    let all_loans = &mut *self.all_loans; // FIXME(#5074)
            //    Loan {
            //        index: all_loans.len(),
            //        loan_path: loan_path,
            //        cmt: cmt,
            //        mutbl: m_const,
            //        gen_scope: borrow_id,
            //        kill_scope: kill_scope,
            //        span: borrow_span,
            //        restrictions: restrictions
            //    }
            // }

        // Nested helper: rejects borrows whose requested mutability is
        // incompatible with the mutability of the borrowed lvalue.
        fn check_mutability(bccx: @BorrowckCtxt,
                            borrow_span: span,
                            cmt: mc::cmt,
                            req_mutbl: ast::mutability) {
            match req_mutbl {
                m_const => {
                    // Data of any mutability can be lent as const.
                }

                m_imm => {
                    match cmt.mutbl {
                        mc::McImmutable | mc::McDeclared | mc::McInherited => {
                            // both imm and mut data can be lent as imm;
                            // for mutable data, this is a freeze
                        }
                        mc::McReadOnly => {
                            bccx.report(BckError {span: borrow_span,
                                                  cmt: cmt,
                                                  code: err_mutbl(req_mutbl)});
                        }
                    }
                }

                m_mutbl => {
                    // Only mutable data can be lent as mutable.
                    if !cmt.mutbl.is_mutable() {
                        bccx.report(BckError {span: borrow_span,
                                              cmt: cmt,
                                              code: err_mutbl(req_mutbl)});
                    }
                }
            }
        }
    }

    // Maps the requested borrow mutability to the set of restrictions
    // that must hold on the loan path: a const borrow restricts
    // nothing, an immutable borrow forbids mutation, and a mutable
    // borrow forbids both mutation and freezing.
    fn restriction_set(&self, req_mutbl: ast::mutability) -> RestrictionSet {
        match req_mutbl {
            m_const => RESTR_EMPTY,
            m_imm => RESTR_EMPTY | RESTR_MUTATE,
            m_mutbl => RESTR_EMPTY | RESTR_MUTATE | RESTR_FREEZE
        }
    }

    fn mark_loan_path_as_mutated(&self, loan_path: @LoanPath) {
        //! For mutable loans of content whose mutability derives
        //! from a local variable, mark the mutability decl as necessary.

        match *loan_path {
            LpVar(local_id) => {
                self.tcx().used_mut_nodes.insert(local_id);
            }
            LpExtend(base, mc::McInherited, _) => {
                // Inherited mutability: the `mut` that matters is on
                // some enclosing path, so recurse to the base.
                self.mark_loan_path_as_mutated(base);
            }
            LpExtend(_, mc::McDeclared, _) |
            LpExtend(_, mc::McImmutable, _) |
            LpExtend(_, mc::McReadOnly, _) => {
            }
        }
    }

    fn compute_gen_scope(&self,
                         borrow_id: ast::node_id,
                         loan_scope: ast::node_id) -> ast::node_id {
        //! Determine when to introduce the loan. Typically the loan
        //! is introduced at the point of the borrow, but in some cases,
        //! notably method arguments, the loan may be introduced only
        //! later, once it comes into scope.

        let rm = self.bccx.tcx.region_maps;
        if rm.is_subscope_of(borrow_id, loan_scope) {
            borrow_id
        } else {
            loan_scope
        }
    }

    fn compute_kill_scope(&self,
                          loan_scope: ast::node_id,
                          lp: @LoanPath) -> ast::node_id {
        //! Determine when the loan restrictions go out of scope.
        //! This is either when the lifetime expires or when the
        //! local variable which roots the loan-path goes out of scope,
        //! whichever happens faster.
        //!
        //! It may seem surprising that we might have a loan region
        //! larger than the variable which roots the loan-path; this can
        //! come about when variables of `&mut` type are re-borrowed,
        //! as in this example:
        //!
        //!     fn counter<'a>(v: &'a mut Foo) -> &'a mut uint {
        //!         &mut v.counter
        //!     }
        //!
        //! In this case, the borrowed pointer (`'a`) outlives the
        //! variable `v` that hosts it. Note that this doesn't come up
        //! with immutable `&` pointers, because borrows of such pointers
        //! do not require restrictions and hence do not cause a loan.

        let rm = self.bccx.tcx.region_maps;
        let lexical_scope = rm.encl_scope(lp.node_id());
        if rm.is_subscope_of(lexical_scope, loan_scope) {
            lexical_scope
        } else {
            assert!(rm.is_subscope_of(loan_scope, lexical_scope));
            loan_scope
        }
    }

    // Walks a match pattern against the categorization of the
    // discriminant, registering borrows for `ref` bindings and for
    // vector slice patterns (both create pointers into the matched
    // value that must stay valid).
    fn gather_pat(&mut self,
                  discr_cmt: mc::cmt,
                  root_pat: @ast::pat,
                  arm_body_id: ast::node_id,
                  match_id: ast::node_id) {
        do self.bccx.cat_pattern(discr_cmt, root_pat) |cmt, pat| {
            match pat.node {
              ast::pat_ident(bm, _, _) if self.pat_is_binding(pat) => {
                match bm {
                  ast::bind_by_ref(mutbl) => {
                    // ref x or ref x @ p --- creates a ptr which must
                    // remain valid for the scope of the match

                    // find the region of the resulting pointer (note that
                    // the type of such a pattern will *always* be a
                    // region pointer)
                    let scope_r =
                        ty_region(self.tcx(), pat.span,
                                  ty::node_id_to_type(self.tcx(), pat.id));

                    // if the scope of the region ptr turns out to be
                    // specific to this arm, wrap the categorization
                    // with a cat_discr() node.  There is a detailed
                    // discussion of the function of this node in
                    // `lifetime.rs`:
                    let arm_scope = ty::re_scope(arm_body_id);
                    if self.bccx.is_subregion_of(scope_r, arm_scope) {
                        let cmt_discr = self.bccx.cat_discr(cmt, match_id);
                        self.guarantee_valid(pat.id, pat.span,
                                             cmt_discr, mutbl, scope_r);
                    } else {
                        self.guarantee_valid(pat.id, pat.span,
                                             cmt, mutbl, scope_r);
                    }
                  }
                  ast::bind_by_copy | ast::bind_infer => {
                    // Nothing to do here; neither copies nor moves induce
                    // borrows.
                  }
                }
              }

              ast::pat_vec(_, Some(slice_pat), _) => {
                  // The `slice_pat` here creates a slice into the
                  // original vector.  This is effectively a borrow of
                  // the elements of the vector being matched.

                  let slice_ty = ty::node_id_to_type(self.tcx(),
                                                     slice_pat.id);
                  let (slice_mutbl, slice_r) =
                      self.vec_slice_info(slice_pat, slice_ty);
                  let mcx = self.bccx.mc_ctxt();
                  let cmt_index = mcx.cat_index(slice_pat, cmt);
                  self.guarantee_valid(pat.id, pat.span,
                                       cmt_index, slice_mutbl, slice_r);
              }

              _ => {}
            }
        }
    }

    fn vec_slice_info(&self,
                      pat: @ast::pat,
                      slice_ty: ty::t) -> (ast::mutability, ty::Region) {
        /*!
         *
         * In a pattern like [a, b, ..c], normally `c` has slice type,
         * but if you have [a, b, ..ref c], then the type of `ref c`
         * will be `&&[]`, so to extract the slice details we have
         * to recurse through rptrs.
         */

        match ty::get(slice_ty).sty {
            ty::ty_evec(slice_mt, ty::vstore_slice(slice_r)) => {
                (slice_mt.mutbl, slice_r)
            }

            ty::ty_rptr(_, ref mt) => {
                self.vec_slice_info(pat, mt.ty)
            }

            _ => {
                self.tcx().sess.span_bug(
                    pat.span,
                    fmt!("Type of slice pattern is not a slice"));
            }
        }
    }

    // True if `pat` refers to an enum variant or struct (resolved in
    // the def map) rather than a fresh binding.
    fn pat_is_variant_or_struct(&self, pat: @ast::pat) -> bool {
        pat_util::pat_is_variant_or_struct(self.bccx.tcx.def_map, pat)
    }

    // True if `pat` introduces a new binding.
    fn pat_is_binding(&self, pat: @ast::pat) -> bool {
        pat_util::pat_is_binding(self.bccx.tcx.def_map, pat)
    }
}
// Setting up info that preserve needs.
// This is just the most convenient place to do it.
// NOTE(review): the "preserve" pass this comment refers to appears to
// have been removed; presumably `stmt_map` is still consumed elsewhere
// in borrowck --- confirm before deleting this visitor.
fn add_stmt_to_map(stmt: @ast::stmt,
                   self: @mut GatherLoanCtxt,
                   vt: visit::vt<@mut GatherLoanCtxt>) {
    match stmt.node {
        // Record the ids of expression and semi statements.
        ast::stmt_expr(_, id) | ast::stmt_semi(_, id) => {
            self.bccx.stmt_map.insert(id);
        }
        _ => ()
    }
    visit::visit_stmt(stmt, self, vt);
}

View file

@ -0,0 +1,249 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Computes the restrictions that result from a borrow.
use core::prelude::*;
use middle::borrowck::*;
use mc = middle::mem_categorization;
use middle::ty;
use syntax::ast::{m_const, m_imm, m_mutbl};
use syntax::codemap::span;
/// Result of computing the restrictions required by a borrow.
pub enum RestrictionResult {
    // The borrow is inherently valid; no loan record is required.
    Safe,

    // The borrow is valid only if the given restrictions are enforced
    // on the given loan path for the duration of the loan.
    SafeIf(@LoanPath, ~[Restriction])
}
/// Entry point: computes the restrictions needed for a borrow of
/// `cmt` requiring the rights in `restr` to remain valid, reporting
/// errors against `span` when aliasing cannot be controlled.
pub fn compute_restrictions(bccx: @BorrowckCtxt,
                            span: span,
                            cmt: mc::cmt,
                            restr: RestrictionSet) -> RestrictionResult {
    let cx = RestrictionsContext {
        bccx: bccx,
        cmt_original: cmt,
        span: span
    };
    cx.compute(cmt, restr)
}
///////////////////////////////////////////////////////////////////////////
// Private
/// Context threaded through the restriction computation.
struct RestrictionsContext {
    bccx: @BorrowckCtxt,
    // span of the borrow expression, used when reporting errors
    span: span,
    // the cmt of the value originally borrowed (as opposed to the
    // base cmts visited while walking up the categorization chain)
    cmt_original: mc::cmt
}
impl RestrictionsContext {
    fn tcx(&self) -> ty::ctxt {
        self.bccx.tcx
    }

    // Core recursion: walks up the categorization chain of `cmt`,
    // accumulating the restrictions that must hold on each component
    // of the loan path so that the rights in `restrictions` are
    // enforceable.
    fn compute(&self,
               cmt: mc::cmt,
               restrictions: RestrictionSet) -> RestrictionResult {

        // Check for those cases where we cannot control the aliasing
        // and make sure that we are not being asked to.
        match cmt.freely_aliasable() {
            None => {}
            Some(cause) => {
                self.check_aliasing_permitted(cause, restrictions);
            }
        }

        match cmt.cat {
            mc::cat_rvalue => {
                // Effectively, rvalues are stored into a
                // non-aliasable temporary on the stack. Since they
                // are inherently non-aliasable, they can only be
                // accessed later through the borrow itself and hence
                // must inherently comply with its terms.
                Safe
            }

            mc::cat_local(local_id) |
            mc::cat_arg(local_id) |
            mc::cat_self(local_id) => {
                // Base case: a local variable, argument, or `self`.
                // The restrictions attach directly to its loan path.
                let lp = @LpVar(local_id);
                SafeIf(lp, ~[Restriction {loan_path: lp,
                                          set: restrictions}])
            }

            mc::cat_interior(cmt_base, i @ mc::interior_variant(_)) => {
                // When we borrow the interior of an enum, we have to
                // ensure the enum itself is not mutated, because that
                // could cause the type of the memory to change.
                let result = self.compute(cmt_base, restrictions | RESTR_MUTATE);
                self.extend(result, cmt.mutbl, LpInterior(i), restrictions)
            }

            mc::cat_interior(cmt_base, i @ mc::interior_tuple) |
            mc::cat_interior(cmt_base, i @ mc::interior_anon_field) |
            mc::cat_interior(cmt_base, i @ mc::interior_field(*)) |
            mc::cat_interior(cmt_base, i @ mc::interior_index(*)) => {
                // For all of these cases, overwriting the base would
                // not change the type of the memory, so no additional
                // restrictions are needed.
                //
                // FIXME(#5397) --- Mut fields are not treated soundly
                //                  (hopefully they will just get phased out)
                let result = self.compute(cmt_base, restrictions);
                self.extend(result, cmt.mutbl, LpInterior(i), restrictions)
            }

            mc::cat_deref(cmt_base, _, mc::uniq_ptr(*)) => {
                // When we borrow the interior of an owned pointer, we
                // cannot permit the base to be mutated, because that
                // would cause the unique pointer to be freed.
                let result = self.compute(cmt_base, restrictions | RESTR_MUTATE);
                self.extend(result, cmt.mutbl, LpDeref, restrictions)
            }

            mc::cat_copied_upvar(*) | // FIXME(#2152) allow mutation of upvars
            mc::cat_static_item(*) |
            mc::cat_implicit_self(*) |
            mc::cat_deref(_, _, mc::region_ptr(m_imm, _)) |
            mc::cat_deref(_, _, mc::gc_ptr(m_imm)) => {
                // Immutable (or immutably-shared) data: nothing to
                // restrict, and nothing that could violate the loan.
                Safe
            }

            mc::cat_deref(_, _, mc::region_ptr(m_const, _)) |
            mc::cat_deref(_, _, mc::gc_ptr(m_const)) => {
                self.check_no_mutability_control(cmt, restrictions);
                Safe
            }

            mc::cat_deref(cmt_base, _, mc::gc_ptr(m_mutbl)) => {
                // Technically, no restrictions are *necessary* here.
                // The validity of the borrow is guaranteed
                // dynamically.  However, nonetheless we add a
                // restriction to make a "best effort" to report
                // static errors. For example, if there is code like
                //
                //    let v = @mut ~[1, 2, 3];
                //    for v.each |e| {
                //        v.push(e + 1);
                //    }
                //
                // Then the code below would add restrictions on `*v`,
                // which means that an error would be reported
                // here. This of course is not perfect. For example,
                // a function like the following would not report an error
                // at compile-time but would fail dynamically:
                //
                //    let v = @mut ~[1, 2, 3];
                //    let w = v;
                //    for v.each |e| {
                //        w.push(e + 1);
                //    }
                //
                // In addition, we only add a restriction for those cases
                // where we can construct a sensible loan path, so an
                // example like the following will fail dynamically:
                //
                //    impl V {
                //      fn get_list(&self) -> @mut ~[int];
                //    }
                //    ...
                //    let v: &V = ...;
                //    for v.get_list().each |e| {
                //        v.get_list().push(e + 1);
                //    }
                match opt_loan_path(cmt_base) {
                    None => Safe,
                    Some(lp_base) => {
                        let lp = @LpExtend(lp_base, cmt.mutbl, LpDeref);
                        SafeIf(lp, ~[Restriction {loan_path: lp,
                                                  set: restrictions}])
                    }
                }
            }

            mc::cat_deref(cmt_base, _, mc::region_ptr(m_mutbl, _)) => {
                // Because an `&mut` pointer does not inherit its
                // mutability, we can only prevent mutation or prevent
                // freezing if it is not aliased. Therefore, in such
                // cases we restrict aliasing on `cmt_base`.
                if restrictions.intersects(RESTR_MUTATE | RESTR_FREEZE) {
                    let result = self.compute(cmt_base, restrictions | RESTR_ALIAS);
                    self.extend(result, cmt.mutbl, LpDeref, restrictions)
                } else {
                    let result = self.compute(cmt_base, restrictions);
                    self.extend(result, cmt.mutbl, LpDeref, restrictions)
                }
            }

            mc::cat_deref(_, _, mc::unsafe_ptr) => {
                // We are very trusting when working with unsafe pointers.
                Safe
            }

            mc::cat_stack_upvar(cmt_base) |
            mc::cat_discr(cmt_base, _) => {
                // Transparent wrappers: restrictions fall through to
                // the underlying categorization.
                self.compute(cmt_base, restrictions)
            }
        }
    }

    // Extends a result from a base cmt with one more loan-path element,
    // adding a restriction for the extended path (unless already Safe).
    fn extend(&self,
              result: RestrictionResult,
              mc: mc::MutabilityCategory,
              elem: LoanPathElem,
              restrictions: RestrictionSet) -> RestrictionResult {
        match result {
            Safe => Safe,
            SafeIf(base_lp, base_vec) => {
                let lp = @LpExtend(base_lp, mc, elem);
                SafeIf(lp, vec::append_one(base_vec,
                                           Restriction {loan_path: lp,
                                                        set: restrictions}))
            }
        }
    }

    fn check_aliasing_permitted(&self,
                                cause: mc::AliasableReason,
                                restrictions: RestrictionSet) {
        //! This method is invoked when the current `cmt` is something
        //! where aliasing cannot be controlled. It reports an error if
        //! the restrictions required that it not be aliased; currently
        //! this only occurs when re-borrowing an `&mut` pointer.
        //!
        //! NB: To be 100% consistent, we should report an error if
        //! RESTR_FREEZE is found, because we cannot prevent freezing,
        //! nor would we want to. However, we do not report such an
        //! error, because this restriction only occurs when the user
        //! is creating an `&mut` pointer to immutable or read-only
        //! data, and there is already another piece of code that
        //! checks for this condition.

        if restrictions.intersects(RESTR_ALIAS) {
            self.bccx.report_aliasability_violation(
                self.span,
                BorrowViolation,
                cause);
        }
    }

    // Borrowing through a `const` pointer cannot control mutation or
    // freezing of the referent; report an error if either was required.
    fn check_no_mutability_control(&self,
                                   cmt: mc::cmt,
                                   restrictions: RestrictionSet) {
        if restrictions.intersects(RESTR_MUTATE | RESTR_FREEZE) {
            self.bccx.report(BckError {span: self.span,
                                       cmt: cmt,
                                       code: err_freeze_aliasable_const});
        }
    }
}

View file

@ -1,311 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
The `Loan` module deals with borrows of *uniquely mutable* data. We
say that data is uniquely mutable if the current activation (stack
frame) controls the only mutable reference to the data. The most
common way that this can occur is if the current activation owns the
data being borrowed, but it can also occur with `&mut` pointers. The
primary characteristic of uniquely mutable data is that, at any given
time, there is at most one path that can be used to mutate it, and
that path is only accessible from the top stack frame.
Given that some data found at a path P is being borrowed to a borrowed
pointer with mutability M and lifetime L, the job of the code in this
module is to compute the set of *loans* that are necessary to ensure
that (1) the data found at P outlives L and that (2) if M is mutable
then the path P will not be modified directly or indirectly except
through that pointer. A *loan* is the combination of a path P_L, a
mutability M_L, and a lifetime L_L where:
- The path P_L indicates what data has been lent.
- The mutability M_L indicates the access rights on the data:
- const: the data cannot be moved
- immutable/mutable: the data cannot be moved or mutated
- The lifetime L_L indicates the *scope* of the loan.
FIXME #4730 --- much more needed, don't have time to write this all up now
*/
// ----------------------------------------------------------------------
// Loan(Ex, M, S) = Ls holds if ToAddr(Ex) will remain valid for the entirety
// of the scope S, presuming that the returned set of loans `Ls` are honored.
use middle::borrowck::{Loan, bckerr, bckres, BorrowckCtxt, err_mutbl};
use middle::borrowck::{LoanKind, TotalFreeze, PartialFreeze,
TotalTake, PartialTake, Immobile};
use middle::borrowck::{err_out_of_scope};
use middle::mem_categorization::{cat_arg, cat_binding, cat_discr, cat_comp};
use middle::mem_categorization::{cat_deref, cat_discr, cat_local, cat_self};
use middle::mem_categorization::{cat_special, cat_stack_upvar, cmt};
use middle::mem_categorization::{comp_field, comp_index, comp_variant};
use middle::mem_categorization::{gc_ptr, region_ptr};
use middle::ty;
use util::common::indenter;
use syntax::ast::m_imm;
use syntax::ast;
// Entry point: computes the set of loans required so that `cmt` may
// be borrowed with kind `loan_kind` for the region `scope_region`,
// or returns the borrowck error that prevents it.
pub fn loan(bccx: @BorrowckCtxt,
            cmt: cmt,
            scope_region: ty::Region,
            loan_kind: LoanKind) -> bckres<~[Loan]>
{
    let mut lc = LoanContext {
        bccx: bccx,
        scope_region: scope_region,
        loans: ~[]
    };
    match lc.loan(cmt, loan_kind, true) {
        Err(ref e) => return Err((*e)),
        Ok(()) => {}
    }
    // FIXME #4945: Workaround for borrow check bug.
    Ok(copy lc.loans)
}
// Mutable state threaded through the loan computation (see `loan()`).
struct LoanContext {
    bccx: @BorrowckCtxt,

    // the region scope for which we must preserve the memory
    scope_region: ty::Region,

    // accumulated list of loans that will be required
    loans: ~[Loan]
}
pub impl LoanContext {
    fn tcx(&self) -> ty::ctxt { self.bccx.tcx }

    fn loan(&mut self,
            cmt: cmt,
            loan_kind: LoanKind,
            owns_lent_data: bool) -> bckres<()>
    {
        /*!
         *
         * The main routine.
         *
         * # Parameters
         *
         * - `cmt`: the categorization of the data being borrowed
         * - `loan_kind`: the kind of loan being requested (freeze,
         *                take, immobile, ...)
         * - `owns_lent_data`: indicates whether `cmt` owns the
         *                     data that is being lent.  See
         *                     discussion in `issue_loan()`.
         */

        debug!("loan(%s, %?)",
               self.bccx.cmt_to_repr(cmt),
               loan_kind);
        let _i = indenter();

        // see stable() above; should only be called when `cmt` is lendable
        if cmt.lp.is_none() {
            self.bccx.tcx.sess.span_bug(
                cmt.span,
                "loan() called with non-lendable value");
        }

        match cmt.cat {
            cat_binding(_) | cat_rvalue | cat_special(_) => {
                // should never be loanable
                self.bccx.tcx.sess.span_bug(
                    cmt.span,
                    "rvalue with a non-none lp");
            }
            cat_local(local_id) | cat_arg(local_id) | cat_self(local_id) => {
                // Base case: lend a local/arg/self for its enclosing
                // region. FIXME(#4903)
                let local_region = self.bccx.tcx.region_maps.encl_region(local_id);
                self.issue_loan(cmt, local_region, loan_kind,
                                owns_lent_data)
            }
            cat_stack_upvar(cmt) => {
                // Transparent wrapper: loan the underlying value.
                self.loan(cmt, loan_kind, owns_lent_data)
            }
            cat_discr(base, _) => {
                // Transparent wrapper: loan the underlying value.
                self.loan(base, loan_kind, owns_lent_data)
            }
            cat_comp(cmt_base, comp_field(_, m)) |
            cat_comp(cmt_base, comp_index(_, m)) => {
                // For most components, the type of the embedded data is
                // stable.  Therefore, the base structure need only be
                // const---unless the component must be immutable.  In
                // that case, it must also be embedded in an immutable
                // location, or else the whole structure could be
                // overwritten and the component along with it.
                self.loan_stable_comp(cmt, cmt_base, loan_kind, m,
                                      owns_lent_data)
            }
            cat_comp(cmt_base, comp_tuple) |
            cat_comp(cmt_base, comp_anon_field) => {
                // As above.
                self.loan_stable_comp(cmt, cmt_base, loan_kind, m_imm,
                                      owns_lent_data)
            }
            cat_comp(cmt_base, comp_variant(enum_did)) => {
                // For enums, the memory is unstable if there are multiple
                // variants, because if the enum value is overwritten then
                // the memory changes type.
                if ty::enum_is_univariant(self.bccx.tcx, enum_did) {
                    self.loan_stable_comp(cmt, cmt_base, loan_kind, m_imm,
                                          owns_lent_data)
                } else {
                    self.loan_unstable_deref(cmt, cmt_base, loan_kind,
                                             owns_lent_data)
                }
            }
            cat_deref(cmt_base, _, uniq_ptr) => {
                // For unique pointers, the memory being pointed out is
                // unstable because if the unique pointer is overwritten
                // then the memory is freed.
                self.loan_unstable_deref(cmt, cmt_base, loan_kind,
                                         owns_lent_data)
            }
            cat_deref(cmt_base, _, region_ptr(ast::m_mutbl, region)) => {
                // Mutable data can be loaned out as immutable or const. We must
                // loan out the base as well as the main memory. For example,
                // if someone borrows `*b`, we want to borrow `b` as immutable
                // as well.
                do self.loan(cmt_base, TotalFreeze, false).chain |_| {
                    self.issue_loan(cmt, region, loan_kind, owns_lent_data)
                }
            }
            cat_deref(_, _, unsafe_ptr) |
            cat_deref(_, _, gc_ptr(_)) |
            cat_deref(_, _, region_ptr(_, _)) => {
                // Aliased data is simply not lendable.
                self.bccx.tcx.sess.span_bug(
                    cmt.span,
                    "aliased ptr with a non-none lp");
            }
        }
    }

    // A "stable component" is one where assigning the base of the
    // component cannot cause the component itself to change types.
    // Example: record fields.
    fn loan_stable_comp(&mut self,
                        cmt: cmt,
                        cmt_base: cmt,
                        loan_kind: LoanKind,
                        comp_mutbl: ast::mutability,
                        owns_lent_data: bool) -> bckres<()>
    {
        // Derive the loan kind required of the base from the
        // component's declared mutability and the requested kind.
        let base_kind = match (comp_mutbl, loan_kind) {
            // Declared as "immutable" means: inherited mutability and
            // hence mutable iff parent is mutable.  So propagate
            // mutability on up.
            (m_imm, TotalFreeze) | (m_imm, PartialFreeze) => PartialFreeze,
            (m_imm, TotalTake) | (m_imm, PartialTake) => PartialTake,

            // Declared as "mutable" means: always mutable no matter
            // what the mutability of the base is.  So that means we
            // can weaken the condition on the base to PartialFreeze.
            // This implies that the user could freeze the base, but
            // that is ok since the even with an &T base, the mut
            // field will still be considered mutable.
            (_, TotalTake) | (_, PartialTake) |
            (_, TotalFreeze) | (_, PartialFreeze) => {
                PartialFreeze
            }

            // If we just need to guarantee the value won't be moved,
            // it doesn't matter what mutability the component was
            // declared with.
            (_, Immobile) => Immobile,
        };

        do self.loan(cmt_base, base_kind, owns_lent_data).chain |_ok| {
            // can use static for the scope because the base
            // determines the lifetime, ultimately
            self.issue_loan(cmt, ty::re_static, loan_kind,
                            owns_lent_data)
        }
    }

    // An "unstable deref" means a deref of a ptr/comp where, if the
    // base of the deref is assigned to, pointers into the result of the
    // deref would be invalidated. Examples: interior of variants, uniques.
    fn loan_unstable_deref(&mut self,
                           cmt: cmt,
                           cmt_base: cmt,
                           loan_kind: LoanKind,
                           owns_lent_data: bool) -> bckres<()> {
        // Variant components: the base must be immutable, because
        // if it is overwritten, the types of the embedded data
        // could change.
        do self.loan(cmt_base, PartialFreeze, owns_lent_data).chain |_| {
            // can use static, as in loan_stable_comp()
            self.issue_loan(cmt, ty::re_static, loan_kind,
                            owns_lent_data)
        }
    }

    // Records a single loan on `cmt`, after validating that the loan's
    // scope and mutability requirements can actually be met.
    fn issue_loan(&mut self,
                  cmt: cmt,
                  scope_ub: ty::Region,
                  loan_kind: LoanKind,
                  owns_lent_data: bool) -> bckres<()> {
        // Subtle: the `scope_ub` is the maximal lifetime of `cmt`.
        // Therefore, if `cmt` owns the data being lent, then the
        // scope of the loan must be less than `scope_ub`, or else the
        // data would be freed while the loan is active.
        //
        // However, if `cmt` does *not* own the data being lent, then
        // it is ok if `cmt` goes out of scope during the loan.  This
        // can occur when you have an `&mut` parameter that is being
        // reborrowed.

        if !owns_lent_data ||
            self.bccx.is_subregion_of(self.scope_region, scope_ub)
        {
            if cmt.mutbl.is_mutable() {
                // If this loan is a mutable loan, then mark the loan path (if
                // it exists) as being used. This is similar to the check
                // performed in check_loans.rs in check_assignment(), but this
                // is for a different purpose of having the 'mut' qualifier.
                for cmt.lp.each |lp| {
                    for lp.node_id().each |&id| {
                        self.tcx().used_mut_nodes.insert(id);
                    }
                }
            } else if loan_kind.is_take() {
                // We do not allow non-mutable data to be "taken"
                // under any circumstances.
                return Err(bckerr {
                    cmt:cmt,
                    code:err_mutbl(loan_kind)
                });
            }

            self.loans.push(Loan {
                // Note: cmt.lp must be Some(_) because otherwise this
                // loan process does not apply at all.
                lp: cmt.lp.get(),
                cmt: cmt,
                kind: loan_kind
            });
            return Ok(());
        } else {
            // The loan being requested lives longer than the data
            // being loaned out!
            return Err(bckerr {
                cmt:cmt,
                code:err_out_of_scope(scope_ub, self.scope_region)
            });
        }
    }
}

File diff suppressed because it is too large Load diff

View file

@ -1,409 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ----------------------------------------------------------------------
// Preserve(Ex, S) holds if ToAddr(Ex) will remain valid for the entirety of
// the scope S.
//
use middle::borrowck::{RootInfo, bckerr, bckerr_code, bckres, BorrowckCtxt};
use middle::borrowck::{err_mut_uniq, err_mut_variant};
use middle::borrowck::{err_out_of_root_scope, err_out_of_scope};
use middle::borrowck::{err_root_not_permitted, root_map_key};
use middle::mem_categorization::{cat_arg, cat_binding, cat_comp, cat_deref};
use middle::mem_categorization::{cat_discr, cat_local, cat_self, cat_special};
use middle::mem_categorization::{cat_stack_upvar, cmt, comp_field};
use middle::mem_categorization::{comp_index, comp_variant, gc_ptr};
use middle::mem_categorization::{region_ptr};
use middle::ty;
use util::common::indenter;
use syntax::ast;
// Outcome of preserving a value: either it is preserved
// unconditionally (`PcOk`), or only under a purity requirement
// (`PcIfPure` --- presumably the carried `bckerr` is what gets
// reported when purity is not satisfied; confirm against callers).
pub enum PreserveCondition {
    PcOk,
    PcIfPure(bckerr)
}
pub impl PreserveCondition {
    /// Combines two preservation conditions; the result requires
    /// purity whenever either input does (`PcIfPure` dominates).
    fn combine(&self, pc: PreserveCondition) -> PreserveCondition {
        match *self {
            PcIfPure(_) => *self,
            PcOk => pc
        }
    }
}
pub impl BorrowckCtxt {
    // Entry point: ensures the memory categorized by `cmt` remains
    // valid for `scope_region`. `item_ub` bounds the enclosing item's
    // body; `root_ub` bounds how long @-boxes may be rooted. Managed
    // data is rooted by default (root_managed_data: true).
    fn preserve(&self,
                cmt: cmt,
                scope_region: ty::Region,
                item_ub: ast::node_id,
                root_ub: ast::node_id) -> bckres<PreserveCondition>
    {
        let ctxt = PreserveCtxt {
            bccx: self,
            scope_region: scope_region,
            item_ub: item_ub,
            root_ub: root_ub,
            root_managed_data: true
        };

        ctxt.preserve(cmt)
    }
}
// Immutable parameters for one preserve() computation.
struct PreserveCtxt<'self> {
    bccx: &'self BorrowckCtxt,

    // the region scope for which we must preserve the memory
    scope_region: ty::Region,

    // the scope for the body of the enclosing fn/method item
    item_ub: ast::node_id,

    // the upper bound on how long we can root an @T pointer
    root_ub: ast::node_id,

    // if false, do not attempt to root managed data
    root_managed_data: bool
}
pub impl<'self> PreserveCtxt<'self> {
    /// Convenience accessor for the type context.
    fn tcx(&self) -> ty::ctxt { self.bccx.tcx }

    /// Determines whether the memory categorized by `cmt` can be kept
    /// valid for `self.scope_region`, dispatching on the category:
    /// either the memory is inherently stable for a sufficient scope,
    /// or (for managed boxes) it may be dynamically rooted, or an
    /// error/purity requirement is produced.
    fn preserve(&self, cmt: cmt) -> bckres<PreserveCondition> {
        debug!("preserve(cmt=%s, root_ub=%?, root_managed_data=%b)",
               self.bccx.cmt_to_repr(cmt), self.root_ub,
               self.root_managed_data);
        let _i = indenter();

        match cmt.cat {
            cat_special(sk_implicit_self) |
            cat_special(sk_heap_upvar) => {
                // valid for at least the body of the enclosing item
                self.compare_scope(cmt, ty::re_scope(self.item_ub))
            }
            cat_special(sk_static_item) | cat_special(sk_method) => {
                // statics and methods never move or die
                Ok(PcOk)
            }
            cat_rvalue => {
                // when we borrow an rvalue, we can keep it rooted but only
                // up to the root_ub point

                // When we're in a 'const &x = ...' context, self.root_ub is
                // zero and the rvalue is static, not bound to a scope.
                let scope_region = if self.root_ub == 0 {
                    ty::re_static
                } else {
                    // Maybe if we pass in the parent instead here,
                    // we can prevent the "scope not found" error
                    debug!("scope_region thing: %? ", cmt.id);
                    self.tcx().region_maps.encl_region(cmt.id)
                };

                self.compare_scope(cmt, scope_region)
            }
            cat_stack_upvar(cmt) => {
                // preserve the variable the upvar refers to
                self.preserve(cmt)
            }
            cat_local(local_id) => {
                // Normally, local variables are lendable, and so this
                // case should never trigger. However, if we are
                // preserving an expression like a.b where the field `b`
                // has @ type, then it will recurse to ensure that the `a`
                // is stable to try and avoid rooting the value `a.b`. In
                // this case, root_managed_data will be false.
                if self.root_managed_data {
                    self.tcx().sess.span_bug(
                        cmt.span,
                        "preserve() called with local and !root_managed_data");
                }
                let local_region = self.tcx().region_maps.encl_region(local_id);
                self.compare_scope(cmt, local_region)
            }
            cat_binding(local_id) => {
                // Bindings are these kind of weird implicit pointers (cc
                // #2329). We require (in gather_loans) that they be
                // rooted in an immutable location.
                let local_region = self.tcx().region_maps.encl_region(local_id);
                self.compare_scope(cmt, local_region)
            }
            cat_arg(local_id) => {
                // This can happen as not all args are lendable (e.g., &&
                // modes). In that case, the caller guarantees stability
                // for at least the scope of the fn. This is basically a
                // deref of a region ptr.
                let local_region = self.tcx().region_maps.encl_region(local_id);
                self.compare_scope(cmt, local_region)
            }
            cat_self(local_id) => {
                // `self` behaves like an argument for this purpose
                let local_region = self.tcx().region_maps.encl_region(local_id);
                self.compare_scope(cmt, local_region)
            }
            cat_comp(cmt_base, comp_field(*)) |
            cat_comp(cmt_base, comp_index(*)) |
            cat_comp(cmt_base, comp_tuple) |
            cat_comp(cmt_base, comp_anon_field) => {
                // Most embedded components: if the base is stable, the
                // type never changes.
                self.preserve(cmt_base)
            }
            cat_comp(cmt_base, comp_variant(enum_did)) => {
                if ty::enum_is_univariant(self.tcx(), enum_did) {
                    // single variant: layout cannot change, same as a field
                    self.preserve(cmt_base)
                } else {
                    // If there are multiple variants: overwriting the
                    // base could cause the type of this memory to change,
                    // so require imm.
                    self.require_imm(cmt, cmt_base, err_mut_variant)
                }
            }
            cat_deref(cmt_base, _, uniq_ptr) => {
                // Overwriting the base could cause this memory to be
                // freed, so require imm.
                self.require_imm(cmt, cmt_base, err_mut_uniq)
            }
            cat_deref(_, _, region_ptr(_, region)) => {
                // References are always "stable" for lifetime `region` by
                // induction (when the reference of type &MT was created,
                // the memory must have been stable).
                self.compare_scope(cmt, region)
            }
            cat_deref(_, _, unsafe_ptr) => {
                // Unsafe pointers are the user's problem
                Ok(PcOk)
            }
            cat_deref(base, derefs, gc_ptr(*)) => {
                // GC'd pointers of type @MT: if this pointer lives in
                // immutable, stable memory, then everything is fine. But
                // otherwise we have no guarantee the pointer will stay
                // live, so we must root the pointer (i.e., inc the ref
                // count) for the duration of the loan.
                debug!("base.mutbl = %?", base.mutbl);
                if cmt.cat.derefs_through_mutable_box() {
                    // reached through @mut: must dynamically root/freeze
                    self.attempt_root(cmt, base, derefs)
                } else if base.mutbl.is_immutable() {
                    // try to preserve the (immutable) base itself, without
                    // rooting — rooting an outer box to avoid rooting an
                    // inner one would just keep more memory live
                    let non_rooting_ctxt = PreserveCtxt {
                        root_managed_data: false,
                        ..*self
                    };
                    match non_rooting_ctxt.preserve(base) {
                        Ok(PcOk) => {
                            Ok(PcOk)
                        }
                        Ok(PcIfPure(_)) => {
                            debug!("must root @T, otherwise purity req'd");
                            self.attempt_root(cmt, base, derefs)
                        }
                        Err(ref e) => {
                            debug!("must root @T, err: %s",
                                   self.bccx.bckerr_to_str((*e)));
                            self.attempt_root(cmt, base, derefs)
                        }
                    }
                } else {
                    self.attempt_root(cmt, base, derefs)
                }
            }
            cat_discr(base, match_id) => {
                // Subtle: in a match, we must ensure that each binding
                // variable remains valid for the duration of the arm in
                // which it appears, presuming that this arm is taken.
                // But it is inconvenient in trans to root something just
                // for one arm. Therefore, we insert a cat_discr(),
                // basically a special kind of category that says "if this
                // value must be dynamically rooted, root it for the scope
                // `match_id`.
                //
                // As an example, consider this scenario:
                //
                //    let mut x = @Some(3);
                //    match *x { Some(y) {...} None {...} }
                //
                // Technically, the value `x` need only be rooted
                // in the `some` arm. However, we evaluate `x` in trans
                // before we know what arm will be taken, so we just
                // always root it for the duration of the match.
                //
                // As a second example, consider *this* scenario:
                //
                //    let x = @mut @Some(3);
                //    match x { @@Some(y) {...} @@None {...} }
                //
                // Here again, `x` need only be rooted in the `some` arm.
                // In this case, the value which needs to be rooted is
                // found only when checking which pattern matches: but
                // this check is done before entering the arm. Therefore,
                // even in this case we just choose to keep the value
                // rooted for the entire match. This means the value will be
                // rooted even if the none arm is taken. Oh well.
                //
                // At first, I tried to optimize the second case to only
                // root in one arm, but the result was suboptimal: first,
                // it interfered with the construction of phi nodes in the
                // arm, as we were adding code to root values before the
                // phi nodes were added. This could have been addressed
                // with a second basic block. However, the naive approach
                // also yielded suboptimal results for patterns like:
                //
                //    let x = @mut @...;
                //    match x { @@some_variant(y) | @@some_other_variant(y) =>
                //
                // The reason is that we would root the value once for
                // each pattern and not once per arm. This is also easily
                // fixed, but it's yet more code for what is really quite
                // the corner case.
                //
                // Nonetheless, if you decide to optimize this case in the
                // future, you need only adjust where the cat_discr()
                // node appears to draw the line between what will be rooted
                // in the *arm* vs the *match*.
                let match_rooting_ctxt = PreserveCtxt {
                    scope_region: ty::re_scope(match_id),
                    ..*self
                };
                match_rooting_ctxt.preserve(base)
            }
        }
    }

    /// Requires that `cmt` (which is a deref or subcomponent of
    /// `base`) be found in an immutable location (that is, `base`
    /// must be immutable). Also requires that `base` itself is
    /// preserved.
    fn require_imm(&self,
                   cmt: cmt,
                   cmt_base: cmt,
                   code: bckerr_code) -> bckres<PreserveCondition> {
        // Variant contents and unique pointers: must be immutably
        // rooted to a preserved address.
        match self.preserve(cmt_base) {
            // the base is preserved, but if we are not mutable then
            // purity is required
            Ok(PcOk) => {
                if !cmt_base.mutbl.is_immutable() {
                    Ok(PcIfPure(bckerr {cmt:cmt, code:code}))
                } else {
                    Ok(PcOk)
                }
            }

            // the base requires purity too, that's fine
            Ok(PcIfPure(ref e)) => {
                Ok(PcIfPure((*e)))
            }

            // base is not stable, doesn't matter
            Err(ref e) => {
                Err((*e))
            }
        }
    }

    /// Checks that the scope for which the value must be preserved
    /// is a subscope of `scope_ub`; if so, success.
    fn compare_scope(&self,
                     cmt: cmt,
                     scope_ub: ty::Region) -> bckres<PreserveCondition> {
        if self.bccx.is_subregion_of(self.scope_region, scope_ub) {
            Ok(PcOk)
        } else {
            Err(bckerr {
                cmt:cmt,
                code:err_out_of_scope(scope_ub, self.scope_region)
            })
        }
    }

    /// Here, `cmt=*base` is always a deref of managed data (if
    /// `derefs` != 0, then an auto-deref). This routine determines
    /// whether it is safe to MAKE cmt stable by rooting the pointer
    /// `base`. We can only do the dynamic root if the desired
    /// lifetime `self.scope_region` is a subset of `self.root_ub`
    /// scope; otherwise, it would either require that we hold the
    /// value live for longer than the current fn or else potentially
    /// require that a statically unbounded number of values be
    /// rooted (if a loop exists).
    fn attempt_root(&self, cmt: cmt, base: cmt,
                    derefs: uint) -> bckres<PreserveCondition> {
        if !self.root_managed_data {
            // normally, there is a root_ub; the only time that this
            // is none is when a boxed value is stored in an immutable
            // location.  In that case, we will test to see if that
            // immutable location itself can be preserved long enough
            // in which case no rooting is necessary.  But there it
            // would be sort of pointless to avoid rooting the inner
            // box by rooting an outer box, as it would just keep more
            // memory live than necessary, so we set root_ub to none.
            return Err(bckerr { cmt: cmt, code: err_root_not_permitted });
        }

        let root_region = ty::re_scope(self.root_ub);
        match self.scope_region {
            // we can only root values if the desired region is some concrete
            // scope within the fn body
            ty::re_scope(scope_id) => {
                debug!("Considering root map entry for %s: \
                        node %d:%u -> scope_id %?, root_ub %?",
                       self.bccx.cmt_to_repr(cmt), base.id,
                       derefs, scope_id, self.root_ub);
                if self.bccx.is_subregion_of(self.scope_region, root_region) {
                    debug!("Elected to root");
                    let rk = root_map_key { id: base.id, derefs: derefs };
                    // This code could potentially cause boxes to be frozen
                    // for longer than necessarily at runtime. It prevents an
                    // ICE in trans; the fundamental problem is that it's hard
                    // to make sure trans and borrowck have the same notion of
                    // scope. The real fix is to clean up how trans handles
                    // cleanups, but that's hard. If this becomes an issue, it's
                    // an option to just change this to `let scope_to_use =
                    // scope_id;`. Though that would potentially re-introduce
                    // the ICE. See #3511 for more details.
                    let scope_to_use = if
                        self.bccx.stmt_map.contains(&scope_id) {
                        // Root it in its parent scope, b/c
                        // trans won't introduce a new scope for the
                        // stmt
                        self.root_ub
                    }
                    else {
                        // Use the more precise scope
                        scope_id
                    };
                    // We freeze if and only if this is a *mutable* @ box that
                    // we're borrowing into a pointer.
                    self.bccx.root_map.insert(rk, RootInfo {
                        scope: scope_to_use,
                        freezes: cmt.cat.derefs_through_mutable_box()
                    });
                    return Ok(PcOk);
                } else {
                    debug!("Unable to root");
                    return Err(bckerr {
                        cmt: cmt,
                        code: err_out_of_root_scope(root_region,
                                                    self.scope_region)
                    });
                }
            }

            // we won't be able to root long enough
            _ => {
                return Err(bckerr {
                    cmt:cmt,
                    code:err_out_of_root_scope(root_region, self.scope_region)
                });
            }
        }
    }
}

View file

@ -237,7 +237,7 @@ pub fn check_item_recursion(sess: Session,
match env.def_map.find(&e.id) {
Some(&def_const(def_id)) => {
if ast_util::is_local(def_id) {
match *env.ast_map.get(&def_id.node) {
match env.ast_map.get_copy(&def_id.node) {
ast_map::node_item(it, _) => {
(v.visit_item)(it, env, v);
}

View file

@ -523,7 +523,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
}
}
pat_enum(_, args) => {
match *cx.tcx.def_map.get(&pat_id) {
match cx.tcx.def_map.get_copy(&pat_id) {
def_const(did) => {
let const_expr =
lookup_const_by_id(cx.tcx, did).get();
@ -567,7 +567,7 @@ pub fn specialize(cx: @MatchCheckCtxt,
}
pat_struct(_, ref flds, _) => {
// Is this a struct or an enum variant?
match *cx.tcx.def_map.get(&pat_id) {
match cx.tcx.def_map.get_copy(&pat_id) {
def_variant(_, variant_id) => {
if variant(variant_id) == *ctor_id {
// FIXME #4731: Is this right? --pcw

View file

@ -185,9 +185,7 @@ pub fn lookup_const_by_id(tcx: ty::ctxt,
}
} else {
let maps = astencode::Maps {
mutbl_map: @mut HashSet::new(),
root_map: @mut HashMap::new(),
last_use_map: @mut HashMap::new(),
method_map: @mut HashMap::new(),
vtable_map: @mut HashMap::new(),
write_guard_map: @mut HashSet::new(),

File diff suppressed because it is too large Load diff

View file

@ -10,7 +10,6 @@
use middle::freevars::freevar_entry;
use middle::freevars;
use middle::liveness;
use middle::pat_util;
use middle::ty;
use middle::typeck;
@ -56,19 +55,16 @@ pub static try_adding: &'static str = "Try adding a move";
pub struct Context {
tcx: ty::ctxt,
method_map: typeck::method_map,
last_use_map: liveness::last_use_map,
current_item: node_id,
current_item: node_id
}
pub fn check_crate(tcx: ty::ctxt,
method_map: typeck::method_map,
last_use_map: liveness::last_use_map,
crate: @crate) {
let ctx = Context {
tcx: tcx,
method_map: method_map,
last_use_map: last_use_map,
current_item: -1,
current_item: -1
};
let visit = visit::mk_vt(@visit::Visitor {
visit_arm: check_arm,
@ -132,7 +128,7 @@ fn check_item(item: @item, cx: Context, visitor: visit::vt<Context>) {
// Yes, it's a destructor.
match self_type.node {
ty_path(_, path_node_id) => {
let struct_def = *cx.tcx.def_map.get(
let struct_def = cx.tcx.def_map.get_copy(
&path_node_id);
let struct_did =
ast_util::def_id_of_def(struct_def);
@ -261,11 +257,9 @@ pub fn check_expr(e: @expr, cx: Context, v: visit::vt<Context>) {
_ => e.id
};
for cx.tcx.node_type_substs.find(&type_parameter_id).each |ts| {
// FIXME(#5562): removing this copy causes a segfault before stage2
let ts = /*bad*/ copy **ts;
let type_param_defs = match e.node {
expr_path(_) => {
let did = ast_util::def_id_of_def(*cx.tcx.def_map.get(&e.id));
let did = ast_util::def_id_of_def(cx.tcx.def_map.get_copy(&e.id));
ty::lookup_item_type(cx.tcx, did).generics.type_param_defs
}
_ => {
@ -286,7 +280,7 @@ pub fn check_expr(e: @expr, cx: Context, v: visit::vt<Context>) {
ts.repr(cx.tcx),
type_param_defs.repr(cx.tcx)));
}
for vec::each2(ts, *type_param_defs) |&ty, type_param_def| {
for vec::each2(**ts, *type_param_defs) |&ty, type_param_def| {
check_bounds(cx, type_parameter_id, e.span, ty, type_param_def)
}
}
@ -324,12 +318,10 @@ fn check_ty(aty: @Ty, cx: Context, v: visit::vt<Context>) {
match aty.node {
ty_path(_, id) => {
for cx.tcx.node_type_substs.find(&id).each |ts| {
// FIXME(#5562): removing this copy causes a segfault before stage2
let ts = /*bad*/ copy **ts;
let did = ast_util::def_id_of_def(*cx.tcx.def_map.get(&id));
let did = ast_util::def_id_of_def(cx.tcx.def_map.get_copy(&id));
let type_param_defs =
ty::lookup_item_type(cx.tcx, did).generics.type_param_defs;
for vec::each2(ts, *type_param_defs) |&ty, type_param_def| {
for vec::each2(**ts, *type_param_defs) |&ty, type_param_def| {
check_bounds(cx, aty.id, aty.span, ty, type_param_def)
}
}
@ -392,7 +384,7 @@ pub fn check_bounds(cx: Context,
fn is_nullary_variant(cx: Context, ex: @expr) -> bool {
match ex.node {
expr_path(_) => {
match *cx.tcx.def_map.get(&ex.id) {
match cx.tcx.def_map.get_copy(&ex.id) {
def_variant(edid, vdid) => {
vec::len(ty::enum_variant_with_id(cx.tcx, edid, vdid).args) == 0u
}

View file

@ -28,7 +28,6 @@ use syntax::ast_util::local_def;
use syntax::visit::{default_simple_visitor, mk_simple_visitor, SimpleVisitor};
use syntax::visit::visit_crate;
use core::cast::transmute;
use core::hashmap::HashMap;
pub enum LangItem {
@ -67,21 +66,24 @@ pub enum LangItem {
MallocFnLangItem, // 28
FreeFnLangItem, // 29
BorrowAsImmFnLangItem, // 30
ReturnToMutFnLangItem, // 31
CheckNotBorrowedFnLangItem, // 32
StrDupUniqFnLangItem, // 33
BorrowAsMutFnLangItem, // 31
ReturnToMutFnLangItem, // 32
CheckNotBorrowedFnLangItem, // 33
StrDupUniqFnLangItem, // 34
RecordBorrowFnLangItem, // 35
UnrecordBorrowFnLangItem, // 36
StartFnLangItem, // 34
StartFnLangItem, // 37
}
pub struct LanguageItems {
items: [Option<def_id>, ..35]
items: [Option<def_id>, ..38]
}
pub impl LanguageItems {
pub fn new() -> LanguageItems {
LanguageItems {
items: [ None, ..35 ]
items: [ None, ..38 ]
}
}
@ -129,11 +131,14 @@ pub impl LanguageItems {
28 => "malloc",
29 => "free",
30 => "borrow_as_imm",
31 => "return_to_mut",
32 => "check_not_borrowed",
33 => "strdup_uniq",
31 => "borrow_as_mut",
32 => "return_to_mut",
33 => "check_not_borrowed",
34 => "strdup_uniq",
35 => "record_borrow",
36 => "unrecord_borrow",
34 => "start",
37 => "start",
_ => "???"
}
@ -238,6 +243,9 @@ pub impl LanguageItems {
pub fn borrow_as_imm_fn(&const self) -> def_id {
self.items[BorrowAsImmFnLangItem as uint].get()
}
pub fn borrow_as_mut_fn(&const self) -> def_id {
self.items[BorrowAsMutFnLangItem as uint].get()
}
pub fn return_to_mut_fn(&const self) -> def_id {
self.items[ReturnToMutFnLangItem as uint].get()
}
@ -247,15 +255,20 @@ pub impl LanguageItems {
pub fn strdup_uniq_fn(&const self) -> def_id {
self.items[StrDupUniqFnLangItem as uint].get()
}
pub fn record_borrow_fn(&const self) -> def_id {
self.items[RecordBorrowFnLangItem as uint].get()
}
pub fn unrecord_borrow_fn(&const self) -> def_id {
self.items[UnrecordBorrowFnLangItem as uint].get()
}
pub fn start_fn(&const self) -> def_id {
self.items[StartFnLangItem as uint].get()
}
}
fn LanguageItemCollector<'r>(crate: @crate,
session: Session,
items: &'r mut LanguageItems)
-> LanguageItemCollector<'r> {
fn LanguageItemCollector(crate: @crate,
session: Session)
-> LanguageItemCollector {
let mut item_refs = HashMap::new();
item_refs.insert(@~"const", ConstTraitLangItem as uint);
@ -294,22 +307,25 @@ fn LanguageItemCollector<'r>(crate: @crate,
item_refs.insert(@~"malloc", MallocFnLangItem as uint);
item_refs.insert(@~"free", FreeFnLangItem as uint);
item_refs.insert(@~"borrow_as_imm", BorrowAsImmFnLangItem as uint);
item_refs.insert(@~"borrow_as_mut", BorrowAsMutFnLangItem as uint);
item_refs.insert(@~"return_to_mut", ReturnToMutFnLangItem as uint);
item_refs.insert(@~"check_not_borrowed",
CheckNotBorrowedFnLangItem as uint);
item_refs.insert(@~"strdup_uniq", StrDupUniqFnLangItem as uint);
item_refs.insert(@~"record_borrow", RecordBorrowFnLangItem as uint);
item_refs.insert(@~"unrecord_borrow", UnrecordBorrowFnLangItem as uint);
item_refs.insert(@~"start", StartFnLangItem as uint);
LanguageItemCollector {
crate: crate,
session: session,
items: items,
items: LanguageItems::new(),
item_refs: item_refs
}
}
struct LanguageItemCollector<'self> {
items: &'self mut LanguageItems,
struct LanguageItemCollector {
items: LanguageItems,
crate: @crate,
session: Session,
@ -317,8 +333,8 @@ struct LanguageItemCollector<'self> {
item_refs: HashMap<@~str, uint>,
}
pub impl<'self> LanguageItemCollector<'self> {
fn match_and_collect_meta_item(&self, item_def_id: def_id,
pub impl LanguageItemCollector {
fn match_and_collect_meta_item(&mut self, item_def_id: def_id,
meta_item: @meta_item) {
match meta_item.node {
meta_name_value(key, literal) => {
@ -333,7 +349,7 @@ pub impl<'self> LanguageItemCollector<'self> {
}
}
fn collect_item(&self, item_index: uint, item_def_id: def_id) {
fn collect_item(&mut self, item_index: uint, item_def_id: def_id) {
// Check for duplicates.
match self.items.items[item_index] {
Some(original_def_id) if original_def_id != item_def_id => {
@ -349,42 +365,45 @@ pub impl<'self> LanguageItemCollector<'self> {
self.items.items[item_index] = Some(item_def_id);
}
fn match_and_collect_item(&self,
fn match_and_collect_item(&mut self,
item_def_id: def_id, key: @~str, value: @~str) {
if *key != ~"lang" {
return; // Didn't match.
}
match self.item_refs.find(&value) {
let item_index = self.item_refs.find(&value).map(|x| **x);
// prevent borrow checker from considering ^~~~~~~~~~~
// self to be borrowed (annoying)
match item_index {
Some(item_index) => {
self.collect_item(item_index, item_def_id);
}
None => {
// Didn't match.
}
Some(&item_index) => {
self.collect_item(item_index, item_def_id)
return;
}
}
}
fn collect_local_language_items(&self) {
unsafe {
let this: *LanguageItemCollector<'self> = transmute(self);
visit_crate(self.crate, (), mk_simple_visitor(@SimpleVisitor {
visit_item: |item| {
for item.attrs.each |attribute| {
unsafe {
(*this).match_and_collect_meta_item(
local_def(item.id),
attribute.node.value
);
}
fn collect_local_language_items(&mut self) {
let this: *mut LanguageItemCollector = &mut *self;
visit_crate(self.crate, (), mk_simple_visitor(@SimpleVisitor {
visit_item: |item| {
for item.attrs.each |attribute| {
unsafe {
(*this).match_and_collect_meta_item(
local_def(item.id),
attribute.node.value
);
}
},
.. *default_simple_visitor()
}));
}
}
},
.. *default_simple_visitor()
}));
}
fn collect_external_language_items(&self) {
fn collect_external_language_items(&mut self) {
let crate_store = self.session.cstore;
do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
for each_lang_item(crate_store, crate_number)
@ -408,7 +427,7 @@ pub impl<'self> LanguageItemCollector<'self> {
}
}
fn collect(&self) {
fn collect(&mut self) {
self.collect_local_language_items();
self.collect_external_language_items();
self.check_completeness();
@ -418,8 +437,8 @@ pub impl<'self> LanguageItemCollector<'self> {
pub fn collect_language_items(crate: @crate,
session: Session)
-> LanguageItems {
let mut items = LanguageItems::new();
let collector = LanguageItemCollector(crate, session, &mut items);
let mut collector = LanguageItemCollector(crate, session);
collector.collect();
copy items
let LanguageItemCollector { items, _ } = collector;
items
}

View file

@ -667,7 +667,7 @@ fn check_item_ctypes(cx: ty::ctxt, it: @ast::item) {
for vec::each(vec::append_one(tys, decl.output)) |ty| {
match ty.node {
ast::ty_path(_, id) => {
match *cx.def_map.get(&id) {
match cx.def_map.get_copy(&id) {
ast::def_prim_ty(ast::ty_int(ast::ty_i)) => {
cx.sess.span_lint(
ctypes, id, fn_id,

View file

@ -112,7 +112,6 @@ use util::ppaux::ty_to_str;
use core::cast::transmute;
use core::hashmap::HashMap;
use core::util::with;
use syntax::ast::*;
use syntax::codemap::span;
use syntax::parse::token::special_idents;
@ -121,16 +120,6 @@ use syntax::visit::{fk_anon, fk_fn_block, fk_item_fn, fk_method};
use syntax::visit::{vt};
use syntax::{visit, ast_util};
// Maps from an expr id to a list of variable ids for which this expr
// is the last use. Typically, the expr is a path and the node id is
// the local/argument/etc that the path refers to. However, it also
// possible for the expr to be a closure, in which case the list is a
// list of closed over variables that can be moved into the closure.
//
// Very subtle (#2633): borrowck will remove entries from this table
// if it detects an outstanding loan (that is, the addr is taken).
pub type last_use_map = @mut HashMap<node_id, @mut ~[node_id]>;
#[deriving(Eq)]
struct Variable(uint);
#[deriving(Eq)]
@ -158,7 +147,7 @@ pub fn check_crate(tcx: ty::ctxt,
method_map: typeck::method_map,
variable_moves_map: moves::VariableMovesMap,
capture_map: moves::CaptureMap,
crate: @crate) -> last_use_map {
crate: @crate) {
let visitor = visit::mk_vt(@visit::Visitor {
visit_fn: visit_fn,
visit_local: visit_local,
@ -168,16 +157,13 @@ pub fn check_crate(tcx: ty::ctxt,
.. *visit::default_visitor()
});
let last_use_map = @mut HashMap::new();
let initial_maps = @mut IrMaps(tcx,
method_map,
variable_moves_map,
capture_map,
last_use_map,
0);
visit::visit_crate(crate, initial_maps, visitor);
tcx.sess.abort_if_errors();
return last_use_map;
}
impl to_str::ToStr for LiveNode {
@ -241,23 +227,11 @@ enum VarKind {
ImplicitRet
}
fn relevant_def(def: def) -> Option<node_id> {
match def {
def_binding(nid, _) |
def_arg(nid, _) |
def_local(nid, _) |
def_self(nid, _) => Some(nid),
_ => None
}
}
struct IrMaps {
tcx: ty::ctxt,
method_map: typeck::method_map,
variable_moves_map: moves::VariableMovesMap,
capture_map: moves::CaptureMap,
last_use_map: last_use_map,
num_live_nodes: uint,
num_vars: uint,
@ -274,7 +248,6 @@ fn IrMaps(tcx: ty::ctxt,
method_map: typeck::method_map,
variable_moves_map: moves::VariableMovesMap,
capture_map: moves::CaptureMap,
last_use_map: last_use_map,
cur_item: node_id)
-> IrMaps {
IrMaps {
@ -282,7 +255,6 @@ fn IrMaps(tcx: ty::ctxt,
method_map: method_map,
variable_moves_map: variable_moves_map,
capture_map: capture_map,
last_use_map: last_use_map,
num_live_nodes: 0,
num_vars: 0,
live_node_map: HashMap::new(),
@ -367,35 +339,13 @@ pub impl IrMaps {
fn lnk(&mut self, ln: LiveNode) -> LiveNodeKind {
self.lnks[*ln]
}
fn add_last_use(&mut self, expr_id: node_id, var: Variable) {
let vk = self.var_kinds[*var];
debug!("Node %d is a last use of variable %?", expr_id, vk);
match vk {
Arg(id, _) |
Local(LocalInfo { id: id, kind: FromLetNoInitializer, _ }) |
Local(LocalInfo { id: id, kind: FromLetWithInitializer, _ }) |
Local(LocalInfo { id: id, kind: FromMatch(_), _ }) => {
let v = match self.last_use_map.find(&expr_id) {
Some(&v) => v,
None => {
let v = @mut ~[];
self.last_use_map.insert(expr_id, v);
v
}
};
v.push(id);
}
ImplicitRet => debug!("--but it is not owned"),
}
}
}
fn visit_item(item: @item, self: @mut IrMaps, v: vt<@mut IrMaps>) {
do with(&mut self.cur_item, item.id) {
visit::visit_item(item, self, v)
}
let old_cur_item = self.cur_item;
self.cur_item = item.id;
visit::visit_item(item, self, v);
self.cur_item = old_cur_item;
}
fn visit_fn(fk: &visit::fn_kind,
@ -413,7 +363,6 @@ fn visit_fn(fk: &visit::fn_kind,
self.method_map,
self.variable_moves_map,
self.capture_map,
self.last_use_map,
self.cur_item);
unsafe {
@ -517,9 +466,9 @@ fn visit_expr(expr: @expr, self: @mut IrMaps, vt: vt<@mut IrMaps>) {
match expr.node {
// live nodes required for uses or definitions of variables:
expr_path(_) => {
let def = *self.tcx.def_map.get(&expr.id);
let def = self.tcx.def_map.get_copy(&expr.id);
debug!("expr %d: path that leads to %?", expr.id, def);
if relevant_def(def).is_some() {
if moves::moved_variable_node_id_from_def(def).is_some() {
self.add_live_node_for_node(expr.id, ExprNode(expr.span));
}
visit::visit_expr(expr, self, vt);
@ -536,7 +485,7 @@ fn visit_expr(expr: @expr, self: @mut IrMaps, vt: vt<@mut IrMaps>) {
let cvs = self.capture_map.get(&expr.id);
let mut call_caps = ~[];
for cvs.each |cv| {
match relevant_def(cv.def) {
match moves::moved_variable_node_id_from_def(cv.def) {
Some(rv) => {
let cv_ln = self.add_live_node(FreeVarNode(cv.span));
let is_move = match cv.mode {
@ -664,8 +613,8 @@ pub impl Liveness {
fn variable_from_path(&self, expr: @expr) -> Option<Variable> {
match expr.node {
expr_path(_) => {
let def = *self.tcx.def_map.get(&expr.id);
relevant_def(def).map(
let def = self.tcx.def_map.get_copy(&expr.id);
moves::moved_variable_node_id_from_def(def).map(
|rdef| self.variable(*rdef, expr.span)
)
}
@ -681,7 +630,7 @@ pub impl Liveness {
span: span) -> Option<Variable> {
match self.tcx.def_map.find(&node_id) {
Some(&def) => {
relevant_def(def).map(
moves::moved_variable_node_id_from_def(def).map(
|rdef| self.variable(*rdef, span)
)
}
@ -810,11 +759,13 @@ pub impl Liveness {
None => {
// Vanilla 'break' or 'loop', so use the enclosing
// loop scope
let loop_scope = &mut *self.loop_scope;
if loop_scope.len() == 0 {
self.tcx.sess.span_bug(sp, "break outside loop");
}
else {
let len = { // FIXME(#5074) stage0
let loop_scope = &mut *self.loop_scope;
loop_scope.len()
};
if len == 0 {
self.tcx.sess.span_bug(sp, ~"break outside loop");
} else {
// FIXME(#5275): this shouldn't have to be a method...
self.last_loop_scope()
}
@ -1384,8 +1335,8 @@ pub impl Liveness {
fn access_path(&self, expr: @expr, succ: LiveNode, acc: uint)
-> LiveNode {
let def = *self.tcx.def_map.get(&expr.id);
match relevant_def(def) {
let def = self.tcx.def_map.get_copy(&expr.id);
match moves::moved_variable_node_id_from_def(def) {
Some(nid) => {
let ln = self.live_node(expr.id, expr.span);
if acc != 0u {
@ -1518,7 +1469,6 @@ fn check_expr(expr: @expr, self: @Liveness, vt: vt<@Liveness>) {
expr_path(_) => {
for self.variable_from_def_map(expr.id, expr.span).each |var| {
let ln = self.live_node(expr.id, expr.span);
self.consider_last_use(expr, ln, *var);
match self.ir.variable_moves_map.find(&expr.id) {
None => {}
@ -1537,7 +1487,6 @@ fn check_expr(expr: @expr, self: @Liveness, vt: vt<@Liveness>) {
let caps = self.ir.captures(expr);
for caps.each |cap| {
let var = self.variable(cap.var_nid, expr.span);
self.consider_last_use(expr, cap.ln, var);
if cap.is_move {
self.check_move_from_var(cap.ln, var, expr);
}
@ -1606,7 +1555,7 @@ enum ReadKind {
}
pub impl Liveness {
fn check_ret(@self, id: node_id, sp: span, _fk: &visit::fn_kind,
fn check_ret(&self, id: node_id, sp: span, _fk: &visit::fn_kind,
entry_ln: LiveNode) {
if self.live_on_entry(entry_ln, self.s.no_ret_var).is_some() {
// if no_ret_var is live, then we fall off the end of the
@ -1626,11 +1575,11 @@ pub impl Liveness {
}
}
fn check_move_from_var(@self, ln: LiveNode,
fn check_move_from_var(&self,
ln: LiveNode,
var: Variable,
move_expr: @expr) {
/*!
*
* Checks whether `var` is live on entry to any of the
* successors of `ln`. If it is, report an error.
* `move_expr` is the expression which caused the variable
@ -1650,20 +1599,10 @@ pub impl Liveness {
}
}
fn consider_last_use(@self, expr: @expr, ln: LiveNode, var: Variable) {
debug!("consider_last_use(expr.id=%?, ln=%s, var=%s)",
expr.id, ln.to_str(), var.to_str());
match self.live_on_exit(ln, var) {
Some(_) => {}
None => self.ir.add_last_use(expr.id, var)
}
}
fn check_lvalue(@self, expr: @expr, vt: vt<@Liveness>) {
match expr.node {
expr_path(_) => {
match *self.tcx.def_map.get(&expr.id) {
match self.tcx.def_map.get_copy(&expr.id) {
def_local(nid, mutbl) => {
// Assignment to an immutable variable or argument: only legal
// if there is no later assignment. If this local is actually
@ -1676,7 +1615,7 @@ pub impl Liveness {
self.warn_about_dead_assign(expr.span, expr.id, ln, var);
}
def => {
match relevant_def(def) {
match moves::moved_variable_node_id_from_def(def) {
Some(nid) => {
let ln = self.live_node(expr.id, expr.span);
let var = self.variable(nid, expr.span);
@ -1696,14 +1635,14 @@ pub impl Liveness {
}
}
fn check_for_reassignments_in_pat(@self, pat: @pat, mutbl: bool) {
fn check_for_reassignments_in_pat(&self, pat: @pat, mutbl: bool) {
do self.pat_bindings(pat) |ln, var, sp, id| {
self.check_for_reassignment(ln, var, sp,
if mutbl {Some(id)} else {None});
}
}
fn check_for_reassignment(@self, ln: LiveNode, var: Variable,
fn check_for_reassignment(&self, ln: LiveNode, var: Variable,
orig_span: span, mutbl: Option<node_id>) {
match self.assigned_on_exit(ln, var) {
Some(ExprNode(span)) => {
@ -1728,7 +1667,7 @@ pub impl Liveness {
}
}
fn report_illegal_move(@self, lnk: LiveNodeKind,
fn report_illegal_move(&self, lnk: LiveNodeKind,
var: Variable,
move_expr: @expr) {
// the only time that it is possible to have a moved variable
@ -1793,7 +1732,8 @@ pub impl Liveness {
};
}
fn report_move_location(@self, move_expr: @expr,
fn report_move_location(&self,
move_expr: @expr,
var: Variable,
expr_descr: &str,
pronoun: &str) {
@ -1807,7 +1747,8 @@ pub impl Liveness {
ty_to_str(self.tcx, move_expr_ty)));
}
fn report_illegal_read(@self, chk_span: span,
fn report_illegal_read(&self,
chk_span: span,
lnk: LiveNodeKind,
var: Variable,
rk: ReadKind) {
@ -1838,12 +1779,12 @@ pub impl Liveness {
}
}
fn should_warn(@self, var: Variable) -> Option<@~str> {
fn should_warn(&self, var: Variable) -> Option<@~str> {
let name = self.ir.variable_name(var);
if name[0] == ('_' as u8) { None } else { Some(name) }
}
fn warn_about_unused_args(@self, decl: &fn_decl, entry_ln: LiveNode) {
fn warn_about_unused_args(&self, decl: &fn_decl, entry_ln: LiveNode) {
for decl.inputs.each |arg| {
do pat_util::pat_bindings(self.tcx.def_map, arg.pat)
|_bm, p_id, sp, _n| {
@ -1853,7 +1794,7 @@ pub impl Liveness {
}
}
fn warn_about_unused_or_dead_vars_in_pat(@self, pat: @pat) {
fn warn_about_unused_or_dead_vars_in_pat(&self, pat: @pat) {
do self.pat_bindings(pat) |ln, var, sp, id| {
if !self.warn_about_unused(sp, id, ln, var) {
self.warn_about_dead_assign(sp, id, ln, var);
@ -1861,7 +1802,7 @@ pub impl Liveness {
}
}
fn warn_about_unused(@self, sp: span, id: node_id,
fn warn_about_unused(&self, sp: span, id: node_id,
ln: LiveNode, var: Variable) -> bool {
if !self.used_on_entry(ln, var) {
for self.should_warn(var).each |name| {
@ -1891,7 +1832,7 @@ pub impl Liveness {
return false;
}
fn warn_about_dead_assign(@self, sp: span, id: node_id,
fn warn_about_dead_assign(&self, sp: span, id: node_id,
ln: LiveNode, var: Variable) {
if self.live_on_exit(ln, var).is_none() {
for self.should_warn(var).each |name| {

File diff suppressed because it is too large Load diff

View file

@ -246,10 +246,19 @@ pub type MovesMap = @mut HashSet<node_id>;
* expression */
pub type VariableMovesMap = @mut HashMap<node_id, @expr>;
/**
* Set of variable node-ids that are moved.
*
* Note: The `VariableMovesMap` stores expression ids that
* are moves, whereas this set stores the ids of the variables
* that are moved at some point */
pub type MovedVariablesSet = @mut HashSet<node_id>;
/** See the section Output on the module comment for explanation. */
pub struct MoveMaps {
moves_map: MovesMap,
variable_moves_map: VariableMovesMap,
moved_variables_set: MovedVariablesSet,
capture_map: CaptureMap
}
@ -279,13 +288,25 @@ pub fn compute_moves(tcx: ty::ctxt,
move_maps: MoveMaps {
moves_map: @mut HashSet::new(),
variable_moves_map: @mut HashMap::new(),
capture_map: @mut HashMap::new()
capture_map: @mut HashMap::new(),
moved_variables_set: @mut HashSet::new()
}
};
visit::visit_crate(crate, visit_cx, visitor);
return visit_cx.move_maps;
}
pub fn moved_variable_node_id_from_def(def: def) -> Option<node_id> {
match def {
def_binding(nid, _) |
def_arg(nid, _) |
def_local(nid, _) |
def_self(nid, _) => Some(nid),
_ => None
}
}
// ______________________________________________________________________
// Expressions
@ -419,6 +440,11 @@ pub impl VisitContext {
MoveInPart(entire_expr) => {
self.move_maps.variable_moves_map.insert(
expr.id, entire_expr);
let def = self.tcx.def_map.get_copy(&expr.id);
for moved_variable_node_id_from_def(def).each |&id| {
self.move_maps.moved_variables_set.insert(id);
}
}
Read => {}
MoveInWhole => {

View file

@ -481,7 +481,7 @@ pub fn check_crate(tcx: ty::ctxt,
}
}
expr_path(path) => {
check_path(expr.span, *tcx.def_map.get(&expr.id), path);
check_path(expr.span, tcx.def_map.get_copy(&expr.id), path);
}
expr_struct(_, ref fields, _) => {
match ty::get(ty::expr_ty(tcx, expr)).sty {
@ -499,7 +499,7 @@ pub fn check_crate(tcx: ty::ctxt,
ty_enum(id, _) => {
if id.crate != local_crate ||
!privileged_items.contains(&(id.node)) {
match *tcx.def_map.get(&expr.id) {
match tcx.def_map.get_copy(&expr.id) {
def_variant(_, variant_id) => {
for (*fields).each |field| {
debug!("(privacy checking) \

View file

@ -47,59 +47,27 @@ The region maps encode information about region relationships.
- the free region map is populated during type check as we check
each function. See the function `relate_free_regions` for
more information.
- `cleanup_scopes` includes scopes where trans cleanups occur
- this is intended to reflect the current state of trans, not
necessarily how I think things ought to work
*/
pub struct RegionMaps {
priv scope_map: HashMap<ast::node_id, ast::node_id>,
priv free_region_map: HashMap<FreeRegion, ~[FreeRegion]>,
priv cleanup_scopes: HashSet<ast::node_id>
}
pub struct ctxt {
pub struct Context {
sess: Session,
def_map: resolve::DefMap,
// Generated maps:
region_maps: @mut RegionMaps,
// Generally speaking, expressions are parented to their innermost
// enclosing block. But some kinds of expressions serve as
// parents: calls, methods, etc. In addition, some expressions
// serve as parents by virtue of where they appear. For example,
// the condition in a while loop is always a parent. In those
// cases, we add the node id of such an expression to this set so
// that when we visit it we can view it as a parent.
root_exprs: @mut HashSet<ast::node_id>,
// Scope where variables should be parented to
var_parent: parent,
// The parent scope is the innermost block, statement, call, or match
// expression during the execution of which the current expression
// will be evaluated. Generally speaking, the innermost parent
// scope is also the closest suitable ancestor in the AST tree.
//
// There is a subtle point concerning call arguments. Imagine
// you have a call:
//
// { // block a
// foo( // call b
// x,
// y);
// }
//
// In what lifetime are the expressions `x` and `y` evaluated? At
// first, I imagine the answer was the block `a`, as the arguments
// are evaluated before the call takes place. But this turns out
// to be wrong. The lifetime of the call must encompass the
// argument evaluation as well.
//
// The reason is that evaluation of an earlier argument could
// create a borrow which exists during the evaluation of later
// arguments. Consider this torture test, for example,
//
// fn test1(x: @mut ~int) {
// foo(&**x, *x = ~5);
// }
//
// Here, the first argument `&**x` will be a borrow of the `~int`,
// but the second argument overwrites that very value! Bad.
// (This test is borrowck-pure-scope-in-call.rs, btw)
// Innermost enclosing expression
parent: parent,
}
@ -128,10 +96,22 @@ pub impl RegionMaps {
sup: ast::node_id)
{
debug!("record_parent(sub=%?, sup=%?)", sub, sup);
assert!(sub != sup);
self.scope_map.insert(sub, sup);
}
pub fn record_cleanup_scope(&mut self,
scope_id: ast::node_id)
{
//! Records that a scope is a CLEANUP SCOPE. This is invoked
//! from within regionck. We wait until regionck because we do
//! not know which operators are overloaded until that point,
//! and only overloaded operators result in cleanup scopes.
self.cleanup_scopes.insert(scope_id);
}
fn opt_encl_scope(&self,
id: ast::node_id) -> Option<ast::node_id>
{
@ -151,6 +131,22 @@ pub impl RegionMaps {
}
}
fn is_cleanup_scope(&self, scope_id: ast::node_id) -> bool {
self.cleanup_scopes.contains(&scope_id)
}
fn cleanup_scope(&self,
expr_id: ast::node_id) -> ast::node_id
{
//! Returns the scope when temps in expr will be cleaned up
let mut id = self.encl_scope(expr_id);
while !self.cleanup_scopes.contains(&id) {
id = self.encl_scope(id);
}
return id;
}
fn encl_region(&self,
id: ast::node_id) -> ty::Region
{
@ -159,22 +155,38 @@ pub impl RegionMaps {
ty::re_scope(self.encl_scope(id))
}
fn is_sub_scope(&self,
sub_scope: ast::node_id,
superscope: ast::node_id) -> bool
pub fn scopes_intersect(&self,
scope1: ast::node_id,
scope2: ast::node_id) -> bool
{
self.is_subscope_of(scope1, scope2) || self.is_subscope_of(scope2, scope1)
}
fn is_subscope_of(&self,
subscope: ast::node_id,
superscope: ast::node_id) -> bool
{
/*!
* Returns true if `sub_scope` is equal to or is lexically
* Returns true if `subscope` is equal to or is lexically
* nested inside `superscope` and false otherwise.
*/
let mut sub_scope = sub_scope;
while superscope != sub_scope {
match self.scope_map.find(&sub_scope) {
None => return false,
Some(&scope) => sub_scope = scope
let mut s = subscope;
while superscope != s {
match self.scope_map.find(&s) {
None => {
debug!("is_subscope_of(%?, %?, s=%?)=false",
subscope, superscope, s);
return false;
}
Some(&scope) => s = scope
}
}
debug!("is_subscope_of(%?, %?)=true",
subscope, superscope);
return true;
}
@ -239,11 +251,11 @@ pub impl RegionMaps {
}
(ty::re_scope(sub_scope), ty::re_scope(super_scope)) => {
self.is_sub_scope(sub_scope, super_scope)
self.is_subscope_of(sub_scope, super_scope)
}
(ty::re_scope(sub_scope), ty::re_free(ref fr)) => {
self.is_sub_scope(sub_scope, fr.scope_id)
self.is_subscope_of(sub_scope, fr.scope_id)
}
(ty::re_free(sub_fr), ty::re_free(super_fr)) => {
@ -301,6 +313,7 @@ pub impl RegionMaps {
fn ancestors_of(self: &RegionMaps, scope: ast::node_id)
-> ~[ast::node_id]
{
// debug!("ancestors_of(scope=%d)", scope);
let mut result = ~[scope];
let mut scope = scope;
loop {
@ -311,13 +324,14 @@ pub impl RegionMaps {
scope = superscope;
}
}
// debug!("ancestors_of_loop(scope=%d)", scope);
}
}
}
}
/// Extracts that current parent from cx, failing if there is none.
pub fn parent_id(cx: ctxt, span: span) -> ast::node_id {
pub fn parent_id(cx: Context, span: span) -> ast::node_id {
match cx.parent {
None => {
cx.sess.span_bug(span, "crate should not be parent here");
@ -329,143 +343,136 @@ pub fn parent_id(cx: ctxt, span: span) -> ast::node_id {
}
/// Records the current parent (if any) as the parent of `child_id`.
pub fn record_parent(cx: ctxt, child_id: ast::node_id) {
pub fn parent_to_expr(cx: Context, child_id: ast::node_id) {
for cx.parent.each |parent_id| {
cx.region_maps.record_parent(child_id, *parent_id);
}
}
pub fn resolve_block(blk: &ast::blk, cx: ctxt, visitor: visit::vt<ctxt>) {
pub fn resolve_block(blk: &ast::blk, cx: Context, visitor: visit::vt<Context>) {
// Record the parent of this block.
record_parent(cx, blk.node.id);
parent_to_expr(cx, blk.node.id);
// Descend.
let new_cx: ctxt = ctxt {parent: Some(blk.node.id),.. cx};
let new_cx = Context {var_parent: Some(blk.node.id),
parent: Some(blk.node.id),
..cx};
visit::visit_block(blk, new_cx, visitor);
}
pub fn resolve_arm(arm: &ast::arm, cx: ctxt, visitor: visit::vt<ctxt>) {
pub fn resolve_arm(arm: &ast::arm, cx: Context, visitor: visit::vt<Context>) {
visit::visit_arm(arm, cx, visitor);
}
pub fn resolve_pat(pat: @ast::pat, cx: ctxt, visitor: visit::vt<ctxt>) {
match pat.node {
ast::pat_ident(*) => {
let defn_opt = cx.def_map.find(&pat.id);
match defn_opt {
Some(&ast::def_variant(_,_)) => {
/* Nothing to do; this names a variant. */
}
_ => {
/* This names a local. Bind it to the containing scope. */
record_parent(cx, pat.id);
}
}
}
_ => { /* no-op */ }
}
pub fn resolve_pat(pat: @ast::pat, cx: Context, visitor: visit::vt<Context>) {
assert!(cx.var_parent == cx.parent);
parent_to_expr(cx, pat.id);
visit::visit_pat(pat, cx, visitor);
}
pub fn resolve_stmt(stmt: @ast::stmt, cx: ctxt, visitor: visit::vt<ctxt>) {
pub fn resolve_stmt(stmt: @ast::stmt, cx: Context, visitor: visit::vt<Context>) {
match stmt.node {
ast::stmt_decl(*) => {
visit::visit_stmt(stmt, cx, visitor);
}
// This code has to be kept consistent with trans::base::trans_stmt
ast::stmt_expr(_, stmt_id) |
ast::stmt_semi(_, stmt_id) => {
record_parent(cx, stmt_id);
let mut expr_cx = cx;
expr_cx.parent = Some(stmt_id);
visit::visit_stmt(stmt, expr_cx, visitor);
}
ast::stmt_mac(*) => cx.sess.bug(~"unexpanded macro")
ast::stmt_decl(*) => {
visit::visit_stmt(stmt, cx, visitor);
}
ast::stmt_expr(_, stmt_id) |
ast::stmt_semi(_, stmt_id) => {
parent_to_expr(cx, stmt_id);
let expr_cx = Context {parent: Some(stmt_id), ..cx};
visit::visit_stmt(stmt, expr_cx, visitor);
}
ast::stmt_mac(*) => cx.sess.bug(~"unexpanded macro")
}
}
pub fn resolve_expr(expr: @ast::expr, cx: ctxt, visitor: visit::vt<ctxt>) {
record_parent(cx, expr.id);
pub fn resolve_expr(expr: @ast::expr, cx: Context, visitor: visit::vt<Context>) {
parent_to_expr(cx, expr.id);
let mut new_cx = cx;
new_cx.parent = Some(expr.id);
match expr.node {
// Calls or overloadable operators
// FIXME #3387
// ast::expr_index(*) | ast::expr_binary(*) |
// ast::expr_unary(*) |
ast::expr_call(*) | ast::expr_method_call(*) => {
debug!("node %d: %s", expr.id, pprust::expr_to_str(expr,
cx.sess.intr()));
new_cx.parent = Some(expr.id);
}
ast::expr_match(*) => {
debug!("node %d: %s", expr.id, pprust::expr_to_str(expr,
cx.sess.intr()));
new_cx.parent = Some(expr.id);
}
ast::expr_while(cond, _) => {
new_cx.root_exprs.insert(cond.id);
}
_ => {}
ast::expr_assign_op(*) | ast::expr_index(*) | ast::expr_binary(*) |
ast::expr_unary(*) | ast::expr_call(*) | ast::expr_method_call(*) => {
// FIXME(#6268) Nested method calls
//
// The lifetimes for a call or method call look as follows:
//
// call.id
// - arg0.id
// - ...
// - argN.id
// - call.callee_id
//
// The idea is that call.callee_id represents *the time when
// the invoked function is actually running* and call.id
// represents *the time to prepare the arguments and make the
// call*. See the section "Borrows in Calls" borrowck/doc.rs
// for an extended explanantion of why this distinction is
// important.
//
// parent_to_expr(new_cx, expr.callee_id);
}
ast::expr_match(*) => {
new_cx.var_parent = Some(expr.id);
}
_ => {}
};
if new_cx.root_exprs.contains(&expr.id) {
new_cx.parent = Some(expr.id);
}
visit::visit_expr(expr, new_cx, visitor);
}
pub fn resolve_local(local: @ast::local,
cx: ctxt,
visitor: visit::vt<ctxt>) {
record_parent(cx, local.node.id);
cx: Context,
visitor: visit::vt<Context>) {
assert!(cx.var_parent == cx.parent);
parent_to_expr(cx, local.node.id);
visit::visit_local(local, cx, visitor);
}
pub fn resolve_item(item: @ast::item, cx: ctxt, visitor: visit::vt<ctxt>) {
pub fn resolve_item(item: @ast::item, cx: Context, visitor: visit::vt<Context>) {
// Items create a new outer block scope as far as we're concerned.
let new_cx: ctxt = ctxt {parent: None,.. cx};
let new_cx = Context {var_parent: None, parent: None, ..cx};
visit::visit_item(item, new_cx, visitor);
}
pub fn resolve_fn(fk: &visit::fn_kind,
decl: &ast::fn_decl,
body: &ast::blk,
sp: span,
_sp: span,
id: ast::node_id,
cx: ctxt,
visitor: visit::vt<ctxt>) {
let fn_cx = match *fk {
visit::fk_item_fn(*) | visit::fk_method(*) => {
// Top-level functions are a root scope.
ctxt {parent: Some(id),.. cx}
}
cx: Context,
visitor: visit::vt<Context>) {
debug!("region::resolve_fn(id=%?, body.node.id=%?, cx.parent=%?)",
id, body.node.id, cx.parent);
visit::fk_anon(*) | visit::fk_fn_block(*) => {
// Closures continue with the inherited scope.
cx
}
};
// Record the ID of `self`.
// The arguments and `self` are parented to the body of the fn.
let decl_cx = Context {parent: Some(body.node.id),
var_parent: Some(body.node.id),
..cx};
match *fk {
visit::fk_method(_, _, method) => {
cx.region_maps.record_parent(method.self_id, body.node.id);
}
_ => {}
}
visit::visit_fn_decl(decl, decl_cx, visitor);
debug!("visiting fn with body %d. cx.parent: %? \
fn_cx.parent: %?",
body.node.id, cx.parent, fn_cx.parent);
for decl.inputs.each |input| {
cx.region_maps.record_parent(input.id, body.node.id);
}
visit::visit_fn(fk, decl, body, sp, id, fn_cx, visitor);
// The body of the fn itself is either a root scope (top-level fn)
// or it continues with the inherited scope (closures).
let body_cx = match *fk {
visit::fk_item_fn(*) |
visit::fk_method(*) => {
Context {parent: None, var_parent: None, ..cx}
}
visit::fk_anon(*) |
visit::fk_fn_block(*) => {
cx
}
};
(visitor.visit_block)(body, body_cx, visitor);
}
pub fn resolve_crate(sess: Session,
@ -474,13 +481,14 @@ pub fn resolve_crate(sess: Session,
{
let region_maps = @mut RegionMaps {
scope_map: HashMap::new(),
free_region_map: HashMap::new()
free_region_map: HashMap::new(),
cleanup_scopes: HashSet::new(),
};
let cx: ctxt = ctxt {sess: sess,
def_map: def_map,
region_maps: region_maps,
root_exprs: @mut HashSet::new(),
parent: None};
let cx = Context {sess: sess,
def_map: def_map,
region_maps: region_maps,
parent: None,
var_parent: None};
let visitor = visit::mk_vt(@visit::Visitor {
visit_block: resolve_block,
visit_item: resolve_item,
@ -771,7 +779,8 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
pprust::ty_to_str(ty, sess.intr()));
if cx.region_is_relevant(r) {
cx.add_rp(cx.item_id, cx.add_variance(rv_contravariant))
let rv = cx.add_variance(rv_contravariant);
cx.add_rp(cx.item_id, rv)
}
}
@ -781,14 +790,14 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
match f.region {
Some(_) => {
if cx.region_is_relevant(f.region) {
cx.add_rp(cx.item_id,
cx.add_variance(rv_contravariant))
let rv = cx.add_variance(rv_contravariant);
cx.add_rp(cx.item_id, rv)
}
}
None => {
if f.sigil == ast::BorrowedSigil && cx.anon_implies_rp {
cx.add_rp(cx.item_id,
cx.add_variance(rv_contravariant));
let rv = cx.add_variance(rv_contravariant);
cx.add_rp(cx.item_id, rv)
}
}
}
@ -819,7 +828,8 @@ pub fn determine_rp_in_ty(ty: @ast::Ty,
debug!("reference to external, rp'd type %s",
pprust::ty_to_str(ty, sess.intr()));
if cx.region_is_relevant(path.rp) {
cx.add_rp(cx.item_id, cx.add_variance(variance))
let rv = cx.add_variance(variance);
cx.add_rp(cx.item_id, rv)
}
}
}
@ -938,7 +948,7 @@ pub fn determine_rp_in_crate(sess: Session,
let cx = &mut *cx;
while cx.worklist.len() != 0 {
let c_id = cx.worklist.pop();
let c_variance = *cx.region_paramd_items.get(&c_id);
let c_variance = cx.region_paramd_items.get_copy(&c_id);
debug!("popped %d from worklist", c_id);
match cx.dep_map.find(&c_id) {
None => {}

View file

@ -779,9 +779,9 @@ pub fn Resolver(session: Session,
unresolved_imports: 0,
current_module: current_module,
value_ribs: ~[],
type_ribs: ~[],
label_ribs: ~[],
value_ribs: @mut ~[],
type_ribs: @mut ~[],
label_ribs: @mut ~[],
xray_context: NoXray,
current_trait_refs: None,
@ -830,13 +830,13 @@ pub struct Resolver {
// The current set of local scopes, for values.
// FIXME #4948: Reuse ribs to avoid allocation.
value_ribs: ~[@Rib],
value_ribs: @mut ~[@Rib],
// The current set of local scopes, for types.
type_ribs: ~[@Rib],
type_ribs: @mut ~[@Rib],
// The current set of local scopes, for labels.
label_ribs: ~[@Rib],
label_ribs: @mut ~[@Rib],
// Whether the current context is an X-ray context. An X-ray context is
// allowed to access private names of any module.
@ -971,7 +971,7 @@ pub impl Resolver {
module_.children.insert(name, child);
return (child, new_parent);
}
Some(child) => {
Some(&child) => {
// Enforce the duplicate checking mode:
//
// * If we're requesting duplicate module checking, check that
@ -1033,7 +1033,7 @@ pub impl Resolver {
*self.session.str_of(name)));
}
}
return (*child, new_parent);
return (child, new_parent);
}
}
}
@ -1864,7 +1864,7 @@ pub impl Resolver {
*self.session.str_of(target));
match module_.import_resolutions.find(&target) {
Some(resolution) => {
Some(&resolution) => {
debug!("(building import directive) bumping \
reference");
resolution.outstanding_references += 1;
@ -2395,7 +2395,7 @@ pub impl Resolver {
(*ident, new_import_resolution);
}
None => { /* continue ... */ }
Some(dest_import_resolution) => {
Some(&dest_import_resolution) => {
// Merge the two import resolutions at a finer-grained
// level.
@ -2433,8 +2433,8 @@ pub impl Resolver {
module_.import_resolutions.insert
(*ident, dest_import_resolution);
}
Some(existing_import_resolution) => {
dest_import_resolution = *existing_import_resolution;
Some(&existing_import_resolution) => {
dest_import_resolution = existing_import_resolution;
}
}
@ -4294,19 +4294,18 @@ pub impl Resolver {
}
pat_struct(path, _, _) => {
let structs: &mut HashSet<def_id> = &mut self.structs;
match self.resolve_path(path, TypeNS, false, visitor) {
Some(def_ty(class_id))
if structs.contains(&class_id) => {
if self.structs.contains(&class_id) => {
let class_def = def_struct(class_id);
self.record_def(pattern.id, class_def);
}
Some(definition @ def_struct(class_id))
if structs.contains(&class_id) => {
Some(definition @ def_struct(class_id)) => {
assert!(self.structs.contains(&class_id));
self.record_def(pattern.id, definition);
}
Some(definition @ def_variant(_, variant_id))
if structs.contains(&variant_id) => {
if self.structs.contains(&variant_id) => {
self.record_def(pattern.id, definition);
}
result => {
@ -4608,12 +4607,12 @@ pub impl Resolver {
let search_result;
match namespace {
ValueNS => {
search_result = self.search_ribs(&mut self.value_ribs, ident,
search_result = self.search_ribs(self.value_ribs, ident,
span,
DontAllowCapturingSelf);
}
TypeNS => {
search_result = self.search_ribs(&mut self.type_ribs, ident,
search_result = self.search_ribs(self.type_ribs, ident,
span, AllowCapturingSelf);
}
}
@ -4803,15 +4802,14 @@ pub impl Resolver {
expr_struct(path, _, _) => {
// Resolve the path to the structure it goes to.
let structs: &mut HashSet<def_id> = &mut self.structs;
match self.resolve_path(path, TypeNS, false, visitor) {
Some(def_ty(class_id)) | Some(def_struct(class_id))
if structs.contains(&class_id) => {
if self.structs.contains(&class_id) => {
let class_def = def_struct(class_id);
self.record_def(expr.id, class_def);
}
Some(definition @ def_variant(_, class_id))
if structs.contains(&class_id) => {
if self.structs.contains(&class_id) => {
self.record_def(expr.id, definition);
}
_ => {
@ -4827,17 +4825,19 @@ pub impl Resolver {
expr_loop(_, Some(label)) => {
do self.with_label_rib {
let this = &mut *self;
let def_like = dl_def(def_label(expr.id));
let rib = this.label_ribs[this.label_ribs.len() - 1];
rib.bindings.insert(label, def_like);
{
let this = &mut *self;
let def_like = dl_def(def_label(expr.id));
let rib = this.label_ribs[this.label_ribs.len() - 1];
rib.bindings.insert(label, def_like);
}
visit_expr(expr, (), visitor);
}
}
expr_break(Some(label)) | expr_again(Some(label)) => {
match self.search_ribs(&mut self.label_ribs, label, expr.span,
match self.search_ribs(self.label_ribs, label, expr.span,
DontAllowCapturingSelf) {
None =>
self.session.span_err(expr.span,
@ -5248,7 +5248,7 @@ pub impl Resolver {
debug!("Import resolutions:");
for module_.import_resolutions.each |name, import_resolution| {
let mut value_repr;
let value_repr;
match import_resolution.target_for_namespace(ValueNS) {
None => { value_repr = ~""; }
Some(_) => {
@ -5257,7 +5257,7 @@ pub impl Resolver {
}
}
let mut type_repr;
let type_repr;
match import_resolution.target_for_namespace(TypeNS) {
None => { type_repr = ~""; }
Some(_) => {

File diff suppressed because it is too large Load diff

View file

@ -280,7 +280,7 @@ pub fn trans_opt(bcx: block, o: &Opt) -> opt_result {
pub fn variant_opt(bcx: block, pat_id: ast::node_id)
-> Opt {
let ccx = bcx.ccx();
match *ccx.tcx.def_map.get(&pat_id) {
match ccx.tcx.def_map.get_copy(&pat_id) {
ast::def_variant(enum_id, var_id) => {
let variants = ty::enum_variants(ccx.tcx, enum_id);
for vec::each(*variants) |v| {
@ -516,7 +516,7 @@ pub fn enter_opt<'r>(bcx: block,
match p.node {
ast::pat_enum(*) |
ast::pat_ident(_, _, None) if pat_is_const(tcx.def_map, p) => {
let const_def = *tcx.def_map.get(&p.id);
let const_def = tcx.def_map.get_copy(&p.id);
let const_def_id = ast_util::def_id_of_def(const_def);
if opt_eq(tcx, &lit(ConstLit(const_def_id)), opt) {
Some(~[])
@ -552,7 +552,7 @@ pub fn enter_opt<'r>(bcx: block,
if opt_eq(tcx, &variant_opt(bcx, p.id), opt) {
// Look up the struct variant ID.
let struct_id;
match *tcx.def_map.get(&p.id) {
match tcx.def_map.get_copy(&p.id) {
ast::def_variant(_, found_struct_id) => {
struct_id = found_struct_id;
}
@ -865,7 +865,18 @@ pub fn extract_variant_args(bcx: block,
ExtractedBlock { vals: args, bcx: bcx }
}
fn match_datum(bcx: block, val: ValueRef, pat_id: ast::node_id) -> Datum {
//! Helper for converting from the ValueRef that we pass around in
//! the match code, which is always by ref, into a Datum. Eventually
//! we should just pass around a Datum and be done with it.
let ty = node_id_type(bcx, pat_id);
Datum {val: val, ty: ty, mode: datum::ByRef, source: RevokeClean}
}
pub fn extract_vec_elems(bcx: block,
pat_span: span,
pat_id: ast::node_id,
elem_count: uint,
slice: Option<uint>,
@ -873,9 +884,9 @@ pub fn extract_vec_elems(bcx: block,
count: ValueRef)
-> ExtractedBlock {
let _icx = bcx.insn_ctxt("match::extract_vec_elems");
let vec_datum = match_datum(bcx, val, pat_id);
let (bcx, base, len) = vec_datum.get_vec_base_and_len(bcx, pat_span, pat_id);
let vt = tvec::vec_types(bcx, node_id_type(bcx, pat_id));
let unboxed = load_if_immediate(bcx, val, vt.vec_ty);
let (base, len) = tvec::get_base_and_len(bcx, unboxed, vt.vec_ty);
let mut elems = do vec::from_fn(elem_count) |i| {
match slice {
@ -946,30 +957,28 @@ pub fn collect_record_or_struct_fields(bcx: block,
}
}
pub fn root_pats_as_necessary(bcx: block,
pub fn pats_require_rooting(bcx: block,
m: &[@Match],
col: uint)
-> bool {
vec::any(m, |br| {
let pat_id = br.pats[col].id;
let key = root_map_key {id: pat_id, derefs: 0u };
bcx.ccx().maps.root_map.contains_key(&key)
})
}
pub fn root_pats_as_necessary(mut bcx: block,
m: &[@Match],
col: uint,
val: ValueRef)
-> block {
let mut bcx = bcx;
for vec::each(m) |br| {
let pat_id = br.pats[col].id;
let key = root_map_key {id: pat_id, derefs: 0u };
match bcx.ccx().maps.root_map.find(&key) {
None => (),
Some(&root_info) => {
// Note: the scope_id will always be the id of the match. See
// the extended comment in rustc::middle::borrowck::preserve()
// for details (look for the case covering cat_discr).
let datum = Datum {val: val, ty: node_id_type(bcx, pat_id),
mode: ByRef, source: ZeroMem};
bcx = datum.root(bcx, root_info);
// If we kept going, we'd only re-root the same value, so
// return now.
return bcx;
}
if pat_id != 0 {
let datum = Datum {val: val, ty: node_id_type(bcx, pat_id),
mode: ByRef, source: ZeroMem};
bcx = datum.root_and_write_guard(bcx, br.pats[col].span, pat_id, 0);
}
}
return bcx;
@ -1113,7 +1122,8 @@ pub fn compare_values(cx: block,
pub fn store_non_ref_bindings(bcx: block,
data: &ArmData,
opt_temp_cleanups: Option<&mut ~[ValueRef]>)
-> block {
-> block
{
/*!
*
* For each copy/move binding, copy the value from the value
@ -1124,6 +1134,7 @@ pub fn store_non_ref_bindings(bcx: block,
*/
let mut bcx = bcx;
let mut opt_temp_cleanups = opt_temp_cleanups;
for data.bindings_map.each_value |&binding_info| {
match binding_info.trmode {
TrByValue(is_move, lldest) => {
@ -1138,9 +1149,10 @@ pub fn store_non_ref_bindings(bcx: block,
}
};
for opt_temp_cleanups.each |temp_cleanups| {
do opt_temp_cleanups.mutate |temp_cleanups| {
add_clean_temp_mem(bcx, lldest, binding_info.ty);
temp_cleanups.push(lldest);
temp_cleanups
}
}
TrByRef | TrByImplicitRef => {}
@ -1293,13 +1305,20 @@ pub fn compile_submatch(bcx: block,
vec::slice(vals, col + 1u, vals.len()));
let ccx = *bcx.fcx.ccx;
let mut pat_id = 0;
let mut pat_span = dummy_sp();
for vec::each(m) |br| {
// Find a real id (we're adding placeholder wildcard patterns, but
// each column is guaranteed to have at least one real pattern)
if pat_id == 0 { pat_id = br.pats[col].id; }
if pat_id == 0 {
pat_id = br.pats[col].id;
pat_span = br.pats[col].span;
}
}
bcx = root_pats_as_necessary(bcx, m, col, val);
// If we are not matching against an `@T`, we should not be
// required to root any values.
assert!(any_box_pat(m, col) || !pats_require_rooting(bcx, m, col));
let rec_fields = collect_record_or_struct_fields(bcx, m, col);
if rec_fields.len() > 0 {
let pat_ty = node_id_type(bcx, pat_id);
@ -1360,6 +1379,7 @@ pub fn compile_submatch(bcx: block,
// Unbox in case of a box field
if any_box_pat(m, col) {
bcx = root_pats_as_necessary(bcx, m, col, val);
let llbox = Load(bcx, val);
let box_no_addrspace = non_gc_box_cast(bcx, llbox);
let unboxed =
@ -1560,8 +1580,8 @@ pub fn compile_submatch(bcx: block,
vec_len_ge(_, i) => Some(i),
_ => None
};
let args = extract_vec_elems(opt_cx, pat_id, n, slice,
val, test_val);
let args = extract_vec_elems(opt_cx, pat_span, pat_id, n, slice,
val, test_val);
size = args.vals.len();
unpacked = /*bad*/copy args.vals;
opt_cx = args.bcx;

View file

@ -34,7 +34,6 @@ use lib;
use metadata::common::LinkMeta;
use metadata::{csearch, cstore, encoder};
use middle::astencode;
use middle::borrowck::RootInfo;
use middle::resolve;
use middle::trans::_match;
use middle::trans::adt;
@ -62,7 +61,6 @@ use middle::trans::type_of::*;
use middle::ty;
use util::common::indenter;
use util::ppaux::{Repr, ty_to_str};
use util::ppaux;
use core::hash;
use core::hashmap::{HashMap, HashSet};
@ -391,14 +389,16 @@ pub fn get_tydesc_simple(ccx: @CrateContext, t: ty::t) -> ValueRef {
pub fn get_tydesc(ccx: @CrateContext, t: ty::t) -> @mut tydesc_info {
match ccx.tydescs.find(&t) {
Some(&inf) => inf,
_ => {
ccx.stats.n_static_tydescs += 1u;
let inf = glue::declare_tydesc(ccx, t);
ccx.tydescs.insert(t, inf);
inf
}
Some(&inf) => {
return inf;
}
_ => { }
}
ccx.stats.n_static_tydescs += 1u;
let inf = glue::declare_tydesc(ccx, t);
ccx.tydescs.insert(t, inf);
return inf;
}
pub fn set_optimize_for_size(f: ValueRef) {
@ -885,23 +885,22 @@ pub fn need_invoke(bcx: block) -> bool {
// Walk the scopes to look for cleanups
let mut cur = bcx;
loop {
let current = &mut *cur;
let kind = &mut *current.kind;
match *kind {
block_scope(ref mut inf) => {
for vec::each((*inf).cleanups) |cleanup| {
match *cleanup {
clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) => {
if cleanup_type == normal_exit_and_unwind {
return true;
match cur.kind {
block_scope(inf) => {
let inf = &mut *inf; // FIXME(#5074) workaround old borrowck
for vec::each(inf.cleanups) |cleanup| {
match *cleanup {
clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) => {
if cleanup_type == normal_exit_and_unwind {
return true;
}
}
}
}
}
}
}
_ => ()
_ => ()
}
cur = match current.parent {
cur = match cur.parent {
Some(next) => next,
None => return false
}
@ -923,11 +922,13 @@ pub fn in_lpad_scope_cx(bcx: block, f: &fn(si: &mut scope_info)) {
let mut bcx = bcx;
loop {
{
// FIXME #4280: Borrow check bug workaround.
let kind: &mut block_kind = &mut *bcx.kind;
match *kind {
block_scope(ref mut inf) => {
if inf.cleanups.len() > 0u || bcx.parent.is_none() {
match bcx.kind {
block_scope(inf) => {
let len = { // FIXME(#5074) workaround old borrowck
let inf = &mut *inf;
inf.cleanups.len()
};
if len > 0u || bcx.parent.is_none() {
f(inf);
return;
}
@ -989,57 +990,30 @@ pub fn get_landing_pad(bcx: block) -> BasicBlockRef {
return pad_bcx.llbb;
}
// Arranges for the value found in `*root_loc` to be dropped once the scope
// associated with `scope_id` exits. This is used to keep boxes live when
// there are extant region pointers pointing at the interior.
//
// Note that `root_loc` is not the value itself but rather a pointer to the
// value. Generally it in alloca'd value. The reason for this is that the
// value is initialized in an inner block but may be freed in some outer
// block, so an SSA value that is valid in the inner block may not be valid in
// the outer block. In fact, the inner block may not even execute. Rather
// than generate the full SSA form, we just use an alloca'd value.
pub fn add_root_cleanup(bcx: block,
root_info: RootInfo,
root_loc: ValueRef,
ty: ty::t) {
debug!("add_root_cleanup(bcx=%s, \
scope=%d, \
freezes=%?, \
root_loc=%s, \
ty=%s)",
bcx.to_str(),
root_info.scope,
root_info.freezes,
val_str(bcx.ccx().tn, root_loc),
ppaux::ty_to_str(bcx.ccx().tcx, ty));
let bcx_scope = find_bcx_for_scope(bcx, root_info.scope);
if root_info.freezes {
add_clean_frozen_root(bcx_scope, root_loc, ty);
} else {
add_clean_temp_mem(bcx_scope, root_loc, ty);
}
fn find_bcx_for_scope(bcx: block, scope_id: ast::node_id) -> block {
let mut bcx_sid = bcx;
loop {
bcx_sid = match bcx_sid.node_info {
Some(NodeInfo { id, _ }) if id == scope_id => {
pub fn find_bcx_for_scope(bcx: block, scope_id: ast::node_id) -> block {
let mut bcx_sid = bcx;
loop {
bcx_sid = match bcx_sid.node_info {
Some(NodeInfo { id, _ }) if id == scope_id => {
return bcx_sid
}
_ => {
match bcx_sid.parent {
None => bcx.tcx().sess.bug(
fmt!("no enclosing scope with id %d", scope_id)),
Some(bcx_par) => bcx_par
// FIXME(#6268, #6248) hacky cleanup for nested method calls
Some(NodeInfo { callee_id: Some(id), _ }) if id == scope_id => {
return bcx_sid
}
_ => {
match bcx_sid.parent {
None => bcx.tcx().sess.bug(
fmt!("no enclosing scope with id %d", scope_id)),
Some(bcx_par) => bcx_par
}
}
}
}
}
}
}
pub fn do_spill(bcx: block, v: ValueRef, t: ty::t) -> ValueRef {
if ty::type_is_bot(t) {
@ -1160,7 +1134,7 @@ pub fn trans_stmt(cx: block, s: &ast::stmt) -> block {
let _icx = cx.insn_ctxt("trans_stmt");
debug!("trans_stmt(%s)", stmt_to_str(s, cx.tcx().sess.intr()));
if !cx.sess().no_asm_comments() {
if cx.sess().asm_comments() {
add_span_comment(cx, s.span, stmt_to_str(s, cx.ccx().sess.intr()));
}
@ -1220,7 +1194,7 @@ pub fn new_block(cx: fn_ctxt, parent: Option<block>, kind: block_kind,
}
pub fn simple_block_scope() -> block_kind {
block_scope(scope_info {
block_scope(@mut scope_info {
loop_break: None,
loop_label: None,
cleanups: ~[],
@ -1248,7 +1222,7 @@ pub fn loop_scope_block(bcx: block,
loop_label: Option<ident>,
n: ~str,
opt_node_info: Option<NodeInfo>) -> block {
return new_block(bcx.fcx, Some(bcx), block_scope(scope_info {
return new_block(bcx.fcx, Some(bcx), block_scope(@mut scope_info {
loop_break: Some(loop_break),
loop_label: loop_label,
cleanups: ~[],
@ -1284,7 +1258,7 @@ pub fn trans_block_cleanups(bcx: block, cleanups: ~[cleanup]) -> block {
}
pub fn trans_block_cleanups_(bcx: block,
cleanups: ~[cleanup],
cleanups: &[cleanup],
/* cleanup_cx: block, */
is_lpad: bool) -> block {
let _icx = bcx.insn_ctxt("trans_block_cleanups");
@ -1326,28 +1300,28 @@ pub fn cleanup_and_leave(bcx: block,
@fmt!("cleanup_and_leave(%s)", cur.to_str()));
}
{
// FIXME #4280: Borrow check bug workaround.
let kind: &mut block_kind = &mut *cur.kind;
match *kind {
block_scope(ref mut inf) if !inf.cleanups.is_empty() => {
for vec::find((*inf).cleanup_paths,
|cp| cp.target == leave).each |cp| {
Br(bcx, cp.dest);
return;
}
let sub_cx = sub_block(bcx, ~"cleanup");
Br(bcx, sub_cx.llbb);
inf.cleanup_paths.push(cleanup_path {
target: leave,
dest: sub_cx.llbb
});
match cur.kind {
block_scope(inf) if !inf.empty_cleanups() => {
let (sub_cx, inf_cleanups) = {
let inf = &mut *inf; // FIXME(#5074) workaround stage0
for vec::find((*inf).cleanup_paths,
|cp| cp.target == leave).each |cp| {
Br(bcx, cp.dest);
return;
}
let sub_cx = sub_block(bcx, ~"cleanup");
Br(bcx, sub_cx.llbb);
inf.cleanup_paths.push(cleanup_path {
target: leave,
dest: sub_cx.llbb
});
(sub_cx, copy inf.cleanups)
};
bcx = trans_block_cleanups_(sub_cx,
block_cleanups(cur),
inf_cleanups,
is_lpad);
}
_ => ()
}
_ => ()
}
match upto {
@ -2080,7 +2054,7 @@ pub fn trans_tuple_struct(ccx: @CrateContext,
fcx.llretptr.get(),
0,
i);
let llarg = match *fcx.llargs.get(&field.node.id) {
let llarg = match fcx.llargs.get_copy(&field.node.id) {
local_mem(x) => x,
_ => {
ccx.tcx.sess.bug(~"trans_tuple_struct: llarg wasn't \
@ -2120,7 +2094,7 @@ pub fn trans_enum_def(ccx: @CrateContext, enum_definition: &ast::enum_def,
pub fn trans_item(ccx: @CrateContext, item: &ast::item) {
let _icx = ccx.insn_ctxt("trans_item");
let path = match *ccx.tcx.items.get(&item.id) {
let path = match ccx.tcx.items.get_copy(&item.id) {
ast_map::node_item(_, p) => p,
// tjc: ?
_ => fail!(~"trans_item"),
@ -2302,7 +2276,7 @@ pub fn create_entry_wrapper(ccx: @CrateContext,
// Call main.
let lloutputarg = C_null(T_ptr(T_i8()));
let llenvarg = unsafe { llvm::LLVMGetParam(llfdecl, 1 as c_uint) };
let mut args = ~[lloutputarg, llenvarg];
let args = ~[lloutputarg, llenvarg];
let llresult = Call(bcx, main_llfn, args);
Store(bcx, llresult, fcx.llretptr.get());
@ -2413,7 +2387,7 @@ pub fn fill_fn_pair(bcx: block, pair: ValueRef, llfn: ValueRef,
}
pub fn item_path(ccx: @CrateContext, i: @ast::item) -> path {
let base = match *ccx.tcx.items.get(&i.id) {
let base = match ccx.tcx.items.get_copy(&i.id) {
ast_map::node_item(_, p) => p,
// separate map for paths?
_ => fail!(~"item_path")
@ -2428,7 +2402,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::node_id) -> ValueRef {
Some(&v) => v,
None => {
let mut exprt = false;
let val = match *tcx.items.get(&id) {
let val = match tcx.items.get_copy(&id) {
ast_map::node_item(i, pth) => {
let my_path = vec::append(/*bad*/copy *pth,
~[path_name(i.ident)]);

View file

@ -846,7 +846,7 @@ pub fn _UndefReturn(cx: block, Fn: ValueRef) -> ValueRef {
pub fn add_span_comment(bcx: block, sp: span, text: &str) {
let ccx = bcx.ccx();
if !ccx.sess.no_asm_comments() {
if ccx.sess.asm_comments() {
let s = fmt!("%s (%s)", text, ccx.sess.codemap.span_to_str(sp));
debug!("%s", copy s);
add_comment(bcx, s);
@ -856,7 +856,7 @@ pub fn add_span_comment(bcx: block, sp: span, text: &str) {
pub fn add_comment(bcx: block, text: &str) {
unsafe {
let ccx = bcx.ccx();
if !ccx.sess.no_asm_comments() {
if ccx.sess.asm_comments() {
let sanitized = str::replace(text, ~"$", ~"");
let comment_text = ~"# " +
str::replace(sanitized, ~"\n", ~"\n\t# ");

View file

@ -39,7 +39,6 @@ use middle::trans::monomorphize;
use middle::trans::type_of;
use middle::ty;
use middle::typeck;
use util::common::indenter;
use util::ppaux::Repr;
use syntax::ast;
@ -340,16 +339,12 @@ pub fn trans_method_call(in_cx: block,
node_id_type(in_cx, call_ex.callee_id),
expr_ty(in_cx, call_ex),
|cx| {
match cx.ccx().maps.method_map.find(&call_ex.id) {
match cx.ccx().maps.method_map.find_copy(&call_ex.id) {
Some(origin) => {
debug!("origin for %s: %s",
call_ex.repr(in_cx.tcx()),
origin.repr(in_cx.tcx()));
// FIXME(#5562): removing this copy causes a segfault
// before stage2
let origin = /*bad*/ copy *origin;
meth::trans_method_callee(cx,
call_ex.callee_id,
rcvr,
@ -687,7 +682,6 @@ pub fn trans_arg_expr(bcx: block,
self_mode,
arg_expr.repr(bcx.tcx()),
ret_flag.map(|v| bcx.val_str(*v)));
let _indenter = indenter();
// translate the arg expr to a datum
let arg_datumblock = match ret_flag {
@ -722,7 +716,7 @@ pub fn trans_arg_expr(bcx: block,
}
}
};
let mut arg_datum = arg_datumblock.datum;
let arg_datum = arg_datumblock.datum;
let bcx = arg_datumblock.bcx;
debug!(" arg datum: %s", arg_datum.to_str(bcx.ccx()));

View file

@ -208,7 +208,6 @@ pub fn store_environment(bcx: block,
// allocate closure in the heap
let Result {bcx: bcx, val: llbox} = allocate_cbox(bcx, sigil, cdata_ty);
let mut temp_cleanups = ~[];
// cbox_ty has the form of a tuple: (a, b, c) we want a ptr to a
// tuple. This could be a ptr in uniq or a box or on stack,
@ -224,7 +223,7 @@ pub fn store_environment(bcx: block,
for vec::eachi(bound_values) |i, bv| {
debug!("Copy %s into closure", bv.to_str(ccx));
if !ccx.sess.no_asm_comments() {
if ccx.sess.asm_comments() {
add_comment(bcx, fmt!("Copy %s into closure",
bv.to_str(ccx)));
}
@ -244,9 +243,6 @@ pub fn store_environment(bcx: block,
}
}
for vec::each(temp_cleanups) |cleanup| {
revoke_clean(bcx, *cleanup);
}
ClosureResult { llbox: llbox, cdata_ty: cdata_ty, bcx: bcx }
}
@ -424,7 +420,7 @@ pub fn trans_expr_fn(bcx: block,
let Result {bcx: bcx, val: closure} = match sigil {
ast::BorrowedSigil | ast::ManagedSigil | ast::OwnedSigil => {
let cap_vars = *ccx.maps.capture_map.get(&user_id);
let cap_vars = ccx.maps.capture_map.get_copy(&user_id);
let ret_handle = match is_loop_body {Some(x) => x,
None => None};
let ClosureResult {llbox, cdata_ty, bcx}

View file

@ -27,18 +27,18 @@ use middle::resolve;
use middle::trans::adt;
use middle::trans::base;
use middle::trans::build;
use middle::trans::callee;
use middle::trans::datum;
use middle::trans::debuginfo;
use middle::trans::expr;
use middle::trans::glue;
use middle::trans::reachable;
use middle::trans::shape;
use middle::trans::type_of;
use middle::trans::type_use;
use middle::trans::write_guard;
use middle::ty::substs;
use middle::ty;
use middle::typeck;
use middle::borrowck::root_map_key;
use util::ppaux::{Repr};
use core::cast::transmute;
@ -467,28 +467,35 @@ pub fn add_clean_temp_mem(bcx: block, val: ValueRef, t: ty::t) {
scope_clean_changed(scope_info);
}
}
pub fn add_clean_frozen_root(bcx: block, val: ValueRef, t: ty::t) {
debug!("add_clean_frozen_root(%s, %s, %s)",
bcx.to_str(), val_str(bcx.ccx().tn, val),
t.repr(bcx.tcx()));
let (root, rooted) = root_for_cleanup(bcx, val, t);
let cleanup_type = cleanup_type(bcx.tcx(), t);
pub fn add_clean_return_to_mut(bcx: block,
root_key: root_map_key,
frozen_val_ref: ValueRef,
bits_val_ref: ValueRef,
filename_val: ValueRef,
line_val: ValueRef) {
//! When an `@mut` has been frozen, we have to
//! call the lang-item `return_to_mut` when the
//! freeze goes out of scope. We need to pass
//! in both the value which was frozen (`frozen_val`) and
//! the value (`bits_val_ref`) which was returned when the
//! box was frozen initially. Here, both `frozen_val_ref` and
//! `bits_val_ref` are in fact pointers to stack slots.
debug!("add_clean_return_to_mut(%s, %s, %s)",
bcx.to_str(),
val_str(bcx.ccx().tn, frozen_val_ref),
val_str(bcx.ccx().tn, bits_val_ref));
do in_scope_cx(bcx) |scope_info| {
scope_info.cleanups.push(
clean_temp(val, |bcx| {
let bcx = callee::trans_lang_call(
bcx,
bcx.tcx().lang_items.return_to_mut_fn(),
~[
build::Load(bcx,
build::PointerCast(bcx,
root,
T_ptr(T_ptr(T_i8()))))
],
expr::Ignore
);
glue::drop_ty_root(bcx, root, rooted, t)
}, cleanup_type));
clean_temp(
frozen_val_ref,
|bcx| write_guard::return_to_mut(bcx,
root_key,
frozen_val_ref,
bits_val_ref,
filename_val,
line_val),
normal_exit_only));
scope_clean_changed(scope_info);
}
}
@ -516,6 +523,7 @@ pub fn add_clean_free(cx: block, ptr: ValueRef, heap: heap) {
// drop glue checks whether it is zero.
pub fn revoke_clean(cx: block, val: ValueRef) {
do in_scope_cx(cx) |scope_info| {
let scope_info = &mut *scope_info; // FIXME(#5074) workaround borrowck
let cleanup_pos = vec::position(
scope_info.cleanups,
|cu| match *cu {
@ -534,9 +542,9 @@ pub fn revoke_clean(cx: block, val: ValueRef) {
}
pub fn block_cleanups(bcx: block) -> ~[cleanup] {
match *bcx.kind {
match bcx.kind {
block_non_scope => ~[],
block_scope(ref mut inf) => /*bad*/copy inf.cleanups
block_scope(inf) => /*bad*/copy inf.cleanups
}
}
@ -545,7 +553,7 @@ pub enum block_kind {
// cleaned up. May correspond to an actual block in the language, but also
// to an implicit scope, for example, calls introduce an implicit scope in
// which the arguments are evaluated and cleaned up.
block_scope(scope_info),
block_scope(@mut scope_info),
// A non-scope block is a basic block created as a translation artifact
// from translating code that expresses conditional logic rather than by
@ -568,19 +576,29 @@ pub struct scope_info {
landing_pad: Option<BasicBlockRef>,
}
pub impl scope_info {
fn empty_cleanups(&mut self) -> bool {
self.cleanups.is_empty()
}
}
pub trait get_node_info {
fn info(&self) -> Option<NodeInfo>;
}
impl get_node_info for @ast::expr {
fn info(&self) -> Option<NodeInfo> {
Some(NodeInfo { id: self.id, span: self.span })
Some(NodeInfo {id: self.id,
callee_id: Some(self.callee_id),
span: self.span})
}
}
impl get_node_info for ast::blk {
fn info(&self) -> Option<NodeInfo> {
Some(NodeInfo { id: self.node.id, span: self.span })
Some(NodeInfo {id: self.node.id,
callee_id: None,
span: self.span})
}
}
@ -592,6 +610,7 @@ impl get_node_info for Option<@ast::expr> {
pub struct NodeInfo {
id: ast::node_id,
callee_id: Option<ast::node_id>,
span: span
}
@ -611,7 +630,7 @@ pub struct block_ {
unreachable: bool,
parent: Option<block>,
// The 'kind' of basic block this is.
kind: @mut block_kind,
kind: block_kind,
// Is this block part of a landing pad?
is_lpad: bool,
// info about the AST node this block originated from, if any
@ -630,7 +649,7 @@ pub fn block_(llbb: BasicBlockRef, parent: Option<block>, kind: block_kind,
terminated: false,
unreachable: false,
parent: parent,
kind: @mut kind,
kind: kind,
is_lpad: is_lpad,
node_info: node_info,
fcx: fcx
@ -678,21 +697,17 @@ pub fn val_str(tn: @TypeNames, v: ValueRef) -> @str {
return ty_str(tn, val_ty(v));
}
pub fn in_scope_cx(cx: block, f: &fn(si: &mut scope_info)) {
pub fn in_scope_cx(cx: block, f: &fn(si: @mut scope_info)) {
let mut cur = cx;
loop {
{
// XXX: Borrow check bug workaround.
let kind: &mut block_kind = &mut *cur.kind;
match *kind {
block_scope(ref mut inf) => {
debug!("in_scope_cx: selected cur=%s (cx=%s)",
cur.to_str(), cx.to_str());
f(inf);
return;
}
_ => ()
match cur.kind {
block_scope(inf) => {
debug!("in_scope_cx: selected cur=%s (cx=%s)",
cur.to_str(), cx.to_str());
f(inf);
return;
}
_ => ()
}
cur = block_parent(cur);
}
@ -1517,6 +1532,15 @@ pub fn dummy_substs(tps: ~[ty::t]) -> ty::substs {
}
}
pub fn filename_and_line_num_from_span(bcx: block,
span: span) -> (ValueRef, ValueRef) {
let loc = bcx.sess().parse_sess.cm.lookup_char_pos(span.lo);
let filename_cstr = C_cstr(bcx.ccx(), @/*bad*/copy loc.file.name);
let filename = build::PointerCast(bcx, filename_cstr, T_ptr(T_i8()));
let line = C_int(bcx.ccx(), loc.line as int);
(filename, line)
}
// Casts a Rust bool value to an i1.
pub fn bool_to_i1(bcx: block, llval: ValueRef) -> ValueRef {
build::ICmp(bcx, lib::llvm::IntNE, llval, C_bool(false))

View file

@ -157,7 +157,7 @@ pub fn get_const_val(cx: @CrateContext, def_id: ast::def_id) -> ValueRef {
if !ast_util::is_local(def_id) {
def_id = inline::maybe_instantiate_inline(cx, def_id, true);
}
match *cx.tcx.items.get(&def_id.node) {
match cx.tcx.items.get_copy(&def_id.node) {
ast_map::node_item(@ast::item {
node: ast::item_const(_, subexpr), _
}, _) => {
@ -166,7 +166,7 @@ pub fn get_const_val(cx: @CrateContext, def_id: ast::def_id) -> ValueRef {
_ => cx.tcx.sess.bug(~"expected a const to be an item")
}
}
*cx.const_values.get(&def_id.node)
cx.const_values.get_copy(&def_id.node)
}
pub fn const_expr(cx: @CrateContext, e: @ast::expr) -> ValueRef {
@ -194,18 +194,19 @@ pub fn const_expr(cx: @CrateContext, e: @ast::expr) -> ValueRef {
match adj.autoref {
None => { }
Some(ref autoref) => {
assert!(autoref.region == ty::re_static);
assert!(autoref.mutbl != ast::m_mutbl);
// Don't copy data to do a deref+ref.
let llptr = match maybe_ptr {
Some(ptr) => ptr,
None => const_addr_of(cx, llconst)
};
match autoref.kind {
ty::AutoPtr => {
match *autoref {
ty::AutoUnsafe(m) |
ty::AutoPtr(ty::re_static, m) => {
assert!(m != ast::m_mutbl);
llconst = llptr;
}
ty::AutoBorrowVec => {
ty::AutoBorrowVec(ty::re_static, m) => {
assert!(m != ast::m_mutbl);
let size = machine::llsize_of(cx,
val_ty(llconst));
assert!(abi::slice_elt_base == 0);
@ -550,7 +551,7 @@ pub fn trans_const(ccx: @CrateContext, _e: @ast::expr, id: ast::node_id) {
let g = base::get_item_val(ccx, id);
// At this point, get_item_val has already translated the
// constant's initializer to determine its LLVM type.
let v = *ccx.const_values.get(&id);
let v = ccx.const_values.get_copy(&id);
llvm::LLVMSetInitializer(g, v);
llvm::LLVMSetGlobalConstant(g, True);
}

View file

@ -193,7 +193,7 @@ pub fn trans_log(log_ex: @ast::expr,
};
let global = if ccx.module_data.contains_key(&modname) {
*ccx.module_data.get(&modname)
ccx.module_data.get_copy(&modname)
} else {
let s = link::mangle_internal_name_by_path_and_seq(
ccx, modpath, ~"loglevel");
@ -243,8 +243,8 @@ pub fn trans_break_cont(bcx: block,
let mut unwind = bcx;
let mut target;
loop {
match *unwind.kind {
block_scope(scope_info {
match unwind.kind {
block_scope(@scope_info {
loop_break: Some(brk),
loop_label: l,
_
@ -333,7 +333,7 @@ pub fn trans_fail_expr(bcx: block,
bcx, expr::trans_to_datum(bcx, arg_expr));
if ty::type_is_str(arg_datum.ty) {
let (lldata, _lllen) = arg_datum.get_base_and_len(bcx);
let (lldata, _) = arg_datum.get_vec_base_and_len_no_root(bcx);
return trans_fail_value(bcx, sp_opt, lldata);
} else if bcx.unreachable || ty::type_is_bot(arg_datum.ty) {
return bcx;
@ -385,13 +385,7 @@ fn trans_fail_value(bcx: block,
pub fn trans_fail_bounds_check(bcx: block, sp: span,
index: ValueRef, len: ValueRef) -> block {
let _icx = bcx.insn_ctxt("trans_fail_bounds_check");
let ccx = bcx.ccx();
let loc = bcx.sess().parse_sess.cm.lookup_char_pos(sp.lo);
let line = C_int(ccx, loc.line as int);
let filename_cstr = C_cstr(bcx.ccx(), @/*bad*/copy loc.file.name);
let filename = PointerCast(bcx, filename_cstr, T_ptr(T_i8()));
let (filename, line) = filename_and_line_num_from_span(bcx, sp);
let args = ~[filename, line, index, len];
let bcx = callee::trans_lang_call(
bcx, bcx.tcx().lang_items.fail_bounds_check_fn(), args, expr::Ignore);

View file

@ -87,17 +87,16 @@
use lib;
use lib::llvm::ValueRef;
use middle::borrowck::{RootInfo, root_map_key};
use middle::trans::adt;
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::callee;
use middle::trans::common::*;
use middle::trans::common;
use middle::trans::expr;
use middle::trans::glue;
use middle::trans::tvec;
use middle::trans::type_of;
use middle::trans::write_guard;
use middle::ty;
use util::common::indenter;
use util::ppaux::ty_to_str;
@ -105,6 +104,7 @@ use util::ppaux::ty_to_str;
use core::container::Set; // XXX: this should not be necessary
use core::to_bytes;
use syntax::ast;
use syntax::codemap::span;
use syntax::parse::token::special_idents;
#[deriving(Eq)]
@ -516,59 +516,6 @@ pub impl Datum {
}
}
fn root(&self, bcx: block, root_info: RootInfo) -> block {
/*!
*
* In some cases, borrowck will decide that an @T/@[]/@str
* value must be rooted for the program to be safe. In that
* case, we will call this function, which will stash a copy
* away until we exit the scope `scope_id`. */
debug!("root(scope_id=%?, freezes=%?, self=%?)",
root_info.scope, root_info.freezes, self.to_str(bcx.ccx()));
if bcx.sess().trace() {
trans_trace(
bcx, None,
@fmt!("preserving until end of scope %d",
root_info.scope));
}
let scratch = scratch_datum(bcx, self.ty, true);
self.copy_to_datum(bcx, INIT, scratch);
add_root_cleanup(bcx, root_info, scratch.val, scratch.ty);
// If we need to freeze the box, do that now.
if root_info.freezes {
callee::trans_lang_call(
bcx,
bcx.tcx().lang_items.borrow_as_imm_fn(),
~[
Load(bcx,
PointerCast(bcx,
scratch.val,
T_ptr(T_ptr(T_i8()))))
],
expr::Ignore)
} else {
bcx
}
}
fn perform_write_guard(&self, bcx: block) -> block {
// Create scratch space, but do not root it.
let llval = match self.mode {
ByValue => self.val,
ByRef => Load(bcx, self.val),
};
callee::trans_lang_call(
bcx,
bcx.tcx().lang_items.check_not_borrowed_fn(),
~[ PointerCast(bcx, llval, T_ptr(T_i8())) ],
expr::Ignore)
}
fn drop_val(&self, bcx: block) -> block {
if !ty::type_needs_drop(bcx.tcx(), self.ty) {
return bcx;
@ -620,32 +567,20 @@ pub impl Datum {
fn try_deref(&self,
bcx: block, // block wherein to generate insn's
expr_id: ast::node_id, // id of expr being deref'd
span: span, // location where deref occurs
expr_id: ast::node_id, // id of deref expr
derefs: uint, // number of times deref'd already
is_auto: bool) // if true, only deref if auto-derefable
-> (Option<Datum>, block)
{
let ccx = bcx.ccx();
debug!("try_deref(expr_id=%d, derefs=%?, is_auto=%b, self=%?)",
debug!("try_deref(expr_id=%?, derefs=%?, is_auto=%b, self=%?)",
expr_id, derefs, is_auto, self.to_str(bcx.ccx()));
let _indenter = indenter();
// root the autoderef'd value, if necessary:
//
// (Note: root'd values are always boxes)
let key = root_map_key { id: expr_id, derefs: derefs };
let bcx = match ccx.maps.root_map.find(&key) {
None => bcx,
Some(&root_info) => self.root(bcx, root_info)
};
// Perform the write guard, if necessary.
//
// (Note: write-guarded values are always boxes)
let bcx = if ccx.maps.write_guard_map.contains(&key) {
self.perform_write_guard(bcx)
} else { bcx };
let bcx =
write_guard::root_and_write_guard(
self, bcx, span, expr_id, derefs);
match ty::get(self.ty).sty {
ty::ty_box(_) | ty::ty_uniq(_) => {
@ -755,10 +690,10 @@ pub impl Datum {
}
fn deref(&self, bcx: block,
expr: @ast::expr, // the expression whose value is being deref'd
expr: @ast::expr, // the deref expression
derefs: uint)
-> DatumBlock {
match self.try_deref(bcx, expr.id, derefs, false) {
match self.try_deref(bcx, expr.span, expr.id, derefs, false) {
(Some(lvres), bcx) => DatumBlock { bcx: bcx, datum: lvres },
(None, _) => {
bcx.ccx().sess.span_bug(expr.span,
@ -768,6 +703,7 @@ pub impl Datum {
}
fn autoderef(&self, bcx: block,
span: span,
expr_id: ast::node_id,
max: uint)
-> DatumBlock {
@ -782,7 +718,7 @@ pub impl Datum {
let mut bcx = bcx;
while derefs < max {
derefs += 1u;
match datum.try_deref(bcx, expr_id, derefs, true) {
match datum.try_deref(bcx, span, expr_id, derefs, true) {
(None, new_bcx) => { bcx = new_bcx; break }
(Some(datum_deref), new_bcx) => {
datum = datum_deref;
@ -798,8 +734,34 @@ pub impl Datum {
DatumBlock { bcx: bcx, datum: datum }
}
fn get_base_and_len(&self, bcx: block) -> (ValueRef, ValueRef) {
tvec::get_base_and_len(bcx, self.to_appropriate_llval(bcx), self.ty)
fn get_vec_base_and_len(&self,
mut bcx: block,
span: span,
expr_id: ast::node_id)
-> (block, ValueRef, ValueRef) {
//! Converts a vector into the slice pair. Performs rooting
//! and write guards checks.
// only imp't for @[] and @str, but harmless
bcx = write_guard::root_and_write_guard(self, bcx, span, expr_id, 0);
let (base, len) = self.get_vec_base_and_len_no_root(bcx);
(bcx, base, len)
}
fn get_vec_base_and_len_no_root(&self, bcx: block) -> (ValueRef, ValueRef) {
//! Converts a vector into the slice pair. Des not root
//! nor perform write guard checks.
let llval = self.to_appropriate_llval(bcx);
tvec::get_base_and_len(bcx, llval, self.ty)
}
fn root_and_write_guard(&self,
bcx: block,
span: span,
expr_id: ast::node_id,
derefs: uint) -> block {
write_guard::root_and_write_guard(self, bcx, span, expr_id, derefs)
}
fn to_result(&self, bcx: block) -> common::Result {

View file

@ -863,7 +863,7 @@ pub fn create_local_var(bcx: block, local: @ast::local)
bcx.tcx().sess.span_bug(local.span, "local is bound to something weird");
}
option::None => {
match *bcx.fcx.lllocals.get(&local.node.pat.id) {
match bcx.fcx.lllocals.get_copy(&local.node.pat.id) {
local_imm(v) => v,
_ => bcx.tcx().sess.span_bug(local.span, "local is bound to something weird")
}
@ -915,7 +915,7 @@ pub fn create_arg(bcx: block, arg: ast::arg, sp: span)
};
update_cache(cache, tg, argument_metadata(mdval));
let llptr = match *fcx.llargs.get(&arg.id) {
let llptr = match fcx.llargs.get_copy(&arg.id) {
local_mem(v) | local_imm(v) => v,
};
let declargs = ~[llmdnode(~[llptr]), mdnode];
@ -958,7 +958,7 @@ pub fn create_function(fcx: fn_ctxt) -> @Metadata<SubProgramMetadata> {
let sp = fcx.span.get();
debug!("%s", cx.sess.codemap.span_to_str(sp));
let (ident, ret_ty, id) = match *cx.tcx.items.get(&fcx.id) {
let (ident, ret_ty, id) = match cx.tcx.items.get_copy(&fcx.id) {
ast_map::node_item(item, _) => {
match item.node {
ast::item_fn(ref decl, _, _, _, _) => {

View file

@ -123,7 +123,6 @@ use back::abi;
use lib;
use lib::llvm::{ValueRef, TypeRef, llvm};
use metadata::csearch;
use middle::borrowck::root_map_key;
use middle::trans::_match;
use middle::trans::adt;
use middle::trans::asm;
@ -146,9 +145,9 @@ use middle::trans::type_of;
use middle::ty;
use middle::ty::struct_mutable_fields;
use middle::ty::{AutoPtr, AutoBorrowVec, AutoBorrowVecRef, AutoBorrowFn,
AutoDerefRef, AutoAddEnv};
AutoDerefRef, AutoAddEnv, AutoUnsafe};
use util::common::indenter;
use util::ppaux::ty_to_str;
use util::ppaux::Repr;
use core::cast::transmute;
use core::hashmap::HashMap;
@ -201,33 +200,34 @@ pub fn trans_to_datum(bcx: block, expr: @ast::expr) -> DatumBlock {
trans_to_datum_unadjusted(bcx, expr)
});
debug!("unadjusted datum: %s", datum.to_str(bcx.ccx()));
if adj.autoderefs > 0 {
let DatumBlock { bcx: new_bcx, datum: new_datum } =
datum.autoderef(bcx, expr.id, adj.autoderefs);
datum.autoderef(bcx, expr.span, expr.id, adj.autoderefs);
datum = new_datum;
bcx = new_bcx;
}
datum = match adj.autoref {
None => datum,
Some(ref autoref) => {
match autoref.kind {
AutoPtr => {
unpack_datum!(bcx, auto_ref(bcx, datum))
}
AutoBorrowVec => {
unpack_datum!(bcx, auto_slice(bcx, datum))
}
AutoBorrowVecRef => {
unpack_datum!(bcx, auto_slice_and_ref(bcx, datum))
}
AutoBorrowFn => {
// currently, all closure types are
// represented precisely the same, so no
// runtime adjustment is required:
datum
}
}
None => {
datum
}
Some(AutoUnsafe(*)) | // region + unsafe ptrs have same repr
Some(AutoPtr(*)) => {
unpack_datum!(bcx, auto_ref(bcx, datum))
}
Some(AutoBorrowVec(*)) => {
unpack_datum!(bcx, auto_slice(bcx, expr, datum))
}
Some(AutoBorrowVecRef(*)) => {
unpack_datum!(bcx, auto_slice_and_ref(bcx, expr, datum))
}
Some(AutoBorrowFn(*)) => {
// currently, all closure types are
// represented precisely the same, so no
// runtime adjustment is required:
datum
}
};
@ -241,7 +241,7 @@ pub fn trans_to_datum(bcx: block, expr: @ast::expr) -> DatumBlock {
DatumBlock {bcx: bcx, datum: datum.to_rptr(bcx)}
}
fn auto_slice(bcx: block, datum: Datum) -> DatumBlock {
fn auto_slice(bcx: block, expr: @ast::expr, datum: Datum) -> DatumBlock {
// This is not the most efficient thing possible; since slices
// are two words it'd be better if this were compiled in
// 'dest' mode, but I can't find a nice way to structure the
@ -250,7 +250,10 @@ pub fn trans_to_datum(bcx: block, expr: @ast::expr) -> DatumBlock {
let tcx = bcx.tcx();
let unit_ty = ty::sequence_element_type(tcx, datum.ty);
let (base, len) = datum.get_base_and_len(bcx);
// FIXME(#6272) need to distinguish "auto-slice" from explicit index?
let (bcx, base, len) =
datum.get_vec_base_and_len(bcx, expr.span, expr.id);
// this type may have a different region/mutability than the
// real one, but it will have the same runtime representation
@ -273,7 +276,7 @@ pub fn trans_to_datum(bcx: block, expr: @ast::expr) -> DatumBlock {
let tcx = bcx.tcx();
let closure_ty = expr_ty_adjusted(bcx, expr);
debug!("add_env(closure_ty=%s)", ty_to_str(tcx, closure_ty));
debug!("add_env(closure_ty=%s)", closure_ty.repr(tcx));
let scratch = scratch_datum(bcx, closure_ty, false);
let llfn = GEPi(bcx, scratch.val, [0u, abi::fn_field_code]);
assert!(datum.appropriate_mode() == ByValue);
@ -283,8 +286,10 @@ pub fn trans_to_datum(bcx: block, expr: @ast::expr) -> DatumBlock {
DatumBlock {bcx: bcx, datum: scratch}
}
fn auto_slice_and_ref(bcx: block, datum: Datum) -> DatumBlock {
let DatumBlock { bcx, datum } = auto_slice(bcx, datum);
fn auto_slice_and_ref(bcx: block,
expr: @ast::expr,
datum: Datum) -> DatumBlock {
let DatumBlock { bcx, datum } = auto_slice(bcx, expr, datum);
auto_ref(bcx, datum)
}
}
@ -562,7 +567,6 @@ fn trans_rvalue_stmt_unadjusted(bcx: block, expr: @ast::expr) -> block {
fn trans_rvalue_dps_unadjusted(bcx: block, expr: @ast::expr,
dest: Dest) -> block {
let mut bcx = bcx;
let _icx = bcx.insn_ctxt("trans_rvalue_dps_unadjusted");
let tcx = bcx.tcx();
@ -612,7 +616,7 @@ fn trans_rvalue_dps_unadjusted(bcx: block, expr: @ast::expr,
let sigil = ty::ty_closure_sigil(expr_ty);
debug!("translating fn_block %s with type %s",
expr_to_str(expr, tcx.sess.intr()),
ty_to_str(tcx, expr_ty));
expr_ty.repr(tcx));
return closure::trans_expr_fn(bcx, sigil, decl, body,
expr.id, expr.id,
None, dest);
@ -820,67 +824,35 @@ fn trans_lvalue_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
trace_span!(bcx, expr.span, @shorten(bcx.expr_to_str(expr)));
let unrooted_datum = unpack_datum!(bcx, unrooted(bcx, expr));
// If the lvalue must remain rooted, create a scratch datum, copy
// the lvalue in there, and then arrange for it to be cleaned up
// at the end of the scope with id `scope_id`:
let root_key = root_map_key { id: expr.id, derefs: 0u };
for bcx.ccx().maps.root_map.find(&root_key).each |&root_info| {
bcx = unrooted_datum.root(bcx, *root_info);
}
return DatumBlock {bcx: bcx, datum: unrooted_datum};
fn unrooted(bcx: block, expr: @ast::expr) -> DatumBlock {
/*!
*
* Translates `expr`. Note that this version generally
* yields an unrooted, unmoved version. Rooting and possible
* moves are dealt with above in trans_lvalue_unadjusted().
*
* One exception is if `expr` refers to a local variable,
* in which case the source may already be FromMovedLvalue
* if appropriate.
*/
let mut bcx = bcx;
match expr.node {
ast::expr_paren(e) => {
return unrooted(bcx, e);
}
ast::expr_path(_) => {
return trans_def_lvalue(bcx, expr, bcx.def(expr.id));
}
ast::expr_field(base, ident, _) => {
return trans_rec_field(bcx, base, ident);
}
ast::expr_index(base, idx) => {
return trans_index(bcx, expr, base, idx);
}
ast::expr_unary(ast::deref, base) => {
let basedatum = unpack_datum!(bcx, trans_to_datum(bcx, base));
return basedatum.deref(bcx, base, 0);
}
_ => {
bcx.tcx().sess.span_bug(
expr.span,
fmt!("trans_lvalue reached fall-through case: %?",
expr.node));
}
return match expr.node {
ast::expr_paren(e) => {
trans_lvalue_unadjusted(bcx, e)
}
}
ast::expr_path(_) => {
trans_def_lvalue(bcx, expr, bcx.def(expr.id))
}
ast::expr_field(base, ident, _) => {
trans_rec_field(bcx, base, ident)
}
ast::expr_index(base, idx) => {
trans_index(bcx, expr, base, idx)
}
ast::expr_unary(ast::deref, base) => {
let basedatum = unpack_datum!(bcx, trans_to_datum(bcx, base));
basedatum.deref(bcx, expr, 0)
}
_ => {
bcx.tcx().sess.span_bug(
expr.span,
fmt!("trans_lvalue reached fall-through case: %?",
expr.node));
}
};
fn trans_rec_field(bcx: block,
base: @ast::expr,
field: ast::ident) -> DatumBlock {
/*!
*
* Translates `base.field`. Note that this version always
* yields an unrooted, unmoved version. Rooting and possible
* moves are dealt with above in trans_lvalue_unadjusted().
*/
//! Translates `base.field`.
let mut bcx = bcx;
let _icx = bcx.insn_ctxt("trans_rec_field");
@ -904,12 +876,7 @@ fn trans_lvalue_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
index_expr: @ast::expr,
base: @ast::expr,
idx: @ast::expr) -> DatumBlock {
/*!
*
* Translates `base[idx]`. Note that this version always
* yields an unrooted, unmoved version. Rooting and possible
* moves are dealt with above in trans_lvalue_unadjusted().
*/
//! Translates `base[idx]`.
let _icx = bcx.insn_ctxt("trans_index");
let ccx = bcx.ccx();
@ -940,7 +907,8 @@ fn trans_lvalue_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
let scaled_ix = Mul(bcx, ix_val, vt.llunit_size);
base::maybe_name_value(bcx.ccx(), scaled_ix, ~"scaled_ix");
let mut (base, len) = base_datum.get_base_and_len(bcx);
let mut (bcx, base, len) =
base_datum.get_vec_base_and_len(bcx, index_expr.span, index_expr.id);
if ty::type_is_str(base_ty) {
// acccount for null terminator in the case of string
@ -972,14 +940,7 @@ fn trans_lvalue_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
def: ast::def)
-> DatumBlock
{
/*!
*
* Translates a reference to a path. Note that this version
* generally yields an unrooted, unmoved version. Rooting and
* possible moves are dealt with above in
* trans_lvalue_unadjusted(), with the caveat that local variables
* may already be in move mode.
*/
//! Translates a reference to a path.
let _icx = bcx.insn_ctxt("trans_def_lvalue");
let ccx = bcx.ccx();
@ -1087,6 +1048,9 @@ pub fn trans_local_var(bcx: block, def: ast::def) -> Datum {
}
};
debug!("def_self() reference, self_info.t=%s",
self_info.t.repr(bcx.tcx()));
// This cast should not be necessary. We should cast self *once*,
// but right now this conflicts with default methods.
let real_self_ty = monomorphize_type(bcx, self_info.t);
@ -1150,10 +1114,10 @@ pub fn with_field_tys<R>(tcx: ty::ctxt,
tcx.sess.bug(fmt!(
"cannot get field types from the enum type %s \
without a node ID",
ty_to_str(tcx, ty)));
ty.repr(tcx)));
}
Some(node_id) => {
match *tcx.def_map.get(&node_id) {
match tcx.def_map.get_copy(&node_id) {
ast::def_variant(enum_id, variant_id) => {
let variant_info = ty::enum_variant_with_id(
tcx, enum_id, variant_id);
@ -1172,7 +1136,7 @@ pub fn with_field_tys<R>(tcx: ty::ctxt,
_ => {
tcx.sess.bug(fmt!(
"cannot get field types from the type %s",
ty_to_str(tcx, ty)));
ty.repr(tcx)));
}
}
}
@ -1403,7 +1367,6 @@ fn trans_eager_binop(bcx: block,
lhs_datum: &Datum,
rhs_datum: &Datum)
-> DatumBlock {
let mut bcx = bcx;
let _icx = bcx.insn_ctxt("trans_eager_binop");
let lhs = lhs_datum.to_appropriate_llval(bcx);
@ -1573,7 +1536,7 @@ fn trans_overloaded_op(bcx: block,
ret_ty: ty::t,
dest: Dest)
-> block {
let origin = *bcx.ccx().maps.method_map.get(&expr.id);
let origin = bcx.ccx().maps.method_map.get_copy(&expr.id);
let fty = node_id_type(bcx, expr.callee_id);
callee::trans_call_inner(bcx,
expr.info(),

View file

@ -724,7 +724,7 @@ pub fn trans_intrinsic(ccx: @CrateContext,
let in_type_size = machine::llbitsize_of_real(ccx, llintype);
let out_type_size = machine::llbitsize_of_real(ccx, llouttype);
if in_type_size != out_type_size {
let sp = match *ccx.tcx.items.get(&ref_id.get()) {
let sp = match ccx.tcx.items.get_copy(&ref_id.get()) {
ast_map::node_expr(e) => e.span,
_ => fail!(~"transmute has non-expr arg"),
};

View file

@ -29,96 +29,101 @@ pub fn maybe_instantiate_inline(ccx: @CrateContext, fn_id: ast::def_id,
-> ast::def_id {
let _icx = ccx.insn_ctxt("maybe_instantiate_inline");
match ccx.external.find(&fn_id) {
Some(&Some(node_id)) => {
// Already inline
debug!("maybe_instantiate_inline(%s): already inline as node id %d",
ty::item_path_str(ccx.tcx, fn_id), node_id);
local_def(node_id)
}
Some(&None) => fn_id, // Not inlinable
None => { // Not seen yet
match csearch::maybe_get_item_ast(
Some(&Some(node_id)) => {
// Already inline
debug!("maybe_instantiate_inline(%s): already inline as node id %d",
ty::item_path_str(ccx.tcx, fn_id), node_id);
return local_def(node_id);
}
Some(&None) => {
return fn_id; // Not inlinable
}
None => {
// Not seen yet
}
}
let csearch_result =
csearch::maybe_get_item_ast(
ccx.tcx, fn_id,
|a,b,c,d| {
astencode::decode_inlined_item(a, b, ccx.maps,
/*bad*/ copy c, d)
}) {
csearch::not_found => {
});
return match csearch_result {
csearch::not_found => {
ccx.external.insert(fn_id, None);
fn_id
}
csearch::found(ast::ii_item(item)) => {
ccx.external.insert(fn_id, Some(item.id));
ccx.stats.n_inlines += 1;
if translate { trans_item(ccx, item); }
local_def(item.id)
}
csearch::found(ast::ii_foreign(item)) => {
ccx.external.insert(fn_id, Some(item.id));
local_def(item.id)
}
csearch::found_parent(parent_id, ast::ii_item(item)) => {
ccx.external.insert(parent_id, Some(item.id));
let mut my_id = 0;
match item.node {
ast::item_enum(_, _) => {
let vs_here = ty::enum_variants(ccx.tcx, local_def(item.id));
let vs_there = ty::enum_variants(ccx.tcx, parent_id);
for vec::each2(*vs_here, *vs_there) |here, there| {
if there.id == fn_id { my_id = here.id.node; }
ccx.external.insert(there.id, Some(here.id.node));
}
}
_ => ccx.sess.bug(~"maybe_instantiate_inline: item has a \
non-enum parent")
}
if translate { trans_item(ccx, item); }
local_def(my_id)
}
csearch::found_parent(_, _) => {
ccx.sess.bug(~"maybe_get_item_ast returned a found_parent \
with a non-item parent");
}
csearch::found(ast::ii_method(impl_did, mth)) => {
ccx.stats.n_inlines += 1;
ccx.external.insert(fn_id, Some(mth.id));
let impl_tpt = ty::lookup_item_type(ccx.tcx, impl_did);
let num_type_params =
impl_tpt.generics.type_param_defs.len() +
mth.generics.ty_params.len();
if translate && num_type_params == 0 {
let llfn = get_item_val(ccx, mth.id);
let path = vec::append(
ty::item_path(ccx.tcx, impl_did),
~[path_name(mth.ident)]);
let self_kind = match mth.self_ty.node {
ast::sty_static => no_self,
_ => {
let self_ty = ty::node_id_to_type(ccx.tcx,
mth.self_id);
debug!("calling inline trans_fn with self_ty %s",
ty_to_str(ccx.tcx, self_ty));
match mth.self_ty.node {
ast::sty_value => impl_owned_self(self_ty),
_ => impl_self(self_ty),
}
}
};
trans_fn(ccx,
path,
&mth.decl,
&mth.body,
llfn,
self_kind,
None,
mth.id,
Some(impl_did),
[]);
}
local_def(mth.id)
}
}
}
}
csearch::found(ast::ii_item(item)) => {
ccx.external.insert(fn_id, Some(item.id));
ccx.stats.n_inlines += 1;
if translate { trans_item(ccx, item); }
local_def(item.id)
}
csearch::found(ast::ii_foreign(item)) => {
ccx.external.insert(fn_id, Some(item.id));
local_def(item.id)
}
csearch::found_parent(parent_id, ast::ii_item(item)) => {
ccx.external.insert(parent_id, Some(item.id));
let mut my_id = 0;
match item.node {
ast::item_enum(_, _) => {
let vs_here = ty::enum_variants(ccx.tcx, local_def(item.id));
let vs_there = ty::enum_variants(ccx.tcx, parent_id);
for vec::each2(*vs_here, *vs_there) |here, there| {
if there.id == fn_id { my_id = here.id.node; }
ccx.external.insert(there.id, Some(here.id.node));
}
}
_ => ccx.sess.bug(~"maybe_instantiate_inline: item has a \
non-enum parent")
}
if translate { trans_item(ccx, item); }
local_def(my_id)
}
csearch::found_parent(_, _) => {
ccx.sess.bug(~"maybe_get_item_ast returned a found_parent \
with a non-item parent");
}
csearch::found(ast::ii_method(impl_did, mth)) => {
ccx.stats.n_inlines += 1;
ccx.external.insert(fn_id, Some(mth.id));
let impl_tpt = ty::lookup_item_type(ccx.tcx, impl_did);
let num_type_params =
impl_tpt.generics.type_param_defs.len() +
mth.generics.ty_params.len();
if translate && num_type_params == 0 {
let llfn = get_item_val(ccx, mth.id);
let path = vec::append(
ty::item_path(ccx.tcx, impl_did),
~[path_name(mth.ident)]);
let self_kind = match mth.self_ty.node {
ast::sty_static => no_self,
_ => {
let self_ty = ty::node_id_to_type(ccx.tcx,
mth.self_id);
debug!("calling inline trans_fn with self_ty %s",
ty_to_str(ccx.tcx, self_ty));
match mth.self_ty.node {
ast::sty_value => impl_owned_self(self_ty),
_ => impl_self(self_ty),
}
}
};
trans_fn(ccx,
path,
&mth.decl,
&mth.body,
llfn,
self_kind,
None,
mth.id,
Some(impl_did),
[]);
}
local_def(mth.id)
}
};
}

View file

@ -118,7 +118,7 @@ pub fn llalign_of(cx: @CrateContext, t: TypeRef) -> ValueRef {
// Computes the size of the data part of an enum.
pub fn static_size_of_enum(cx: @CrateContext, t: ty::t) -> uint {
if cx.enum_sizes.contains_key(&t) {
return *cx.enum_sizes.get(&t);
return cx.enum_sizes.get_copy(&t);
}
debug!("static_size_of_enum %s", ty_to_str(cx.tcx, t));

View file

@ -44,6 +44,11 @@ pub fn trans_impl(ccx: @CrateContext, path: path, name: ast::ident,
methods: &[@ast::method], generics: &ast::Generics,
self_ty: Option<ty::t>, id: ast::node_id) {
let _icx = ccx.insn_ctxt("impl::trans_impl");
let tcx = ccx.tcx;
debug!("trans_impl(path=%s, name=%s, self_ty=%s, id=%?)",
path.repr(tcx), name.repr(tcx), self_ty.repr(tcx), id);
if !generics.ty_params.is_empty() { return; }
let sub_path = vec::append_one(path, path_name(name));
for vec::each(methods) |method| {
@ -307,7 +312,7 @@ pub fn trans_static_method_callee(bcx: block,
};
let mname = if method_id.crate == ast::local_crate {
match *bcx.tcx().items.get(&method_id.node) {
match bcx.tcx().items.get_copy(&method_id.node) {
ast_map::node_trait_method(trait_method, _, _) => {
ast_util::trait_method_to_ty_method(trait_method).ident
}
@ -324,7 +329,7 @@ pub fn trans_static_method_callee(bcx: block,
name=%s", method_id, callee_id, *ccx.sess.str_of(mname));
let vtbls = resolve_vtables_in_fn_ctxt(
bcx.fcx, *ccx.maps.vtable_map.get(&callee_id));
bcx.fcx, ccx.maps.vtable_map.get_copy(&callee_id));
match vtbls[bound_index] {
typeck::vtable_static(impl_did, ref rcvr_substs, rcvr_origins) => {
@ -362,7 +367,7 @@ pub fn method_from_methods(ms: &[@ast::method], name: ast::ident)
pub fn method_with_name(ccx: @CrateContext, impl_id: ast::def_id,
name: ast::ident) -> ast::def_id {
if impl_id.crate == ast::local_crate {
match *ccx.tcx.items.get(&impl_id.node) {
match ccx.tcx.items.get_copy(&impl_id.node) {
ast_map::node_item(@ast::item {
node: ast::item_impl(_, _, _, ref ms),
_
@ -380,7 +385,7 @@ pub fn method_with_name_or_default(ccx: @CrateContext,
impl_id: ast::def_id,
name: ast::ident) -> ast::def_id {
if impl_id.crate == ast::local_crate {
match *ccx.tcx.items.get(&impl_id.node) {
match ccx.tcx.items.get_copy(&impl_id.node) {
ast_map::node_item(@ast::item {
node: ast::item_impl(_, _, _, ref ms), _
}, _) => {

View file

@ -100,12 +100,14 @@ pub fn monomorphic_fn(ccx: @CrateContext,
let tpt = ty::lookup_item_type(ccx.tcx, fn_id);
let llitem_ty = tpt.ty;
let map_node = session::expect(ccx.sess, ccx.tcx.items.find(&fn_id.node),
|| fmt!("While monomorphizing %?, couldn't find it in the item map \
(may have attempted to monomorphize an item defined in a different \
crate?)", fn_id));
let map_node = session::expect(
ccx.sess,
ccx.tcx.items.find_copy(&fn_id.node),
|| fmt!("While monomorphizing %?, couldn't find it in the item map \
(may have attempted to monomorphize an item \
defined in a different crate?)", fn_id));
// Get the path so that we can create a symbol
let (pt, name, span) = match *map_node {
let (pt, name, span) = match map_node {
ast_map::node_item(i, pt) => (pt, i.ident, i.span),
ast_map::node_variant(ref v, enm, pt) => (pt, (*v).node.name, enm.span),
ast_map::node_method(m, _, pt) => (pt, m.ident, m.span),
@ -134,6 +136,9 @@ pub fn monomorphic_fn(ccx: @CrateContext,
ast_map::node_local(*) => {
ccx.tcx.sess.bug(~"Can't monomorphize a local")
}
ast_map::node_callee_scope(*) => {
ccx.tcx.sess.bug(~"Can't monomorphize a callee-scope")
}
ast_map::node_struct_ctor(_, i, pt) => (pt, i.ident, i.span)
};
@ -182,7 +187,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
self_ty: impl_ty_opt
});
let lldecl = match *map_node {
let lldecl = match map_node {
ast_map::node_item(i@@ast::item {
node: ast::item_fn(ref decl, _, _, _, ref body),
_
@ -266,6 +271,7 @@ pub fn monomorphic_fn(ccx: @CrateContext,
ast_map::node_trait_method(*) |
ast_map::node_arg(*) |
ast_map::node_block(*) |
ast_map::node_callee_scope(*) |
ast_map::node_local(*) => {
ccx.tcx.sess.bug(fmt!("Can't monomorphize a %?", map_node))
}

View file

@ -42,19 +42,19 @@ pub fn find_reachable(crate_mod: &_mod, exp_map2: resolve::ExportMap2,
tcx: ty::ctxt, method_map: typeck::method_map) -> map {
let mut rmap = HashSet::new();
{
let cx = ctx {
let cx = @mut ctx {
exp_map2: exp_map2,
tcx: tcx,
method_map: method_map,
rmap: &mut rmap
};
traverse_public_mod(&cx, ast::crate_node_id, crate_mod);
traverse_all_resources_and_impls(&cx, crate_mod);
traverse_public_mod(cx, ast::crate_node_id, crate_mod);
traverse_all_resources_and_impls(cx, crate_mod);
}
return @rmap;
}
fn traverse_exports(cx: &ctx, mod_id: node_id) -> bool {
fn traverse_exports(cx: @mut ctx, mod_id: node_id) -> bool {
let mut found_export = false;
match cx.exp_map2.find(&mod_id) {
Some(ref exp2s) => {
@ -68,23 +68,25 @@ fn traverse_exports(cx: &ctx, mod_id: node_id) -> bool {
return found_export;
}
fn traverse_def_id(cx: &ctx, did: def_id) {
fn traverse_def_id(cx: @mut ctx, did: def_id) {
if did.crate != local_crate { return; }
match cx.tcx.items.find(&did.node) {
None => (), // This can happen for self, for example
Some(&ast_map::node_item(item, _)) => traverse_public_item(cx, item),
Some(&ast_map::node_method(_, impl_id, _)) => traverse_def_id(cx, impl_id),
Some(&ast_map::node_foreign_item(item, _, _, _)) => {
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
cx.rmap.insert(item.id);
}
Some(&ast_map::node_variant(ref v, _, _)) => {
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
cx.rmap.insert(v.node.id);
}
_ => ()
}
}
fn traverse_public_mod(cx: &ctx, mod_id: node_id, m: &_mod) {
fn traverse_public_mod(cx: @mut ctx, mod_id: node_id, m: &_mod) {
if !traverse_exports(cx, mod_id) {
// No exports, so every local item is exported
for m.items.each |item| {
@ -93,16 +95,21 @@ fn traverse_public_mod(cx: &ctx, mod_id: node_id, m: &_mod) {
}
}
fn traverse_public_item(cx: &ctx, item: @item) {
// FIXME #6021: naming rmap shouldn't be necessary
let rmap: &mut HashSet<node_id> = cx.rmap;
if rmap.contains(&item.id) { return; }
rmap.insert(item.id);
fn traverse_public_item(cx: @mut ctx, item: @item) {
{
// FIXME #6021: naming rmap shouldn't be necessary
let cx = &mut *cx;
let rmap: &mut HashSet<node_id> = cx.rmap;
if rmap.contains(&item.id) { return; }
rmap.insert(item.id);
}
match item.node {
item_mod(ref m) => traverse_public_mod(cx, item.id, m),
item_foreign_mod(ref nm) => {
if !traverse_exports(cx, item.id) {
for nm.items.each |item| {
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
cx.rmap.insert(item.id);
}
}
@ -119,13 +126,17 @@ fn traverse_public_item(cx: &ctx, item: @item) {
m.generics.ty_params.len() > 0u ||
attr::find_inline_attr(m.attrs) != attr::ia_none
{
cx.rmap.insert(m.id);
{
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
cx.rmap.insert(m.id);
}
traverse_inline_body(cx, &m.body);
}
}
}
item_struct(ref struct_def, _) => {
for struct_def.ctor_id.each |&ctor_id| {
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
cx.rmap.insert(ctor_id);
}
}
@ -140,11 +151,12 @@ fn traverse_public_item(cx: &ctx, item: @item) {
}
}
fn traverse_ty<'a, 'b>(ty: @Ty, cx: &'b ctx<'a>, v: visit::vt<&'b ctx<'a>>) {
// FIXME #6021: naming rmap shouldn't be necessary
let rmap: &mut HashSet<node_id> = cx.rmap;
if rmap.contains(&ty.id) { return; }
rmap.insert(ty.id);
fn traverse_ty<'a>(ty: @Ty, cx: @mut ctx<'a>, v: visit::vt<@mut ctx<'a>>) {
{
let cx = &mut *cx; // FIXME(#6269) reborrow @mut to &mut
if cx.rmap.contains(&ty.id) { return; }
cx.rmap.insert(ty.id);
}
match ty.node {
ty_path(p, p_id) => {
@ -163,9 +175,9 @@ fn traverse_ty<'a, 'b>(ty: @Ty, cx: &'b ctx<'a>, v: visit::vt<&'b ctx<'a>>) {
}
}
fn traverse_inline_body(cx: &ctx, body: &blk) {
fn traverse_expr<'a, 'b>(e: @expr, cx: &'b ctx<'a>,
v: visit::vt<&'b ctx<'a>>) {
fn traverse_inline_body(cx: @mut ctx, body: &blk) {
fn traverse_expr<'a>(e: @expr, cx: @mut ctx<'a>,
v: visit::vt<@mut ctx<'a>>) {
match e.node {
expr_path(_) => {
match cx.tcx.def_map.find(&e.id) {
@ -212,7 +224,7 @@ fn traverse_inline_body(cx: &ctx, body: &blk) {
// Don't ignore nested items: for example if a generic fn contains a
// generic impl (as in deque::create), we need to monomorphize the
// impl as well
fn traverse_item(i: @item, cx: &ctx, _v: visit::vt<&ctx>) {
fn traverse_item(i: @item, cx: @mut ctx, _v: visit::vt<@mut ctx>) {
traverse_public_item(cx, i);
}
visit::visit_block(body, cx, visit::mk_vt(@visit::Visitor {
@ -222,7 +234,7 @@ fn traverse_inline_body(cx: &ctx, body: &blk) {
}));
}
fn traverse_all_resources_and_impls(cx: &ctx, crate_mod: &_mod) {
fn traverse_all_resources_and_impls(cx: @mut ctx, crate_mod: &_mod) {
visit::visit_mod(
crate_mod,
codemap::dummy_sp(),

View file

@ -274,8 +274,9 @@ pub impl Reflector {
let repr = adt::represent_type(bcx.ccx(), t);
let variants = ty::substd_enum_variants(ccx.tcx, did, substs);
let llptrty = T_ptr(type_of(ccx, t));
let (_, opaquety) = *(ccx.tcx.intrinsic_defs.find(&ccx.sess.ident_of(~"Opaque"))
.expect("Failed to resolve intrinsic::Opaque"));
let (_, opaquety) =
ccx.tcx.intrinsic_defs.find_copy(&ccx.sess.ident_of(~"Opaque"))
.expect("Failed to resolve intrinsic::Opaque");
let opaqueptrty = ty::mk_ptr(ccx.tcx, ty::mt { ty: opaquety, mutbl: ast::m_imm });
let make_get_disr = || {
@ -374,7 +375,7 @@ pub fn emit_calls_to_trait_visit_ty(bcx: block,
use syntax::parse::token::special_idents::tydesc;
let final = sub_block(bcx, ~"final");
assert!(bcx.ccx().tcx.intrinsic_defs.contains_key(&tydesc));
let (_, tydesc_ty) = *bcx.ccx().tcx.intrinsic_defs.get(&tydesc);
let (_, tydesc_ty) = bcx.ccx().tcx.intrinsic_defs.get_copy(&tydesc);
let tydesc_ty = type_of(bcx.ccx(), tydesc_ty);
let mut r = Reflector {
visitor_val: visitor_val,

View file

@ -110,8 +110,7 @@ pub fn type_of_non_gc_box(cx: @CrateContext, t: ty::t) -> TypeRef {
pub fn sizing_type_of(cx: @CrateContext, t: ty::t) -> TypeRef {
match cx.llsizingtypes.find(&t) {
// FIXME(#5562): removing this copy causes a segfault in stage1 core
Some(t) => return /*bad*/ copy *t,
Some(t) => return *t,
None => ()
}
@ -178,8 +177,7 @@ pub fn type_of(cx: @CrateContext, t: ty::t) -> TypeRef {
// Check the cache.
match cx.lltypes.find(&t) {
// FIXME(#5562): removing this copy causes a segfault in stage1 core
Some(t) => return /*bad*/ copy *t,
Some(&t) => return t,
None => ()
}

View file

@ -236,18 +236,11 @@ pub fn node_type_needs(cx: Context, use_: uint, id: node_id) {
}
pub fn mark_for_method_call(cx: Context, e_id: node_id, callee_id: node_id) {
let mut opt_static_did = None;
for cx.ccx.maps.method_map.find(&e_id).each |mth| {
match mth.origin {
typeck::method_static(did) => {
for cx.ccx.tcx.node_type_substs.find(&callee_id).each |ts| {
// FIXME(#5562): removing this copy causes a segfault
// before stage2
let ts = /*bad*/ copy **ts;
let type_uses = type_uses_for(cx.ccx, did, ts.len());
for vec::each2(*type_uses, ts) |uses, subst| {
type_needs(cx, *uses, *subst)
}
}
opt_static_did = Some(did);
}
typeck::method_param(typeck::method_param {
param_num: param,
@ -259,6 +252,19 @@ pub fn mark_for_method_call(cx: Context, e_id: node_id, callee_id: node_id) {
| typeck::method_super(*) => (),
}
}
// Note: we do not execute this code from within the each() call
// above because the recursive call to `type_needs` can trigger
// inlining and hence can cause `method_map` and
// `node_type_substs` to be modified.
for opt_static_did.each |&did| {
for cx.ccx.tcx.node_type_substs.find_copy(&callee_id).each |ts| {
let type_uses = type_uses_for(cx.ccx, did, ts.len());
for vec::each2(*type_uses, *ts) |uses, subst| {
type_needs(cx, *uses, *subst)
}
}
}
}
pub fn mark_for_expr(cx: Context, e: @expr) {
@ -288,12 +294,11 @@ pub fn mark_for_expr(cx: Context, e: @expr) {
}
}
expr_path(_) => {
for cx.ccx.tcx.node_type_substs.find(&e.id).each |ts| {
// FIXME(#5562): removing this copy causes a segfault before stage2
let ts = copy **ts;
let id = ast_util::def_id_of_def(*cx.ccx.tcx.def_map.get(&e.id));
let opt_ts = cx.ccx.tcx.node_type_substs.find_copy(&e.id);
for opt_ts.each |ts| {
let id = ast_util::def_id_of_def(cx.ccx.tcx.def_map.get_copy(&e.id));
let uses_for_ts = type_uses_for(cx.ccx, id, ts.len());
for vec::each2(*uses_for_ts, ts) |uses, subst| {
for vec::each2(*uses_for_ts, *ts) |uses, subst| {
type_needs(cx, *uses, *subst)
}
}

View file

@ -0,0 +1,201 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Logic relating to rooting and write guards for managed values
//! (`@` and `@mut`). This code is primarily for use by datum;
//! it exists in its own module both to keep datum.rs bite-sized
//! and for each in debugging (e.g., so you can use
//! `RUST_LOG=rustc::middle::trans::write_guard`).
use lib::llvm::ValueRef;
use middle::borrowck::{RootInfo, root_map_key, DynaImm, DynaMut};
use middle::trans::base::*;
use middle::trans::build::*;
use middle::trans::callee;
use middle::trans::common::*;
use middle::trans::datum::*;
use middle::trans::expr;
use middle::ty;
use driver::session;
use syntax::codemap::span;
use syntax::ast;
pub fn root_and_write_guard(datum: &Datum,
mut bcx: block,
span: span,
expr_id: ast::node_id,
derefs: uint) -> block {
let key = root_map_key { id: expr_id, derefs: derefs };
debug!("write_guard::root_and_write_guard(key=%?)", key);
// root the autoderef'd value, if necessary:
//
// (Note: root'd values are always boxes)
let ccx = bcx.ccx();
bcx = match ccx.maps.root_map.find(&key) {
None => bcx,
Some(&root_info) => root(datum, bcx, span, key, root_info)
};
// Perform the write guard, if necessary.
//
// (Note: write-guarded values are always boxes)
if ccx.maps.write_guard_map.contains(&key) {
perform_write_guard(datum, bcx, span)
} else {
bcx
}
}
pub fn return_to_mut(mut bcx: block,
root_key: root_map_key,
frozen_val_ref: ValueRef,
bits_val_ref: ValueRef,
filename_val: ValueRef,
line_val: ValueRef) -> block {
debug!("write_guard::return_to_mut(root_key=%?, %s, %s, %s)",
root_key,
bcx.to_str(),
val_str(bcx.ccx().tn, frozen_val_ref),
val_str(bcx.ccx().tn, bits_val_ref));
let box_ptr =
Load(bcx, PointerCast(bcx,
frozen_val_ref,
T_ptr(T_ptr(T_i8()))));
let bits_val =
Load(bcx, bits_val_ref);
if bcx.tcx().sess.opts.optimize == session::No {
bcx = callee::trans_lang_call(
bcx,
bcx.tcx().lang_items.unrecord_borrow_fn(),
~[
box_ptr,
bits_val,
filename_val,
line_val
],
expr::Ignore);
}
callee::trans_lang_call(
bcx,
bcx.tcx().lang_items.return_to_mut_fn(),
~[
box_ptr,
bits_val,
filename_val,
line_val
],
expr::Ignore
)
}
fn root(datum: &Datum,
mut bcx: block,
span: span,
root_key: root_map_key,
root_info: RootInfo) -> block {
//! In some cases, borrowck will decide that an @T/@[]/@str
//! value must be rooted for the program to be safe. In that
//! case, we will call this function, which will stash a copy
//! away until we exit the scope `scope_id`.
debug!("write_guard::root(root_key=%?, root_info=%?, datum=%?)",
root_key, root_info, datum.to_str(bcx.ccx()));
if bcx.sess().trace() {
trans_trace(
bcx, None,
@fmt!("preserving until end of scope %d",
root_info.scope));
}
// First, root the datum. Note that we must zero this value,
// because sometimes we root on one path but not another.
// See e.g. #4904.
let scratch = scratch_datum(bcx, datum.ty, true);
datum.copy_to_datum(bcx, INIT, scratch);
let cleanup_bcx = find_bcx_for_scope(bcx, root_info.scope);
add_clean_temp_mem(cleanup_bcx, scratch.val, scratch.ty);
// Now, consider also freezing it.
match root_info.freeze {
None => {}
Some(freeze_kind) => {
let (filename, line) = filename_and_line_num_from_span(bcx, span);
// in this case, we don't have to zero, because
// scratch.val will be NULL should the cleanup get
// called without the freezing actually occurring, and
// return_to_mut checks for this condition.
let scratch_bits = scratch_datum(bcx, ty::mk_uint(), false);
let freeze_did = match freeze_kind {
DynaImm => bcx.tcx().lang_items.borrow_as_imm_fn(),
DynaMut => bcx.tcx().lang_items.borrow_as_mut_fn(),
};
let box_ptr = Load(bcx,
PointerCast(bcx,
scratch.val,
T_ptr(T_ptr(T_i8()))));
bcx = callee::trans_lang_call(
bcx,
freeze_did,
~[
box_ptr,
filename,
line
],
expr::SaveIn(scratch_bits.val));
if bcx.tcx().sess.opts.optimize == session::No {
bcx = callee::trans_lang_call(
bcx,
bcx.tcx().lang_items.record_borrow_fn(),
~[
box_ptr,
Load(bcx, scratch_bits.val),
filename,
line
],
expr::Ignore);
}
add_clean_return_to_mut(
cleanup_bcx, root_key, scratch.val, scratch_bits.val,
filename, line);
}
}
bcx
}
fn perform_write_guard(datum: &Datum,
bcx: block,
span: span) -> block {
debug!("perform_write_guard");
let llval = datum.to_value_llval(bcx);
let (filename, line) = filename_and_line_num_from_span(bcx, span);
callee::trans_lang_call(
bcx,
bcx.tcx().lang_items.check_not_borrowed_fn(),
~[PointerCast(bcx, llval, T_ptr(T_i8())),
filename,
line],
expr::Ignore)
}

View file

@ -183,26 +183,21 @@ pub struct AutoDerefRef {
#[auto_encode]
#[auto_decode]
pub struct AutoRef {
kind: AutoRefKind,
region: Region,
mutbl: ast::mutability
}
#[auto_encode]
#[auto_decode]
pub enum AutoRefKind {
pub enum AutoRef {
/// Convert from T to &T
AutoPtr,
AutoPtr(Region, ast::mutability),
/// Convert from @[]/~[]/&[] to &[] (or str)
AutoBorrowVec,
AutoBorrowVec(Region, ast::mutability),
/// Convert from @[]/~[]/&[] to &&[] (or str)
AutoBorrowVecRef,
AutoBorrowVecRef(Region, ast::mutability),
/// Convert from @fn()/~fn()/&fn() to &fn()
AutoBorrowFn
AutoBorrowFn(Region),
/// Convert from T to *T
AutoUnsafe(ast::mutability)
}
// Stores information about provided methods (a.k.a. default methods) in
@ -432,11 +427,20 @@ pub enum Region {
/// A concrete region naming some expression within the current function.
re_scope(node_id),
/// Static data that has an "infinite" lifetime.
/// Static data that has an "infinite" lifetime. Top in the region lattice.
re_static,
/// A region variable. Should not exist after typeck.
re_infer(InferRegion)
re_infer(InferRegion),
/// Empty lifetime is for data that is never accessed.
/// Bottom in the region lattice. We treat re_empty somewhat
/// specially; at least right now, we do not generate instances of
/// it during the GLB computations, but rather
/// generate an error instead. This is to improve error messages.
/// The only way to get an instance of re_empty is to have a region
/// variable with no constraints.
re_empty,
}
pub impl Region {
@ -1539,6 +1543,13 @@ pub fn type_is_ty_var(ty: t) -> bool {
pub fn type_is_bool(ty: t) -> bool { get(ty).sty == ty_bool }
pub fn type_is_self(ty: t) -> bool {
match get(ty).sty {
ty_self(*) => true,
_ => false
}
}
pub fn type_is_structural(ty: t) -> bool {
match get(ty).sty {
ty_struct(*) | ty_tup(_) | ty_enum(*) | ty_closure(_) | ty_trait(*) |
@ -1939,7 +1950,7 @@ pub fn type_contents(cx: ctxt, ty: t) -> TypeContents {
let _i = indenter();
let mut result = match get(ty).sty {
let result = match get(ty).sty {
// Scalar and unique types are sendable, constant, and owned
ty_nil | ty_bot | ty_bool | ty_int(_) | ty_uint(_) | ty_float(_) |
ty_bare_fn(_) | ty_ptr(_) => {
@ -2789,6 +2800,17 @@ pub fn ty_region(tcx: ctxt,
}
}
pub fn replace_fn_sig(cx: ctxt, fsty: &sty, new_sig: FnSig) -> t {
match *fsty {
ty_bare_fn(ref f) => mk_bare_fn(cx, BareFnTy {sig: new_sig, ..*f}),
ty_closure(ref f) => mk_closure(cx, ClosureTy {sig: new_sig, ..*f}),
ref s => {
cx.sess.bug(
fmt!("ty_fn_sig() called on non-fn type: %?", s));
}
}
}
pub fn replace_closure_return_type(tcx: ctxt, fn_type: t, ret_type: t) -> t {
/*!
*
@ -2908,26 +2930,26 @@ pub fn adjust_ty(cx: ctxt,
match adj.autoref {
None => adjusted_ty,
Some(ref autoref) => {
match autoref.kind {
AutoPtr => {
mk_rptr(cx, autoref.region,
mt {ty: adjusted_ty,
mutbl: autoref.mutbl})
match *autoref {
AutoPtr(r, m) => {
mk_rptr(cx, r, mt {ty: adjusted_ty, mutbl: m})
}
AutoBorrowVec => {
borrow_vec(cx, span, autoref, adjusted_ty)
AutoBorrowVec(r, m) => {
borrow_vec(cx, span, r, m, adjusted_ty)
}
AutoBorrowVecRef => {
adjusted_ty = borrow_vec(cx, span, autoref,
adjusted_ty);
mk_rptr(cx, autoref.region,
mt {ty: adjusted_ty, mutbl: ast::m_imm})
AutoBorrowVecRef(r, m) => {
adjusted_ty = borrow_vec(cx, span, r, m, adjusted_ty);
mk_rptr(cx, r, mt {ty: adjusted_ty, mutbl: ast::m_imm})
}
AutoBorrowFn => {
borrow_fn(cx, span, autoref, adjusted_ty)
AutoBorrowFn(r) => {
borrow_fn(cx, span, r, adjusted_ty)
}
AutoUnsafe(m) => {
mk_ptr(cx, mt {ty: adjusted_ty, mutbl: m})
}
}
}
@ -2936,15 +2958,15 @@ pub fn adjust_ty(cx: ctxt,
};
fn borrow_vec(cx: ctxt, span: span,
autoref: &AutoRef, ty: ty::t) -> ty::t {
r: Region, m: ast::mutability,
ty: ty::t) -> ty::t {
match get(ty).sty {
ty_evec(mt, _) => {
ty::mk_evec(cx, mt {ty: mt.ty, mutbl: autoref.mutbl},
vstore_slice(autoref.region))
ty::mk_evec(cx, mt {ty: mt.ty, mutbl: m}, vstore_slice(r))
}
ty_estr(_) => {
ty::mk_estr(cx, vstore_slice(autoref.region))
ty::mk_estr(cx, vstore_slice(r))
}
ref s => {
@ -2956,13 +2978,12 @@ pub fn adjust_ty(cx: ctxt,
}
}
fn borrow_fn(cx: ctxt, span: span,
autoref: &AutoRef, ty: ty::t) -> ty::t {
fn borrow_fn(cx: ctxt, span: span, r: Region, ty: ty::t) -> ty::t {
match get(ty).sty {
ty_closure(ref fty) => {
ty::mk_closure(cx, ClosureTy {
sigil: BorrowedSigil,
region: autoref.region,
region: r,
..copy *fty
})
}
@ -2977,6 +2998,18 @@ pub fn adjust_ty(cx: ctxt,
}
}
pub impl AutoRef {
fn map_region(&self, f: &fn(Region) -> Region) -> AutoRef {
match *self {
ty::AutoPtr(r, m) => ty::AutoPtr(f(r), m),
ty::AutoBorrowVec(r, m) => ty::AutoBorrowVec(f(r), m),
ty::AutoBorrowVecRef(r, m) => ty::AutoBorrowVecRef(f(r), m),
ty::AutoBorrowFn(r) => ty::AutoBorrowFn(f(r)),
ty::AutoUnsafe(m) => ty::AutoUnsafe(m),
}
}
}
pub struct ParamsTy {
params: ~[t],
ty: t
@ -3749,7 +3782,7 @@ pub fn enum_variants(cx: ctxt, id: ast::def_id) -> @~[VariantInfo] {
call eval_const_expr, it should never get called twice for the same
expr, since check_enum_variants also updates the enum_var_cache
*/
match *cx.items.get(&id.node) {
match cx.items.get_copy(&id.node) {
ast_map::node_item(@ast::item {
node: ast::item_enum(ref enum_definition, _),
_
@ -3875,7 +3908,7 @@ pub fn lookup_field_type(tcx: ctxt,
}
else {
match tcx.tcache.find(&id) {
Some(tpt) => tpt.ty,
Some(&ty_param_bounds_and_ty {ty, _}) => ty,
None => {
let tpt = csearch::get_field_type(tcx, struct_id, id);
tcx.tcache.insert(id, tpt);
@ -4280,7 +4313,7 @@ pub fn get_impl_id(tcx: ctxt, trait_id: def_id, self_ty: t) -> def_id {
pub fn visitor_object_ty(tcx: ctxt) -> (@TraitRef, t) {
let ty_visitor_name = special_idents::ty_visitor;
assert!(tcx.intrinsic_traits.contains_key(&ty_visitor_name));
let trait_ref = *tcx.intrinsic_traits.get(&ty_visitor_name);
let trait_ref = tcx.intrinsic_traits.get_copy(&ty_visitor_name);
(trait_ref,
mk_trait(tcx, trait_ref.def_id, copy trait_ref.substs, BoxTraitStore, ast::m_imm))
}

View file

@ -118,8 +118,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: @ast::pat, path: @ast::Path,
Some((enm, var)) => {
// Assign the pattern the type of the *enum*, not the variant.
let enum_tpt = ty::lookup_item_type(tcx, enm);
instantiate_path(pcx.fcx, path, enum_tpt, pat.span, pat.id,
pcx.block_region);
instantiate_path(pcx.fcx, path, enum_tpt, pat.span, pat.id);
// check that the type of the value being matched is a subtype
// of the type of the pattern:
@ -175,8 +174,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: @ast::pat, path: @ast::Path,
} else {
ctor_tpt
};
instantiate_path(pcx.fcx, path, struct_tpt, pat.span, pat.id,
pcx.block_region);
instantiate_path(pcx.fcx, path, struct_tpt, pat.span, pat.id);
// Check that the type of the value being matched is a subtype of
// the type of the pattern.
@ -425,7 +423,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: @ast::pat, expected: ty::t) {
}
ast::pat_enum(*) |
ast::pat_ident(*) if pat_is_const(tcx.def_map, pat) => {
let const_did = ast_util::def_id_of_def(*tcx.def_map.get(&pat.id));
let const_did = ast_util::def_id_of_def(tcx.def_map.get_copy(&pat.id));
let const_tpt = ty::lookup_item_type(tcx, const_did);
demand::suptype(fcx, pat.span, expected, const_tpt.ty);
fcx.write_ty(pat.id, const_tpt.ty);

View file

@ -119,7 +119,8 @@ pub fn lookup(
// In a call `a.b::<X, Y, ...>(...)`:
expr: @ast::expr, // The expression `a.b(...)`.
self_expr: @ast::expr, // The expression `a`.
callee_id: node_id, // Where to store `a.b`'s type
callee_id: node_id, /* Where to store `a.b`'s type,
* also the scope of the call */
m_name: ast::ident, // The ident `b`.
self_ty: ty::t, // The type of `a`.
supplied_tps: &[ty::t], // The list of types X, Y, ... .
@ -127,7 +128,7 @@ pub fn lookup(
check_traits: CheckTraitsFlag, // Whether we check traits only.
autoderef_receiver: AutoderefReceiverFlag)
-> Option<method_map_entry> {
let mut impl_dups = HashSet::new();
let impl_dups = @mut HashSet::new();
let lcx = LookupContext {
fcx: fcx,
expr: expr,
@ -135,7 +136,7 @@ pub fn lookup(
callee_id: callee_id,
m_name: m_name,
supplied_tps: supplied_tps,
impl_dups: &mut impl_dups,
impl_dups: impl_dups,
inherent_candidates: @mut ~[],
extension_candidates: @mut ~[],
deref_args: deref_args,
@ -154,7 +155,7 @@ pub struct LookupContext<'self> {
callee_id: node_id,
m_name: ast::ident,
supplied_tps: &'self [ty::t],
impl_dups: &'self mut HashSet<def_id>,
impl_dups: @mut HashSet<def_id>,
inherent_candidates: @mut ~[Candidate],
extension_candidates: @mut ~[Candidate],
deref_args: check::DerefArgs,
@ -640,7 +641,7 @@ pub impl<'self> LookupContext<'self> {
/*!
*
* In the event that we are invoking a method with a receiver
* of a linear borrowed type like `&mut T` or `&mut [T]`,
* of a borrowed type like `&T`, `&mut T`, or `&mut [T]`,
* we will "reborrow" the receiver implicitly. For example, if
* you have a call `r.inc()` and where `r` has type `&mut T`,
* then we treat that like `(&mut *r).inc()`. This avoids
@ -657,26 +658,25 @@ pub impl<'self> LookupContext<'self> {
let tcx = self.tcx();
return match ty::get(self_ty).sty {
ty::ty_rptr(_, self_mt) if self_mt.mutbl == m_mutbl => {
let region = self.infcx().next_region_var(self.expr.span,
self.expr.id);
ty::ty_rptr(_, self_mt) if default_method_hack(self_mt) => {
(self_ty,
ty::AutoDerefRef(ty::AutoDerefRef {
autoderefs: autoderefs,
autoref: None}))
}
ty::ty_rptr(_, self_mt) => {
let region = self.infcx().next_region_var_nb(self.expr.span);
(ty::mk_rptr(tcx, region, self_mt),
ty::AutoDerefRef(ty::AutoDerefRef {
autoderefs: autoderefs+1,
autoref: Some(ty::AutoRef {kind: AutoPtr,
region: region,
mutbl: self_mt.mutbl})}))
autoref: Some(ty::AutoPtr(region, self_mt.mutbl))}))
}
ty::ty_evec(self_mt, vstore_slice(_))
if self_mt.mutbl == m_mutbl => {
let region = self.infcx().next_region_var(self.expr.span,
self.expr.id);
ty::ty_evec(self_mt, vstore_slice(_)) => {
let region = self.infcx().next_region_var_nb(self.expr.span);
(ty::mk_evec(tcx, self_mt, vstore_slice(region)),
ty::AutoDerefRef(ty::AutoDerefRef {
autoderefs: autoderefs,
autoref: Some(ty::AutoRef {kind: AutoBorrowVec,
region: region,
mutbl: self_mt.mutbl})}))
autoderefs: autoderefs,
autoref: Some(ty::AutoBorrowVec(region, self_mt.mutbl))}))
}
_ => {
(self_ty,
@ -685,6 +685,16 @@ pub impl<'self> LookupContext<'self> {
autoref: None}))
}
};
fn default_method_hack(self_mt: ty::mt) -> bool {
// FIXME(#6129). Default methods can't deal with autoref.
//
// I am a horrible monster and I pray for death. Currently
// the default method code fails when you try to reborrow
// because it is not handling types correctly. In lieu of
// fixing that, I am introducing this horrible hack. - ndm
self_mt.mutbl == m_imm && ty::type_is_self(self_mt.ty)
}
}
fn search_for_autosliced_method(
@ -793,7 +803,7 @@ pub impl<'self> LookupContext<'self> {
fn search_for_some_kind_of_autorefd_method(
&self,
kind: AutoRefKind,
kind: &fn(Region, ast::mutability) -> ty::AutoRef,
autoderefs: uint,
mutbls: &[ast::mutability],
mk_autoref_ty: &fn(ast::mutability, ty::Region) -> ty::t)
@ -801,8 +811,7 @@ pub impl<'self> LookupContext<'self> {
{
// This is hokey. We should have mutability inference as a
// variable. But for now, try &const, then &, then &mut:
let region = self.infcx().next_region_var(self.expr.span,
self.expr.id);
let region = self.infcx().next_region_var_nb(self.expr.span);
for mutbls.each |mutbl| {
let autoref_ty = mk_autoref_ty(*mutbl, region);
match self.search_for_method(autoref_ty) {
@ -812,12 +821,7 @@ pub impl<'self> LookupContext<'self> {
self.self_expr.id,
@ty::AutoDerefRef(ty::AutoDerefRef {
autoderefs: autoderefs,
autoref: Some(ty::AutoRef {
kind: kind,
region: region,
mutbl: *mutbl,
}),
}));
autoref: Some(kind(region, *mutbl))}));
return Some(mme);
}
}
@ -1024,8 +1028,7 @@ pub impl<'self> LookupContext<'self> {
let (_, opt_transformed_self_ty, fn_sig) =
replace_bound_regions_in_fn_sig(
tcx, @Nil, Some(transformed_self_ty), &bare_fn_ty.sig,
|_br| self.fcx.infcx().next_region_var(
self.expr.span, self.expr.id));
|_br| self.fcx.infcx().next_region_var_nb(self.expr.span));
let transformed_self_ty = opt_transformed_self_ty.get();
let fty = ty::mk_bare_fn(tcx, ty::BareFnTy {sig: fn_sig, ..bare_fn_ty});
debug!("after replacing bound regions, fty=%s", self.ty_to_str(fty));

View file

@ -207,9 +207,11 @@ pub impl PurityState {
}
pub struct FnCtxt {
// var_bindings, locals and next_var_id are shared
// with any nested functions that capture the environment
// (and with any functions whose environment is being captured).
// Number of errors that had been reported when we started
// checking this function. On exit, if we find that *more* errors
// have been reported, we will skip regionck and other work that
// expects the types within the function to be consistent.
err_count_on_creation: uint,
ret_ty: ty::t,
// Used by loop bodies that return from the outer function
@ -263,6 +265,7 @@ pub fn blank_fn_ctxt(ccx: @mut CrateCtxt,
// It's kind of a kludge to manufacture a fake function context
// and statement context, but we might as well do write the code only once
@mut FnCtxt {
err_count_on_creation: ccx.tcx.sess.err_count(),
ret_ty: rty,
indirect_ret_ty: None,
ps: PurityState::function(ast::pure_fn, 0),
@ -328,6 +331,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
*/
let tcx = ccx.tcx;
let err_count_on_creation = tcx.sess.err_count();
// ______________________________________________________________________
// First, we have to replace any bound regions in the fn and self
@ -368,6 +372,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
};
@mut FnCtxt {
err_count_on_creation: err_count_on_creation,
ret_ty: ret_ty,
indirect_ret_ty: indirect_ret_ty,
ps: PurityState::function(purity, id),
@ -433,7 +438,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
assign(self_info.self_id, Some(self_info.self_ty));
debug!("self is assigned to %s",
fcx.infcx().ty_to_str(
*fcx.inh.locals.get(&self_info.self_id)));
fcx.inh.locals.get_copy(&self_info.self_id)));
}
// Add formal parameters.
@ -466,7 +471,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
debug!("Local variable %s is assigned type %s",
fcx.pat_to_str(local.node.pat),
fcx.infcx().ty_to_str(
*fcx.inh.locals.get(&local.node.id)));
fcx.inh.locals.get_copy(&local.node.id)));
visit::visit_local(local, e, v);
};
@ -479,7 +484,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
debug!("Pattern binding %s is assigned to %s",
*tcx.sess.str_of(path.idents[0]),
fcx.infcx().ty_to_str(
*fcx.inh.locals.get(&p.id)));
fcx.inh.locals.get_copy(&p.id)));
}
_ => {}
}
@ -642,7 +647,12 @@ impl AstConv for FnCtxt {
}
pub impl FnCtxt {
fn infcx(&self) -> @mut infer::InferCtxt { self.inh.infcx }
fn infcx(&self) -> @mut infer::InferCtxt {
self.inh.infcx
}
fn err_count_since_creation(&self) -> uint {
self.ccx.tcx.sess.err_count() - self.err_count_on_creation
}
fn search_in_scope_regions(
&self,
span: span,
@ -898,11 +908,9 @@ pub impl FnCtxt {
fn region_var_if_parameterized(&self,
rp: Option<ty::region_variance>,
span: span,
lower_bound: ty::Region)
span: span)
-> Option<ty::Region> {
rp.map(
|_rp| self.infcx().next_region_var_with_lb(span, lower_bound))
rp.map(|_rp| self.infcx().next_region_var_nb(span))
}
fn type_error_message(&self,
@ -1083,8 +1091,7 @@ pub fn impl_self_ty(vcx: &VtableContext,
};
let self_r = if region_param.is_some() {
Some(vcx.infcx.next_region_var(location_info.span,
location_info.id))
Some(vcx.infcx.next_region_var_nb(location_info.span))
} else {
None
};
@ -1291,9 +1298,17 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
// that they appear in call position.
check_expr(fcx, f);
// Store the type of `f` as the type of the callee
let fn_ty = fcx.expr_ty(f);
// FIXME(#6273) should write callee type AFTER regions have
// been subst'd. However, it is awkward to deal with this
// now. Best thing would I think be to just have a separate
// "callee table" that contains the FnSig and not a general
// purpose ty::t
fcx.write_ty(call_expr.callee_id, fn_ty);
// Extract the function signature from `in_fty`.
let fn_ty = fcx.expr_ty(f);
let fn_sty = structure_of(fcx, f.span, fn_ty);
// FIXME(#3678) For now, do not permit calls to C abi functions.
@ -1330,7 +1345,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let (_, _, fn_sig) =
replace_bound_regions_in_fn_sig(
fcx.tcx(), @Nil, None, &fn_sig,
|_br| fcx.infcx().next_region_var(call_expr.span, call_expr.id));
|_br| fcx.infcx().next_region_var_nb(call_expr.span));
// Call the generic checker.
check_argument_types(fcx, call_expr.span, fn_sig.inputs, f,
@ -1651,7 +1666,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
};
// construct the function type
let mut fn_ty = astconv::ty_of_closure(fcx,
let fn_ty = astconv::ty_of_closure(fcx,
fcx,
sigil,
purity,
@ -1662,7 +1677,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
&opt_vec::Empty,
expr.span);
let mut fty_sig;
let fty_sig;
let fty = if error_happened {
fty_sig = FnSig {
bound_lifetime_names: opt_vec::Empty,
@ -1909,9 +1924,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
// Generate the struct type.
let self_region =
fcx.region_var_if_parameterized(region_parameterized,
span,
ty::re_scope(id));
fcx.region_var_if_parameterized(region_parameterized, span);
let type_parameters = fcx.infcx().next_ty_vars(type_parameter_count);
let substitutions = substs {
self_r: self_region,
@ -1997,9 +2010,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
// Generate the enum type.
let self_region =
fcx.region_var_if_parameterized(region_parameterized,
span,
ty::re_scope(id));
fcx.region_var_if_parameterized(region_parameterized, span);
let type_parameters = fcx.infcx().next_ty_vars(type_parameter_count);
let substitutions = substs {
self_r: self_region,
@ -2336,13 +2347,12 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
// (and how long it is valid), which we don't know yet until type
// inference is complete.
//
// Therefore, here we simply generate a region variable with
// the current expression as a lower bound. The region
// inferencer will then select the ultimate value. Finally,
// borrowck is charged with guaranteeing that the value whose
// address was taken can actually be made to live as long as
// it needs to live.
let region = fcx.infcx().next_region_var(expr.span, expr.id);
// Therefore, here we simply generate a region variable. The
// region inferencer will then select the ultimate value.
// Finally, borrowck is charged with guaranteeing that the
// value whose address was taken can actually be made to live
// as long as it needs to live.
let region = fcx.infcx().next_region_var_nb(expr.span);
let tm = ty::mt { ty: fcx.expr_ty(oprnd), mutbl: mutbl };
let oprnd_t = if ty::type_is_error(tm.ty) {
@ -2359,8 +2369,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let defn = lookup_def(fcx, pth.span, id);
let tpt = ty_param_bounds_and_ty_for_def(fcx, expr.span, defn);
let region_lb = ty::re_scope(expr.id);
instantiate_path(fcx, pth, tpt, expr.span, expr.id, region_lb);
instantiate_path(fcx, pth, tpt, expr.span, expr.id);
}
ast::expr_inline_asm(ref ia) => {
fcx.require_unsafe(expr.span, ~"use of inline assembly");
@ -2936,7 +2945,8 @@ pub fn check_block(fcx0: @mut FnCtxt, blk: &ast::blk) {
pub fn check_block_with_expected(fcx: @mut FnCtxt,
blk: &ast::blk,
expected: Option<ty::t>) {
let prev = replace(&mut fcx.ps, fcx.ps.recurse(blk));
let purity_state = fcx.ps.recurse(blk);
let prev = replace(&mut fcx.ps, purity_state);
do fcx.with_region_lb(blk.node.id) {
let mut warned = false;
@ -3227,8 +3237,7 @@ pub fn instantiate_path(fcx: @mut FnCtxt,
pth: @ast::Path,
tpt: ty_param_bounds_and_ty,
span: span,
node_id: ast::node_id,
region_lb: ty::Region) {
node_id: ast::node_id) {
debug!(">>> instantiate_path");
let ty_param_count = tpt.generics.type_param_defs.len();
@ -3254,8 +3263,7 @@ pub fn instantiate_path(fcx: @mut FnCtxt,
}
}
None => { // no lifetime parameter supplied, insert default
fcx.region_var_if_parameterized(
tpt.generics.region_param, span, region_lb)
fcx.region_var_if_parameterized(tpt.generics.region_param, span)
}
};
@ -3339,7 +3347,7 @@ pub fn ast_expr_vstore_to_vstore(fcx: @mut FnCtxt,
ast::expr_vstore_uniq => ty::vstore_uniq,
ast::expr_vstore_box | ast::expr_vstore_mut_box => ty::vstore_box,
ast::expr_vstore_slice | ast::expr_vstore_mut_slice => {
let r = fcx.infcx().next_region_var(e.span, e.id);
let r = fcx.infcx().next_region_var_nb(e.span);
ty::vstore_slice(r)
}
}
@ -3462,7 +3470,7 @@ pub fn check_intrinsic_type(ccx: @mut CrateCtxt, it: @ast::foreign_item) {
~"visit_tydesc" => {
let tydesc_name = special_idents::tydesc;
assert!(tcx.intrinsic_defs.contains_key(&tydesc_name));
let (_, tydesc_ty) = *tcx.intrinsic_defs.get(&tydesc_name);
let (_, tydesc_ty) = tcx.intrinsic_defs.get_copy(&tydesc_name);
let (_, visitor_object_ty) = ty::visitor_object_ty(tcx);
let td_ptr = ty::mk_ptr(ccx.tcx, ty::mt {
ty: tydesc_ty,

View file

@ -28,16 +28,15 @@ this point a bit better.
*/
use middle::freevars::get_freevars;
use middle::pat_util::pat_bindings;
use middle::ty::{re_scope};
use middle::ty;
use middle::typeck::check::FnCtxt;
use middle::typeck::check::lookup_def;
use middle::typeck::check::regionmanip::relate_nested_regions;
use middle::typeck::infer::resolve_and_force_all_but_regions;
use middle::typeck::infer::resolve_type;
use util::ppaux::{note_and_explain_region, ty_to_str,
region_to_str};
use middle::pat_util;
use syntax::ast::{ManagedSigil, OwnedSigil, BorrowedSigil};
use syntax::ast::{def_arg, def_binding, def_local, def_self, def_upvar};
@ -73,7 +72,11 @@ fn encl_region_of_def(fcx: @mut FnCtxt, def: ast::def) -> ty::Region {
}
pub impl Rcx {
fn resolve_type(@mut self, unresolved_ty: ty::t) -> ty::t {
fn tcx(&self) -> ty::ctxt {
self.fcx.ccx.tcx
}
fn resolve_type(&mut self, unresolved_ty: ty::t) -> ty::t {
/*!
* Try to resolve the type for the given node, returning
* t_err if an error results. Note that we never care
@ -135,24 +138,40 @@ pub impl Rcx {
pub fn regionck_expr(fcx: @mut FnCtxt, e: @ast::expr) {
let rcx = @mut Rcx { fcx: fcx, errors_reported: 0 };
let v = regionck_visitor();
(v.visit_expr)(e, rcx, v);
if fcx.err_count_since_creation() == 0 {
// regionck assumes typeck succeeded
let v = regionck_visitor();
(v.visit_expr)(e, rcx, v);
}
fcx.infcx().resolve_regions();
}
pub fn regionck_fn(fcx: @mut FnCtxt, blk: &ast::blk) {
let rcx = @mut Rcx { fcx: fcx, errors_reported: 0 };
let v = regionck_visitor();
(v.visit_block)(blk, rcx, v);
if fcx.err_count_since_creation() == 0 {
// regionck assumes typeck succeeded
let v = regionck_visitor();
(v.visit_block)(blk, rcx, v);
}
fcx.infcx().resolve_regions();
}
fn regionck_visitor() -> rvt {
// (*) FIXME(#3238) should use visit_pat, not visit_arm/visit_local,
// However, right now we run into an issue whereby some free
// regions are not properly related if they appear within the
// types of arguments that must be inferred. This could be
// addressed by deferring the construction of the region
// hierarchy, and in particular the relationships between free
// regions, until regionck, as described in #3238.
visit::mk_vt(@visit::Visitor {visit_item: visit_item,
visit_stmt: visit_stmt,
visit_expr: visit_expr,
visit_block: visit_block,
//visit_pat: visit_pat, // (*) see above
visit_arm: visit_arm,
visit_local: visit_local,
visit_block: visit_block,
.. *visit::default_visitor()})
}
@ -160,44 +179,110 @@ fn visit_item(_item: @ast::item, _rcx: @mut Rcx, _v: rvt) {
// Ignore items
}
fn visit_local(l: @ast::local, rcx: @mut Rcx, v: rvt) {
// Check to make sure that the regions in all local variables are
// within scope.
//
// Note: we do this here rather than in visit_pat because we do
// not wish to constrain the regions in *patterns* in quite the
// same way. `visit_node()` guarantees that the region encloses
// the node in question, which ultimately constrains the regions
// in patterns to enclose the match expression as a whole. But we
// want them to enclose the *arm*. However, regions in patterns
// must either derive from the discriminant or a ref pattern: in
// the case of the discriminant, the regions will be constrained
// when the type of the discriminant is checked. In the case of a
// ref pattern, the variable is created with a suitable lower
// bound.
let e = rcx.errors_reported;
(v.visit_pat)(l.node.pat, rcx, v);
let def_map = rcx.fcx.ccx.tcx.def_map;
do pat_bindings(def_map, l.node.pat) |_bm, id, sp, _path| {
visit_node(id, sp, rcx);
}
if e != rcx.errors_reported {
return; // if decl has errors, skip initializer expr
}
(v.visit_ty)(l.node.ty, rcx, v);
for l.node.init.each |i| {
(v.visit_expr)(*i, rcx, v);
}
fn visit_block(b: &ast::blk, rcx: @mut Rcx, v: rvt) {
rcx.fcx.tcx().region_maps.record_cleanup_scope(b.node.id);
visit::visit_block(b, rcx, v);
}
fn visit_block(b: &ast::blk, rcx: @mut Rcx, v: rvt) {
visit::visit_block(b, rcx, v);
fn visit_arm(arm: &ast::arm, rcx: @mut Rcx, v: rvt) {
// see above
for arm.pats.each |&p| {
constrain_bindings_in_pat(p, rcx);
}
visit::visit_arm(arm, rcx, v);
}
fn visit_local(l: @ast::local, rcx: @mut Rcx, v: rvt) {
// see above
constrain_bindings_in_pat(l.node.pat, rcx);
visit::visit_local(l, rcx, v);
}
fn constrain_bindings_in_pat(pat: @ast::pat, rcx: @mut Rcx) {
let tcx = rcx.fcx.tcx();
debug!("regionck::visit_pat(pat=%s)", pat.repr(tcx));
do pat_util::pat_bindings(tcx.def_map, pat) |_, id, span, _| {
// If we have a variable that contains region'd data, that
// data will be accessible from anywhere that the variable is
// accessed. We must be wary of loops like this:
//
// // from src/test/compile-fail/borrowck-lend-flow.rs
// let mut v = ~3, w = ~4;
// let mut x = &mut w;
// loop {
// **x += 1; // (2)
// borrow(v); //~ ERROR cannot borrow
// x = &mut v; // (1)
// }
//
// Typically, we try to determine the region of a borrow from
// those points where it is dereferenced. In this case, one
// might imagine that the lifetime of `x` need only be the
// body of the loop. But of course this is incorrect because
// the pointer that is created at point (1) is consumed at
// point (2), meaning that it must be live across the loop
// iteration. The easiest way to guarantee this is to require
// that the lifetime of any regions that appear in a
// variable's type enclose at least the variable's scope.
let encl_region = tcx.region_maps.encl_region(id);
constrain_regions_in_type_of_node(rcx, id, encl_region, span);
}
}
fn visit_expr(expr: @ast::expr, rcx: @mut Rcx, v: rvt) {
debug!("regionck::visit_expr(e=%s)", rcx.fcx.expr_to_str(expr));
let has_method_map = rcx.fcx.inh.method_map.contains_key(&expr.id);
// Record cleanup scopes, which are used by borrowck to decide the
// maximum lifetime of a temporary rvalue. These were derived by
// examining where trans creates block scopes, not because this
// reflects some principled decision around temporary lifetimes.
// Ordinarily this would seem like something that should be setup
// in region, but we need to know which uses of operators are
// overloaded. See #3511.
let tcx = rcx.fcx.tcx();
match expr.node {
// You'd think that x += y where `+=` is overloaded would be a
// cleanup scope. You'd be... kind of right. In fact the
// handling of `+=` and friends in trans for overloaded
// operators is a hopeless mess and I can't figure out how to
// represent it. - ndm
//
// ast::expr_assign_op(*) |
ast::expr_index(*) |
ast::expr_binary(*) |
ast::expr_unary(*) if has_method_map => {
tcx.region_maps.record_cleanup_scope(expr.id);
}
ast::expr_binary(ast::and, lhs, rhs) |
ast::expr_binary(ast::or, lhs, rhs) => {
tcx.region_maps.record_cleanup_scope(lhs.id);
tcx.region_maps.record_cleanup_scope(rhs.id);
}
ast::expr_call(*) |
ast::expr_method_call(*) => {
tcx.region_maps.record_cleanup_scope(expr.id);
}
ast::expr_match(_, ref arms) => {
tcx.region_maps.record_cleanup_scope(expr.id);
for arms.each |arm| {
for arm.guard.each |guard| {
tcx.region_maps.record_cleanup_scope(guard.id);
}
}
}
ast::expr_while(cond, ref body) => {
tcx.region_maps.record_cleanup_scope(cond.id);
tcx.region_maps.record_cleanup_scope(body.node.id);
}
_ => {}
}
// Check any autoderefs or autorefs that appear.
for rcx.fcx.inh.adjustments.find(&expr.id).each |&adjustment| {
debug!("adjustment=%?", adjustment);
match *adjustment {
@ -208,6 +293,13 @@ fn visit_expr(expr: @ast::expr, rcx: @mut Rcx, v: rvt) {
constrain_derefs(rcx, expr, autoderefs, expr_ty);
for opt_autoref.each |autoref| {
guarantor::for_autoref(rcx, expr, autoderefs, autoref);
// Require that the resulting region encompasses
// the current node.
//
// FIXME(#6268) remove to support nested method calls
constrain_regions_in_type_of_node(
rcx, expr.id, ty::re_scope(expr.id), expr.span);
}
}
_ => {}
@ -215,58 +307,40 @@ fn visit_expr(expr: @ast::expr, rcx: @mut Rcx, v: rvt) {
}
match expr.node {
ast::expr_path(*) => {
// Avoid checking the use of local variables, as we
// already check their definitions. The def'n always
// encloses the use. So if the def'n is enclosed by the
// region, then the uses will also be enclosed (and
// otherwise, an error will have been reported at the
// def'n site).
match lookup_def(rcx.fcx, expr.span, expr.id) {
ast::def_local(*) | ast::def_arg(*) |
ast::def_upvar(*) => return,
_ => ()
}
}
ast::expr_call(callee, ref args, _) => {
// Check for a.b() where b is a method. Ensure that
// any types in the callee are valid for the entire
// method call.
// FIXME(#3387)--we should really invoke
// `constrain_auto_ref()` on all exprs. But that causes a
// lot of spurious errors because of how the region
// hierarchy is setup.
if rcx.fcx.inh.method_map.contains_key(&callee.id) {
match callee.node {
ast::expr_field(base, _, _) => {
constrain_auto_ref(rcx, base);
}
_ => {
// This can happen if you have code like
// (x[0])() where `x[0]` is overloaded. Just
// ignore it.
}
}
} else {
constrain_auto_ref(rcx, callee);
}
for args.each |arg| {
constrain_auto_ref(rcx, *arg);
}
constrain_callee(rcx, expr, callee);
constrain_call(rcx, expr, None, *args, false);
}
ast::expr_method_call(rcvr, _, _, ref args, _) => {
// Check for a.b() where b is a method. Ensure that
// any types in the callee are valid for the entire
// method call.
ast::expr_method_call(arg0, _, _, ref args, _) => {
constrain_call(rcx, expr, Some(arg0), *args, false);
}
constrain_auto_ref(rcx, rcvr);
for args.each |arg| {
constrain_auto_ref(rcx, *arg);
}
ast::expr_index(lhs, rhs) |
ast::expr_assign_op(_, lhs, rhs) |
ast::expr_binary(_, lhs, rhs) if has_method_map => {
// As `expr_method_call`, but the call is via an
// overloaded op. Note that we (sadly) currently use an
// implicit "by ref" sort of passing style here. This
// should be converted to an adjustment!
constrain_call(rcx, expr, Some(lhs), [rhs], true);
}
ast::expr_unary(_, lhs) if has_method_map => {
// As above.
constrain_call(rcx, expr, Some(lhs), [], true);
}
ast::expr_unary(ast::deref, base) => {
// For *a, the lifetime of a must enclose the deref
let base_ty = rcx.resolve_node_type(base.id);
constrain_derefs(rcx, expr, 1, base_ty);
}
ast::expr_index(vec_expr, _) => {
// For a[b], the lifetime of a must enclose the deref
let vec_type = rcx.resolve_expr_type_adjusted(vec_expr);
constrain_index(rcx, expr, vec_type);
}
ast::expr_cast(source, _) => {
@ -294,18 +368,18 @@ fn visit_expr(expr: @ast::expr, rcx: @mut Rcx, v: rvt) {
}
}
ast::expr_index(vec_expr, _) => {
let vec_type = rcx.resolve_expr_type_adjusted(vec_expr);
constrain_index(rcx, expr, vec_type);
}
ast::expr_unary(ast::deref, base) => {
let base_ty = rcx.resolve_node_type(base.id);
constrain_derefs(rcx, expr, 1, base_ty);
}
ast::expr_addr_of(_, base) => {
guarantor::for_addr_of(rcx, expr, base);
// Require that when you write a `&expr` expression, the
// resulting pointer has a lifetime that encompasses the
// `&expr` expression itself. Note that we constraining
// the type of the node expr.id here *before applying
// adjustments*.
//
// FIXME(#6268) nested method calls requires that this rule change
let ty0 = rcx.resolve_node_type(expr.id);
constrain_regions_in_type(rcx, ty::re_scope(expr.id), expr.span, ty0);
}
ast::expr_match(discr, ref arms) => {
@ -313,6 +387,8 @@ fn visit_expr(expr: @ast::expr, rcx: @mut Rcx, v: rvt) {
}
ast::expr_fn_block(*) => {
// The lifetime of a block fn must not outlive the variables
// it closes over
let function_type = rcx.resolve_node_type(expr.id);
match ty::get(function_type).sty {
ty::ty_closure(ty::ClosureTy {sigil: ast::BorrowedSigil,
@ -326,46 +402,101 @@ fn visit_expr(expr: @ast::expr, rcx: @mut Rcx, v: rvt) {
_ => ()
}
if !visit_node(expr.id, expr.span, rcx) { return; }
visit::visit_expr(expr, rcx, v);
}
fn visit_stmt(s: @ast::stmt, rcx: @mut Rcx, v: rvt) {
visit::visit_stmt(s, rcx, v);
fn constrain_callee(rcx: @mut Rcx,
call_expr: @ast::expr,
callee_expr: @ast::expr)
{
let tcx = rcx.fcx.tcx();
let call_region = ty::re_scope(call_expr.id);
let callee_ty = rcx.resolve_node_type(call_expr.callee_id);
match ty::get(callee_ty).sty {
ty::ty_bare_fn(*) => { }
ty::ty_closure(ref closure_ty) => {
match rcx.fcx.mk_subr(true, callee_expr.span,
call_region, closure_ty.region) {
result::Err(_) => {
tcx.sess.span_err(
callee_expr.span,
fmt!("cannot invoke closure outside of its lifetime"));
note_and_explain_region(
tcx,
"the closure is only valid for ",
closure_ty.region,
"");
}
result::Ok(_) => {}
}
}
_ => {
// this should not happen, but it does if the program is
// erroneous
//
// tcx.sess.span_bug(
// callee_expr.span,
// fmt!("Calling non-function: %s", callee_ty.repr(tcx)));
}
}
}
fn visit_node(id: ast::node_id, span: span, rcx: @mut Rcx) -> bool {
/*!
*
* checks the type of the node `id` and reports an error if it
* references a region that is not in scope for that node.
* Returns false if an error is reported; this is used to cause us
* to cut off region checking for that subtree to avoid reporting
* tons of errors. */
let fcx = rcx.fcx;
// find the region where this expr evaluation is taking place
let tcx = fcx.ccx.tcx;
let encl_region = match tcx.region_maps.opt_encl_scope(id) {
None => ty::re_static,
Some(r) => ty::re_scope(r)
};
// Otherwise, look at the type and see if it is a region pointer.
constrain_regions_in_type_of_node(rcx, id, encl_region, span)
}
fn encl_region_or_static(rcx: @mut Rcx, expr: @ast::expr) -> ty::Region {
// FIXME(#3850) --- interactions with modes compel overly large granularity
// that is, we would probably prefer to just return re_scope(expr.id)
// here but we cannot just yet.
fn constrain_call(rcx: @mut Rcx,
// might be expr_call, expr_method_call, or an overloaded
// operator
call_expr: @ast::expr,
receiver: Option<@ast::expr>,
arg_exprs: &[@ast::expr],
implicitly_ref_args: bool)
{
//! Invoked on every call site (i.e., normal calls, method calls,
//! and overloaded operators). Constrains the regions which appear
//! in the type of the function. Also constrains the regions that
//! appear in the arguments appropriately.
let tcx = rcx.fcx.tcx();
match tcx.region_maps.opt_encl_scope(expr.id) {
Some(s) => ty::re_scope(s),
None => ty::re_static // occurs in constants
debug!("constrain_call(call_expr=%s, implicitly_ref_args=%?)",
call_expr.repr(tcx), implicitly_ref_args);
let callee_ty = rcx.resolve_node_type(call_expr.callee_id);
let fn_sig = ty::ty_fn_sig(callee_ty);
// `callee_region` is the scope representing the time in which the
// call occurs.
//
// FIXME(#6268) to support nested method calls, should be callee_id
let callee_scope = call_expr.id;
let callee_region = ty::re_scope(callee_scope);
for arg_exprs.each |&arg_expr| {
// ensure that any regions appearing in the argument type are
// valid for at least the lifetime of the function:
constrain_regions_in_type_of_node(
rcx, arg_expr.id, callee_region, arg_expr.span);
// unfortunately, there are two means of taking implicit
// references, and we need to propagate constraints as a
// result. modes are going away and the "DerefArgs" code
// should be ported to use adjustments
if implicitly_ref_args {
guarantor::for_by_ref(rcx, arg_expr, callee_scope);
}
}
// as loop above, but for receiver
for receiver.each |&r| {
constrain_regions_in_type_of_node(
rcx, r.id, callee_region, r.span);
if implicitly_ref_args {
guarantor::for_by_ref(rcx, r, callee_scope);
}
}
// constrain regions that may appear in the return type to be
// valid for the function call:
constrain_regions_in_type(
rcx, callee_region, call_expr.span, fn_sig.output);
}
fn constrain_derefs(rcx: @mut Rcx,
@ -379,9 +510,8 @@ fn constrain_derefs(rcx: @mut Rcx,
* pointer being derefenced, the lifetime of the pointer includes
* the deref expr.
*/
let tcx = rcx.fcx.tcx();
let r_deref_expr = encl_region_or_static(rcx, deref_expr);
let r_deref_expr = ty::re_scope(deref_expr.id);
for uint::range(0, derefs) |i| {
debug!("constrain_derefs(deref_expr=%s, derefd_ty=%s, derefs=%?/%?",
rcx.fcx.expr_to_str(deref_expr),
@ -390,19 +520,8 @@ fn constrain_derefs(rcx: @mut Rcx,
match ty::get(derefd_ty).sty {
ty::ty_rptr(r_ptr, _) => {
match rcx.fcx.mk_subr(true, deref_expr.span, r_deref_expr, r_ptr) {
result::Ok(*) => {}
result::Err(*) => {
tcx.sess.span_err(
deref_expr.span,
fmt!("dereference of reference outside its lifetime"));
note_and_explain_region(
tcx,
"the reference is only valid for ",
r_ptr,
"");
}
}
mk_subregion_due_to_derefence(rcx, deref_expr.span,
r_deref_expr, r_ptr);
}
_ => {}
@ -417,6 +536,27 @@ fn constrain_derefs(rcx: @mut Rcx,
}
}
pub fn mk_subregion_due_to_derefence(rcx: @mut Rcx,
deref_span: span,
minimum_lifetime: ty::Region,
maximum_lifetime: ty::Region) {
match rcx.fcx.mk_subr(true, deref_span,
minimum_lifetime, maximum_lifetime) {
result::Ok(*) => {}
result::Err(*) => {
rcx.tcx().sess.span_err(
deref_span,
fmt!("dereference of reference outside its lifetime"));
note_and_explain_region(
rcx.tcx(),
"the reference is only valid for ",
maximum_lifetime,
"");
}
}
}
fn constrain_index(rcx: @mut Rcx,
index_expr: @ast::expr,
indexed_ty: ty::t)
@ -433,7 +573,7 @@ fn constrain_index(rcx: @mut Rcx,
rcx.fcx.expr_to_str(index_expr),
rcx.fcx.infcx().ty_to_str(indexed_ty));
let r_index_expr = encl_region_or_static(rcx, index_expr);
let r_index_expr = ty::re_scope(index_expr.id);
match ty::get(indexed_ty).sty {
ty::ty_estr(ty::vstore_slice(r_ptr)) |
ty::ty_evec(_, ty::vstore_slice(r_ptr)) => {
@ -456,66 +596,22 @@ fn constrain_index(rcx: @mut Rcx,
}
}
fn constrain_auto_ref(rcx: @mut Rcx, expr: @ast::expr) {
fn constrain_free_variables(rcx: @mut Rcx,
region: ty::Region,
expr: @ast::expr) {
/*!
*
* If `expr` is auto-ref'd (e.g., as part of a borrow), then this
* function ensures that the lifetime of the resulting borrowed
* ptr includes at least the expression `expr`. */
debug!("constrain_auto_ref(expr=%s)", rcx.fcx.expr_to_str(expr));
let adjustment = rcx.fcx.inh.adjustments.find(&expr.id);
let region = match adjustment {
Some(&@ty::AutoDerefRef(
ty::AutoDerefRef {
autoref: Some(ref auto_ref), _})) => {
auto_ref.region
}
_ => { return; }
};
let tcx = rcx.fcx.tcx();
let encl_region = tcx.region_maps.encl_region(expr.id);
match rcx.fcx.mk_subr(true, expr.span, encl_region, region) {
result::Ok(()) => {}
result::Err(_) => {
// In practice, this cannot happen: `region` is always a
// region variable, and constraints on region variables
// are collected and then resolved later. However, I
// included the span_err() here (rather than, say,
// span_bug()) because it seemed more future-proof: if,
// for some reason, the code were to change so that in
// some cases `region` is not a region variable, then
// reporting an error would be the correct path.
tcx.sess.span_err(
expr.span,
"lifetime of borrowed pointer does not include \
the expression being borrowed");
note_and_explain_region(
tcx,
"lifetime of the borrowed pointer is",
region,
"");
rcx.errors_reported += 1;
}
}
}
fn constrain_free_variables(
rcx: @mut Rcx,
region: ty::Region,
expr: @ast::expr) {
/*!
*
* Make sure that all free variables referenced inside the closure
* outlive the closure itself. */
* outlive the closure itself.
*/
let tcx = rcx.fcx.ccx.tcx;
debug!("constrain_free_variables(%s, %s)",
region.repr(tcx), expr.repr(tcx));
for get_freevars(tcx, expr.id).each |freevar| {
debug!("freevar def is %?", freevar.def);
let def = freevar.def;
let en_region = encl_region_of_def(rcx.fcx, def);
debug!("en_region = %s", en_region.repr(tcx));
match rcx.fcx.mk_subr(true, freevar.span,
region, en_region) {
result::Ok(()) => {}
@ -541,9 +637,13 @@ fn constrain_free_variables(
fn constrain_regions_in_type_of_node(
rcx: @mut Rcx,
id: ast::node_id,
encl_region: ty::Region,
minimum_lifetime: ty::Region,
span: span) -> bool
{
//! Guarantees that any lifetimes which appear in the type of
//! the node `id` (after applying adjustments) are valid for at
//! least `minimum_lifetime`
let tcx = rcx.fcx.tcx();
// Try to resolve the type. If we encounter an error, then typeck
@ -553,22 +653,21 @@ fn constrain_regions_in_type_of_node(
let adjustment = rcx.fcx.inh.adjustments.find(&id);
let ty = ty::adjust_ty(tcx, span, ty0, adjustment);
debug!("constrain_regions_in_type_of_node(\
ty=%s, ty0=%s, id=%d, encl_region=%?, adjustment=%?)",
ty=%s, ty0=%s, id=%d, minimum_lifetime=%?, adjustment=%?)",
ty_to_str(tcx, ty), ty_to_str(tcx, ty0),
id, encl_region, adjustment);
constrain_regions_in_type(rcx, encl_region, span, ty)
id, minimum_lifetime, adjustment);
constrain_regions_in_type(rcx, minimum_lifetime, span, ty)
}
fn constrain_regions_in_type(
rcx: @mut Rcx,
encl_region: ty::Region,
minimum_lifetime: ty::Region,
span: span,
ty: ty::t) -> bool
{
/*!
*
* Requires that any regions which appear in `ty` must be
* superregions of `encl_region`. Also enforces the constraint
* superregions of `minimum_lifetime`. Also enforces the constraint
* that given a pointer type `&'r T`, T must not contain regions
* that outlive 'r, as well as analogous constraints for other
* lifetime'd types.
@ -583,11 +682,11 @@ fn constrain_regions_in_type(
let e = rcx.errors_reported;
let tcx = rcx.fcx.ccx.tcx;
debug!("constrain_regions_in_type(encl_region=%s, ty=%s)",
region_to_str(tcx, encl_region),
debug!("constrain_regions_in_type(minimum_lifetime=%s, ty=%s)",
region_to_str(tcx, minimum_lifetime),
ty_to_str(tcx, ty));
do relate_nested_regions(tcx, Some(encl_region), ty) |r_sub, r_sup| {
do relate_nested_regions(tcx, Some(minimum_lifetime), ty) |r_sub, r_sup| {
debug!("relate(r_sub=%s, r_sup=%s)",
region_to_str(tcx, r_sub),
region_to_str(tcx, r_sup));
@ -595,12 +694,12 @@ fn constrain_regions_in_type(
if r_sup.is_bound() || r_sub.is_bound() {
// a bound region is one which appears inside an fn type.
// (e.g., the `&` in `fn(&T)`). Such regions need not be
// constrained by `encl_region` as they are placeholders
// constrained by `minimum_lifetime` as they are placeholders
// for regions that are as-yet-unknown.
} else {
match rcx.fcx.mk_subr(true, span, r_sub, r_sup) {
result::Err(_) => {
if r_sub == encl_region {
if r_sub == minimum_lifetime {
tcx.sess.span_err(
span,
fmt!("reference is not valid outside of its lifetime"));
@ -639,7 +738,6 @@ fn constrain_regions_in_type(
pub mod guarantor {
/*!
*
* The routines in this module are aiming to deal with the case
* where a the contents of a borrowed pointer are re-borrowed.
* Imagine you have a borrowed pointer `b` with lifetime L1 and
@ -686,6 +784,7 @@ pub mod guarantor {
*/
use middle::typeck::check::regionck::{Rcx, infallibly_mk_subr};
use middle::typeck::check::regionck::mk_subregion_due_to_derefence;
use middle::ty;
use syntax::ast;
use syntax::codemap::span;
@ -693,14 +792,12 @@ pub mod guarantor {
pub fn for_addr_of(rcx: @mut Rcx, expr: @ast::expr, base: @ast::expr) {
/*!
*
* Computes the guarantor for an expression `&base` and then
* ensures that the lifetime of the resulting pointer is linked
* to the lifetime of its guarantor (if any).
*/
debug!("guarantor::for_addr_of(base=%s)", rcx.fcx.expr_to_str(base));
let _i = ::util::common::indenter();
let guarantor = guarantor(rcx, base);
link(rcx, expr.span, expr.id, guarantor);
@ -708,13 +805,14 @@ pub mod guarantor {
pub fn for_match(rcx: @mut Rcx, discr: @ast::expr, arms: &[ast::arm]) {
/*!
*
* Computes the guarantors for any ref bindings in a match and
* then ensures that the lifetime of the resulting pointer is
* linked to the lifetime of its guarantor (if any).
*/
debug!("regionck::for_match()");
let discr_guarantor = guarantor(rcx, discr);
debug!("discr_guarantor=%s", discr_guarantor.repr(rcx.tcx()));
for arms.each |arm| {
for arm.pats.each |pat| {
link_ref_bindings_in_pat(rcx, *pat, discr_guarantor);
@ -727,7 +825,6 @@ pub mod guarantor {
autoderefs: uint,
autoref: &ty::AutoRef) {
/*!
*
* Computes the guarantor for an expression that has an
* autoref adjustment and links it to the lifetime of the
* autoref. This is only important when auto re-borrowing
@ -736,30 +833,30 @@ pub mod guarantor {
debug!("guarantor::for_autoref(expr=%s, autoref=%?)",
rcx.fcx.expr_to_str(expr), autoref);
let _i = ::util::common::indenter();
let mut expr_ct = categorize_unadjusted(rcx, expr);
debug!(" unadjusted cat=%?", expr_ct.cat);
expr_ct = apply_autoderefs(
rcx, expr, autoderefs, expr_ct);
match autoref.kind {
ty::AutoPtr => {
match *autoref {
ty::AutoPtr(r, _) => {
// In this case, we are implicitly adding an `&`.
maybe_make_subregion(rcx, expr, autoref.region,
expr_ct.cat.guarantor);
maybe_make_subregion(rcx, expr, r, expr_ct.cat.guarantor);
}
ty::AutoBorrowVec |
ty::AutoBorrowVecRef |
ty::AutoBorrowFn => {
ty::AutoBorrowVec(r, _) |
ty::AutoBorrowVecRef(r, _) |
ty::AutoBorrowFn(r) => {
// In each of these cases, what is being borrowed is
// not the (autoderef'd) expr itself but rather the
// contents of the autoderef'd expression (i.e., what
// the pointer points at).
maybe_make_subregion(rcx, expr, autoref.region,
maybe_make_subregion(rcx, expr, r,
guarantor_of_deref(&expr_ct.cat));
}
ty::AutoUnsafe(_) => {}
}
fn maybe_make_subregion(
@ -774,6 +871,28 @@ pub mod guarantor {
}
}
pub fn for_by_ref(rcx: @mut Rcx,
expr: @ast::expr,
callee_scope: ast::node_id) {
/*!
* Computes the guarantor for cases where the `expr` is
* being passed by implicit reference and must outlive
* `callee_scope`.
*/
let tcx = rcx.tcx();
debug!("guarantor::for_by_ref(expr=%s, callee_scope=%?)",
expr.repr(tcx), callee_scope);
let expr_cat = categorize(rcx, expr);
debug!("guarantor::for_by_ref(expr=%?, callee_scope=%?) category=%?",
expr.id, callee_scope, expr_cat);
let minimum_lifetime = ty::re_scope(callee_scope);
for expr_cat.guarantor.each |guarantor| {
mk_subregion_due_to_derefence(rcx, expr.span,
minimum_lifetime, *guarantor);
}
}
fn link(
rcx: @mut Rcx,
span: span,
@ -801,7 +920,7 @@ pub mod guarantor {
// expressions, both of which always yield a region variable, so
// mk_subr should never fail.
let rptr_ty = rcx.resolve_node_type(id);
if !ty::type_is_error(rptr_ty) && !ty::type_is_bot(rptr_ty) {
if !ty::type_is_bot(rptr_ty) {
let tcx = rcx.fcx.ccx.tcx;
debug!("rptr_ty=%s", ty_to_str(tcx, rptr_ty));
let r = ty::ty_region(tcx, span, rptr_ty);
@ -907,7 +1026,6 @@ pub mod guarantor {
fn categorize(rcx: @mut Rcx, expr: @ast::expr) -> ExprCategorization {
debug!("categorize(expr=%s)", rcx.fcx.expr_to_str(expr));
let _i = ::util::common::indenter();
let mut expr_ct = categorize_unadjusted(rcx, expr);
debug!("before adjustments, cat=%?", expr_ct.cat);
@ -928,12 +1046,24 @@ pub mod guarantor {
expr_ct = apply_autoderefs(
rcx, expr, adjustment.autoderefs, expr_ct);
for adjustment.autoref.each |autoref| {
// If there is an autoref, then the result of this
// expression will be some sort of borrowed pointer.
expr_ct.cat.guarantor = None;
expr_ct.cat.pointer = BorrowedPointer(autoref.region);
debug!("autoref, cat=%?", expr_ct.cat);
match adjustment.autoref {
None => {
}
Some(ty::AutoUnsafe(_)) => {
expr_ct.cat.guarantor = None;
expr_ct.cat.pointer = OtherPointer;
debug!("autoref, cat=%?", expr_ct.cat);
}
Some(ty::AutoPtr(r, _)) |
Some(ty::AutoBorrowVec(r, _)) |
Some(ty::AutoBorrowVecRef(r, _)) |
Some(ty::AutoBorrowFn(r)) => {
// If there is an autoref, then the result of this
// expression will be some sort of borrowed pointer.
expr_ct.cat.guarantor = None;
expr_ct.cat.pointer = BorrowedPointer(r);
debug!("autoref, cat=%?", expr_ct.cat);
}
}
}
@ -948,7 +1078,6 @@ pub mod guarantor {
expr: @ast::expr)
-> ExprCategorizationType {
debug!("categorize_unadjusted(expr=%s)", rcx.fcx.expr_to_str(expr));
let _i = ::util::common::indenter();
let guarantor = {
if rcx.fcx.inh.method_map.contains_key(&expr.id) {
@ -1053,7 +1182,6 @@ pub mod guarantor {
debug!("link_ref_bindings_in_pat(pat=%s, guarantor=%?)",
rcx.fcx.pat_to_str(pat), guarantor);
let _i = ::util::common::indenter();
match pat.node {
ast::pat_wild => {}
@ -1069,7 +1197,10 @@ pub mod guarantor {
link_ref_bindings_in_pat(rcx, *p, guarantor);
}
}
ast::pat_enum(*) => {}
ast::pat_enum(_, None) => {}
ast::pat_enum(_, Some(ref pats)) => {
link_ref_bindings_in_pats(rcx, pats, guarantor);
}
ast::pat_struct(_, ref fpats, _) => {
for fpats.each |fpat| {
link_ref_bindings_in_pat(rcx, fpat.pat, guarantor);
@ -1086,29 +1217,25 @@ pub mod guarantor {
}
ast::pat_region(p) => {
let rptr_ty = rcx.resolve_node_type(pat.id);
if !ty::type_is_error(rptr_ty) {
let r = ty::ty_region(rcx.fcx.tcx(), pat.span, rptr_ty);
link_ref_bindings_in_pat(rcx, p, Some(r));
}
let r = ty::ty_region(rcx.fcx.tcx(), pat.span, rptr_ty);
link_ref_bindings_in_pat(rcx, p, Some(r));
}
ast::pat_lit(*) => {}
ast::pat_range(*) => {}
ast::pat_vec(ref before, ref slice, ref after) => {
let vec_ty = rcx.resolve_node_type(pat.id);
if !ty::type_is_error(vec_ty) {
let vstore = ty::ty_vstore(vec_ty);
let guarantor1 = match vstore {
ty::vstore_fixed(_) | ty::vstore_uniq => guarantor,
ty::vstore_slice(r) => Some(r),
ty::vstore_box => None
};
let vstore = ty::ty_vstore(vec_ty);
let guarantor1 = match vstore {
ty::vstore_fixed(_) | ty::vstore_uniq => guarantor,
ty::vstore_slice(r) => Some(r),
ty::vstore_box => None
};
link_ref_bindings_in_pats(rcx, before, guarantor1);
for slice.each |&p| {
link_ref_bindings_in_pat(rcx, p, guarantor);
}
link_ref_bindings_in_pats(rcx, after, guarantor1);
link_ref_bindings_in_pats(rcx, before, guarantor1);
for slice.each |&p| {
link_ref_bindings_in_pat(rcx, p, guarantor);
}
link_ref_bindings_in_pats(rcx, after, guarantor1);
}
}
}

View file

@ -87,7 +87,7 @@ pub fn replace_bound_regions_in_fn_sig(
to_r: &fn(ty::bound_region) -> ty::Region,
r: ty::Region) -> isr_alist {
match r {
ty::re_free(*) | ty::re_static | ty::re_scope(_) |
ty::re_empty | ty::re_free(*) | ty::re_static | ty::re_scope(_) |
ty::re_infer(_) => {
isr
}
@ -153,6 +153,7 @@ pub fn replace_bound_regions_in_fn_sig(
}
// Free regions like these just stay the same:
ty::re_empty |
ty::re_static |
ty::re_scope(_) |
ty::re_free(*) |

View file

@ -244,11 +244,14 @@ fn lookup_vtable(vcx: &VtableContext,
// Nothing found. Continue.
}
Some(implementations) => {
let implementations: &mut ~[@Impl] = *implementations;
let len = { // FIXME(#5074): stage0 requires it
let implementations: &mut ~[@Impl] = *implementations;
implementations.len()
};
// implementations is the list of all impls in scope for
// trait_ref. (Usually, there's just one.)
for uint::range(0, implementations.len()) |i| {
for uint::range(0, len) |i| {
let im = implementations[i];
// im is one specific impl of trait_ref.
@ -487,7 +490,7 @@ pub fn early_resolve_expr(ex: @ast::expr,
for fcx.opt_node_ty_substs(ex.id) |substs| {
debug!("vtable resolution on parameter bounds for expr %s",
ex.repr(fcx.tcx()));
let def = *cx.tcx.def_map.get(&ex.id);
let def = cx.tcx.def_map.get_copy(&ex.id);
let did = ast_util::def_id_of_def(def);
let item_ty = ty::lookup_item_type(cx.tcx, did);
debug!("early resolve expr: def %? %?, %?, %s", ex.id, did, def,

View file

@ -134,23 +134,22 @@ fn resolve_type_vars_for_node(wbcx: @mut WbCtxt, sp: span, id: ast::node_id)
}
Some(&@ty::AutoDerefRef(adj)) => {
let resolved_autoref = match adj.autoref {
Some(ref autoref) => {
match resolve_region(fcx.infcx(), autoref.region,
resolve_all | force_all) {
Err(e) => {
// This should not, I think, happen.
fcx.ccx.tcx.sess.span_err(
sp, fmt!("cannot resolve scope of borrow: %s",
infer::fixup_err_to_str(e)));
Some(*autoref)
}
Ok(r) => {
Some(ty::AutoRef {region: r, ..*autoref})
}
let fixup_region = |r| {
match resolve_region(fcx.infcx(), r, resolve_all | force_all) {
Ok(r1) => r1,
Err(e) => {
// This should not, I think, happen.
fcx.ccx.tcx.sess.span_err(
sp, fmt!("cannot resolve scope of borrow: %s",
infer::fixup_err_to_str(e)));
r
}
}
None => None
};
let resolved_autoref = match adj.autoref {
None => None,
Some(ref r) => Some(r.map_region(fixup_region))
};
let resolved_adj = @ty::AutoDerefRef(ty::AutoDerefRef {

View file

@ -238,8 +238,8 @@ pub impl CoherenceChecker {
fn check_implementation(&self,
item: @item, associated_traits: ~[@trait_ref]) {
let self_type = self.crate_context.tcx.tcache.get(
&local_def(item.id));
let tcx = self.crate_context.tcx;
let self_type = ty::lookup_item_type(tcx, local_def(item.id));
// If there are no traits, then this implementation must have a
// base type.
@ -390,7 +390,7 @@ pub impl CoherenceChecker {
let pmm = self.crate_context.tcx.provided_methods;
match pmm.find(&local_def(impl_id)) {
Some(mis) => {
Some(&mis) => {
// If the trait already has an entry in the
// provided_methods_map, we just need to add this
// method to that entry.
@ -423,8 +423,8 @@ pub impl CoherenceChecker {
self.crate_context.coherence_info.inherent_methods
.insert(base_def_id, implementation_list);
}
Some(existing_implementation_list) => {
implementation_list = *existing_implementation_list;
Some(&existing_implementation_list) => {
implementation_list = existing_implementation_list;
}
}
@ -440,8 +440,8 @@ pub impl CoherenceChecker {
self.crate_context.coherence_info.extension_methods
.insert(trait_id, implementation_list);
}
Some(existing_implementation_list) => {
implementation_list = *existing_implementation_list;
Some(&existing_implementation_list) => {
implementation_list = existing_implementation_list;
}
}
@ -449,10 +449,8 @@ pub impl CoherenceChecker {
}
fn check_implementation_coherence(&self) {
let coherence_info = &mut self.crate_context.coherence_info;
let extension_methods = &coherence_info.extension_methods;
for extension_methods.each_key |&trait_id| {
let coherence_info = self.crate_context.coherence_info;
for coherence_info.extension_methods.each_key |&trait_id| {
self.check_implementation_coherence_of(trait_id);
}
}
@ -502,20 +500,23 @@ pub impl CoherenceChecker {
m.insert(self_t, the_impl);
self.crate_context.tcx.trait_impls.insert(trait_t, m);
}
Some(m) => {
Some(&m) => {
m.insert(self_t, the_impl);
}
}
}
fn iter_impls_of_trait(&self, trait_def_id: def_id, f: &fn(@Impl)) {
let coherence_info = &mut self.crate_context.coherence_info;
let extension_methods = &coherence_info.extension_methods;
let coherence_info = self.crate_context.coherence_info;
let extension_methods = &*coherence_info.extension_methods;
match extension_methods.find(&trait_def_id) {
Some(impls) => {
let impls: &mut ~[@Impl] = *impls;
for uint::range(0, impls.len()) |i| {
let len = { // FIXME(#5074) stage0 requires this
let impls: &mut ~[@Impl] = *impls;
impls.len()
};
for uint::range(0, len) |i| {
f(impls[i]);
}
}
@ -645,7 +646,7 @@ pub impl CoherenceChecker {
fn get_self_type_for_implementation(&self, implementation: @Impl)
-> ty_param_bounds_and_ty {
return *self.crate_context.tcx.tcache.get(&implementation.did);
return self.crate_context.tcx.tcache.get_copy(&implementation.did);
}
// Privileged scope checking
@ -701,7 +702,7 @@ pub impl CoherenceChecker {
fn trait_ref_to_trait_def_id(&self, trait_ref: @trait_ref) -> def_id {
let def_map = self.crate_context.tcx.def_map;
let trait_def = *def_map.get(&trait_ref.ref_id);
let trait_def = def_map.get_copy(&trait_ref.ref_id);
let trait_id = def_id_of_def(trait_def);
return trait_id;
}
@ -741,7 +742,7 @@ pub impl CoherenceChecker {
-> bool {
match original_type.node {
ty_path(_, path_id) => {
match *self.crate_context.tcx.def_map.get(&path_id) {
match self.crate_context.tcx.def_map.get_copy(&path_id) {
def_ty(def_id) | def_struct(def_id) => {
if def_id.crate != local_crate {
return false;
@ -1003,7 +1004,7 @@ pub impl CoherenceChecker {
//
fn populate_destructor_table(&self) {
let coherence_info = &mut self.crate_context.coherence_info;
let coherence_info = self.crate_context.coherence_info;
let tcx = self.crate_context.tcx;
let drop_trait = tcx.lang_items.drop_trait();
let impls_opt = coherence_info.extension_methods.find(&drop_trait);

View file

@ -219,7 +219,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt,
{
let tcx = ccx.tcx;
let region_paramd = tcx.region_paramd_items.find(&trait_id).map(|&x| *x);
match *tcx.items.get(&trait_id) {
match tcx.items.get_copy(&trait_id) {
ast_map::node_item(@ast::item {
node: ast::item_trait(ref generics, _, ref ms),
_

View file

@ -65,7 +65,7 @@ we may want to adjust precisely when coercions occur.
*/
use middle::ty::{AutoPtr, AutoBorrowVec, AutoBorrowFn};
use middle::ty::{AutoDerefRef, AutoRef};
use middle::ty::{AutoDerefRef};
use middle::ty::{vstore_slice, vstore_box, vstore_uniq};
use middle::ty::{mt};
use middle::ty;
@ -120,9 +120,9 @@ pub impl Coerce {
};
}
ty::ty_ptr(_) => {
ty::ty_ptr(mt_b) => {
return do self.unpack_actual_value(a) |sty_a| {
self.coerce_unsafe_ptr(a, sty_a, b)
self.coerce_unsafe_ptr(a, sty_a, b, mt_b)
};
}
@ -205,11 +205,7 @@ pub impl Coerce {
if_ok!(sub.tys(a_borrowed, b));
Ok(Some(@AutoDerefRef(AutoDerefRef {
autoderefs: 1,
autoref: Some(AutoRef {
kind: AutoPtr,
region: r_borrow,
mutbl: mt_b.mutbl
})
autoref: Some(AutoPtr(r_borrow, mt_b.mutbl))
})))
}
@ -235,11 +231,7 @@ pub impl Coerce {
if_ok!(self.subtype(a_borrowed, b));
Ok(Some(@AutoDerefRef(AutoDerefRef {
autoderefs: 0,
autoref: Some(AutoRef {
kind: AutoBorrowVec,
region: r_a,
mutbl: m_imm
})
autoref: Some(AutoBorrowVec(r_a, m_imm))
})))
}
@ -268,11 +260,7 @@ pub impl Coerce {
if_ok!(sub.tys(a_borrowed, b));
Ok(Some(@AutoDerefRef(AutoDerefRef {
autoderefs: 0,
autoref: Some(AutoRef {
kind: AutoBorrowVec,
region: r_borrow,
mutbl: mt_b.mutbl
})
autoref: Some(AutoBorrowVec(r_borrow, mt_b.mutbl))
})))
}
@ -308,11 +296,7 @@ pub impl Coerce {
if_ok!(self.subtype(a_borrowed, b));
Ok(Some(@AutoDerefRef(AutoDerefRef {
autoderefs: 0,
autoref: Some(AutoRef {
kind: AutoBorrowFn,
region: r_borrow,
mutbl: m_imm
})
autoref: Some(AutoBorrowFn(r_borrow))
})))
}
@ -363,7 +347,8 @@ pub impl Coerce {
fn coerce_unsafe_ptr(&self,
a: ty::t,
sty_a: &ty::sty,
b: ty::t) -> CoerceResult
b: ty::t,
mt_b: ty::mt) -> CoerceResult
{
debug!("coerce_unsafe_ptr(a=%s, sty_a=%?, b=%s)",
a.inf_str(self.infcx), sty_a,
@ -376,10 +361,17 @@ pub impl Coerce {
}
};
// borrowed pointers and unsafe pointers have the same
// representation, so just check that the types which they
// point at are compatible:
// check that the types which they point at are compatible
let a_unsafe = ty::mk_ptr(self.infcx.tcx, mt_a);
self.subtype(a_unsafe, b)
if_ok!(self.subtype(a_unsafe, b));
// although borrowed ptrs and unsafe ptrs have the same
// representation, we still register an AutoDerefRef so that
// regionck knows that that the region for `a` must be valid
// here
Ok(Some(@AutoDerefRef(AutoDerefRef {
autoderefs: 1,
autoref: Some(ty::AutoUnsafe(mt_b.mutbl))
})))
}
}

View file

@ -16,6 +16,7 @@ use middle::typeck::infer::lub::Lub;
use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::to_str::InferStr;
use middle::typeck::infer::{cres, InferCtxt};
use middle::typeck::infer::fold_regions_in_sig;
use middle::typeck::isr_alist;
use syntax::ast;
use syntax::ast::{Many, Once, extern_fn, impure_fn, m_const, m_imm, m_mutbl};
@ -188,7 +189,8 @@ impl Combine for Glb {
let new_vars =
self.infcx.region_vars.vars_created_since_snapshot(snapshot);
let sig1 =
self.infcx.fold_regions_in_sig(
fold_regions_in_sig(
self.infcx.tcx,
&sig0,
|r, _in_fn| generalize_region(self, snapshot,
new_vars, a_isr, a_vars, b_vars,

View file

@ -16,6 +16,7 @@ use middle::typeck::infer::lattice::*;
use middle::typeck::infer::sub::Sub;
use middle::typeck::infer::to_str::InferStr;
use middle::typeck::infer::{cres, InferCtxt};
use middle::typeck::infer::fold_regions_in_sig;
use middle::typeck::isr_alist;
use util::common::indent;
use util::ppaux::mt_to_str;
@ -141,7 +142,8 @@ impl Combine for Lub {
let new_vars =
self.infcx.region_vars.vars_created_since_snapshot(snapshot);
let sig1 =
self.infcx.fold_regions_in_sig(
fold_regions_in_sig(
self.infcx.tcx,
&sig0,
|r, _in_fn| generalize_region(self, snapshot, new_vars,
a_isr, r));

View file

@ -339,7 +339,7 @@ pub fn fixup_err_to_str(f: fixup_err) -> ~str {
fn new_ValsAndBindings<V:Copy,T:Copy>() -> ValsAndBindings<V, T> {
ValsAndBindings {
vals: @mut SmallIntMap::new(),
vals: SmallIntMap::new(),
bindings: ~[]
}
}
@ -469,28 +469,6 @@ pub fn resolve_region(cx: @mut InferCtxt, r: ty::Region, modes: uint)
resolver.resolve_region_chk(r)
}
/*
fn resolve_borrowings(cx: @mut InferCtxt) {
for cx.borrowings.each |item| {
match resolve_region(cx, item.scope, resolve_all|force_all) {
Ok(region) => {
debug!("borrowing for expr %d resolved to region %?, mutbl %?",
item.expr_id, region, item.mutbl);
cx.tcx.borrowings.insert(
item.expr_id, {region: region, mutbl: item.mutbl});
}
Err(e) => {
let str = fixup_err_to_str(e);
cx.tcx.sess.span_err(
item.span,
fmt!("could not resolve lifetime for borrow: %s", str));
}
}
}
}
*/
trait then {
fn then<T:Copy>(&self, f: &fn() -> Result<T,ty::type_err>)
-> Result<T,ty::type_err>;
@ -554,7 +532,8 @@ struct Snapshot {
}
pub impl InferCtxt {
fn combine_fields(@mut self, a_is_expected: bool,
fn combine_fields(@mut self,
a_is_expected: bool,
span: span) -> CombineFields {
CombineFields {infcx: self,
a_is_expected: a_is_expected,
@ -565,25 +544,24 @@ pub impl InferCtxt {
Sub(self.combine_fields(a_is_expected, span))
}
fn in_snapshot(@mut self) -> bool {
fn in_snapshot(&self) -> bool {
self.region_vars.in_snapshot()
}
fn start_snapshot(@mut self) -> Snapshot {
let this = &mut *self;
fn start_snapshot(&mut self) -> Snapshot {
Snapshot {
ty_var_bindings_len:
this.ty_var_bindings.bindings.len(),
self.ty_var_bindings.bindings.len(),
int_var_bindings_len:
this.int_var_bindings.bindings.len(),
self.int_var_bindings.bindings.len(),
float_var_bindings_len:
this.float_var_bindings.bindings.len(),
self.float_var_bindings.bindings.len(),
region_vars_snapshot:
this.region_vars.start_snapshot(),
self.region_vars.start_snapshot(),
}
}
fn rollback_to(@mut self, snapshot: &Snapshot) {
fn rollback_to(&mut self, snapshot: &Snapshot) {
debug!("rollback!");
rollback_to(&mut self.ty_var_bindings, snapshot.ty_var_bindings_len);
@ -647,45 +625,47 @@ fn next_simple_var<V:Copy,T:Copy>(
}
pub impl InferCtxt {
fn next_ty_var_id(@mut self) -> TyVid {
fn next_ty_var_id(&mut self) -> TyVid {
let id = self.ty_var_counter;
self.ty_var_counter += 1;
let vals = self.ty_var_bindings.vals;
vals.insert(id, Root(Bounds { lb: None, ub: None }, 0u));
{
let vals = &mut self.ty_var_bindings.vals;
vals.insert(id, Root(Bounds { lb: None, ub: None }, 0u));
}
return TyVid(id);
}
fn next_ty_var(@mut self) -> ty::t {
fn next_ty_var(&mut self) -> ty::t {
ty::mk_var(self.tcx, self.next_ty_var_id())
}
fn next_ty_vars(@mut self, n: uint) -> ~[ty::t] {
fn next_ty_vars(&mut self, n: uint) -> ~[ty::t] {
vec::from_fn(n, |_i| self.next_ty_var())
}
fn next_int_var_id(@mut self) -> IntVid {
fn next_int_var_id(&mut self) -> IntVid {
IntVid(next_simple_var(&mut self.int_var_counter,
&mut self.int_var_bindings))
}
fn next_int_var(@mut self) -> ty::t {
fn next_int_var(&mut self) -> ty::t {
ty::mk_int_var(self.tcx, self.next_int_var_id())
}
fn next_float_var_id(@mut self) -> FloatVid {
fn next_float_var_id(&mut self) -> FloatVid {
FloatVid(next_simple_var(&mut self.float_var_counter,
&mut self.float_var_bindings))
}
fn next_float_var(@mut self) -> ty::t {
fn next_float_var(&mut self) -> ty::t {
ty::mk_float_var(self.tcx, self.next_float_var_id())
}
fn next_region_var_nb(@mut self, span: span) -> ty::Region {
fn next_region_var_nb(&mut self, span: span) -> ty::Region {
ty::re_infer(ty::ReVar(self.region_vars.new_region_var(span)))
}
fn next_region_var_with_lb(@mut self, span: span,
fn next_region_var_with_lb(&mut self, span: span,
lb_region: ty::Region) -> ty::Region {
let region_var = self.next_region_var_nb(span);
@ -697,12 +677,12 @@ pub impl InferCtxt {
return region_var;
}
fn next_region_var(@mut self, span: span, scope_id: ast::node_id)
fn next_region_var(&mut self, span: span, scope_id: ast::node_id)
-> ty::Region {
self.next_region_var_with_lb(span, ty::re_scope(scope_id))
}
fn resolve_regions(@mut self) {
fn resolve_regions(&mut self) {
self.region_vars.resolve_regions();
}
@ -722,7 +702,6 @@ pub impl InferCtxt {
result::Err(_) => typ
}
}
fn resolve_type_vars_in_trait_ref_if_possible(@mut self,
trait_ref: &ty::TraitRef)
-> ty::TraitRef
@ -793,7 +772,7 @@ pub impl InferCtxt {
self.type_error_message(sp, mk_msg, a, Some(err));
}
fn replace_bound_regions_with_fresh_regions(@mut self,
fn replace_bound_regions_with_fresh_regions(&mut self,
span: span,
fsig: &ty::FnSig)
-> (ty::FnSig, isr_alist) {
@ -811,15 +790,14 @@ pub impl InferCtxt {
});
(fn_sig, isr)
}
fn fold_regions_in_sig(
@mut self,
fn_sig: &ty::FnSig,
fldr: &fn(r: ty::Region, in_fn: bool) -> ty::Region) -> ty::FnSig
{
do ty::fold_sig(fn_sig) |t| {
ty::fold_regions(self.tcx, t, fldr)
}
}
}
pub fn fold_regions_in_sig(
tcx: ty::ctxt,
fn_sig: &ty::FnSig,
fldr: &fn(r: ty::Region, in_fn: bool) -> ty::Region) -> ty::FnSig
{
do ty::fold_sig(fn_sig) |t| {
ty::fold_regions(tcx, t, fldr)
}
}

View file

@ -24,7 +24,7 @@ it's worth spending more time on a more involved analysis. Moreover,
regions are a simpler case than types: they don't have aggregate
structure, for example.
Unlike normal type inference, which is similar in spirit H-M and thus
Unlike normal type inference, which is similar in spirit to H-M and thus
works progressively, the region type inference works by accumulating
constraints over the course of a function. Finally, at the end of
processing a function, we process and solve the constraints all at
@ -130,7 +130,7 @@ of these variables can effectively be unified into a single variable.
Once SCCs are removed, we are left with a DAG. At this point, we can
walk the DAG in toplogical order once to compute the expanding nodes,
and again in reverse topological order to compute the contracting
nodes.The main reason I did not write it this way is that I did not
nodes. The main reason I did not write it this way is that I did not
feel like implementing the SCC and toplogical sort algorithms at the
moment.
@ -538,7 +538,7 @@ more convincing in the future.
use middle::ty;
use middle::ty::{FreeRegion, Region, RegionVid};
use middle::ty::{re_static, re_infer, re_free, re_bound};
use middle::ty::{re_empty, re_static, re_infer, re_free, re_bound};
use middle::ty::{re_scope, ReVar, ReSkolemized, br_fresh};
use middle::typeck::infer::cres;
use util::common::indenter;
@ -547,6 +547,9 @@ use util::ppaux::note_and_explain_region;
use core::cell::{Cell, empty_cell};
use core::hashmap::{HashMap, HashSet};
use core::to_bytes;
use core::uint;
use core::vec;
use core;
use syntax::codemap::span;
use syntax::ast;
@ -572,18 +575,12 @@ impl to_bytes::IterBytes for Constraint {
}
}
#[deriving(Eq)]
#[deriving(Eq, IterBytes)]
struct TwoRegions {
a: Region,
b: Region,
}
impl to_bytes::IterBytes for TwoRegions {
fn iter_bytes(&self, lsb0: bool, f: to_bytes::Cb) {
to_bytes::iter_bytes_2(&self.a, &self.b, lsb0, f)
}
}
enum UndoLogEntry {
Snapshot,
AddVar(RegionVid),
@ -637,7 +634,7 @@ pub fn RegionVarBindings(tcx: ty::ctxt) -> RegionVarBindings {
}
pub impl RegionVarBindings {
fn in_snapshot(&mut self) -> bool {
fn in_snapshot(&self) -> bool {
self.undo_log.len() > 0
}
@ -832,7 +829,6 @@ pub impl RegionVarBindings {
}
fn resolve_var(&mut self, rid: RegionVid) -> ty::Region {
debug!("RegionVarBindings: resolve_var(%?=%u)", rid, rid.to_uint());
if self.values.is_empty() {
self.tcx.sess.span_bug(
self.var_spans[rid.to_uint()],
@ -841,29 +837,14 @@ pub impl RegionVarBindings {
}
let v = self.values.with_ref(|values| values[rid.to_uint()]);
debug!("RegionVarBindings: resolve_var(%?=%u)=%?",
rid, rid.to_uint(), v);
match v {
Value(r) => r,
NoValue => {
// No constraints, report an error. It is plausible
// that we could select an arbitrary region here
// instead. At the moment I am not doing this because
// this generally masks bugs in the inference
// algorithm, and given our syntax one cannot create
// generally create a lifetime variable that isn't
// used in some type, and hence all lifetime variables
// should ultimately have some bounds.
self.tcx.sess.span_err(
self.var_spans[rid.to_uint()],
fmt!("Unconstrained region variable #%u", rid.to_uint()));
// Touch of a hack: to suppress duplicate messages,
// replace the NoValue entry with ErrorValue.
let mut values = self.values.take();
values[rid.to_uint()] = ErrorValue;
self.values.put_back(values);
re_static
// No constraints, return ty::re_empty
re_empty
}
ErrorValue => {
@ -1031,6 +1012,10 @@ priv impl RegionVarBindings {
re_static // nothing lives longer than static
}
(re_empty, r) | (r, re_empty) => {
r // everything lives longer than empty
}
(re_infer(ReVar(v_id)), _) | (_, re_infer(ReVar(v_id))) => {
self.tcx.sess.span_bug(
self.var_spans[v_id.to_uint()],
@ -1127,6 +1112,11 @@ priv impl RegionVarBindings {
Ok(r)
}
(re_empty, _) | (_, re_empty) => {
// nothing lives shorter than everything else
Ok(re_empty)
}
(re_infer(ReVar(v_id)), _) |
(_, re_infer(ReVar(v_id))) => {
self.tcx.sess.span_bug(
@ -1266,8 +1256,6 @@ struct SpannedRegion {
span: span,
}
type TwoRegionsMap = HashSet<TwoRegions>;
pub impl RegionVarBindings {
fn infer_variable_values(&mut self) -> ~[GraphNodeValue] {
let mut graph = self.construct_graph();
@ -1329,11 +1317,15 @@ pub impl RegionVarBindings {
node_id: RegionVid,
edge_dir: Direction,
edge_idx: uint) {
//! Insert edge `edge_idx` on the link list of edges in direction
//! `edge_dir` for the node `node_id`
let edge_dir = edge_dir as uint;
graph.edges[edge_idx].next_edge[edge_dir] =
graph.nodes[node_id.to_uint()].head_edge[edge_dir];
graph.nodes[node_id.to_uint()].head_edge[edge_dir] =
edge_idx;
assert_eq!(graph.edges[edge_idx].next_edge[edge_dir],
uint::max_value);
let n = node_id.to_uint();
let prev_head = graph.nodes[n].head_edge[edge_dir];
graph.edges[edge_idx].next_edge[edge_dir] = prev_head;
graph.nodes[n].head_edge[edge_dir] = edge_idx;
}
}
@ -1484,6 +1476,8 @@ pub impl RegionVarBindings {
}
}
Err(_) => {
debug!("Setting %? to ErrorValue: no glb of %?, %?",
a_vid, a_region, b_region);
a_node.value = ErrorValue;
false
}
@ -1495,7 +1489,21 @@ pub impl RegionVarBindings {
&mut self,
graph: &Graph) -> ~[GraphNodeValue]
{
let mut dup_map = HashSet::new();
debug!("extract_values_and_report_conflicts()");
// This is the best way that I have found to suppress
// duplicate and related errors. Basically we keep a set of
// flags for every node. Whenever an error occurs, we will
// walk some portion of the graph looking to find pairs of
// conflicting regions to report to the user. As we walk, we
// trip the flags from false to true, and if we find that
// we've already reported an error involving any particular
// node we just stop and don't report the current error. The
// idea is to report errors that derive from independent
// regions of the graph, but not those that derive from
// overlapping locations.
let mut dup_vec = graph.nodes.map(|_| uint::max_value);
graph.nodes.mapi(|idx, node| {
match node.value {
Value(_) => {
@ -1530,15 +1538,16 @@ pub impl RegionVarBindings {
that is not used is not a problem, so if this rule
starts to create problems we'll have to revisit
this portion of the code and think hard about it. =) */
let node_vid = RegionVid { id: idx };
match node.classification {
Expanding => {
self.report_error_for_expanding_node(
graph, &mut dup_map, node_vid);
graph, dup_vec, node_vid);
}
Contracting => {
self.report_error_for_contracting_node(
graph, &mut dup_map, node_vid);
graph, dup_vec, node_vid);
}
}
}
@ -1548,38 +1557,26 @@ pub impl RegionVarBindings {
})
}
// Used to suppress reporting the same basic error over and over
fn is_reported(&mut self,
dup_map: &mut TwoRegionsMap,
r_a: Region,
r_b: Region)
-> bool {
let key = TwoRegions { a: r_a, b: r_b };
!dup_map.insert(key)
}
fn report_error_for_expanding_node(&mut self,
graph: &Graph,
dup_map: &mut TwoRegionsMap,
dup_vec: &mut [uint],
node_idx: RegionVid) {
// Errors in expanding nodes result from a lower-bound that is
// not contained by an upper-bound.
let lower_bounds =
self.collect_concrete_regions(graph, node_idx, Incoming);
let upper_bounds =
self.collect_concrete_regions(graph, node_idx, Outgoing);
let (lower_bounds, lower_dup) =
self.collect_concrete_regions(graph, node_idx, Incoming, dup_vec);
let (upper_bounds, upper_dup) =
self.collect_concrete_regions(graph, node_idx, Outgoing, dup_vec);
if lower_dup || upper_dup {
return;
}
for vec::each(lower_bounds) |lower_bound| {
for vec::each(upper_bounds) |upper_bound| {
if !self.is_subregion_of(lower_bound.region,
upper_bound.region) {
if self.is_reported(dup_map,
lower_bound.region,
upper_bound.region) {
return;
}
self.tcx.sess.span_err(
self.var_spans[node_idx.to_uint()],
fmt!("cannot infer an appropriate lifetime \
@ -1609,16 +1606,28 @@ pub impl RegionVarBindings {
}
}
}
self.tcx.sess.span_bug(
self.var_spans[node_idx.to_uint()],
fmt!("report_error_for_expanding_node() could not find error \
for var %?, lower_bounds=%s, upper_bounds=%s",
node_idx,
lower_bounds.map(|x| x.region).repr(self.tcx),
upper_bounds.map(|x| x.region).repr(self.tcx)));
}
fn report_error_for_contracting_node(&mut self,
graph: &Graph,
dup_map: &mut TwoRegionsMap,
dup_vec: &mut [uint],
node_idx: RegionVid) {
// Errors in contracting nodes result from two upper-bounds
// that have no intersection.
let upper_bounds = self.collect_concrete_regions(graph, node_idx,
Outgoing);
let (upper_bounds, dup_found) =
self.collect_concrete_regions(graph, node_idx, Outgoing, dup_vec);
if dup_found {
return;
}
for vec::each(upper_bounds) |upper_bound_1| {
for vec::each(upper_bounds) |upper_bound_2| {
@ -1627,12 +1636,6 @@ pub impl RegionVarBindings {
Ok(_) => {}
Err(_) => {
if self.is_reported(dup_map,
upper_bound_1.region,
upper_bound_2.region) {
return;
}
self.tcx.sess.span_err(
self.var_spans[node_idx.to_uint()],
fmt!("cannot infer an appropriate lifetime \
@ -1663,50 +1666,94 @@ pub impl RegionVarBindings {
}
}
}
self.tcx.sess.span_bug(
self.var_spans[node_idx.to_uint()],
fmt!("report_error_for_contracting_node() could not find error \
for var %?, upper_bounds=%s",
node_idx,
upper_bounds.map(|x| x.region).repr(self.tcx)));
}
fn collect_concrete_regions(&mut self,
graph: &Graph,
orig_node_idx: RegionVid,
dir: Direction)
-> ~[SpannedRegion] {
let mut set = HashSet::new();
let mut stack = ~[orig_node_idx];
set.insert(orig_node_idx.to_uint());
let mut result = ~[];
while !vec::is_empty(stack) {
let node_idx = stack.pop();
for self.each_edge(graph, node_idx, dir) |edge| {
dir: Direction,
dup_vec: &mut [uint])
-> (~[SpannedRegion], bool) {
struct WalkState {
set: HashSet<RegionVid>,
stack: ~[RegionVid],
result: ~[SpannedRegion],
dup_found: bool
}
let mut state = WalkState {
set: HashSet::new(),
stack: ~[orig_node_idx],
result: ~[],
dup_found: false
};
state.set.insert(orig_node_idx);
// to start off the process, walk the source node in the
// direction specified
process_edges(self, &mut state, graph, orig_node_idx, dir);
while !state.stack.is_empty() {
let node_idx = state.stack.pop();
let classification = graph.nodes[node_idx.to_uint()].classification;
// check whether we've visited this node on some previous walk
if dup_vec[node_idx.to_uint()] == uint::max_value {
dup_vec[node_idx.to_uint()] = orig_node_idx.to_uint();
} else if dup_vec[node_idx.to_uint()] != orig_node_idx.to_uint() {
state.dup_found = true;
}
debug!("collect_concrete_regions(orig_node_idx=%?, node_idx=%?, \
classification=%?)",
orig_node_idx, node_idx, classification);
// figure out the direction from which this node takes its
// values, and search for concrete regions etc in that direction
let dir = match classification {
Expanding => Incoming,
Contracting => Outgoing
};
process_edges(self, &mut state, graph, node_idx, dir);
}
let WalkState {result, dup_found, _} = state;
return (result, dup_found);
fn process_edges(self: &mut RegionVarBindings,
state: &mut WalkState,
graph: &Graph,
source_vid: RegionVid,
dir: Direction) {
debug!("process_edges(source_vid=%?, dir=%?)", source_vid, dir);
for self.each_edge(graph, source_vid, dir) |edge| {
match edge.constraint {
ConstrainVarSubVar(from_vid, to_vid) => {
let vid = match dir {
Incoming => from_vid,
Outgoing => to_vid
};
if set.insert(vid.to_uint()) {
stack.push(vid);
ConstrainVarSubVar(from_vid, to_vid) => {
let opp_vid =
if from_vid == source_vid {to_vid} else {from_vid};
if state.set.insert(opp_vid) {
state.stack.push(opp_vid);
}
}
}
ConstrainRegSubVar(region, _) => {
assert!(dir == Incoming);
result.push(SpannedRegion {
region: region,
span: edge.span
});
}
ConstrainVarSubReg(_, region) => {
assert!(dir == Outgoing);
result.push(SpannedRegion {
region: region,
span: edge.span
});
}
ConstrainRegSubVar(region, _) |
ConstrainVarSubReg(_, region) => {
state.result.push(SpannedRegion {
region: region,
span: edge.span
});
}
}
}
}
return result;
}
fn each_edge(&mut self,

View file

@ -23,7 +23,7 @@ pub enum VarValue<V, T> {
}
pub struct ValsAndBindings<V, T> {
vals: @mut SmallIntMap<VarValue<V, T>>,
vals: SmallIntMap<VarValue<V, T>>,
bindings: ~[(V, VarValue<V, T>)],
}
@ -60,26 +60,25 @@ pub impl InferCtxt {
vid: V) -> Node<V, T>
{
let vid_u = vid.to_uint();
match vb.vals.find(&vid_u) {
let var_val = match vb.vals.find(&vid_u) {
Some(&var_val) => var_val,
None => {
tcx.sess.bug(fmt!(
"failed lookup of vid `%u`", vid_u));
}
Some(var_val) => {
match *var_val {
Redirect(vid) => {
let node: Node<V,T> = helper(tcx, vb, vid);
if node.root != vid {
// Path compression
vb.vals.insert(vid.to_uint(),
Redirect(node.root));
}
node
}
Root(ref pt, rk) => {
Node {root: vid, possible_types: *pt, rank: rk}
}
};
match var_val {
Redirect(vid) => {
let node: Node<V,T> = helper(tcx, vb, vid);
if node.root != vid {
// Path compression
vb.vals.insert(vid.to_uint(),
Redirect(node.root));
}
node
}
Root(pt, rk) => {
Node {root: vid, possible_types: pt, rank: rk}
}
}
}
@ -99,8 +98,8 @@ pub impl InferCtxt {
{ // FIXME(#4903)---borrow checker is not flow sensitive
let vb = UnifyVid::appropriate_vals_and_bindings(self);
let old_v = vb.vals.get(&vid.to_uint());
vb.bindings.push((vid, *old_v));
let old_v = { *vb.vals.get(&vid.to_uint()) }; // FIXME(#4903)
vb.bindings.push((vid, old_v));
vb.vals.insert(vid.to_uint(), new_v);
}
}

View file

@ -414,7 +414,11 @@ pub fn check_crate(tcx: ty::ctxt,
time(time_passes, ~"type collecting", ||
collect::collect_item_types(ccx, crate));
time(time_passes, ~"method resolution", ||
// this ensures that later parts of type checking can assume that items
// have valid types and not error
tcx.sess.abort_if_errors();
time(time_passes, ~"coherence checking", ||
coherence::check_coherence(ccx, crate));
time(time_passes, ~"type checking", ||

View file

@ -46,6 +46,7 @@ pub mod middle {
pub mod controlflow;
pub mod glue;
pub mod datum;
pub mod write_guard;
pub mod callee;
pub mod expr;
pub mod common;
@ -75,6 +76,9 @@ pub mod middle {
}
pub mod ty;
pub mod subst;
#[cfg(stage0)] #[path = "resolve_stage0.rs"]
pub mod resolve;
#[cfg(not(stage0))]
pub mod resolve;
#[path = "typeck/mod.rs"]
pub mod typeck;
@ -84,6 +88,7 @@ pub mod middle {
pub mod lint;
#[path = "borrowck/mod.rs"]
pub mod borrowck;
pub mod dataflow;
pub mod mem_categorization;
pub mod liveness;
pub mod kind;

View file

@ -13,7 +13,8 @@ use middle::ty::{ReSkolemized, ReVar};
use middle::ty::{bound_region, br_anon, br_named, br_self, br_cap_avoid};
use middle::ty::{br_fresh, ctxt, field, method};
use middle::ty::{mt, t, param_bound, param_ty};
use middle::ty::{re_bound, re_free, re_scope, re_infer, re_static, Region};
use middle::ty::{re_bound, re_free, re_scope, re_infer, re_static, Region,
re_empty};
use middle::ty::{ty_bool, ty_bot, ty_box, ty_struct, ty_enum};
use middle::ty::{ty_err, ty_estr, ty_evec, ty_float, ty_bare_fn, ty_closure};
use middle::ty::{ty_nil, ty_opaque_box, ty_opaque_closure_ptr, ty_param};
@ -65,6 +66,9 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region)
Some(&ast_map::node_block(ref blk)) => {
explain_span(cx, "block", blk.span)
}
Some(&ast_map::node_callee_scope(expr)) => {
explain_span(cx, "callee", expr.span)
}
Some(&ast_map::node_expr(expr)) => {
match expr.node {
ast::expr_call(*) => explain_span(cx, "call", expr.span),
@ -113,6 +117,8 @@ pub fn explain_region_and_span(cx: ctxt, region: ty::Region)
re_static => { (~"the static lifetime", None) }
re_empty => { (~"the empty lifetime", None) }
// I believe these cases should not occur (except when debugging,
// perhaps)
re_infer(_) | re_bound(_) => {
@ -212,7 +218,8 @@ pub fn region_to_str_space(cx: ctxt, prefix: &str, region: Region) -> ~str {
bound_region_to_str_space(cx, prefix, br)
}
re_infer(ReVar(_)) => prefix.to_str(),
re_static => fmt!("%s'static ", prefix)
re_static => fmt!("%s'static ", prefix),
re_empty => fmt!("%s'<empty> ", prefix)
}
}
@ -739,3 +746,12 @@ impl Repr for ty::vstore {
vstore_to_str(tcx, *self)
}
}
impl Repr for ast_map::path_elt {
fn repr(&self, tcx: ctxt) -> ~str {
match *self {
ast_map::path_mod(id) => id.repr(tcx),
ast_map::path_name(id) => id.repr(tcx)
}
}
}

View file

@ -419,26 +419,26 @@ pub struct RWReadMode<'self, T> {
pub impl<'self, T:Const + Owned> RWWriteMode<'self, T> {
/// Access the pre-downgrade RWARC in write mode.
fn write<U>(&self, blk: &fn(x: &mut T) -> U) -> U {
fn write<U>(&mut self, blk: &fn(x: &mut T) -> U) -> U {
match *self {
RWWriteMode {
data: ref data,
data: &ref mut data,
token: ref token,
poison: _
} => {
do token.write {
blk(&mut **data)
blk(data)
}
}
}
}
/// Access the pre-downgrade RWARC in write mode with a condvar.
fn write_cond<'x, 'c, U>(&self,
fn write_cond<'x, 'c, U>(&mut self,
blk: &fn(x: &'x mut T, c: &'c Condvar) -> U)
-> U {
match *self {
RWWriteMode {
data: ref data,
data: &ref mut data,
token: ref token,
poison: ref poison
} => {
@ -449,7 +449,7 @@ pub impl<'self, T:Const + Owned> RWWriteMode<'self, T> {
failed: &mut *poison.failed,
cond: cond
};
blk(&mut **data, &cvar)
blk(data, &cvar)
}
}
}
@ -598,8 +598,8 @@ mod tests {
let arc = ~RWARC(1);
let arc2 = (*arc).clone();
do task::try || {
do arc2.write_downgrade |write_mode| {
do (&write_mode).write |one| {
do arc2.write_downgrade |mut write_mode| {
do write_mode.write |one| {
assert!(*one == 2);
}
}
@ -733,8 +733,8 @@ mod tests {
}
// Downgrader (us)
do arc.write_downgrade |write_mode| {
do (&write_mode).write_cond |state, cond| {
do arc.write_downgrade |mut write_mode| {
do write_mode.write_cond |state, cond| {
wc1.send(()); // send to another writer who will wake us up
while *state == 0 {
cond.wait();

View file

@ -315,9 +315,6 @@ fn test_arena_destructors_fail() {
}
// Now, fail while allocating
do arena.alloc::<@int> {
// First, recursively allocate something else; that needs to
// get freed too.
do arena.alloc { @20 };
// Now fail.
fail!();
};

View file

@ -215,16 +215,16 @@ pub struct Bitv {
nbits: uint
}
priv impl Bitv {
fn die() -> ! {
fail!(~"Tried to do operation on bit vectors with different sizes");
}
fn die(&self) -> ! {
fail!(~"Tried to do operation on bit vectors with different sizes");
}
priv impl Bitv {
#[inline(always)]
fn do_op(&mut self, op: Op, other: &Bitv) -> bool {
if self.nbits != other.nbits {
self.die();
die();
}
match self.rep {
Small(ref mut s) => match other.rep {
@ -234,10 +234,10 @@ priv impl Bitv {
Assign => s.become(*s1, self.nbits),
Difference => s.difference(*s1, self.nbits)
},
Big(_) => self.die()
Big(_) => die()
},
Big(ref mut s) => match other.rep {
Small(_) => self.die(),
Small(_) => die(),
Big(ref s1) => match op {
Union => s.union(*s1, self.nbits),
Intersect => s.intersect(*s1, self.nbits),

View file

@ -735,7 +735,7 @@ pub mod writer {
priv impl Encoder {
// used internally to emit things like the vector length and so on
fn _emit_tagged_uint(&mut self, t: EbmlEncoderTag, v: uint) {
assert!(v <= 0xFFFF_FFFF_u);
assert!(v <= 0xFFFF_FFFF_u); // FIXME(#6130) assert warns on 32-bit
self.wr_tagged_u32(t as uint, v as u32);
}

View file

@ -883,8 +883,8 @@ impl io::Reader for TcpSocketBuf {
let ncopy = uint::min(nbuffered, needed);
let dst = ptr::mut_offset(
vec::raw::to_mut_ptr(buf), count);
let src = ptr::const_offset(
vec::raw::to_const_ptr(self.data.buf),
let src = ptr::offset(
vec::raw::to_ptr(self.data.buf),
self.data.buf_off);
ptr::copy_memory(dst, src, ncopy);
self.data.buf_off += ncopy;
@ -967,7 +967,7 @@ impl io::Reader for TcpSocketBuf {
/// Implementation of `io::Reader` trait for a buffered `net::tcp::TcpSocket`
impl io::Writer for TcpSocketBuf {
pub fn write(&self, data: &const [u8]) {
pub fn write(&self, data: &[u8]) {
unsafe {
let socket_data_ptr: *TcpSocketData =
&(*((*(self.data)).sock).socket_data);

View file

@ -1256,22 +1256,24 @@ mod tests {
match (r) {
node::Empty => return ~"",
node::Content(x) => {
let str = @mut ~"";
fn aux(str: @mut ~str, node: @node::Node) {
let mut str = ~"";
fn aux(str: &mut ~str, node: @node::Node) {
match (*node) {
node::Leaf(x) => {
*str += str::slice(
*x.content, x.byte_offset,
x.byte_offset + x.byte_len).to_owned();
}
node::Concat(ref x) => {
aux(str, x.left);
aux(str, x.right);
}
node::Leaf(x) => {
str::push_str(
str,
str::slice(
*x.content, x.byte_offset,
x.byte_offset + x.byte_len));
}
node::Concat(ref x) => {
aux(str, x.left);
aux(str, x.right);
}
}
}
aux(str, x);
return *str
aux(&mut str, x);
return str
}
}
}

View file

@ -11,7 +11,6 @@
//! Sorting methods
use core::cmp::{Eq, Ord};
use core::util;
use core::vec::len;
use core::vec;
@ -23,12 +22,12 @@ type Le<'self, T> = &'self fn(v1: &T, v2: &T) -> bool;
* Has worst case O(n log n) performance, best case O(n), but
* is not space efficient. This is a stable sort.
*/
pub fn merge_sort<T:Copy>(v: &const [T], le: Le<T>) -> ~[T] {
pub fn merge_sort<T:Copy>(v: &[T], le: Le<T>) -> ~[T] {
type Slice = (uint, uint);
return merge_sort_(v, (0u, len(v)), le);
fn merge_sort_<T:Copy>(v: &const [T], slice: Slice, le: Le<T>)
fn merge_sort_<T:Copy>(v: &[T], slice: Slice, le: Le<T>)
-> ~[T] {
let begin = slice.first();
let end = slice.second();
@ -61,6 +60,7 @@ pub fn merge_sort<T:Copy>(v: &const [T], le: Le<T>) -> ~[T] {
}
}
#[cfg(stage0)]
fn part<T>(arr: &mut [T], left: uint,
right: uint, pivot: uint, compare_func: Le<T>) -> uint {
arr[pivot] <-> arr[right];
@ -79,6 +79,23 @@ fn part<T>(arr: &mut [T], left: uint,
return storage_index;
}
#[cfg(not(stage0))]
fn part<T>(arr: &mut [T], left: uint,
right: uint, pivot: uint, compare_func: Le<T>) -> uint {
arr[pivot] <-> arr[right];
let mut storage_index: uint = left;
let mut i: uint = left;
while i < right {
if compare_func(&arr[i], &arr[right]) {
arr[i] <-> arr[storage_index];
storage_index += 1;
}
i += 1;
}
arr[storage_index] <-> arr[right];
return storage_index;
}
fn qsort<T>(arr: &mut [T], left: uint,
right: uint, compare_func: Le<T>) {
if right > left {
@ -162,7 +179,8 @@ fn qsort3<T:Copy + Ord + Eq>(arr: &mut [T], left: int, right: int) {
*/
pub fn quick_sort3<T:Copy + Ord + Eq>(arr: &mut [T]) {
if arr.len() <= 1 { return; }
qsort3(arr, 0, (arr.len() - 1) as int);
let len = arr.len(); // FIXME(#5074) nested calls
qsort3(arr, 0, (len - 1) as int);
}
pub trait Sort {
@ -195,15 +213,20 @@ pub fn tim_sort<T:Copy + Ord>(array: &mut [T]) {
let mut idx = 0;
let mut remaining = size;
loop {
let arr = vec::mut_slice(array, idx, size);
let mut run_len: uint = count_run_ascending(arr);
let run_len: uint = {
// This scope contains the slice `arr` here:
let arr = vec::mut_slice(array, idx, size);
let mut run_len: uint = count_run_ascending(arr);
if run_len < min_run {
let force = if remaining <= min_run {remaining} else {min_run};
let slice = vec::mut_slice(arr, 0, force);
binarysort(slice, run_len);
run_len = force;
}
if run_len < min_run {
let force = if remaining <= min_run {remaining} else {min_run};
let slice = vec::mut_slice(arr, 0, force);
binarysort(slice, run_len);
run_len = force;
}
run_len
};
ms.push_run(idx, run_len);
ms.merge_collapse(array);
@ -240,7 +263,7 @@ fn binarysort<T:Copy + Ord>(array: &mut [T], start: uint) {
assert!(left == right);
let n = start-left;
copy_vec(array, left+1, array, left, n);
shift_vec(array, left+1, left, n);
array[left] = pivot;
start += 1;
}
@ -250,7 +273,7 @@ fn binarysort<T:Copy + Ord>(array: &mut [T], start: uint) {
fn reverse_slice<T>(v: &mut [T], start: uint, end:uint) {
let mut i = start;
while i < end / 2 {
util::swap(&mut v[i], &mut v[end - i - 1]);
v[i] <-> v[end - i - 1];
i += 1;
}
}
@ -286,8 +309,8 @@ fn count_run_ascending<T:Copy + Ord>(array: &mut [T]) -> uint {
return run;
}
fn gallop_left<T:Copy + Ord>(key: &const T,
array: &const [T],
fn gallop_left<T:Copy + Ord>(key: &T,
array: &[T],
hint: uint)
-> uint {
let size = array.len();
@ -337,8 +360,8 @@ fn gallop_left<T:Copy + Ord>(key: &const T,
return ofs;
}
fn gallop_right<T:Copy + Ord>(key: &const T,
array: &const [T],
fn gallop_right<T:Copy + Ord>(key: &T,
array: &[T],
hint: uint)
-> uint {
let size = array.len();
@ -433,14 +456,17 @@ impl<T:Copy + Ord> MergeState<T> {
self.runs[n+1].len = self.runs[n+2].len;
}
let slice = vec::mut_slice(array, b1, b1+l1);
let k = gallop_right(&const array[b2], slice, 0);
let k = { // constrain lifetime of slice below
let slice = vec::slice(array, b1, b1+l1);
gallop_right(&array[b2], slice, 0)
};
b1 += k;
l1 -= k;
if l1 != 0 {
let slice = vec::mut_slice(array, b2, b2+l2);
let l2 = gallop_left(
&const array[b1+l1-1],slice,l2-1);
let l2 = { // constrain lifetime of slice below
let slice = vec::slice(array, b2, b2+l2);
gallop_left(&array[b1+l1-1],slice,l2-1)
};
if l2 > 0 {
if l1 <= l2 {
self.merge_lo(array, b1, l1, b2, l2);
@ -471,11 +497,11 @@ impl<T:Copy + Ord> MergeState<T> {
dest += 1; c2 += 1; len2 -= 1;
if len2 == 0 {
copy_vec(array, dest, tmp, 0, len1);
copy_vec(array, dest, tmp.slice(0, len1));
return;
}
if len1 == 1 {
copy_vec(array, dest, array, c2, len2);
shift_vec(array, dest, c2, len2);
array[dest+len2] <-> tmp[c1];
return;
}
@ -513,10 +539,12 @@ impl<T:Copy + Ord> MergeState<T> {
loop {
assert!(len1 > 1 && len2 != 0);
let tmp_view = vec::const_slice(tmp, c1, c1+len1);
count1 = gallop_right(&const array[c2], tmp_view, 0);
count1 = {
let tmp_view = vec::slice(tmp, c1, c1+len1);
gallop_right(&array[c2], tmp_view, 0)
};
if count1 != 0 {
copy_vec(array, dest, tmp, c1, count1);
copy_vec(array, dest, tmp.slice(c1, c1+count1));
dest += count1; c1 += count1; len1 -= count1;
if len1 <= 1 { break_outer = true; break; }
}
@ -524,10 +552,12 @@ impl<T:Copy + Ord> MergeState<T> {
dest += 1; c2 += 1; len2 -= 1;
if len2 == 0 { break_outer = true; break; }
let tmp_view = vec::const_slice(array, c2, c2+len2);
count2 = gallop_left(&const tmp[c1], tmp_view, 0);
count2 = {
let tmp_view = vec::slice(array, c2, c2+len2);
gallop_left(&tmp[c1], tmp_view, 0)
};
if count2 != 0 {
copy_vec(array, dest, array, c2, count2);
shift_vec(array, dest, c2, count2);
dest += count2; c2 += count2; len2 -= count2;
if len2 == 0 { break_outer = true; break; }
}
@ -547,14 +577,14 @@ impl<T:Copy + Ord> MergeState<T> {
if len1 == 1 {
assert!(len2 > 0);
copy_vec(array, dest, array, c2, len2);
shift_vec(array, dest, c2, len2);
array[dest+len2] <-> tmp[c1];
} else if len1 == 0 {
fail!(~"Comparison violates its contract!");
} else {
assert!(len2 == 0);
assert!(len1 > 1);
copy_vec(array, dest, tmp, c1, len1);
copy_vec(array, dest, tmp.slice(c1, c1+len1));
}
}
@ -577,13 +607,13 @@ impl<T:Copy + Ord> MergeState<T> {
dest -= 1; c1 -= 1; len1 -= 1;
if len1 == 0 {
copy_vec(array, dest-(len2-1), tmp, 0, len2);
copy_vec(array, dest-(len2-1), tmp.slice(0, len2));
return;
}
if len2 == 1 {
dest -= len1;
c1 -= len1;
copy_vec(array, dest+1, array, c1+1, len1);
shift_vec(array, dest+1, c1+1, len1);
array[dest] <-> tmp[c2];
return;
}
@ -621,13 +651,15 @@ impl<T:Copy + Ord> MergeState<T> {
loop {
assert!(len2 > 1 && len1 != 0);
let tmp_view = vec::mut_slice(array, base1, base1+len1);
count1 = len1 - gallop_right(
&const tmp[c2], tmp_view, len1-1);
{ // constrain scope of tmp_view:
let tmp_view = vec::mut_slice (array, base1, base1+len1);
count1 = len1 - gallop_right(
&tmp[c2], tmp_view, len1-1);
}
if count1 != 0 {
dest -= count1; c1 -= count1; len1 -= count1;
copy_vec(array, dest+1, array, c1+1, count1);
shift_vec(array, dest+1, c1+1, count1);
if len1 == 0 { break_outer = true; break; }
}
@ -636,17 +668,16 @@ impl<T:Copy + Ord> MergeState<T> {
if len2 == 1 { break_outer = true; break; }
let count2;
{
{ // constrain scope of tmp_view
let tmp_view = vec::mut_slice(tmp, 0, len2);
count2 = len2 - gallop_left(&const array[c1],
count2 = len2 - gallop_left(&array[c1],
tmp_view,
len2-1);
// Make tmp_view go out of scope to appease borrowck.
}
if count2 != 0 {
dest -= count2; c2 -= count2; len2 -= count2;
copy_vec(array, dest+1, tmp, c2+1, count2);
copy_vec(array, dest+1, tmp.slice(c2+1, c2+1+count2));
if len2 <= 1 { break_outer = true; break; }
}
array[dest] <-> array[c1];
@ -668,14 +699,14 @@ impl<T:Copy + Ord> MergeState<T> {
assert!(len1 > 0);
dest -= len1;
c1 -= len1;
copy_vec(array, dest+1, array, c1+1, len1);
shift_vec(array, dest+1, c1+1, len1);
array[dest] <-> tmp[c2];
} else if len2 == 0 {
fail!(~"Comparison violates its contract!");
} else {
assert!(len1 == 0);
assert!(len2 != 0);
copy_vec(array, dest-(len2-1), tmp, 0, len2);
copy_vec(array, dest-(len2-1), tmp.slice(0, len2));
}
}
@ -711,21 +742,25 @@ impl<T:Copy + Ord> MergeState<T> {
#[inline(always)]
fn copy_vec<T:Copy>(dest: &mut [T],
s1: uint,
from: &const [T],
s2: uint,
len: uint) {
assert!(s1+len <= dest.len() && s2+len <= from.len());
from: &[T]) {
assert!(s1+from.len() <= dest.len());
let mut slice = ~[];
for uint::range(s2, s2+len) |i| {
slice.push(from[i]);
}
for slice.eachi |i, v| {
for from.eachi |i, v| {
dest[s1+i] = *v;
}
}
#[inline(always)]
fn shift_vec<T:Copy>(dest: &mut [T],
s1: uint,
s2: uint,
len: uint) {
assert!(s1+len <= dest.len());
let tmp = dest.slice(s2, s2+len).to_vec();
copy_vec(dest, s1, tmp);
}
#[cfg(test)]
mod test_qsort3 {
use sort::*;
@ -737,8 +772,7 @@ mod test_qsort3 {
quick_sort3::<int>(v1);
let mut i = 0;
while i < len {
// debug!(v2[i]);
assert!((v2[i] == v1[i]));
assert_eq!(v2[i], v1[i]);
i += 1;
}
}
@ -1009,7 +1043,7 @@ mod big_tests {
tabulate_managed(low, high);
}
fn multiplyVec<T:Copy>(arr: &const [T], num: uint) -> ~[T] {
fn multiplyVec<T:Copy>(arr: &[T], num: uint) -> ~[T] {
let size = arr.len();
let res = do vec::from_fn(num) |i| {
arr[i % size]
@ -1025,7 +1059,7 @@ mod big_tests {
}
fn tabulate_unique(lo: uint, hi: uint) {
fn isSorted<T:Ord>(arr: &const [T]) {
fn isSorted<T:Ord>(arr: &[T]) {
for uint::range(0, arr.len()-1) |i| {
if arr[i] > arr[i+1] {
fail!(~"Array not sorted");
@ -1096,7 +1130,7 @@ mod big_tests {
}
fn tabulate_managed(lo: uint, hi: uint) {
fn isSorted<T:Ord>(arr: &const [@T]) {
fn isSorted<T:Ord>(arr: &[@T]) {
for uint::range(0, arr.len()-1) |i| {
if arr[i] > arr[i+1] {
fail!(~"Array not sorted");

1239
src/libstd/sort_stage0.rs Normal file

File diff suppressed because it is too large Load diff

View file

@ -69,7 +69,14 @@ pub mod list;
pub mod priority_queue;
pub mod rope;
pub mod smallintmap;
#[cfg(stage0)]
#[path="sort_stage0.rs"]
pub mod sort;
#[cfg(not(stage0))]
pub mod sort;
pub mod dlist;
pub mod treemap;

View file

@ -18,6 +18,7 @@ use diagnostic::span_handler;
use parse::token::ident_interner;
use print::pprust;
use visit;
use syntax::parse::token::special_idents;
use core::hashmap::HashMap;
@ -88,12 +89,11 @@ pub enum ast_node {
node_variant(variant, @item, @path),
node_expr(@expr),
node_stmt(@stmt),
// Locals are numbered, because the alias analysis needs to know in which
// order they are introduced.
node_arg(arg, uint),
node_local(uint),
node_arg,
node_local(ident),
node_block(blk),
node_struct_ctor(@struct_def, @item, @path),
node_callee_scope(@expr)
}
pub type map = @mut HashMap<node_id, ast_node>;
@ -101,7 +101,6 @@ pub type map = @mut HashMap<node_id, ast_node>;
pub struct Ctx {
map: map,
path: path,
local_id: uint,
diag: @span_handler,
}
@ -117,9 +116,8 @@ pub fn mk_ast_map_visitor() -> vt {
visit_expr: map_expr,
visit_stmt: map_stmt,
visit_fn: map_fn,
visit_local: map_local,
visit_arm: map_arm,
visit_block: map_block,
visit_pat: map_pat,
.. *visit::default_visitor()
});
}
@ -128,7 +126,6 @@ pub fn map_crate(diag: @span_handler, c: @crate) -> map {
let cx = @mut Ctx {
map: @mut HashMap::new(),
path: ~[],
local_id: 0u,
diag: diag,
};
visit::visit_crate(c, cx, mk_ast_map_visitor());
@ -151,7 +148,6 @@ pub fn map_decoded_item(diag: @span_handler,
let cx = @mut Ctx {
map: map,
path: copy path,
local_id: 0,
diag: diag,
};
let v = mk_ast_map_visitor();
@ -186,9 +182,7 @@ pub fn map_fn(
v: visit::vt<@mut Ctx>
) {
for decl.inputs.each |a| {
cx.map.insert(a.id,
node_arg(/* FIXME (#2543) */ copy *a, cx.local_id));
cx.local_id += 1u;
cx.map.insert(a.id, node_arg);
}
visit::visit_fn(fk, decl, body, sp, id, cx, v);
}
@ -198,33 +192,22 @@ pub fn map_block(b: &blk, cx: @mut Ctx, v: visit::vt<@mut Ctx>) {
visit::visit_block(b, cx, v);
}
pub fn number_pat(cx: @mut Ctx, pat: @pat) {
do ast_util::walk_pat(pat) |p| {
match p.node {
pat_ident(*) => {
cx.map.insert(p.id, node_local(cx.local_id));
cx.local_id += 1u;
}
_ => ()
pub fn map_pat(pat: @pat, cx: @mut Ctx, v: visit::vt<@mut Ctx>) {
match pat.node {
pat_ident(_, path, _) => {
// Note: this is at least *potentially* a pattern...
cx.map.insert(pat.id, node_local(ast_util::path_to_ident(path)));
}
};
}
_ => ()
}
pub fn map_local(loc: @local, cx: @mut Ctx, v: visit::vt<@mut Ctx>) {
number_pat(cx, loc.node.pat);
visit::visit_local(loc, cx, v);
}
pub fn map_arm(arm: &arm, cx: @mut Ctx, v: visit::vt<@mut Ctx>) {
number_pat(cx, arm.pats[0]);
visit::visit_arm(arm, cx, v);
visit::visit_pat(pat, cx, v);
}
pub fn map_method(impl_did: def_id, impl_path: @path,
m: @method, cx: @mut Ctx) {
cx.map.insert(m.id, node_method(m, impl_did, impl_path));
cx.map.insert(m.self_id, node_local(cx.local_id));
cx.local_id += 1u;
cx.map.insert(m.self_id, node_local(special_idents::self_));
}
pub fn map_item(i: @item, cx: @mut Ctx, v: visit::vt<@mut Ctx>) {
@ -293,6 +276,7 @@ pub fn map_item(i: @item, cx: @mut Ctx, v: visit::vt<@mut Ctx>) {
}
_ => ()
}
match i.node {
item_mod(_) | item_foreign_mod(_) => {
cx.path.push(path_mod(i.ident));
@ -328,6 +312,18 @@ pub fn map_struct_def(
pub fn map_expr(ex: @expr, cx: @mut Ctx, v: visit::vt<@mut Ctx>) {
cx.map.insert(ex.id, node_expr(ex));
match ex.node {
// Expressions which are or might be calls:
ast::expr_call(*) |
ast::expr_method_call(*) |
ast::expr_index(*) |
ast::expr_binary(*) |
ast::expr_assign_op(*) |
ast::expr_unary(*) => {
cx.map.insert(ex.callee_id, node_callee_scope(ex));
}
_ => {}
}
visit::visit_expr(ex, cx, v);
}
@ -377,15 +373,18 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
Some(&node_expr(expr)) => {
fmt!("expr %s (id=%?)", pprust::expr_to_str(expr, itr), id)
}
Some(&node_callee_scope(expr)) => {
fmt!("callee_scope %s (id=%?)", pprust::expr_to_str(expr, itr), id)
}
Some(&node_stmt(stmt)) => {
fmt!("stmt %s (id=%?)",
pprust::stmt_to_str(stmt, itr), id)
}
Some(&node_arg(_, _)) => { // add more info here
Some(&node_arg) => {
fmt!("arg (id=%?)", id)
}
Some(&node_local(_)) => { // add more info here
fmt!("local (id=%?)", id)
Some(&node_local(ident)) => {
fmt!("local (id=%?, name=%s)", id, *itr.get(ident))
}
Some(&node_block(_)) => {
fmt!("block")

Some files were not shown because too many files have changed in this diff Show more