auto merge of #6038 : alexcrichton/rust/more-unsafe, r=pcwalton

Because unsafe functions themselves are never warned about, any `unsafe` block inside an unsafe function is redundant and should definitely be warned about. This fixes that case, adds tests, cleans up the remaining instances in the tree, and also fixes a few other import warnings that were being emitted.
bors 2013-04-23 18:27:48 -07:00
commit 706096b319
15 changed files with 159 additions and 143 deletions
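
In short: with this change, an `unsafe` block whose enclosing function is already `unsafe` is reported as unnecessary. A minimal sketch of the behavior (illustrative names only; the authoritative cases are in the compile-fail test at the end of this diff):

unsafe fn unsf() {}

unsafe fn already_unsafe() {
    unsafe { unsf() } // now warned: the enclosing fn is already unsafe
}

fn needs_block() {
    unsafe { unsf() } // not warned: the block is the only source of unsafety
}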


@@ -231,66 +231,64 @@ unsafe fn walk_gc_roots(mem: Memory, sentinel: **Word, visitor: Visitor) {
     // the stack.
     let mut reached_sentinel = ptr::is_null(sentinel);
     for stackwalk::walk_stack |frame| {
-        unsafe {
-            let pc = last_ret;
-            let Segment {segment: next_segment, boundary: boundary} =
-                find_segment_for_frame(frame.fp, segment);
-            segment = next_segment;
-            // Each stack segment is bounded by a morestack frame. The
-            // morestack frame includes two return addresses, one for
-            // morestack itself, at the normal offset from the frame
-            // pointer, and then a second return address for the
-            // function prologue (which called morestack after
-            // determining that it had hit the end of the stack).
-            // Since morestack itself takes two parameters, the offset
-            // for this second return address is 3 greater than the
-            // return address for morestack.
-            let ret_offset = if boundary { 4 } else { 1 };
-            last_ret = *ptr::offset(frame.fp, ret_offset) as *Word;
-
-            if ptr::is_null(pc) {
-                loop;
-            }
-
-            let mut delay_reached_sentinel = reached_sentinel;
-            let sp = is_safe_point(pc);
-            match sp {
-                Some(sp_info) => {
-                    for walk_safe_point(frame.fp, sp_info) |root, tydesc| {
-                        // Skip roots until we see the sentinel.
-                        if !reached_sentinel {
-                            if root == sentinel {
-                                delay_reached_sentinel = true;
-                            }
-                            loop;
-                        }
-
-                        // Skip null pointers, which can occur when a
-                        // unique pointer has already been freed.
-                        if ptr::is_null(*root) {
-                            loop;
-                        }
-
-                        if ptr::is_null(tydesc) {
-                            // Root is a generic box.
-                            let refcount = **root;
-                            if mem | task_local_heap != 0 && refcount != -1 {
-                                if !visitor(root, tydesc) { return; }
-                            } else if mem | exchange_heap != 0 && refcount == -1 {
-                                if !visitor(root, tydesc) { return; }
-                            }
-                        } else {
-                            // Root is a non-immediate.
-                            if mem | stack != 0 {
-                                if !visitor(root, tydesc) { return; }
-                            }
-                        }
-                    }
-                }
-                None => ()
-            }
-            reached_sentinel = delay_reached_sentinel;
-        }
+        let pc = last_ret;
+        let Segment {segment: next_segment, boundary: boundary} =
+            find_segment_for_frame(frame.fp, segment);
+        segment = next_segment;
+        // Each stack segment is bounded by a morestack frame. The
+        // morestack frame includes two return addresses, one for
+        // morestack itself, at the normal offset from the frame
+        // pointer, and then a second return address for the
+        // function prologue (which called morestack after
+        // determining that it had hit the end of the stack).
+        // Since morestack itself takes two parameters, the offset
+        // for this second return address is 3 greater than the
+        // return address for morestack.
+        let ret_offset = if boundary { 4 } else { 1 };
+        last_ret = *ptr::offset(frame.fp, ret_offset) as *Word;
+
+        if ptr::is_null(pc) {
+            loop;
+        }
+
+        let mut delay_reached_sentinel = reached_sentinel;
+        let sp = is_safe_point(pc);
+        match sp {
+            Some(sp_info) => {
+                for walk_safe_point(frame.fp, sp_info) |root, tydesc| {
+                    // Skip roots until we see the sentinel.
+                    if !reached_sentinel {
+                        if root == sentinel {
+                            delay_reached_sentinel = true;
+                        }
+                        loop;
+                    }
+
+                    // Skip null pointers, which can occur when a
+                    // unique pointer has already been freed.
+                    if ptr::is_null(*root) {
+                        loop;
+                    }
+
+                    if ptr::is_null(tydesc) {
+                        // Root is a generic box.
+                        let refcount = **root;
+                        if mem | task_local_heap != 0 && refcount != -1 {
+                            if !visitor(root, tydesc) { return; }
+                        } else if mem | exchange_heap != 0 && refcount == -1 {
+                            if !visitor(root, tydesc) { return; }
+                        }
+                    } else {
+                        // Root is a non-immediate.
+                        if mem | stack != 0 {
+                            if !visitor(root, tydesc) { return; }
+                        }
+                    }
+                }
+            }
+            None => ()
+        }
+        reached_sentinel = delay_reached_sentinel;
     }
 }


@@ -156,9 +156,7 @@ pub impl PacketHeader {
     unsafe fn unblock(&self) {
         let old_task = swap_task(&mut self.blocked_task, ptr::null());
         if !old_task.is_null() {
-            unsafe {
-                rustrt::rust_task_deref(old_task)
-            }
+            rustrt::rust_task_deref(old_task)
         }
         match swap_state_acq(&mut self.state, Empty) {
             Empty | Blocked => (),


@@ -80,10 +80,8 @@ pub unsafe fn unsafe_borrow() -> &mut Scheduler {
 }

 pub unsafe fn unsafe_borrow_io() -> &mut IoFactoryObject {
-    unsafe {
-        let sched = unsafe_borrow();
-        return sched.event_loop.io().unwrap();
-    }
+    let sched = unsafe_borrow();
+    return sched.event_loop.io().unwrap();
 }

 fn tls_key() -> tls::Key {


@@ -11,7 +11,7 @@
 use option::*;
 use result::*;

-use super::io::net::ip::{IpAddr, Ipv4}; // n.b. Ipv4 is used only in tests
+use super::io::net::ip::IpAddr;
 use super::uv::*;
 use super::rtio::*;
 use ops::Drop;
@@ -19,6 +19,7 @@ use cell::{Cell, empty_cell};
 use cast::transmute;
 use super::sched::{Scheduler, local_sched};

+#[cfg(test)] use super::io::net::ip::Ipv4;
 #[cfg(test)] use super::sched::Task;
 #[cfg(test)] use unstable::run_in_bare_thread;
 #[cfg(test)] use uint;


@@ -98,7 +98,7 @@ pub enum uv_req_type {

 pub unsafe fn malloc_handle(handle: uv_handle_type) -> *c_void {
     assert!(handle != UV_UNKNOWN_HANDLE && handle != UV_HANDLE_TYPE_MAX);
-    let size = unsafe { rust_uv_handle_size(handle as uint) };
+    let size = rust_uv_handle_size(handle as uint);
     let p = malloc(size);
     assert!(p.is_not_null());
     return p;
@@ -110,7 +110,7 @@ pub unsafe fn free_handle(v: *c_void) {

 pub unsafe fn malloc_req(req: uv_req_type) -> *c_void {
     assert!(req != UV_UNKNOWN_REQ && req != UV_REQ_TYPE_MAX);
-    let size = unsafe { rust_uv_req_size(req as uint) };
+    let size = rust_uv_req_size(req as uint);
     let p = malloc(size);
     assert!(p.is_not_null());
     return p;


@@ -196,6 +196,7 @@ impl ToStrConsume for ~[Ascii] {
     }
 }

+#[cfg(test)]
 mod tests {
     use super::*;


@@ -262,18 +262,16 @@ pub impl<T:Owned> Exclusive<T> {
     // the exclusive. Supporting that is a work in progress.
     #[inline(always)]
     unsafe fn with<U>(&self, f: &fn(x: &mut T) -> U) -> U {
-        unsafe {
-            let rec = get_shared_mutable_state(&self.x);
-            do (*rec).lock.lock {
-                if (*rec).failed {
-                    fail!(
-                        ~"Poisoned exclusive - another task failed inside!");
-                }
-                (*rec).failed = true;
-                let result = f(&mut (*rec).data);
-                (*rec).failed = false;
-                result
-            }
-        }
+        let rec = get_shared_mutable_state(&self.x);
+        do (*rec).lock.lock {
+            if (*rec).failed {
+                fail!(
+                    ~"Poisoned exclusive - another task failed inside!");
+            }
+            (*rec).failed = true;
+            let result = f(&mut (*rec).data);
+            (*rec).failed = false;
+            result
+        }
     }


@@ -43,11 +43,11 @@ pub unsafe fn weaken_task(f: &fn(Port<ShutdownMsg>)) {
     let task = get_task_id();
     // Expect the weak task service to be alive
    assert!(service.try_send(RegisterWeakTask(task, shutdown_chan)));
-    unsafe { rust_dec_kernel_live_count(); }
+    rust_dec_kernel_live_count();
     do (|| {
         f(shutdown_port.take())
     }).finally || {
-        unsafe { rust_inc_kernel_live_count(); }
+        rust_inc_kernel_live_count();
         // Service my have already exited
         service.send(UnregisterWeakTask(task));
     }


@@ -26,7 +26,7 @@ use core::char;
 use core::hash::Streaming;
 use core::hash;
 use core::io::WriterUtil;
-use core::libc::{c_int, c_uint, c_char};
+use core::libc::{c_int, c_uint};
 use core::os::consts::{macos, freebsd, linux, android, win32};
 use core::os;
 use core::ptr;


@@ -20,6 +20,7 @@
 use core::prelude::*;

 use middle::moves;
+use middle::typeck::check::PurityState;
 use middle::borrowck::{Loan, bckerr, BorrowckCtxt, inherent_mutability};
 use middle::borrowck::{ReqMaps, root_map_key, save_and_restore_managed};
 use middle::borrowck::{MoveError, MoveOk, MoveFromIllegalCmt};
@@ -41,11 +42,6 @@ use syntax::codemap::span;
 use syntax::print::pprust;
 use syntax::visit;

-struct PurityState {
-    def: ast::node_id,
-    purity: ast::purity
-}
-
 struct CheckLoanCtxt {
     bccx: @BorrowckCtxt,
     req_maps: ReqMaps,
@@ -85,8 +81,7 @@ pub fn check_loans(bccx: @BorrowckCtxt,
         bccx: bccx,
         req_maps: req_maps,
         reported: HashSet::new(),
-        declared_purity: @mut PurityState { purity: ast::impure_fn,
-                                            def: 0 },
+        declared_purity: @mut PurityState::function(ast::impure_fn, 0),
         fn_args: @mut @~[]
     };
     let vt = visit::mk_vt(@visit::Visitor {visit_expr: check_loans_in_expr,
@@ -658,9 +653,7 @@ fn check_loans_in_fn(fk: &visit::fn_kind,
         debug!("purity on entry=%?", copy self.declared_purity);
         do save_and_restore_managed(self.declared_purity) {
             do save_and_restore_managed(self.fn_args) {
-                self.declared_purity = @mut PurityState {
-                    purity: declared_purity, def: src
-                };
+                self.declared_purity = @mut PurityState::function(declared_purity, src);

                 match *fk {
                     visit::fk_anon(*) |
@@ -810,17 +803,7 @@ fn check_loans_in_block(blk: &ast::blk,
     do save_and_restore_managed(self.declared_purity) {
         self.check_for_conflicting_loans(blk.node.id);
-
-        match blk.node.rules {
-            ast::default_blk => {
-            }
-            ast::unsafe_blk => {
-                *self.declared_purity = PurityState {
-                    purity: ast::unsafe_fn,
-                    def: blk.node.id,
-                };
-            }
-        }
+        *self.declared_purity = self.declared_purity.recurse(blk);

         visit::visit_block(blk, self, vt);
     }
 }


@@ -70,7 +70,7 @@ use core::hash;
 use core::hashmap::{HashMap, HashSet};
 use core::int;
 use core::io;
-use core::libc::{c_uint, c_ulonglong};
+use core::libc::c_uint;
 use core::uint;
 use std::time;
 use syntax::ast::ident;
@@ -2628,13 +2628,11 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::node_id) -> ValueRef {
                 let class_ty = ty::lookup_item_type(tcx, parent_id).ty;
                 // This code shouldn't be reached if the class is generic
                 assert!(!ty::type_has_params(class_ty));
-                let lldty = unsafe {
-                    T_fn(~[
+                let lldty = T_fn(~[
                     T_ptr(T_i8()),
                     T_ptr(type_of(ccx, class_ty))
                 ],
-                T_nil())
-                };
+                T_nil());
                 let s = get_dtor_symbol(ccx, /*bad*/copy *pt, dt.node.id, None);

                 /* Make the declaration for the dtor */


@@ -514,7 +514,7 @@ pub struct substs {
 }

 mod primitives {
-    use super::{sty, t_box_};
+    use super::t_box_;

     use syntax::ast;


@@ -115,6 +115,7 @@ use core::result::{Result, Ok, Err};
 use core::result;
 use core::str;
 use core::vec;
+use core::util::replace;
 use std::list::Nil;
 use syntax::abi::AbiSet;
 use syntax::ast::{provided, required};
@@ -179,9 +180,36 @@ pub enum FnKind {
     Vanilla
 }

-struct PurityState {
+pub struct PurityState {
+    def: ast::node_id,
     purity: ast::purity,
-    from: ast::node_id,
+    priv from_fn: bool
 }

+pub impl PurityState {
+    pub fn function(purity: ast::purity, def: ast::node_id) -> PurityState {
+        PurityState { def: def, purity: purity, from_fn: true }
+    }
+
+    pub fn recurse(&mut self, blk: &ast::blk) -> PurityState {
+        match self.purity {
+            // If this unsafe, then if the outer function was already marked as
+            // unsafe we shouldn't attribute the unsafe'ness to the block. This
+            // way the block can be warned about instead of ignoring this
+            // extraneous block (functions are never warned about).
+            ast::unsafe_fn if self.from_fn => *self,
+
+            purity => {
+                let (purity, def) = match blk.node.rules {
+                    ast::unsafe_blk => (ast::unsafe_fn, blk.node.id),
+                    ast::default_blk => (purity, self.def),
+                };
+                PurityState{ def: def,
+                             purity: purity,
+                             from_fn: false }
+            }
+        }
+    }
+}
+
 pub struct FnCtxt {
@@ -243,7 +271,7 @@ pub fn blank_fn_ctxt(ccx: @mut CrateCtxt,
     @mut FnCtxt {
         ret_ty: rty,
         indirect_ret_ty: None,
-        ps: PurityState { purity: ast::pure_fn, from: 0 },
+        ps: PurityState::function(ast::pure_fn, 0),
         region_lb: region_bnd,
         in_scope_regions: @Nil,
         fn_kind: Vanilla,
@@ -348,7 +376,7 @@ pub fn check_fn(ccx: @mut CrateCtxt,
         @mut FnCtxt {
             ret_ty: ret_ty,
             indirect_ret_ty: indirect_ret_ty,
-            ps: PurityState { purity: purity, from: id },
+            ps: PurityState::function(purity, id),
             region_lb: body.node.id,
             in_scope_regions: isr,
             fn_kind: fn_kind,
@@ -876,8 +904,8 @@ pub impl FnCtxt {
         match self.ps.purity {
             ast::unsafe_fn => {
                 // ok, but flag that we used the source of unsafeness
-                debug!("flagging %? as a used unsafe source", self.ps.from);
-                self.tcx().used_unsafe.insert(self.ps.from);
+                debug!("flagging %? as a used unsafe source", self.ps);
+                self.tcx().used_unsafe.insert(self.ps.def);
             }
             _ => {
                 self.ccx.tcx.sess.span_err(
@@ -1689,7 +1717,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
         fcx.write_ty(expr.id, fty);

         let (inherited_purity, id) =
-            ty::determine_inherited_purity((fcx.ps.purity, fcx.ps.from),
+            ty::determine_inherited_purity((fcx.ps.purity, fcx.ps.def),
                                            (purity, expr.id),
                                            sigil);
@@ -2929,16 +2957,11 @@ pub fn check_block(fcx0: @mut FnCtxt, blk: &ast::blk) {
     check_block_with_expected(fcx0, blk, None)
 }

-pub fn check_block_with_expected(fcx0: @mut FnCtxt,
+pub fn check_block_with_expected(fcx: @mut FnCtxt,
                                  blk: &ast::blk,
                                  expected: Option<ty::t>) {
-    let fcx = match blk.node.rules {
-        ast::unsafe_blk => @mut FnCtxt {
-            ps: PurityState { purity: ast::unsafe_fn, from: blk.node.id },
-            .. copy *fcx0
-        },
-        ast::default_blk => fcx0
-    };
+    let prev = replace(&mut fcx.ps, fcx.ps.recurse(blk));
+
     do fcx.with_region_lb(blk.node.id) {
         let mut warned = false;
         let mut last_was_bot = false;
@@ -2990,6 +3013,8 @@ pub fn check_block_with_expected(fcx0: @mut FnCtxt,
             }
         };
     }
+
+    fcx.ps = prev;
 }

 pub fn check_const(ccx: @mut CrateCtxt,
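
The mechanism is `PurityState::recurse` above: entering a block only shifts the recorded source of unsafety to that block when the enclosing context is not already an `unsafe fn`, and `check_block_with_expected` saves and restores the state around each block. A rough sketch of that shape, in present-day Rust syntax with made-up names (not the compiler's actual types):

// Hypothetical stand-ins for rustc's purity tracking; illustrative only.
#[derive(Clone, Copy)]
struct Purity {
    in_unsafe: bool, // may unsafe operations be performed here?
    from_fn: bool,   // did the unsafety come from an `unsafe fn` itself?
}

impl Purity {
    fn recurse(self, block_is_unsafe: bool) -> Purity {
        if self.in_unsafe && self.from_fn {
            // Inside an unsafe fn an `unsafe` block adds nothing, so keep the
            // old state; the block stays "unused" and can be warned about.
            self
        } else if block_is_unsafe {
            // Otherwise the block itself becomes the source of unsafety.
            Purity { in_unsafe: true, from_fn: false }
        } else {
            self
        }
    }
}

fn check_block(ps: &mut Purity, block_is_unsafe: bool) {
    let next = ps.recurse(block_is_unsafe);
    let prev = std::mem::replace(ps, next);
    // ... type check the block's contents with *ps in effect ...
    *ps = prev; // restore the enclosing state on the way out
}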


@@ -177,15 +177,13 @@ pub impl<T:Owned> MutexARC<T> {
      */
     #[inline(always)]
     unsafe fn access<U>(&self, blk: &fn(x: &mut T) -> U) -> U {
-        unsafe {
-            let state = get_shared_mutable_state(&self.x);
-            // Borrowck would complain about this if the function were
-            // not already unsafe. See borrow_rwlock, far below.
-            do (&(*state).lock).lock {
-                check_poison(true, (*state).failed);
-                let _z = PoisonOnFail(&mut (*state).failed);
-                blk(&mut (*state).data)
-            }
-        }
+        let state = get_shared_mutable_state(&self.x);
+        // Borrowck would complain about this if the function were
+        // not already unsafe. See borrow_rwlock, far below.
+        do (&(*state).lock).lock {
+            check_poison(true, (*state).failed);
+            let _z = PoisonOnFail(&mut (*state).failed);
+            blk(&mut (*state).data)
+        }
     }
@@ -195,16 +193,14 @@ pub impl<T:Owned> MutexARC<T> {
         &self,
         blk: &fn(x: &'x mut T, c: &'c Condvar) -> U) -> U
     {
-        unsafe {
-            let state = get_shared_mutable_state(&self.x);
-            do (&(*state).lock).lock_cond |cond| {
-                check_poison(true, (*state).failed);
-                let _z = PoisonOnFail(&mut (*state).failed);
-                blk(&mut (*state).data,
-                    &Condvar {is_mutex: true,
-                              failed: &mut (*state).failed,
-                              cond: cond })
-            }
-        }
+        let state = get_shared_mutable_state(&self.x);
+        do (&(*state).lock).lock_cond |cond| {
+            check_poison(true, (*state).failed);
+            let _z = PoisonOnFail(&mut (*state).failed);
+            blk(&mut (*state).data,
+                &Condvar {is_mutex: true,
+                          failed: &mut (*state).failed,
+                          cond: cond })
+        }
     }
 }


@@ -12,30 +12,50 @@
 #[deny(unused_unsafe)];

-use core::libc;
+extern mod foo {
+    fn bar();
+}

 fn callback<T>(_f: &fn() -> T) -> T { fail!() }
+unsafe fn unsf() {}

 fn bad1() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
 fn bad2() { unsafe { bad1() } } //~ ERROR: unnecessary `unsafe` block
-unsafe fn bad4() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
-fn bad5() { unsafe { do callback {} } } //~ ERROR: unnecessary `unsafe` block
+unsafe fn bad3() { unsafe {} } //~ ERROR: unnecessary `unsafe` block
+fn bad4() { unsafe { do callback {} } } //~ ERROR: unnecessary `unsafe` block
+unsafe fn bad5() { unsafe { unsf() } } //~ ERROR: unnecessary `unsafe` block
+fn bad6() {
+    unsafe { //~ ERROR: unnecessary `unsafe` block
+        unsafe { // don't put the warning here
+            unsf()
+        }
+    }
+}
+unsafe fn bad7() {
+    unsafe { //~ ERROR: unnecessary `unsafe` block
+        unsafe { //~ ERROR: unnecessary `unsafe` block
+            unsf()
+        }
+    }
+}

-unsafe fn good0() { libc::exit(1) }
-fn good1() { unsafe { libc::exit(1) } }
+unsafe fn good0() { unsf() }
+fn good1() { unsafe { unsf() } }
 fn good2() {
     /* bug uncovered when implementing warning about unused unsafe blocks. Be
        sure that when purity is inherited that the source of the unsafe-ness
        is tracked correctly */
     unsafe {
-        unsafe fn what() -> ~[~str] { libc::exit(2) }
+        unsafe fn what() -> ~[~str] { fail!() }

         do callback {
             what();
         }
     }
 }

+unsafe fn good3() { foo::bar() }
+fn good4() { unsafe { foo::bar() } }
+
 #[allow(unused_unsafe)] fn allowed() { unsafe {} }

-fn main() { }
+fn main() {}