bubble up out-of-memory errors from liballoc

The low-level allocation functions now return a null pointer on failure
instead of aborting the process, making the API suitable for use cases
where out-of-memory conditions need to be handled by the caller.
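
For illustration, the pattern this commit introduces at every call site is to
check the returned pointer and report failure through the newly public
`alloc::oom` routine. A minimal sketch of that pattern (the `allocate_or_oom`
helper is illustrative, not part of the commit; it assumes a crate using the
std prelude, which supplies `is_null`):

extern crate alloc;

use alloc::heap;

// Allocate `size` bytes aligned to `align`, turning a null return into a
// call to the shared out-of-memory routine, as the callers updated in this
// commit do.
unsafe fn allocate_or_oom(size: uint, align: uint) -> *mut u8 {
    let ptr = heap::allocate(size, align);
    if ptr.is_null() { alloc::oom() }
    ptr
}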

Closes #18292

[breaking-change]
Daniel Micay 2014-10-28 17:06:06 -04:00
parent 39f90aead4
commit fea985a0b5
11 changed files with 49 additions and 63 deletions

View file

@@ -8,9 +8,13 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::ptr::RawPtr;
// FIXME: #13996: mark the `allocate` and `reallocate` return value as `noalias`
/// Returns a pointer to `size` bytes of memory.
/// Return a pointer to `size` bytes of memory aligned to `align`.
///
/// On failure, return a null pointer.
///
/// Behavior is undefined if the requested size is 0 or the alignment is not a
/// power of 2. The alignment must be no larger than the largest supported page
@@ -20,8 +24,9 @@ pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
imp::allocate(size, align)
}
/// Extends or shrinks the allocation referenced by `ptr` to `size` bytes of
/// memory.
/// Resize the allocation referenced by `ptr` to `size` bytes.
///
/// On failure, return a null pointer and leave the original allocation intact.
///
/// Behavior is undefined if the requested size is 0 or the alignment is not a
/// power of 2. The alignment must be no larger than the largest supported page
@@ -35,8 +40,7 @@ pub unsafe fn reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint)
imp::reallocate(ptr, old_size, size, align)
}
/// Extends or shrinks the allocation referenced by `ptr` to `size` bytes of
/// memory in-place.
/// Resize the allocation referenced by `ptr` to `size` bytes.
///
/// If the operation succeeds, it returns `usable_size(size, align)` and if it
/// fails (or is a no-op) it returns `usable_size(old_size, align)`.
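
Taken together, the new doc comments describe a fully fallible API. A
hypothetical caller (not part of this commit) could combine the in-place and
moving paths, using the documented return values to detect failure while
keeping the original allocation valid:

extern crate alloc;

use alloc::heap;

// Try to grow an allocation to `new_size` bytes. Returns None on failure,
// in which case `ptr` still refers to the original, intact allocation.
unsafe fn try_grow(ptr: *mut u8, old_size: uint, new_size: uint,
                   align: uint) -> Option<*mut u8> {
    // reallocate_inplace returns the block's usable size; if that already
    // covers `new_size`, the in-place attempt succeeded (or was a no-op
    // within the same size class) and the pointer is unchanged.
    if heap::reallocate_inplace(ptr, old_size, new_size, align) >= new_size {
        return Some(ptr);
    }
    // Fall back to a moving reallocation; a null return means failure and
    // leaves the original allocation untouched.
    let new_ptr = heap::reallocate(ptr, old_size, new_size, align);
    if new_ptr.is_null() { None } else { Some(new_ptr) }
}
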
@@ -95,7 +99,9 @@ unsafe fn exchange_malloc(size: uint, align: uint) -> *mut u8 {
if size == 0 {
EMPTY as *mut u8
} else {
allocate(size, align)
let ptr = allocate(size, align);
if ptr.is_null() { ::oom() }
ptr
}
}
@@ -120,7 +126,7 @@ const MIN_ALIGN: uint = 16;
#[cfg(jemalloc)]
mod imp {
use core::option::{None, Option};
use core::ptr::{RawPtr, null_mut, null};
use core::ptr::{null_mut, null};
use core::num::Int;
use libc::{c_char, c_int, c_void, size_t};
use super::MIN_ALIGN;
@@ -131,10 +137,8 @@ mod imp {
extern {
fn je_mallocx(size: size_t, flags: c_int) -> *mut c_void;
fn je_rallocx(ptr: *mut c_void, size: size_t,
flags: c_int) -> *mut c_void;
fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t,
flags: c_int) -> size_t;
fn je_rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void;
fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t;
fn je_sdallocx(ptr: *mut c_void, size: size_t, flags: c_int);
fn je_nallocx(size: size_t, flags: c_int) -> size_t;
fn je_malloc_stats_print(write_cb: Option<extern "C" fn(cbopaque: *mut c_void,
@@ -160,21 +164,13 @@ mod imp {
#[inline]
pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
let flags = align_to_flags(align);
let ptr = je_mallocx(size as size_t, flags) as *mut u8;
if ptr.is_null() {
::oom()
}
ptr
je_mallocx(size as size_t, flags) as *mut u8
}
#[inline]
pub unsafe fn reallocate(ptr: *mut u8, _old_size: uint, size: uint, align: uint) -> *mut u8 {
let flags = align_to_flags(align);
let ptr = je_rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8;
if ptr.is_null() {
::oom()
}
ptr
je_rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8
}
#[inline]
@@ -207,7 +203,6 @@ mod imp {
mod imp {
use core::cmp;
use core::ptr;
use core::ptr::RawPtr;
use libc;
use super::MIN_ALIGN;
@@ -220,31 +215,24 @@ mod imp {
#[inline]
pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
if align <= MIN_ALIGN {
let ptr = libc::malloc(size as libc::size_t);
if ptr.is_null() {
::oom();
}
ptr as *mut u8
libc::malloc(size as libc::size_t) as *mut u8
} else {
let mut out = 0 as *mut libc::c_void;
let ret = posix_memalign(&mut out,
align as libc::size_t,
size as libc::size_t);
if ret != 0 {
::oom();
ptr::null_mut()
} else {
out as *mut u8
}
out as *mut u8
}
}
#[inline]
pub unsafe fn reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> *mut u8 {
if align <= MIN_ALIGN {
let ptr = libc::realloc(ptr as *mut libc::c_void, size as libc::size_t);
if ptr.is_null() {
::oom();
}
ptr as *mut u8
libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8
} else {
let new_ptr = allocate(size, align);
ptr::copy_memory(new_ptr, ptr as *const u8, cmp::min(size, old_size));
@@ -276,7 +264,6 @@ mod imp {
mod imp {
use libc::{c_void, size_t};
use libc;
use core::ptr::RawPtr;
use super::MIN_ALIGN;
extern {
@@ -289,35 +276,18 @@ mod imp {
#[inline]
pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
if align <= MIN_ALIGN {
let ptr = libc::malloc(size as size_t);
if ptr.is_null() {
::oom();
}
ptr as *mut u8
libc::malloc(size as size_t) as *mut u8
} else {
let ptr = _aligned_malloc(size as size_t, align as size_t);
if ptr.is_null() {
::oom();
}
ptr as *mut u8
_aligned_malloc(size as size_t, align as size_t) as *mut u8
}
}
#[inline]
pub unsafe fn reallocate(ptr: *mut u8, _old_size: uint, size: uint, align: uint) -> *mut u8 {
if align <= MIN_ALIGN {
let ptr = libc::realloc(ptr as *mut c_void, size as size_t);
if ptr.is_null() {
::oom();
}
ptr as *mut u8
libc::realloc(ptr as *mut c_void, size as size_t) as *mut u8
} else {
let ptr = _aligned_realloc(ptr as *mut c_void, size as size_t,
align as size_t);
if ptr.is_null() {
::oom();
}
ptr as *mut u8
_aligned_realloc(ptr as *mut c_void, size as size_t, align as size_t) as *mut u8
}
}
@@ -348,6 +318,7 @@ mod imp {
mod test {
extern crate test;
use self::test::Bencher;
use core::ptr::RawPtr;
use heap;
#[test]
@@ -355,6 +326,7 @@ mod test {
unsafe {
let size = 4000;
let ptr = heap::allocate(size, 8);
if ptr.is_null() { ::oom() }
let ret = heap::reallocate_inplace(ptr, size, size, 8);
heap::deallocate(ptr, size, 8);
assert_eq!(ret, heap::usable_size(size, 8));

View file

@@ -11,8 +11,7 @@
//! # The Rust core allocation library
//!
//! This is the lowest level library through which allocation in Rust can be
//! performed where the allocation is assumed to succeed. This library will
//! abort the process when allocation fails.
//! performed.
//!
//! This library, like libcore, is not intended for general usage, but rather as
//! a building block of other libraries. The types and interfaces in this
@@ -95,8 +94,10 @@ pub mod boxed;
pub mod arc;
pub mod rc;
/// Common OOM routine used by liballoc
fn oom() -> ! {
/// Common out-of-memory routine
#[cold]
#[inline(never)]
pub fn oom() -> ! {
// FIXME(#14674): This really needs to do something other than just abort
// here, but any printing done must be *guaranteed* to not
// allocate.
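
The body of `oom` falls outside this hunk, but the FIXME records both that it
currently just aborts and that nothing in it may allocate, since the allocator
that just failed could be re-entered. A sketch of a body consistent with that
constraint, assuming it sits at liballoc's crate root where `core` is linked:

#[cold]
#[inline(never)]
pub fn oom() -> ! {
    // Must stay allocation-free: formatting an error message here could
    // recurse into the allocator that just reported failure.
    unsafe { core::intrinsics::abort() }
}

Marking the routine `#[cold]` and `#[inline(never)]` keeps this failure path
out of the hot allocation paths that now branch to it.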

View file

@@ -31,6 +31,8 @@
#![feature(unsafe_destructor)]
#![allow(missing_docs)]
extern crate alloc;
use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics::{TyDesc, get_tydesc};
@@ -386,6 +388,7 @@ impl<T> TypedArenaChunk<T> {
let size = calculate_size::<T>(capacity);
let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
as *mut TypedArenaChunk<T>;
if chunk.is_null() { alloc::oom() }
(*chunk).next = next;
(*chunk).capacity = capacity;
chunk

View file

@@ -629,6 +629,7 @@ impl<T> Vec<T> {
.expect("capacity overflow");
unsafe {
self.ptr = alloc_or_realloc(self.ptr, self.cap * mem::size_of::<T>(), size);
if self.ptr.is_null() { ::alloc::oom() }
}
self.cap = capacity;
}
@@ -666,6 +667,7 @@ impl<T> Vec<T> {
self.cap * mem::size_of::<T>(),
self.len * mem::size_of::<T>(),
mem::min_align_of::<T>()) as *mut T;
if self.ptr.is_null() { ::alloc::oom() }
}
self.cap = self.len;
}
@@ -988,6 +990,7 @@ impl<T> Vec<T> {
if old_size > size { panic!("capacity overflow") }
unsafe {
self.ptr = alloc_or_realloc(self.ptr, old_size, size);
if self.ptr.is_null() { ::alloc::oom() }
}
self.cap = max(self.cap, 2) * 2;
}

View file

@@ -101,7 +101,7 @@ impl Clone for CString {
fn clone(&self) -> CString {
let len = self.len() + 1;
let buf = unsafe { libc::malloc(len as libc::size_t) } as *mut libc::c_char;
if buf.is_null() { panic!("out of memory") }
if buf.is_null() { ::alloc::oom() }
unsafe { ptr::copy_nonoverlapping_memory(buf, self.buf, len); }
CString { buf: buf as *const libc::c_char, owns_buffer_: true }
}
@@ -388,7 +388,7 @@ impl ToCStr for [u8] {
unsafe fn to_c_str_unchecked(&self) -> CString {
let self_len = self.len();
let buf = libc::malloc(self_len as libc::size_t + 1) as *mut u8;
if buf.is_null() { panic!("out of memory") }
if buf.is_null() { ::alloc::oom() }
ptr::copy_memory(buf, self.as_ptr(), self_len);
*buf.offset(self_len as int) = 0;

View file

@@ -354,6 +354,7 @@ impl TLDValue {
let box_ptr = unsafe {
let allocation = heap::allocate(mem::size_of::<TLDValueBox<T>>(),
mem::min_align_of::<TLDValueBox<T>>());
if allocation.is_null() { ::alloc::oom() }
let value_box = allocation as *mut TLDValueBox<T>;
ptr::write(value_box, TLDValueBox {
value: value,

View file

@@ -519,6 +519,7 @@ mod imp {
use alloc::heap;
use core::atomic;
use core::ptr;
use core::ptr::RawPtr;
use libc::{HANDLE, BOOL, LPSECURITY_ATTRIBUTES, c_void, DWORD, LPCSTR};
use libc;
@@ -608,6 +609,7 @@ mod imp {
pub unsafe fn init_lock() -> uint {
let block = heap::allocate(CRIT_SECTION_SIZE, 8) as *mut c_void;
if block.is_null() { ::alloc::oom() }
InitializeCriticalSectionAndSpinCount(block, SPIN_COUNT);
return block as uint;
}

View file

@@ -170,7 +170,7 @@ mod tests {
fn malloc(n: uint) -> CVec<u8> {
unsafe {
let mem = libc::malloc(n as libc::size_t);
if mem.is_null() { panic!("out of memory") }
if mem.is_null() { ::alloc::oom() }
CVec::new_with_dtor(mem as *mut u8, n,
proc() { libc::free(mem as *mut libc::c_void); })

View file

@@ -607,6 +607,7 @@ impl<K, V> RawTable<K, V> {
"capacity overflow");
let buffer = allocate(size, malloc_alignment);
if buffer.is_null() { ::alloc::oom() }
let hashes = buffer.offset(hash_offset as int) as *mut u64;

View file

@@ -353,6 +353,7 @@ impl<T: Send> Buffer<T> {
unsafe fn new(log_size: uint) -> Buffer<T> {
let size = buffer_alloc_size::<T>(log_size);
let buffer = allocate(size, min_align_of::<T>());
if buffer.is_null() { ::alloc::oom() }
Buffer {
storage: buffer as *const T,
log_size: log_size,

View file

@@ -49,6 +49,7 @@ unsafe fn test_triangle() -> bool {
if PRINT { println!("allocate(size={:u} align={:u})", size, align); }
let ret = heap::allocate(size, align);
if ret.is_null() { alloc::oom() }
if PRINT { println!("allocate(size={:u} align={:u}) ret: 0x{:010x}",
size, align, ret as uint);
@@ -70,6 +71,7 @@
}
let ret = heap::reallocate(ptr, old_size, size, align);
if ret.is_null() { alloc::oom() }
if PRINT {
println!("reallocate(ptr=0x{:010x} old_size={:u} size={:u} align={:u}) \