Rename alloc::Void to alloc::Opaque

This commit is contained in:
Simon Sapin 2018-04-11 17:19:48 +02:00
parent ed29777759
commit f607a3872a
18 changed files with 139 additions and 140 deletions

@ -1 +1 @@
Subproject commit 498ac2997420f7b25f7cd0a3f8202950d8ad93ec
Subproject commit 3c56329d1bd9038e5341f1962bcd8d043312a712

View file

@ -29,17 +29,17 @@ looks like:
```rust
#![feature(global_allocator, allocator_api, heap_api)]
use std::alloc::{GlobalAlloc, System, Layout, Void};
use std::alloc::{GlobalAlloc, System, Layout, Opaque};
use std::ptr::NonNull;
struct MyAllocator;
unsafe impl GlobalAlloc for MyAllocator {
unsafe fn alloc(&self, layout: Layout) -> *mut Void {
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
System.alloc(layout)
}
unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
System.dealloc(ptr, layout)
}
}

View file

@ -76,36 +76,36 @@ pub const Heap: Global = Global;
unsafe impl GlobalAlloc for Global {
#[inline]
unsafe fn alloc(&self, layout: Layout) -> *mut Void {
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
#[cfg(not(stage0))]
let ptr = __rust_alloc(layout.size(), layout.align());
#[cfg(stage0)]
let ptr = __rust_alloc(layout.size(), layout.align(), &mut 0);
ptr as *mut Void
ptr as *mut Opaque
}
#[inline]
unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
__rust_dealloc(ptr as *mut u8, layout.size(), layout.align())
}
#[inline]
unsafe fn realloc(&self, ptr: *mut Void, layout: Layout, new_size: usize) -> *mut Void {
unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
#[cfg(not(stage0))]
let ptr = __rust_realloc(ptr as *mut u8, layout.size(), layout.align(), new_size);
#[cfg(stage0)]
let ptr = __rust_realloc(ptr as *mut u8, layout.size(), layout.align(),
new_size, layout.align(), &mut 0);
ptr as *mut Void
ptr as *mut Opaque
}
#[inline]
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
#[cfg(not(stage0))]
let ptr = __rust_alloc_zeroed(layout.size(), layout.align());
#[cfg(stage0)]
let ptr = __rust_alloc_zeroed(layout.size(), layout.align(), &mut 0);
ptr as *mut Void
ptr as *mut Opaque
}
#[inline]
@ -121,27 +121,27 @@ unsafe impl GlobalAlloc for Global {
unsafe impl Alloc for Global {
#[inline]
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
}
#[inline]
unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
}
#[inline]
unsafe fn realloc(&mut self,
ptr: NonNull<Void>,
ptr: NonNull<Opaque>,
layout: Layout,
new_size: usize)
-> Result<NonNull<Void>, AllocErr>
-> Result<NonNull<Opaque>, AllocErr>
{
NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
}
#[inline]
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
}
@ -178,7 +178,7 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
// We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
if size != 0 {
let layout = Layout::from_size_align_unchecked(size, align);
Global.dealloc(ptr as *mut Void, layout);
Global.dealloc(ptr as *mut Opaque, layout);
}
}

View file

@ -518,7 +518,7 @@ impl<T: ?Sized> Arc<T> {
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()))
}
}
@ -637,7 +637,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
let slice = from_raw_parts_mut(self.elems, self.n_elems);
ptr::drop_in_place(slice);
Global.dealloc(self.mem.as_void(), self.layout.clone());
Global.dealloc(self.mem.as_opaque(), self.layout.clone());
}
}
}
@ -1156,7 +1156,7 @@ impl<T: ?Sized> Drop for Weak<T> {
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe {
Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()))
Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()))
}
}
}

View file

@ -249,7 +249,7 @@ impl<K, V> Root<K, V> {
self.as_mut().as_leaf_mut().parent = ptr::null();
unsafe {
Global.dealloc(NonNull::from(top).as_void(), Layout::new::<InternalNode<K, V>>());
Global.dealloc(NonNull::from(top).as_opaque(), Layout::new::<InternalNode<K, V>>());
}
}
}
@ -435,7 +435,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Leaf> {
> {
let node = self.node;
let ret = self.ascend().ok();
Global.dealloc(node.as_void(), Layout::new::<LeafNode<K, V>>());
Global.dealloc(node.as_opaque(), Layout::new::<LeafNode<K, V>>());
ret
}
}
@ -456,7 +456,7 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::Internal> {
> {
let node = self.node;
let ret = self.ascend().ok();
Global.dealloc(node.as_void(), Layout::new::<InternalNode<K, V>>());
Global.dealloc(node.as_opaque(), Layout::new::<InternalNode<K, V>>());
ret
}
}
@ -1239,12 +1239,12 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
}
Global.dealloc(
right_node.node.as_void(),
right_node.node.as_opaque(),
Layout::new::<InternalNode<K, V>>(),
);
} else {
Global.dealloc(
right_node.node.as_void(),
right_node.node.as_opaque(),
Layout::new::<LeafNode<K, V>>(),
);
}

View file

@ -10,7 +10,7 @@
#![allow(deprecated)]
pub use alloc::{Layout, AllocErr, CannotReallocInPlace, Void};
pub use alloc::{Layout, AllocErr, CannotReallocInPlace, Opaque};
use core::alloc::Alloc as CoreAlloc;
use core::ptr::NonNull;
@ -54,7 +54,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
}
unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
let ptr = NonNull::new_unchecked(ptr as *mut Void);
let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
CoreAlloc::dealloc(self, ptr, layout)
}
@ -70,7 +70,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
ptr: *mut u8,
layout: Layout,
new_layout: Layout) -> Result<*mut u8, AllocErr> {
let ptr = NonNull::new_unchecked(ptr as *mut Void);
let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
CoreAlloc::realloc(self, ptr, layout, new_layout.size()).map(|ptr| ptr.cast().as_ptr())
}
@ -87,7 +87,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
ptr: *mut u8,
layout: Layout,
new_layout: Layout) -> Result<Excess, AllocErr> {
let ptr = NonNull::new_unchecked(ptr as *mut Void);
let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
CoreAlloc::realloc_excess(self, ptr, layout, new_layout.size())
.map(|e| Excess(e.0 .cast().as_ptr(), e.1))
}
@ -96,7 +96,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
ptr: *mut u8,
layout: Layout,
new_layout: Layout) -> Result<(), CannotReallocInPlace> {
let ptr = NonNull::new_unchecked(ptr as *mut Void);
let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
CoreAlloc::grow_in_place(self, ptr, layout, new_layout.size())
}
@ -104,7 +104,7 @@ unsafe impl<T> Alloc for T where T: CoreAlloc {
ptr: *mut u8,
layout: Layout,
new_layout: Layout) -> Result<(), CannotReallocInPlace> {
let ptr = NonNull::new_unchecked(ptr as *mut Void);
let ptr = NonNull::new_unchecked(ptr as *mut Opaque);
CoreAlloc::shrink_in_place(self, ptr, layout, new_layout.size())
}
}

View file

@ -90,7 +90,7 @@ impl<T, A: Alloc> RawVec<T, A> {
// handles ZSTs and `cap = 0` alike
let ptr = if alloc_size == 0 {
NonNull::<T>::dangling().as_void()
NonNull::<T>::dangling().as_opaque()
} else {
let align = mem::align_of::<T>();
let result = if zeroed {
@ -310,7 +310,7 @@ impl<T, A: Alloc> RawVec<T, A> {
let new_cap = 2 * self.cap;
let new_size = new_cap * elem_size;
alloc_guard(new_size).expect("capacity overflow");
let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_void(),
let ptr_res = self.a.realloc(NonNull::from(self.ptr).as_opaque(),
cur,
new_size);
match ptr_res {
@ -369,7 +369,7 @@ impl<T, A: Alloc> RawVec<T, A> {
let new_cap = 2 * self.cap;
let new_size = new_cap * elem_size;
alloc_guard(new_size).expect("capacity overflow");
match self.a.grow_in_place(NonNull::from(self.ptr).as_void(), old_layout, new_size) {
match self.a.grow_in_place(NonNull::from(self.ptr).as_opaque(), old_layout, new_size) {
Ok(_) => {
// We can't directly divide `size`.
self.cap = new_cap;
@ -426,7 +426,7 @@ impl<T, A: Alloc> RawVec<T, A> {
let res = match self.current_layout() {
Some(layout) => {
debug_assert!(new_layout.align() == layout.align());
self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size())
self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
}
None => self.a.alloc(new_layout),
};
@ -535,7 +535,7 @@ impl<T, A: Alloc> RawVec<T, A> {
let res = match self.current_layout() {
Some(layout) => {
debug_assert!(new_layout.align() == layout.align());
self.a.realloc(NonNull::from(self.ptr).as_void(), layout, new_layout.size())
self.a.realloc(NonNull::from(self.ptr).as_opaque(), layout, new_layout.size())
}
None => self.a.alloc(new_layout),
};
@ -601,7 +601,7 @@ impl<T, A: Alloc> RawVec<T, A> {
// FIXME: may crash and burn on over-reserve
alloc_guard(new_layout.size()).expect("capacity overflow");
match self.a.grow_in_place(
NonNull::from(self.ptr).as_void(), old_layout, new_layout.size(),
NonNull::from(self.ptr).as_opaque(), old_layout, new_layout.size(),
) {
Ok(_) => {
self.cap = new_cap;
@ -662,7 +662,7 @@ impl<T, A: Alloc> RawVec<T, A> {
let new_size = elem_size * amount;
let align = mem::align_of::<T>();
let old_layout = Layout::from_size_align_unchecked(old_size, align);
match self.a.realloc(NonNull::from(self.ptr).as_void(),
match self.a.realloc(NonNull::from(self.ptr).as_opaque(),
old_layout,
new_size) {
Ok(p) => self.ptr = p.cast().into(),
@ -698,7 +698,7 @@ impl<T, A: Alloc> RawVec<T, A> {
let elem_size = mem::size_of::<T>();
if elem_size != 0 {
if let Some(layout) = self.current_layout() {
self.a.dealloc(NonNull::from(self.ptr).as_void(), layout);
self.a.dealloc(NonNull::from(self.ptr).as_opaque(), layout);
}
}
}
@ -734,7 +734,7 @@ fn alloc_guard(alloc_size: usize) -> Result<(), CollectionAllocErr> {
#[cfg(test)]
mod tests {
use super::*;
use alloc::Void;
use alloc::Opaque;
#[test]
fn allocator_param() {
@ -754,7 +754,7 @@ mod tests {
// before allocation attempts start failing.
struct BoundedAlloc { fuel: usize }
unsafe impl Alloc for BoundedAlloc {
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
let size = layout.size();
if size > self.fuel {
return Err(AllocErr);
@ -764,7 +764,7 @@ mod tests {
err @ Err(_) => err,
}
}
unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
Global.dealloc(ptr, layout)
}
}

View file

@ -259,7 +259,7 @@ use core::ops::CoerceUnsized;
use core::ptr::{self, NonNull};
use core::convert::From;
use alloc::{Global, Alloc, Layout, Void, box_free};
use alloc::{Global, Alloc, Layout, Opaque, box_free};
use string::String;
use vec::Vec;
@ -737,7 +737,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
// In the event of a panic, elements that have been written
// into the new RcBox will be dropped, then the memory freed.
struct Guard<T> {
mem: NonNull<Void>,
mem: NonNull<Opaque>,
elems: *mut T,
layout: Layout,
n_elems: usize,
@ -760,7 +760,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
let v_ptr = v as *const [T];
let ptr = Self::allocate_for_ptr(v_ptr);
let mem = ptr as *mut _ as *mut Void;
let mem = ptr as *mut _ as *mut Opaque;
let layout = Layout::for_value(&*ptr);
// Pointer to first element
@ -844,7 +844,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
self.dec_weak();
if self.weak() == 0 {
Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()));
Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()));
}
}
}
@ -1268,7 +1268,7 @@ impl<T: ?Sized> Drop for Weak<T> {
// the weak count starts at 1, and will only go to zero if all
// the strong pointers have disappeared.
if self.weak() == 0 {
Global.dealloc(self.ptr.as_void(), Layout::for_value(self.ptr.as_ref()));
Global.dealloc(self.ptr.as_opaque(), Layout::for_value(self.ptr.as_ref()));
}
}
}

View file

@ -41,7 +41,7 @@ const MIN_ALIGN: usize = 8;
#[allow(dead_code)]
const MIN_ALIGN: usize = 16;
use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Void};
use core::alloc::{Alloc, GlobalAlloc, AllocErr, Layout, Opaque};
use core::ptr::NonNull;
#[unstable(feature = "allocator_api", issue = "32838")]
@ -50,25 +50,25 @@ pub struct System;
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl Alloc for System {
#[inline]
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(GlobalAlloc::alloc(self, layout)).ok_or(AllocErr)
}
#[inline]
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(GlobalAlloc::alloc_zeroed(self, layout)).ok_or(AllocErr)
}
#[inline]
unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
}
#[inline]
unsafe fn realloc(&mut self,
ptr: NonNull<Void>,
ptr: NonNull<Opaque>,
layout: Layout,
new_size: usize) -> Result<NonNull<Void>, AllocErr> {
new_size: usize) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
}
@ -82,25 +82,25 @@ unsafe impl Alloc for System {
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl<'a> Alloc for &'a System {
#[inline]
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(GlobalAlloc::alloc(*self, layout)).ok_or(AllocErr)
}
#[inline]
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(GlobalAlloc::alloc_zeroed(*self, layout)).ok_or(AllocErr)
}
#[inline]
unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout) {
unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout) {
GlobalAlloc::dealloc(*self, ptr.as_ptr(), layout)
}
#[inline]
unsafe fn realloc(&mut self,
ptr: NonNull<Void>,
ptr: NonNull<Opaque>,
layout: Layout,
new_size: usize) -> Result<NonNull<Void>, AllocErr> {
new_size: usize) -> Result<NonNull<Opaque>, AllocErr> {
NonNull::new(GlobalAlloc::realloc(*self, ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)
}
@ -112,13 +112,13 @@ unsafe impl<'a> Alloc for &'a System {
#[cfg(any(windows, unix, target_os = "cloudabi", target_os = "redox"))]
mod realloc_fallback {
use core::alloc::{GlobalAlloc, Void, Layout};
use core::alloc::{GlobalAlloc, Opaque, Layout};
use core::cmp;
use core::ptr;
impl super::System {
pub(crate) unsafe fn realloc_fallback(&self, ptr: *mut Void, old_layout: Layout,
new_size: usize) -> *mut Void {
pub(crate) unsafe fn realloc_fallback(&self, ptr: *mut Opaque, old_layout: Layout,
new_size: usize) -> *mut Opaque {
// Docs for GlobalAlloc::realloc require this to be valid:
let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());
@ -141,20 +141,21 @@ mod platform {
use MIN_ALIGN;
use System;
use core::alloc::{GlobalAlloc, Layout, Void};
use core::alloc::{GlobalAlloc, Layout, Opaque};
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl GlobalAlloc for System {
#[inline]
unsafe fn alloc(&self, layout: Layout) -> *mut Void {
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() {
libc::malloc(layout.size()) as *mut Void
libc::malloc(layout.size()) as *mut Opaque
} else {
#[cfg(target_os = "macos")]
{
if layout.align() > (1 << 31) {
// FIXME: use Void::null_mut https://github.com/rust-lang/rust/issues/49659
return 0 as *mut Void
// FIXME: use Opaque::null_mut
// https://github.com/rust-lang/rust/issues/49659
return 0 as *mut Opaque
}
}
aligned_malloc(&layout)
@ -162,9 +163,9 @@ mod platform {
}
#[inline]
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() {
libc::calloc(layout.size(), 1) as *mut Void
libc::calloc(layout.size(), 1) as *mut Opaque
} else {
let ptr = self.alloc(layout.clone());
if !ptr.is_null() {
@ -175,24 +176,23 @@ mod platform {
}
#[inline]
unsafe fn dealloc(&self, ptr: *mut Void, _layout: Layout) {
unsafe fn dealloc(&self, ptr: *mut Opaque, _layout: Layout) {
libc::free(ptr as *mut libc::c_void)
}
#[inline]
unsafe fn realloc(&self, ptr: *mut Void, old_layout: Layout, new_size: usize) -> *mut Void {
let align = old_layout.align();
if align <= MIN_ALIGN && align <= new_size {
libc::realloc(ptr as *mut libc::c_void, new_size) as *mut Void
unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
if layout.align() <= MIN_ALIGN && layout.align() <= new_size {
libc::realloc(ptr as *mut libc::c_void, new_size) as *mut Opaque
} else {
self.realloc_fallback(ptr, old_layout, new_size)
self.realloc_fallback(ptr, layout, new_size)
}
}
}
#[cfg(any(target_os = "android", target_os = "redox", target_os = "solaris"))]
#[inline]
unsafe fn aligned_malloc(layout: &Layout) -> *mut Void {
unsafe fn aligned_malloc(layout: &Layout) -> *mut Opaque {
// On android we currently target API level 9 which unfortunately
// doesn't have the `posix_memalign` API used below. Instead we use
// `memalign`, but this unfortunately has the property on some systems
@ -210,19 +210,19 @@ mod platform {
// [3]: https://bugs.chromium.org/p/chromium/issues/detail?id=138579
// [4]: https://chromium.googlesource.com/chromium/src/base/+/master/
// /memory/aligned_memory.cc
libc::memalign(layout.align(), layout.size()) as *mut Void
libc::memalign(layout.align(), layout.size()) as *mut Opaque
}
#[cfg(not(any(target_os = "android", target_os = "redox", target_os = "solaris")))]
#[inline]
unsafe fn aligned_malloc(layout: &Layout) -> *mut Void {
unsafe fn aligned_malloc(layout: &Layout) -> *mut Opaque {
let mut out = ptr::null_mut();
let ret = libc::posix_memalign(&mut out, layout.align(), layout.size());
if ret != 0 {
// FIXME: use Void::null_mut https://github.com/rust-lang/rust/issues/49659
0 as *mut Void
// FIXME: use Opaque::null_mut https://github.com/rust-lang/rust/issues/49659
0 as *mut Opaque
} else {
out as *mut Void
out as *mut Opaque
}
}
}
@ -232,7 +232,7 @@ mod platform {
mod platform {
use MIN_ALIGN;
use System;
use core::alloc::{GlobalAlloc, Void, Layout};
use core::alloc::{GlobalAlloc, Opaque, Layout};
type LPVOID = *mut u8;
type HANDLE = LPVOID;
@ -264,7 +264,7 @@ mod platform {
}
#[inline]
unsafe fn allocate_with_flags(layout: Layout, flags: DWORD) -> *mut Void {
unsafe fn allocate_with_flags(layout: Layout, flags: DWORD) -> *mut Opaque {
let ptr = if layout.align() <= MIN_ALIGN {
HeapAlloc(GetProcessHeap(), flags, layout.size())
} else {
@ -276,23 +276,23 @@ mod platform {
align_ptr(ptr, layout.align())
}
};
ptr as *mut Void
ptr as *mut Opaque
}
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl GlobalAlloc for System {
#[inline]
unsafe fn alloc(&self, layout: Layout) -> *mut Void {
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
allocate_with_flags(layout, 0)
}
#[inline]
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
allocate_with_flags(layout, HEAP_ZERO_MEMORY)
}
#[inline]
unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
if layout.align() <= MIN_ALIGN {
let err = HeapFree(GetProcessHeap(), 0, ptr as LPVOID);
debug_assert!(err != 0, "Failed to free heap memory: {}",
@ -306,12 +306,11 @@ mod platform {
}
#[inline]
unsafe fn realloc(&self, ptr: *mut Void, old_layout: Layout, new_size: usize) -> *mut Void {
let align = old_layout.align();
if align <= MIN_ALIGN {
HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, new_size) as *mut Void
unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
if layout.align() <= MIN_ALIGN {
HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, new_size) as *mut Opaque
} else {
self.realloc_fallback(ptr, old_layout, new_size)
self.realloc_fallback(ptr, layout, new_size)
}
}
}
@ -338,7 +337,7 @@ mod platform {
mod platform {
extern crate dlmalloc;
use core::alloc::{GlobalAlloc, Layout, Void};
use core::alloc::{GlobalAlloc, Layout, Opaque};
use System;
// No need for synchronization here as wasm is currently single-threaded
@ -347,23 +346,23 @@ mod platform {
#[unstable(feature = "allocator_api", issue = "32838")]
unsafe impl GlobalAlloc for System {
#[inline]
unsafe fn alloc(&self, layout: Layout) -> *mut Void {
DLMALLOC.malloc(layout.size(), layout.align()) as *mut Void
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
DLMALLOC.malloc(layout.size(), layout.align()) as *mut Opaque
}
#[inline]
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
DLMALLOC.calloc(layout.size(), layout.align()) as *mut Void
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
DLMALLOC.calloc(layout.size(), layout.align()) as *mut Opaque
}
#[inline]
unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
DLMALLOC.free(ptr as *mut u8, layout.size(), layout.align())
}
#[inline]
unsafe fn realloc(&self, ptr: *mut Void, layout: Layout, new_size: usize) -> *mut Void {
DLMALLOC.realloc(ptr as *mut u8, layout.size(), layout.align(), new_size) as *mut Void
unsafe fn realloc(&self, ptr: *mut Opaque, layout: Layout, new_size: usize) -> *mut Opaque {
DLMALLOC.realloc(ptr as *mut u8, layout.size(), layout.align(), new_size) as *mut Opaque
}
}
}

View file

@ -24,12 +24,12 @@ use ptr::{self, NonNull};
extern {
/// An opaque, unsized type. Used for pointers to allocated memory.
///
/// This type can only be used behind a pointer like `*mut Void` or `ptr::NonNull<Void>`.
/// This type can only be used behind a pointer like `*mut Opaque` or `ptr::NonNull<Opaque>`.
/// Such pointers are similar to C's `void*` type.
pub type Void;
pub type Opaque;
}
impl Void {
impl Opaque {
/// Similar to `std::ptr::null`, which requires `T: Sized`.
pub fn null() -> *const Self {
0 as _
@ -44,7 +44,7 @@ impl Void {
/// Represents the combination of a starting address and
/// a total capacity of the returned block.
#[derive(Debug)]
pub struct Excess(pub NonNull<Void>, pub usize);
pub struct Excess(pub NonNull<Opaque>, pub usize);
fn size_align<T>() -> (usize, usize) {
(mem::size_of::<T>(), mem::align_of::<T>())
@ -387,11 +387,11 @@ impl From<AllocErr> for CollectionAllocErr {
// FIXME: docs
pub unsafe trait GlobalAlloc {
unsafe fn alloc(&self, layout: Layout) -> *mut Void;
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque;
unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout);
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout);
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Void {
unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut Opaque {
let size = layout.size();
let ptr = self.alloc(layout);
if !ptr.is_null() {
@ -404,7 +404,7 @@ pub unsafe trait GlobalAlloc {
///
/// `new_size`, when rounded up to the nearest multiple of `old_layout.align()`,
/// must not overflow (i.e. the rounded value must be less than `usize::MAX`).
unsafe fn realloc(&self, ptr: *mut Void, old_layout: Layout, new_size: usize) -> *mut Void {
unsafe fn realloc(&self, ptr: *mut Opaque, old_layout: Layout, new_size: usize) -> *mut Opaque {
let new_layout = Layout::from_size_align_unchecked(new_size, old_layout.align());
let new_ptr = self.alloc(new_layout);
if !new_ptr.is_null() {
@ -554,7 +554,7 @@ pub unsafe trait Alloc {
/// Clients wishing to abort computation in response to an
/// allocation error are encouraged to call the allocator's `oom`
/// method, rather than directly invoking `panic!` or similar.
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr>;
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr>;
/// Deallocate the memory referenced by `ptr`.
///
@ -571,7 +571,7 @@ pub unsafe trait Alloc {
/// * In addition to fitting the block of memory `layout`, the
/// alignment of the `layout` must match the alignment used
/// to allocate that block of memory.
unsafe fn dealloc(&mut self, ptr: NonNull<Void>, layout: Layout);
unsafe fn dealloc(&mut self, ptr: NonNull<Opaque>, layout: Layout);
/// Allocator-specific method for signaling an out-of-memory
/// condition.
@ -689,9 +689,9 @@ pub unsafe trait Alloc {
/// reallocation error are encouraged to call the allocator's `oom`
/// method, rather than directly invoking `panic!` or similar.
unsafe fn realloc(&mut self,
ptr: NonNull<Void>,
ptr: NonNull<Opaque>,
layout: Layout,
new_size: usize) -> Result<NonNull<Void>, AllocErr> {
new_size: usize) -> Result<NonNull<Opaque>, AllocErr> {
let old_size = layout.size();
if new_size >= old_size {
@ -732,7 +732,7 @@ pub unsafe trait Alloc {
/// Clients wishing to abort computation in response to an
/// allocation error are encouraged to call the allocator's `oom`
/// method, rather than directly invoking `panic!` or similar.
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Void>, AllocErr> {
unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<NonNull<Opaque>, AllocErr> {
let size = layout.size();
let p = self.alloc(layout);
if let Ok(p) = p {
@ -781,7 +781,7 @@ pub unsafe trait Alloc {
/// reallocation error are encouraged to call the allocator's `oom`
/// method, rather than directly invoking `panic!` or similar.
unsafe fn realloc_excess(&mut self,
ptr: NonNull<Void>,
ptr: NonNull<Opaque>,
layout: Layout,
new_size: usize) -> Result<Excess, AllocErr> {
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
@ -826,7 +826,7 @@ pub unsafe trait Alloc {
/// `grow_in_place` failures without aborting, or to fall back on
/// another reallocation method before resorting to an abort.
unsafe fn grow_in_place(&mut self,
ptr: NonNull<Void>,
ptr: NonNull<Opaque>,
layout: Layout,
new_size: usize) -> Result<(), CannotReallocInPlace> {
let _ = ptr; // this default implementation doesn't care about the actual address.
@ -881,7 +881,7 @@ pub unsafe trait Alloc {
/// `shrink_in_place` failures without aborting, or to fall back
/// on another reallocation method before resorting to an abort.
unsafe fn shrink_in_place(&mut self,
ptr: NonNull<Void>,
ptr: NonNull<Opaque>,
layout: Layout,
new_size: usize) -> Result<(), CannotReallocInPlace> {
let _ = ptr; // this default implementation doesn't care about the actual address.
@ -960,7 +960,7 @@ pub unsafe trait Alloc {
{
let k = Layout::new::<T>();
if k.size() > 0 {
self.dealloc(ptr.as_void(), k);
self.dealloc(ptr.as_opaque(), k);
}
}
@ -1048,7 +1048,7 @@ pub unsafe trait Alloc {
match (Layout::array::<T>(n_old), Layout::array::<T>(n_new)) {
(Ok(ref k_old), Ok(ref k_new)) if k_old.size() > 0 && k_new.size() > 0 => {
debug_assert!(k_old.align() == k_new.align());
self.realloc(ptr.as_void(), k_old.clone(), k_new.size()).map(NonNull::cast)
self.realloc(ptr.as_opaque(), k_old.clone(), k_new.size()).map(NonNull::cast)
}
_ => {
Err(AllocErr)
@ -1081,7 +1081,7 @@ pub unsafe trait Alloc {
{
match Layout::array::<T>(n) {
Ok(ref k) if k.size() > 0 => {
Ok(self.dealloc(ptr.as_void(), k.clone()))
Ok(self.dealloc(ptr.as_opaque(), k.clone()))
}
_ => {
Err(AllocErr)

View file

@ -2751,9 +2751,9 @@ impl<T: ?Sized> NonNull<T> {
}
}
/// Cast to a `Void` pointer
/// Cast to an `Opaque` pointer
#[unstable(feature = "allocator_api", issue = "32838")]
pub fn as_void(self) -> NonNull<::alloc::Void> {
pub fn as_opaque(self) -> NonNull<::alloc::Opaque> {
unsafe {
NonNull::new_unchecked(self.as_ptr() as _)
}

View file

@ -221,7 +221,7 @@ impl<'a> AllocFnFactory<'a> {
let ident = ident();
args.push(self.cx.arg(self.span, ident, self.ptr_u8()));
let arg = self.cx.expr_ident(self.span, ident);
self.cx.expr_cast(self.span, arg, self.ptr_void())
self.cx.expr_cast(self.span, arg, self.ptr_opaque())
}
AllocatorTy::Usize => {
@ -276,13 +276,13 @@ impl<'a> AllocFnFactory<'a> {
self.cx.ty_ptr(self.span, ty_u8, Mutability::Mutable)
}
fn ptr_void(&self) -> P<Ty> {
let void = self.cx.path(self.span, vec![
fn ptr_opaque(&self) -> P<Ty> {
let opaque = self.cx.path(self.span, vec![
self.core,
Ident::from_str("alloc"),
Ident::from_str("Void"),
Ident::from_str("Opaque"),
]);
let ty_void = self.cx.ty_path(void);
self.cx.ty_ptr(self.span, ty_void, Mutability::Mutable)
let ty_opaque = self.cx.ty_path(opaque);
self.cx.ty_ptr(self.span, ty_opaque, Mutability::Mutable)
}
}

View file

@ -21,7 +21,7 @@
#[doc(hidden)]
#[allow(unused_attributes)]
pub mod __default_lib_allocator {
use super::{System, Layout, GlobalAlloc, Void};
use super::{System, Layout, GlobalAlloc, Opaque};
// for symbol names src/librustc/middle/allocator.rs
// for signatures src/librustc_allocator/lib.rs
@ -46,7 +46,7 @@ pub mod __default_lib_allocator {
pub unsafe extern fn __rdl_dealloc(ptr: *mut u8,
size: usize,
align: usize) {
System.dealloc(ptr as *mut Void, Layout::from_size_align_unchecked(size, align))
System.dealloc(ptr as *mut Opaque, Layout::from_size_align_unchecked(size, align))
}
#[no_mangle]
@ -56,7 +56,7 @@ pub mod __default_lib_allocator {
align: usize,
new_size: usize) -> *mut u8 {
let old_layout = Layout::from_size_align_unchecked(old_size, align);
System.realloc(ptr as *mut Void, old_layout, new_size) as *mut u8
System.realloc(ptr as *mut Opaque, old_layout, new_size) as *mut u8
}
#[no_mangle]

View file

@ -1183,7 +1183,7 @@ unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for RawTable<K, V> {
debug_assert!(!oflo, "should be impossible");
unsafe {
Global.dealloc(NonNull::new_unchecked(self.hashes.ptr()).as_void(),
Global.dealloc(NonNull::new_unchecked(self.hashes.ptr()).as_opaque(),
Layout::from_size_align(size, align).unwrap());
// Remember how everything was allocated out of one buffer
// during initialization? We only need one call to free here.

View file

@ -16,11 +16,11 @@ use std::alloc::*;
pub struct A;
unsafe impl GlobalAlloc for A {
unsafe fn alloc(&self, _: Layout) -> *mut Void {
unsafe fn alloc(&self, _: Layout) -> *mut Opaque {
loop {}
}
unsafe fn dealloc(&self, _ptr: *mut Void, _: Layout) {
unsafe fn dealloc(&self, _ptr: *mut Opaque, _: Layout) {
loop {}
}
}

View file

@ -13,18 +13,18 @@
#![feature(heap_api, allocator_api)]
#![crate_type = "rlib"]
use std::heap::{GlobalAlloc, System, Layout, Void};
use std::heap::{GlobalAlloc, System, Layout, Opaque};
use std::sync::atomic::{AtomicUsize, Ordering};
pub struct A(pub AtomicUsize);
unsafe impl GlobalAlloc for A {
unsafe fn alloc(&self, layout: Layout) -> *mut Void {
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
self.0.fetch_add(1, Ordering::SeqCst);
System.alloc(layout)
}
unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
self.0.fetch_add(1, Ordering::SeqCst);
System.dealloc(ptr, layout)
}

View file

@ -15,7 +15,7 @@
extern crate helper;
use std::alloc::{self, Global, Alloc, System, Layout, Void};
use std::alloc::{self, Global, Alloc, System, Layout, Opaque};
use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};
static HITS: AtomicUsize = ATOMIC_USIZE_INIT;
@ -23,12 +23,12 @@ static HITS: AtomicUsize = ATOMIC_USIZE_INIT;
struct A;
unsafe impl alloc::GlobalAlloc for A {
unsafe fn alloc(&self, layout: Layout) -> *mut Void {
unsafe fn alloc(&self, layout: Layout) -> *mut Opaque {
HITS.fetch_add(1, Ordering::SeqCst);
System.alloc(layout)
}
unsafe fn dealloc(&self, ptr: *mut Void, layout: Layout) {
unsafe fn dealloc(&self, ptr: *mut Opaque, layout: Layout) {
HITS.fetch_add(1, Ordering::SeqCst);
System.dealloc(ptr, layout)
}

View file

@ -64,7 +64,7 @@ unsafe fn test_triangle() -> bool {
println!("deallocate({:?}, {:?}", ptr, layout);
}
Global.dealloc(NonNull::new_unchecked(ptr).as_void(), layout);
Global.dealloc(NonNull::new_unchecked(ptr).as_opaque(), layout);
}
unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 {
@ -72,7 +72,7 @@ unsafe fn test_triangle() -> bool {
println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new);
}
let ret = Global.realloc(NonNull::new_unchecked(ptr).as_void(), old.clone(), new.size())
let ret = Global.realloc(NonNull::new_unchecked(ptr).as_opaque(), old.clone(), new.size())
.unwrap_or_else(|_| Global.oom());
if PRINT {