rustfmt liballoc

Nick Cameron 2015-09-24 10:00:54 +12:00
parent afae2ff723
commit 8f51c8d687
7 changed files with 244 additions and 115 deletions

src/liballoc/arc.rs

@@ -214,7 +214,9 @@ impl<T> Arc<T> {
#[stable(feature = "arc_unique", since = "1.4.0")]
pub fn try_unwrap(this: Self) -> Result<T, Self> {
// See `drop` for why all these atomics are like this
if this.inner().strong.compare_and_swap(1, 0, Release) != 1 { return Err(this) }
if this.inner().strong.compare_and_swap(1, 0, Release) != 1 {
return Err(this)
}
atomic::fence(Acquire);
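As a quick orientation for this hunk, a minimal sketch of how `try_unwrap` behaves from the caller's side (example ours, not part of the commit):

use std::sync::Arc;

fn main() {
    let x = Arc::new(3);
    // Sole strong reference: the CAS from 1 to 0 succeeds and the value moves out.
    assert_eq!(Arc::try_unwrap(x), Ok(3));

    let y = Arc::new(4);
    let _y2 = y.clone();
    // Another strong reference exists, so the original Arc comes back as Err.
    assert_eq!(*Arc::try_unwrap(y).unwrap_err(), 4);
}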
@@ -251,7 +253,9 @@ impl<T: ?Sized> Arc<T> {
let cur = this.inner().weak.load(Relaxed);
// check if the weak counter is currently "locked"; if so, spin.
if cur == usize::MAX { continue }
if cur == usize::MAX {
continue
}
// NOTE: this code currently ignores the possibility of overflow
// into usize::MAX; in general both Rc and Arc need to be adjusted
@@ -348,7 +352,9 @@ impl<T: ?Sized> Clone for Arc<T> {
// We abort because such a program is incredibly degenerate, and we
// don't care to support it.
if old_size > MAX_REFCOUNT {
unsafe { abort(); }
unsafe {
abort();
}
}
Arc { _ptr: self._ptr }
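The guard above defends against a degenerate but possible program: `mem::forget` on a clone bumps the strong count without ever scheduling a decrement. A hedged illustration using today's stable `strong_count` (example ours):

use std::mem;
use std::sync::Arc;

fn main() {
    let a = Arc::new(0u32);
    // Each forgotten clone permanently leaks one strong reference; after
    // MAX_REFCOUNT of them, the Clone impl above aborts the process.
    mem::forget(a.clone());
    assert_eq!(Arc::strong_count(&a), 2);
}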
@@ -556,7 +562,9 @@ impl<T: ?Sized> Drop for Arc<T> {
// Because `fetch_sub` is already atomic, we do not need to synchronize
// with other threads unless we are going to delete the object. This
// same logic applies to the below `fetch_sub` to the `weak` count.
if self.inner().strong.fetch_sub(1, Release) != 1 { return }
if self.inner().strong.fetch_sub(1, Release) != 1 {
return
}
// This fence is needed to prevent reordering of use of the data and
// deletion of the data. Because it is marked `Release`, the decreasing
@@ -577,9 +585,7 @@ impl<T: ?Sized> Drop for Arc<T> {
// [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
atomic::fence(Acquire);
unsafe {
self.drop_slow()
}
unsafe { self.drop_slow() }
}
}
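The ordering argument in this hunk (Release on the decrement, an Acquire fence before destruction) is the classic refcounting pattern. A standalone sketch of just that pattern, assuming `Count` guards some shared data (type and names ours):

use std::sync::atomic::{fence, AtomicUsize, Ordering};

struct Count(AtomicUsize);

impl Count {
    // Returns true only for the final decrement. The Release ordering
    // publishes all prior uses of the guarded data, and the Acquire fence
    // keeps its deletion from being reordered before those uses.
    fn release(&self) -> bool {
        if self.0.fetch_sub(1, Ordering::Release) != 1 {
            return false;
        }
        fence(Ordering::Acquire);
        true
    }
}

fn main() {
    let c = Count(AtomicUsize::new(2));
    assert!(!c.release());
    assert!(c.release()); // last reference: safe to destroy the data here
}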
@@ -613,11 +619,15 @@ impl<T: ?Sized> Weak<T> {
// "stale" read of 0 is fine), and any other value is
// confirmed via the CAS below.
let n = inner.strong.load(Relaxed);
if n == 0 { return None }
if n == 0 {
return None
}
// Relaxed is valid for the same reason it is on Arc's Clone impl
let old = inner.strong.compare_and_swap(n, n + 1, Relaxed);
if old == n { return Some(Arc { _ptr: self._ptr }) }
if old == n {
return Some(Arc { _ptr: self._ptr })
}
}
}
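For context, the observable behavior of the CAS loop formatted above, via today's stable API (example ours, not part of the commit):

use std::sync::{Arc, Weak};

fn main() {
    let strong = Arc::new(5);
    let weak: Weak<i32> = Arc::downgrade(&strong);
    // upgrade succeeds while any strong reference survives...
    assert!(weak.upgrade().is_some());
    drop(strong);
    // ...and reads strong == 0 afterwards, returning None.
    assert!(weak.upgrade().is_none());
}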
@@ -653,7 +663,9 @@ impl<T: ?Sized> Clone for Weak<T> {
// See comments in Arc::clone() for why we do this (for mem::forget).
if old_size > MAX_REFCOUNT {
unsafe { abort(); }
unsafe {
abort();
}
}
return Weak { _ptr: self._ptr }
@@ -705,9 +717,7 @@ impl<T: ?Sized> Drop for Weak<T> {
// ref, which can only happen after the lock is released.
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8,
size_of_val(&*ptr),
align_of_val(&*ptr)) }
unsafe { deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr)) }
}
}
}
@@ -727,7 +737,9 @@ impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
///
/// five == Arc::new(5);
/// ```
fn eq(&self, other: &Arc<T>) -> bool { *(*self) == *(*other) }
fn eq(&self, other: &Arc<T>) -> bool {
*(*self) == *(*other)
}
/// Inequality for two `Arc<T>`s.
///
@@ -742,7 +754,9 @@ impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
///
/// five != Arc::new(5);
/// ```
fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
fn ne(&self, other: &Arc<T>) -> bool {
*(*self) != *(*other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
@@ -776,7 +790,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// five < Arc::new(5);
/// ```
fn lt(&self, other: &Arc<T>) -> bool { *(*self) < *(*other) }
fn lt(&self, other: &Arc<T>) -> bool {
*(*self) < *(*other)
}
/// 'Less-than or equal to' comparison for two `Arc<T>`s.
///
@@ -791,7 +807,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// five <= Arc::new(5);
/// ```
fn le(&self, other: &Arc<T>) -> bool { *(*self) <= *(*other) }
fn le(&self, other: &Arc<T>) -> bool {
*(*self) <= *(*other)
}
/// Greater-than comparison for two `Arc<T>`s.
///
@@ -806,7 +824,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// five > Arc::new(5);
/// ```
fn gt(&self, other: &Arc<T>) -> bool { *(*self) > *(*other) }
fn gt(&self, other: &Arc<T>) -> bool {
*(*self) > *(*other)
}
/// 'Greater-than or equal to' comparison for two `Arc<T>`s.
///
@@ -821,11 +841,15 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
///
/// five >= Arc::new(5);
/// ```
fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
fn ge(&self, other: &Arc<T>) -> bool {
*(*self) >= *(*other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord> Ord for Arc<T> {
fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
fn cmp(&self, other: &Arc<T>) -> Ordering {
(**self).cmp(&**other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Eq> Eq for Arc<T> {}
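All of the operators reformatted above forward through `Deref`, so `Arc` comparisons are by contents rather than pointer identity; a small sanity check (example ours):

use std::sync::Arc;

fn main() {
    let five = Arc::new(5);
    assert!(five == Arc::new(5)); // distinct allocations, equal contents
    assert!(five < Arc::new(6));
    assert!(Arc::new(7) >= five);
}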
@@ -854,7 +878,9 @@ impl<T> fmt::Pointer for Arc<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Arc<T> {
#[stable(feature = "rust1", since = "1.0.0")]
fn default() -> Arc<T> { Arc::new(Default::default()) }
fn default() -> Arc<T> {
Arc::new(Default::default())
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -1015,7 +1041,7 @@ mod tests {
#[test]
fn weak_self_cyclic() {
struct Cycle {
x: Mutex<Option<Weak<Cycle>>>
x: Mutex<Option<Weak<Cycle>>>,
}
let a = Arc::new(Cycle { x: Mutex::new(None) });
@@ -1095,7 +1121,9 @@ mod tests {
// Make sure deriving works with Arc<T>
#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
struct Foo { inner: Arc<i32> }
struct Foo {
inner: Arc<i32>,
}
#[test]
fn test_unsized() {
@@ -1108,5 +1136,7 @@ mod tests {
}
impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
fn borrow(&self) -> &T { &**self }
fn borrow(&self) -> &T {
&**self
}
}

src/liballoc/boxed.rs

@@ -66,7 +66,7 @@ use core::mem;
use core::ops::{CoerceUnsized, Deref, DerefMut};
use core::ops::{Placer, Boxed, Place, InPlace, BoxPlace};
use core::ptr::{self, Unique};
use core::raw::{TraitObject};
use core::raw::TraitObject;
/// A value that represents the heap. This is the default place that the `box`
/// keyword allocates into when no place is supplied.
@@ -96,7 +96,9 @@ pub const HEAP: ExchangeHeapSingleton =
reason = "may be renamed; uncertain about custom allocator design",
issue = "27779")]
#[derive(Copy, Clone)]
pub struct ExchangeHeapSingleton { _force_singleton: () }
pub struct ExchangeHeapSingleton {
_force_singleton: (),
}
/// A pointer type for heap allocation.
///
@@ -127,7 +129,7 @@ pub struct Box<T: ?Sized>(Unique<T>);
#[unstable(feature = "placement_in",
reason = "placement box design is still being worked out.",
issue = "27779")]
pub struct IntermediateBox<T: ?Sized>{
pub struct IntermediateBox<T: ?Sized> {
ptr: *mut u8,
size: usize,
align: usize,
@@ -153,9 +155,7 @@ fn make_place<T>() -> IntermediateBox<T> {
let p = if size == 0 {
heap::EMPTY as *mut u8
} else {
let p = unsafe {
heap::allocate(size, align)
};
let p = unsafe { heap::allocate(size, align) };
if p.is_null() {
panic!("Box make_place allocation failure.");
}
@@ -166,18 +166,24 @@ fn make_place<T>() -> IntermediateBox<T> {
}
impl<T> BoxPlace<T> for IntermediateBox<T> {
fn make_place() -> IntermediateBox<T> { make_place() }
fn make_place() -> IntermediateBox<T> {
make_place()
}
}
impl<T> InPlace<T> for IntermediateBox<T> {
type Owner = Box<T>;
unsafe fn finalize(self) -> Box<T> { finalize(self) }
unsafe fn finalize(self) -> Box<T> {
finalize(self)
}
}
impl<T> Boxed for Box<T> {
type Data = T;
type Place = IntermediateBox<T>;
unsafe fn finalize(b: IntermediateBox<T>) -> Box<T> { finalize(b) }
unsafe fn finalize(b: IntermediateBox<T>) -> Box<T> {
finalize(b)
}
}
impl<T> Placer<T> for ExchangeHeapSingleton {
@@ -191,9 +197,7 @@ impl<T> Placer<T> for ExchangeHeapSingleton {
impl<T: ?Sized> Drop for IntermediateBox<T> {
fn drop(&mut self) {
if self.size > 0 {
unsafe {
heap::deallocate(self.ptr, self.size, self.align)
}
unsafe { heap::deallocate(self.ptr, self.size, self.align) }
}
}
}
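The `make_place`/`finalize` pair above implements the then-unstable placement protocol: allocate first, construct the value in the allocation, then claim ownership. A rough emulation against today's `std::alloc` (the `emplace` name and the dangling-pointer stand-in for `heap::EMPTY` are ours; the real path goes through the `Placer`/`InPlace`/`Boxed` traits shown here):

use std::alloc::{alloc, handle_alloc_error, Layout};
use std::ptr::NonNull;

// Allocate, write in place, take ownership: the same three steps as
// make_place -> fill -> finalize above.
fn emplace<T>(value: T) -> Box<T> {
    let layout = Layout::new::<T>();
    unsafe {
        let ptr = if layout.size() == 0 {
            NonNull::<T>::dangling().as_ptr() // stands in for heap::EMPTY
        } else {
            let p = alloc(layout) as *mut T;
            if p.is_null() {
                handle_alloc_error(layout); // stands in for the panic above
            }
            p
        };
        ptr.write(value); // fill the place
        Box::from_raw(ptr) // InPlace::finalize equivalent
    }
}

fn main() {
    let b = emplace(42u32);
    assert_eq!(*b, 42);
}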
@@ -257,13 +261,17 @@ impl<T : ?Sized> Box<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Default> Default for Box<T> {
#[stable(feature = "rust1", since = "1.0.0")]
fn default() -> Box<T> { box Default::default() }
fn default() -> Box<T> {
box Default::default()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Default for Box<[T]> {
#[stable(feature = "rust1", since = "1.0.0")]
fn default() -> Box<[T]> { Box::<[T; 0]>::new([]) }
fn default() -> Box<[T]> {
Box::<[T; 0]>::new([])
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -277,7 +285,11 @@ impl<T: Clone> Clone for Box<T> {
/// let y = x.clone();
/// ```
#[inline]
fn clone(&self) -> Box<T> { box {(**self).clone()} }
fn clone(&self) -> Box<T> {
box {
(**self).clone()
}
}
/// Copies `source`'s contents into `self` without creating a new allocation.
///
/// # Examples
@@ -312,9 +324,13 @@ impl Clone for Box<str> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialEq> PartialEq for Box<T> {
#[inline]
fn eq(&self, other: &Box<T>) -> bool { PartialEq::eq(&**self, &**other) }
fn eq(&self, other: &Box<T>) -> bool {
PartialEq::eq(&**self, &**other)
}
#[inline]
fn ne(&self, other: &Box<T>) -> bool { PartialEq::ne(&**self, &**other) }
fn ne(&self, other: &Box<T>) -> bool {
PartialEq::ne(&**self, &**other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + PartialOrd> PartialOrd for Box<T> {
@@ -323,13 +339,21 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Box<T> {
PartialOrd::partial_cmp(&**self, &**other)
}
#[inline]
fn lt(&self, other: &Box<T>) -> bool { PartialOrd::lt(&**self, &**other) }
fn lt(&self, other: &Box<T>) -> bool {
PartialOrd::lt(&**self, &**other)
}
#[inline]
fn le(&self, other: &Box<T>) -> bool { PartialOrd::le(&**self, &**other) }
fn le(&self, other: &Box<T>) -> bool {
PartialOrd::le(&**self, &**other)
}
#[inline]
fn ge(&self, other: &Box<T>) -> bool { PartialOrd::ge(&**self, &**other) }
fn ge(&self, other: &Box<T>) -> bool {
PartialOrd::ge(&**self, &**other)
}
#[inline]
fn gt(&self, other: &Box<T>) -> bool { PartialOrd::gt(&**self, &**other) }
fn gt(&self, other: &Box<T>) -> bool {
PartialOrd::gt(&**self, &**other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Ord> Ord for Box<T> {
@@ -357,8 +381,7 @@ impl Box<Any> {
unsafe {
// Get the raw representation of the trait object
let raw = Box::into_raw(self);
let to: TraitObject =
mem::transmute::<*mut Any, TraitObject>(raw);
let to: TraitObject = mem::transmute::<*mut Any, TraitObject>(raw);
// Extract the data pointer
Ok(Box::from_raw(to.data as *mut T))
@@ -409,23 +432,33 @@ impl<T> fmt::Pointer for Box<T> {
impl<T: ?Sized> Deref for Box<T> {
type Target = T;
fn deref(&self) -> &T { &**self }
fn deref(&self) -> &T {
&**self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> DerefMut for Box<T> {
fn deref_mut(&mut self) -> &mut T { &mut **self }
fn deref_mut(&mut self) -> &mut T {
&mut **self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: Iterator + ?Sized> Iterator for Box<I> {
type Item = I::Item;
fn next(&mut self) -> Option<I::Item> { (**self).next() }
fn size_hint(&self) -> (usize, Option<usize>) { (**self).size_hint() }
fn next(&mut self) -> Option<I::Item> {
(**self).next()
}
fn size_hint(&self) -> (usize, Option<usize>) {
(**self).size_hint()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
fn next_back(&mut self) -> Option<I::Item> { (**self).next_back() }
fn next_back(&mut self) -> Option<I::Item> {
(**self).next_back()
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {}
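These forwarding impls are what let a boxed trait-object iterator be driven directly; a one-line check in today's syntax (example ours):

fn main() {
    let it: Box<dyn Iterator<Item = i32>> = Box::new(vec![1, 2, 3].into_iter());
    assert_eq!(it.sum::<i32>(), 6); // Box<I> is itself an Iterator
}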
@@ -507,10 +540,7 @@ impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
#[stable(feature = "box_slice_clone", since = "1.3.0")]
impl<T: Clone> Clone for Box<[T]> {
fn clone(&self) -> Self {
let mut new = BoxBuilder {
data: RawVec::with_capacity(self.len()),
len: 0
};
let mut new = BoxBuilder { data: RawVec::with_capacity(self.len()), len: 0 };
let mut target = new.data.ptr();
@@ -556,9 +586,13 @@ impl<T: Clone> Clone for Box<[T]> {
}
impl<T: ?Sized> borrow::Borrow<T> for Box<T> {
fn borrow(&self) -> &T { &**self }
fn borrow(&self) -> &T {
&**self
}
}
impl<T: ?Sized> borrow::BorrowMut<T> for Box<T> {
fn borrow_mut(&mut self) -> &mut T { &mut **self }
fn borrow_mut(&mut self) -> &mut T {
&mut **self
}
}

src/liballoc/boxed_test.rs

@@ -34,12 +34,16 @@ fn any_move() {
let b = Box::new(Test) as Box<Any>;
match a.downcast::<i32>() {
Ok(a) => { assert!(a == Box::new(8)); }
Err(..) => panic!()
Ok(a) => {
assert!(a == Box::new(8));
}
Err(..) => panic!(),
}
match b.downcast::<Test>() {
Ok(a) => { assert!(a == Box::new(Test)); }
Err(..) => panic!()
Ok(a) => {
assert!(a == Box::new(Test));
}
Err(..) => panic!(),
}
let a = Box::new(8) as Box<Any>;
@@ -70,7 +74,8 @@ fn test_show() {
#[test]
fn deref() {
fn homura<T: Deref<Target=i32>>(_: T) { }
fn homura<T: Deref<Target = i32>>(_: T) {
}
homura(Box::new(765));
}

src/liballoc/heap.rs

@@ -22,10 +22,12 @@ extern {
#[allocator]
fn __rust_allocate(size: usize, align: usize) -> *mut u8;
fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);
fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize,
align: usize) -> *mut u8;
fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
align: usize) -> usize;
fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;
fn __rust_reallocate_inplace(ptr: *mut u8,
old_size: usize,
size: usize,
align: usize)
-> usize;
fn __rust_usable_size(size: usize, align: usize) -> usize;
}
@@ -84,8 +86,11 @@ pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usiz
/// create the allocation referenced by `ptr`. The `old_size` parameter may be
/// any value in range_inclusive(requested_size, usable_size).
#[inline]
pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
align: usize) -> usize {
pub unsafe fn reallocate_inplace(ptr: *mut u8,
old_size: usize,
size: usize,
align: usize)
-> usize {
check_size_and_alignment(size, align);
__rust_reallocate_inplace(ptr, old_size, size, align)
}
@@ -124,7 +129,9 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
EMPTY as *mut u8
} else {
let ptr = allocate(size, align);
if ptr.is_null() { ::oom() }
if ptr.is_null() {
::oom()
}
ptr
}
}
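`exchange_malloc` never touches the allocator for zero-sized requests. A sketch of the same contract against today's `std::alloc` (the function name and the aligned-dangling stand-in for `heap::EMPTY` are ours):

use std::alloc::{alloc, Layout};

unsafe fn exchange_malloc_sketch(size: usize, align: usize) -> *mut u8 {
    if size == 0 {
        align as *mut u8 // any non-null, suitably aligned sentinel, like heap::EMPTY
    } else {
        let ptr = alloc(Layout::from_size_align(size, align).unwrap());
        if ptr.is_null() {
            std::process::abort(); // stands in for ::oom()
        }
        ptr
    }
}

fn main() {
    let p = unsafe { exchange_malloc_sketch(0, 8) };
    assert!(!p.is_null()); // zero-sized "allocation" without an allocator call
}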
@@ -148,7 +155,9 @@ mod tests {
unsafe {
let size = 4000;
let ptr = heap::allocate(size, 8);
if ptr.is_null() { ::oom() }
if ptr.is_null() {
::oom()
}
let ret = heap::reallocate_inplace(ptr, size, size, 8);
heap::deallocate(ptr, size, 8);
assert_eq!(ret, heap::usable_size(size, 8));

src/liballoc/lib.rs

@@ -107,8 +107,12 @@ extern crate alloc_system;
// Allow testing this library
#[cfg(test)] #[macro_use] extern crate std;
#[cfg(test)] #[macro_use] extern crate log;
#[cfg(test)]
#[macro_use]
extern crate std;
#[cfg(test)]
#[macro_use]
extern crate log;
// Heaps provided for low-level allocation strategies
@@ -123,7 +127,9 @@ pub mod heap;
#[cfg(not(test))]
pub mod boxed;
#[cfg(test)]
mod boxed { pub use std::boxed::{Box, HEAP}; }
mod boxed {
pub use std::boxed::{Box, HEAP};
}
#[cfg(test)]
mod boxed_test;
pub mod arc;

src/liballoc/raw_vec.rs

@@ -58,7 +58,11 @@ impl<T> RawVec<T> {
pub fn new() -> Self {
unsafe {
// !0 is usize::MAX. This branch should be stripped at compile time.
let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };
let cap = if mem::size_of::<T>() == 0 {
!0
} else {
0
};
// heap::EMPTY doubles as "unallocated" and "zero-sized allocation"
RawVec { ptr: Unique::new(heap::EMPTY as *mut T), cap: cap }
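The `!0` above is `usize::MAX`: zero-sized types never need a real allocation, so their capacity is reported as unbounded. Observable through `Vec` today (example ours):

fn main() {
    assert_eq!(!0usize, usize::MAX);
    let v: Vec<()> = Vec::new(); // () is zero-sized
    assert_eq!(v.capacity(), usize::MAX);
}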
@@ -92,7 +96,9 @@ impl<T> RawVec<T> {
} else {
let align = mem::align_of::<T>();
let ptr = heap::allocate(alloc_size, align);
if ptr.is_null() { oom() }
if ptr.is_null() {
oom()
}
ptr
};
@@ -133,7 +139,11 @@ impl<T> RawVec<T> {
///
/// This will always be `usize::MAX` if `T` is zero-sized.
pub fn cap(&self) -> usize {
if mem::size_of::<T>() == 0 { !0 } else { self.cap }
if mem::size_of::<T>() == 0 {
!0
} else {
self.cap
}
}
/// Doubles the size of the type's backing allocation. This is common enough
@@ -190,7 +200,11 @@ impl<T> RawVec<T> {
let (new_cap, ptr) = if self.cap == 0 {
// skip to 4 because tiny Vec's are dumb; but not if that would cause overflow
let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
let new_cap = if elem_size > (!0) / 8 {
1
} else {
4
};
let ptr = heap::allocate(new_cap * elem_size, align);
(new_cap, ptr)
} else {
@@ -207,7 +221,9 @@ impl<T> RawVec<T> {
};
// If allocate or reallocate fail, we'll get `null` back
if ptr.is_null() { oom() }
if ptr.is_null() {
oom()
}
self.ptr = Unique::new(ptr as *mut _);
self.cap = new_cap;
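The growth policy in `double` above, in isolation: start at 4 elements (1 if an element is so large that 4 could overflow), then double. A toy model of the resulting capacity sequence (names ours):

fn next_cap(cap: usize) -> usize {
    if cap == 0 { 4 } else { 2 * cap }
}

fn main() {
    let mut cap = 0;
    let mut caps = Vec::new();
    for _ in 0..5 {
        cap = next_cap(cap);
        caps.push(cap);
    }
    assert_eq!(caps, [4, 8, 16, 32, 64]); // amortized O(1) pushes
}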
@@ -246,7 +262,9 @@ impl<T> RawVec<T> {
// Don't actually need any more capacity.
// Wrapping in case they gave a bad `used_cap`.
if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { return; }
if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
return;
}
// Nothing we can really do about these checks :(
let new_cap = used_cap.checked_add(needed_extra_cap).expect("capacity overflow");
@@ -256,14 +274,13 @@ impl<T> RawVec<T> {
let ptr = if self.cap == 0 {
heap::allocate(new_alloc_size, align)
} else {
heap::reallocate(self.ptr() as *mut _,
self.cap * elem_size,
new_alloc_size,
align)
heap::reallocate(self.ptr() as *mut _, self.cap * elem_size, new_alloc_size, align)
};
// If allocate or reallocate fail, we'll get `null` back
if ptr.is_null() { oom() }
if ptr.is_null() {
oom()
}
self.ptr = Unique::new(ptr as *mut _);
self.cap = new_cap;
@@ -326,7 +343,9 @@ impl<T> RawVec<T> {
// Don't actually need any more capacity.
// Wrapping in case they give a bad `used_cap`
if self.cap().wrapping_sub(used_cap) >= needed_extra_cap { return; }
if self.cap().wrapping_sub(used_cap) >= needed_extra_cap {
return;
}
// Nothing we can really do about these checks :(
let new_cap = used_cap.checked_add(needed_extra_cap)
@@ -339,14 +358,13 @@ impl<T> RawVec<T> {
let ptr = if self.cap == 0 {
heap::allocate(new_alloc_size, align)
} else {
heap::reallocate(self.ptr() as *mut _,
self.cap * elem_size,
new_alloc_size,
align)
heap::reallocate(self.ptr() as *mut _, self.cap * elem_size, new_alloc_size, align)
};
// If allocate or reallocate fail, we'll get `null` back
if ptr.is_null() { oom() }
if ptr.is_null() {
oom()
}
self.ptr = Unique::new(ptr as *mut _);
self.cap = new_cap;
@@ -386,7 +404,9 @@ impl<T> RawVec<T> {
self.cap * elem_size,
amount * elem_size,
align);
if ptr.is_null() { oom() }
if ptr.is_null() {
oom()
}
self.ptr = Unique::new(ptr as *mut _);
}
self.cap = amount;

src/liballoc/rc.rs

@@ -213,7 +213,7 @@ impl<T> Rc<T> {
_ptr: NonZero::new(Box::into_raw(box RcBox {
strong: Cell::new(1),
weak: Cell::new(1),
value: value
value: value,
})),
}
}
@@ -290,13 +294,17 @@ impl<T: ?Sized> Rc<T> {
#[inline]
#[unstable(feature = "rc_counts", reason = "not clearly useful",
issue = "28356")]
pub fn weak_count(this: &Self) -> usize { this.weak() - 1 }
pub fn weak_count(this: &Self) -> usize {
this.weak() - 1
}
/// Get the number of strong references to this value.
#[inline]
#[unstable(feature = "rc_counts", reason = "not clearly useful",
issue = "28356")]
pub fn strong_count(this: &Self) -> usize { this.strong() }
pub fn strong_count(this: &Self) -> usize {
this.strong()
}
/// Returns true if there are no other `Rc` or `Weak<T>` values that share
/// the same inner value.
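The `- 1` in `weak_count` above subtracts the implicit weak reference collectively held by the strong pointers; observable via today's stable counterparts (example ours):

use std::rc::Rc;

fn main() {
    let a = Rc::new(5);
    let _b = a.clone();
    let _w = Rc::downgrade(&a);
    assert_eq!(Rc::strong_count(&a), 2);
    assert_eq!(Rc::weak_count(&a), 1); // the implicit weak ref is not counted
}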
@@ -451,7 +455,7 @@ impl<T: ?Sized> Drop for Rc<T> {
unsafe {
let ptr = *self._ptr;
if !(*(&ptr as *const _ as *const *const ())).is_null() &&
ptr as *const () as usize != mem::POST_DROP_USIZE {
ptr as *const () as usize != mem::POST_DROP_USIZE {
self.dec_strong();
if self.strong() == 0 {
// destroy the contained object
@@ -462,9 +466,7 @@ impl<T: ?Sized> Drop for Rc<T> {
self.dec_weak();
if self.weak() == 0 {
deallocate(ptr as *mut u8,
size_of_val(&*ptr),
align_of_val(&*ptr))
deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
}
}
}
@@ -530,7 +532,9 @@ impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
/// five == Rc::new(5);
/// ```
#[inline(always)]
fn eq(&self, other: &Rc<T>) -> bool { **self == **other }
fn eq(&self, other: &Rc<T>) -> bool {
**self == **other
}
/// Inequality for two `Rc<T>`s.
///
@@ -546,7 +550,9 @@ impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
/// five != Rc::new(5);
/// ```
#[inline(always)]
fn ne(&self, other: &Rc<T>) -> bool { **self != **other }
fn ne(&self, other: &Rc<T>) -> bool {
**self != **other
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -586,7 +592,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// five < Rc::new(5);
/// ```
#[inline(always)]
fn lt(&self, other: &Rc<T>) -> bool { **self < **other }
fn lt(&self, other: &Rc<T>) -> bool {
**self < **other
}
/// 'Less-than or equal to' comparison for two `Rc<T>`s.
///
@@ -602,7 +610,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// five <= Rc::new(5);
/// ```
#[inline(always)]
fn le(&self, other: &Rc<T>) -> bool { **self <= **other }
fn le(&self, other: &Rc<T>) -> bool {
**self <= **other
}
/// Greater-than comparison for two `Rc<T>`s.
///
@@ -618,7 +628,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// five > Rc::new(5);
/// ```
#[inline(always)]
fn gt(&self, other: &Rc<T>) -> bool { **self > **other }
fn gt(&self, other: &Rc<T>) -> bool {
**self > **other
}
/// 'Greater-than or equal to' comparison for two `Rc<T>`s.
///
@@ -634,7 +646,9 @@ impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
/// five >= Rc::new(5);
/// ```
#[inline(always)]
fn ge(&self, other: &Rc<T>) -> bool { **self >= **other }
fn ge(&self, other: &Rc<T>) -> bool {
**self >= **other
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -653,7 +667,9 @@ impl<T: ?Sized + Ord> Ord for Rc<T> {
/// five.partial_cmp(&Rc::new(5));
/// ```
#[inline]
fn cmp(&self, other: &Rc<T>) -> Ordering { (**self).cmp(&**other) }
fn cmp(&self, other: &Rc<T>) -> Ordering {
(**self).cmp(&**other)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -764,13 +780,12 @@ impl<T: ?Sized> Drop for Weak<T> {
unsafe {
let ptr = *self._ptr;
if !(*(&ptr as *const _ as *const *const ())).is_null() &&
ptr as *const () as usize != mem::POST_DROP_USIZE {
ptr as *const () as usize != mem::POST_DROP_USIZE {
self.dec_weak();
// the weak count starts at 1, and will only go to zero if all
// the strong pointers have disappeared.
if self.weak() == 0 {
deallocate(ptr as *mut u8, size_of_val(&*ptr),
align_of_val(&*ptr))
deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
}
}
}
@@ -821,7 +836,9 @@ trait RcBoxPtr<T: ?Sized> {
fn inner(&self) -> &RcBox<T>;
#[inline]
fn strong(&self) -> usize { self.inner().strong.get() }
fn strong(&self) -> usize {
self.inner().strong.get()
}
#[inline]
fn inc_strong(&self) {
@@ -829,10 +846,14 @@ trait RcBoxPtr<T: ?Sized> {
}
#[inline]
fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
fn dec_strong(&self) {
self.inner().strong.set(self.strong() - 1);
}
#[inline]
fn weak(&self) -> usize { self.inner().weak.get() }
fn weak(&self) -> usize {
self.inner().weak.get()
}
#[inline]
fn inc_weak(&self) {
@@ -840,7 +861,9 @@ trait RcBoxPtr<T: ?Sized> {
}
#[inline]
fn dec_weak(&self) { self.inner().weak.set(self.weak() - 1); }
fn dec_weak(&self) {
self.inner().weak.set(self.weak() - 1);
}
}
impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
@@ -928,7 +951,7 @@ mod tests {
#[test]
fn weak_self_cyclic() {
struct Cycle {
x: RefCell<Option<Weak<Cycle>>>
x: RefCell<Option<Weak<Cycle>>>,
}
let a = Rc::new(Cycle { x: RefCell::new(None) });
@@ -1086,5 +1109,7 @@ mod tests {
}
impl<T: ?Sized> borrow::Borrow<T> for Rc<T> {
fn borrow(&self) -> &T { &**self }
fn borrow(&self) -> &T {
&**self
}
}