Auto merge of #61274 - Centril:rollup-23dekk4, r=Centril

Rollup of 4 pull requests

Successful merges:

 - #61123 (Allow specifying the profiling data output directory as a -Zself-profile argument.)
 - #61159 (split core::ptr module into multiple files)
 - #61164 (rename Scalar::Bits to Scalar::Raw and bits field to data)
 - #61250 (Remove special case for *ios* builds in run-make-fulldeps/print-target-list Makefile)

Failed merges:

r? @ghost
bors committed on 2019-05-28 17:38:01 +00:00, commit 7212685837.
29 changed files with 648 additions and 621 deletions.


@@ -1,5 +1,3 @@
// ignore-tidy-filelength
//! Manually manage memory through raw pointers.
//!
//! *[See also the pointer primitive types](../../std/primitive.pointer.html).*
@@ -65,14 +63,10 @@
#![stable(feature = "rust1", since = "1.0.0")]
use crate::convert::From;
use crate::intrinsics;
use crate::ops::{CoerceUnsized, DispatchFromDyn};
use crate::fmt;
use crate::hash;
use crate::marker::{PhantomData, Unsize};
use crate::mem::{self, MaybeUninit};
use crate::cmp::Ordering::{self, Less, Equal, Greater};
#[stable(feature = "rust1", since = "1.0.0")]
@@ -84,6 +78,14 @@ pub use crate::intrinsics::copy;
#[stable(feature = "rust1", since = "1.0.0")]
pub use crate::intrinsics::write_bytes;
mod non_null;
#[stable(feature = "nonnull", since = "1.25.0")]
pub use non_null::NonNull;
mod unique;
#[unstable(feature = "ptr_internals", issue = "0")]
pub use unique::Unique;
/// Executes the destructor (if any) of the pointed-to value.
///
/// This is semantically equivalent to calling [`ptr::read`] and discarding
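
A stand-alone usage sketch for `drop_in_place` as documented above (not part of this diff; only std is assumed):

    use std::mem::ManuallyDrop;
    use std::ptr;

    fn main() {
        let mut s = ManuallyDrop::new(String::from("hello"));
        // Runs String's destructor through a raw pointer, exactly once;
        // ManuallyDrop keeps the compiler from dropping `s` a second time.
        unsafe { ptr::drop_in_place(&mut *s) };
    }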
@@ -2742,384 +2744,3 @@ impl<T: ?Sized> PartialOrd for *mut T {
#[inline]
fn ge(&self, other: &*mut T) -> bool { *self >= *other }
}
/// A wrapper around a raw non-null `*mut T` that indicates that the possessor
/// of this wrapper owns the referent. Useful for building abstractions like
/// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.
///
/// Unlike `*mut T`, `Unique<T>` behaves "as if" it were an instance of `T`.
/// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies
/// the kind of strong aliasing guarantees an instance of `T` can expect:
/// the referent of the pointer should not be modified without a unique path to
/// its owning Unique.
///
/// If you're uncertain of whether it's correct to use `Unique` for your purposes,
/// consider using `NonNull`, which has weaker semantics.
///
/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct
/// for any type which upholds Unique's aliasing requirements.
#[unstable(feature = "ptr_internals", issue = "0",
reason = "use NonNull instead and consider PhantomData<T> \
(if you also use #[may_dangle]), Send, and/or Sync")]
#[doc(hidden)]
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
pub struct Unique<T: ?Sized> {
pointer: *const T,
// NOTE: this marker has no consequences for variance, but is necessary
// for dropck to understand that we logically own a `T`.
//
// For details, see:
// https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
_marker: PhantomData<T>,
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> fmt::Debug for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
/// `Unique` pointers are `Send` if `T` is `Send` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "ptr_internals", issue = "0")]
unsafe impl<T: Send + ?Sized> Send for Unique<T> { }
/// `Unique` pointers are `Sync` if `T` is `Sync` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "ptr_internals", issue = "0")]
unsafe impl<T: Sync + ?Sized> Sync for Unique<T> { }
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: Sized> Unique<T> {
/// Creates a new `Unique` that is dangling, but well-aligned.
///
/// This is useful for initializing types which lazily allocate, like
/// `Vec::new` does.
///
/// Note that the pointer value may potentially represent a valid pointer to
/// a `T`, which means this must not be used as a "not yet initialized"
/// sentinel value. Types that lazily allocate must track initialization by
/// some other means.
// FIXME: rename to dangling() to match NonNull?
pub const fn empty() -> Self {
unsafe {
Unique::new_unchecked(mem::align_of::<T>() as *mut T)
}
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> Unique<T> {
/// Creates a new `Unique`.
///
/// # Safety
///
/// `ptr` must be non-null.
pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
Unique { pointer: ptr as _, _marker: PhantomData }
}
/// Creates a new `Unique` if `ptr` is non-null.
pub fn new(ptr: *mut T) -> Option<Self> {
if !ptr.is_null() {
Some(unsafe { Unique { pointer: ptr as _, _marker: PhantomData } })
} else {
None
}
}
/// Acquires the underlying `*mut` pointer.
pub const fn as_ptr(self) -> *mut T {
self.pointer as *mut T
}
/// Dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`.
pub unsafe fn as_ref(&self) -> &T {
&*self.as_ptr()
}
/// Mutably dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`.
pub unsafe fn as_mut(&mut self) -> &mut T {
&mut *self.as_ptr()
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> Clone for Unique<T> {
fn clone(&self) -> Self {
*self
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> Copy for Unique<T> { }
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> { }
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Unique<U>> for Unique<T> where T: Unsize<U> { }
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> fmt::Pointer for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> From<&mut T> for Unique<T> {
fn from(reference: &mut T) -> Self {
unsafe { Unique { pointer: reference as *mut T, _marker: PhantomData } }
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> From<&T> for Unique<T> {
fn from(reference: &T) -> Self {
unsafe { Unique { pointer: reference as *const T, _marker: PhantomData } }
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<'a, T: ?Sized> From<NonNull<T>> for Unique<T> {
fn from(p: NonNull<T>) -> Self {
unsafe { Unique { pointer: p.pointer, _marker: PhantomData } }
}
}
/// `*mut T` but non-zero and covariant.
///
/// This is often the correct thing to use when building data structures using
/// raw pointers, but is ultimately more dangerous to use because of its additional
/// properties. If you're not sure if you should use `NonNull<T>`, just use `*mut T`!
///
/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<NonNull<T>>` has the same size as `*mut T`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `NonNull<T>` is covariant over `T`. If this is incorrect
/// for your use case, you should include some [`PhantomData`] in your type to
/// provide invariance, such as `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
/// Usually this won't be necessary; covariance is correct for most safe abstractions,
/// such as `Box`, `Rc`, `Arc`, `Vec`, and `LinkedList`. This is the case because they
/// provide a public API that follows the normal shared XOR mutable rules of Rust.
///
/// Notice that `NonNull<T>` has a `From` instance for `&T`. However, this does
/// not change the fact that mutating through a (pointer derived from a) shared
/// reference is undefined behavior unless the mutation happens inside an
/// [`UnsafeCell<T>`]. The same goes for creating a mutable reference from a shared
/// reference. When using this `From` instance without an `UnsafeCell<T>`,
/// it is your responsibility to ensure that `as_mut` is never called, and `as_ptr`
/// is never used for mutation.
///
/// [`PhantomData`]: ../marker/struct.PhantomData.html
/// [`UnsafeCell<T>`]: ../cell/struct.UnsafeCell.html
#[stable(feature = "nonnull", since = "1.25.0")]
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
#[cfg_attr(not(stage0), rustc_nonnull_optimization_guaranteed)]
pub struct NonNull<T: ?Sized> {
pointer: *const T,
}
/// `NonNull` pointers are not `Send` because the data they reference may be aliased.
// N.B., this impl is unnecessary, but should provide better error messages.
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> !Send for NonNull<T> { }
/// `NonNull` pointers are not `Sync` because the data they reference may be aliased.
// N.B., this impl is unnecessary, but should provide better error messages.
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> !Sync for NonNull<T> { }
impl<T: Sized> NonNull<T> {
/// Creates a new `NonNull` that is dangling, but well-aligned.
///
/// This is useful for initializing types which lazily allocate, like
/// `Vec::new` does.
///
/// Note that the pointer value may potentially represent a valid pointer to
/// a `T`, which means this must not be used as a "not yet initialized"
/// sentinel value. Types that lazily allocate must track initialization by
/// some other means.
#[stable(feature = "nonnull", since = "1.25.0")]
#[inline]
pub const fn dangling() -> Self {
unsafe {
let ptr = mem::align_of::<T>() as *mut T;
NonNull::new_unchecked(ptr)
}
}
}
impl<T: ?Sized> NonNull<T> {
/// Creates a new `NonNull`.
///
/// # Safety
///
/// `ptr` must be non-null.
#[stable(feature = "nonnull", since = "1.25.0")]
#[inline]
pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
NonNull { pointer: ptr as _ }
}
/// Creates a new `NonNull` if `ptr` is non-null.
#[stable(feature = "nonnull", since = "1.25.0")]
#[inline]
pub fn new(ptr: *mut T) -> Option<Self> {
if !ptr.is_null() {
Some(unsafe { Self::new_unchecked(ptr) })
} else {
None
}
}
/// Acquires the underlying `*mut` pointer.
#[stable(feature = "nonnull", since = "1.25.0")]
#[inline]
pub const fn as_ptr(self) -> *mut T {
self.pointer as *mut T
}
/// Dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`.
#[stable(feature = "nonnull", since = "1.25.0")]
#[inline]
pub unsafe fn as_ref(&self) -> &T {
&*self.as_ptr()
}
/// Mutably dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`.
#[stable(feature = "nonnull", since = "1.25.0")]
#[inline]
pub unsafe fn as_mut(&mut self) -> &mut T {
&mut *self.as_ptr()
}
/// Cast to a pointer of another type
#[stable(feature = "nonnull_cast", since = "1.27.0")]
#[inline]
pub const fn cast<U>(self) -> NonNull<U> {
unsafe {
NonNull::new_unchecked(self.as_ptr() as *mut U)
}
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Clone for NonNull<T> {
fn clone(&self) -> Self {
*self
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Copy for NonNull<T> { }
#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized, U: ?Sized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> { }
#[unstable(feature = "dispatch_from_dyn", issue = "0")]
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> { }
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> fmt::Debug for NonNull<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> fmt::Pointer for NonNull<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Eq for NonNull<T> {}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> PartialEq for NonNull<T> {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.as_ptr() == other.as_ptr()
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Ord for NonNull<T> {
#[inline]
fn cmp(&self, other: &Self) -> Ordering {
self.as_ptr().cmp(&other.as_ptr())
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> PartialOrd for NonNull<T> {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.as_ptr().partial_cmp(&other.as_ptr())
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> hash::Hash for NonNull<T> {
#[inline]
fn hash<H: hash::Hasher>(&self, state: &mut H) {
self.as_ptr().hash(state)
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> From<Unique<T>> for NonNull<T> {
#[inline]
fn from(unique: Unique<T>) -> Self {
unsafe { NonNull { pointer: unique.pointer } }
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> From<&mut T> for NonNull<T> {
#[inline]
fn from(reference: &mut T) -> Self {
unsafe { NonNull { pointer: reference as *mut T } }
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> From<&T> for NonNull<T> {
#[inline]
fn from(reference: &T) -> Self {
unsafe { NonNull { pointer: reference as *const T } }
}
}
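
The `Option<NonNull<T>>` size claim in the docs above is directly observable; a minimal stand-alone sketch (only std assumed):

    use std::mem::size_of;
    use std::ptr::NonNull;

    fn main() {
        // 0 is a forbidden bit pattern for NonNull<T>, so Option<NonNull<T>>
        // can use it to encode None: no extra discriminant byte is needed.
        assert_eq!(size_of::<Option<NonNull<u8>>>(), size_of::<*mut u8>());
        // A dangling NonNull is fine as long as it is never dereferenced.
        let p: NonNull<u8> = NonNull::dangling();
        assert!(Some(p).is_some());
    }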

src/libcore/ptr/non_null.rs (new file, 226 lines)

@@ -0,0 +1,226 @@
use crate::convert::From;
use crate::ops::{CoerceUnsized, DispatchFromDyn};
use crate::fmt;
use crate::hash;
use crate::marker::Unsize;
use crate::mem;
use crate::ptr::Unique;
use crate::cmp::Ordering;
/// `*mut T` but non-zero and covariant.
///
/// This is often the correct thing to use when building data structures using
/// raw pointers, but is ultimately more dangerous to use because of its additional
/// properties. If you're not sure if you should use `NonNull<T>`, just use `*mut T`!
///
/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<NonNull<T>>` has the same size as `*mut T`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `NonNull<T>` is covariant over `T`. If this is incorrect
/// for your use case, you should include some [`PhantomData`] in your type to
/// provide invariance, such as `PhantomData<Cell<T>>` or `PhantomData<&'a mut T>`.
/// Usually this won't be necessary; covariance is correct for most safe abstractions,
/// such as `Box`, `Rc`, `Arc`, `Vec`, and `LinkedList`. This is the case because they
/// provide a public API that follows the normal shared XOR mutable rules of Rust.
///
/// Notice that `NonNull<T>` has a `From` instance for `&T`. However, this does
/// not change the fact that mutating through a (pointer derived from a) shared
/// reference is undefined behavior unless the mutation happens inside an
/// [`UnsafeCell<T>`]. The same goes for creating a mutable reference from a shared
/// reference. When using this `From` instance without an `UnsafeCell<T>`,
/// it is your responsibility to ensure that `as_mut` is never called, and `as_ptr`
/// is never used for mutation.
///
/// [`PhantomData`]: ../marker/struct.PhantomData.html
/// [`UnsafeCell<T>`]: ../cell/struct.UnsafeCell.html
#[stable(feature = "nonnull", since = "1.25.0")]
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
#[cfg_attr(not(stage0), rustc_nonnull_optimization_guaranteed)]
pub struct NonNull<T: ?Sized> {
pointer: *const T,
}
/// `NonNull` pointers are not `Send` because the data they reference may be aliased.
// N.B., this impl is unnecessary, but should provide better error messages.
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> !Send for NonNull<T> { }
/// `NonNull` pointers are not `Sync` because the data they reference may be aliased.
// N.B., this impl is unnecessary, but should provide better error messages.
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> !Sync for NonNull<T> { }
impl<T: Sized> NonNull<T> {
/// Creates a new `NonNull` that is dangling, but well-aligned.
///
/// This is useful for initializing types which lazily allocate, like
/// `Vec::new` does.
///
/// Note that the pointer value may potentially represent a valid pointer to
/// a `T`, which means this must not be used as a "not yet initialized"
/// sentinel value. Types that lazily allocate must track initialization by
/// some other means.
#[stable(feature = "nonnull", since = "1.25.0")]
#[inline]
pub const fn dangling() -> Self {
unsafe {
let ptr = mem::align_of::<T>() as *mut T;
NonNull::new_unchecked(ptr)
}
}
}
impl<T: ?Sized> NonNull<T> {
/// Creates a new `NonNull`.
///
/// # Safety
///
/// `ptr` must be non-null.
#[stable(feature = "nonnull", since = "1.25.0")]
#[inline]
pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
NonNull { pointer: ptr as _ }
}
/// Creates a new `NonNull` if `ptr` is non-null.
#[stable(feature = "nonnull", since = "1.25.0")]
#[inline]
pub fn new(ptr: *mut T) -> Option<Self> {
if !ptr.is_null() {
Some(unsafe { Self::new_unchecked(ptr) })
} else {
None
}
}
/// Acquires the underlying `*mut` pointer.
#[stable(feature = "nonnull", since = "1.25.0")]
#[inline]
pub const fn as_ptr(self) -> *mut T {
self.pointer as *mut T
}
/// Dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`.
#[stable(feature = "nonnull", since = "1.25.0")]
#[inline]
pub unsafe fn as_ref(&self) -> &T {
&*self.as_ptr()
}
/// Mutably dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`.
#[stable(feature = "nonnull", since = "1.25.0")]
#[inline]
pub unsafe fn as_mut(&mut self) -> &mut T {
&mut *self.as_ptr()
}
/// Cast to a pointer of another type
#[stable(feature = "nonnull_cast", since = "1.27.0")]
#[inline]
pub const fn cast<U>(self) -> NonNull<U> {
unsafe {
NonNull::new_unchecked(self.as_ptr() as *mut U)
}
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Clone for NonNull<T> {
#[inline]
fn clone(&self) -> Self {
*self
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Copy for NonNull<T> { }
#[unstable(feature = "coerce_unsized", issue = "27732")]
impl<T: ?Sized, U: ?Sized> CoerceUnsized<NonNull<U>> for NonNull<T> where T: Unsize<U> { }
#[unstable(feature = "dispatch_from_dyn", issue = "0")]
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<NonNull<U>> for NonNull<T> where T: Unsize<U> { }
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> fmt::Debug for NonNull<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> fmt::Pointer for NonNull<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Eq for NonNull<T> {}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> PartialEq for NonNull<T> {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.as_ptr() == other.as_ptr()
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> Ord for NonNull<T> {
#[inline]
fn cmp(&self, other: &Self) -> Ordering {
self.as_ptr().cmp(&other.as_ptr())
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> PartialOrd for NonNull<T> {
#[inline]
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.as_ptr().partial_cmp(&other.as_ptr())
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> hash::Hash for NonNull<T> {
#[inline]
fn hash<H: hash::Hasher>(&self, state: &mut H) {
self.as_ptr().hash(state)
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> From<Unique<T>> for NonNull<T> {
#[inline]
fn from(unique: Unique<T>) -> Self {
unsafe { NonNull::new_unchecked(unique.as_ptr()) }
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> From<&mut T> for NonNull<T> {
#[inline]
fn from(reference: &mut T) -> Self {
unsafe { NonNull { pointer: reference as *mut T } }
}
}
#[stable(feature = "nonnull", since = "1.25.0")]
impl<T: ?Sized> From<&T> for NonNull<T> {
#[inline]
fn from(reference: &T) -> Self {
unsafe { NonNull { pointer: reference as *const T } }
}
}

src/libcore/ptr/unique.rs (new file, 180 lines)

@@ -0,0 +1,180 @@
use crate::convert::From;
use crate::ops::{CoerceUnsized, DispatchFromDyn};
use crate::fmt;
use crate::marker::{PhantomData, Unsize};
use crate::mem;
use crate::ptr::NonNull;
/// A wrapper around a raw non-null `*mut T` that indicates that the possessor
/// of this wrapper owns the referent. Useful for building abstractions like
/// `Box<T>`, `Vec<T>`, `String`, and `HashMap<K, V>`.
///
/// Unlike `*mut T`, `Unique<T>` behaves "as if" it were an instance of `T`.
/// It implements `Send`/`Sync` if `T` is `Send`/`Sync`. It also implies
/// the kind of strong aliasing guarantees an instance of `T` can expect:
/// the referent of the pointer should not be modified without a unique path to
/// its owning Unique.
///
/// If you're uncertain of whether it's correct to use `Unique` for your purposes,
/// consider using `NonNull`, which has weaker semantics.
///
/// Unlike `*mut T`, the pointer must always be non-null, even if the pointer
/// is never dereferenced. This is so that enums may use this forbidden value
/// as a discriminant -- `Option<Unique<T>>` has the same size as `Unique<T>`.
/// However the pointer may still dangle if it isn't dereferenced.
///
/// Unlike `*mut T`, `Unique<T>` is covariant over `T`. This should always be correct
/// for any type which upholds Unique's aliasing requirements.
#[unstable(feature = "ptr_internals", issue = "0",
reason = "use NonNull instead and consider PhantomData<T> \
(if you also use #[may_dangle]), Send, and/or Sync")]
#[doc(hidden)]
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
pub struct Unique<T: ?Sized> {
pointer: *const T,
// NOTE: this marker has no consequences for variance, but is necessary
// for dropck to understand that we logically own a `T`.
//
// For details, see:
// https://github.com/rust-lang/rfcs/blob/master/text/0769-sound-generic-drop.md#phantom-data
_marker: PhantomData<T>,
}
/// `Unique` pointers are `Send` if `T` is `Send` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "ptr_internals", issue = "0")]
unsafe impl<T: Send + ?Sized> Send for Unique<T> { }
/// `Unique` pointers are `Sync` if `T` is `Sync` because the data they
/// reference is unaliased. Note that this aliasing invariant is
/// unenforced by the type system; the abstraction using the
/// `Unique` must enforce it.
#[unstable(feature = "ptr_internals", issue = "0")]
unsafe impl<T: Sync + ?Sized> Sync for Unique<T> { }
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: Sized> Unique<T> {
/// Creates a new `Unique` that is dangling, but well-aligned.
///
/// This is useful for initializing types which lazily allocate, like
/// `Vec::new` does.
///
/// Note that the pointer value may potentially represent a valid pointer to
/// a `T`, which means this must not be used as a "not yet initialized"
/// sentinel value. Types that lazily allocate must track initialization by
/// some other means.
// FIXME: rename to dangling() to match NonNull?
#[inline]
pub const fn empty() -> Self {
unsafe {
Unique::new_unchecked(mem::align_of::<T>() as *mut T)
}
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> Unique<T> {
/// Creates a new `Unique`.
///
/// # Safety
///
/// `ptr` must be non-null.
#[inline]
pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
Unique { pointer: ptr as _, _marker: PhantomData }
}
/// Creates a new `Unique` if `ptr` is non-null.
#[inline]
pub fn new(ptr: *mut T) -> Option<Self> {
if !ptr.is_null() {
Some(unsafe { Unique { pointer: ptr as _, _marker: PhantomData } })
} else {
None
}
}
/// Acquires the underlying `*mut` pointer.
#[inline]
pub const fn as_ptr(self) -> *mut T {
self.pointer as *mut T
}
/// Dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&*my_ptr.as_ptr()`.
#[inline]
pub unsafe fn as_ref(&self) -> &T {
&*self.as_ptr()
}
/// Mutably dereferences the content.
///
/// The resulting lifetime is bound to self so this behaves "as if"
/// it were actually an instance of T that is getting borrowed. If a longer
/// (unbound) lifetime is needed, use `&mut *my_ptr.as_ptr()`.
#[inline]
pub unsafe fn as_mut(&mut self) -> &mut T {
&mut *self.as_ptr()
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> Clone for Unique<T> {
#[inline]
fn clone(&self) -> Self {
*self
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> Copy for Unique<T> { }
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized, U: ?Sized> CoerceUnsized<Unique<U>> for Unique<T> where T: Unsize<U> { }
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized, U: ?Sized> DispatchFromDyn<Unique<U>> for Unique<T> where T: Unsize<U> { }
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> fmt::Debug for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> fmt::Pointer for Unique<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Pointer::fmt(&self.as_ptr(), f)
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> From<&mut T> for Unique<T> {
#[inline]
fn from(reference: &mut T) -> Self {
unsafe { Unique { pointer: reference as *mut T, _marker: PhantomData } }
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<T: ?Sized> From<&T> for Unique<T> {
#[inline]
fn from(reference: &T) -> Self {
unsafe { Unique { pointer: reference as *const T, _marker: PhantomData } }
}
}
#[unstable(feature = "ptr_internals", issue = "0")]
impl<'a, T: ?Sized> From<NonNull<T>> for Unique<T> {
#[inline]
fn from(p: NonNull<T>) -> Self {
unsafe { Unique::new_unchecked(p.as_ptr()) }
}
}
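
The `reason` string on the attribute above points code outside core at `NonNull` plus `PhantomData`; a rough stand-alone sketch of that pairing (type and method names here are illustrative, not from this commit):

    use std::marker::PhantomData;
    use std::ptr::NonNull;

    // NonNull<T> gives non-null + covariance; PhantomData<T> tells dropck
    // that the wrapper logically owns a T, just as Unique's marker does.
    struct OwnedPtr<T> {
        ptr: NonNull<T>,
        _marker: PhantomData<T>,
    }

    // Mirror Unique's conditional Send/Sync: sound only because OwnedPtr
    // is the unique owner of the referent.
    unsafe impl<T: Send> Send for OwnedPtr<T> {}
    unsafe impl<T: Sync> Sync for OwnedPtr<T> {}

    impl<T> OwnedPtr<T> {
        fn new(value: T) -> Self {
            // Box::into_raw never returns null, so new_unchecked is sound.
            let raw = Box::into_raw(Box::new(value));
            OwnedPtr { ptr: unsafe { NonNull::new_unchecked(raw) }, _marker: PhantomData }
        }
    }

    impl<T> Drop for OwnedPtr<T> {
        fn drop(&mut self) {
            // Rebuild the Box to run T's destructor and free the allocation.
            unsafe { drop(Box::from_raw(self.ptr.as_ptr())) }
        }
    }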

[next file]

@@ -2,7 +2,6 @@
use super::{
Pointer, EvalResult, AllocId, ScalarMaybeUndef, write_target_uint, read_target_uint, Scalar,
truncate,
};
use crate::ty::layout::{Size, Align};
@@ -407,18 +406,9 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
ScalarMaybeUndef::Undef => return self.mark_definedness(ptr, type_size, false),
};
let bytes = match val {
Scalar::Ptr(val) => {
assert_eq!(type_size, cx.data_layout().pointer_size);
val.offset.bytes() as u128
}
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, type_size.bytes());
debug_assert_eq!(truncate(bits, Size::from_bytes(size.into())), bits,
"Unexpected value of size {} when writing to memory", size);
bits
},
let bytes = match val.to_bits_or_ptr(type_size, cx) {
Err(val) => val.offset.bytes() as u128,
Ok(data) => data,
};
let endian = cx.data_layout().endian;

[next file]

@@ -349,6 +349,7 @@ impl<'tcx> AllocMap<'tcx> {
/// illegal and will likely ICE.
/// This function exists to allow const eval to detect the difference between evaluation-
/// local dangling pointers and allocations in constants/statics.
#[inline]
pub fn get(&self, id: AllocId) -> Option<AllocKind<'tcx>> {
self.id_to_kind.get(&id).cloned()
}
@@ -397,6 +398,7 @@ impl<'tcx> AllocMap<'tcx> {
// Methods to access integers in the target endianness
////////////////////////////////////////////////////////////////////////////////
#[inline]
pub fn write_target_uint(
endianness: layout::Endian,
mut target: &mut [u8],
@@ -409,6 +411,7 @@ pub fn write_target_uint(
}
}
#[inline]
pub fn read_target_uint(endianness: layout::Endian, mut source: &[u8]) -> Result<u128, io::Error> {
match endianness {
layout::Endian::Little => source.read_uint128::<LittleEndian>(source.len()),
@@ -420,8 +423,15 @@ pub fn read_target_uint(endianness: layout::Endian, mut source: &[u8]) -> Result
// Methods to facilitate working with signed integers stored in a u128
////////////////////////////////////////////////////////////////////////////////
/// Truncate `value` to `size` bits and then sign-extend it to 128 bits
/// (i.e., if it is negative, fill with 1's on the left).
#[inline]
pub fn sign_extend(value: u128, size: Size) -> u128 {
let size = size.bits();
if size == 0 {
// Truncated until nothing is left.
return 0;
}
// sign extend
let shift = 128 - size;
// shift the unsigned value to the left
@@ -429,8 +439,14 @@ pub fn sign_extend(value: u128, size: Size) -> u128 {
(((value << shift) as i128) >> shift) as u128
}
/// Truncate `value` to `size` bits.
#[inline]
pub fn truncate(value: u128, size: Size) -> u128 {
let size = size.bits();
if size == 0 {
// Truncated until nothing is left.
return 0;
}
let shift = 128 - size;
// truncate (shift left to drop out leftover values, shift right to fill with zeroes)
(value << shift) >> shift
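
A stand-alone model of the two helpers, with `Size` reduced to a plain bit count so it compiles on its own; the zero-width early return is the behavior this hunk adds:

    fn truncate(value: u128, size_bits: u64) -> u128 {
        if size_bits == 0 { return 0; } // zero-sized: nothing is left
        let shift = 128 - size_bits;
        (value << shift) >> shift
    }

    fn sign_extend(value: u128, size_bits: u64) -> u128 {
        if size_bits == 0 { return 0; }
        let shift = 128 - size_bits;
        // Arithmetic right shift on i128 replicates the sign bit.
        (((value << shift) as i128) >> shift) as u128
    }

    fn main() {
        assert_eq!(truncate(0x1_23, 8), 0x23);       // high bits dropped
        assert_eq!(sign_extend(0xFF, 8), u128::MAX); // -1i8 becomes -1i128's bit pattern
        assert_eq!(sign_extend(0x7F, 8), 0x7F);      // positive stays positive
        assert_eq!(truncate(0xABCD, 0), 0);          // the new guard
    }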

[next file]

@@ -20,11 +20,35 @@ pub trait PointerArithmetic: layout::HasDataLayout {
self.data_layout().pointer_size
}
//// Trunace the given value to the pointer size; also return whether there was an overflow
/// Helper function: truncate given value-"overflowed flag" pair to pointer size and
/// update "overflowed flag" if there was an overflow.
/// This should be called by all the other methods before returning!
#[inline]
fn truncate_to_ptr(&self, val: u128) -> (u64, bool) {
fn truncate_to_ptr(&self, (val, over): (u64, bool)) -> (u64, bool) {
let val = val as u128;
let max_ptr_plus_1 = 1u128 << self.pointer_size().bits();
((val % max_ptr_plus_1) as u64, val >= max_ptr_plus_1)
((val % max_ptr_plus_1) as u64, over || val >= max_ptr_plus_1)
}
#[inline]
fn overflowing_offset(&self, val: u64, i: u64) -> (u64, bool) {
let res = val.overflowing_add(i);
self.truncate_to_ptr(res)
}
// Overflow checking only works properly on the range from -u64 to +u64.
#[inline]
fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
// FIXME: is it possible to over/underflow here?
if i < 0 {
// Trickery to ensure that i64::min_value() works fine: compute n = -i.
// This formula only works for true negative values, it overflows for zero!
let n = u64::max_value() - (i as u64) + 1;
let res = val.overflowing_sub(n);
self.truncate_to_ptr(res)
} else {
self.overflowing_offset(val, i as u64)
}
}
#[inline]
@@ -33,32 +57,11 @@ pub trait PointerArithmetic: layout::HasDataLayout {
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}
#[inline]
fn overflowing_offset(&self, val: u64, i: u64) -> (u64, bool) {
let (res, over1) = val.overflowing_add(i);
let (res, over2) = self.truncate_to_ptr(u128::from(res));
(res, over1 || over2)
}
#[inline]
fn signed_offset<'tcx>(&self, val: u64, i: i64) -> EvalResult<'tcx, u64> {
let (res, over) = self.overflowing_signed_offset(val, i128::from(i));
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}
// Overflow checking only works properly on the range from -u64 to +u64.
#[inline]
fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
// FIXME: is it possible to over/underflow here?
if i < 0 {
// trickery to ensure that i64::min_value() works fine
// this formula only works for true negative values, it panics for zero!
let n = u64::max_value() - (i as u64) + 1;
val.overflowing_sub(n)
} else {
self.overflowing_offset(val, i as u64)
}
}
}
impl<T: layout::HasDataLayout> PointerArithmetic for T {}
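
A stand-alone model of the reworked arithmetic, passing the target pointer width explicitly instead of going through `HasDataLayout` (function names mirror the trait methods above):

    fn truncate_to_ptr(ptr_bits: u64, (val, over): (u64, bool)) -> (u64, bool) {
        let val = val as u128;
        let max_ptr_plus_1 = 1u128 << ptr_bits;
        // Wrap modulo 2^ptr_bits and fold any prior overflow into the flag.
        ((val % max_ptr_plus_1) as u64, over || val >= max_ptr_plus_1)
    }

    fn overflowing_offset(ptr_bits: u64, val: u64, i: u64) -> (u64, bool) {
        truncate_to_ptr(ptr_bits, val.overflowing_add(i))
    }

    fn overflowing_signed_offset(ptr_bits: u64, val: u64, i: i128) -> (u64, bool) {
        if i < 0 {
            // n = -i, written so that i64::min_value() cannot overflow.
            let n = u64::max_value() - (i as u64) + 1;
            truncate_to_ptr(ptr_bits, val.overflowing_sub(n))
        } else {
            overflowing_offset(ptr_bits, val, i as u64)
        }
    }

    fn main() {
        // On a 32-bit target, 0xFFFF_FFFF + 1 wraps to 0 and flags overflow.
        assert_eq!(overflowing_offset(32, 0xFFFF_FFFF, 1), (0, true));
        // Stepping below zero wraps modulo 2^64 first, then truncates.
        assert_eq!(overflowing_signed_offset(32, 0, -1), (0xFFFF_FFFF, true));
    }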

[next file]

@@ -87,11 +87,11 @@ impl<'tcx> ConstValue<'tcx> {
RustcEncodable, RustcDecodable, Hash, HashStable)]
pub enum Scalar<Tag=(), Id=AllocId> {
/// The raw bytes of a simple value.
Bits {
/// The first `size` bytes are the value.
Raw {
/// The first `size` bytes of `data` are the value.
/// Do not try to read less or more bytes than that. The remaining bytes must be 0.
data: u128,
size: u8,
bits: u128,
},
/// A pointer into an `Allocation`. An `Allocation` in the `memory` module has a list of
@@ -108,16 +108,14 @@ impl<Tag: fmt::Debug, Id: fmt::Debug> fmt::Debug for Scalar<Tag, Id> {
match self {
Scalar::Ptr(ptr) =>
write!(f, "{:?}", ptr),
&Scalar::Bits { bits, size } => {
&Scalar::Raw { data, size } => {
Scalar::check_data(data, size);
if size == 0 {
assert_eq!(bits, 0, "ZST value must be 0");
write!(f, "<ZST>")
} else {
assert_eq!(truncate(bits, Size::from_bytes(size as u64)), bits,
"Scalar value {:#x} exceeds size of {} bytes", bits, size);
// Format as hex number wide enough to fit any value of the given `size`.
// So bits=20, size=1 will be "0x14", but with size=4 it'll be "0x00000014".
write!(f, "0x{:>0width$x}", bits, width=(size*2) as usize)
// So data=20, size=1 will be "0x14", but with size=4 it'll be "0x00000014".
write!(f, "0x{:>0width$x}", data, width=(size*2) as usize)
}
}
}
@@ -128,17 +126,23 @@ impl<Tag> fmt::Display for Scalar<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Scalar::Ptr(_) => write!(f, "a pointer"),
Scalar::Bits { bits, .. } => write!(f, "{}", bits),
Scalar::Raw { data, .. } => write!(f, "{}", data),
}
}
}
impl<'tcx> Scalar<()> {
#[inline(always)]
fn check_data(data: u128, size: u8) {
debug_assert_eq!(truncate(data, Size::from_bytes(size as u64)), data,
"Scalar value {:#x} exceeds size of {} bytes", data, size);
}
#[inline]
pub fn with_tag<Tag>(self, new_tag: Tag) -> Scalar<Tag> {
match self {
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.with_tag(new_tag)),
Scalar::Bits { bits, size } => Scalar::Bits { bits, size },
Scalar::Raw { data, size } => Scalar::Raw { data, size },
}
}
@@ -155,31 +159,31 @@ impl<'tcx, Tag> Scalar<Tag> {
pub fn erase_tag(self) -> Scalar {
match self {
Scalar::Ptr(ptr) => Scalar::Ptr(ptr.erase_tag()),
Scalar::Bits { bits, size } => Scalar::Bits { bits, size },
Scalar::Raw { data, size } => Scalar::Raw { data, size },
}
}
#[inline]
pub fn ptr_null(cx: &impl HasDataLayout) -> Self {
Scalar::Bits {
bits: 0,
Scalar::Raw {
data: 0,
size: cx.data_layout().pointer_size.bytes() as u8,
}
}
#[inline]
pub fn zst() -> Self {
Scalar::Bits { bits: 0, size: 0 }
Scalar::Raw { data: 0, size: 0 }
}
#[inline]
pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
let dl = cx.data_layout();
match self {
Scalar::Bits { bits, size } => {
Scalar::Raw { data, size } => {
assert_eq!(size as u64, dl.pointer_size.bytes());
Ok(Scalar::Bits {
bits: dl.offset(bits as u64, i.bytes())? as u128,
Ok(Scalar::Raw {
data: dl.offset(data as u64, i.bytes())? as u128,
size,
})
}
@@ -191,10 +195,10 @@ impl<'tcx, Tag> Scalar<Tag> {
pub fn ptr_wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
let dl = cx.data_layout();
match self {
Scalar::Bits { bits, size } => {
Scalar::Raw { data, size } => {
assert_eq!(size as u64, dl.pointer_size.bytes());
Scalar::Bits {
bits: dl.overflowing_offset(bits as u64, i.bytes()).0 as u128,
Scalar::Raw {
data: dl.overflowing_offset(data as u64, i.bytes()).0 as u128,
size,
}
}
@@ -206,10 +210,10 @@ impl<'tcx, Tag> Scalar<Tag> {
pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
let dl = cx.data_layout();
match self {
Scalar::Bits { bits, size } => {
Scalar::Raw { data, size } => {
assert_eq!(size as u64, dl.pointer_size().bytes());
Ok(Scalar::Bits {
bits: dl.signed_offset(bits as u64, i)? as u128,
Ok(Scalar::Raw {
data: dl.signed_offset(data as u64, i)? as u128,
size,
})
}
@@ -221,10 +225,10 @@ impl<'tcx, Tag> Scalar<Tag> {
pub fn ptr_wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
let dl = cx.data_layout();
match self {
Scalar::Bits { bits, size } => {
Scalar::Raw { data, size } => {
assert_eq!(size as u64, dl.pointer_size.bytes());
Scalar::Bits {
bits: dl.overflowing_signed_offset(bits as u64, i128::from(i)).0 as u128,
Scalar::Raw {
data: dl.overflowing_signed_offset(data as u64, i128::from(i)).0 as u128,
size,
}
}
@@ -232,14 +236,14 @@ impl<'tcx, Tag> Scalar<Tag> {
}
}
/// Returns this pointers offset from the allocation base, or from NULL (for
/// Returns this pointer's offset from the allocation base, or from NULL (for
/// integer pointers).
#[inline]
pub fn get_ptr_offset(self, cx: &impl HasDataLayout) -> Size {
match self {
Scalar::Bits { bits, size } => {
Scalar::Raw { data, size } => {
assert_eq!(size as u64, cx.pointer_size().bytes());
Size::from_bytes(bits as u64)
Size::from_bytes(data as u64)
}
Scalar::Ptr(ptr) => ptr.offset,
}
@@ -248,9 +252,9 @@ impl<'tcx, Tag> Scalar<Tag> {
#[inline]
pub fn is_null_ptr(self, cx: &impl HasDataLayout) -> bool {
match self {
Scalar::Bits { bits, size } => {
Scalar::Raw { data, size } => {
assert_eq!(size as u64, cx.data_layout().pointer_size.bytes());
bits == 0
data == 0
},
Scalar::Ptr(_) => false,
}
@@ -258,20 +262,22 @@ impl<'tcx, Tag> Scalar<Tag> {
#[inline]
pub fn from_bool(b: bool) -> Self {
Scalar::Bits { bits: b as u128, size: 1 }
Scalar::Raw { data: b as u128, size: 1 }
}
#[inline]
pub fn from_char(c: char) -> Self {
Scalar::Bits { bits: c as u128, size: 4 }
Scalar::Raw { data: c as u128, size: 4 }
}
#[inline]
pub fn from_uint(i: impl Into<u128>, size: Size) -> Self {
let i = i.into();
debug_assert_eq!(truncate(i, size), i,
"Unsigned value {} does not fit in {} bits", i, size.bits());
Scalar::Bits { bits: i, size: size.bytes() as u8 }
assert_eq!(
truncate(i, size), i,
"Unsigned value {:#x} does not fit in {} bits", i, size.bits()
);
Scalar::Raw { data: i, size: size.bytes() as u8 }
}
#[inline]
@@ -279,28 +285,51 @@ impl<'tcx, Tag> Scalar<Tag> {
let i = i.into();
// `into` performed sign extension, we have to truncate
let truncated = truncate(i as u128, size);
debug_assert_eq!(sign_extend(truncated, size) as i128, i,
"Signed value {} does not fit in {} bits", i, size.bits());
Scalar::Bits { bits: truncated, size: size.bytes() as u8 }
assert_eq!(
sign_extend(truncated, size) as i128, i,
"Signed value {:#x} does not fit in {} bits", i, size.bits()
);
Scalar::Raw { data: truncated, size: size.bytes() as u8 }
}
#[inline]
pub fn from_f32(f: f32) -> Self {
Scalar::Bits { bits: f.to_bits() as u128, size: 4 }
Scalar::Raw { data: f.to_bits() as u128, size: 4 }
}
#[inline]
pub fn from_f64(f: f64) -> Self {
Scalar::Bits { bits: f.to_bits() as u128, size: 8 }
Scalar::Raw { data: f.to_bits() as u128, size: 8 }
}
#[inline]
pub fn to_bits_or_ptr(
self,
target_size: Size,
cx: &impl HasDataLayout,
) -> Result<u128, Pointer<Tag>> {
match self {
Scalar::Raw { data, size } => {
assert_eq!(target_size.bytes(), size as u64);
assert_ne!(size, 0, "you should never look at the bits of a ZST");
Scalar::check_data(data, size);
Ok(data)
}
Scalar::Ptr(ptr) => {
assert_eq!(target_size, cx.data_layout().pointer_size);
Err(ptr)
}
}
}
#[inline]
pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
match self {
Scalar::Bits { bits, size } => {
Scalar::Raw { data, size } => {
assert_eq!(target_size.bytes(), size as u64);
assert_ne!(size, 0, "to_bits cannot be used with zsts");
Ok(bits)
assert_ne!(size, 0, "you should never look at the bits of a ZST");
Scalar::check_data(data, size);
Ok(data)
}
Scalar::Ptr(_) => err!(ReadPointerAsBytes),
}
@@ -309,8 +338,8 @@ impl<'tcx, Tag> Scalar<Tag> {
#[inline]
pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
match self {
Scalar::Bits { bits: 0, .. } => err!(InvalidNullPointerUsage),
Scalar::Bits { .. } => err!(ReadBytesAsPointer),
Scalar::Raw { data: 0, .. } => err!(InvalidNullPointerUsage),
Scalar::Raw { .. } => err!(ReadBytesAsPointer),
Scalar::Ptr(p) => Ok(p),
}
}
@@ -318,7 +347,7 @@ impl<'tcx, Tag> Scalar<Tag> {
#[inline]
pub fn is_bits(self) -> bool {
match self {
Scalar::Bits { .. } => true,
Scalar::Raw { .. } => true,
_ => false,
}
}
@@ -333,8 +362,8 @@ impl<'tcx, Tag> Scalar<Tag> {
pub fn to_bool(self) -> EvalResult<'tcx, bool> {
match self {
Scalar::Bits { bits: 0, size: 1 } => Ok(false),
Scalar::Bits { bits: 1, size: 1 } => Ok(true),
Scalar::Raw { data: 0, size: 1 } => Ok(false),
Scalar::Raw { data: 1, size: 1 } => Ok(true),
_ => err!(InvalidBool),
}
}
@@ -350,27 +379,23 @@ impl<'tcx, Tag> Scalar<Tag> {
pub fn to_u8(self) -> EvalResult<'static, u8> {
let sz = Size::from_bits(8);
let b = self.to_bits(sz)?;
assert_eq!(b as u8 as u128, b);
Ok(b as u8)
}
pub fn to_u32(self) -> EvalResult<'static, u32> {
let sz = Size::from_bits(32);
let b = self.to_bits(sz)?;
assert_eq!(b as u32 as u128, b);
Ok(b as u32)
}
pub fn to_u64(self) -> EvalResult<'static, u64> {
let sz = Size::from_bits(64);
let b = self.to_bits(sz)?;
assert_eq!(b as u64 as u128, b);
Ok(b as u64)
}
pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'static, u64> {
let b = self.to_bits(cx.data_layout().pointer_size)?;
assert_eq!(b as u64 as u128, b);
Ok(b as u64)
}
@@ -378,7 +403,6 @@ impl<'tcx, Tag> Scalar<Tag> {
let sz = Size::from_bits(8);
let b = self.to_bits(sz)?;
let b = sign_extend(b, sz) as i128;
assert_eq!(b as i8 as i128, b);
Ok(b as i8)
}
@@ -386,7 +410,6 @@ impl<'tcx, Tag> Scalar<Tag> {
let sz = Size::from_bits(32);
let b = self.to_bits(sz)?;
let b = sign_extend(b, sz) as i128;
assert_eq!(b as i32 as i128, b);
Ok(b as i32)
}
@@ -394,14 +417,13 @@ impl<'tcx, Tag> Scalar<Tag> {
let sz = Size::from_bits(64);
let b = self.to_bits(sz)?;
let b = sign_extend(b, sz) as i128;
assert_eq!(b as i64 as i128, b);
Ok(b as i64)
}
pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'static, i64> {
let b = self.to_bits(cx.data_layout().pointer_size)?;
let b = sign_extend(b, cx.data_layout().pointer_size) as i128;
assert_eq!(b as i64 as i128, b);
let sz = cx.data_layout().pointer_size;
let b = self.to_bits(sz)?;
let b = sign_extend(b, sz) as i128;
Ok(b as i64)
}
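
A stand-alone sketch of the renamed variant and the new `to_bits_or_ptr` helper; `Pointer` is reduced to a bare offset here, whereas rustc's real type also carries an allocation id and tag:

    #[derive(Debug, PartialEq)]
    enum Scalar {
        Raw { data: u128, size: u8 },
        Ptr { offset: u64 },
    }

    impl Scalar {
        // Callers that can handle both cases get the pointer back in Err
        // instead of an interpreter error, unlike to_bits/to_ptr above.
        fn to_bits_or_ptr(self, target_size: u8) -> Result<u128, u64> {
            match self {
                Scalar::Raw { data, size } => {
                    assert_eq!(target_size, size);
                    assert_ne!(size, 0, "you should never look at the bits of a ZST");
                    Ok(data)
                }
                Scalar::Ptr { offset } => Err(offset),
            }
        }
    }

    fn main() {
        assert_eq!(Scalar::Raw { data: 42, size: 8 }.to_bits_or_ptr(8), Ok(42));
        assert_eq!(Scalar::Ptr { offset: 16 }.to_bits_or_ptr(8), Err(16));
    }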

[next file]

@@ -1669,10 +1669,7 @@ impl<'tcx> TerminatorKind<'tcx> {
.map(|&u| {
tcx.mk_const(ty::Const {
val: ConstValue::Scalar(
Scalar::Bits {
bits: u,
size: size.bytes() as u8,
}.into(),
Scalar::from_uint(u, size).into(),
),
ty: switch_ty,
}).to_string().into()

[next file]

@@ -117,16 +117,16 @@ impl LinkerPluginLto {
}
#[derive(Clone, PartialEq, Hash)]
pub enum PgoGenerate {
pub enum SwitchWithOptPath {
Enabled(Option<PathBuf>),
Disabled,
}
impl PgoGenerate {
impl SwitchWithOptPath {
pub fn enabled(&self) -> bool {
match *self {
PgoGenerate::Enabled(_) => true,
PgoGenerate::Disabled => false,
SwitchWithOptPath::Enabled(_) => true,
SwitchWithOptPath::Disabled => false,
}
}
}
@@ -834,7 +834,7 @@ macro_rules! options {
pub const parse_linker_plugin_lto: Option<&str> =
Some("either a boolean (`yes`, `no`, `on`, `off`, etc), \
or the path to the linker plugin");
pub const parse_pgo_generate: Option<&str> =
pub const parse_switch_with_opt_path: Option<&str> =
Some("an optional path to the profiling data output directory");
pub const parse_merge_functions: Option<&str> =
Some("one of: `disabled`, `trampolines`, or `aliases`");
@@ -842,7 +842,7 @@ macro_rules! options {
#[allow(dead_code)]
mod $mod_set {
use super::{$struct_name, Passes, Sanitizer, LtoCli, LinkerPluginLto, PgoGenerate};
use super::{$struct_name, Passes, Sanitizer, LtoCli, LinkerPluginLto, SwitchWithOptPath};
use rustc_target::spec::{LinkerFlavor, MergeFunctions, PanicStrategy, RelroLevel};
use std::path::PathBuf;
use std::str::FromStr;
@@ -1097,10 +1097,10 @@ macro_rules! options {
true
}
fn parse_pgo_generate(slot: &mut PgoGenerate, v: Option<&str>) -> bool {
fn parse_switch_with_opt_path(slot: &mut SwitchWithOptPath, v: Option<&str>) -> bool {
*slot = match v {
None => PgoGenerate::Enabled(None),
Some(path) => PgoGenerate::Enabled(Some(PathBuf::from(path))),
None => SwitchWithOptPath::Enabled(None),
Some(path) => SwitchWithOptPath::Enabled(Some(PathBuf::from(path))),
};
true
}
@@ -1379,7 +1379,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
"extra arguments to prepend to the linker invocation (space separated)"),
profile: bool = (false, parse_bool, [TRACKED],
"insert profiling code"),
pgo_gen: PgoGenerate = (PgoGenerate::Disabled, parse_pgo_generate, [TRACKED],
pgo_gen: SwitchWithOptPath = (SwitchWithOptPath::Disabled,
parse_switch_with_opt_path, [TRACKED],
"Generate PGO profile data, to a given file, or to the default location if it's empty."),
pgo_use: Option<PathBuf> = (None, parse_opt_pathbuf, [TRACKED],
"Use PGO profile data from the given profile file."),
@@ -1447,7 +1448,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
"don't interleave execution of lints; allows benchmarking individual lints"),
crate_attr: Vec<String> = (Vec::new(), parse_string_push, [TRACKED],
"inject the given attribute in the crate"),
self_profile: bool = (false, parse_bool, [UNTRACKED],
self_profile: SwitchWithOptPath = (SwitchWithOptPath::Disabled,
parse_switch_with_opt_path, [UNTRACKED],
"run the self profiler and output the raw event data"),
self_profile_events: Option<Vec<String>> = (None, parse_opt_comma_list, [UNTRACKED],
"specifies which kinds of events get recorded by the self profiler"),
@@ -2558,7 +2560,7 @@ mod dep_tracking {
use std::path::PathBuf;
use std::collections::hash_map::DefaultHasher;
use super::{CrateType, DebugInfo, ErrorOutputType, OptLevel, OutputTypes,
Passes, Sanitizer, LtoCli, LinkerPluginLto, PgoGenerate};
Passes, Sanitizer, LtoCli, LinkerPluginLto, SwitchWithOptPath};
use syntax::feature_gate::UnstableFeatures;
use rustc_target::spec::{MergeFunctions, PanicStrategy, RelroLevel, TargetTriple};
use syntax::edition::Edition;
@@ -2626,7 +2628,7 @@ mod dep_tracking {
impl_dep_tracking_hash_via_hash!(TargetTriple);
impl_dep_tracking_hash_via_hash!(Edition);
impl_dep_tracking_hash_via_hash!(LinkerPluginLto);
impl_dep_tracking_hash_via_hash!(PgoGenerate);
impl_dep_tracking_hash_via_hash!(SwitchWithOptPath);
impl_dep_tracking_hash_for_sortable_vec_of!(String);
impl_dep_tracking_hash_for_sortable_vec_of!(PathBuf);
@@ -2694,7 +2696,7 @@ mod tests {
build_session_options_and_crate_config,
to_crate_config
};
use crate::session::config::{LtoCli, LinkerPluginLto, PgoGenerate, ExternEntry};
use crate::session::config::{LtoCli, LinkerPluginLto, SwitchWithOptPath, ExternEntry};
use crate::session::build_session;
use crate::session::search_paths::SearchPath;
use std::collections::{BTreeMap, BTreeSet};
@@ -3207,7 +3209,7 @@ mod tests {
assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
opts = reference.clone();
opts.debugging_opts.pgo_gen = PgoGenerate::Enabled(None);
opts.debugging_opts.pgo_gen = SwitchWithOptPath::Enabled(None);
assert_ne!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
opts = reference.clone();
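
A stand-alone sketch of the generalized option type and its parser, matching the arms shown above: a bare `-Zself-profile` yields `Enabled(None)`, and `-Zself-profile=DIR` yields `Enabled(Some(dir))`:

    use std::path::PathBuf;

    #[derive(Clone, PartialEq, Debug)]
    enum SwitchWithOptPath {
        Enabled(Option<PathBuf>),
        Disabled,
    }

    fn parse_switch_with_opt_path(slot: &mut SwitchWithOptPath, v: Option<&str>) -> bool {
        *slot = match v {
            None => SwitchWithOptPath::Enabled(None),
            Some(path) => SwitchWithOptPath::Enabled(Some(PathBuf::from(path))),
        };
        true
    }

    fn main() {
        let mut slot = SwitchWithOptPath::Disabled;
        parse_switch_with_opt_path(&mut slot, Some("/tmp/profiles"));
        assert_eq!(slot, SwitchWithOptPath::Enabled(Some(PathBuf::from("/tmp/profiles"))));
    }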

[next file]

@@ -9,7 +9,7 @@ use crate::lint;
use crate::lint::builtin::BuiltinLintDiagnostics;
use crate::middle::allocator::AllocatorKind;
use crate::middle::dependency_format;
use crate::session::config::OutputType;
use crate::session::config::{OutputType, SwitchWithOptPath};
use crate::session::search_paths::{PathKind, SearchPath};
use crate::util::nodemap::{FxHashMap, FxHashSet};
use crate::util::common::{duration_to_secs_str, ErrorReported};
@@ -1137,8 +1137,18 @@ fn build_session_(
driver_lint_caps: FxHashMap<lint::LintId, lint::Level>,
) -> Session {
let self_profiler =
if sopts.debugging_opts.self_profile {
let profiler = SelfProfiler::new(&sopts.debugging_opts.self_profile_events);
if let SwitchWithOptPath::Enabled(ref d) = sopts.debugging_opts.self_profile {
let directory = if let Some(ref directory) = d {
directory
} else {
std::path::Path::new(".")
};
let profiler = SelfProfiler::new(
directory,
sopts.crate_name.as_ref().map(|s| &s[..]),
&sopts.debugging_opts.self_profile_events
);
match profiler {
Ok(profiler) => {
crate::ty::query::QueryName::register_with_profiler(&profiler);

[next file]

@@ -1001,7 +1001,7 @@ impl<'tcx> CommonConsts<'tcx> {
CommonConsts {
err: mk_const(ty::Const {
val: ConstValue::Scalar(Scalar::Bits { bits: 0, size: 0 }),
val: ConstValue::Scalar(Scalar::zst()),
ty: types.err,
}),
}

[next file]

@@ -845,22 +845,22 @@ pub trait PrettyPrinter<'gcx: 'tcx, 'tcx>:
p!(write("{}", name));
return Ok(self);
}
if let ConstValue::Scalar(Scalar::Bits { bits, .. }) = ct.val {
if let ConstValue::Scalar(Scalar::Raw { data, .. }) = ct.val {
match ct.ty.sty {
ty::Bool => {
p!(write("{}", if bits == 0 { "false" } else { "true" }));
p!(write("{}", if data == 0 { "false" } else { "true" }));
return Ok(self);
},
ty::Float(ast::FloatTy::F32) => {
p!(write("{}f32", Single::from_bits(bits)));
p!(write("{}f32", Single::from_bits(data)));
return Ok(self);
},
ty::Float(ast::FloatTy::F64) => {
p!(write("{}f64", Double::from_bits(bits)));
p!(write("{}f64", Double::from_bits(data)));
return Ok(self);
},
ty::Uint(ui) => {
p!(write("{}{}", bits, ui));
p!(write("{}{}", data, ui));
return Ok(self);
},
ty::Int(i) =>{
@@ -868,11 +868,11 @@ pub trait PrettyPrinter<'gcx: 'tcx, 'tcx>:
let size = self.tcx().layout_of(ty::ParamEnv::empty().and(ty))
.unwrap()
.size;
p!(write("{}{}", sign_extend(bits, size) as i128, i));
p!(write("{}{}", sign_extend(data, size) as i128, i));
return Ok(self);
},
ty::Char => {
p!(write("{:?}", ::std::char::from_u32(bits as u32).unwrap()));
p!(write("{:?}", ::std::char::from_u32(data as u32).unwrap()));
return Ok(self);
}
_ => {},
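
How a couple of the arms above recover typed output from the raw `data` payload, sketched stand-alone with plain floats standing in for `rustc_apfloat`:

    fn main() {
        let data: u128 = 1;
        println!("{}", if data == 0 { "false" } else { "true" }); // ty::Bool
        let bits = 1.5f64.to_bits() as u128;
        println!("{}f64", f64::from_bits(bits as u64));           // ty::Float
        let ch = std::char::from_u32(0x1F980).unwrap();
        println!("{:?}", ch);                                     // ty::Char
    }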

[next file]

@@ -613,7 +613,7 @@ where
(ConstValue::Placeholder(p1), ConstValue::Placeholder(p2)) if p1 == p2 => {
Ok(a)
}
(ConstValue::Scalar(Scalar::Bits { .. }), _) if a == b => {
(ConstValue::Scalar(Scalar::Raw { .. }), _) if a == b => {
Ok(a)
}
(ConstValue::ByRef(..), _) => {

[next file]

@@ -3,7 +3,7 @@
use crate::hir;
use crate::hir::def_id::DefId;
use crate::infer::canonical::Canonical;
use crate::mir::interpret::{ConstValue, truncate};
use crate::mir::interpret::ConstValue;
use crate::middle::region;
use polonius_engine::Atom;
use rustc_data_structures::indexed_vec::Idx;
@@ -2232,14 +2232,12 @@ impl<'tcx> Const<'tcx> {
let size = tcx.layout_of(ty).unwrap_or_else(|e| {
panic!("could not compute layout for {:?}: {:?}", ty, e)
}).size;
let truncated = truncate(bits, size);
assert_eq!(truncated, bits, "from_bits called with untruncated value");
Self::from_scalar(tcx, Scalar::Bits { bits, size: size.bytes() as u8 }, ty.value)
Self::from_scalar(tcx, Scalar::from_uint(bits, size), ty.value)
}
#[inline]
pub fn zero_sized(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) -> &'tcx Self {
Self::from_scalar(tcx, Scalar::Bits { bits: 0, size: 0 }, ty)
Self::from_scalar(tcx, Scalar::zst(), ty)
}
#[inline]

[next file]

@@ -1,6 +1,8 @@
use std::borrow::Cow;
use std::error::Error;
use std::fs;
use std::mem::{self, Discriminant};
use std::path::Path;
use std::process;
use std::thread::ThreadId;
use std::u32;
@@ -71,10 +73,17 @@ pub struct SelfProfiler {
}
impl SelfProfiler {
pub fn new(event_filters: &Option<Vec<String>>) -> Result<SelfProfiler, Box<dyn Error>> {
let filename = format!("pid-{}.rustc_profile", process::id());
let path = std::path::Path::new(&filename);
let profiler = Profiler::new(path)?;
pub fn new(
output_directory: &Path,
crate_name: Option<&str>,
event_filters: &Option<Vec<String>>
) -> Result<SelfProfiler, Box<dyn Error>> {
fs::create_dir_all(output_directory)?;
let crate_name = crate_name.unwrap_or("unknown-crate");
let filename = format!("{}-{}.rustc_profile", crate_name, process::id());
let path = output_directory.join(&filename);
let profiler = Profiler::new(&path)?;
let query_event_kind = profiler.alloc_string("Query");
let generic_activity_event_kind = profiler.alloc_string("GenericActivity");
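
The new output-path logic in isolation, as a sketch rather than rustc's exact code: the directory (supplied via something like `rustc -Zself-profile=DIR`) is created if missing, and the file is named after the crate, falling back to "unknown-crate", plus the process id:

    use std::fs;
    use std::path::{Path, PathBuf};
    use std::process;

    fn profile_output_path(
        output_directory: &Path,
        crate_name: Option<&str>,
    ) -> std::io::Result<PathBuf> {
        fs::create_dir_all(output_directory)?;
        let crate_name = crate_name.unwrap_or("unknown-crate");
        let filename = format!("{}-{}.rustc_profile", crate_name, process::id());
        Ok(output_directory.join(filename))
    }

    fn main() -> std::io::Result<()> {
        let path = profile_output_path(Path::new("/tmp/self-profile"), Some("mycrate"))?;
        println!("events would be written to {}", path.display());
        Ok(())
    }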

[next file]

@@ -13,7 +13,7 @@ use crate::LlvmCodegenBackend;
use rustc::hir::def_id::LOCAL_CRATE;
use rustc_codegen_ssa::back::write::{CodegenContext, ModuleConfig, run_assembler};
use rustc_codegen_ssa::traits::*;
use rustc::session::config::{self, OutputType, Passes, Lto, PgoGenerate};
use rustc::session::config::{self, OutputType, Passes, Lto, SwitchWithOptPath};
use rustc::session::Session;
use rustc::ty::TyCtxt;
use rustc_codegen_ssa::{RLIB_BYTECODE_EXTENSION, ModuleCodegen, CompiledModule};
@@ -707,7 +707,7 @@ pub unsafe fn with_llvm_pmb(llmod: &llvm::Module,
let inline_threshold = config.inline_threshold;
let pgo_gen_path = match config.pgo_gen {
PgoGenerate::Enabled(ref opt_dir_path) => {
SwitchWithOptPath::Enabled(ref opt_dir_path) => {
let path = if let Some(dir_path) = opt_dir_path {
dir_path.join("default_%m.profraw")
} else {
@@ -716,7 +716,7 @@ pub unsafe fn with_llvm_pmb(llmod: &llvm::Module,
Some(CString::new(format!("{}", path.display())).unwrap())
}
PgoGenerate::Disabled => {
SwitchWithOptPath::Disabled => {
None
}
};

[next file]

@@ -294,13 +294,13 @@ impl ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
) -> &'ll Value {
let bitsize = if layout.is_bool() { 1 } else { layout.value.size(self).bits() };
match cv {
Scalar::Bits { size: 0, .. } => {
Scalar::Raw { size: 0, .. } => {
assert_eq!(0, layout.value.size(self).bytes());
self.const_undef(self.type_ix(0))
},
Scalar::Bits { bits, size } => {
Scalar::Raw { data, size } => {
assert_eq!(size as u64, layout.value.size(self).bytes());
let llval = self.const_uint_big(self.type_ix(bitsize), bits);
let llval = self.const_uint_big(self.type_ix(bitsize), data);
if layout.value == layout::Pointer {
unsafe { llvm::LLVMConstIntToPtr(llval, llty) }
} else {

[next file]

@@ -13,7 +13,7 @@ use rustc::dep_graph::{WorkProduct, WorkProductId, WorkProductFileKind};
use rustc::dep_graph::cgu_reuse_tracker::CguReuseTracker;
use rustc::middle::cstore::EncodedMetadata;
use rustc::session::config::{self, OutputFilenames, OutputType, Passes, Lto,
Sanitizer, PgoGenerate};
Sanitizer, SwitchWithOptPath};
use rustc::session::Session;
use rustc::util::nodemap::FxHashMap;
use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
@@ -56,7 +56,7 @@ pub struct ModuleConfig {
/// Some(level) to optimize binary size, or None to not affect program size.
pub opt_size: Option<config::OptLevel>,
pub pgo_gen: PgoGenerate,
pub pgo_gen: SwitchWithOptPath,
pub pgo_use: Option<PathBuf>,
// Flags indicating which outputs to produce.
@@ -94,7 +94,7 @@ impl ModuleConfig {
opt_level: None,
opt_size: None,
pgo_gen: PgoGenerate::Disabled,
pgo_gen: SwitchWithOptPath::Disabled,
pgo_use: None,
emit_no_opt_bc: false,

[next file]

@@ -443,7 +443,7 @@ impl Printer<'tcx, 'tcx> for SymbolPrinter<'_, 'tcx> {
ct: &'tcx ty::Const<'tcx>,
) -> Result<Self::Const, Self::Error> {
// only print integers
if let ConstValue::Scalar(Scalar::Bits { .. }) = ct.val {
if let ConstValue::Scalar(Scalar::Raw { .. }) = ct.val {
if ct.ty.is_integral() {
return self.pretty_print_const(ct);
}

[next file]

@@ -115,7 +115,7 @@ fn op_to_const<'tcx>(
ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id),
ptr.offset.bytes(),
),
Scalar::Bits { .. } => (
Scalar::Raw { .. } => (
ecx.tcx.intern_const_alloc(Allocation::from_byte_aligned_bytes(b"", ())),
0,
),

[next file]

@@ -1,5 +1,5 @@
use syntax::ast;
use rustc::ty::{self, Ty, TyCtxt, ParamEnv};
use rustc::ty::{self, Ty, TyCtxt, ParamEnv, layout::Size};
use syntax_pos::symbol::Symbol;
use rustc::mir::interpret::{ConstValue, Scalar};
@@ -23,10 +23,7 @@ crate fn lit_to_const<'a, 'gcx, 'tcx>(
trace!("trunc {} with size {} and shift {}", n, width.bits(), 128 - width.bits());
let result = truncate(n, width);
trace!("trunc result: {}", result);
Ok(ConstValue::Scalar(Scalar::Bits {
bits: result,
size: width.bytes() as u8,
}))
Ok(ConstValue::Scalar(Scalar::from_uint(result, width)))
};
use rustc::mir::interpret::*;
@@ -50,10 +47,7 @@ crate fn lit_to_const<'a, 'gcx, 'tcx>(
let id = tcx.allocate_bytes(data);
ConstValue::Scalar(Scalar::Ptr(id.into()))
},
LitKind::Byte(n) => ConstValue::Scalar(Scalar::Bits {
bits: n as u128,
size: 1,
}),
LitKind::Byte(n) => ConstValue::Scalar(Scalar::from_uint(n, Size::from_bytes(1))),
LitKind::Int(n, _) if neg => {
let n = n as i128;
let n = n.overflowing_neg().0;
@ -84,7 +78,7 @@ fn parse_float<'tcx>(
let num = num.as_str();
use rustc_apfloat::ieee::{Single, Double};
use rustc_apfloat::Float;
let (bits, size) = match fty {
let (data, size) = match fty {
ast::FloatTy::F32 => {
num.parse::<f32>().map_err(|_| ())?;
let mut f = num.parse::<Single>().unwrap_or_else(|e| {
@ -107,5 +101,5 @@ fn parse_float<'tcx>(
}
};
Ok(ConstValue::Scalar(Scalar::Bits { bits, size }))
Ok(ConstValue::Scalar(Scalar::from_uint(data, Size::from_bytes(size))))
}

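These hunks replace hand-rolled `Scalar::Bits { .. }` literals with the `Scalar::from_uint` constructor. A hypothetical sketch of what such a constructor does; the real helper takes a `layout::Size` rather than a raw byte count, and its exact checks may differ:

    enum Scalar {
        Raw { data: u128, size: u8 },
    }

    impl Scalar {
        // Hypothetical constructor: store `data` after checking that it fits
        // into `size_bytes` bytes, so callers cannot build oversized scalars.
        fn from_uint(data: u128, size_bytes: u8) -> Scalar {
            let bits = u32::from(size_bytes) * 8;
            assert!(bits >= 128 || data >> bits == 0,
                    "value does not fit in {} bytes", size_bytes);
            Scalar::Raw { data, size: size_bytes }
        }
    }

In this sketch's terms, the four-line struct literal for `LitKind::Byte(n)` collapses to `Scalar::from_uint(n as u128, 1)`, with the size check done once in the constructor.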

@@ -6,7 +6,7 @@ use syntax::symbol::sym;
 use rustc_apfloat::ieee::{Single, Double};
 use rustc::mir::interpret::{
-    Scalar, EvalResult, Pointer, PointerArithmetic, InterpError, truncate
+    Scalar, EvalResult, Pointer, PointerArithmetic, InterpError,
 };
 use rustc::mir::CastKind;
 use rustc_apfloat::Float;
@@ -135,29 +135,13 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
         use rustc::ty::TyKind::*;
         trace!("Casting {:?}: {:?} to {:?}", val, src_layout.ty, dest_layout.ty);
-        match val {
-            Scalar::Ptr(ptr) => self.cast_from_ptr(ptr, dest_layout.ty),
-            Scalar::Bits { bits, size } => {
-                debug_assert_eq!(size as u64, src_layout.size.bytes());
-                debug_assert_eq!(truncate(bits, Size::from_bytes(size.into())), bits,
-                    "Unexpected value of size {} before casting", size);
-                let res = match src_layout.ty.sty {
-                    Float(fty) => self.cast_from_float(bits, fty, dest_layout.ty)?,
-                    _ => self.cast_from_int(bits, src_layout, dest_layout)?,
-                };
-                // Sanity check
-                match res {
-                    Scalar::Ptr(_) => bug!("Fabricated a ptr value from an int...?"),
-                    Scalar::Bits { bits, size } => {
-                        debug_assert_eq!(size as u64, dest_layout.size.bytes());
-                        debug_assert_eq!(truncate(bits, Size::from_bytes(size.into())), bits,
-                            "Unexpected value of size {} after casting", size);
-                    }
+        match val.to_bits_or_ptr(src_layout.size, self) {
+            Err(ptr) => self.cast_from_ptr(ptr, dest_layout.ty),
+            Ok(data) => {
+                match src_layout.ty.sty {
+                    Float(fty) => self.cast_from_float(data, fty, dest_layout.ty),
+                    _ => self.cast_from_int(data, src_layout, dest_layout),
                 }
-                // Done
-                Ok(res)
             }
         }
     }
@@ -177,7 +161,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
         trace!("cast_from_int: {}, {}, {}", v, src_layout.ty, dest_layout.ty);
         use rustc::ty::TyKind::*;
         match dest_layout.ty.sty {
-            Int(_) | Uint(_) => {
+            Int(_) | Uint(_) | RawPtr(_) => {
                 let v = self.truncate(v, dest_layout);
                 Ok(Scalar::from_uint(v, dest_layout.size))
             }
@@ -205,15 +189,6 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
                 Ok(Scalar::from_uint(v, Size::from_bytes(4)))
             },
-            // No alignment check needed for raw pointers.
-            // But we have to truncate to target ptr size.
-            RawPtr(_) => {
-                Ok(Scalar::from_uint(
-                    self.truncate_to_ptr(v).0,
-                    self.pointer_size(),
-                ))
-            },
             // Casts to bool are not permitted by rustc, no need to handle them here.
             _ => err!(Unimplemented(format!("int to {:?} cast", dest_layout.ty))),
         }

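The cast rewrite leans on the new `to_bits_or_ptr` helper: instead of matching `Scalar` variants and re-asserting sizes at every call site, callers get a `Result` whose `Err` carries a real pointer and whose `Ok` carries the raw data. A self-contained sketch of that idiom; the types and the size check are assumptions mirroring the asserts the diff deletes (the real method also takes a layout context):

    struct Pointer {
        alloc_id: u64,
        offset: u64,
    }

    enum Scalar {
        Ptr(Pointer),
        Raw { data: u128, size: u8 },
    }

    impl Scalar {
        // Pointers come back as Err, raw integers as Ok; the size assert that
        // every caller used to repeat now lives in one place.
        fn to_bits_or_ptr(self, size_bytes: u8) -> Result<u128, Pointer> {
            match self {
                Scalar::Ptr(ptr) => Err(ptr),
                Scalar::Raw { data, size } => {
                    assert_eq!(size, size_bytes, "scalar has the wrong size");
                    Ok(data)
                }
            }
        }
    }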

@@ -12,7 +12,6 @@ use std::borrow::Cow;
 use rustc::ty::{self, Instance, ParamEnv, query::TyCtxtAt};
 use rustc::ty::layout::{Align, TargetDataLayout, Size, HasDataLayout};
-pub use rustc::mir::interpret::{truncate, write_target_uint, read_target_uint};
 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 use syntax::ast::Mutability;
@@ -248,23 +247,21 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         required_align: Align
     ) -> EvalResult<'tcx> {
         // Check non-NULL/Undef, extract offset
-        let (offset, alloc_align) = match ptr {
-            Scalar::Ptr(ptr) => {
+        let (offset, alloc_align) = match ptr.to_bits_or_ptr(self.pointer_size(), self) {
+            Err(ptr) => {
                 // check this is not NULL -- which we can ensure only if this is in-bounds
                 // of some (potentially dead) allocation.
                 let align = self.check_bounds_ptr(ptr, InboundsCheck::MaybeDead,
                                                   CheckInAllocMsg::NullPointerTest)?;
                 (ptr.offset.bytes(), align)
             }
-            Scalar::Bits { bits, size } => {
-                assert_eq!(size as u64, self.pointer_size().bytes());
-                assert!(bits < (1u128 << self.pointer_size().bits()));
+            Ok(data) => {
                 // check this is not NULL
-                if bits == 0 {
+                if data == 0 {
                     return err!(InvalidNullPointerUsage);
                 }
                 // the "base address" is 0 and hence always aligned
-                (bits as u64, required_align)
+                (data as u64, required_align)
             }
         };
         // Check alignment

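In the `Ok(data)` arm above, the integer value itself is the address, and since the "base address" 0 is aligned to every power of two, only the value's divisibility by the required alignment matters. A small worked version of that check (names are illustrative, not rustc's):

    // An integer address is aligned iff it is a multiple of the (power-of-two)
    // required alignment; no allocation base enters into it.
    fn is_aligned(addr: u64, required_align: u64) -> bool {
        assert!(required_align.is_power_of_two());
        addr % required_align == 0
    }

    fn main() {
        assert!(is_aligned(16, 8));
        assert!(!is_aligned(12, 8));
        assert!(is_aligned(0, 8)); // 0 is aligned, but rejected earlier as NULL
    }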

@@ -641,19 +641,20 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
         } => {
             let variants_start = niche_variants.start().as_u32() as u128;
             let variants_end = niche_variants.end().as_u32() as u128;
-            match raw_discr {
-                ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) => {
+            let raw_discr = raw_discr.not_undef()
+                .map_err(|_| InterpError::InvalidDiscriminant(ScalarMaybeUndef::Undef))?;
+            match raw_discr.to_bits_or_ptr(discr_val.layout.size, self) {
+                Err(ptr) => {
                     // The niche must be just 0 (which an inbounds pointer value never is)
                     let ptr_valid = niche_start == 0 && variants_start == variants_end &&
                         self.memory.check_bounds_ptr(ptr, InboundsCheck::MaybeDead,
                                                      CheckInAllocMsg::NullPointerTest).is_ok();
                     if !ptr_valid {
-                        return err!(InvalidDiscriminant(raw_discr.erase_tag()));
+                        return err!(InvalidDiscriminant(raw_discr.erase_tag().into()));
                     }
                     (dataful_variant.as_u32() as u128, dataful_variant)
                 },
-                ScalarMaybeUndef::Scalar(Scalar::Bits { bits: raw_discr, size }) => {
-                    assert_eq!(size as u64, discr_val.layout.size.bytes());
+                Ok(raw_discr) => {
                     let adjusted_discr = raw_discr.wrapping_sub(niche_start)
                         .wrapping_add(variants_start);
                     if variants_start <= adjusted_discr && adjusted_discr <= variants_end {
@@ -668,8 +669,6 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M>
                         (dataful_variant.as_u32() as u128, dataful_variant)
                     }
                 },
-                ScalarMaybeUndef::Undef =>
-                    return err!(InvalidDiscriminant(ScalarMaybeUndef::Undef)),
             }
         }
     })

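The niche decoding above is pure wrapping arithmetic: subtract the first raw niche value, add the first niche variant index, and accept the result only if it lands inside the variant range. A worked example with made-up numbers:

    // Variants `variants_start..=variants_end` are encoded starting at the raw
    // value `niche_start`; anything outside decodes to the dataful variant.
    fn decode_niche(raw: u128, niche_start: u128,
                    variants_start: u128, variants_end: u128) -> Option<u128> {
        let adjusted = raw.wrapping_sub(niche_start).wrapping_add(variants_start);
        if variants_start <= adjusted && adjusted <= variants_end {
            Some(adjusted)
        } else {
            None // falls through to the dataful variant
        }
    }

    fn main() {
        // Variants 1..=2 live in the niche, whose first raw value is 2:
        assert_eq!(decode_niche(2, 2, 1, 2), Some(1));
        assert_eq!(decode_niche(3, 2, 1, 2), Some(2));
        assert_eq!(decode_niche(7, 2, 1, 2), None);
    }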

@@ -686,7 +686,7 @@ where
             Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Ptr(_))) =>
                 assert_eq!(self.pointer_size(), dest.layout.size,
                            "Size mismatch when writing pointer"),
-            Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Bits { size, .. })) =>
+            Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Raw { size, .. })) =>
                 assert_eq!(Size::from_bytes(size.into()), dest.layout.size,
                            "Size mismatch when writing bits"),
             Immediate::Scalar(ScalarMaybeUndef::Undef) => {}, // undef can have any size


@@ -186,9 +186,9 @@ impl<'a, Ctx> Snapshot<'a, Ctx> for Scalar
     fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
         match self {
             Scalar::Ptr(p) => Scalar::Ptr(p.snapshot(ctx)),
-            Scalar::Bits{ size, bits } => Scalar::Bits {
+            Scalar::Raw{ size, data } => Scalar::Raw {
+                data: *data,
                 size: *size,
-                bits: *bits,
             },
         }
     }


@@ -480,8 +480,8 @@ impl<'rt, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>>
                     wrapping_range_format(&layout.valid_range, max_hi),
                 )
             );
-        let bits = match value {
-            Scalar::Ptr(ptr) => {
+        let bits = match value.to_bits_or_ptr(op.layout.size, self.ecx) {
+            Err(ptr) => {
                 if lo == 1 && hi == max_hi {
                     // only NULL is not allowed.
                     // We can call `check_align` to check non-NULL-ness, but have to also look
@@ -509,10 +509,8 @@ impl<'rt, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>>
                     );
                 }
             }
-            Scalar::Bits { bits, size } => {
-                assert_eq!(size as u64, op.layout.size.bytes());
-                bits
-            }
+            Ok(data) =>
+                data
        };
         // Now compare. This is slightly subtle because this is a special "wrap-around" range.
         if wrapping_range_contains(&layout.valid_range, bits) {

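`wrapping_range_contains` has to handle valid ranges where start > end, which denote a range that wraps through the maximum value (this is how layouts encode "everything except a hole", e.g. non-null pointers). A sketch of such a check; the rustc helper's exact signature may differ:

    use std::ops::RangeInclusive;

    fn wrapping_range_contains(r: &RangeInclusive<u128>, x: u128) -> bool {
        if r.start() <= r.end() {
            // Ordinary range.
            *r.start() <= x && x <= *r.end()
        } else {
            // The range wraps around through the maximum value.
            *r.start() <= x || x <= *r.end()
        }
    }

    fn main() {
        // Ordinary range: bool's 0..=1.
        assert!(wrapping_range_contains(&(0..=1), 1));
        // Wrap-around range over one byte: 254..=1 admits 254, 255, 0, 1.
        assert!(wrapping_range_contains(&(254..=1), 255));
        assert!(!wrapping_range_contains(&(254..=1), 7));
    }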

@@ -382,10 +382,7 @@ impl<'a, 'mir, 'tcx> ConstPropagator<'a, 'mir, 'tcx> {
         type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some(
             ImmTy {
                 imm: Immediate::Scalar(
-                    Scalar::Bits {
-                        bits: n as u128,
-                        size: self.tcx.data_layout.pointer_size.bytes() as u8,
-                    }.into()
+                    Scalar::from_uint(n, self.tcx.data_layout.pointer_size).into()
                 ),
                 layout: self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).ok()?,
             }.into()
@@ -713,18 +710,18 @@ impl<'b, 'a, 'tcx> MutVisitor<'tcx> for ConstPropagator<'b, 'a, 'tcx> {
                     .eval_operand(len, source_info)
                     .expect("len must be const");
                 let len = match self.ecx.read_scalar(len) {
-                    Ok(ScalarMaybeUndef::Scalar(Scalar::Bits {
-                        bits, ..
-                    })) => bits,
+                    Ok(ScalarMaybeUndef::Scalar(Scalar::Raw {
+                        data, ..
+                    })) => data,
                     other => bug!("const len not primitive: {:?}", other),
                 };
                 let index = self
                     .eval_operand(index, source_info)
                     .expect("index must be const");
                 let index = match self.ecx.read_scalar(index) {
-                    Ok(ScalarMaybeUndef::Scalar(Scalar::Bits {
-                        bits, ..
-                    })) => bits,
+                    Ok(ScalarMaybeUndef::Scalar(Scalar::Raw {
+                        data, ..
+                    })) => data,
                     other => bug!("const index not primitive: {:?}", other),
                 };
                 format!(


@@ -2,14 +2,7 @@
 # Checks that all the targets returned by `rustc --print target-list` are valid
 # target specifications
-# TODO remove the '*ios*' case when rust-lang/rust#29812 is fixed
 all:
     for target in $(shell $(BARE_RUSTC) --print target-list); do \
-        case $$target in \
-        *ios*) \
-            ;; \
-        *) \
-            $(BARE_RUSTC) --target $$target --print sysroot \
-            ;; \
-        esac \
+        $(BARE_RUSTC) --target $$target --print sysroot; \
     done
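
For reference, the simplified recipe rendered as Rust, since the rest of this rollup is Rust code. This is an illustrative sketch, not part of the test suite; it assumes a `rustc` binary on PATH standing in for the Makefile's $(BARE_RUSTC):

    use std::process::Command;

    // Every target from `rustc --print target-list` must accept
    // `--print sysroot`, with no per-target special cases.
    fn main() {
        let out = Command::new("rustc")
            .args(&["--print", "target-list"])
            .output()
            .expect("failed to run rustc");
        for target in String::from_utf8_lossy(&out.stdout).lines() {
            let status = Command::new("rustc")
                .args(&["--target", target, "--print", "sysroot"])
                .status()
                .expect("failed to run rustc");
            assert!(status.success(), "target {} is not a valid specification", target);
        }
    }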