Auto merge of #94860 - Dylan-DPC:rollup-n06j8h6, r=Dylan-DPC

Rollup of 7 pull requests

Successful merges:

 - #87618 (Add missing documentation for std::char types)
 - #94769 (Collapse blanket and auto-trait impls by default)
 - #94798 (`parse_tt` refactorings)
 - #94818 (Rename `IntoFuture::Future` to `IntoFuture::IntoFuture`)
 - #94827 (CTFE/Miri: detect out-of-bounds pointers in offset_from)
 - #94838 (Make float parsing docs more comprehensive)
 - #94839 (Suggest using double colon when a struct field type includes a single colon)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2022-03-11 21:44:06 +00:00
commit 2c6a29af35
16 changed files with 335 additions and 184 deletions

View file

@@ -307,53 +307,57 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 self.write_pointer(offset_ptr, dest)?;
             }
             sym::ptr_offset_from => {
-                let a = self.read_immediate(&args[0])?.to_scalar()?;
-                let b = self.read_immediate(&args[1])?.to_scalar()?;
+                let a = self.read_pointer(&args[0])?;
+                let b = self.read_pointer(&args[1])?;

                 // Special case: if both scalars are *equal integers*
                 // and not null, we pretend there is an allocation of size 0 right there,
                 // and their offset is 0. (There's never a valid object at null, making it an
                 // exception from the exception.)
                 // This is the dual to the special exception for offset-by-0
-                // in the inbounds pointer offset operation (see the Miri code, `src/operator.rs`).
-                //
-                // Control flow is weird because we cannot early-return (to reach the
-                // `go_to_block` at the end).
-                let done = if let (Ok(a), Ok(b)) = (a.try_to_int(), b.try_to_int()) {
-                    let a = a.try_to_machine_usize(*self.tcx).unwrap();
-                    let b = b.try_to_machine_usize(*self.tcx).unwrap();
-                    if a == b && a != 0 {
-                        self.write_scalar(Scalar::from_machine_isize(0, self), dest)?;
-                        true
-                    } else {
-                        false
-                    }
-                } else {
-                    false
-                };
-
-                if !done {
-                    // General case: we need two pointers.
-                    let a = self.scalar_to_ptr(a);
-                    let b = self.scalar_to_ptr(b);
-                    let (a_alloc_id, a_offset, _) = self.memory.ptr_get_alloc(a)?;
-                    let (b_alloc_id, b_offset, _) = self.memory.ptr_get_alloc(b)?;
-                    if a_alloc_id != b_alloc_id {
-                        throw_ub_format!(
-                            "ptr_offset_from cannot compute offset of pointers into different \
-                            allocations.",
-                        );
-                    }
-                    let usize_layout = self.layout_of(self.tcx.types.usize)?;
-                    let isize_layout = self.layout_of(self.tcx.types.isize)?;
-                    let a_offset = ImmTy::from_uint(a_offset.bytes(), usize_layout);
-                    let b_offset = ImmTy::from_uint(b_offset.bytes(), usize_layout);
-                    let (val, _overflowed, _ty) =
-                        self.overflowing_binary_op(BinOp::Sub, &a_offset, &b_offset)?;
-                    let pointee_layout = self.layout_of(substs.type_at(0))?;
-                    let val = ImmTy::from_scalar(val, isize_layout);
-                    let size = ImmTy::from_int(pointee_layout.size.bytes(), isize_layout);
-                    self.exact_div(&val, &size, dest)?;
-                }
+                // in the inbounds pointer offset operation (see `ptr_offset_inbounds` below).
+                match (self.memory.ptr_try_get_alloc(a), self.memory.ptr_try_get_alloc(b)) {
+                    (Err(a), Err(b)) if a == b && a != 0 => {
+                        // Both are the same non-null integer.
+                        self.write_scalar(Scalar::from_machine_isize(0, self), dest)?;
+                    }
+                    (Err(offset), _) | (_, Err(offset)) => {
+                        throw_ub!(DanglingIntPointer(offset, CheckInAllocMsg::OffsetFromTest));
+                    }
+                    (Ok((a_alloc_id, a_offset, _)), Ok((b_alloc_id, b_offset, _))) => {
+                        // Both are pointers. They must be into the same allocation.
+                        if a_alloc_id != b_alloc_id {
+                            throw_ub_format!(
+                                "ptr_offset_from cannot compute offset of pointers into different \
+                                allocations.",
+                            );
+                        }
+                        // And they must both be valid for zero-sized accesses ("in-bounds or one past the end").
+                        self.memory.check_ptr_access_align(
+                            a,
+                            Size::ZERO,
+                            Align::ONE,
+                            CheckInAllocMsg::OffsetFromTest,
+                        )?;
+                        self.memory.check_ptr_access_align(
+                            b,
+                            Size::ZERO,
+                            Align::ONE,
+                            CheckInAllocMsg::OffsetFromTest,
+                        )?;
+
+                        // Compute offset.
+                        let usize_layout = self.layout_of(self.tcx.types.usize)?;
+                        let isize_layout = self.layout_of(self.tcx.types.isize)?;
+                        let a_offset = ImmTy::from_uint(a_offset.bytes(), usize_layout);
+                        let b_offset = ImmTy::from_uint(b_offset.bytes(), usize_layout);
+                        let (val, _overflowed, _ty) =
+                            self.overflowing_binary_op(BinOp::Sub, &a_offset, &b_offset)?;
+                        let pointee_layout = self.layout_of(substs.type_at(0))?;
+                        let val = ImmTy::from_scalar(val, isize_layout);
+                        let size = ImmTy::from_int(pointee_layout.size.bytes(), isize_layout);
+                        self.exact_div(&val, &size, dest)?;
+                    }
+                }
             }
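
For illustration (not part of the diff): a user-level sketch of the invariant the interpreter now enforces for `offset_from`. Both pointers must lie within, or one past the end of, the same allocation; the array and names below are invented for the example.

    fn main() {
        let a = [0u8; 4];
        let start = a.as_ptr();
        // One past the end is still valid for zero-sized accesses.
        let end = unsafe { start.add(4) };
        assert_eq!(unsafe { end.offset_from(start) }, 4);

        // By contrast, `start.wrapping_add(10)` points outside the 4-byte
        // allocation; calling `offset_from` on it is UB, and CTFE/Miri now
        // reports it as "out-of-bounds offset_from" instead of computing a value.
    }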

View file

@@ -388,9 +388,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
             CheckInAllocMsg::DerefTest | CheckInAllocMsg::MemoryAccessTest => {
                 AllocCheck::Dereferenceable
             }
-            CheckInAllocMsg::PointerArithmeticTest | CheckInAllocMsg::InboundsTest => {
-                AllocCheck::Live
-            }
+            CheckInAllocMsg::PointerArithmeticTest
+            | CheckInAllocMsg::OffsetFromTest
+            | CheckInAllocMsg::InboundsTest => AllocCheck::Live,
         };
         let (size, align) = self.get_size_and_align(alloc_id, check)?;
         Ok((size, align, ()))

View file

@@ -122,7 +122,7 @@ impl<'tt> TokenTreeOrTokenTreeSlice<'tt> {
 /// An unzipping of `TokenTree`s... see the `stack` field of `MatcherPos`.
 ///
-/// This is used by `inner_parse_loop` to keep track of delimited submatchers that we have
+/// This is used by `parse_tt_inner` to keep track of delimited submatchers that we have
 /// descended into.
 #[derive(Clone)]
 struct MatcherTtFrame<'tt> {
@@ -439,9 +439,8 @@ fn nameize<I: Iterator<Item = NamedMatch>>(
             }
             Occupied(..) => return Err((sp, format!("duplicated bind name: {}", bind_name))),
         },
-        // FIXME(c410-f3r) MetaVar and MetaVarExpr should be handled instead of being ignored
-        // https://github.com/rust-lang/rust/issues/9390
-        TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) | TokenTree::Token(..) => {}
+        TokenTree::Token(..) => (),
+        TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => unreachable!(),
     }
     Ok(())
@@ -481,21 +480,24 @@ fn token_name_eq(t1: &Token, t2: &Token) -> bool {
 ///   successful execution of this function.
 /// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in
 ///   the function `parse`.
-/// - `eof_items`: the set of items that would be valid if this was the EOF.
 /// - `bb_items`: the set of items that are waiting for the black-box parser.
 /// - `token`: the current token of the parser.
 ///
 /// # Returns
 ///
-/// A `ParseResult`. Note that matches are kept track of through the items generated.
-fn inner_parse_loop<'root, 'tt>(
+/// `Some(result)` if everything is finished, `None` otherwise. Note that matches are kept track of
+/// through the items generated.
+fn parse_tt_inner<'root, 'tt>(
     sess: &ParseSess,
+    ms: &[TokenTree],
     cur_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    next_items: &mut Vec<MatcherPosHandle<'root, 'tt>>,
+    next_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
     bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
-    eof_items: &mut EofItems<'root, 'tt>,
     token: &Token,
-) -> Result<(), (rustc_span::Span, String)> {
+) -> Option<NamedParseResult> {
+    // Matcher positions that would be valid if the macro invocation was over now
+    let mut eof_items = EofItems::None;
+
     // Pop items from `cur_items` until it is empty.
     while let Some(mut item) = cur_items.pop() {
         // When unzipped trees end, remove them. This corresponds to backtracking out of a
@@ -522,6 +524,8 @@ fn inner_parse_loop<'root, 'tt>(
             // then we could be at the end of a sequence or at the beginning of the next
             // repetition.
             if let Some(repetition) = &item.repetition {
+                debug_assert!(matches!(item.top_elts, Tt(TokenTree::Sequence(..))));
+
                 // At this point, regardless of whether there is a separator, we should add all
                 // matches from the complete repetition of the sequence to the shared, top-level
                 // `matches` list (actually, `up.matches`, which could itself not be the top-level,
@@ -565,7 +569,7 @@ fn inner_parse_loop<'root, 'tt>(
             } else {
                 // If we are not in a repetition, then being at the end of a matcher means that we
                 // have reached the potential end of the input.
-                *eof_items = match eof_items {
+                eof_items = match eof_items {
                     EofItems::None => EofItems::One(item),
                     EofItems::One(_) | EofItems::Multiple => EofItems::Multiple,
                 }
@@ -613,7 +617,7 @@ fn inner_parse_loop<'root, 'tt>(
                 // We need to match a metavar (but the identifier is invalid)... this is an error
                 TokenTree::MetaVarDecl(span, _, None) => {
                     if sess.missing_fragment_specifiers.borrow_mut().remove(&span).is_some() {
-                        return Err((span, "missing fragment specifier".to_string()));
+                        return Some(Error(span, "missing fragment specifier".to_string()));
                     }
                 }
@@ -655,13 +659,36 @@ fn inner_parse_loop<'root, 'tt>(
                 // rules. NOTE that this is not necessarily an error unless _all_ items in
                 // `cur_items` end up doing this. There may still be some other matchers that do
                 // end up working out.
-                TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => {}
+                TokenTree::Token(..) => {}
+                TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => unreachable!(),
             }
         }
     }

-    // Yay a successful parse (so far)!
-    Ok(())
+    // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
+    // either the parse is ambiguous (which should never happen) or there is a syntax error.
+    if *token == token::Eof {
+        Some(match eof_items {
+            EofItems::One(mut eof_item) => {
+                let matches =
+                    eof_item.matches.iter_mut().map(|dv| Lrc::make_mut(dv).pop().unwrap());
+                nameize(sess, ms, matches)
+            }
+            EofItems::Multiple => {
+                Error(token.span, "ambiguity: multiple successful parses".to_string())
+            }
+            EofItems::None => Failure(
+                Token::new(
+                    token::Eof,
+                    if token.span.is_dummy() { token.span } else { token.span.shrink_to_hi() },
+                ),
+                "missing tokens in macro arguments",
+            ),
+        })
+    } else {
+        None
+    }
 }
@@ -672,7 +699,7 @@ pub(super) fn parse_tt(
     macro_name: Ident,
 ) -> NamedParseResult {
     // A queue of possible matcher positions. We initialize it with the matcher position in which
-    // the "dot" is before the first token of the first token tree in `ms`. `inner_parse_loop` then
+    // the "dot" is before the first token of the first token tree in `ms`. `parse_tt_inner` then
     // processes all of these possible matcher positions and produces possible next positions into
     // `next_items`. After some post-processing, the contents of `next_items` replenish `cur_items`
    // and we start over again.
@@ -681,135 +708,118 @@ pub(super) fn parse_tt(
     // there are frequently *no* others! -- are allocated on the heap.
     let mut initial = MatcherPos::new(ms);
     let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
-    let mut next_items = Vec::new();

     loop {
-        assert!(next_items.is_empty());
+        let mut next_items = SmallVec::new();

         // Matcher positions black-box parsed by parser.rs (`parser`)
         let mut bb_items = SmallVec::new();

-        // Matcher positions that would be valid if the macro invocation was over now
-        let mut eof_items = EofItems::None;
-
         // Process `cur_items` until either we have finished the input or we need to get some
         // parsing from the black-box parser done. The result is that `next_items` will contain a
         // bunch of possible next matcher positions in `next_items`.
-        match inner_parse_loop(
+        if let Some(result) = parse_tt_inner(
             parser.sess,
+            ms,
             &mut cur_items,
             &mut next_items,
             &mut bb_items,
-            &mut eof_items,
             &parser.token,
         ) {
-            Ok(()) => {}
-            Err((sp, msg)) => return Error(sp, msg),
+            return result;
         }

-        // inner parse loop handled all cur_items, so it's empty
+        // `parse_tt_inner` handled all cur_items, so it's empty.
         assert!(cur_items.is_empty());

-        // We need to do some post processing after the `inner_parse_loop`.
+        // We need to do some post processing after the `parse_tt_inner`.
         //
         // Error messages here could be improved with links to original rules.

-        // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
-        // either the parse is ambiguous (which should never happen) or there is a syntax error.
-        if parser.token == token::Eof {
-            return match eof_items {
-                EofItems::One(mut eof_item) => {
-                    let matches =
-                        eof_item.matches.iter_mut().map(|dv| Lrc::make_mut(dv).pop().unwrap());
-                    nameize(parser.sess, ms, matches)
-                }
-                EofItems::Multiple => {
-                    Error(parser.token.span, "ambiguity: multiple successful parses".to_string())
-                }
-                EofItems::None => Failure(
-                    Token::new(
-                        token::Eof,
-                        if parser.token.span.is_dummy() {
-                            parser.token.span
-                        } else {
-                            parser.token.span.shrink_to_hi()
-                        },
-                    ),
-                    "missing tokens in macro arguments",
-                ),
-            };
-        }
-
-        // Performance hack: `eof_items` may share matchers via `Rc` with other things that we want
-        // to modify. Dropping `eof_items` now may drop these refcounts to 1, preventing an
-        // unnecessary implicit clone later in `Rc::make_mut`.
-        drop(eof_items);
-
-        // If there are no possible next positions AND we aren't waiting for the black-box parser,
-        // then there is a syntax error.
-        if bb_items.is_empty() && next_items.is_empty() {
-            return Failure(parser.token.clone(), "no rules expected this token in macro call");
-        }
-
-        if (!bb_items.is_empty() && !next_items.is_empty()) || bb_items.len() > 1 {
-            // We need to call out to parse some rust nonterminal (black-box) parser. But something
-            // is wrong, because there is not EXACTLY ONE of these.
-            let nts = bb_items
-                .iter()
-                .map(|item| match item.top_elts.get_tt(item.idx) {
-                    TokenTree::MetaVarDecl(_, bind, Some(kind)) => format!("{} ('{}')", kind, bind),
-                    _ => panic!(),
-                })
-                .collect::<Vec<String>>()
-                .join(" or ");
-
-            return Error(
-                parser.token.span,
-                format!(
-                    "local ambiguity when calling macro `{macro_name}`: multiple parsing options: {}",
-                    match next_items.len() {
-                        0 => format!("built-in NTs {}.", nts),
-                        1 => format!("built-in NTs {} or 1 other option.", nts),
-                        n => format!("built-in NTs {} or {} other options.", nts, n),
-                    }
-                ),
-            );
-        }
-
-        if !next_items.is_empty() {
-            // Dump all possible `next_items` into `cur_items` for the next iteration. Then process
-            // the next token.
-            cur_items.extend(next_items.drain(..));
-            parser.to_mut().bump();
-        } else {
-            // Finally, we have the case where we need to call the black-box parser to get some
-            // nonterminal.
-            assert_eq!(bb_items.len(), 1);
-
-            let mut item = bb_items.pop().unwrap();
-            if let TokenTree::MetaVarDecl(span, _, Some(kind)) = item.top_elts.get_tt(item.idx) {
-                let match_cur = item.match_cur;
-                // We use the span of the metavariable declaration to determine any
-                // edition-specific matching behavior for non-terminals.
-                let nt = match parser.to_mut().parse_nonterminal(kind) {
-                    Err(mut err) => {
-                        err.span_label(
-                            span,
-                            format!("while parsing argument for this `{}` macro fragment", kind),
-                        )
-                        .emit();
-                        return ErrorReported;
-                    }
-                    Ok(nt) => nt,
-                };
-                item.push_match(match_cur, MatchedNonterminal(Lrc::new(nt)));
-                item.idx += 1;
-                item.match_cur += 1;
-            } else {
-                unreachable!()
-            }
-            cur_items.push(item);
-        }
+        match (next_items.len(), bb_items.len()) {
+            (0, 0) => {
+                // There are no possible next positions AND we aren't waiting for the black-box
+                // parser: syntax error.
+                return Failure(parser.token.clone(), "no rules expected this token in macro call");
+            }
+
+            (_, 0) => {
+                // Dump all possible `next_items` into `cur_items` for the next iteration. Then
+                // process the next token.
+                cur_items.extend(next_items.drain(..));
+                parser.to_mut().bump();
+            }
+
+            (0, 1) => {
+                // We need to call the black-box parser to get some nonterminal.
+                let mut item = bb_items.pop().unwrap();
+                if let TokenTree::MetaVarDecl(span, _, Some(kind)) = item.top_elts.get_tt(item.idx)
+                {
+                    let match_cur = item.match_cur;
+                    // We use the span of the metavariable declaration to determine any
+                    // edition-specific matching behavior for non-terminals.
+                    let nt = match parser.to_mut().parse_nonterminal(kind) {
+                        Err(mut err) => {
+                            err.span_label(
+                                span,
+                                format!("while parsing argument for this `{kind}` macro fragment"),
+                            )
+                            .emit();
+                            return ErrorReported;
+                        }
+                        Ok(nt) => nt,
+                    };
+                    item.push_match(match_cur, MatchedNonterminal(Lrc::new(nt)));
+                    item.idx += 1;
+                    item.match_cur += 1;
+                } else {
+                    unreachable!()
+                }
+                cur_items.push(item);
+            }
+
+            (_, _) => {
+                // We need to call the black-box parser to get some nonterminal, but something is
+                // wrong.
+                return bb_items_ambiguity_error(
+                    macro_name,
+                    next_items,
+                    bb_items,
+                    parser.token.span,
+                );
+            }
+        }

         assert!(!cur_items.is_empty());
     }
 }
+
+fn bb_items_ambiguity_error<'root, 'tt>(
+    macro_name: Ident,
+    next_items: SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
+    bb_items: SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
+    token_span: rustc_span::Span,
+) -> NamedParseResult {
+    let nts = bb_items
+        .iter()
+        .map(|item| match item.top_elts.get_tt(item.idx) {
+            TokenTree::MetaVarDecl(_, bind, Some(kind)) => {
+                format!("{} ('{}')", kind, bind)
+            }
+            _ => panic!(),
+        })
+        .collect::<Vec<String>>()
+        .join(" or ");
+
+    Error(
+        token_span,
+        format!(
+            "local ambiguity when calling macro `{macro_name}`: multiple parsing options: {}",
+            match next_items.len() {
+                0 => format!("built-in NTs {}.", nts),
+                1 => format!("built-in NTs {} or 1 other option.", nts),
+                n => format!("built-in NTs {} or {} other options.", nts, n),
+            }
+        ),
+    )
+}
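
For context (not part of the diff): the ambiguity path that now ends in `bb_items_ambiguity_error` is reachable from user code whenever two nonterminals could match at the same position. A small, hypothetical reproducer, with the error text given roughly as the code above formats it:

    macro_rules! ambiguity {
        // After zero or more `ident`s, both `$i` and `$j` could accept the next
        // token, so more than one black-box item is pending at once.
        ($($i:ident)* $j:ident) => {};
    }

    fn main() {
        ambiguity!(a b c);
        // error: local ambiguity when calling macro `ambiguity`: multiple parsing
        // options: built-in NTs ident ('i') or ident ('j').
    }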

View file

@@ -184,6 +184,8 @@ pub enum CheckInAllocMsg {
     MemoryAccessTest,
     /// We are doing pointer arithmetic.
     PointerArithmeticTest,
+    /// We are doing pointer offset_from.
+    OffsetFromTest,
     /// None of the above -- generic/unspecific inbounds test.
     InboundsTest,
 }
@@ -199,6 +201,7 @@ impl fmt::Display for CheckInAllocMsg {
                 CheckInAllocMsg::DerefTest => "dereferencing pointer failed: ",
                 CheckInAllocMsg::MemoryAccessTest => "memory access failed: ",
                 CheckInAllocMsg::PointerArithmeticTest => "pointer arithmetic failed: ",
+                CheckInAllocMsg::OffsetFromTest => "out-of-bounds offset_from: ",
                 CheckInAllocMsg::InboundsTest => "",
             }
         )
@@ -358,6 +361,9 @@ impl fmt::Display for UndefinedBehaviorInfo<'_> {
             DanglingIntPointer(0, CheckInAllocMsg::InboundsTest) => {
                 write!(f, "null pointer is not a valid pointer for this operation")
             }
+            DanglingIntPointer(0, msg) => {
+                write!(f, "{}null pointer is not a valid pointer", msg)
+            }
             DanglingIntPointer(i, msg) => {
                 write!(f, "{}0x{:x} is not a valid pointer", msg, i)
             }

View file

@@ -1534,6 +1534,16 @@ impl<'a> Parser<'a> {
         let name = self.parse_field_ident(adt_ty, lo)?;
         self.expect_field_ty_separator()?;
         let ty = self.parse_ty()?;
+        if self.token.kind == token::Colon && self.look_ahead(1, |tok| tok.kind != token::Colon) {
+            self.struct_span_err(self.token.span, "found single colon in a struct field type path")
+                .span_suggestion_verbose(
+                    self.token.span,
+                    "write a path separator here",
+                    "::".to_string(),
+                    Applicability::MaybeIncorrect,
+                )
+                .emit();
+        }
         if self.token.kind == token::Eq {
             self.bump();
             let const_expr = self.parse_anon_const_expr()?;

View file

@@ -218,6 +218,8 @@ impl const From<u8> for char {
 }

 /// An error which can be returned when parsing a char.
+///
+/// This `struct` is created when using the [`char::from_str`] method.
 #[stable(feature = "char_from_str", since = "1.20.0")]
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct ParseCharError {
@@ -300,7 +302,10 @@ impl TryFrom<u32> for char {
     }
 }

-/// The error type returned when a conversion from u32 to char fails.
+/// The error type returned when a conversion from [`prim@u32`] to [`prim@char`] fails.
+///
+/// This `struct` is created by the [`char::try_from<u32>`](char#impl-TryFrom<u32>) method.
+/// See its documentation for more.
 #[stable(feature = "try_from", since = "1.34.0")]
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub struct CharTryFromError(());
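
For illustration (not part of the diff), where these two error types come from in practice:

    use std::convert::TryFrom;
    use std::str::FromStr;

    fn main() {
        // `CharTryFromError`: not every u32 is a scalar value; surrogates and
        // values above 0x10FFFF are rejected.
        assert_eq!(char::try_from(0x2764u32), Ok('\u{2764}'));
        assert!(char::try_from(0xD800u32).is_err());

        // `ParseCharError`: `char::from_str` only accepts strings containing
        // exactly one character.
        assert_eq!(char::from_str("a"), Ok('a'));
        assert!(char::from_str("ab").is_err());
    }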

View file

@@ -9,20 +9,20 @@ pub trait IntoFuture {
     /// Which kind of future are we turning this into?
     #[unstable(feature = "into_future", issue = "67644")]
-    type Future: Future<Output = Self::Output>;
+    type IntoFuture: Future<Output = Self::Output>;

     /// Creates a future from a value.
     #[unstable(feature = "into_future", issue = "67644")]
     #[lang = "into_future"]
-    fn into_future(self) -> Self::Future;
+    fn into_future(self) -> Self::IntoFuture;
 }

 #[unstable(feature = "into_future", issue = "67644")]
 impl<F: Future> IntoFuture for F {
     type Output = F::Output;
-    type Future = F;
+    type IntoFuture = F;

-    fn into_future(self) -> Self::Future {
+    fn into_future(self) -> Self::IntoFuture {
         self
     }
 }
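
A usage sketch of the renamed associated type (not part of the diff; nightly-only, and the `Delayed`/`consume` names are invented for the example):

    #![feature(into_future)]
    use std::future::{Future, IntoFuture};
    use std::pin::Pin;

    struct Delayed(i32);

    impl IntoFuture for Delayed {
        type Output = i32;
        type IntoFuture = Pin<Box<dyn Future<Output = i32>>>;

        fn into_future(self) -> Self::IntoFuture {
            Box::pin(async move { self.0 })
        }
    }

    async fn consume() -> i32 {
        // Convert the value into its future, then await it.
        Delayed(41).into_future().await + 1
    }

    fn main() {}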

View file

@@ -112,21 +112,24 @@ macro_rules! from_str_float_impl {
            /// * '2.5E-10'
            /// * '5.'
            /// * '.5', or, equivalently, '0.5'
-           /// * 'inf', '-inf', 'NaN'
+           /// * 'inf', '-inf', '+infinity', 'NaN'
+           ///
+           /// Note that alphabetical characters are not case-sensitive.
            ///
            /// Leading and trailing whitespace represent an error.
            ///
            /// # Grammar
            ///
-           /// All strings that adhere to the following [EBNF] grammar
-           /// will result in an [`Ok`] being returned:
+           /// All strings that adhere to the following [EBNF] grammar when
+           /// lowercased will result in an [`Ok`] being returned:
            ///
            /// ```txt
-           /// Float  ::= Sign? ( 'inf' | 'NaN' | Number )
+           /// Float  ::= Sign? ( 'inf' | 'infinity' | 'nan' | Number )
            /// Number ::= ( Digit+ |
+           ///              '.' Digit* |
            ///              Digit+ '.' Digit* |
            ///              Digit* '.' Digit+ ) Exp?
-           /// Exp    ::= [eE] Sign? Digit+
+           /// Exp    ::= 'e' Sign? Digit+
            /// Sign   ::= [+-]
            /// Digit  ::= [0-9]
            /// ```
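
A quick sanity check of the documented grammar (not part of the diff):

    fn main() {
        assert_eq!(".5".parse::<f64>().unwrap(), 0.5);
        assert_eq!("2.5E-10".parse::<f64>().unwrap(), 2.5e-10);
        assert_eq!("+infinity".parse::<f64>().unwrap(), f64::INFINITY);
        assert!("NaN".parse::<f64>().unwrap().is_nan());
        // Alphabetic parts are matched case-insensitively...
        assert_eq!("Inf".parse::<f64>().unwrap(), f64::INFINITY);
        // ...but leading or trailing whitespace is an error.
        assert!(" 1.0".parse::<f64>().is_err());
    }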

View file

@@ -699,7 +699,13 @@ fn short_item_info(
 // Render the list of items inside one of the sections "Trait Implementations",
 // "Auto Trait Implementations," "Blanket Trait Implementations" (on struct/enum pages).
-fn render_impls(cx: &Context<'_>, w: &mut Buffer, impls: &[&&Impl], containing_item: &clean::Item) {
+fn render_impls(
+    cx: &Context<'_>,
+    w: &mut Buffer,
+    impls: &[&&Impl],
+    containing_item: &clean::Item,
+    toggle_open_by_default: bool,
+) {
     let tcx = cx.tcx();
     let mut rendered_impls = impls
         .iter()
@@ -722,7 +728,7 @@ fn render_impls(cx: &Context<'_>, w: &mut Buffer, impls: &[&&Impl], containing_i
                     is_on_foreign_type: false,
                     show_default_items: true,
                     show_non_assoc_items: true,
-                    toggle_open_by_default: true,
+                    toggle_open_by_default,
                 },
             );
             buffer.into_inner()
@@ -1143,7 +1149,7 @@ fn render_assoc_items_inner(
         concrete.into_iter().partition(|t| t.inner_impl().kind.is_blanket());

     let mut impls = Buffer::empty_from(w);
-    render_impls(cx, &mut impls, &concrete, containing_item);
+    render_impls(cx, &mut impls, &concrete, containing_item, true);
     let impls = impls.into_inner();
     if !impls.is_empty() {
         write!(
@@ -1165,7 +1171,7 @@ fn render_assoc_items_inner(
             </h2>\
             <div id=\"synthetic-implementations-list\">",
         );
-        render_impls(cx, w, &synthetic, containing_item);
+        render_impls(cx, w, &synthetic, containing_item, false);
         w.write_str("</div>");
     }
@@ -1177,7 +1183,7 @@ fn render_assoc_items_inner(
             </h2>\
             <div id=\"blanket-implementations-list\">",
        );
-        render_impls(cx, w, &blanket_impl, containing_item);
+        render_impls(cx, w, &blanket_impl, containing_item, false);
         w.write_str("</div>");
     }
 }

View file

@@ -17,7 +17,14 @@ assert-text: ("#toggle-all-docs", "[]")
 goto: file://|DOC_PATH|/test_docs/struct.Foo.html
 // We first check that everything is visible.
 assert-text: ("#toggle-all-docs", "[]")
-assert-attribute: ("details.rustdoc-toggle", {"open": ""}, ALL)
+assert-attribute: ("#implementations-list details.rustdoc-toggle", {"open": ""}, ALL)
+assert-attribute: ("#trait-implementations-list details.rustdoc-toggle", {"open": ""}, ALL)
+assert-attribute-false: (
+    "#blanket-implementations-list > details.rustdoc-toggle",
+    {"open": ""},
+    ALL,
+)
 // We collapse them all.
 click: "#toggle-all-docs"
 wait-for: 50

View file

@@ -10,9 +10,9 @@ struct AwaitMe;

 impl IntoFuture for AwaitMe {
     type Output = i32;
-    type Future = Pin<Box<dyn Future<Output = i32>>>;
+    type IntoFuture = Pin<Box<dyn Future<Output = i32>>>;

-    fn into_future(self) -> Self::Future {
+    fn into_future(self) -> Self::IntoFuture {
         Box::pin(me())
     }
 }

View file

@@ -1,4 +1,4 @@
-#![feature(const_ptr_offset_from)]
+#![feature(const_ptr_offset_from, const_ptr_offset)]
 #![feature(core_intrinsics)]

 use std::intrinsics::ptr_offset_from;
@@ -44,4 +44,30 @@ pub const DIFFERENT_INT: isize = { // offset_from with two different integers: l
     //~| 0x10 is not a valid pointer
 };

+const OUT_OF_BOUNDS_1: isize = {
+    let start_ptr = &4 as *const _ as *const u8;
+    let length = 10;
+    let end_ptr = (start_ptr).wrapping_add(length);
+    // First ptr is out of bounds
+    unsafe { ptr_offset_from(end_ptr, start_ptr) } //~ERROR evaluation of constant value failed
+    //~| pointer at offset 10 is out-of-bounds
+};
+
+const OUT_OF_BOUNDS_2: isize = {
+    let start_ptr = &4 as *const _ as *const u8;
+    let length = 10;
+    let end_ptr = (start_ptr).wrapping_add(length);
+    // Second ptr is out of bounds
+    unsafe { ptr_offset_from(start_ptr, end_ptr) } //~ERROR evaluation of constant value failed
+    //~| pointer at offset 10 is out-of-bounds
+};
+
+const OUT_OF_BOUNDS_SAME: isize = {
+    let start_ptr = &4 as *const _ as *const u8;
+    let length = 10;
+    let end_ptr = (start_ptr).wrapping_add(length);
+    unsafe { ptr_offset_from(end_ptr, end_ptr) } //~ERROR evaluation of constant value failed
+    //~| pointer at offset 10 is out-of-bounds
+};
+
 fn main() {}

View file

@@ -10,7 +10,7 @@ error[E0080]: evaluation of constant value failed
LL | unsafe { intrinsics::ptr_offset_from(self, origin) }
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
   | |
-  | 0x2a is not a valid pointer
+  | out-of-bounds offset_from: 0x2a is not a valid pointer
   | inside `ptr::const_ptr::<impl *const u8>::offset_from` at $SRC_DIR/core/src/ptr/const_ptr.rs:LL:COL
   |
  ::: $DIR/offset_from_ub.rs:23:14
@@ -28,14 +28,32 @@ error[E0080]: evaluation of constant value failed
  --> $DIR/offset_from_ub.rs:36:14
   |
LL | unsafe { ptr_offset_from(ptr, ptr) }
-  |          ^^^^^^^^^^^^^^^^^^^^^^^^^ null pointer is not a valid pointer for this operation
+  |          ^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds offset_from: null pointer is not a valid pointer

error[E0080]: evaluation of constant value failed
  --> $DIR/offset_from_ub.rs:43:14
   |
LL | unsafe { ptr_offset_from(ptr2, ptr1) }
-  |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 0x10 is not a valid pointer
+  |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds offset_from: 0x10 is not a valid pointer

-error: aborting due to 5 previous errors
+error[E0080]: evaluation of constant value failed
+  --> $DIR/offset_from_ub.rs:52:14
+   |
+LL | unsafe { ptr_offset_from(end_ptr, start_ptr) }
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds offset_from: alloc18 has size 4, so pointer at offset 10 is out-of-bounds
+
+error[E0080]: evaluation of constant value failed
+  --> $DIR/offset_from_ub.rs:61:14
+   |
+LL | unsafe { ptr_offset_from(start_ptr, end_ptr) }
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds offset_from: alloc21 has size 4, so pointer at offset 10 is out-of-bounds
+
+error[E0080]: evaluation of constant value failed
+  --> $DIR/offset_from_ub.rs:69:14
+   |
+LL | unsafe { ptr_offset_from(end_ptr, end_ptr) }
+   |          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ out-of-bounds offset_from: alloc24 has size 4, so pointer at offset 10 is out-of-bounds
+
+error: aborting due to 8 previous errors

For more information about this error, try `rustc --explain E0080`.

View file

@@ -144,7 +144,7 @@ error[E0080]: evaluation of constant value failed
LL | unsafe { intrinsics::offset(self, count) }
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
   | |
-  | pointer arithmetic failed: 0x0 is not a valid pointer
+  | pointer arithmetic failed: null pointer is not a valid pointer
   | inside `ptr::const_ptr::<impl *const u8>::offset` at $SRC_DIR/core/src/ptr/const_ptr.rs:LL:COL
   |
  ::: $DIR/offset_ub.rs:22:50

View file

@@ -0,0 +1,20 @@
mod foo {
struct A;
mod bar {
struct B;
}
}
struct Foo {
a: foo:A,
//~^ ERROR found single colon in a struct field type path
//~| expected `,`, or `}`, found `:`
}
struct Bar {
b: foo::bar:B,
//~^ ERROR found single colon in a struct field type path
//~| expected `,`, or `}`, found `:`
}
fn main() {}

View file

@@ -0,0 +1,36 @@
error: found single colon in a struct field type path
--> $DIR/struct-field-type-including-single-colon.rs:9:11
|
LL | a: foo:A,
| ^
|
help: write a path separator here
|
LL | a: foo::A,
| ~~
error: expected `,`, or `}`, found `:`
--> $DIR/struct-field-type-including-single-colon.rs:9:11
|
LL | a: foo:A,
| ^
error: found single colon in a struct field type path
--> $DIR/struct-field-type-including-single-colon.rs:15:16
|
LL | b: foo::bar:B,
| ^
|
help: write a path separator here
|
LL | b: foo::bar::B,
| ~~
error: expected `,`, or `}`, found `:`
--> $DIR/struct-field-type-including-single-colon.rs:15:16
|
LL | b: foo::bar:B,
| ^
error: aborting due to 4 previous errors