Use SortedMap instead of BTreeMap for relocations in MIRI.

Michael Woerister 2018-05-18 16:06:20 +02:00
parent 879eb972ad
commit 3ed23a4bd0
3 changed files with 56 additions and 26 deletions
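
For context before the diffs: rustc_data_structures::sorted_map::SortedMap stores its entries in a vector kept sorted by key. Compared with BTreeMap, arbitrary insertion is slower, but iteration is cache-friendly, length and emptiness checks are O(1), and range queries come back as plain slices. A minimal sketch of the idea, limited to the methods this commit touches (the names mirror the real type, but the bodies are illustrative simplifications, and the real range method accepts standard range syntax rather than explicit endpoints):

// Minimal sketch of a sorted-vector map in the spirit of
// rustc_data_structures::sorted_map::SortedMap. Illustrative only.
pub struct SortedMap<K: Ord, V> {
    data: Vec<(K, V)>, // invariant: sorted by key, no duplicate keys
}

impl<K: Ord, V> SortedMap<K, V> {
    pub fn new() -> Self {
        SortedMap { data: Vec::new() }
    }

    // Caller guarantees `elements` is sorted by key and duplicate-free.
    pub fn from_presorted_elements(elements: Vec<(K, V)>) -> Self {
        debug_assert!(elements.windows(2).all(|w| w[0].0 < w[1].0));
        SortedMap { data: elements }
    }

    pub fn len(&self) -> usize {
        self.data.len() // O(1), unlike counting a lazy BTreeMap range
    }

    // Range queries binary-search the two endpoints and return a
    // contiguous slice rather than a lazy iterator.
    pub fn range(&self, start: &K, end: &K) -> &[(K, V)] {
        let lo = self.data.partition_point(|(k, _)| k < start);
        let hi = self.data.partition_point(|(k, _)| k < end);
        &self.data[lo..hi]
    }
}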


@@ -12,7 +12,6 @@ pub use self::error::{EvalError, EvalResult, EvalErrorKind, AssertMessage};
 pub use self::value::{PrimVal, PrimValKind, Value, Pointer, ConstValue};
-use std::collections::BTreeMap;
 use std::fmt;
 use mir;
 use hir::def_id::DefId;
@@ -21,9 +20,11 @@ use ty::layout::{self, Align, HasDataLayout, Size};
 use middle::region;
 use std::iter;
 use std::io;
+use std::ops::{Deref, DerefMut};
 use std::hash::Hash;
 use syntax::ast::Mutability;
 use rustc_serialize::{Encoder, Decoder, Decodable, Encodable};
+use rustc_data_structures::sorted_map::SortedMap;
 use rustc_data_structures::fx::FxHashMap;
 use byteorder::{WriteBytesExt, ReadBytesExt, LittleEndian, BigEndian};
@@ -341,7 +342,7 @@ pub struct Allocation {
     pub bytes: Vec<u8>,
     /// Maps from byte addresses to allocations.
     /// Only the first byte of a pointer is inserted into the map.
-    pub relocations: BTreeMap<Size, AllocId>,
+    pub relocations: Relocations,
     /// Denotes undefined memory. Reading from undefined memory is forbidden in miri
     pub undef_mask: UndefMask,
    /// The alignment of the allocation to detect unaligned reads.
@@ -358,7 +359,7 @@ impl Allocation {
         undef_mask.grow(Size::from_bytes(slice.len() as u64), true);
         Self {
             bytes: slice.to_owned(),
-            relocations: BTreeMap::new(),
+            relocations: Relocations::new(),
             undef_mask,
             align,
             runtime_mutability: Mutability::Immutable,
@@ -373,7 +374,7 @@ impl Allocation {
         assert_eq!(size.bytes() as usize as u64, size.bytes());
         Allocation {
             bytes: vec![0; size.bytes() as usize],
-            relocations: Relocations::new(),
+            relocations: Relocations::new(),
             undef_mask: UndefMask::new(size),
             align,
             runtime_mutability: Mutability::Immutable,
@@ -383,6 +384,35 @@ impl Allocation {
 impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}
 
+#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
+pub struct Relocations(SortedMap<Size, AllocId>);
+
+impl Relocations {
+    pub fn new() -> Relocations {
+        Relocations(SortedMap::new())
+    }
+
+    // The caller must guarantee that the given relocations are already sorted
+    // by address and contain no duplicates.
+    pub fn from_presorted(r: Vec<(Size, AllocId)>) -> Relocations {
+        Relocations(SortedMap::from_presorted_elements(r))
+    }
+}
+
+impl Deref for Relocations {
+    type Target = SortedMap<Size, AllocId>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl DerefMut for Relocations {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 // Methods to access integers in the target endianness
 ////////////////////////////////////////////////////////////////////////////////
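
Because Relocations only derefs to the SortedMap it wraps, call sites keep their map-like syntax while construction is funneled through new and from_presorted, which uphold the sorted-and-duplicate-free invariant. A standalone illustration of that Deref pattern (the Demo types are invented here, not rustc's):

use std::ops::Deref;

struct SortedMapDemo { data: Vec<(u64, u64)> }

impl SortedMapDemo {
    fn len(&self) -> usize { self.data.len() }
}

// Newtype wrapper, analogous to Relocations above.
struct RelocationsDemo(SortedMapDemo);

impl Deref for RelocationsDemo {
    type Target = SortedMapDemo;
    fn deref(&self) -> &Self::Target { &self.0 }
}

fn main() {
    let relocations = RelocationsDemo(SortedMapDemo { data: vec![(0, 1)] });
    // `len` is SortedMapDemo's method, reached through Deref coercion:
    assert_eq!(relocations.len(), 1);
}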


@@ -83,7 +83,7 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx, alloc: &Allocation) -> ValueRef {
     let pointer_size = layout.pointer_size.bytes() as usize;
     let mut next_offset = 0;
-    for (&offset, &alloc_id) in &alloc.relocations {
+    for &(offset, alloc_id) in alloc.relocations.iter() {
         let offset = offset.bytes();
         assert_eq!(offset as usize as u64, offset);
         let offset = offset as usize;
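
The destructuring in the loop header changes because iterating a &BTreeMap<K, V> yields (&K, &V) pairs, while iterating the slice behind a SortedMap yields &(K, V). A standalone sketch of the difference, with u64/u32 standing in for Size and AllocId:

use std::collections::BTreeMap;

fn main() {
    let btree: BTreeMap<u64, u32> = vec![(0, 1), (8, 2)].into_iter().collect();
    // BTreeMap iteration: items are (&K, &V), hence a `&` on each side.
    for (&offset, &alloc_id) in &btree {
        println!("btree: {} -> {}", offset, alloc_id);
    }

    // Slice iteration: items are &(K, V), hence one `&` around the tuple.
    let sorted: Vec<(u64, u32)> = vec![(0, 1), (8, 2)];
    for &(offset, alloc_id) in sorted.iter() {
        println!("slice: {} -> {}", offset, alloc_id);
    }
}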


@@ -1,4 +1,4 @@
-use std::collections::{btree_map, VecDeque};
+use std::collections::VecDeque;
 use std::ptr;
 
 use rustc::hir::def_id::DefId;
@@ -519,7 +519,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
     fn get_bytes(&self, ptr: MemoryPointer, size: Size, align: Align) -> EvalResult<'tcx, &[u8]> {
         assert_ne!(size.bytes(), 0);
-        if self.relocations(ptr, size)?.count() != 0 {
+        if self.relocations(ptr, size)?.len() != 0 {
             return err!(ReadPointerAsBytes);
         }
         self.check_defined(ptr, size)?;
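
The count() to len() switch here (and in the later hunks) is not purely cosmetic: BTreeMap::range returns a lazy iterator, so testing for overlapping relocations walked every match, whereas the slice now returned by relocations() reports its length in constant time. A sketch of the contrast, with u64 keys standing in for Size and AllocId:

use std::collections::BTreeMap;

// Lazy iterator: walks every relocation in the range just to count them.
fn has_overlap_btree(map: &BTreeMap<u64, u64>, start: u64, end: u64) -> bool {
    map.range(start..end).count() != 0
}

// Contiguous slice: the length is already known.
fn has_overlap_slice(range: &[(u64, u64)]) -> bool {
    range.len() != 0
}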
@@ -614,9 +614,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         // first copy the relocations to a temporary buffer, because
         // `get_bytes_mut` will clear the relocations, which is correct,
         // since we don't want to keep any relocations at the target.
         let relocations: Vec<_> = self.relocations(src, size)?
-            .map(|(&offset, &alloc_id)| {
+            .iter()
+            .map(|&(offset, alloc_id)| {
                 // Update relocation offsets for the new positions in the destination allocation.
                 (offset + dest.offset - src.offset, alloc_id)
             })
@@ -648,7 +648,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         self.copy_undef_mask(src, dest, size)?;
 
         // copy back the relocations
-        self.get_mut(dest.alloc_id)?.relocations.extend(relocations);
+        self.get_mut(dest.alloc_id)?.relocations.insert_presorted(relocations);
 
         Ok(())
     }
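
insert_presorted can exploit the fact that the incoming relocations are already in ascending order (they come from a sorted range, shifted by a constant), so they can be merged in one pass rather than inserted one at a time, as extend on a BTreeMap effectively did. A rough sketch of such a merge for the simplified SortedMap sketched earlier (not rustc's implementation; the collision policy is an illustrative choice):

impl<K: Ord, V> SortedMap<K, V> {
    // Assumes `elements` is itself sorted by key and duplicate-free.
    pub fn insert_presorted(&mut self, mut elements: Vec<(K, V)>) {
        if elements.is_empty() {
            return;
        }
        // Fast path: all new keys land past the current maximum.
        if self.data.last().map_or(true, |(k, _)| *k < elements[0].0) {
            self.data.append(&mut elements);
            return;
        }
        // General path: one-pass merge of two sorted sequences.
        let old = std::mem::take(&mut self.data);
        let mut new_iter = elements.into_iter().peekable();
        for pair in old {
            while new_iter.peek().map_or(false, |e| e.0 < pair.0) {
                self.data.push(new_iter.next().unwrap());
            }
            if new_iter.peek().map_or(false, |e| e.0 == pair.0) {
                continue; // key collision: keep the incoming element
            }
            self.data.push(pair);
        }
        self.data.extend(new_iter);
    }
}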
@@ -660,7 +660,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         match alloc.bytes[offset..].iter().position(|&c| c == 0) {
             Some(size) => {
                 let p1 = Size::from_bytes((size + 1) as u64);
-                if self.relocations(ptr, p1)?.count() != 0 {
+                if self.relocations(ptr, p1)?.len() != 0 {
                     return err!(ReadPointerAsBytes);
                 }
                 self.check_defined(ptr, p1)?;
@@ -720,7 +720,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         let bytes = read_target_uint(endianness, bytes).unwrap();
         // See if we got a pointer
         if size != self.pointer_size() {
-            if self.relocations(ptr, size)?.count() != 0 {
+            if self.relocations(ptr, size)?.len() != 0 {
                 return err!(ReadPointerAsBytes);
             }
         } else {
@@ -808,24 +808,26 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         &self,
         ptr: MemoryPointer,
         size: Size,
-    ) -> EvalResult<'tcx, btree_map::Range<Size, AllocId>> {
+    ) -> EvalResult<'tcx, &[(Size, AllocId)]> {
         let start = ptr.offset.bytes().saturating_sub(self.pointer_size().bytes() - 1);
         let end = ptr.offset + size;
         Ok(self.get(ptr.alloc_id)?.relocations.range(Size::from_bytes(start)..end))
     }
 
     fn clear_relocations(&mut self, ptr: MemoryPointer, size: Size) -> EvalResult<'tcx> {
-        // Find all relocations overlapping the given range.
-        let keys: Vec<_> = self.relocations(ptr, size)?.map(|(&k, _)| k).collect();
-        if keys.is_empty() {
-            return Ok(());
-        }
-
         // Find the start and end of the given range and its outermost relocations.
+        let (first, last) = {
+            // Find all relocations overlapping the given range.
+            let relocations = self.relocations(ptr, size)?;
+            if relocations.is_empty() {
+                return Ok(());
+            }
+
+            (relocations.first().unwrap().0,
+             relocations.last().unwrap().0 + self.pointer_size())
+        };
         let start = ptr.offset;
         let end = start + size;
-        let first = *keys.first().unwrap();
-        let last = *keys.last().unwrap() + self.pointer_size();
 
         let alloc = self.get_mut(ptr.alloc_id)?;
@@ -839,16 +841,14 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         }
 
         // Forget all the relocations.
-        for k in keys {
-            alloc.relocations.remove(&k);
-        }
+        alloc.relocations.remove_range(first ..= last);
 
         Ok(())
     }
 
     fn check_relocation_edges(&self, ptr: MemoryPointer, size: Size) -> EvalResult<'tcx> {
-        let overlapping_start = self.relocations(ptr, Size::from_bytes(0))?.count();
-        let overlapping_end = self.relocations(ptr.offset(size, self)?, Size::from_bytes(0))?.count();
+        let overlapping_start = self.relocations(ptr, Size::from_bytes(0))?.len();
+        let overlapping_end = self.relocations(ptr.offset(size, self)?, Size::from_bytes(0))?.len();
         if overlapping_start + overlapping_end != 0 {
             return err!(ReadPointerAsBytes);
         }
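
Finally, remove_range is the other payoff of the sorted-vector layout: forgetting every relocation between first and last becomes a single drain of a contiguous sub-slice instead of one tree removal per key, as the removed loop did. A sketch for the simplified type from earlier (illustrative; the real method accepts general range bounds):

use std::ops::RangeInclusive;

impl<K: Ord, V> SortedMap<K, V> {
    // Removes all entries whose key falls within `range`, as one contiguous splice.
    pub fn remove_range(&mut self, range: RangeInclusive<K>) {
        let lo = self.data.partition_point(|(k, _)| k < range.start());
        let hi = self.data.partition_point(|(k, _)| k <= range.end());
        self.data.drain(lo..hi);
    }
}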