make Size::from* methods generic in the integer type they accept
RalfJung committed Mar 25, 2020
1 parent f16b491 commit 0bc108a
Showing 9 changed files with 22 additions and 26 deletions.
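
At a glance: Size::from_bytes and Size::from_bits now accept any integer type that converts to u64 via TryInto, so the call sites below simply drop their hand-written u64::try_from(...).unwrap() wrappers and as u64 casts; the checked conversion (and the panic on overflow) happens inside the constructor instead. An illustrative before/after call site (a sketch of the pattern, not quoted verbatim from any one file):

// before: convert the usize length to u64 by hand at every call site
let size = Size::from_bytes(u64::try_from(len).unwrap());
// after: pass the usize directly; from_bytes does the checked conversion
let size = Size::from_bytes(len);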
13 changes: 4 additions & 9 deletions src/librustc/mir/interpret/allocation.rs
@@ -92,7 +92,7 @@ impl<Tag> Allocation<Tag> {
/// Creates a read-only allocation initialized by the given bytes
pub fn from_bytes<'a>(slice: impl Into<Cow<'a, [u8]>>, align: Align) -> Self {
let bytes = slice.into().into_owned();
-let size = Size::from_bytes(u64::try_from(bytes.len()).unwrap());
+let size = Size::from_bytes(bytes.len());
Self {
bytes,
relocations: Relocations::new(),
@@ -293,8 +293,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
let offset = usize::try_from(ptr.offset.bytes()).unwrap();
Ok(match self.bytes[offset..].iter().position(|&c| c == 0) {
Some(size) => {
-let size_with_null =
-    Size::from_bytes(u64::try_from(size.checked_add(1).unwrap()).unwrap());
+let size_with_null = Size::from_bytes(size.checked_add(1).unwrap());
// Go through `get_bytes` for checks and AllocationExtra hooks.
// We read the null, so we include it in the request, but we want it removed
// from the result, so we do subslicing.
@@ -339,7 +338,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
let (lower, upper) = src.size_hint();
let len = upper.expect("can only write bounded iterators");
assert_eq!(lower, len, "can only write iterators with a precise length");
-let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(u64::try_from(len).unwrap()))?;
+let bytes = self.get_bytes_mut(cx, ptr, Size::from_bytes(len))?;
// `zip` would stop when the first iterator ends; we want to definitely
// cover all of `bytes`.
for dest in bytes {
@@ -382,11 +381,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
} else {
match self.relocations.get(&ptr.offset) {
Some(&(tag, alloc_id)) => {
-let ptr = Pointer::new_with_tag(
-    alloc_id,
-    Size::from_bytes(u64::try_from(bits).unwrap()),
-    tag,
-);
+let ptr = Pointer::new_with_tag(alloc_id, Size::from_bytes(bits), tag);
return Ok(ScalarMaybeUndef::Scalar(ptr.into()));
}
None => {}
4 changes: 2 additions & 2 deletions src/librustc/mir/interpret/value.rs
@@ -686,8 +686,8 @@ pub fn get_slice_bytes<'tcx>(cx: &impl HasDataLayout, val: ConstValue<'tcx>) ->
data.get_bytes(
cx,
// invent a pointer, only the offset is relevant anyway
-Pointer::new(AllocId(0), Size::from_bytes(u64::try_from(start).unwrap())),
-Size::from_bytes(u64::try_from(len).unwrap()),
+Pointer::new(AllocId(0), Size::from_bytes(start)),
+Size::from_bytes(len),
)
.unwrap_or_else(|err| bug!("const slice is invalid: {:?}", err))
} else {
4 changes: 2 additions & 2 deletions src/librustc/ty/print/pretty.rs
@@ -981,7 +981,7 @@ pub trait PrettyPrinter<'tcx>:
.alloc_map
.lock()
.unwrap_memory(ptr.alloc_id)
-.get_bytes(&self.tcx(), ptr, Size::from_bytes(*data as u64))
+.get_bytes(&self.tcx(), ptr, Size::from_bytes(*data))
.unwrap();
p!(pretty_print_byte_str(byte_str));
}
@@ -1169,7 +1169,7 @@ pub trait PrettyPrinter<'tcx>:
(ConstValue::ByRef { alloc, offset }, ty::Array(t, n)) if *t == u8_type => {
let n = n.val.try_to_bits(self.tcx().data_layout.pointer_size).unwrap();
// cast is ok because we already checked for pointer size (32 or 64 bit) above
-let n = Size::from_bytes(n as u64);
+let n = Size::from_bytes(n);
let ptr = Pointer::new(AllocId(0), offset);

let byte_str = alloc.get_bytes(&self.tcx(), ptr, n).unwrap();
2 changes: 1 addition & 1 deletion src/librustc_codegen_ssa/mir/operand.rs
@@ -91,7 +91,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
};
let a = Scalar::from(Pointer::new(
bx.tcx().alloc_map.lock().create_memory_alloc(data),
-Size::from_bytes(start as u64),
+Size::from_bytes(start),
));
let a_llval = bx.scalar_to_backend(
a,
2 changes: 1 addition & 1 deletion src/librustc_mir/interpret/memory.rs
@@ -836,7 +836,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
src: impl IntoIterator<Item = u8>,
) -> InterpResult<'tcx> {
let src = src.into_iter();
-let size = Size::from_bytes(src.size_hint().0 as u64);
+let size = Size::from_bytes(src.size_hint().0);
// `write_bytes` checks that this lower bound `size` matches the upper bound and reality.
let ptr = match self.check_ptr_access(ptr, size, Align::from_bytes(1).unwrap())? {
Some(ptr) => ptr,
4 changes: 2 additions & 2 deletions src/librustc_mir/interpret/operand.rs
@@ -1,7 +1,7 @@
//! Functions concerning immediate values and operands, and reading from operands.
//! All high-level functions to read from memory work on operands as sources.

-use std::convert::{TryFrom, TryInto};
+use std::convert::TryFrom;

use super::{InterpCx, MPlaceTy, Machine, MemPlace, Place, PlaceTy};
pub use rustc::mir::interpret::ScalarMaybeUndef;
@@ -570,7 +570,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// where none should happen.
let ptr = Pointer::new(
self.tcx.alloc_map.lock().create_memory_alloc(data),
-Size::from_bytes(start.try_into().unwrap()), // offset: `start`
+Size::from_bytes(start), // offset: `start`
);
Operand::Immediate(Immediate::new_slice(
self.tag_global_base_pointer(ptr).into(),
2 changes: 1 addition & 1 deletion src/librustc_mir/interpret/place.rs
@@ -739,7 +739,7 @@ where
),
Immediate::Scalar(ScalarMaybeUndef::Scalar(Scalar::Raw { size, .. })) => {
assert_eq!(
-Size::from_bytes(size.into()),
+Size::from_bytes(size),
dest.layout.size,
"Size mismatch when writing bits"
)
6 changes: 3 additions & 3 deletions src/librustc_mir_build/hair/pattern/_match.rs
@@ -1920,8 +1920,8 @@ fn slice_pat_covered_by_const<'tcx>(
}
(ConstValue::Slice { data, start, end }, ty::Slice(t)) => {
assert_eq!(*t, tcx.types.u8);
-let ptr = Pointer::new(AllocId(0), Size::from_bytes(start as u64));
-data.get_bytes(&tcx, ptr, Size::from_bytes((end - start) as u64)).unwrap()
+let ptr = Pointer::new(AllocId(0), Size::from_bytes(start));
+data.get_bytes(&tcx, ptr, Size::from_bytes(end - start)).unwrap()
}
// FIXME(oli-obk): create a way to extract fat pointers from ByRef
(_, ty::Slice(_)) => return Ok(false),
@@ -2375,7 +2375,7 @@ fn specialize_one_pattern<'p, 'tcx>(
ty::Slice(t) => {
match value.val {
ty::ConstKind::Value(ConstValue::Slice { data, start, end }) => {
-let offset = Size::from_bytes(start as u64);
+let offset = Size::from_bytes(start);
let n = (end - start) as u64;
(Cow::Borrowed(data), offset, n, t)
}
11 changes: 6 additions & 5 deletions src/librustc_target/abi/mod.rs
@@ -3,7 +3,7 @@ pub use Primitive::*;

use crate::spec::Target;

-use std::convert::TryFrom;
+use std::convert::{TryFrom, TryInto};
use std::ops::{Add, AddAssign, Deref, Mul, Range, RangeInclusive, Sub};

use rustc_index::vec::{Idx, IndexVec};
@@ -241,17 +241,18 @@ pub struct Size {
}

impl Size {
-pub const ZERO: Size = Self::from_bytes(0);
+pub const ZERO: Size = Size { raw: 0 };

#[inline]
-pub fn from_bits(bits: u64) -> Size {
+pub fn from_bits(bits: impl TryInto<u64>) -> Size {
+let bits = bits.try_into().ok().unwrap();
// Avoid potential overflow from `bits + 7`.
Size::from_bytes(bits / 8 + ((bits % 8) + 7) / 8)
}

#[inline]
-pub const fn from_bytes(bytes: u64) -> Size {
-Size { raw: bytes }
+pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
+Size { raw: bytes.try_into().ok().unwrap() }
}

#[inline]
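
This last hunk in src/librustc_target/abi/mod.rs is the heart of the commit: from_bytes and from_bits become generic over impl TryInto<u64>, and because try_into cannot be called in a const fn, from_bytes loses its const qualifier and ZERO is spelled as a struct literal instead. Below is a minimal, self-contained sketch of the same pattern using a toy newtype rather than the real rustc_target::abi::Size; the main() driver and its variable names are invented for illustration.

use std::convert::TryInto;

// Toy stand-in for the Size newtype, just to show the constructor pattern.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Size {
    raw: u64,
}

impl Size {
    // from_bytes is no longer const, so ZERO uses the struct literal directly.
    pub const ZERO: Size = Size { raw: 0 };

    // Accept any integer that converts to u64; panic on negative or oversized
    // values, just like the old explicit u64::try_from(..).unwrap() did.
    pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
        Size { raw: bytes.try_into().ok().unwrap() }
    }
}

fn main() {
    let v = vec![1u8, 2, 3];
    assert_eq!(Size::from_bytes(v.len()).raw, 3); // usize works
    assert_eq!(Size::from_bytes(3u8).raw, 3);     // so does u8
    assert_eq!(Size::from_bytes(0), Size::ZERO);  // and plain integer literals
    // Size::from_bytes(-1i64) would panic at runtime, as the old code did.
}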
