rustc_const_eval: adopt let else in more places
est31 committed Feb 19, 2022
1 parent b8c56fa commit 5cc292e
Showing 13 changed files with 86 additions and 123 deletions.
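All 13 files make the same mechanical change: a two-arm `match` that either binds a value or diverges is rewritten as a `let ... else` binding (a nightly feature at the time of this commit, later stabilized in Rust 1.65). A minimal standalone sketch of the before/after shape, with hypothetical names (`parse_port` is not taken from the diff):

    // Before: a `match` whose only job is to bind the happy-path value,
    // with the fallback arm diverging (return / bug! / span_bug!).
    fn parse_port_match(input: &str) -> u16 {
        let port = match input.parse::<u16>() {
            Ok(p) => p,
            Err(_) => {
                return 0;
            }
        };
        port
    }

    // After: `let ... else` binds directly from the refutable pattern and
    // keeps the divergence in the `else` block, saving a level of nesting.
    fn parse_port_let_else(input: &str) -> u16 {
        let Ok(port) = input.parse::<u16>() else {
            return 0;
        };
        port
    }

    fn main() {
        assert_eq!(parse_port_match("8080"), 8080);
        assert_eq!(parse_port_let_else("8080"), 8080);
        assert_eq!(parse_port_let_else("not a port"), 0);
    }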
5 changes: 2 additions & 3 deletions compiler/rustc_const_eval/src/const_eval/eval_queries.rs
@@ -231,9 +231,8 @@ pub fn eval_to_const_value_raw_provider<'tcx>(
     // Catch such calls and evaluate them instead of trying to load a constant's MIR.
     if let ty::InstanceDef::Intrinsic(def_id) = key.value.instance.def {
         let ty = key.value.instance.ty(tcx, key.param_env);
-        let substs = match ty.kind() {
-            ty::FnDef(_, substs) => substs,
-            _ => bug!("intrinsic with type {:?}", ty),
+        let ty::FnDef(_, substs) = ty.kind() else {
+            bug!("intrinsic with type {:?}", ty);
         };
         return eval_nullary_intrinsic(tcx, key.param_env, def_id, substs).map_err(|error| {
             let span = tcx.def_span(def_id);
15 changes: 6 additions & 9 deletions compiler/rustc_const_eval/src/const_eval/machine.rs
@@ -318,15 +318,12 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
         let intrinsic_name = ecx.tcx.item_name(instance.def_id());

         // CTFE-specific intrinsics.
-        let (dest, ret) = match ret {
-            None => {
-                return Err(ConstEvalErrKind::NeedsRfc(format!(
-                    "calling intrinsic `{}`",
-                    intrinsic_name
-                ))
-                .into());
-            }
-            Some(p) => p,
+        let Some((dest, ret)) = ret else {
+            return Err(ConstEvalErrKind::NeedsRfc(format!(
+                "calling intrinsic `{}`",
+                intrinsic_name
+            ))
+            .into());
         };
         match intrinsic_name {
             sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
14 changes: 5 additions & 9 deletions compiler/rustc_const_eval/src/interpret/eval_context.rs
@@ -631,15 +631,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 // the last field). Can't have foreign types here, how would we
                 // adjust alignment and size for them?
                 let field = layout.field(self, layout.fields.count() - 1);
-                let (unsized_size, unsized_align) =
-                    match self.size_and_align_of(metadata, &field)? {
-                        Some(size_and_align) => size_and_align,
-                        None => {
-                            // A field with an extern type. We don't know the actual dynamic size
-                            // or the alignment.
-                            return Ok(None);
-                        }
-                    };
+                let Some((unsized_size, unsized_align)) = self.size_and_align_of(metadata, &field)? else {
+                    // A field with an extern type. We don't know the actual dynamic size
+                    // or the alignment.
+                    return Ok(None);
+                };

                 // FIXME (#26403, #27023): We should be adding padding
                 // to `sized_size` (to accommodate the `unsized_align`
27 changes: 12 additions & 15 deletions compiler/rustc_const_eval/src/interpret/intern.rs
@@ -84,22 +84,19 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
     trace!("intern_shallow {:?} with {:?}", alloc_id, mode);
     // remove allocation
     let tcx = ecx.tcx;
-    let (kind, mut alloc) = match ecx.memory.alloc_map.remove(&alloc_id) {
-        Some(entry) => entry,
-        None => {
-            // Pointer not found in local memory map. It is either a pointer to the global
-            // map, or dangling.
-            // If the pointer is dangling (neither in local nor global memory), we leave it
-            // to validation to error -- it has the much better error messages, pointing out where
-            // in the value the dangling reference lies.
-            // The `delay_span_bug` ensures that we don't forget such a check in validation.
-            if tcx.get_global_alloc(alloc_id).is_none() {
-                tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
-            }
-            // treat dangling pointers like other statics
-            // just to stop trying to recurse into them
-            return Some(IsStaticOrFn);
+    let Some((kind, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else {
+        // Pointer not found in local memory map. It is either a pointer to the global
+        // map, or dangling.
+        // If the pointer is dangling (neither in local nor global memory), we leave it
+        // to validation to error -- it has the much better error messages, pointing out where
+        // in the value the dangling reference lies.
+        // The `delay_span_bug` ensures that we don't forget such a check in validation.
+        if tcx.get_global_alloc(alloc_id).is_none() {
+            tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
         }
+        // treat dangling pointers like other statics
+        // just to stop trying to recurse into them
+        return Some(IsStaticOrFn);
     };
     // This match is just a canary for future changes to `MemoryKind`, which most likely need
     // changes in this function.
60 changes: 27 additions & 33 deletions compiler/rustc_const_eval/src/interpret/memory.rs
@@ -291,21 +291,18 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
             );
         }

-        let (alloc_kind, mut alloc) = match self.alloc_map.remove(&alloc_id) {
-            Some(alloc) => alloc,
-            None => {
-                // Deallocating global memory -- always an error
-                return Err(match self.tcx.get_global_alloc(alloc_id) {
-                    Some(GlobalAlloc::Function(..)) => {
-                        err_ub_format!("deallocating {}, which is a function", alloc_id)
-                    }
-                    Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
-                        err_ub_format!("deallocating {}, which is static memory", alloc_id)
-                    }
-                    None => err_ub!(PointerUseAfterFree(alloc_id)),
+        let Some((alloc_kind, mut alloc)) = self.alloc_map.remove(&alloc_id) else {
+            // Deallocating global memory -- always an error
+            return Err(match self.tcx.get_global_alloc(alloc_id) {
+                Some(GlobalAlloc::Function(..)) => {
+                    err_ub_format!("deallocating {}, which is a function", alloc_id)
                 }
-                .into());
+                Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
+                    err_ub_format!("deallocating {}, which is static memory", alloc_id)
+                }
+                None => err_ub!(PointerUseAfterFree(alloc_id)),
             }
+            .into());
         };

         if alloc.mutability == Mutability::Not {
@@ -957,9 +954,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
         ptr: Pointer<Option<M::PointerTag>>,
         size: Size,
     ) -> InterpResult<'tcx, &[u8]> {
-        let alloc_ref = match self.get(ptr, size, Align::ONE)? {
-            Some(a) => a,
-            None => return Ok(&[]), // zero-sized access
+        let Some(alloc_ref) = self.get(ptr, size, Align::ONE)? else {
+            // zero-sized access
+            return Ok(&[]);
         };
         // Side-step AllocRef and directly access the underlying bytes more efficiently.
         // (We are staying inside the bounds here so all is good.)
@@ -983,17 +980,14 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
         assert_eq!(lower, len, "can only write iterators with a precise length");

         let size = Size::from_bytes(len);
-        let alloc_ref = match self.get_mut(ptr, size, Align::ONE)? {
-            Some(alloc_ref) => alloc_ref,
-            None => {
-                // zero-sized access
-                assert_matches!(
-                    src.next(),
-                    None,
-                    "iterator said it was empty but returned an element"
-                );
-                return Ok(());
-            }
+        let Some(alloc_ref) = self.get_mut(ptr, size, Align::ONE)? else {
+            // zero-sized access
+            assert_matches!(
+                src.next(),
+                None,
+                "iterator said it was empty but returned an element"
+            );
+            return Ok(());
         };

         // Side-step AllocRef and directly access the underlying bytes more efficiently.
@@ -1043,18 +1037,18 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
         // and once below to get the underlying `&[mut] Allocation`.

         // Source alloc preparations and access hooks.
-        let (src_alloc_id, src_offset, src) = match src_parts {
-            None => return Ok(()), // Zero-sized *source*, that means dst is also zero-sized and we have nothing to do.
-            Some(src_ptr) => src_ptr,
+        let Some((src_alloc_id, src_offset, src)) = src_parts else {
+            // Zero-sized *source*, that means dst is also zero-sized and we have nothing to do.
+            return Ok(());
         };
         let src_alloc = self.get_raw(src_alloc_id)?;
         let src_range = alloc_range(src_offset, size);
         M::memory_read(&self.extra, &src_alloc.extra, src.provenance, src_range)?;
         // We need the `dest` ptr for the next operation, so we get it now.
         // We already did the source checks and called the hooks so we are good to return early.
-        let (dest_alloc_id, dest_offset, dest) = match dest_parts {
-            None => return Ok(()), // Zero-sized *destiantion*.
-            Some(dest_ptr) => dest_ptr,
+        let Some((dest_alloc_id, dest_offset, dest)) = dest_parts else {
+            // Zero-sized *destiantion*.
+            return Ok(());
         };

         // This checks relocation edges on the src, which needs to happen before
15 changes: 6 additions & 9 deletions compiler/rustc_const_eval/src/interpret/operand.rs
@@ -258,15 +258,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             return Ok(None);
         }

-        let alloc = match self.get_alloc(mplace)? {
-            Some(ptr) => ptr,
-            None => {
-                return Ok(Some(ImmTy {
-                    // zero-sized type
-                    imm: Scalar::ZST.into(),
-                    layout: mplace.layout,
-                }));
-            }
+        let Some(alloc) = self.get_alloc(mplace)? else {
+            return Ok(Some(ImmTy {
+                // zero-sized type
+                imm: Scalar::ZST.into(),
+                layout: mplace.layout,
+            }));
         };

         match mplace.layout.abi {
11 changes: 5 additions & 6 deletions compiler/rustc_const_eval/src/interpret/place.rs
@@ -420,9 +420,8 @@ where
     ) -> InterpResult<'tcx, impl Iterator<Item = InterpResult<'tcx, MPlaceTy<'tcx, Tag>>> + 'a>
     {
         let len = base.len(self)?; // also asserts that we have a type where this makes sense
-        let stride = match base.layout.fields {
-            FieldsShape::Array { stride, .. } => stride,
-            _ => span_bug!(self.cur_span(), "mplace_array_fields: expected an array layout"),
+        let FieldsShape::Array { stride, .. } = base.layout.fields else {
+            span_bug!(self.cur_span(), "mplace_array_fields: expected an array layout");
         };
         let layout = base.layout.field(self, 0);
         let dl = &self.tcx.data_layout;
@@ -747,9 +746,9 @@ where

         // Invalid places are a thing: the return place of a diverging function
         let tcx = *self.tcx;
-        let mut alloc = match self.get_alloc_mut(dest)? {
-            Some(a) => a,
-            None => return Ok(()), // zero-sized access
+        let Some(mut alloc) = self.get_alloc_mut(dest)? else {
+            // zero-sized access
+            return Ok(());
         };

         // FIXME: We should check that there are dest.layout.size many bytes available in
15 changes: 6 additions & 9 deletions compiler/rustc_const_eval/src/interpret/step.rs
@@ -46,15 +46,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             return Ok(false);
         }

-        let loc = match self.frame().loc {
-            Ok(loc) => loc,
-            Err(_) => {
-                // We are unwinding and this fn has no cleanup code.
-                // Just go on unwinding.
-                trace!("unwinding: skipping frame");
-                self.pop_stack_frame(/* unwinding */ true)?;
-                return Ok(true);
-            }
+        let Ok(loc) = self.frame().loc else {
+            // We are unwinding and this fn has no cleanup code.
+            // Just go on unwinding.
+            trace!("unwinding: skipping frame");
+            self.pop_stack_frame(/* unwinding */ true)?;
+            return Ok(true);
         };
         let basic_block = &self.body().basic_blocks()[loc.block];

7 changes: 3 additions & 4 deletions compiler/rustc_const_eval/src/interpret/terminator.rs
@@ -321,10 +321,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             | ty::InstanceDef::CloneShim(..)
             | ty::InstanceDef::Item(_) => {
                 // We need MIR for this fn
-                let (body, instance) =
-                    match M::find_mir_or_eval_fn(self, instance, caller_abi, args, ret, unwind)? {
-                        Some(body) => body,
-                        None => return Ok(()),
+                let Some((body, instance)) =
+                    M::find_mir_or_eval_fn(self, instance, caller_abi, args, ret, unwind)? else {
+                        return Ok(());
                     };

                 // Compute callee information using the `instance` returned by
9 changes: 3 additions & 6 deletions compiler/rustc_const_eval/src/interpret/validity.rs
@@ -851,12 +851,9 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
                 // to reject those pointers, we just do not have the machinery to
                 // talk about parts of a pointer.
                 // We also accept uninit, for consistency with the slow path.
-                let alloc = match self.ecx.memory.get(mplace.ptr, size, mplace.align)? {
-                    Some(a) => a,
-                    None => {
-                        // Size 0, nothing more to check.
-                        return Ok(());
-                    }
+                let Some(alloc) = self.ecx.memory.get(mplace.ptr, size, mplace.align)? else {
+                    // Size 0, nothing more to check.
+                    return Ok(());
                 };

                 let allow_uninit_and_ptr = !M::enforce_number_validity(self.ecx);
7 changes: 2 additions & 5 deletions compiler/rustc_const_eval/src/transform/check_consts/check.rs
@@ -134,11 +134,8 @@ impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
             .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
             .map(|(bb, _)| bb);

-        let return_block = match return_block {
-            None => {
-                return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), tainted_by_errors);
-            }
-            Some(bb) => bb,
+        let Some(return_block) = return_block else {
+            return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), tainted_by_errors);
         };

         let return_loc = ccx.body.terminator_loc(return_block);
15 changes: 6 additions & 9 deletions compiler/rustc_const_eval/src/transform/promote_consts.rs
@@ -747,15 +747,12 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
         if loc.statement_index < num_stmts {
             let (mut rvalue, source_info) = {
                 let statement = &mut self.source[loc.block].statements[loc.statement_index];
-                let rhs = match statement.kind {
-                    StatementKind::Assign(box (_, ref mut rhs)) => rhs,
-                    _ => {
-                        span_bug!(
-                            statement.source_info.span,
-                            "{:?} is not an assignment",
-                            statement
-                        );
-                    }
+                let StatementKind::Assign(box (_, ref mut rhs)) = statement.kind else {
+                    span_bug!(
+                        statement.source_info.span,
+                        "{:?} is not an assignment",
+                        statement
+                    );
                 };

                 (
9 changes: 3 additions & 6 deletions compiler/rustc_const_eval/src/util/alignment.rs
@@ -15,12 +15,9 @@
     L: HasLocalDecls<'tcx>,
 {
     debug!("is_disaligned({:?})", place);
-    let pack = match is_within_packed(tcx, local_decls, place) {
-        None => {
-            debug!("is_disaligned({:?}) - not within packed", place);
-            return false;
-        }
-        Some(pack) => pack,
+    let Some(pack) = is_within_packed(tcx, local_decls, place) else {
+        debug!("is_disaligned({:?}) - not within packed", place);
+        return false;
     };

     let ty = place.ty(local_decls, tcx).ty;
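One property the refactor relies on throughout: the `else` block of a `let ... else` must diverge (have type `!`), which is why every `else` above ends in `return`, `bug!`, or `span_bug!`. The pattern can also destructure, as in `let Some((dest, ret)) = ret else { ... }`. A small illustrative sketch, with hypothetical names not taken from the commit:

    // The pattern may destructure a tuple inside the `Option`, and the
    // `else` block must diverge (here: an early return).
    fn first_sum(pairs: &[(u32, u32)]) -> u32 {
        let Some(&(a, b)) = pairs.first() else {
            return 0;
        };
        a + b
    }

    fn main() {
        assert_eq!(first_sum(&[(2, 3), (5, 8)]), 5);
        assert_eq!(first_sum(&[]), 0);
    }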
