Auto merge of #123838 - matthiaskrgr:rollup-zkgwyye, r=matthiaskrgr
Rollup of 7 pull requests

Successful merges:

 - #123599 (remove some things that do not need to be)
 - #123763 (Set the host library path in run-make v2)
 - #123775 (Make `PlaceRef` and `OperandValue::Ref` share a common `PlaceValue` type)
 - #123789 (move QueryKeyStringCache from rustc_middle to rustc_query_impl, where it actually used)
 - #123826 (Move rare overflow error to a cold function)
 - #123827 (linker: Avoid some allocations in search directory iteration)
 - #123829 (Fix revisions syntax in cfg(ub_checks) test)

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed Apr 12, 2024
2 parents 46961d2 + 2679ea0 commit 6475796
Showing 37 changed files with 335 additions and 250 deletions.
1 change: 0 additions & 1 deletion Cargo.lock
@@ -4263,7 +4263,6 @@ dependencies = [
"either",
"field-offset",
"gsgdt",
- "measureme",
"polonius-engine",
"rustc-rayon",
"rustc-rayon-core",
2 changes: 1 addition & 1 deletion compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs
@@ -960,7 +960,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
sm.span_to_diagnostic_string(span)
}
};
- let mut spans: MultiSpan = spans.clone().into();
+ let mut spans: MultiSpan = spans.into();
// Point at all the `continue`s and explicit `break`s in the relevant loops.
for (desc, elements) in [
("`break` exits", &finder.found_breaks),
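The change above drops a redundant `.clone()`: `Into::into` takes its receiver by value, so the original `spans` can be moved directly once it is not needed afterwards. A minimal standalone sketch of the same pattern, using hypothetical stand-in types rather than rustc's real span types:

```rust
// Hypothetical stand-ins; rustc's actual types here are its span collections.
#[derive(Clone, Debug)]
struct Spans(Vec<usize>);

#[derive(Debug)]
struct MultiSpan(Vec<usize>);

impl From<Spans> for MultiSpan {
    fn from(s: Spans) -> Self {
        MultiSpan(s.0)
    }
}

fn main() {
    let spans = Spans(vec![1, 2, 3]);
    // Before: `spans.clone().into()` cloned the data only to move the clone.
    // After:  `spans.into()` moves the original directly, with no extra allocation.
    let multi: MultiSpan = spans.into();
    println!("{multi:?}");
}
```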
10 changes: 4 additions & 6 deletions compiler/rustc_builtin_macros/src/source_util.rs
@@ -333,10 +333,8 @@ fn find_path_suggestion(
.flatten()
.take(4);

- for new_path in root_absolute.chain(add).chain(remove) {
- if source_map.file_exists(&base_dir.join(&new_path)) {
- return Some(new_path);
- }
- }
- None
+ root_absolute
+ .chain(add)
+ .chain(remove)
+ .find(|new_path| source_map.file_exists(&base_dir.join(&new_path)))
}
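The rewrite above collapses a manual search loop into `Iterator::find`, which returns the first element satisfying the predicate as `Some(..)` and `None` otherwise, exactly what the old `for`/`return Some(..)`/`None` shape spelled out. A rough sketch of the equivalence on plain strings, with a closure standing in for the `source_map.file_exists(..)` check:

```rust
fn first_existing_loop<'a>(
    candidates: &[&'a str],
    exists: impl Fn(&str) -> bool,
) -> Option<&'a str> {
    // Old shape: explicit loop, early return on the first hit, trailing `None`.
    for &c in candidates {
        if exists(c) {
            return Some(c);
        }
    }
    None
}

fn first_existing_find<'a>(
    candidates: &[&'a str],
    exists: impl Fn(&str) -> bool,
) -> Option<&'a str> {
    // New shape: `find` stops at the first match and wraps it in `Some`.
    candidates.iter().copied().find(|&c| exists(c))
}

fn main() {
    let exists = |p: &str| p.ends_with(".rs"); // pretend "file exists" check
    let paths = ["a.txt", "b.rs", "c.rs"];
    assert_eq!(first_existing_loop(&paths, exists), Some("b.rs"));
    assert_eq!(first_existing_find(&paths, exists), Some("b.rs"));
}
```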
25 changes: 13 additions & 12 deletions compiler/rustc_codegen_gcc/src/builder.rs
@@ -974,7 +974,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
&mut self,
place: PlaceRef<'tcx, RValue<'gcc>>,
) -> OperandRef<'tcx, RValue<'gcc>> {
- assert_eq!(place.llextra.is_some(), place.layout.is_unsized());
+ assert_eq!(place.val.llextra.is_some(), place.layout.is_unsized());

if place.layout.is_zst() {
return OperandRef::zero_sized(place.layout);
@@ -999,10 +999,11 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
}
}

- let val = if let Some(llextra) = place.llextra {
- OperandValue::Ref(place.llval, Some(llextra), place.align)
+ let val = if let Some(_) = place.val.llextra {
+ // FIXME: Merge with the `else` below?
+ OperandValue::Ref(place.val)
} else if place.layout.is_gcc_immediate() {
- let load = self.load(place.layout.gcc_type(self), place.llval, place.align);
+ let load = self.load(place.layout.gcc_type(self), place.val.llval, place.val.align);
if let abi::Abi::Scalar(ref scalar) = place.layout.abi {
scalar_load_metadata(self, load, scalar);
}
@@ -1012,9 +1013,9 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {

let mut load = |i, scalar: &abi::Scalar, align| {
let llptr = if i == 0 {
- place.llval
+ place.val.llval
} else {
- self.inbounds_ptradd(place.llval, self.const_usize(b_offset.bytes()))
+ self.inbounds_ptradd(place.val.llval, self.const_usize(b_offset.bytes()))
};
let llty = place.layout.scalar_pair_element_gcc_type(self, i);
let load = self.load(llty, llptr, align);
@@ -1027,11 +1028,11 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
};

OperandValue::Pair(
- load(0, a, place.align),
- load(1, b, place.align.restrict_for_offset(b_offset)),
+ load(0, a, place.val.align),
+ load(1, b, place.val.align.restrict_for_offset(b_offset)),
)
} else {
- OperandValue::Ref(place.llval, None, place.align)
+ OperandValue::Ref(place.val)
};

OperandRef { val, layout: place.layout }
@@ -1045,8 +1046,8 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
) {
let zero = self.const_usize(0);
let count = self.const_usize(count);
- let start = dest.project_index(self, zero).llval;
- let end = dest.project_index(self, count).llval;
+ let start = dest.project_index(self, zero).val.llval;
+ let end = dest.project_index(self, count).val.llval;

let header_bb = self.append_sibling_block("repeat_loop_header");
let body_bb = self.append_sibling_block("repeat_loop_body");
@@ -1064,7 +1065,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
self.cond_br(keep_going, body_bb, next_bb);

self.switch_to_block(body_bb);
- let align = dest.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
+ let align = dest.val.align.restrict_for_offset(dest.layout.field(self.cx(), 0).size);
cg_elem.val.store(self, PlaceRef::new_sized_aligned(current_val, cg_elem.layout, align));

let next = self.inbounds_gep(
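Most of the churn in this file, and in the LLVM backend further down, comes from #123775: the pointer, the optional unsized metadata, and the alignment of a place now travel together as one `PlaceValue`, and `OperandValue::Ref` wraps that bundle instead of the old loose `(llval, Option<llextra>, align)` triple. A simplified, self-contained sketch of that shape; the field and constructor names mirror the diff, while the `Align` newtype and the `u64` backend value are stand-ins, not rustc's real API:

```rust
/// Stand-in for rustc's alignment type.
#[derive(Clone, Copy, Debug)]
struct Align(u64);

/// The "address half" of a place: pointer, optional unsized metadata, alignment.
/// `V` is the backend value handle (an LLVM or GCC value in rustc).
#[derive(Clone, Copy, Debug)]
struct PlaceValue<V> {
    llval: V,
    llextra: Option<V>,
    align: Align,
}

impl<V> PlaceValue<V> {
    /// Sized places carry no extra metadata.
    fn new_sized(llval: V, align: Align) -> Self {
        PlaceValue { llval, llextra: None, align }
    }
}

/// Operand representation: `Ref` now carries a whole `PlaceValue`.
#[allow(dead_code)]
#[derive(Debug)]
enum OperandValue<V> {
    Ref(PlaceValue<V>),
    Immediate(V),
    Pair(V, V),
}

fn main() {
    let ptr: u64 = 0x1000; // pretend backend value
    // Old call shape:  OperandValue::Ref(ptr, None, align)
    // New call shape:  OperandValue::Ref(PlaceValue::new_sized(ptr, align))
    let val = OperandValue::Ref(PlaceValue::new_sized(ptr, Align(8)));
    println!("{val:?}");
}
```

The `new_sized` constructor is the form the `ArgAbi` code below uses for sized indirect arguments; unsized ones fill in `llextra` explicitly, as in the `PassMode::Indirect { meta_attrs: Some(_), .. }` arms.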
21 changes: 13 additions & 8 deletions compiler/rustc_codegen_gcc/src/intrinsic/mod.rs
@@ -11,7 +11,7 @@ use rustc_codegen_ssa::base::wants_msvc_seh;
use rustc_codegen_ssa::common::IntPredicate;
use rustc_codegen_ssa::errors::InvalidMonomorphization;
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
- use rustc_codegen_ssa::mir::place::PlaceRef;
+ use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
use rustc_codegen_ssa::traits::{
ArgAbiMethods, BuilderMethods, ConstMethods, IntrinsicCallMethods,
};
@@ -354,7 +354,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {

let block = self.llbb();
let extended_asm = block.add_extended_asm(None, "");
- extended_asm.add_input_operand(None, "r", result.llval);
+ extended_asm.add_input_operand(None, "r", result.val.llval);
extended_asm.add_clobber("memory");
extended_asm.set_volatile_flag(true);

@@ -388,8 +388,8 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
if !fn_abi.ret.is_ignore() {
if let PassMode::Cast { cast: ty, .. } = &fn_abi.ret.mode {
let ptr_llty = self.type_ptr_to(ty.gcc_type(self));
- let ptr = self.pointercast(result.llval, ptr_llty);
- self.store(llval, ptr, result.align);
+ let ptr = self.pointercast(result.val.llval, ptr_llty);
+ self.store(llval, ptr, result.val.align);
} else {
OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
.val
@@ -502,7 +502,7 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
return;
}
if self.is_sized_indirect() {
- OperandValue::Ref(val, None, self.layout.align.abi).store(bx, dst)
+ OperandValue::Ref(PlaceValue::new_sized(val, self.layout.align.abi)).store(bx, dst)
} else if self.is_unsized_indirect() {
bug!("unsized `ArgAbi` must be handled through `store_fn_arg`");
} else if let PassMode::Cast { ref cast, .. } = self.mode {
@@ -511,7 +511,7 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
let can_store_through_cast_ptr = false;
if can_store_through_cast_ptr {
let cast_ptr_llty = bx.type_ptr_to(cast.gcc_type(bx));
- let cast_dst = bx.pointercast(dst.llval, cast_ptr_llty);
+ let cast_dst = bx.pointercast(dst.val.llval, cast_ptr_llty);
bx.store(val, cast_dst, self.layout.align.abi);
} else {
// The actual return type is a struct, but the ABI
@@ -539,7 +539,7 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {

// ... and then memcpy it to the intended destination.
bx.memcpy(
- dst.llval,
+ dst.val.llval,
self.layout.align.abi,
llscratch,
scratch_align,
@@ -571,7 +571,12 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
OperandValue::Pair(next(), next()).store(bx, dst);
}
PassMode::Indirect { meta_attrs: Some(_), .. } => {
- OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
+ let place_val = PlaceValue {
+ llval: next(),
+ llextra: Some(next()),
+ align: self.layout.align.abi,
+ };
+ OperandValue::Ref(place_val).store(bx, dst);
}
PassMode::Direct(_)
| PassMode::Indirect { meta_attrs: None, .. }
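In the `PassMode::Indirect { meta_attrs: Some(_), .. }` arm above, an unsized argument arrives as two consecutive backend values: the data pointer and its metadata (a slice length or a vtable pointer). The new code packs both straight into a `PlaceValue` with `llextra` set, rather than threading three positional arguments into `OperandValue::Ref`. A hedged sketch of just that packing, with a stub `next()` supplier in place of the real argument iterator and the same simplified types as the sketch above:

```rust
#[derive(Clone, Copy, Debug)]
struct Align(u64);

#[derive(Debug)]
struct PlaceValue<V> {
    llval: V,
    llextra: Option<V>,
    align: Align,
}

fn main() {
    // Pretend ABI argument list: the data pointer first, then the metadata.
    let mut args = [0x2000_u64, 3].into_iter();
    let mut next = || args.next().expect("missing ABI argument");

    // Mirrors the diff: struct fields are evaluated left to right, so the data
    // pointer is consumed before the metadata, matching the old
    // `OperandValue::Ref(next(), Some(next()), align)` argument order.
    let place_val = PlaceValue {
        llval: next(),
        llextra: Some(next()),
        align: Align(8),
    };
    println!("{place_val:?}");
}
```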
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_gcc/src/intrinsic/simd.rs
@@ -82,7 +82,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
let place = PlaceRef::alloca(bx, args[0].layout);
args[0].val.store(bx, place);
let int_ty = bx.type_ix(expected_bytes * 8);
- let ptr = bx.pointercast(place.llval, bx.cx.type_ptr_to(int_ty));
+ let ptr = bx.pointercast(place.val.llval, bx.cx.type_ptr_to(int_ty));
bx.load(int_ty, ptr, Align::ONE)
}
_ => return_error!(InvalidMonomorphization::InvalidBitmask {
13 changes: 9 additions & 4 deletions compiler/rustc_codegen_llvm/src/abi.rs
@@ -7,7 +7,7 @@ use crate::type_of::LayoutLlvmExt;
use crate::value::Value;

use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
- use rustc_codegen_ssa::mir::place::PlaceRef;
+ use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
use rustc_codegen_ssa::traits::*;
use rustc_codegen_ssa::MemFlags;
use rustc_middle::bug;
@@ -207,7 +207,7 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
// Sized indirect arguments
PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
let align = attrs.pointee_align.unwrap_or(self.layout.align.abi);
- OperandValue::Ref(val, None, align).store(bx, dst);
+ OperandValue::Ref(PlaceValue::new_sized(val, align)).store(bx, dst);
}
// Unsized indirect arguments
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
@@ -233,7 +233,7 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
bx.store(val, llscratch, scratch_align);
// ... and then memcpy it to the intended destination.
bx.memcpy(
- dst.llval,
+ dst.val.llval,
self.layout.align.abi,
llscratch,
scratch_align,
@@ -265,7 +265,12 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
OperandValue::Pair(next(), next()).store(bx, dst);
}
PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
- OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
+ let place_val = PlaceValue {
+ llval: next(),
+ llextra: Some(next()),
+ align: self.layout.align.abi,
+ };
+ OperandValue::Ref(place_val).store(bx, dst);
}
PassMode::Direct(_)
| PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ }
21 changes: 11 additions & 10 deletions compiler/rustc_codegen_llvm/src/builder.rs
@@ -535,7 +535,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
panic!("unsized locals must not be `extern` types");
}
}
- assert_eq!(place.llextra.is_some(), place.layout.is_unsized());
+ assert_eq!(place.val.llextra.is_some(), place.layout.is_unsized());

if place.layout.is_zst() {
return OperandRef::zero_sized(place.layout);
@@ -579,13 +579,14 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
}
}

- let val = if let Some(llextra) = place.llextra {
- OperandValue::Ref(place.llval, Some(llextra), place.align)
+ let val = if let Some(_) = place.val.llextra {
+ // FIXME: Merge with the `else` below?
+ OperandValue::Ref(place.val)
} else if place.layout.is_llvm_immediate() {
let mut const_llval = None;
let llty = place.layout.llvm_type(self);
unsafe {
- if let Some(global) = llvm::LLVMIsAGlobalVariable(place.llval) {
+ if let Some(global) = llvm::LLVMIsAGlobalVariable(place.val.llval) {
if llvm::LLVMIsGlobalConstant(global) == llvm::True {
if let Some(init) = llvm::LLVMGetInitializer(global) {
if self.val_ty(init) == llty {
@@ -596,7 +597,7 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
}
}
let llval = const_llval.unwrap_or_else(|| {
- let load = self.load(llty, place.llval, place.align);
+ let load = self.load(llty, place.val.llval, place.val.align);
if let abi::Abi::Scalar(scalar) = place.layout.abi {
scalar_load_metadata(self, load, scalar, place.layout, Size::ZERO);
}
@@ -608,9 +609,9 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {

let mut load = |i, scalar: abi::Scalar, layout, align, offset| {
let llptr = if i == 0 {
- place.llval
+ place.val.llval
} else {
- self.inbounds_ptradd(place.llval, self.const_usize(b_offset.bytes()))
+ self.inbounds_ptradd(place.val.llval, self.const_usize(b_offset.bytes()))
};
let llty = place.layout.scalar_pair_element_llvm_type(self, i, false);
let load = self.load(llty, llptr, align);
@@ -619,11 +620,11 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
};

OperandValue::Pair(
- load(0, a, place.layout, place.align, Size::ZERO),
- load(1, b, place.layout, place.align.restrict_for_offset(b_offset), b_offset),
+ load(0, a, place.layout, place.val.align, Size::ZERO),
+ load(1, b, place.layout, place.val.align.restrict_for_offset(b_offset), b_offset),
)
} else {
- OperandValue::Ref(place.llval, None, place.align)
+ OperandValue::Ref(place.val)
};

OperandRef { val, layout: place.layout }
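The `load_operand` edits above are mechanical (`place.llval` becomes `place.val.llval`, and the `Ref` arms now pass `place.val` through unchanged), but the control flow they sit in is the same in both backends: an unsized place can only be handed back by reference, an immediate layout becomes a single loaded value, and a scalar pair becomes two loads at their respective offsets. A backend-free sketch of that three-way decision, with a plain slice standing in for memory and boolean flags standing in for rustc's layout queries:

```rust
#[derive(Debug)]
enum OperandValue {
    /// Not loadable as a value: carry the address (here just an index) instead.
    Ref(usize),
    Immediate(u64),
    Pair(u64, u64),
}

/// Stand-in for rustc's layout queries.
struct Layout {
    is_unsized: bool,
    is_immediate: bool,
    is_scalar_pair: bool,
}

fn load_operand(mem: &[u64], addr: usize, layout: &Layout) -> OperandValue {
    if layout.is_unsized {
        // Unsized values cannot be loaded; return the place by reference.
        OperandValue::Ref(addr)
    } else if layout.is_immediate {
        // A single scalar: one load.
        OperandValue::Immediate(mem[addr])
    } else if layout.is_scalar_pair {
        // Two scalars: two loads, the second at its own offset (here just +1).
        OperandValue::Pair(mem[addr], mem[addr + 1])
    } else {
        // Larger aggregates also stay in memory.
        OperandValue::Ref(addr)
    }
}

fn main() {
    let mem = [10, 20, 30];
    let scalar = Layout { is_unsized: false, is_immediate: true, is_scalar_pair: false };
    let pair = Layout { is_unsized: false, is_immediate: false, is_scalar_pair: true };
    println!("{:?}", load_operand(&mem, 0, &scalar)); // Immediate(10)
    println!("{:?}", load_operand(&mem, 1, &pair));   // Pair(20, 30)
}
```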
8 changes: 4 additions & 4 deletions compiler/rustc_codegen_llvm/src/intrinsic.rs
@@ -264,7 +264,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
llvm::LLVMSetAlignment(load, align);
}
if !result.layout.is_zst() {
- self.store(load, result.llval, result.align);
+ self.store_to_place(load, result.val);
}
return Ok(());
}
@@ -428,7 +428,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {

sym::black_box => {
args[0].val.store(self, result);
- let result_val_span = [result.llval];
+ let result_val_span = [result.val.llval];
// We need to "use" the argument in some way LLVM can't introspect, and on
// targets that support it we can typically leverage inline assembly to do
// this. LLVM's interpretation of inline assembly is that it's, well, a black
@@ -482,7 +482,7 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {

if !fn_abi.ret.is_ignore() {
if let PassMode::Cast { .. } = &fn_abi.ret.mode {
- self.store(llval, result.llval, result.align);
+ self.store(llval, result.val.llval, result.val.align);
} else {
OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
.val
@@ -1065,7 +1065,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
let place = PlaceRef::alloca(bx, args[0].layout);
args[0].val.store(bx, place);
let int_ty = bx.type_ix(expected_bytes * 8);
- bx.load(int_ty, place.llval, Align::ONE)
+ bx.load(int_ty, place.val.llval, Align::ONE)
}
_ => return_error!(InvalidMonomorphization::InvalidBitmask {
span,
27 changes: 12 additions & 15 deletions compiler/rustc_codegen_ssa/src/back/link.rs
@@ -56,8 +56,13 @@ use std::{env, fmt, fs, io, mem, str};
pub struct SearchPaths(OnceCell<Vec<PathBuf>>);

impl SearchPaths {
- pub(super) fn get(&self, sess: &Session) -> &[PathBuf] {
- self.0.get_or_init(|| archive_search_paths(sess))
+ pub(super) fn get(&self, sess: &Session) -> impl Iterator<Item = &Path> {
+ let native_search_paths = || {
+ Vec::from_iter(
+ sess.target_filesearch(PathKind::Native).search_path_dirs().map(|p| p.to_owned()),
+ )
+ };
+ self.0.get_or_init(native_search_paths).iter().map(|p| &**p)
}
}

@@ -310,8 +315,6 @@ fn link_rlib<'a>(
flavor: RlibFlavor,
tmpdir: &MaybeTempDir,
) -> Result<Box<dyn ArchiveBuilder + 'a>, ErrorGuaranteed> {
- let lib_search_paths = archive_search_paths(sess);
-
let mut ab = archive_builder_builder.new_archive_builder(sess);

let trailing_metadata = match flavor {
@@ -378,26 +381,24 @@ fn link_rlib<'a>(
// feature then we'll need to figure out how to record what objects were
// loaded from the libraries found here and then encode that into the
// metadata of the rlib we're generating somehow.
+ let search_paths = SearchPaths::default();
for lib in codegen_results.crate_info.used_libraries.iter() {
let NativeLibKind::Static { bundle: None | Some(true), .. } = lib.kind else {
continue;
};
+ let search_paths = search_paths.get(sess);
if flavor == RlibFlavor::Normal
&& let Some(filename) = lib.filename
{
- let path = find_native_static_library(filename.as_str(), true, &lib_search_paths, sess);
+ let path = find_native_static_library(filename.as_str(), true, search_paths, sess);
let src = read(path)
.map_err(|e| sess.dcx().emit_fatal(errors::ReadFileError { message: e }))?;
let (data, _) = create_wrapper_file(sess, ".bundled_lib".to_string(), &src);
let wrapper_file = emit_wrapper_file(sess, &data, tmpdir, filename.as_str());
packed_bundled_libs.push(wrapper_file);
} else {
- let path = find_native_static_library(
- lib.name.as_str(),
- lib.verbatim,
- &lib_search_paths,
- sess,
- );
+ let path =
+ find_native_static_library(lib.name.as_str(), lib.verbatim, search_paths, sess);
ab.add_archive(&path, Box::new(|_| false)).unwrap_or_else(|error| {
sess.dcx().emit_fatal(errors::AddNativeLibrary { library_path: path, error })
});
@@ -1445,10 +1446,6 @@ fn preserve_objects_for_their_debuginfo(sess: &Session) -> (bool, bool) {
}
}

- fn archive_search_paths(sess: &Session) -> Vec<PathBuf> {
- sess.target_filesearch(PathKind::Native).search_path_dirs()
- }
-
#[derive(PartialEq)]
enum RlibFlavor {
Normal,
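The `link.rs` changes come from #123827: the native search directories are now computed lazily, at most once, inside `SearchPaths` via `OnceCell::get_or_init`, and callers borrow them as an iterator of `&Path` instead of each receiving a freshly allocated `Vec<PathBuf>` from `archive_search_paths` (which is why that helper could be deleted). A minimal sketch of the same lazy-initialization pattern with `std::cell::OnceCell`; the hard-coded directory list stands in for what rustc derives from the session's target file search:

```rust
use std::cell::OnceCell;
use std::path::{Path, PathBuf};

#[derive(Default)]
struct SearchPaths(OnceCell<Vec<PathBuf>>);

impl SearchPaths {
    /// Compute the directory list on first use, then only borrow it afterwards.
    fn get(&self) -> impl Iterator<Item = &Path> {
        self.0
            .get_or_init(|| {
                println!("initializing search paths (runs once)");
                vec![PathBuf::from("/usr/lib"), PathBuf::from("/usr/local/lib")]
            })
            .iter()
            .map(|p| p.as_path())
    }
}

fn main() {
    let paths = SearchPaths::default();
    // The closure passed to `get_or_init` runs on the first call only;
    // the second call reuses the cached Vec and allocates nothing new.
    for _ in 0..2 {
        let dirs: Vec<&Path> = paths.get().collect();
        println!("{dirs:?}");
    }
}
```

Handing out a borrowing iterator is what lets the `find_native_static_library` calls in the loop above reuse the same cached list on every iteration, which appears to be the allocation the PR title refers to avoiding.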
