Rollup of 4 pull requests #112420

Merged
merged 9 commits on Jun 8, 2023
4 changes: 2 additions & 2 deletions compiler/rustc_const_eval/src/interpret/intrinsics.rs
@@ -77,7 +77,7 @@ pub(crate) fn eval_nullary_intrinsic<'tcx>(
}
sym::type_id => {
ensure_monomorphic_enough(tcx, tp_ty)?;
ConstValue::from_u64(tcx.type_id_hash(tp_ty).as_u64())
ConstValue::from_u128(tcx.type_id_hash(tp_ty).as_u128())
}
sym::variant_count => match tp_ty.kind() {
// Correctly handles non-monomorphic calls, so there is no need for ensure_monomorphic_enough.
@@ -169,7 +169,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let ty = match intrinsic_name {
sym::pref_align_of | sym::variant_count => self.tcx.types.usize,
sym::needs_drop => self.tcx.types.bool,
sym::type_id => self.tcx.types.u64,
sym::type_id => self.tcx.types.u128,
sym::type_name => self.tcx.mk_static_str(),
_ => bug!(),
};
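For context: this nullary intrinsic goes through the const interpreter because `TypeId::of` is const-callable behind the unstable `const_type_id` feature (the same feature named in the `#[rustc_const_unstable]` attributes later in this diff). A minimal, nightly-only sketch of the constant that now has to be 128 bits wide:

```rust
// Nightly-only sketch; assumes the unstable `const_type_id` feature gate.
#![feature(const_type_id)]

use std::any::TypeId;

// Forced through the const-eval path changed above: the interpreter must
// materialize the full 128-bit type id at compile time.
const U32_ID: TypeId = TypeId::of::<u32>();

fn main() {
    // The compile-time value agrees with the runtime one.
    assert_eq!(U32_ID, TypeId::of::<u32>());
}
```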
2 changes: 1 addition & 1 deletion compiler/rustc_hir_analysis/src/check/intrinsic.rs
@@ -217,7 +217,7 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {
sym::needs_drop => (1, Vec::new(), tcx.types.bool),

sym::type_name => (1, Vec::new(), tcx.mk_static_str()),
sym::type_id => (1, Vec::new(), tcx.types.u64),
sym::type_id => (1, Vec::new(), tcx.types.u128),
sym::offset => (2, vec![param(0), param(1)], param(0)),
sym::arith_offset => (
1,
83 changes: 35 additions & 48 deletions compiler/rustc_hir_typeck/src/writeback.rs
@@ -14,7 +14,6 @@ use rustc_middle::mir::FakeReadCause;
use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCast};
use rustc_middle::ty::fold::{TypeFoldable, TypeFolder, TypeSuperFoldable};
use rustc_middle::ty::visit::{TypeSuperVisitable, TypeVisitable, TypeVisitableExt};
use rustc_middle::ty::TypeckResults;
use rustc_middle::ty::{self, ClosureSizeProfileData, Ty, TyCtxt};
use rustc_span::symbol::sym;
use rustc_span::Span;
@@ -148,31 +147,25 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
fn fix_scalar_builtin_expr(&mut self, e: &hir::Expr<'_>) {
match e.kind {
hir::ExprKind::Unary(hir::UnOp::Neg | hir::UnOp::Not, inner) => {
let inner_ty = self.fcx.node_ty(inner.hir_id);
let inner_ty = self.fcx.resolve_vars_if_possible(inner_ty);
let inner_ty = self.typeck_results.node_type(inner.hir_id);

if inner_ty.is_scalar() {
let mut typeck_results = self.fcx.typeck_results.borrow_mut();
typeck_results.type_dependent_defs_mut().remove(e.hir_id);
typeck_results.node_substs_mut().remove(e.hir_id);
self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
self.typeck_results.node_substs_mut().remove(e.hir_id);
}
}
hir::ExprKind::Binary(ref op, lhs, rhs) | hir::ExprKind::AssignOp(ref op, lhs, rhs) => {
let lhs_ty = self.fcx.node_ty(lhs.hir_id);
let lhs_ty = self.fcx.resolve_vars_if_possible(lhs_ty);

let rhs_ty = self.fcx.node_ty(rhs.hir_id);
let rhs_ty = self.fcx.resolve_vars_if_possible(rhs_ty);
let lhs_ty = self.typeck_results.node_type(lhs.hir_id);
let rhs_ty = self.typeck_results.node_type(rhs.hir_id);

if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
let mut typeck_results = self.fcx.typeck_results.borrow_mut();
typeck_results.type_dependent_defs_mut().remove(e.hir_id);
typeck_results.node_substs_mut().remove(e.hir_id);
self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
self.typeck_results.node_substs_mut().remove(e.hir_id);

match e.kind {
hir::ExprKind::Binary(..) => {
if !op.node.is_by_value() {
let mut adjustments = typeck_results.adjustments_mut();
let mut adjustments = self.typeck_results.adjustments_mut();
if let Some(a) = adjustments.get_mut(lhs.hir_id) {
a.pop();
}
@@ -182,7 +175,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
}
}
hir::ExprKind::AssignOp(..)
if let Some(a) = typeck_results.adjustments_mut().get_mut(lhs.hir_id) =>
if let Some(a) = self.typeck_results.adjustments_mut().get_mut(lhs.hir_id) =>
{
a.pop();
}
@@ -200,16 +193,14 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
// if they are not we don't modify the expr, hence we bypass the ICE
fn is_builtin_index(
&mut self,
typeck_results: &TypeckResults<'tcx>,
e: &hir::Expr<'_>,
base_ty: Ty<'tcx>,
index_ty: Ty<'tcx>,
) -> bool {
if let Some(elem_ty) = base_ty.builtin_index() {
let Some(exp_ty) = typeck_results.expr_ty_opt(e) else {return false;};
let resolved_exp_ty = self.resolve(exp_ty, &e.span);

elem_ty == resolved_exp_ty && index_ty == self.fcx.tcx.types.usize
if let Some(elem_ty) = base_ty.builtin_index()
&& let Some(exp_ty) = self.typeck_results.expr_ty_opt(e)
{
elem_ty == exp_ty && index_ty == self.fcx.tcx.types.usize
} else {
false
}
@@ -221,38 +212,34 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
// usize-ish
fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
if let hir::ExprKind::Index(ref base, ref index) = e.kind {
let mut typeck_results = self.fcx.typeck_results.borrow_mut();

// All valid indexing looks like this; might encounter non-valid indexes at this point.
let base_ty = typeck_results
.expr_ty_adjusted_opt(base)
.map(|t| self.fcx.resolve_vars_if_possible(t).kind());
let base_ty = self.typeck_results.expr_ty_adjusted_opt(base);
if base_ty.is_none() {
// When encountering `return [0][0]` outside of a `fn` body we can encounter a base
// that isn't in the type table. We assume more relevant errors have already been
// emitted, so we delay an ICE if none have. (#64638)
self.tcx().sess.delay_span_bug(e.span, format!("bad base: `{:?}`", base));
}
if let Some(ty::Ref(_, base_ty, _)) = base_ty {
let index_ty = typeck_results.expr_ty_adjusted_opt(index).unwrap_or_else(|| {
// When encountering `return [0][0]` outside of a `fn` body we would attempt
// to access a nonexistent index. We assume that more relevant errors will
// already have been emitted, so we only gate on this with an ICE if no
// error has been emitted. (#64638)
self.fcx.tcx.ty_error_with_message(
e.span,
format!("bad index {:?} for base: `{:?}`", index, base),
)
});
let index_ty = self.fcx.resolve_vars_if_possible(index_ty);
let resolved_base_ty = self.resolve(*base_ty, &base.span);

if self.is_builtin_index(&typeck_results, e, resolved_base_ty, index_ty) {
if let Some(base_ty) = base_ty
&& let ty::Ref(_, base_ty_inner, _) = *base_ty.kind()
{
let index_ty =
self.typeck_results.expr_ty_adjusted_opt(index).unwrap_or_else(|| {
// When encountering `return [0][0]` outside of a `fn` body we would attempt
// to access a nonexistent index. We assume that more relevant errors will
// already have been emitted, so we only gate on this with an ICE if no
// error has been emitted. (#64638)
self.fcx.tcx.ty_error_with_message(
e.span,
format!("bad index {:?} for base: `{:?}`", index, base),
)
});
if self.is_builtin_index(e, base_ty_inner, index_ty) {
// Remove the method call record
typeck_results.type_dependent_defs_mut().remove(e.hir_id);
typeck_results.node_substs_mut().remove(e.hir_id);
self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
self.typeck_results.node_substs_mut().remove(e.hir_id);

if let Some(a) = typeck_results.adjustments_mut().get_mut(base.hir_id) {
if let Some(a) = self.typeck_results.adjustments_mut().get_mut(base.hir_id) {
// Discard the need for a mutable borrow

// Extra adjustment made when indexing causes a drop
@@ -283,9 +270,6 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {

impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
fn visit_expr(&mut self, e: &'tcx hir::Expr<'tcx>) {
self.fix_scalar_builtin_expr(e);
self.fix_index_builtin_expr(e);

match e.kind {
hir::ExprKind::Closure(&hir::Closure { body, .. }) => {
let body = self.fcx.tcx.hir().body(body);
@@ -314,6 +298,9 @@ impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {

self.visit_node_id(e.span, e.hir_id);
intravisit::walk_expr(self, e);

self.fix_scalar_builtin_expr(e);
self.fix_index_builtin_expr(e);
}

fn visit_generic_param(&mut self, p: &'tcx hir::GenericParam<'tcx>) {
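A note on what `is_builtin_index` / `fix_index_builtin_expr` are guarding: type checking first records every `a[i]` as a call to `Index::index`, and writeback removes that method-call record only when the indexing turns out to be the built-in kind (array or slice base, `usize` index). A rough standalone illustration of the two cases (the names below are ours, not the compiler's):

```rust
use std::ops::Index;

struct Wrapper(Vec<i32>);

impl Index<usize> for Wrapper {
    type Output = i32;
    fn index(&self, i: usize) -> &i32 {
        &self.0[i]
    }
}

fn main() {
    let a = [1, 2, 3];
    // Built-in indexing: `[i32; 3]` indexed by `usize`. Writeback strips the
    // provisional `Index::index` record so later stages treat this as a
    // primitive operation.
    let _x = a[0];

    // Overloaded indexing: this genuinely dispatches to the user-written
    // `Index` impl, so the method-call record has to stay.
    let w = Wrapper(vec![4, 5, 6]);
    let _y = w[1];
}
```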
9 changes: 9 additions & 0 deletions compiler/rustc_middle/src/mir/interpret/value.rs
@@ -97,6 +97,10 @@ impl<'tcx> ConstValue<'tcx> {
ConstValue::Scalar(Scalar::from_u64(i))
}

pub fn from_u128(i: u128) -> Self {
ConstValue::Scalar(Scalar::from_u128(i))
}

pub fn from_target_usize(i: u64, cx: &impl HasDataLayout) -> Self {
ConstValue::Scalar(Scalar::from_target_usize(i, cx))
}
@@ -240,6 +244,11 @@ impl<Prov> Scalar<Prov> {
Scalar::Int(i.into())
}

#[inline]
pub fn from_u128(i: u128) -> Self {
Scalar::Int(i.into())
}

#[inline]
pub fn from_target_usize(i: u64, cx: &impl HasDataLayout) -> Self {
Self::from_uint(i, cx.data_layout().pointer_size)
4 changes: 2 additions & 2 deletions compiler/rustc_middle/src/ty/util.rs
@@ -11,7 +11,7 @@ use crate::ty::{
use crate::ty::{GenericArgKind, SubstsRef};
use rustc_apfloat::Float as _;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::stable_hasher::{Hash64, HashStable, StableHasher};
use rustc_data_structures::stable_hasher::{Hash128, HashStable, StableHasher};
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_hir::def::{CtorOf, DefKind, Res};
@@ -129,7 +129,7 @@ impl IntTypeExt for IntegerType {
impl<'tcx> TyCtxt<'tcx> {
/// Creates a hash of the type `Ty` which will be the same no matter what crate
/// context it's calculated within. This is used by the `type_id` intrinsic.
pub fn type_id_hash(self, ty: Ty<'tcx>) -> Hash64 {
pub fn type_id_hash(self, ty: Ty<'tcx>) -> Hash128 {
// We want the type_id be independent of the types free regions, so we
// erase them. The erase_regions() call will also anonymize bound
// regions, which is desirable too.
15 changes: 11 additions & 4 deletions compiler/rustc_query_system/src/query/plumbing.rs
@@ -69,6 +69,8 @@ where
make_query: fn(Qcx, K) -> QueryStackFrame<D>,
jobs: &mut QueryMap<D>,
) -> Option<()> {
let mut active = Vec::new();

#[cfg(parallel_compiler)]
{
// We use try_lock_shards here since we are called from the
@@ -77,8 +79,7 @@ where
for shard in shards.iter() {
for (k, v) in shard.iter() {
if let QueryResult::Started(ref job) = *v {
let query = make_query(qcx, *k);
jobs.insert(job.id, QueryJobInfo { query, job: job.clone() });
active.push((*k, job.clone()));
}
}
}
@@ -91,12 +92,18 @@ where
// really hurt much.)
for (k, v) in self.active.try_lock()?.iter() {
if let QueryResult::Started(ref job) = *v {
let query = make_query(qcx, *k);
jobs.insert(job.id, QueryJobInfo { query, job: job.clone() });
active.push((*k, job.clone()));
}
}
}

// Call `make_query` while we're not holding a `self.active` lock as `make_query` may call
// queries leading to a deadlock.
for (key, job) in active {
let query = make_query(qcx, key);
jobs.insert(job.id, QueryJobInfo { query, job });
}

Some(())
}
}
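The shape of the fix, stripped of the query-system machinery: snapshot the data you need while the lock is held, release the lock, then run the potentially re-entrant work. A minimal sketch using a plain `std::sync::Mutex` as a stand-in for the sharded `active` map and a free function as a stand-in for `make_query` (all names below are illustrative):

```rust
use std::collections::HashMap;
use std::sync::Mutex;

/// Stand-in for `make_query`: in the real code this may execute queries and
/// try to take the same lock again, so it must not run while the lock is held.
fn describe_job(key: u32, state: &str) -> String {
    format!("job {key}: {state}")
}

fn collect_active(active: &Mutex<HashMap<u32, String>>) -> Vec<(u32, String)> {
    // Phase 1: copy out keys and values under the lock, doing nothing else.
    let snapshot: Vec<(u32, String)> = {
        let guard = active.lock().unwrap();
        guard.iter().map(|(k, v)| (*k, v.clone())).collect()
    }; // guard dropped here, lock released

    // Phase 2: the potentially re-entrant work happens after the lock is gone.
    snapshot
        .into_iter()
        .map(|(k, v)| {
            let desc = describe_job(k, &v);
            (k, desc)
        })
        .collect()
}

fn main() {
    let active = Mutex::new(HashMap::from([(1_u32, "typeck".to_string())]));
    let jobs = collect_active(&active);
    assert_eq!(jobs.len(), 1);
}
```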
31 changes: 28 additions & 3 deletions library/core/src/any.rs
@@ -153,6 +153,7 @@
#![stable(feature = "rust1", since = "1.0.0")]

use crate::fmt;
use crate::hash;
use crate::intrinsics;

///////////////////////////////////////////////////////////////////////////////
@@ -662,10 +663,10 @@ impl dyn Any + Send + Sync {
/// While `TypeId` implements `Hash`, `PartialOrd`, and `Ord`, it is worth
/// noting that the hashes and ordering will vary between Rust releases. Beware
/// of relying on them inside of your code!
#[derive(Clone, Copy, Debug, Hash, Eq, PartialOrd, Ord)]
#[derive(Clone, Copy, Debug, Eq, PartialOrd, Ord)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct TypeId {
t: u64,
t: u128,
}

#[stable(feature = "rust1", since = "1.0.0")]
@@ -696,7 +697,31 @@ impl TypeId {
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_type_id", issue = "77125")]
pub const fn of<T: ?Sized + 'static>() -> TypeId {
TypeId { t: intrinsics::type_id::<T>() }
#[cfg(bootstrap)]
let t = intrinsics::type_id::<T>() as u128;
#[cfg(not(bootstrap))]
let t: u128 = intrinsics::type_id::<T>();
TypeId { t }
}
}

#[stable(feature = "rust1", since = "1.0.0")]
impl hash::Hash for TypeId {
#[inline]
fn hash<H: hash::Hasher>(&self, state: &mut H) {
// We only hash the lower 64 bits of our (128 bit) internal numeric ID,
// because:
// - The hashing algorithm which backs `TypeId` is expected to be
// unbiased and high quality, meaning further mixing would be somewhat
// redundant compared to choosing (the lower) 64 bits arbitrarily.
// - `Hasher::finish` returns a u64 anyway, so the extra entropy we'd
// get from hashing the full value would probably not be useful
// (especially given the previous point about the lower 64 bits being
// high quality on their own).
// - It is correct to do so -- only hashing a subset of `self` is still
//   compatible with an `Eq` implementation that considers the entire
//   value, as ours does.
(self.t as u64).hash(state);
}
}

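To make the comment above concrete: the only consistency `Hash` owes `Eq` is that equal values produce equal hashes, and hashing a fixed subset of the fields preserves that property. A standalone mirror of the pattern (our own type, not the `std` code):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct WideId {
    t: u128,
}

impl Hash for WideId {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash only the low 64 bits, mirroring the `TypeId` impl above.
        (self.t as u64).hash(state);
    }
}

fn hash_of<T: Hash>(value: &T) -> u64 {
    let mut h = DefaultHasher::new();
    value.hash(&mut h);
    h.finish()
}

fn main() {
    let a = WideId { t: 0x0123_4567_89ab_cdef_0011_2233_4455_6677 };
    let b = a;
    assert_eq!(a, b);
    // Equal values hash equally, which is all the `Hash`/`Eq` contract asks.
    assert_eq!(hash_of(&a), hash_of(&b));
}
```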
17 changes: 17 additions & 0 deletions library/core/src/intrinsics.rs
@@ -1057,8 +1057,25 @@ extern "rust-intrinsic" {
#[rustc_const_unstable(feature = "const_type_id", issue = "77125")]
#[rustc_safe_intrinsic]
#[rustc_nounwind]
#[cfg(bootstrap)]
pub fn type_id<T: ?Sized + 'static>() -> u64;

/// Gets an identifier which is globally unique to the specified type. This
/// function will return the same value for a type regardless of whichever
/// crate it is invoked in.
///
/// Note that, unlike most intrinsics, this is safe to call;
/// it does not require an `unsafe` block.
/// Therefore, implementations must not require the user to uphold
/// any safety invariants.
///
/// The stabilized version of this intrinsic is [`core::any::TypeId::of`].
#[rustc_const_unstable(feature = "const_type_id", issue = "77125")]
#[rustc_safe_intrinsic]
#[rustc_nounwind]
#[cfg(not(bootstrap))]
pub fn type_id<T: ?Sized + 'static>() -> u128;

/// A guard for unsafe functions that cannot ever be executed if `T` is uninhabited:
/// This will statically either panic, or do nothing.
///
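As the doc comment says, the stabilized surface for this intrinsic is `core::any::TypeId::of`; callers never see the widened `u128` directly. A quick usage sketch:

```rust
use std::any::TypeId;

// `TypeId::of` wraps the intrinsic; only the opaque `TypeId` is exposed.
fn same_type<A: ?Sized + 'static, B: ?Sized + 'static>() -> bool {
    TypeId::of::<A>() == TypeId::of::<B>()
}

fn main() {
    assert!(same_type::<String, String>());
    assert!(!same_type::<String, str>());
    assert!(!same_type::<u64, u128>());
}
```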
2 changes: 1 addition & 1 deletion src/bootstrap/tool.rs
@@ -711,7 +711,7 @@ impl Step for RustAnalyzerProcMacroSrv {
tool: "rust-analyzer-proc-macro-srv",
mode: Mode::ToolStd,
path: "src/tools/rust-analyzer/crates/proc-macro-srv-cli",
extra_features: vec!["proc-macro-srv/sysroot-abi".to_owned()],
extra_features: vec!["sysroot-abi".to_owned()],
is_optional_tool: false,
source_type: SourceType::InTree,
allow_features: RustAnalyzer::ALLOW_FEATURES,
11 changes: 11 additions & 0 deletions tests/ui/traits/new-solver/normalized-const-built-in-op.rs
@@ -0,0 +1,11 @@
// compile-flags: -Ztrait-solver=next
// check-pass

const fn foo() {
let mut x = [1, 2, 3];
// We need to fix up `<<[i32; 3] as Index<usize>>::Output as AddAssign>`
// to be treated like a built-in operation.
x[1] += 5;
}

fn main() {}