Auto merge of #105378 - matthiaskrgr:rollup-fjeorw5, r=matthiaskrgr

Rollup of 9 pull requests

Successful merges:

 - #104898 (Put all cached values into a central struct instead of just the stable hash)
 - #105004 (Fix `emit_unused_delims_expr` ICE)
 - #105174 (Suggest removing struct field from destructive binding only in shorthand scenario)
 - #105250 (Replace usage of `ResumeTy` in async lowering with `Context`)
 - #105286 (Add -Z maximal-hir-to-mir-coverage flag)
 - #105320 (rustdoc: simplify CSS selectors on top-doc and non-exhaustive toggles)
 - #105349 (Point at args in associated const fn pointers)
 - #105362 (Cleanup macro-expanded code in `rustc_type_ir`)
 - #105370 (Remove outdated syntax from trait alias pretty printing)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2022-12-06 18:51:14 +00:00
commit b28d30e1e3
46 changed files with 650 additions and 742 deletions

View File

@ -16,7 +16,7 @@ use rustc_hir::def::Res;
use rustc_hir::definitions::DefPathData;
use rustc_session::errors::report_lit_error;
use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
use rustc_span::symbol::{sym, Ident};
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::DUMMY_SP;
use thin_vec::thin_vec;
@ -594,14 +594,38 @@ impl<'hir> LoweringContext<'_, 'hir> {
) -> hir::ExprKind<'hir> {
let output = ret_ty.unwrap_or_else(|| hir::FnRetTy::DefaultReturn(self.lower_span(span)));
// Resume argument type: `ResumeTy`
let unstable_span =
self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
let resume_ty = hir::QPath::LangItem(hir::LangItem::ResumeTy, unstable_span, None);
// Resume argument type, which should be `&mut Context<'_>`.
// NOTE: Using the `'static` lifetime here is technically cheating.
// The `Future::poll` argument really is `&'a mut Context<'b>`, but we cannot
// express the fact that we are not storing it across yield-points yet,
// and we would thus run into lifetime errors.
// See <https://github.com/rust-lang/rust/issues/68923>.
// Our lowering makes sure we are not mis-using the `_task_context` input type
// in the sense that we are indeed not using it across yield points. We
// get a fresh `&mut Context` for each resume / call of `Future::poll`.
// This "cheating" was previously done with a `ResumeTy` that contained a raw
// pointer, and a `get_context` accessor that pulled the `Context` lifetimes
// out of thin air.
let context_lifetime_ident = Ident::with_dummy_span(kw::StaticLifetime);
let context_lifetime = self.arena.alloc(hir::Lifetime {
hir_id: self.next_id(),
ident: context_lifetime_ident,
res: hir::LifetimeName::Static,
});
let context_path =
hir::QPath::LangItem(hir::LangItem::Context, self.lower_span(span), None);
let context_ty = hir::MutTy {
ty: self.arena.alloc(hir::Ty {
hir_id: self.next_id(),
kind: hir::TyKind::Path(context_path),
span: self.lower_span(span),
}),
mutbl: hir::Mutability::Mut,
};
let input_ty = hir::Ty {
hir_id: self.next_id(),
kind: hir::TyKind::Path(resume_ty),
span: unstable_span,
kind: hir::TyKind::Rptr(context_lifetime, context_ty),
span: self.lower_span(span),
};
// The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
@ -659,12 +683,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
.map_or(false, |attrs| attrs.into_iter().any(|attr| attr.has_name(sym::track_caller)));
let hir_id = self.lower_node_id(closure_node_id);
let unstable_span =
self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
if track_caller {
let unstable_span = self.mark_span_with_reason(
DesugaringKind::Async,
span,
self.allow_gen_future.clone(),
);
self.lower_attrs(
hir_id,
&[Attribute {
@ -711,7 +732,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
/// mut __awaitee => loop {
/// match unsafe { ::std::future::Future::poll(
/// <::std::pin::Pin>::new_unchecked(&mut __awaitee),
/// ::std::future::get_context(task_context),
/// task_context,
/// ) } {
/// ::std::task::Poll::Ready(result) => break result,
/// ::std::task::Poll::Pending => {}
@ -752,7 +773,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
// unsafe {
// ::std::future::Future::poll(
// ::std::pin::Pin::new_unchecked(&mut __awaitee),
// ::std::future::get_context(task_context),
// task_context,
// )
// }
let poll_expr = {
@ -770,16 +791,10 @@ impl<'hir> LoweringContext<'_, 'hir> {
arena_vec![self; ref_mut_awaitee],
Some(expr_hir_id),
);
let get_context = self.expr_call_lang_item_fn_mut(
gen_future_span,
hir::LangItem::GetContext,
arena_vec![self; task_context],
Some(expr_hir_id),
);
let call = self.expr_call_lang_item_fn(
span,
hir::LangItem::FuturePoll,
arena_vec![self; new_unchecked, get_context],
arena_vec![self; new_unchecked, task_context],
Some(expr_hir_id),
);
self.arena.alloc(self.expr_unsafe(call))
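For orientation, here is a hand-written sketch (not part of this diff) of the shape the new lowering gives a single poll step: the generator's resume argument is the `&mut Context<'_>` itself, so it is handed straight to `Future::poll` with no `ResumeTy`/`get_context` shim. The names `poll_once` and the no-op waker are invented scaffolding for the demonstration.

use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll, RawWaker, RawWakerVTable, Waker};

// Illustrative sketch only; `poll_once` and the no-op waker below are made up
// for this example and are not part of the compiler change.
fn poll_once<F: Future>(awaitee: &mut F, task_context: &mut Context<'_>) -> Poll<F::Output> {
    // SAFETY: the caller does not move `awaitee` again after this call.
    let pinned = unsafe { Pin::new_unchecked(awaitee) };
    // The context is handed to `poll` directly, mirroring the lowered `.await` body above.
    Future::poll(pinned, task_context)
}

// Minimal no-op waker, just so a `Context` can be constructed for the demo.
unsafe fn clone_raw(_: *const ()) -> RawWaker {
    RawWaker::new(std::ptr::null(), &VTABLE)
}
unsafe fn no_op(_: *const ()) {}
static VTABLE: RawWakerVTable = RawWakerVTable::new(clone_raw, no_op, no_op, no_op);

fn main() {
    let waker = unsafe { Waker::from_raw(RawWaker::new(std::ptr::null(), &VTABLE)) };
    let mut cx = Context::from_waker(&waker);

    let mut fut = async { 42 };
    // An already-ready future completes on the first poll.
    assert_eq!(poll_once(&mut fut, &mut cx), Poll::Ready(42));
}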

View File

@ -348,21 +348,10 @@ impl<'a> State<'a> {
self.head(visibility_qualified(&item.vis, "trait"));
self.print_ident(item.ident);
self.print_generic_params(&generics.params);
let mut real_bounds = Vec::with_capacity(bounds.len());
// FIXME(durka) this seems to be some quite outdated syntax
for b in bounds.iter() {
if let GenericBound::Trait(ptr, ast::TraitBoundModifier::Maybe) = b {
self.space();
self.word_space("for ?");
self.print_trait_ref(&ptr.trait_ref);
} else {
real_bounds.push(b.clone());
}
}
self.nbsp();
if !real_bounds.is_empty() {
if !bounds.is_empty() {
self.word_nbsp("=");
self.print_type_bounds(&real_bounds);
self.print_type_bounds(&bounds);
}
self.print_where_clause(&generics.where_clause);
self.word(";");
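For reference (not from the diff), this is the surface form whose printing the cleanup affects: a trait alias is now rendered simply as `=` followed by its bounds, with no special-cased `for ?Trait` form. Trait aliases still require the unstable `trait_alias` feature, so this sketch is nightly-only.

#![feature(trait_alias)]

// The cleaned-up pretty printer emits this declaration as `trait SendSync = Send + Sync;`,
// i.e. the bounds go straight after `=`.
trait SendSync = Send + Sync;

fn assert_send_sync<T: SendSync>(_: T) {}

fn main() {
    assert_send_sync(0u32);
}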

View File

@ -4,8 +4,6 @@ use std::hash::{Hash, Hasher};
use std::ops::Deref;
use std::ptr;
use crate::fingerprint::Fingerprint;
mod private {
#[derive(Clone, Copy, Debug)]
pub struct PrivateZst;
@ -110,86 +108,5 @@ where
}
}
/// A helper type that you can wrap round your own type in order to automatically
/// cache the stable hash on creation and not recompute it whenever the stable hash
/// of the type is computed.
/// This is only done in incremental mode. You can also opt out of caching by using
/// Fingerprint::ZERO for the hash, in which case the hash gets computed each time.
/// This is useful if you have values that you intern but never (can?) use for stable
/// hashing.
#[derive(Copy, Clone)]
pub struct WithStableHash<T> {
pub internee: T,
pub stable_hash: Fingerprint,
}
impl<T: PartialEq> PartialEq for WithStableHash<T> {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.internee.eq(&other.internee)
}
}
impl<T: Eq> Eq for WithStableHash<T> {}
impl<T: Ord> PartialOrd for WithStableHash<T> {
fn partial_cmp(&self, other: &WithStableHash<T>) -> Option<Ordering> {
Some(self.internee.cmp(&other.internee))
}
}
impl<T: Ord> Ord for WithStableHash<T> {
fn cmp(&self, other: &WithStableHash<T>) -> Ordering {
self.internee.cmp(&other.internee)
}
}
impl<T> Deref for WithStableHash<T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
&self.internee
}
}
impl<T: Hash> Hash for WithStableHash<T> {
#[inline]
fn hash<H: Hasher>(&self, s: &mut H) {
if self.stable_hash != Fingerprint::ZERO {
self.stable_hash.hash(s)
} else {
self.internee.hash(s)
}
}
}
impl<T: HashStable<CTX>, CTX> HashStable<CTX> for WithStableHash<T> {
fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
if self.stable_hash == Fingerprint::ZERO || cfg!(debug_assertions) {
// No cached hash available. This can only mean that incremental is disabled.
// We don't cache stable hashes in non-incremental mode, because they are used
// so rarely that the performance actually suffers.
// We need to build the hash as if we cached it and then hash that hash, as
// otherwise the hashes will differ between cached and non-cached mode.
let stable_hash: Fingerprint = {
let mut hasher = StableHasher::new();
self.internee.hash_stable(hcx, &mut hasher);
hasher.finish()
};
if cfg!(debug_assertions) && self.stable_hash != Fingerprint::ZERO {
assert_eq!(
stable_hash, self.stable_hash,
"cached stable hash does not match freshly computed stable hash"
);
}
stable_hash.hash_stable(hcx, hasher);
} else {
self.stable_hash.hash_stable(hcx, hasher);
}
}
}
#[cfg(test)]
mod tests;
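What moves out of `rustc_data_structures` here is, in essence, "store a precomputed hash next to the value and fall back to hashing the value when nothing was cached". A minimal, self-contained sketch of that pattern, with `u64` standing in for `Fingerprint` and zero playing the role of `Fingerprint::ZERO` (the type and names are made up for illustration):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Illustrative stand-in for `WithStableHash`, not the rustc type.
struct CachedHash<T> {
    value: T,
    cached: u64, // 0 means "no cached hash"
}

impl<T: Hash> Hash for CachedHash<T> {
    fn hash<H: Hasher>(&self, s: &mut H) {
        if self.cached != 0 {
            // Fast path: reuse the hash computed when the value was created.
            self.cached.hash(s)
        } else {
            // Opt-out path: recompute from the wrapped value.
            self.value.hash(s)
        }
    }
}

fn main() {
    let mut h = DefaultHasher::new();
    CachedHash { value: "ty", cached: 0 }.hash(&mut h);
    println!("{:x}", h.finish());
}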

View File

@ -286,10 +286,9 @@ language_item_table! {
// FIXME(swatinem): the following lang items are used for async lowering and
// should become obsolete eventually.
ResumeTy, sym::ResumeTy, resume_ty, Target::Struct, GenericRequirement::None;
IdentityFuture, sym::identity_future, identity_future_fn, Target::Fn, GenericRequirement::None;
GetContext, sym::get_context, get_context_fn, Target::Fn, GenericRequirement::None;
Context, sym::Context, context, Target::Struct, GenericRequirement::None;
FuturePoll, sym::poll, future_poll_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
FromFrom, sym::from, from_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;

View File

@ -695,19 +695,8 @@ impl<'a> State<'a> {
self.head("trait");
self.print_ident(item.ident);
self.print_generic_params(generics.params);
let mut real_bounds = Vec::with_capacity(bounds.len());
// FIXME(durka) this seems to be some quite outdated syntax
for b in bounds {
if let GenericBound::Trait(ptr, hir::TraitBoundModifier::Maybe) = b {
self.space();
self.word_space("for ?");
self.print_trait_ref(&ptr.trait_ref);
} else {
real_bounds.push(b);
}
}
self.nbsp();
self.print_bounds("=", real_bounds);
self.print_bounds("=", bounds);
self.print_where_clause(generics);
self.word(";");
self.end(); // end inner head-block

View File

@ -1918,15 +1918,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
receiver: Option<&'tcx hir::Expr<'tcx>>,
args: &'tcx [hir::Expr<'tcx>],
) -> bool {
// Do not call `fn_sig` on non-functions.
if !matches!(
self.tcx.def_kind(def_id),
DefKind::Fn | DefKind::AssocFn | DefKind::Variant | DefKind::Ctor(..)
) {
let ty = self.tcx.type_of(def_id);
if !ty.is_fn() {
return false;
}
let sig = self.tcx.fn_sig(def_id).skip_binder();
let sig = ty.fn_sig(self.tcx).skip_binder();
let args_referencing_param: Vec<_> = sig
.inputs()
.iter()
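The switch from a `DefKind` check to `ty.is_fn()` matters because an item can be callable without being a function item. A small made-up example: `Foo::MAKE` below is an associated `const`, not a `Fn`/`AssocFn`, yet its type is a function pointer, so asking the type for its `fn_sig` is the right way to point at mismatched arguments.

// Illustrative example only; the names are invented.
struct Foo;

fn make(x: u32) -> String {
    x.to_string()
}

impl Foo {
    // Not a function item, but its type is `fn(u32) -> String`.
    const MAKE: fn(u32) -> String = make;
}

fn main() {
    // Calling through the associated const; a wrongly typed argument here is
    // the case the improved diagnostic should point at.
    let s = (Foo::MAKE)(7);
    assert_eq!(s, "7");
}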

View File

@ -747,6 +747,7 @@ fn test_unstable_options_tracking_hash() {
tracked!(link_only, true);
tracked!(llvm_plugins, vec![String::from("plugin_name")]);
tracked!(location_detail, LocationDetail { file: true, line: false, column: false });
tracked!(maximal_hir_to_mir_coverage, true);
tracked!(merge_functions, Some(MergeFunctions::Disabled));
tracked!(mir_emit_retag, true);
tracked!(mir_enable_passes, vec![("DestProp".to_string(), false)]);

View File

@ -10,7 +10,7 @@ declare_tool_lint! {
/// The `rustc_pass_by_value` lint marks a type with `#[rustc_pass_by_value]` requiring it to
/// always be passed by value. This is usually used for types that are thin wrappers around
/// references, so there is no benefit to an extra layer of indirection. (Example: `Ty` which
/// is a reference to an `Interned<TyS>`)
/// is a reference to an `Interned<TyKind>`)
pub rustc::PASS_BY_VALUE,
Warn,
"pass by reference of a type flagged as `#[rustc_pass_by_value]`",

View File

@ -633,13 +633,34 @@ trait UnusedDelimLint {
left_pos: Option<BytePos>,
right_pos: Option<BytePos>,
) {
// If `value` has `ExprKind::Err`, the unused delim lint can be broken.
// For example, the following code caused an ICE (#104897), because the
// `ExprKind::Call` in `value` has `ExprKind::Err` as its argument, which
// leads to wrong spans.
//
// ```
// fn f(){(print!(á
// ```
use rustc_ast::visit::{walk_expr, Visitor};
struct ErrExprVisitor {
has_error: bool,
}
impl<'ast> Visitor<'ast> for ErrExprVisitor {
fn visit_expr(&mut self, expr: &'ast ast::Expr) {
if let ExprKind::Err = expr.kind {
self.has_error = true;
return;
}
walk_expr(self, expr)
}
}
let mut visitor = ErrExprVisitor { has_error: false };
visitor.visit_expr(value);
if visitor.has_error {
return;
}
let spans = match value.kind {
ast::ExprKind::Block(ref block, None) if block.stmts.len() == 1 => {
if let StmtKind::Expr(expr) = &block.stmts[0].kind
&& let ExprKind::Err = expr.kind
{
return
}
if let Some(span) = block.stmts[0].span.find_ancestor_inside(value.span) {
Some((value.span.with_hi(span.lo()), value.span.with_lo(span.hi())))
} else {

View File

@ -1,3 +1,5 @@
#![allow(rustc::usage_of_ty_tykind)]
/// This higher-order macro declares a list of types which can be allocated by `Arena`.
///
/// Specifying the `decode` modifier will add decode impls for `&T` and `&[T]` where `T` is the type
@ -89,8 +91,8 @@ macro_rules! arena_types {
[] hir_id_set: rustc_hir::HirIdSet,
// Interned types
[] tys: rustc_data_structures::intern::WithStableHash<rustc_middle::ty::TyS<'tcx>>,
[] predicates: rustc_data_structures::intern::WithStableHash<rustc_middle::ty::PredicateS<'tcx>>,
[] tys: rustc_type_ir::WithCachedTypeInfo<rustc_middle::ty::TyKind<'tcx>>,
[] predicates: rustc_type_ir::WithCachedTypeInfo<rustc_middle::ty::PredicateKind<'tcx>>,
[] consts: rustc_middle::ty::ConstS<'tcx>,
// Note that this deliberately duplicates items in the `rustc_hir::arena`,

View File

@ -103,7 +103,7 @@ impl hash::Hash for Allocation {
/// Interned types generally have an `Outer` type and an `Inner` type, where
/// `Outer` is a newtype around `Interned<Inner>`, and all the operations are
/// done on `Outer`, because all occurrences are interned. E.g. `Ty` is an
/// outer type and `TyS` is its inner type.
/// outer type and `TyKind` is its inner type.
///
/// Here things are different because only const allocations are interned. This
/// means that both the inner type (`Allocation`) and the outer type

View File

@ -1,5 +1,7 @@
//! Type context book-keeping.
#![allow(rustc::usage_of_ty_tykind)]
use crate::arena::Arena;
use crate::dep_graph::{DepGraph, DepKindStruct};
use crate::hir::place::Place as HirPlace;
@ -19,15 +21,15 @@ use crate::ty::{
self, AdtDef, AdtDefData, AdtKind, Binder, BindingMode, BoundVar, CanonicalPolyFnSig,
ClosureSizeProfileData, Const, ConstS, DefIdTree, FloatTy, FloatVar, FloatVid,
GenericParamDefKind, InferTy, IntTy, IntVar, IntVid, List, ParamConst, ParamTy,
PolyExistentialPredicate, PolyFnSig, Predicate, PredicateKind, PredicateS, ProjectionTy,
Region, RegionKind, ReprOptions, TraitObjectVisitor, Ty, TyKind, TyS, TyVar, TyVid, TypeAndMut,
UintTy, Visibility,
PolyExistentialPredicate, PolyFnSig, Predicate, PredicateKind, ProjectionTy, Region,
RegionKind, ReprOptions, TraitObjectVisitor, Ty, TyKind, TyVar, TyVid, TypeAndMut, UintTy,
Visibility,
};
use crate::ty::{GenericArg, GenericArgKind, InternalSubsts, SubstsRef, UserSubsts};
use rustc_ast as ast;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::intern::{Interned, WithStableHash};
use rustc_data_structures::intern::Interned;
use rustc_data_structures::memmap::Mmap;
use rustc_data_structures::profiling::SelfProfilerRef;
use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
@ -68,6 +70,7 @@ use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi::{Layout, LayoutS, TargetDataLayout, VariantIdx};
use rustc_target::spec::abi;
use rustc_type_ir::sty::TyKind::*;
use rustc_type_ir::WithCachedTypeInfo;
use rustc_type_ir::{DynKind, InternAs, InternIteratorElement, Interner, TypeFlags};
use std::any::Any;
@ -137,13 +140,13 @@ pub struct CtxtInterners<'tcx> {
// Specifically use a speedy hash algorithm for these hash sets, since
// they're accessed quite often.
type_: InternedSet<'tcx, WithStableHash<TyS<'tcx>>>,
type_: InternedSet<'tcx, WithCachedTypeInfo<TyKind<'tcx>>>,
const_lists: InternedSet<'tcx, List<ty::Const<'tcx>>>,
substs: InternedSet<'tcx, InternalSubsts<'tcx>>,
canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo<'tcx>>>,
region: InternedSet<'tcx, RegionKind<'tcx>>,
poly_existential_predicates: InternedSet<'tcx, List<PolyExistentialPredicate<'tcx>>>,
predicate: InternedSet<'tcx, WithStableHash<PredicateS<'tcx>>>,
predicate: InternedSet<'tcx, WithCachedTypeInfo<ty::Binder<'tcx, PredicateKind<'tcx>>>>,
predicates: InternedSet<'tcx, List<Predicate<'tcx>>>,
projs: InternedSet<'tcx, List<ProjectionKind>>,
place_elems: InternedSet<'tcx, List<PlaceElem<'tcx>>>,
@ -194,15 +197,12 @@ impl<'tcx> CtxtInterners<'tcx> {
let stable_hash =
self.stable_hash(&flags, sess, definitions, cstore, source_span, &kind);
let ty_struct = TyS {
kind,
InternedInSet(self.arena.alloc(WithCachedTypeInfo {
internee: kind,
stable_hash,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
};
InternedInSet(
self.arena.alloc(WithStableHash { internee: ty_struct, stable_hash }),
)
}))
})
.0,
))
@ -246,16 +246,12 @@ impl<'tcx> CtxtInterners<'tcx> {
let stable_hash =
self.stable_hash(&flags, sess, definitions, cstore, source_span, &kind);
let predicate_struct = PredicateS {
kind,
InternedInSet(self.arena.alloc(WithCachedTypeInfo {
internee: kind,
stable_hash,
flags: flags.flags,
outer_exclusive_binder: flags.outer_exclusive_binder,
};
InternedInSet(
self.arena
.alloc(WithStableHash { internee: predicate_struct, stable_hash }),
)
}))
})
.0,
))
@ -2104,7 +2100,7 @@ macro_rules! sty_debug_print {
let shards = tcx.interners.type_.lock_shards();
let types = shards.iter().flat_map(|shard| shard.keys());
for &InternedInSet(t) in types {
let variant = match t.kind {
let variant = match t.internee {
ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
ty::Float(..) | ty::Str | ty::Never => continue,
ty::Error(_) => /* unimportant */ continue,
@ -2214,51 +2210,26 @@ impl<'tcx, T: 'tcx + ?Sized> IntoPointer for InternedInSet<'tcx, T> {
}
#[allow(rustc::usage_of_ty_tykind)]
impl<'tcx> Borrow<TyKind<'tcx>> for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {
fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> {
&self.0.kind
impl<'tcx, T> Borrow<T> for InternedInSet<'tcx, WithCachedTypeInfo<T>> {
fn borrow<'a>(&'a self) -> &'a T {
&self.0.internee
}
}
impl<'tcx> PartialEq for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {
fn eq(&self, other: &InternedInSet<'tcx, WithStableHash<TyS<'tcx>>>) -> bool {
impl<'tcx, T: PartialEq> PartialEq for InternedInSet<'tcx, WithCachedTypeInfo<T>> {
fn eq(&self, other: &InternedInSet<'tcx, WithCachedTypeInfo<T>>) -> bool {
// The `Borrow` trait requires that `x.borrow() == y.borrow()` equals
// `x == y`.
self.0.kind == other.0.kind
self.0.internee == other.0.internee
}
}
impl<'tcx> Eq for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {}
impl<'tcx, T: Eq> Eq for InternedInSet<'tcx, WithCachedTypeInfo<T>> {}
impl<'tcx> Hash for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {
impl<'tcx, T: Hash> Hash for InternedInSet<'tcx, WithCachedTypeInfo<T>> {
fn hash<H: Hasher>(&self, s: &mut H) {
// The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`.
self.0.kind.hash(s)
}
}
impl<'tcx> Borrow<Binder<'tcx, PredicateKind<'tcx>>>
for InternedInSet<'tcx, WithStableHash<PredicateS<'tcx>>>
{
fn borrow<'a>(&'a self) -> &'a Binder<'tcx, PredicateKind<'tcx>> {
&self.0.kind
}
}
impl<'tcx> PartialEq for InternedInSet<'tcx, WithStableHash<PredicateS<'tcx>>> {
fn eq(&self, other: &InternedInSet<'tcx, WithStableHash<PredicateS<'tcx>>>) -> bool {
// The `Borrow` trait requires that `x.borrow() == y.borrow()` equals
// `x == y`.
self.0.kind == other.0.kind
}
}
impl<'tcx> Eq for InternedInSet<'tcx, WithStableHash<PredicateS<'tcx>>> {}
impl<'tcx> Hash for InternedInSet<'tcx, WithStableHash<PredicateS<'tcx>>> {
fn hash<H: Hasher>(&self, s: &mut H) {
// The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`.
self.0.kind.hash(s)
self.0.internee.hash(s)
}
}

View File

@ -9,6 +9,8 @@
//!
//! ["The `ty` module: representing types"]: https://rustc-dev-guide.rust-lang.org/ty.html
#![allow(rustc::usage_of_ty_tykind)]
pub use self::fold::{FallibleTypeFolder, TypeFoldable, TypeFolder, TypeSuperFoldable};
pub use self::visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor};
pub use self::AssocItemContainer::*;
@ -32,7 +34,7 @@ use rustc_ast::node_id::NodeMap;
use rustc_attr as attr;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
use rustc_data_structures::intern::{Interned, WithStableHash};
use rustc_data_structures::intern::Interned;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::tagged_ptr::CopyTaggedPtr;
use rustc_hir as hir;
@ -50,6 +52,7 @@ use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{ExpnId, Span};
use rustc_target::abi::{Align, Integer, IntegerType, VariantIdx};
pub use rustc_target::abi::{ReprFlags, ReprOptions};
use rustc_type_ir::WithCachedTypeInfo;
pub use subst::*;
pub use vtable::*;
@ -445,86 +448,22 @@ pub struct CReaderCacheKey {
pub pos: usize,
}
/// Represents a type.
///
/// IMPORTANT:
/// - This is a very "dumb" struct (with no derives and no `impls`).
/// - Values of this type are always interned and thus unique, and are stored
/// as an `Interned<TyS>`.
/// - `Ty` (which contains a reference to a `Interned<TyS>`) or `Interned<TyS>`
/// should be used everywhere instead of `TyS`. In particular, `Ty` has most
/// of the relevant methods.
#[derive(PartialEq, Eq, PartialOrd, Ord)]
#[allow(rustc::usage_of_ty_tykind)]
pub(crate) struct TyS<'tcx> {
/// This field shouldn't be used directly and may be removed in the future.
/// Use `Ty::kind()` instead.
kind: TyKind<'tcx>,
/// This field provides fast access to information that is also contained
/// in `kind`.
///
/// This field shouldn't be used directly and may be removed in the future.
/// Use `Ty::flags()` instead.
flags: TypeFlags,
/// This field provides fast access to information that is also contained
/// in `kind`.
///
/// This is a kind of confusing thing: it stores the smallest
/// binder such that
///
/// (a) the binder itself captures nothing but
/// (b) all the late-bound things within the type are captured
/// by some sub-binder.
///
/// So, for a type without any late-bound things, like `u32`, this
/// will be *innermost*, because that is the innermost binder that
/// captures nothing. But for a type `&'D u32`, where `'D` is a
/// late-bound region with De Bruijn index `D`, this would be `D + 1`
/// -- the binder itself does not capture `D`, but `D` is captured
/// by an inner binder.
///
/// We call this concept an "exclusive" binder `D` because all
/// De Bruijn indices within the type are contained within `0..D`
/// (exclusive).
outer_exclusive_binder: ty::DebruijnIndex,
}
/// Use this rather than `TyS`, whenever possible.
/// Use this rather than `TyKind`, whenever possible.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, HashStable)]
#[rustc_diagnostic_item = "Ty"]
#[rustc_pass_by_value]
pub struct Ty<'tcx>(Interned<'tcx, WithStableHash<TyS<'tcx>>>);
pub struct Ty<'tcx>(Interned<'tcx, WithCachedTypeInfo<TyKind<'tcx>>>);
impl<'tcx> TyCtxt<'tcx> {
/// A "bool" type used in rustc_mir_transform unit tests when we
/// have not spun up a TyCtxt.
pub const BOOL_TY_FOR_UNIT_TESTING: Ty<'tcx> = Ty(Interned::new_unchecked(&WithStableHash {
internee: TyS {
kind: ty::Bool,
pub const BOOL_TY_FOR_UNIT_TESTING: Ty<'tcx> =
Ty(Interned::new_unchecked(&WithCachedTypeInfo {
internee: ty::Bool,
stable_hash: Fingerprint::ZERO,
flags: TypeFlags::empty(),
outer_exclusive_binder: DebruijnIndex::from_usize(0),
},
stable_hash: Fingerprint::ZERO,
}));
}
impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TyS<'tcx> {
#[inline]
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
let TyS {
kind,
// The other fields just provide fast access to information that is
// also contained in `kind`, so no need to hash them.
flags: _,
outer_exclusive_binder: _,
} = self;
kind.hash_stable(hcx, hasher)
}
}));
}
impl ty::EarlyBoundRegion {
@ -535,28 +474,18 @@ impl ty::EarlyBoundRegion {
}
}
/// Represents a predicate.
///
/// See comments on `TyS`, which apply here too (albeit for
/// `PredicateS`/`Predicate` rather than `TyS`/`Ty`).
#[derive(Debug)]
pub(crate) struct PredicateS<'tcx> {
kind: Binder<'tcx, PredicateKind<'tcx>>,
flags: TypeFlags,
/// See the comment for the corresponding field of [TyS].
outer_exclusive_binder: ty::DebruijnIndex,
}
/// Use this rather than `PredicateS`, whenever possible.
/// Use this rather than `PredicateKind`, whenever possible.
#[derive(Clone, Copy, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub struct Predicate<'tcx>(Interned<'tcx, WithStableHash<PredicateS<'tcx>>>);
pub struct Predicate<'tcx>(
Interned<'tcx, WithCachedTypeInfo<ty::Binder<'tcx, PredicateKind<'tcx>>>>,
);
impl<'tcx> Predicate<'tcx> {
/// Gets the inner `Binder<'tcx, PredicateKind<'tcx>>`.
#[inline]
pub fn kind(self) -> Binder<'tcx, PredicateKind<'tcx>> {
self.0.kind
self.0.internee
}
#[inline(always)]
@ -631,21 +560,6 @@ impl<'tcx> Predicate<'tcx> {
}
}
impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for PredicateS<'tcx> {
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
let PredicateS {
ref kind,
// The other fields just provide fast access to information that is
// also contained in `kind`, so no need to hash them.
flags: _,
outer_exclusive_binder: _,
} = self;
kind.hash_stable(hcx, hasher);
}
}
impl rustc_errors::IntoDiagnosticArg for Predicate<'_> {
fn into_diagnostic_arg(self) -> rustc_errors::DiagnosticArgValue<'static> {
rustc_errors::DiagnosticArgValue::Str(std::borrow::Cow::Owned(self.to_string()))
@ -1028,7 +942,7 @@ impl<'tcx> Term<'tcx> {
unsafe {
match ptr & TAG_MASK {
TYPE_TAG => TermKind::Ty(Ty(Interned::new_unchecked(
&*((ptr & !TAG_MASK) as *const WithStableHash<ty::TyS<'tcx>>),
&*((ptr & !TAG_MASK) as *const WithCachedTypeInfo<ty::TyKind<'tcx>>),
))),
CONST_TAG => TermKind::Const(ty::Const(Interned::new_unchecked(
&*((ptr & !TAG_MASK) as *const ty::ConstS<'tcx>),
@ -1072,7 +986,7 @@ impl<'tcx> TermKind<'tcx> {
TermKind::Ty(ty) => {
// Ensure we can use the tag bits.
assert_eq!(mem::align_of_val(&*ty.0.0) & TAG_MASK, 0);
(TYPE_TAG, ty.0.0 as *const WithStableHash<ty::TyS<'tcx>> as usize)
(TYPE_TAG, ty.0.0 as *const WithCachedTypeInfo<ty::TyKind<'tcx>> as usize)
}
TermKind::Const(ct) => {
// Ensure we can use the tag bits.
@ -2692,8 +2606,7 @@ mod size_asserts {
use super::*;
use rustc_data_structures::static_assert_size;
// tidy-alphabetical-start
static_assert_size!(PredicateS<'_>, 48);
static_assert_size!(TyS<'_>, 40);
static_assert_size!(WithStableHash<TyS<'_>>, 56);
static_assert_size!(PredicateKind<'_>, 32);
static_assert_size!(WithCachedTypeInfo<TyKind<'_>>, 56);
// tidy-alphabetical-end
}

View File

@ -1611,7 +1611,7 @@ impl<'tcx> Region<'tcx> {
impl<'tcx> Ty<'tcx> {
#[inline(always)]
pub fn kind(self) -> &'tcx TyKind<'tcx> {
&self.0.0.kind
&self.0.0
}
#[inline(always)]

View File

@ -6,10 +6,11 @@ use crate::ty::sty::{ClosureSubsts, GeneratorSubsts, InlineConstSubsts};
use crate::ty::visit::{TypeVisitable, TypeVisitor};
use crate::ty::{self, Lift, List, ParamConst, Ty, TyCtxt};
use rustc_data_structures::intern::{Interned, WithStableHash};
use rustc_data_structures::intern::Interned;
use rustc_hir::def_id::DefId;
use rustc_macros::HashStable;
use rustc_serialize::{self, Decodable, Encodable};
use rustc_type_ir::WithCachedTypeInfo;
use smallvec::SmallVec;
use core::intrinsics;
@ -84,7 +85,7 @@ impl<'tcx> GenericArgKind<'tcx> {
GenericArgKind::Type(ty) => {
// Ensure we can use the tag bits.
assert_eq!(mem::align_of_val(&*ty.0.0) & TAG_MASK, 0);
(TYPE_TAG, ty.0.0 as *const WithStableHash<ty::TyS<'tcx>> as usize)
(TYPE_TAG, ty.0.0 as *const WithCachedTypeInfo<ty::TyKind<'tcx>> as usize)
}
GenericArgKind::Const(ct) => {
// Ensure we can use the tag bits.
@ -162,7 +163,7 @@ impl<'tcx> GenericArg<'tcx> {
&*((ptr & !TAG_MASK) as *const ty::RegionKind<'tcx>),
))),
TYPE_TAG => GenericArgKind::Type(Ty(Interned::new_unchecked(
&*((ptr & !TAG_MASK) as *const WithStableHash<ty::TyS<'tcx>>),
&*((ptr & !TAG_MASK) as *const WithCachedTypeInfo<ty::TyKind<'tcx>>),
))),
CONST_TAG => GenericArgKind::Const(ty::Const(Interned::new_unchecked(
&*((ptr & !TAG_MASK) as *const ty::ConstS<'tcx>),

View File

@ -948,20 +948,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
original_source_scope: SourceScope,
pattern_span: Span,
) {
let tcx = self.tcx;
let current_root = tcx.maybe_lint_level_root_bounded(arg_hir_id, self.hir_id);
let parent_root = tcx.maybe_lint_level_root_bounded(
self.source_scopes[original_source_scope]
.local_data
.as_ref()
.assert_crate_local()
.lint_root,
self.hir_id,
);
if current_root != parent_root {
self.source_scope =
self.new_source_scope(pattern_span, LintLevel::Explicit(current_root), None);
}
let parent_id = self.source_scopes[original_source_scope]
.local_data
.as_ref()
.assert_crate_local()
.lint_root;
self.maybe_new_source_scope(pattern_span, None, arg_hir_id, parent_id);
}
fn get_unit_temp(&mut self) -> Place<'tcx> {

View File

@ -85,6 +85,7 @@ use std::mem;
use crate::build::{BlockAnd, BlockAndExtension, BlockFrame, Builder, CFG};
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::HirId;
use rustc_index::vec::IndexVec;
use rustc_middle::middle::region;
use rustc_middle::mir::*;
@ -567,25 +568,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
F: FnOnce(&mut Builder<'a, 'tcx>) -> BlockAnd<R>,
{
let source_scope = self.source_scope;
let tcx = self.tcx;
if let LintLevel::Explicit(current_hir_id) = lint_level {
// Use `maybe_lint_level_root_bounded` with `root_lint_level` as a bound
// to avoid adding Hir dependencies on our parents.
// We estimate the true lint roots here to avoid creating a lot of source scopes.
let parent_root = tcx.maybe_lint_level_root_bounded(
self.source_scopes[source_scope].local_data.as_ref().assert_crate_local().lint_root,
self.hir_id,
);
let current_root = tcx.maybe_lint_level_root_bounded(current_hir_id, self.hir_id);
if parent_root != current_root {
self.source_scope = self.new_source_scope(
region_scope.1.span,
LintLevel::Explicit(current_root),
None,
);
}
let parent_id =
self.source_scopes[source_scope].local_data.as_ref().assert_crate_local().lint_root;
self.maybe_new_source_scope(region_scope.1.span, None, current_hir_id, parent_id);
}
self.push_scope(region_scope);
let mut block;
@ -758,6 +744,40 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
))
}
/// Possibly creates a new source scope if `current_root` and `parent_root`
/// are different, or if -Zmaximal-hir-to-mir-coverage is enabled.
pub(crate) fn maybe_new_source_scope(
&mut self,
span: Span,
safety: Option<Safety>,
current_id: HirId,
parent_id: HirId,
) {
let (current_root, parent_root) =
if self.tcx.sess.opts.unstable_opts.maximal_hir_to_mir_coverage {
// Some consumers of rustc need to map MIR locations back to HIR nodes. Currently
// the only part of rustc that tracks MIR -> HIR is the `SourceScopeLocalData::lint_root`
// field that tracks lint levels for MIR locations. Normally the number of source scopes
// is limited to the set of nodes with lint annotations. The -Zmaximal-hir-to-mir-coverage
// flag changes this behavior to maximize the number of source scopes, increasing the
// granularity of the MIR->HIR mapping.
(current_id, parent_id)
} else {
// Use `maybe_lint_level_root_bounded` with `self.hir_id` as a bound
// to avoid adding Hir dependencies on our parents.
// We estimate the true lint roots here to avoid creating a lot of source scopes.
(
self.tcx.maybe_lint_level_root_bounded(current_id, self.hir_id),
self.tcx.maybe_lint_level_root_bounded(parent_id, self.hir_id),
)
};
if current_root != parent_root {
let lint_level = LintLevel::Explicit(current_root);
self.source_scope = self.new_source_scope(span, lint_level, safety);
}
}
/// Creates a new source scope, nested in the current one.
pub(crate) fn new_source_scope(
&mut self,

View File

@ -1548,7 +1548,13 @@ impl<'tcx> Liveness<'_, 'tcx> {
.or_insert_with(|| (ln, var, vec![id_and_sp]));
});
let can_remove = matches!(&pat.kind, hir::PatKind::Struct(_, _, true));
let can_remove = match pat.kind {
hir::PatKind::Struct(_, fields, true) => {
// If all fields are shorthand, suggest removing the struct fields; otherwise, suggest prefixing the binding with `_`.
fields.iter().all(|f| f.is_shorthand)
}
_ => false,
};
for (_, (ln, var, hir_ids_and_spans)) in vars {
if self.used_on_entry(ln, var) {
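A made-up illustration of the distinction the new `can_remove` check draws: when every unused binding in the pattern is shorthand, the lint can suggest dropping the fields from the pattern outright; once a field is bound under another name, only the `_` prefix suggestion is appropriate.

struct Point { x: u32, y: u32 }

fn main() {
    // All-shorthand pattern with unused bindings: suggesting removal of the
    // `x` and `y` fields from the pattern is meaningful here.
    let Point { x, y } = Point { x: 1, y: 2 };

    // Non-shorthand pattern: `horizontal` is a renamed binding, so the
    // suggestion stays "prefix with `_`", e.g. `x: _horizontal`.
    let Point { x: horizontal, y: _ } = Point { x: 3, y: 4 };
}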

View File

@ -1382,6 +1382,9 @@ options! {
"list the symbols defined by a library crate (default: no)"),
macro_backtrace: bool = (false, parse_bool, [UNTRACKED],
"show macro backtraces (default: no)"),
maximal_hir_to_mir_coverage: bool = (false, parse_bool, [TRACKED],
"save as much information as possible about the correspondence between MIR and HIR \
as source scopes (default: no)"),
merge_functions: Option<MergeFunctions> = (None, parse_merge_functions, [TRACKED],
"control the operation of the MergeFunctions LLVM pass, taking \
the same values as the target option of the same name"),

View File

@ -274,7 +274,7 @@ impl Ord for DefId {
impl PartialOrd for DefId {
#[inline]
fn partial_cmp(&self, other: &DefId) -> Option<std::cmp::Ordering> {
Some(Ord::cmp(self, other))
Some(self.cmp(other))
}
}

View File

@ -165,6 +165,7 @@ symbols! {
Capture,
Center,
Clone,
Context,
Continue,
Copy,
Count,
@ -264,7 +265,6 @@ symbols! {
Relaxed,
Release,
Result,
ResumeTy,
Return,
Right,
Rust,
@ -754,7 +754,6 @@ symbols! {
generic_associated_types_extended,
generic_const_exprs,
generic_param_attrs,
get_context,
global_allocator,
global_asm,
globs,

View File

@ -19,9 +19,11 @@ use std::mem::discriminant;
pub mod codec;
pub mod sty;
pub mod ty_info;
pub use codec::*;
pub use sty::*;
pub use ty_info::*;
/// Needed so we can use #[derive(HashStable_Generic)]
pub trait HashStableContext {}

View File

@ -301,61 +301,44 @@ impl<I: Interner> Clone for TyKind<I> {
impl<I: Interner> PartialEq for TyKind<I> {
#[inline]
fn eq(&self, other: &TyKind<I>) -> bool {
let __self_vi = tykind_discriminant(self);
let __arg_1_vi = tykind_discriminant(other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Int(ref __self_0), &Int(ref __arg_1_0)) => __self_0 == __arg_1_0,
(&Uint(ref __self_0), &Uint(ref __arg_1_0)) => __self_0 == __arg_1_0,
(&Float(ref __self_0), &Float(ref __arg_1_0)) => __self_0 == __arg_1_0,
(&Adt(ref __self_0, ref __self_1), &Adt(ref __arg_1_0, ref __arg_1_1)) => {
__self_0 == __arg_1_0 && __self_1 == __arg_1_1
tykind_discriminant(self) == tykind_discriminant(other)
&& match (self, other) {
(Int(a_i), Int(b_i)) => a_i == b_i,
(Uint(a_u), Uint(b_u)) => a_u == b_u,
(Float(a_f), Float(b_f)) => a_f == b_f,
(Adt(a_d, a_s), Adt(b_d, b_s)) => a_d == b_d && a_s == b_s,
(Foreign(a_d), Foreign(b_d)) => a_d == b_d,
(Array(a_t, a_c), Array(b_t, b_c)) => a_t == b_t && a_c == b_c,
(Slice(a_t), Slice(b_t)) => a_t == b_t,
(RawPtr(a_t), RawPtr(b_t)) => a_t == b_t,
(Ref(a_r, a_t, a_m), Ref(b_r, b_t, b_m)) => a_r == b_r && a_t == b_t && a_m == b_m,
(FnDef(a_d, a_s), FnDef(b_d, b_s)) => a_d == b_d && a_s == b_s,
(FnPtr(a_s), FnPtr(b_s)) => a_s == b_s,
(Dynamic(a_p, a_r, a_repr), Dynamic(b_p, b_r, b_repr)) => {
a_p == b_p && a_r == b_r && a_repr == b_repr
}
(&Foreign(ref __self_0), &Foreign(ref __arg_1_0)) => __self_0 == __arg_1_0,
(&Array(ref __self_0, ref __self_1), &Array(ref __arg_1_0, ref __arg_1_1)) => {
__self_0 == __arg_1_0 && __self_1 == __arg_1_1
(Closure(a_d, a_s), Closure(b_d, b_s)) => a_d == b_d && a_s == b_s,
(Generator(a_d, a_s, a_m), Generator(b_d, b_s, b_m)) => {
a_d == b_d && a_s == b_s && a_m == b_m
}
(&Slice(ref __self_0), &Slice(ref __arg_1_0)) => __self_0 == __arg_1_0,
(&RawPtr(ref __self_0), &RawPtr(ref __arg_1_0)) => __self_0 == __arg_1_0,
(
&Ref(ref __self_0, ref __self_1, ref __self_2),
&Ref(ref __arg_1_0, ref __arg_1_1, ref __arg_1_2),
) => __self_0 == __arg_1_0 && __self_1 == __arg_1_1 && __self_2 == __arg_1_2,
(&FnDef(ref __self_0, ref __self_1), &FnDef(ref __arg_1_0, ref __arg_1_1)) => {
__self_0 == __arg_1_0 && __self_1 == __arg_1_1
(GeneratorWitness(a_g), GeneratorWitness(b_g)) => a_g == b_g,
(Tuple(a_t), Tuple(b_t)) => a_t == b_t,
(Projection(a_p), Projection(b_p)) => a_p == b_p,
(Opaque(a_d, a_s), Opaque(b_d, b_s)) => a_d == b_d && a_s == b_s,
(Param(a_p), Param(b_p)) => a_p == b_p,
(Bound(a_d, a_b), Bound(b_d, b_b)) => a_d == b_d && a_b == b_b,
(Placeholder(a_p), Placeholder(b_p)) => a_p == b_p,
(Infer(a_t), Infer(b_t)) => a_t == b_t,
(Error(a_e), Error(b_e)) => a_e == b_e,
(Bool, Bool) | (Char, Char) | (Str, Str) | (Never, Never) => true,
_ => {
debug_assert!(
false,
"This branch must be unreachable, maybe the match is missing an arm? self = self = {self:?}, other = {other:?}"
);
true
}
(&FnPtr(ref __self_0), &FnPtr(ref __arg_1_0)) => __self_0 == __arg_1_0,
(
&Dynamic(ref __self_0, ref __self_1, ref self_repr),
&Dynamic(ref __arg_1_0, ref __arg_1_1, ref arg_repr),
) => __self_0 == __arg_1_0 && __self_1 == __arg_1_1 && self_repr == arg_repr,
(&Closure(ref __self_0, ref __self_1), &Closure(ref __arg_1_0, ref __arg_1_1)) => {
__self_0 == __arg_1_0 && __self_1 == __arg_1_1
}
(
&Generator(ref __self_0, ref __self_1, ref __self_2),
&Generator(ref __arg_1_0, ref __arg_1_1, ref __arg_1_2),
) => __self_0 == __arg_1_0 && __self_1 == __arg_1_1 && __self_2 == __arg_1_2,
(&GeneratorWitness(ref __self_0), &GeneratorWitness(ref __arg_1_0)) => {
__self_0 == __arg_1_0
}
(&Tuple(ref __self_0), &Tuple(ref __arg_1_0)) => __self_0 == __arg_1_0,
(&Projection(ref __self_0), &Projection(ref __arg_1_0)) => __self_0 == __arg_1_0,
(&Opaque(ref __self_0, ref __self_1), &Opaque(ref __arg_1_0, ref __arg_1_1)) => {
__self_0 == __arg_1_0 && __self_1 == __arg_1_1
}
(&Param(ref __self_0), &Param(ref __arg_1_0)) => __self_0 == __arg_1_0,
(&Bound(ref __self_0, ref __self_1), &Bound(ref __arg_1_0, ref __arg_1_1)) => {
__self_0 == __arg_1_0 && __self_1 == __arg_1_1
}
(&Placeholder(ref __self_0), &Placeholder(ref __arg_1_0)) => __self_0 == __arg_1_0,
(&Infer(ref __self_0), &Infer(ref __arg_1_0)) => __self_0 == __arg_1_0,
(&Error(ref __self_0), &Error(ref __arg_1_0)) => __self_0 == __arg_1_0,
_ => true,
}
} else {
false
}
}
}
@ -366,7 +349,7 @@ impl<I: Interner> Eq for TyKind<I> {}
impl<I: Interner> PartialOrd for TyKind<I> {
#[inline]
fn partial_cmp(&self, other: &TyKind<I>) -> Option<Ordering> {
Some(Ord::cmp(self, other))
Some(self.cmp(other))
}
}
@ -374,213 +357,106 @@ impl<I: Interner> PartialOrd for TyKind<I> {
impl<I: Interner> Ord for TyKind<I> {
#[inline]
fn cmp(&self, other: &TyKind<I>) -> Ordering {
let __self_vi = tykind_discriminant(self);
let __arg_1_vi = tykind_discriminant(other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Int(ref __self_0), &Int(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
(&Uint(ref __self_0), &Uint(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
(&Float(ref __self_0), &Float(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
(&Adt(ref __self_0, ref __self_1), &Adt(ref __arg_1_0, ref __arg_1_1)) => {
match Ord::cmp(__self_0, __arg_1_0) {
Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
cmp => cmp,
}
tykind_discriminant(self).cmp(&tykind_discriminant(other)).then_with(|| {
match (self, other) {
(Int(a_i), Int(b_i)) => a_i.cmp(b_i),
(Uint(a_u), Uint(b_u)) => a_u.cmp(b_u),
(Float(a_f), Float(b_f)) => a_f.cmp(b_f),
(Adt(a_d, a_s), Adt(b_d, b_s)) => a_d.cmp(b_d).then_with(|| a_s.cmp(b_s)),
(Foreign(a_d), Foreign(b_d)) => a_d.cmp(b_d),
(Array(a_t, a_c), Array(b_t, b_c)) => a_t.cmp(b_t).then_with(|| a_c.cmp(b_c)),
(Slice(a_t), Slice(b_t)) => a_t.cmp(b_t),
(RawPtr(a_t), RawPtr(b_t)) => a_t.cmp(b_t),
(Ref(a_r, a_t, a_m), Ref(b_r, b_t, b_m)) => {
a_r.cmp(b_r).then_with(|| a_t.cmp(b_t).then_with(|| a_m.cmp(b_m)))
}
(&Foreign(ref __self_0), &Foreign(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
(&Array(ref __self_0, ref __self_1), &Array(ref __arg_1_0, ref __arg_1_1)) => {
match Ord::cmp(__self_0, __arg_1_0) {
Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
cmp => cmp,
}
(FnDef(a_d, a_s), FnDef(b_d, b_s)) => a_d.cmp(b_d).then_with(|| a_s.cmp(b_s)),
(FnPtr(a_s), FnPtr(b_s)) => a_s.cmp(b_s),
(Dynamic(a_p, a_r, a_repr), Dynamic(b_p, b_r, b_repr)) => {
a_p.cmp(b_p).then_with(|| a_r.cmp(b_r).then_with(|| a_repr.cmp(b_repr)))
}
(&Slice(ref __self_0), &Slice(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
(&RawPtr(ref __self_0), &RawPtr(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
(
&Ref(ref __self_0, ref __self_1, ref __self_2),
&Ref(ref __arg_1_0, ref __arg_1_1, ref __arg_1_2),
) => match Ord::cmp(__self_0, __arg_1_0) {
Ordering::Equal => match Ord::cmp(__self_1, __arg_1_1) {
Ordering::Equal => Ord::cmp(__self_2, __arg_1_2),
cmp => cmp,
},
cmp => cmp,
},
(&FnDef(ref __self_0, ref __self_1), &FnDef(ref __arg_1_0, ref __arg_1_1)) => {
match Ord::cmp(__self_0, __arg_1_0) {
Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
cmp => cmp,
}
(Closure(a_p, a_s), Closure(b_p, b_s)) => a_p.cmp(b_p).then_with(|| a_s.cmp(b_s)),
(Generator(a_d, a_s, a_m), Generator(b_d, b_s, b_m)) => {
a_d.cmp(b_d).then_with(|| a_s.cmp(b_s).then_with(|| a_m.cmp(b_m)))
}
(&FnPtr(ref __self_0), &FnPtr(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
(
&Dynamic(ref __self_0, ref __self_1, ref self_repr),
&Dynamic(ref __arg_1_0, ref __arg_1_1, ref arg_repr),
) => match Ord::cmp(__self_0, __arg_1_0) {
Ordering::Equal => match Ord::cmp(__self_1, __arg_1_1) {
Ordering::Equal => Ord::cmp(self_repr, arg_repr),
cmp => cmp,
},
cmp => cmp,
},
(&Closure(ref __self_0, ref __self_1), &Closure(ref __arg_1_0, ref __arg_1_1)) => {
match Ord::cmp(__self_0, __arg_1_0) {
Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
cmp => cmp,
}
(GeneratorWitness(a_g), GeneratorWitness(b_g)) => a_g.cmp(b_g),
(Tuple(a_t), Tuple(b_t)) => a_t.cmp(b_t),
(Projection(a_p), Projection(b_p)) => a_p.cmp(b_p),
(Opaque(a_d, a_s), Opaque(b_d, b_s)) => a_d.cmp(b_d).then_with(|| a_s.cmp(b_s)),
(Param(a_p), Param(b_p)) => a_p.cmp(b_p),
(Bound(a_d, a_b), Bound(b_d, b_b)) => a_d.cmp(b_d).then_with(|| a_b.cmp(b_b)),
(Placeholder(a_p), Placeholder(b_p)) => a_p.cmp(b_p),
(Infer(a_t), Infer(b_t)) => a_t.cmp(b_t),
(Error(a_e), Error(b_e)) => a_e.cmp(b_e),
(Bool, Bool) | (Char, Char) | (Str, Str) | (Never, Never) => Ordering::Equal,
_ => {
debug_assert!(false, "This branch must be unreachable, maybe the match is missing an arm? self = {self:?}, other = {other:?}");
Ordering::Equal
}
(
&Generator(ref __self_0, ref __self_1, ref __self_2),
&Generator(ref __arg_1_0, ref __arg_1_1, ref __arg_1_2),
) => match Ord::cmp(__self_0, __arg_1_0) {
Ordering::Equal => match Ord::cmp(__self_1, __arg_1_1) {
Ordering::Equal => Ord::cmp(__self_2, __arg_1_2),
cmp => cmp,
},
cmp => cmp,
},
(&GeneratorWitness(ref __self_0), &GeneratorWitness(ref __arg_1_0)) => {
Ord::cmp(__self_0, __arg_1_0)
}
(&Tuple(ref __self_0), &Tuple(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
(&Projection(ref __self_0), &Projection(ref __arg_1_0)) => {
Ord::cmp(__self_0, __arg_1_0)
}
(&Opaque(ref __self_0, ref __self_1), &Opaque(ref __arg_1_0, ref __arg_1_1)) => {
match Ord::cmp(__self_0, __arg_1_0) {
Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
cmp => cmp,
}
}
(&Param(ref __self_0), &Param(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
(&Bound(ref __self_0, ref __self_1), &Bound(ref __arg_1_0, ref __arg_1_1)) => {
match Ord::cmp(__self_0, __arg_1_0) {
Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
cmp => cmp,
}
}
(&Placeholder(ref __self_0), &Placeholder(ref __arg_1_0)) => {
Ord::cmp(__self_0, __arg_1_0)
}
(&Infer(ref __self_0), &Infer(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
(&Error(ref __self_0), &Error(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
_ => Ordering::Equal,
}
} else {
Ord::cmp(&__self_vi, &__arg_1_vi)
}
})
}
}
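The `Ord` impl above follows the same shape, with `Ordering::then_with` replacing the nested `match ... Ordering::Equal` ladders of the old macro-expanded code. A stand-alone illustration of that chaining idiom on a toy type (not from rustc):

use std::cmp::Ordering;

#[derive(PartialEq, Eq)]
struct Version {
    major: u32,
    minor: u32,
    patch: u32,
}

impl Ord for Version {
    fn cmp(&self, other: &Self) -> Ordering {
        // Later fields only matter when the earlier ones compare equal,
        // which is exactly what `then_with` expresses.
        self.major
            .cmp(&other.major)
            .then_with(|| self.minor.cmp(&other.minor))
            .then_with(|| self.patch.cmp(&other.patch))
    }
}

impl PartialOrd for Version {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

fn main() {
    let a = Version { major: 1, minor: 2, patch: 3 };
    let b = Version { major: 1, minor: 3, patch: 0 };
    assert!(a < b);
}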
// This is manually implemented because a derive would require `I: Hash`
impl<I: Interner> hash::Hash for TyKind<I> {
fn hash<__H: hash::Hasher>(&self, state: &mut __H) -> () {
match (&*self,) {
(&Int(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
tykind_discriminant(self).hash(state);
match self {
Int(i) => i.hash(state),
Uint(u) => u.hash(state),
Float(f) => f.hash(state),
Adt(d, s) => {
d.hash(state);
s.hash(state)
}
(&Uint(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
Foreign(d) => d.hash(state),
Array(t, c) => {
t.hash(state);
c.hash(state)
}
(&Float(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
Slice(t) => t.hash(state),
RawPtr(t) => t.hash(state),
Ref(r, t, m) => {
r.hash(state);
t.hash(state);
m.hash(state)
}
(&Adt(ref __self_0, ref __self_1),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state);
hash::Hash::hash(__self_1, state)
FnDef(d, s) => {
d.hash(state);
s.hash(state)
}
(&Foreign(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
FnPtr(s) => s.hash(state),
Dynamic(p, r, repr) => {
p.hash(state);
r.hash(state);
repr.hash(state)
}
(&Array(ref __self_0, ref __self_1),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state);
hash::Hash::hash(__self_1, state)
Closure(d, s) => {
d.hash(state);
s.hash(state)
}
(&Slice(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
Generator(d, s, m) => {
d.hash(state);
s.hash(state);
m.hash(state)
}
(&RawPtr(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
GeneratorWitness(g) => g.hash(state),
Tuple(t) => t.hash(state),
Projection(p) => p.hash(state),
Opaque(d, s) => {
d.hash(state);
s.hash(state)
}
(&Ref(ref __self_0, ref __self_1, ref __self_2),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state);
hash::Hash::hash(__self_1, state);
hash::Hash::hash(__self_2, state)
Param(p) => p.hash(state),
Bound(d, b) => {
d.hash(state);
b.hash(state)
}
(&FnDef(ref __self_0, ref __self_1),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state);
hash::Hash::hash(__self_1, state)
}
(&FnPtr(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
}
(&Dynamic(ref __self_0, ref __self_1, ref repr),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state);
hash::Hash::hash(__self_1, state);
hash::Hash::hash(repr, state)
}
(&Closure(ref __self_0, ref __self_1),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state);
hash::Hash::hash(__self_1, state)
}
(&Generator(ref __self_0, ref __self_1, ref __self_2),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state);
hash::Hash::hash(__self_1, state);
hash::Hash::hash(__self_2, state)
}
(&GeneratorWitness(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
}
(&Tuple(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
}
(&Projection(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
}
(&Opaque(ref __self_0, ref __self_1),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state);
hash::Hash::hash(__self_1, state)
}
(&Param(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
}
(&Bound(ref __self_0, ref __self_1),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state);
hash::Hash::hash(__self_1, state)
}
(&Placeholder(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
}
(&Infer(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
}
(&Error(ref __self_0),) => {
hash::Hash::hash(&tykind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
}
_ => hash::Hash::hash(&tykind_discriminant(self), state),
Placeholder(p) => p.hash(state),
Infer(t) => t.hash(state),
Error(e) => e.hash(state),
Bool | Char | Str | Never => (),
}
}
}
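The hand-rolled `Hash` impl uses the recipe a derive would generate, minus the unwanted `I: Hash` bound: hash the discriminant first, then the fields of the matching variant. A self-contained sketch of that recipe on a toy enum (not from rustc):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::mem::discriminant;

#[allow(dead_code)]
enum Shape<T> {
    Point,
    Circle(T),
    Rect(T, T),
}

// Manual impl mirroring the `TyKind` pattern: only the field type needs to be
// hashable, no blanket bound on the whole parameter set.
impl<T: Hash> Hash for Shape<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        discriminant(self).hash(state);
        match self {
            Shape::Point => (),
            Shape::Circle(r) => r.hash(state),
            Shape::Rect(w, h) => {
                w.hash(state);
                h.hash(state)
            }
        }
    }
}

fn main() {
    let mut hasher = DefaultHasher::new();
    Shape::Rect(3u32, 4u32).hash(&mut hasher);
    println!("{:x}", hasher.finish());
}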
@ -588,37 +464,34 @@ impl<I: Interner> hash::Hash for TyKind<I> {
// This is manually implemented because a derive would require `I: Debug`
impl<I: Interner> fmt::Debug for TyKind<I> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use std::fmt::*;
match self {
Bool => Formatter::write_str(f, "Bool"),
Char => Formatter::write_str(f, "Char"),
Int(f0) => Formatter::debug_tuple_field1_finish(f, "Int", f0),
Uint(f0) => Formatter::debug_tuple_field1_finish(f, "Uint", f0),
Float(f0) => Formatter::debug_tuple_field1_finish(f, "Float", f0),
Adt(f0, f1) => Formatter::debug_tuple_field2_finish(f, "Adt", f0, f1),
Foreign(f0) => Formatter::debug_tuple_field1_finish(f, "Foreign", f0),
Str => Formatter::write_str(f, "Str"),
Array(f0, f1) => Formatter::debug_tuple_field2_finish(f, "Array", f0, f1),
Slice(f0) => Formatter::debug_tuple_field1_finish(f, "Slice", f0),
RawPtr(f0) => Formatter::debug_tuple_field1_finish(f, "RawPtr", f0),
Ref(f0, f1, f2) => Formatter::debug_tuple_field3_finish(f, "Ref", f0, f1, f2),
FnDef(f0, f1) => Formatter::debug_tuple_field2_finish(f, "FnDef", f0, f1),
FnPtr(f0) => Formatter::debug_tuple_field1_finish(f, "FnPtr", f0),
Dynamic(f0, f1, f2) => Formatter::debug_tuple_field3_finish(f, "Dynamic", f0, f1, f2),
Closure(f0, f1) => Formatter::debug_tuple_field2_finish(f, "Closure", f0, f1),
Generator(f0, f1, f2) => {
Formatter::debug_tuple_field3_finish(f, "Generator", f0, f1, f2)
}
GeneratorWitness(f0) => Formatter::debug_tuple_field1_finish(f, "GeneratorWitness", f0),
Never => Formatter::write_str(f, "Never"),
Tuple(f0) => Formatter::debug_tuple_field1_finish(f, "Tuple", f0),
Projection(f0) => Formatter::debug_tuple_field1_finish(f, "Projection", f0),
Opaque(f0, f1) => Formatter::debug_tuple_field2_finish(f, "Opaque", f0, f1),
Param(f0) => Formatter::debug_tuple_field1_finish(f, "Param", f0),
Bound(f0, f1) => Formatter::debug_tuple_field2_finish(f, "Bound", f0, f1),
Placeholder(f0) => Formatter::debug_tuple_field1_finish(f, "Placeholder", f0),
Infer(f0) => Formatter::debug_tuple_field1_finish(f, "Infer", f0),
TyKind::Error(f0) => Formatter::debug_tuple_field1_finish(f, "Error", f0),
Bool => f.write_str("Bool"),
Char => f.write_str("Char"),
Int(i) => f.debug_tuple_field1_finish("Int", i),
Uint(u) => f.debug_tuple_field1_finish("Uint", u),
Float(float) => f.debug_tuple_field1_finish("Float", float),
Adt(d, s) => f.debug_tuple_field2_finish("Adt", d, s),
Foreign(d) => f.debug_tuple_field1_finish("Foreign", d),
Str => f.write_str("Str"),
Array(t, c) => f.debug_tuple_field2_finish("Array", t, c),
Slice(t) => f.debug_tuple_field1_finish("Slice", t),
RawPtr(t) => f.debug_tuple_field1_finish("RawPtr", t),
Ref(r, t, m) => f.debug_tuple_field3_finish("Ref", r, t, m),
FnDef(d, s) => f.debug_tuple_field2_finish("FnDef", d, s),
FnPtr(s) => f.debug_tuple_field1_finish("FnPtr", s),
Dynamic(p, r, repr) => f.debug_tuple_field3_finish("Dynamic", p, r, repr),
Closure(d, s) => f.debug_tuple_field2_finish("Closure", d, s),
Generator(d, s, m) => f.debug_tuple_field3_finish("Generator", d, s, m),
GeneratorWitness(g) => f.debug_tuple_field1_finish("GeneratorWitness", g),
Never => f.write_str("Never"),
Tuple(t) => f.debug_tuple_field1_finish("Tuple", t),
Projection(p) => f.debug_tuple_field1_finish("Projection", p),
Opaque(d, s) => f.debug_tuple_field2_finish("Opaque", d, s),
Param(p) => f.debug_tuple_field1_finish("Param", p),
Bound(d, b) => f.debug_tuple_field2_finish("Bound", d, b),
Placeholder(p) => f.debug_tuple_field1_finish("Placeholder", p),
Infer(t) => f.debug_tuple_field1_finish("Infer", t),
TyKind::Error(e) => f.debug_tuple_field1_finish("Error", e),
}
}
}
@ -1091,12 +964,12 @@ where
impl<I: Interner> Clone for RegionKind<I> {
fn clone(&self) -> Self {
match self {
ReEarlyBound(a) => ReEarlyBound(a.clone()),
ReLateBound(a, b) => ReLateBound(a.clone(), b.clone()),
ReFree(a) => ReFree(a.clone()),
ReEarlyBound(r) => ReEarlyBound(r.clone()),
ReLateBound(d, r) => ReLateBound(d.clone(), r.clone()),
ReFree(r) => ReFree(r.clone()),
ReStatic => ReStatic,
ReVar(a) => ReVar(a.clone()),
RePlaceholder(a) => RePlaceholder(a.clone()),
ReVar(r) => ReVar(r.clone()),
RePlaceholder(r) => RePlaceholder(r.clone()),
ReErased => ReErased,
}
}
@ -1106,29 +979,23 @@ impl<I: Interner> Clone for RegionKind<I> {
impl<I: Interner> PartialEq for RegionKind<I> {
#[inline]
fn eq(&self, other: &RegionKind<I>) -> bool {
let __self_vi = regionkind_discriminant(self);
let __arg_1_vi = regionkind_discriminant(other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&ReEarlyBound(ref __self_0), &ReEarlyBound(ref __arg_1_0)) => {
__self_0 == __arg_1_0
regionkind_discriminant(self) == regionkind_discriminant(other)
&& match (self, other) {
(ReEarlyBound(a_r), ReEarlyBound(b_r)) => a_r == b_r,
(ReLateBound(a_d, a_r), ReLateBound(b_d, b_r)) => a_d == b_d && a_r == b_r,
(ReFree(a_r), ReFree(b_r)) => a_r == b_r,
(ReStatic, ReStatic) => true,
(ReVar(a_r), ReVar(b_r)) => a_r == b_r,
(RePlaceholder(a_r), RePlaceholder(b_r)) => a_r == b_r,
(ReErased, ReErased) => true,
_ => {
debug_assert!(
false,
"This branch must be unreachable, maybe the match is missing an arm? self = self = {self:?}, other = {other:?}"
);
true
}
(
&ReLateBound(ref __self_0, ref __self_1),
&ReLateBound(ref __arg_1_0, ref __arg_1_1),
) => __self_0 == __arg_1_0 && __self_1 == __arg_1_1,
(&ReFree(ref __self_0), &ReFree(ref __arg_1_0)) => __self_0 == __arg_1_0,
(&ReStatic, &ReStatic) => true,
(&ReVar(ref __self_0), &ReVar(ref __arg_1_0)) => __self_0 == __arg_1_0,
(&RePlaceholder(ref __self_0), &RePlaceholder(ref __arg_1_0)) => {
__self_0 == __arg_1_0
}
(&ReErased, &ReErased) => true,
_ => true,
}
} else {
false
}
}
}
@ -1139,7 +1006,7 @@ impl<I: Interner> Eq for RegionKind<I> {}
impl<I: Interner> PartialOrd for RegionKind<I> {
#[inline]
fn partial_cmp(&self, other: &RegionKind<I>) -> Option<Ordering> {
Some(Ord::cmp(self, other))
Some(self.cmp(other))
}
}
@ -1147,66 +1014,41 @@ impl<I: Interner> PartialOrd for RegionKind<I> {
impl<I: Interner> Ord for RegionKind<I> {
#[inline]
fn cmp(&self, other: &RegionKind<I>) -> Ordering {
let __self_vi = regionkind_discriminant(self);
let __arg_1_vi = regionkind_discriminant(other);
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&ReEarlyBound(ref __self_0), &ReEarlyBound(ref __arg_1_0)) => {
Ord::cmp(__self_0, __arg_1_0)
regionkind_discriminant(self).cmp(&regionkind_discriminant(other)).then_with(|| {
match (self, other) {
(ReEarlyBound(a_r), ReEarlyBound(b_r)) => a_r.cmp(b_r),
(ReLateBound(a_d, a_r), ReLateBound(b_d, b_r)) => {
a_d.cmp(b_d).then_with(|| a_r.cmp(b_r))
}
(
&ReLateBound(ref __self_0, ref __self_1),
&ReLateBound(ref __arg_1_0, ref __arg_1_1),
) => match Ord::cmp(__self_0, __arg_1_0) {
Ordering::Equal => Ord::cmp(__self_1, __arg_1_1),
cmp => cmp,
},
(&ReFree(ref __self_0), &ReFree(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
(&ReStatic, &ReStatic) => Ordering::Equal,
(&ReVar(ref __self_0), &ReVar(ref __arg_1_0)) => Ord::cmp(__self_0, __arg_1_0),
(&RePlaceholder(ref __self_0), &RePlaceholder(ref __arg_1_0)) => {
Ord::cmp(__self_0, __arg_1_0)
(ReFree(a_r), ReFree(b_r)) => a_r.cmp(b_r),
(ReStatic, ReStatic) => Ordering::Equal,
(ReVar(a_r), ReVar(b_r)) => a_r.cmp(b_r),
(RePlaceholder(a_r), RePlaceholder(b_r)) => a_r.cmp(b_r),
(ReErased, ReErased) => Ordering::Equal,
_ => {
debug_assert!(false, "This branch must be unreachable, maybe the match is missing an arm? self = {self:?}, other = {other:?}");
Ordering::Equal
}
(&ReErased, &ReErased) => Ordering::Equal,
_ => Ordering::Equal,
}
} else {
Ord::cmp(&__self_vi, &__arg_1_vi)
}
})
}
}
// This is manually implemented because a derive would require `I: Hash`
impl<I: Interner> hash::Hash for RegionKind<I> {
fn hash<__H: hash::Hasher>(&self, state: &mut __H) -> () {
match (&*self,) {
(&ReEarlyBound(ref __self_0),) => {
hash::Hash::hash(&regionkind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
}
(&ReLateBound(ref __self_0, ref __self_1),) => {
hash::Hash::hash(&regionkind_discriminant(self), state);
hash::Hash::hash(__self_0, state);
hash::Hash::hash(__self_1, state)
}
(&ReFree(ref __self_0),) => {
hash::Hash::hash(&regionkind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
}
(&ReStatic,) => {
hash::Hash::hash(&regionkind_discriminant(self), state);
}
(&ReVar(ref __self_0),) => {
hash::Hash::hash(&regionkind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
}
(&RePlaceholder(ref __self_0),) => {
hash::Hash::hash(&regionkind_discriminant(self), state);
hash::Hash::hash(__self_0, state)
}
(&ReErased,) => {
hash::Hash::hash(&regionkind_discriminant(self), state);
fn hash<H: hash::Hasher>(&self, state: &mut H) -> () {
regionkind_discriminant(self).hash(state);
match self {
ReEarlyBound(r) => r.hash(state),
ReLateBound(d, r) => {
d.hash(state);
r.hash(state)
}
ReFree(r) => r.hash(state),
ReStatic => (),
ReVar(r) => r.hash(state),
RePlaceholder(r) => r.hash(state),
ReErased => (),
}
}
}
@ -1215,21 +1057,21 @@ impl<I: Interner> hash::Hash for RegionKind<I> {
impl<I: Interner> fmt::Debug for RegionKind<I> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ReEarlyBound(ref data) => write!(f, "ReEarlyBound({:?})", data),
ReEarlyBound(data) => write!(f, "ReEarlyBound({:?})", data),
ReLateBound(binder_id, ref bound_region) => {
ReLateBound(binder_id, bound_region) => {
write!(f, "ReLateBound({:?}, {:?})", binder_id, bound_region)
}
ReFree(ref fr) => fr.fmt(f),
ReFree(fr) => fr.fmt(f),
ReStatic => write!(f, "ReStatic"),
ReStatic => f.write_str("ReStatic"),
ReVar(ref vid) => vid.fmt(f),
ReVar(vid) => vid.fmt(f),
RePlaceholder(placeholder) => write!(f, "RePlaceholder({:?})", placeholder),
ReErased => write!(f, "ReErased"),
ReErased => f.write_str("ReErased"),
}
}
}
@ -1317,18 +1159,18 @@ where
ReErased | ReStatic => {
// No variant fields to hash for these ...
}
ReLateBound(db, br) => {
db.hash_stable(hcx, hasher);
br.hash_stable(hcx, hasher);
ReLateBound(d, r) => {
d.hash_stable(hcx, hasher);
r.hash_stable(hcx, hasher);
}
ReEarlyBound(eb) => {
eb.hash_stable(hcx, hasher);
ReEarlyBound(r) => {
r.hash_stable(hcx, hasher);
}
ReFree(ref free_region) => {
free_region.hash_stable(hcx, hasher);
ReFree(r) => {
r.hash_stable(hcx, hasher);
}
RePlaceholder(p) => {
p.hash_stable(hcx, hasher);
RePlaceholder(r) => {
r.hash_stable(hcx, hasher);
}
ReVar(_) => {
panic!("region variables should not be hashed: {self:?}")

View File

@ -0,0 +1,122 @@
use std::{
cmp::Ordering,
hash::{Hash, Hasher},
ops::Deref,
};
use rustc_data_structures::{
fingerprint::Fingerprint,
stable_hasher::{HashStable, StableHasher},
};
use crate::{DebruijnIndex, TypeFlags};
/// A helper type that you can wrap around your own type in order to automatically
/// cache the stable hash, type flags and De Bruijn index on creation and
/// not recompute them whenever the information is needed.
/// This is only done in incremental mode. You can also opt out of caching by using
/// Fingerprint::ZERO for the hash, in which case the hash gets computed each time.
/// This is useful if you have values that you intern but never (or cannot) use for stable
/// hashing.
#[derive(Copy, Clone)]
pub struct WithCachedTypeInfo<T> {
pub internee: T,
pub stable_hash: Fingerprint,
/// This field provides fast access to information that is also contained
/// in `kind`.
///
/// This field shouldn't be used directly and may be removed in the future.
/// Use `Ty::flags()` instead.
pub flags: TypeFlags,
/// This field provides fast access to information that is also contained
/// in `kind`.
///
/// This is a somewhat subtle concept: it stores the smallest
/// binder such that
///
/// (a) the binder itself captures nothing but
/// (b) all the late-bound things within the type are captured
/// by some sub-binder.
///
/// So, for a type without any late-bound things, like `u32`, this
/// will be *innermost*, because that is the innermost binder that
/// captures nothing. But for a type `&'D u32`, where `'D` is a
/// late-bound region with De Bruijn index `D`, this would be `D + 1`
/// -- the binder itself does not capture `D`, but `D` is captured
/// by an inner binder.
///
/// We call this concept an "exclusive" binder `D` because all
/// De Bruijn indices within the type are contained within `0..D`
/// (exclusive).
pub outer_exclusive_binder: DebruijnIndex,
}
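
To make the "outer exclusive binder" description above concrete, here is a toy sketch of the computation on a made-up, simplified type representation (none of these types are the compiler's own):

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct ToyDebruijnIndex(u32);

enum ToyTy {
    // No late-bound things at all, e.g. `u32`.
    U32,
    // A reference whose region is late-bound with De Bruijn index `D`,
    // e.g. `&'D u32`.
    RefBound(ToyDebruijnIndex),
    // A compound type built from other types.
    Tuple(Vec<ToyTy>),
}

fn outer_exclusive_binder(ty: &ToyTy) -> ToyDebruijnIndex {
    match ty {
        // The innermost binder already captures nothing.
        ToyTy::U32 => ToyDebruijnIndex(0),
        // `'D` must be captured by some binder inside the result,
        // so the smallest exclusive binder is `D + 1`.
        ToyTy::RefBound(d) => ToyDebruijnIndex(d.0 + 1),
        // A compound type takes the maximum over its components.
        ToyTy::Tuple(tys) => tys
            .iter()
            .map(outer_exclusive_binder)
            .max()
            .unwrap_or(ToyDebruijnIndex(0)),
    }
}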
impl<T: PartialEq> PartialEq for WithCachedTypeInfo<T> {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.internee.eq(&other.internee)
}
}
impl<T: Eq> Eq for WithCachedTypeInfo<T> {}
impl<T: Ord> PartialOrd for WithCachedTypeInfo<T> {
fn partial_cmp(&self, other: &WithCachedTypeInfo<T>) -> Option<Ordering> {
Some(self.internee.cmp(&other.internee))
}
}
impl<T: Ord> Ord for WithCachedTypeInfo<T> {
fn cmp(&self, other: &WithCachedTypeInfo<T>) -> Ordering {
self.internee.cmp(&other.internee)
}
}
impl<T> Deref for WithCachedTypeInfo<T> {
type Target = T;
#[inline]
fn deref(&self) -> &T {
&self.internee
}
}
impl<T: Hash> Hash for WithCachedTypeInfo<T> {
#[inline]
fn hash<H: Hasher>(&self, s: &mut H) {
if self.stable_hash != Fingerprint::ZERO {
self.stable_hash.hash(s)
} else {
self.internee.hash(s)
}
}
}
impl<T: HashStable<CTX>, CTX> HashStable<CTX> for WithCachedTypeInfo<T> {
fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
if self.stable_hash == Fingerprint::ZERO || cfg!(debug_assertions) {
// No cached hash available. This can only mean that incremental is disabled.
// We don't cache stable hashes in non-incremental mode, because they are used
// so rarely that the performance actually suffers.
// We need to build the hash as if we cached it and then hash that hash, as
// otherwise the hashes will differ between cached and non-cached mode.
let stable_hash: Fingerprint = {
let mut hasher = StableHasher::new();
self.internee.hash_stable(hcx, &mut hasher);
hasher.finish()
};
if cfg!(debug_assertions) && self.stable_hash != Fingerprint::ZERO {
assert_eq!(
stable_hash, self.stable_hash,
"cached stable hash does not match freshly computed stable hash"
);
}
stable_hash.hash_stable(hcx, hasher);
} else {
self.stable_hash.hash_stable(hcx, hasher);
}
}
}
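
The opt-in caching described in the doc comment above can be illustrated with a self-contained sketch using only std types; CachedHash and the zero sentinel below are simplified stand-ins for WithCachedTypeInfo and Fingerprint::ZERO, not the actual compiler types:

use std::hash::{Hash, Hasher};
use std::ops::Deref;

struct CachedHash<T> {
    value: T,
    // 0 plays the role of `Fingerprint::ZERO`: "no cached hash, recompute".
    cached_hash: u64,
}

impl<T: Hash> Hash for CachedHash<T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        if self.cached_hash != 0 {
            // Fast path: hash the precomputed hash instead of the value.
            self.cached_hash.hash(state)
        } else {
            // Opt-out path: fall back to hashing the wrapped value itself.
            self.value.hash(state)
        }
    }
}

// `Deref` lets callers treat the wrapper like the wrapped value,
// mirroring how `WithCachedTypeInfo<T>` derefs to `T`.
impl<T> Deref for CachedHash<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.value
    }
}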

View File

@ -44,7 +44,7 @@ pub use poll_fn::{poll_fn, PollFn};
/// non-Send/Sync as well, and we don't want that.
///
/// It also simplifies the HIR lowering of `.await`.
#[cfg_attr(not(bootstrap), lang = "ResumeTy")]
// FIXME(swatinem): This type can be removed when bumping the bootstrap compiler
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]
#[derive(Debug, Copy, Clone)]
@ -61,6 +61,7 @@ unsafe impl Sync for ResumeTy {}
/// This function returns a `GenFuture` underneath, but hides it in `impl Trait` to give
/// better error messages (`impl Future` rather than `GenFuture<[closure.....]>`).
// This is `const` to avoid extra errors after we recover from `const async fn`
// FIXME(swatinem): This fn can be removed when bumping the bootstrap compiler
#[cfg_attr(bootstrap, lang = "from_generator")]
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]
@ -102,7 +103,8 @@ where
GenFuture(gen)
}
#[lang = "get_context"]
// FIXME(swatinem): This fn can be removed when bumping the bootstrap compiler
#[cfg_attr(bootstrap, lang = "get_context")]
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]
#[must_use]
@ -113,6 +115,10 @@ pub unsafe fn get_context<'a, 'b>(cx: ResumeTy) -> &'a mut Context<'b> {
unsafe { &mut *cx.0.as_ptr().cast() }
}
// FIXME(swatinem): This fn is currently needed to work around shortcomings
// in type and lifetime inference.
// See the comment at the bottom of `LoweringContext::make_async_expr` and
// <https://github.com/rust-lang/rust/issues/104826>.
#[cfg_attr(not(bootstrap), lang = "identity_future")]
#[doc(hidden)]
#[unstable(feature = "gen_future", issue = "50547")]

View File

@ -174,6 +174,7 @@ impl RawWakerVTable {
/// Currently, `Context` only serves to provide access to a [`&Waker`](Waker)
/// which can be used to wake the current task.
#[stable(feature = "futures_api", since = "1.36.0")]
#[cfg_attr(not(bootstrap), lang = "Context")]
pub struct Context<'a> {
waker: &'a Waker,
// Ensure we future-proof against variance changes by forcing
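
Since `Context` is now what the async lowering threads through as the resume argument, it may help to recall how a hand-written future uses the same `&mut Context<'_>` it receives in `poll`. A minimal sketch, with the `YieldOnce` future made up purely for illustration:

use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll};

struct YieldOnce {
    polled: bool,
}

impl Future for YieldOnce {
    type Output = ();

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
        if self.polled {
            Poll::Ready(())
        } else {
            self.polled = true;
            // `Context` exists to carry the `&Waker`; waking it asks the
            // executor to poll this future again.
            cx.waker().wake_by_ref();
            Poll::Pending
        }
    }
}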

View File

@ -195,8 +195,7 @@ h1, h2, h3, h4, h5, h6,
span.since,
a.srclink,
#help-button > a,
details.rustdoc-toggle.top-doc > summary,
details.rustdoc-toggle.non-exhaustive > summary,
summary.hideme,
.scraped-example-list,
/* This selector is for the items listed in the "all items" page. */
ul.all-items {
@ -1484,6 +1483,7 @@ details.rustdoc-toggle {
"Expand description" or "Show methods". */
details.rustdoc-toggle > summary.hideme {
cursor: pointer;
font-size: 1rem;
}
details.rustdoc-toggle > summary {
@ -1546,13 +1546,6 @@ details.rustdoc-toggle > summary:focus-visible::before {
outline-offset: 1px;
}
details.rustdoc-toggle.top-doc > summary,
details.rustdoc-toggle.top-doc > summary::before,
details.rustdoc-toggle.non-exhaustive > summary,
details.rustdoc-toggle.non-exhaustive > summary::before {
font-size: 1rem;
}
details.non-exhaustive {
margin-bottom: 8px;
}

View File

@ -3,3 +3,8 @@ goto: "file://" + |DOC_PATH| + "/test_docs/enum.WhoLetTheDogOut.html"
assert-css: (".variants > .variant", {"margin": "0px 0px 12px"})
assert-css: (".variants > .docblock", {"margin": "0px 0px 32px 24px"})
assert-css: (
"details.non-exhaustive > summary",
{"font-family": '"Fira Sans", Arial, NanumBarunGothic, sans-serif'},
)

View File

@ -76,6 +76,7 @@ impl AsRef<str> for Foo {
///
/// # title!
#[doc(alias = "ThisIsAnAlias")]
#[non_exhaustive]
pub enum WhoLetTheDogOut {
/// Woof!
Woof,

View File

@ -7,6 +7,10 @@ wait-for: 50
// This is now collapsed so there shouldn't be the "open" attribute on details.
assert-attribute-false: ("#main-content > details.top-doc", {"open": ""})
assert-text: ("#toggle-all-docs", "[+]")
assert-css: (
"#main-content > details.top-doc > summary",
{"font-family": '"Fira Sans", Arial, NanumBarunGothic, sans-serif'},
)
click: "#toggle-all-docs"
// Not collapsed anymore so the "open" attribute should be back.
wait-for-attribute: ("#main-content > details.top-doc", {"open": ""})

View File

@ -77,6 +77,7 @@
-Z location-detail=val -- what location details should be tracked when using caller_location, either `none`, or a comma separated list of location details, for which valid options are `file`, `line`, and `column` (default: `file,line,column`)
-Z ls=val -- list the symbols defined by a library crate (default: no)
-Z macro-backtrace=val -- show macro backtraces (default: no)
-Z maximal-hir-to-mir-coverage=val -- save as much information as possible about the correspondence between MIR and HIR as source scopes (default: no)
-Z merge-functions=val -- control the operation of the MergeFunctions LLVM pass, taking the same values as the target option of the same name
-Z meta-stats=val -- gather metadata statistics (default: no)
-Z mir-emit-retag=val -- emit Retagging MIR statements, interpreted e.g., by miri; implies -Zmir-opt-level=0 (default: no)

View File

@ -40,7 +40,7 @@ LL | async fn bar2<T>(_: T) -> ! {
LL | | panic!()
LL | | }
| |_^
= note: required because it captures the following types: `ResumeTy`, `Option<bool>`, `impl Future<Output = !>`, `()`
= note: required because it captures the following types: `&mut Context<'_>`, `Option<bool>`, `impl Future<Output = !>`, `()`
note: required because it's used within this `async fn` body
--> $DIR/async-await-let-else.rs:21:32
|

View File

@ -57,7 +57,7 @@ note: required because it appears within the type `impl Future<Output = Arc<RefC
|
LL | fn make_non_send_future2() -> impl Future<Output = Arc<RefCell<i32>>> {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: required because it captures the following types: `ResumeTy`, `impl Future<Output = Arc<RefCell<i32>>>`, `()`, `Ready<i32>`
= note: required because it captures the following types: `&mut Context<'_>`, `impl Future<Output = Arc<RefCell<i32>>>`, `()`, `Ready<i32>`
note: required because it's used within this `async` block
--> $DIR/issue-68112.rs:60:20
|

View File

@ -57,7 +57,7 @@ note: required because it appears within the type `impl Future<Output = Arc<RefC
|
LL | fn make_non_send_future2() -> impl Future<Output = Arc<RefCell<i32>>> {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: required because it captures the following types: `ResumeTy`, `impl Future<Output = Arc<RefCell<i32>>>`, `()`, `i32`, `Ready<i32>`
= note: required because it captures the following types: `&mut Context<'_>`, `impl Future<Output = Arc<RefCell<i32>>>`, `()`, `i32`, `Ready<i32>`
note: required because it's used within this `async` block
--> $DIR/issue-68112.rs:60:20
|

View File

@ -14,6 +14,9 @@ LL | | });
|
= note: `FnMut` closures only have access to their captured variables while they are executing...
= note: ...therefore, they cannot allow references to captured variables to escape
= note: requirement occurs because of a mutable reference to `Context<'_>`
= note: mutable references are invariant over their type parameter
= help: see <https://doc.rust-lang.org/nomicon/subtyping.html> for more information about variance
error: aborting due to previous error

View File

@ -18,7 +18,7 @@ LL | async fn baz<T>(_c: impl FnMut() -> T) where T: Future<Output=()> {
| ___________________________________________________________________^
LL | | }
| |_^
= note: required because it captures the following types: `ResumeTy`, `impl Future<Output = ()>`, `()`
= note: required because it captures the following types: `&mut Context<'_>`, `impl Future<Output = ()>`, `()`
note: required because it's used within this `async` block
--> $DIR/issue-70935-complex-spans.rs:16:5
|

View File

@ -11,7 +11,7 @@ LL | async fn foo() {
|
= help: within `impl Future<Output = ()>`, the trait `Send` is not implemented for `NotSend`
= note: required because it appears within the type `(NotSend,)`
= note: required because it captures the following types: `ResumeTy`, `(NotSend,)`, `()`, `impl Future<Output = ()>`
= note: required because it captures the following types: `&mut Context<'_>`, `(NotSend,)`, `()`, `impl Future<Output = ()>`
note: required because it's used within this `async fn` body
--> $DIR/partial-drop-partial-reinit.rs:31:16
|

View File

@ -11,7 +11,7 @@ LL | async fn foo() {
|
= help: within `impl Future<Output = ()>`, the trait `Send` is not implemented for `NotSend`
= note: required because it appears within the type `(NotSend,)`
= note: required because it captures the following types: `ResumeTy`, `(NotSend,)`, `impl Future<Output = ()>`, `()`
= note: required because it captures the following types: `&mut Context<'_>`, `(NotSend,)`, `impl Future<Output = ()>`, `()`
note: required because it's used within this `async fn` body
--> $DIR/partial-drop-partial-reinit.rs:31:16
|

View File

@ -0,0 +1,6 @@
// error-pattern: this file contains an unclosed delimiter
// error-pattern: this file contains an unclosed delimiter
// error-pattern: this file contains an unclosed delimiter
// error-pattern: format argument must be a string literal
fn f(){(print!(á

View File

@ -0,0 +1,43 @@
error: this file contains an unclosed delimiter
--> $DIR/issue-104897.rs:6:18
|
LL | fn f(){(print!(á
| -- - ^
| || |
| || unclosed delimiter
| |unclosed delimiter
| unclosed delimiter
error: this file contains an unclosed delimiter
--> $DIR/issue-104897.rs:6:18
|
LL | fn f(){(print!(á
| -- - ^
| || |
| || unclosed delimiter
| |unclosed delimiter
| unclosed delimiter
error: this file contains an unclosed delimiter
--> $DIR/issue-104897.rs:6:18
|
LL | fn f(){(print!(á
| -- - ^
| || |
| || unclosed delimiter
| |unclosed delimiter
| unclosed delimiter
error: format argument must be a string literal
--> $DIR/issue-104897.rs:6:16
|
LL | fn f(){(print!(á
| ^
|
help: you might be missing a string literal to format with
|
LL | fn f(){(print!("{}", á
| +++++
error: aborting due to 4 previous errors

View File

@ -0,0 +1,10 @@
// compile-flags: -Zmaximal-hir-to-mir-coverage
// run-pass
// Just making sure this flag is accepted and doesn't crash the compiler
fn main() {
let x = 1;
let y = x + 1;
println!("{y}");
}

View File

@ -22,11 +22,11 @@ fn good_generic_fn<T>() {
// This should fail because `T` ends up in the upvars of the closure.
fn bad_generic_fn<T: Copy>(t: T) {
assert_static(opaque(async move { t; }).next());
//~^ ERROR the associated type `<impl Iterator as Iterator>::Item` may not live long enough
//~^ ERROR the parameter type `T` may not live long enough
assert_static(opaque(move || { t; }).next());
//~^ ERROR the associated type `<impl Iterator as Iterator>::Item` may not live long enough
assert_static(opaque(opaque(async move { t; }).next()).next());
//~^ ERROR the associated type `<impl Iterator as Iterator>::Item` may not live long enough
//~^ ERROR the parameter type `T` may not live long enough
}
fn main() {}

View File

@ -1,11 +1,13 @@
error[E0310]: the associated type `<impl Iterator as Iterator>::Item` may not live long enough
error[E0310]: the parameter type `T` may not live long enough
--> $DIR/closure-in-projection-issue-97405.rs:24:5
|
LL | assert_static(opaque(async move { t; }).next());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ...so that the type `T` will meet its required lifetime bounds
|
= help: consider adding an explicit lifetime bound `<impl Iterator as Iterator>::Item: 'static`...
= note: ...so that the type `<impl Iterator as Iterator>::Item` will meet its required lifetime bounds
help: consider adding an explicit lifetime bound...
|
LL | fn bad_generic_fn<T: Copy + 'static>(t: T) {
| +++++++++
error[E0310]: the associated type `<impl Iterator as Iterator>::Item` may not live long enough
--> $DIR/closure-in-projection-issue-97405.rs:26:5
@ -16,14 +18,16 @@ LL | assert_static(opaque(move || { t; }).next());
= help: consider adding an explicit lifetime bound `<impl Iterator as Iterator>::Item: 'static`...
= note: ...so that the type `<impl Iterator as Iterator>::Item` will meet its required lifetime bounds
error[E0310]: the associated type `<impl Iterator as Iterator>::Item` may not live long enough
error[E0310]: the parameter type `T` may not live long enough
--> $DIR/closure-in-projection-issue-97405.rs:28:5
|
LL | assert_static(opaque(opaque(async move { t; }).next()).next());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ...so that the type `T` will meet its required lifetime bounds
|
= help: consider adding an explicit lifetime bound `<impl Iterator as Iterator>::Item: 'static`...
= note: ...so that the type `<impl Iterator as Iterator>::Item` will meet its required lifetime bounds
help: consider adding an explicit lifetime bound...
|
LL | fn bad_generic_fn<T: Copy + 'static>(t: T) {
| +++++++++
error: aborting due to 3 previous errors

View File

@ -1,8 +1,10 @@
error[E0277]: the trait bound `T: GlUniformScalar` is not satisfied
--> $DIR/assoc-const-as-fn.rs:14:5
--> $DIR/assoc-const-as-fn.rs:14:40
|
LL | <T as GlUniformScalar>::FACTORY(1, value);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `GlUniformScalar` is not implemented for `T`
| ------------------------------- ^^^^^ the trait `GlUniformScalar` is not implemented for `T`
| |
| required by a bound introduced by this call
|
help: consider further restricting this bound
|

View File

@ -14,4 +14,19 @@ fn use_foo(x: Foo) -> i32 {
return foo;
}
// issue #105028, suggest removing the field only for shorthand
fn use_match(x: Foo) {
match x {
Foo { foo: unused, .. } => { //~ WARNING unused variable
//~| help: if this is intentional, prefix it with an underscore
}
}
match x {
Foo { foo, .. } => { //~ WARNING unused variable
//~| help: try removing the field
}
}
}
fn main() {}

View File

@ -8,5 +8,19 @@ LL | let Foo { foo, bar, .. } = x;
|
= note: `#[warn(unused_variables)]` on by default
warning: 1 warning emitted
warning: unused variable: `unused`
--> $DIR/try-removing-the-field.rs:20:20
|
LL | Foo { foo: unused, .. } => {
| ^^^^^^ help: if this is intentional, prefix it with an underscore: `_unused`
warning: unused variable: `foo`
--> $DIR/try-removing-the-field.rs:26:15
|
LL | Foo { foo, .. } => {
| ^^^-
| |
| help: try removing the field
warning: 3 warnings emitted