Auto merge of #99979 - Dylan-DPC:rollup-ikkejgy, r=Dylan-DPC

Rollup of 5 pull requests

Successful merges:

 - #99186 (Use LocalDefId for closures more)
 - #99741 (Use `impl`'s generics when suggesting fix on bad `impl Copy`)
 - #99844 (Introduce an ArchiveBuilderBuilder)
 - #99921 (triagebot.yml: CC Enselic when rustdoc-json-types changes)
 - #99974 (Suggest removing a semicolon and boxing the expressions for if-else)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed 2022-07-31 13:26:27 +00:00
commit e5a7d8f945
48 changed files with 703 additions and 399 deletions
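Background for #99186, which threads `LocalDefId` through borrowck, THIR building, and closure-capture analysis in the diffs below: closures and generators are always defined in the crate currently being compiled, so those passes can carry a `LocalDefId` directly instead of a `DefId` plus repeated `expect_local()` calls. The following is a minimal, self-contained sketch of that relationship; the struct layouts and index values are simplified stand-ins, not the real `rustc_span`/`rustc_hir` definitions.

```rust
// Simplified stand-ins for rustc's `DefId`/`LocalDefId`; the real types carry
// interned crate and definition indices.
const LOCAL_CRATE: u32 = 0;

#[derive(Clone, Copy, Debug, PartialEq)]
struct DefId {
    krate: u32,
    index: u32,
}

#[derive(Clone, Copy, Debug, PartialEq)]
struct LocalDefId {
    index: u32,
}

impl DefId {
    // `None` for definitions that come from another crate.
    fn as_local(self) -> Option<LocalDefId> {
        (self.krate == LOCAL_CRATE).then(|| LocalDefId { index: self.index })
    }
    fn expect_local(self) -> LocalDefId {
        self.as_local().expect("expect_local called on a non-local DefId")
    }
}

impl LocalDefId {
    // Widening is always possible; narrowing (above) can fail.
    fn to_def_id(self) -> DefId {
        DefId { krate: LOCAL_CRATE, index: self.index }
    }
}

fn main() {
    // Closures and generators are always local to the crate being compiled,
    // so passes that only ever see them can hold a LocalDefId and widen it
    // to a DefId only at the boundaries that genuinely need one.
    let closure = LocalDefId { index: 7 };
    assert_eq!(closure.to_def_id().expect_local(), closure);
}
```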


@ -6,7 +6,6 @@ use rustc_errors::{
struct_span_err, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, MultiSpan,
};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::{walk_block, walk_expr, Visitor};
use rustc_hir::{AsyncGeneratorKind, GeneratorKind};
use rustc_infer::infer::TyCtxtInferExt;
@ -21,6 +20,7 @@ use rustc_middle::ty::{
self, subst::Subst, suggest_constraining_type_params, EarlyBinder, PredicateKind, Ty,
};
use rustc_mir_dataflow::move_paths::{InitKind, MoveOutIndex, MovePathIndex};
use rustc_span::def_id::LocalDefId;
use rustc_span::hygiene::DesugaringKind;
use rustc_span::symbol::sym;
use rustc_span::{BytePos, Span, Symbol};
@ -2223,7 +2223,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let ty = self.infcx.tcx.type_of(self.mir_def_id());
match ty.kind() {
ty::FnDef(_, _) | ty::FnPtr(_) => self.annotate_fn_sig(
self.mir_def_id().to_def_id(),
self.mir_def_id(),
self.infcx.tcx.fn_sig(self.mir_def_id()),
),
_ => None,
@ -2267,8 +2267,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// Check if our `target` was captured by a closure.
if let Rvalue::Aggregate(
box AggregateKind::Closure(def_id, substs),
operands,
) = rvalue
ref operands,
) = *rvalue
{
for operand in operands {
let (Operand::Copy(assigned_from) | Operand::Move(assigned_from)) = operand else {
@ -2292,7 +2292,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// into a place then we should annotate the closure in
// case it ends up being assigned into the return place.
annotated_closure =
self.annotate_fn_sig(*def_id, substs.as_closure().sig());
self.annotate_fn_sig(def_id, substs.as_closure().sig());
debug!(
"annotate_argument_and_return_for_borrow: \
annotated_closure={:?} assigned_from_local={:?} \
@ -2414,12 +2414,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
/// references.
fn annotate_fn_sig(
&self,
did: DefId,
did: LocalDefId,
sig: ty::PolyFnSig<'tcx>,
) -> Option<AnnotatedBorrowFnSignature<'tcx>> {
debug!("annotate_fn_sig: did={:?} sig={:?}", did, sig);
let is_closure = self.infcx.tcx.is_closure(did);
let fn_hir_id = self.infcx.tcx.hir().local_def_id_to_hir_id(did.as_local()?);
let is_closure = self.infcx.tcx.is_closure(did.to_def_id());
let fn_hir_id = self.infcx.tcx.hir().local_def_id_to_hir_id(did);
let fn_decl = self.infcx.tcx.hir().fn_decl_by_hir_id(fn_hir_id)?;
// We need to work out which arguments to highlight. We do this by looking


@ -5,9 +5,9 @@ use rustc_const_eval::util::{call_kind, CallDesugaringKind};
use rustc_errors::{Applicability, Diagnostic};
use rustc_hir as hir;
use rustc_hir::def::Namespace;
use rustc_hir::def_id::DefId;
use rustc_hir::GeneratorKind;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::mir::tcx::PlaceTy;
use rustc_middle::mir::{
AggregateKind, Constant, FakeReadCause, Field, Local, LocalInfo, LocalKind, Location, Operand,
Place, PlaceRef, ProjectionElem, Rvalue, Statement, StatementKind, Terminator, TerminatorKind,
@ -15,6 +15,7 @@ use rustc_middle::mir::{
use rustc_middle::ty::print::Print;
use rustc_middle::ty::{self, DefIdTree, Instance, Ty, TyCtxt};
use rustc_mir_dataflow::move_paths::{InitLocation, LookupResult};
use rustc_span::def_id::LocalDefId;
use rustc_span::{symbol::sym, Span, DUMMY_SP};
use rustc_target::abi::VariantIdx;
use rustc_trait_selection::traits::type_known_to_meet_bound_modulo_regions;
@ -41,7 +42,6 @@ pub(crate) use outlives_suggestion::OutlivesSuggestionBuilder;
pub(crate) use region_errors::{ErrorConstraintInfo, RegionErrorKind, RegionErrors};
pub(crate) use region_name::{RegionName, RegionNameSource};
pub(crate) use rustc_const_eval::util::CallKind;
use rustc_middle::mir::tcx::PlaceTy;
pub(super) struct IncludingDowncast(pub(super) bool);
@ -325,10 +325,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// so it's safe to call `expect_local`.
//
// We know the field exists so it's safe to call operator[] and `unwrap` here.
let def_id = def_id.expect_local();
let var_id = self
.infcx
.tcx
.typeck(def_id.expect_local())
.typeck(def_id)
.closure_min_captures_flattened(def_id)
.nth(field.index())
.unwrap()
@ -715,12 +716,11 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
debug!("move_spans: moved_place={:?} location={:?} stmt={:?}", moved_place, location, stmt);
if let StatementKind::Assign(box (_, Rvalue::Aggregate(ref kind, ref places))) = stmt.kind {
match kind {
box AggregateKind::Closure(def_id, _)
| box AggregateKind::Generator(def_id, _, _) => {
match **kind {
AggregateKind::Closure(def_id, _) | AggregateKind::Generator(def_id, _, _) => {
debug!("move_spans: def_id={:?} places={:?}", def_id, places);
if let Some((args_span, generator_kind, capture_kind_span, path_span)) =
self.closure_span(*def_id, moved_place, places)
self.closure_span(def_id, moved_place, places)
{
return ClosureUse {
generator_kind,
@ -847,7 +847,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
if let StatementKind::Assign(box (_, Rvalue::Aggregate(ref kind, ref places))) =
stmt.kind
{
let (def_id, is_generator) = match kind {
let (&def_id, is_generator) = match kind {
box AggregateKind::Closure(def_id, _) => (def_id, false),
box AggregateKind::Generator(def_id, _, _) => (def_id, true),
_ => continue,
@ -858,7 +858,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
def_id, is_generator, places
);
if let Some((args_span, generator_kind, capture_kind_span, path_span)) =
self.closure_span(*def_id, Place::from(target).as_ref(), places)
self.closure_span(def_id, Place::from(target).as_ref(), places)
{
return ClosureUse { generator_kind, args_span, capture_kind_span, path_span };
} else {
@ -879,7 +879,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
/// The second span is the location the use resulting in the captured path of the capture
fn closure_span(
&self,
def_id: DefId,
def_id: LocalDefId,
target_place: PlaceRef<'tcx>,
places: &[Operand<'tcx>],
) -> Option<(Span, Option<GeneratorKind>, Span, Span)> {
@ -887,17 +887,12 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
"closure_span: def_id={:?} target_place={:?} places={:?}",
def_id, target_place, places
);
let local_did = def_id.as_local()?;
let hir_id = self.infcx.tcx.hir().local_def_id_to_hir_id(local_did);
let hir_id = self.infcx.tcx.hir().local_def_id_to_hir_id(def_id);
let expr = &self.infcx.tcx.hir().expect_expr(hir_id).kind;
debug!("closure_span: hir_id={:?} expr={:?}", hir_id, expr);
if let hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, .. }) = expr {
for (captured_place, place) in self
.infcx
.tcx
.typeck(def_id.expect_local())
.closure_min_captures_flattened(def_id)
.zip(places)
for (captured_place, place) in
self.infcx.tcx.typeck(def_id).closure_min_captures_flattened(def_id).zip(places)
{
match place {
Operand::Copy(place) | Operand::Move(place)


@ -343,7 +343,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
);
let tcx = self.infcx.tcx;
if let ty::Closure(id, _) = *the_place_err.ty(self.body, tcx).ty.kind() {
self.show_mutating_upvar(tcx, id, the_place_err, &mut err);
self.show_mutating_upvar(tcx, id.expect_local(), the_place_err, &mut err);
}
}
@ -382,7 +382,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
if let ty::Ref(_, ty, Mutability::Mut) = the_place_err.ty(self.body, tcx).ty.kind()
&& let ty::Closure(id, _) = *ty.kind()
{
self.show_mutating_upvar(tcx, id, the_place_err, &mut err);
self.show_mutating_upvar(tcx, id.expect_local(), the_place_err, &mut err);
}
}
@ -685,11 +685,10 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
fn show_mutating_upvar(
&self,
tcx: TyCtxt<'_>,
id: hir::def_id::DefId,
closure_local_def_id: hir::def_id::LocalDefId,
the_place_err: PlaceRef<'tcx>,
err: &mut Diagnostic,
) {
let closure_local_def_id = id.expect_local();
let tables = tcx.typeck(closure_local_def_id);
let closure_hir_id = tcx.hir().local_def_id_to_hir_id(closure_local_def_id);
if let Some((span, closure_kind_origin)) =
@ -699,7 +698,8 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
let upvar = ty::place_to_string_for_capture(tcx, closure_kind_origin);
let root_hir_id = upvar_id.var_path.hir_id;
// we have an origin for this closure kind starting at this root variable so it's safe to unwrap here
let captured_places = tables.closure_min_captures[&id].get(&root_hir_id).unwrap();
let captured_places =
tables.closure_min_captures[&closure_local_def_id].get(&root_hir_id).unwrap();
let origin_projection = closure_kind_origin
.projections


@ -189,7 +189,7 @@ impl Display for RegionName {
impl<'tcx> MirBorrowckCtxt<'_, 'tcx> {
pub(crate) fn mir_def_id(&self) -> hir::def_id::LocalDefId {
self.body.source.def_id().as_local().unwrap()
self.body.source.def_id().expect_local()
}
pub(crate) fn mir_hir_id(&self) -> hir::HirId {


@ -189,7 +189,7 @@ fn do_mir_borrowck<'a, 'tcx>(
errors.set_tainted_by_errors();
}
let upvars: Vec<_> = tables
.closure_min_captures_flattened(def.did.to_def_id())
.closure_min_captures_flattened(def.did)
.map(|captured_place| {
let capture = captured_place.info.capture_kind;
let by_ref = match capture {
@ -1295,7 +1295,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
match **aggregate_kind {
AggregateKind::Closure(def_id, _) | AggregateKind::Generator(def_id, _, _) => {
let BorrowCheckResult { used_mut_upvars, .. } =
self.infcx.tcx.mir_borrowck(def_id.expect_local());
self.infcx.tcx.mir_borrowck(def_id);
debug!("{:?} used_mut_upvars={:?}", def_id, used_mut_upvars);
for field in used_mut_upvars {
self.propagate_closure_used_mut_upvar(&operands[field.index()]);


@ -1847,14 +1847,11 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
let tcx = self.tcx();
let def_id = uv.def.def_id_for_type_of();
if tcx.def_kind(def_id) == DefKind::InlineConst {
let predicates = self.prove_closure_bounds(
tcx,
def_id.expect_local(),
uv.substs,
location,
);
let def_id = def_id.expect_local();
let predicates =
self.prove_closure_bounds(tcx, def_id, uv.substs, location);
self.normalize_and_prove_instantiated_predicates(
def_id,
def_id.to_def_id(),
predicates,
location.to_locations(),
);
@ -2514,9 +2511,9 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
aggregate_kind, location
);
let (def_id, instantiated_predicates) = match aggregate_kind {
let (def_id, instantiated_predicates) = match *aggregate_kind {
AggregateKind::Adt(adt_did, _, substs, _, _) => {
(*adt_did, tcx.predicates_of(*adt_did).instantiate(tcx, substs))
(adt_did, tcx.predicates_of(adt_did).instantiate(tcx, substs))
}
// For closures, we have some **extra requirements** we
@ -2541,7 +2538,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
// clauses on the struct.
AggregateKind::Closure(def_id, substs)
| AggregateKind::Generator(def_id, substs, _) => {
(*def_id, self.prove_closure_bounds(tcx, def_id.expect_local(), substs, location))
(def_id.to_def_id(), self.prove_closure_bounds(tcx, def_id, substs, location))
}
AggregateKind::Array(_) | AggregateKind::Tuple => {


@ -5,7 +5,7 @@ use std::fs::File;
use std::io::{self, Read, Seek};
use std::path::{Path, PathBuf};
use rustc_codegen_ssa::back::archive::ArchiveBuilder;
use rustc_codegen_ssa::back::archive::{ArchiveBuilder, ArchiveBuilderBuilder};
use rustc_session::Session;
use object::read::archive::ArchiveFile;
@ -17,9 +17,34 @@ enum ArchiveEntry {
File(PathBuf),
}
pub(crate) struct ArArchiveBuilderBuilder;
impl ArchiveBuilderBuilder for ArArchiveBuilderBuilder {
fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a> {
Box::new(ArArchiveBuilder {
sess,
use_gnu_style_archive: sess.target.archive_format == "gnu",
// FIXME fix builtin ranlib on macOS
no_builtin_ranlib: sess.target.is_like_osx,
src_archives: vec![],
entries: vec![],
})
}
fn create_dll_import_lib(
&self,
_sess: &Session,
_lib_name: &str,
_dll_imports: &[rustc_session::cstore::DllImport],
_tmpdir: &Path,
) -> PathBuf {
bug!("creating dll imports is not supported");
}
}
pub(crate) struct ArArchiveBuilder<'a> {
sess: &'a Session,
dst: PathBuf,
use_gnu_style_archive: bool,
no_builtin_ranlib: bool,
@ -30,19 +55,6 @@ pub(crate) struct ArArchiveBuilder<'a> {
}
impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
fn new(sess: &'a Session, output: &Path) -> Self {
ArArchiveBuilder {
sess,
dst: output.to_path_buf(),
use_gnu_style_archive: sess.target.archive_format == "gnu",
// FIXME fix builtin ranlib on macOS
no_builtin_ranlib: sess.target.is_like_osx,
src_archives: vec![],
entries: vec![],
}
}
fn add_file(&mut self, file: &Path) {
self.entries.push((
file.file_name().unwrap().to_str().unwrap().to_string().into_bytes(),
@ -50,10 +62,11 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
));
}
fn add_archive<F>(&mut self, archive_path: &Path, mut skip: F) -> std::io::Result<()>
where
F: FnMut(&str) -> bool + 'static,
{
fn add_archive(
&mut self,
archive_path: &Path,
mut skip: Box<dyn FnMut(&str) -> bool + 'static>,
) -> std::io::Result<()> {
let read_cache = ReadCache::new(std::fs::File::open(&archive_path)?);
let archive = ArchiveFile::parse(&read_cache).unwrap();
let archive_index = self.src_archives.len();
@ -74,7 +87,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
Ok(())
}
fn build(mut self) -> bool {
fn build(mut self: Box<Self>, output: &Path) -> bool {
enum BuilderKind {
Bsd(ar::Builder<File>),
Gnu(ar::GnuBuilder<File>),
@ -163,7 +176,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
let mut builder = if self.use_gnu_style_archive {
BuilderKind::Gnu(
ar::GnuBuilder::new(
File::create(&self.dst).unwrap_or_else(|err| {
File::create(output).unwrap_or_else(|err| {
sess.fatal(&format!(
"error opening destination during archive building: {}",
err
@ -178,7 +191,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
} else {
BuilderKind::Bsd(
ar::Builder::new(
File::create(&self.dst).unwrap_or_else(|err| {
File::create(output).unwrap_or_else(|err| {
sess.fatal(&format!(
"error opening destination during archive building: {}",
err
@ -209,7 +222,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
// Run ranlib to be able to link the archive
let status = std::process::Command::new(ranlib)
.arg(self.dst)
.arg(output)
.status()
.expect("Couldn't run ranlib");
@ -220,17 +233,4 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
any_members
}
fn sess(&self) -> &Session {
self.sess
}
fn create_dll_import_lib(
_sess: &Session,
_lib_name: &str,
_dll_imports: &[rustc_session::cstore::DllImport],
_tmpdir: &Path,
) -> PathBuf {
bug!("creating dll imports is not supported");
}
}


@ -226,7 +226,7 @@ impl CodegenBackend for CraneliftCodegenBackend {
) -> Result<(), ErrorGuaranteed> {
use rustc_codegen_ssa::back::link::link_binary;
link_binary::<crate::archive::ArArchiveBuilder<'_>>(sess, &codegen_results, outputs)
link_binary(sess, &crate::archive::ArArchiveBuilderBuilder, &codegen_results, outputs)
}
}


@ -1,14 +1,13 @@
use std::fs::File;
use std::path::{Path, PathBuf};
use rustc_codegen_ssa::back::archive::ArchiveBuilder;
use rustc_codegen_ssa::back::archive::{ArchiveBuilder, ArchiveBuilderBuilder};
use rustc_session::Session;
use rustc_session::cstore::DllImport;
struct ArchiveConfig<'a> {
sess: &'a Session,
dst: PathBuf,
use_native_ar: bool,
use_gnu_style_archive: bool,
}
@ -22,6 +21,35 @@ enum ArchiveEntry {
File(PathBuf),
}
pub struct ArArchiveBuilderBuilder;
impl ArchiveBuilderBuilder for ArArchiveBuilderBuilder {
fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a> {
let config = ArchiveConfig {
sess,
use_native_ar: false,
// FIXME test for linux and System V derivatives instead
use_gnu_style_archive: sess.target.options.archive_format == "gnu",
};
Box::new(ArArchiveBuilder {
config,
src_archives: vec![],
entries: vec![],
})
}
fn create_dll_import_lib(
&self,
_sess: &Session,
_lib_name: &str,
_dll_imports: &[DllImport],
_tmpdir: &Path,
) -> PathBuf {
unimplemented!();
}
}
pub struct ArArchiveBuilder<'a> {
config: ArchiveConfig<'a>,
src_archives: Vec<(PathBuf, ar::Archive<File>)>,
@ -31,22 +59,6 @@ pub struct ArArchiveBuilder<'a> {
}
impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
fn new(sess: &'a Session, output: &Path) -> Self {
let config = ArchiveConfig {
sess,
dst: output.to_path_buf(),
use_native_ar: false,
// FIXME test for linux and System V derivatives instead
use_gnu_style_archive: sess.target.options.archive_format == "gnu",
};
ArArchiveBuilder {
config,
src_archives: vec![],
entries: vec![],
}
}
fn add_file(&mut self, file: &Path) {
self.entries.push((
file.file_name().unwrap().to_str().unwrap().to_string(),
@ -54,10 +66,11 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
));
}
fn add_archive<F>(&mut self, archive_path: &Path, mut skip: F) -> std::io::Result<()>
where
F: FnMut(&str) -> bool + 'static,
{
fn add_archive(
&mut self,
archive_path: &Path,
mut skip: Box<dyn FnMut(&str) -> bool + 'static>,
) -> std::io::Result<()> {
let mut archive = ar::Archive::new(std::fs::File::open(&archive_path)?);
let archive_index = self.src_archives.len();
@ -77,7 +90,7 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
Ok(())
}
fn build(mut self) -> bool {
fn build(mut self: Box<Self>, output: &Path) -> bool {
use std::process::Command;
fn add_file_using_ar(archive: &Path, file: &Path) {
@ -97,17 +110,17 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
}
let mut builder = if self.config.use_native_ar {
BuilderKind::NativeAr(&self.config.dst)
BuilderKind::NativeAr(output)
} else if self.config.use_gnu_style_archive {
BuilderKind::Gnu(ar::GnuBuilder::new(
File::create(&self.config.dst).unwrap(),
File::create(output).unwrap(),
self.entries
.iter()
.map(|(name, _)| name.as_bytes().to_vec())
.collect(),
))
} else {
BuilderKind::Bsd(ar::Builder::new(File::create(&self.config.dst).unwrap()))
BuilderKind::Bsd(ar::Builder::new(File::create(output).unwrap()))
};
let any_members = !self.entries.is_empty();
@ -164,10 +177,8 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
std::mem::drop(builder);
// Run ranlib to be able to link the archive
let status = std::process::Command::new("ranlib")
.arg(self.config.dst)
.status()
.expect("Couldn't run ranlib");
let status =
std::process::Command::new("ranlib").arg(output).status().expect("Couldn't run ranlib");
if !status.success() {
self.config.sess.fatal(&format!("Ranlib exited with code {:?}", status.code()));
@ -175,17 +186,4 @@ impl<'a> ArchiveBuilder<'a> for ArArchiveBuilder<'a> {
any_members
}
fn sess(&self) -> &Session {
self.config.sess
}
fn create_dll_import_lib(
_sess: &Session,
_lib_name: &str,
_dll_imports: &[DllImport],
_tmpdir: &Path,
) -> PathBuf {
unimplemented!();
}
}


@ -133,8 +133,9 @@ impl CodegenBackend for GccCodegenBackend {
fn link(&self, sess: &Session, codegen_results: CodegenResults, outputs: &OutputFilenames) -> Result<(), ErrorGuaranteed> {
use rustc_codegen_ssa::back::link::link_binary;
link_binary::<crate::archive::ArArchiveBuilder<'_>>(
link_binary(
sess,
&crate::archive::ArArchiveBuilderBuilder,
&codegen_results,
outputs,
)


@ -10,7 +10,7 @@ use std::str;
use crate::llvm::archive_ro::{ArchiveRO, Child};
use crate::llvm::{self, ArchiveKind, LLVMMachineType, LLVMRustCOFFShortExport};
use rustc_codegen_ssa::back::archive::ArchiveBuilder;
use rustc_codegen_ssa::back::archive::{ArchiveBuilder, ArchiveBuilderBuilder};
use rustc_session::cstore::{DllCallingConvention, DllImport};
use rustc_session::Session;
@ -18,7 +18,6 @@ use rustc_session::Session;
#[must_use = "must call build() to finish building the archive"]
pub struct LlvmArchiveBuilder<'a> {
sess: &'a Session,
dst: PathBuf,
additions: Vec<Addition>,
}
@ -54,16 +53,11 @@ fn llvm_machine_type(cpu: &str) -> LLVMMachineType {
}
impl<'a> ArchiveBuilder<'a> for LlvmArchiveBuilder<'a> {
/// Creates a new static archive, ready for modifying the archive specified
/// by `config`.
fn new(sess: &'a Session, output: &Path) -> LlvmArchiveBuilder<'a> {
LlvmArchiveBuilder { sess, dst: output.to_path_buf(), additions: Vec::new() }
}
fn add_archive<F>(&mut self, archive: &Path, skip: F) -> io::Result<()>
where
F: FnMut(&str) -> bool + 'static,
{
fn add_archive(
&mut self,
archive: &Path,
skip: Box<dyn FnMut(&str) -> bool + 'static>,
) -> io::Result<()> {
let archive_ro = match ArchiveRO::open(archive) {
Ok(ar) => ar,
Err(e) => return Err(io::Error::new(io::ErrorKind::Other, e)),
@ -88,18 +82,23 @@ impl<'a> ArchiveBuilder<'a> for LlvmArchiveBuilder<'a> {
/// Combine the provided files, rlibs, and native libraries into a single
/// `Archive`.
fn build(mut self) -> bool {
match self.build_with_llvm() {
fn build(mut self: Box<Self>, output: &Path) -> bool {
match self.build_with_llvm(output) {
Ok(any_members) => any_members,
Err(e) => self.sess.fatal(&format!("failed to build archive: {}", e)),
}
}
}
fn sess(&self) -> &Session {
self.sess
pub struct LlvmArchiveBuilderBuilder;
impl ArchiveBuilderBuilder for LlvmArchiveBuilderBuilder {
fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a> {
Box::new(LlvmArchiveBuilder { sess, additions: Vec::new() })
}
fn create_dll_import_lib(
&self,
sess: &Session,
lib_name: &str,
dll_imports: &[DllImport],
@ -241,7 +240,7 @@ impl<'a> ArchiveBuilder<'a> for LlvmArchiveBuilder<'a> {
}
impl<'a> LlvmArchiveBuilder<'a> {
fn build_with_llvm(&mut self) -> io::Result<bool> {
fn build_with_llvm(&mut self, output: &Path) -> io::Result<bool> {
let kind = &*self.sess.target.archive_format;
let kind = kind.parse::<ArchiveKind>().map_err(|_| kind).unwrap_or_else(|kind| {
self.sess.fatal(&format!("Don't know how to build archive of type: {}", kind))
@ -251,7 +250,7 @@ impl<'a> LlvmArchiveBuilder<'a> {
let mut strings = Vec::new();
let mut members = Vec::new();
let dst = CString::new(self.dst.to_str().unwrap())?;
let dst = CString::new(output.to_str().unwrap())?;
unsafe {
for addition in &mut additions {


@ -370,12 +370,12 @@ impl CodegenBackend for LlvmCodegenBackend {
codegen_results: CodegenResults,
outputs: &OutputFilenames,
) -> Result<(), ErrorGuaranteed> {
use crate::back::archive::LlvmArchiveBuilder;
use crate::back::archive::LlvmArchiveBuilderBuilder;
use rustc_codegen_ssa::back::link::link_binary;
// Run the linker on any artifacts that resulted from the LLVM run.
// This should produce either a finished executable or library.
link_binary::<LlvmArchiveBuilder<'_>>(sess, &codegen_results, outputs)
link_binary(sess, &LlvmArchiveBuilderBuilder, &codegen_results, outputs)
}
}


@ -1,4 +1,3 @@
use rustc_data_structures::temp_dir::MaybeTempDir;
use rustc_session::cstore::DllImport;
use rustc_session::Session;
@ -40,18 +39,8 @@ pub(super) fn find_library(
));
}
pub trait ArchiveBuilder<'a> {
fn new(sess: &'a Session, output: &Path) -> Self;
fn add_file(&mut self, path: &Path);
fn add_archive<F>(&mut self, archive: &Path, skip: F) -> io::Result<()>
where
F: FnMut(&str) -> bool + 'static;
fn build(self) -> bool;
fn sess(&self) -> &Session;
pub trait ArchiveBuilderBuilder {
fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a>;
/// Creates a DLL Import Library <https://docs.microsoft.com/en-us/windows/win32/dlls/dynamic-link-library-creation#creating-an-import-library>
/// and returns the path on disk to that import library.
@ -59,29 +48,22 @@ pub trait ArchiveBuilder<'a> {
/// `linker_with_args`, which is specialized on `ArchiveBuilder` but
/// doesn't take or create an instance of that type.
fn create_dll_import_lib(
&self,
sess: &Session,
lib_name: &str,
dll_imports: &[DllImport],
tmpdir: &Path,
) -> PathBuf;
/// Creates a DLL Import Library <https://docs.microsoft.com/en-us/windows/win32/dlls/dynamic-link-library-creation#creating-an-import-library>
/// and adds it to the current compilation's set of archives.
fn inject_dll_import_lib(
&mut self,
lib_name: &str,
dll_imports: &[DllImport],
tmpdir: &MaybeTempDir,
) {
let output_path =
Self::create_dll_import_lib(self.sess(), lib_name, dll_imports, tmpdir.as_ref());
self.add_archive(&output_path, |_| false).unwrap_or_else(|e| {
self.sess().fatal(&format!(
"failed to add native library {}: {}",
output_path.display(),
e
));
});
}
}
pub trait ArchiveBuilder<'a> {
fn add_file(&mut self, path: &Path);
fn add_archive(
&mut self,
archive: &Path,
skip: Box<dyn FnMut(&str) -> bool + 'static>,
) -> io::Result<()>;
fn build(self: Box<Self>, output: &Path) -> bool;
}
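
To make the shape of the archive API introduced by #99844 concrete, here is a hedged, self-contained sketch. `Session`, `DllImport`, and the dummy backend below are stand-ins invented for illustration (the real types live in `rustc_session`), but the trait signatures mirror the ones above: a factory trait that hands out boxed builders, with `build` now taking the output path instead of a destination fixed at construction time.

```rust
use std::io;
use std::path::{Path, PathBuf};

// Stand-ins for rustc's `Session` and `DllImport`, used only so the sketch compiles.
struct Session;
struct DllImport;

// Factory trait: replaces the old generic `ArchiveBuilder::new` so that linker
// code can work with `&dyn ArchiveBuilderBuilder` instead of a type parameter.
trait ArchiveBuilderBuilder {
    fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a>;
    fn create_dll_import_lib(
        &self,
        sess: &Session,
        lib_name: &str,
        dll_imports: &[DllImport],
        tmpdir: &Path,
    ) -> PathBuf;
}

// The builder itself is object safe; `build` takes the output path rather than
// remembering a `dst` chosen at construction time.
trait ArchiveBuilder<'a> {
    fn add_file(&mut self, path: &Path);
    fn add_archive(
        &mut self,
        archive: &Path,
        skip: Box<dyn FnMut(&str) -> bool + 'static>,
    ) -> io::Result<()>;
    fn build(self: Box<Self>, output: &Path) -> bool;
}

// A do-nothing backend, just to show the wiring.
struct DummyBuilderBuilder;
struct DummyBuilder {
    entries: Vec<PathBuf>,
}

impl ArchiveBuilderBuilder for DummyBuilderBuilder {
    fn new_archive_builder<'a>(&self, _sess: &'a Session) -> Box<dyn ArchiveBuilder<'a> + 'a> {
        Box::new(DummyBuilder { entries: Vec::new() })
    }
    fn create_dll_import_lib(
        &self,
        _sess: &Session,
        _lib_name: &str,
        _dll_imports: &[DllImport],
        _tmpdir: &Path,
    ) -> PathBuf {
        unimplemented!("this toy backend cannot create DLL import libraries")
    }
}

impl<'a> ArchiveBuilder<'a> for DummyBuilder {
    fn add_file(&mut self, path: &Path) {
        self.entries.push(path.to_path_buf());
    }
    fn add_archive(
        &mut self,
        _archive: &Path,
        _skip: Box<dyn FnMut(&str) -> bool + 'static>,
    ) -> io::Result<()> {
        Ok(())
    }
    fn build(self: Box<Self>, output: &Path) -> bool {
        println!("would write {} member(s) to {}", self.entries.len(), output.display());
        !self.entries.is_empty()
    }
}

fn main() {
    let sess = Session;
    // Linker-side code receives the builder-builder by reference and uses
    // dynamic dispatch throughout.
    let abb: &dyn ArchiveBuilderBuilder = &DummyBuilderBuilder;
    let mut ab = abb.new_archive_builder(&sess);
    ab.add_file(Path::new("lib.rmeta"));
    ab.build(Path::new("libfoo.rlib"));
}
```

This is what lets `link_binary` and the other entry points in `rustc_codegen_ssa::back::link` drop their `B: ArchiveBuilder<'a>` type parameter in favour of an `archive_builder_builder: &dyn ArchiveBuilderBuilder` argument, as shown in the changes further down.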


@ -24,7 +24,7 @@ use rustc_target::spec::crt_objects::{CrtObjects, CrtObjectsFallback};
use rustc_target::spec::{LinkOutputKind, LinkerFlavor, LldFlavor, SplitDebuginfo};
use rustc_target::spec::{PanicStrategy, RelocModel, RelroLevel, SanitizerSet, Target};
use super::archive::{find_library, ArchiveBuilder};
use super::archive::{find_library, ArchiveBuilder, ArchiveBuilderBuilder};
use super::command::Command;
use super::linker::{self, Linker};
use super::metadata::{create_rmeta_file, MetadataPosition};
@ -56,8 +56,9 @@ pub fn ensure_removed(diag_handler: &Handler, path: &Path) {
/// Performs the linkage portion of the compilation phase. This will generate all
/// of the requested outputs for this compilation session.
pub fn link_binary<'a, B: ArchiveBuilder<'a>>(
pub fn link_binary<'a>(
sess: &'a Session,
archive_builder_builder: &dyn ArchiveBuilderBuilder,
codegen_results: &CodegenResults,
outputs: &OutputFilenames,
) -> Result<(), ErrorGuaranteed> {
@ -101,21 +102,29 @@ pub fn link_binary<'a, B: ArchiveBuilder<'a>>(
match crate_type {
CrateType::Rlib => {
let _timer = sess.timer("link_rlib");
link_rlib::<B>(
info!("preparing rlib to {:?}", out_filename);
link_rlib(
sess,
archive_builder_builder,
codegen_results,
RlibFlavor::Normal,
&out_filename,
&path,
)?
.build();
.build(&out_filename);
}
CrateType::Staticlib => {
link_staticlib::<B>(sess, codegen_results, &out_filename, &path)?;
link_staticlib(
sess,
archive_builder_builder,
codegen_results,
&out_filename,
&path,
)?;
}
_ => {
link_natively::<B>(
link_natively(
sess,
archive_builder_builder,
crate_type,
&out_filename,
codegen_results,
@ -245,18 +254,16 @@ pub fn each_linked_rlib(
/// the object file of the crate, but it also contains all of the object files from native
/// libraries. This is done by unzipping native libraries and inserting all of the contents into
/// this archive.
fn link_rlib<'a, B: ArchiveBuilder<'a>>(
fn link_rlib<'a>(
sess: &'a Session,
archive_builder_builder: &dyn ArchiveBuilderBuilder,
codegen_results: &CodegenResults,
flavor: RlibFlavor,
out_filename: &Path,
tmpdir: &MaybeTempDir,
) -> Result<B, ErrorGuaranteed> {
info!("preparing rlib to {:?}", out_filename);
) -> Result<Box<dyn ArchiveBuilder<'a> + 'a>, ErrorGuaranteed> {
let lib_search_paths = archive_search_paths(sess);
let mut ab = <B as ArchiveBuilder>::new(sess, out_filename);
let mut ab = archive_builder_builder.new_archive_builder(sess);
let trailing_metadata = match flavor {
RlibFlavor::Normal => {
@ -342,7 +349,7 @@ fn link_rlib<'a, B: ArchiveBuilder<'a>>(
if let Some(name) = lib.name {
let location =
find_library(name.as_str(), lib.verbatim.unwrap_or(false), &lib_search_paths, sess);
ab.add_archive(&location, |_| false).unwrap_or_else(|e| {
ab.add_archive(&location, Box::new(|_| false)).unwrap_or_else(|e| {
sess.fatal(&format!(
"failed to add native library {}: {}",
location.to_string_lossy(),
@ -355,7 +362,16 @@ fn link_rlib<'a, B: ArchiveBuilder<'a>>(
for (raw_dylib_name, raw_dylib_imports) in
collate_raw_dylibs(sess, &codegen_results.crate_info.used_libraries)?
{
ab.inject_dll_import_lib(&raw_dylib_name, &raw_dylib_imports, tmpdir);
let output_path = archive_builder_builder.create_dll_import_lib(
sess,
&raw_dylib_name,
&raw_dylib_imports,
tmpdir.as_ref(),
);
ab.add_archive(&output_path, Box::new(|_| false)).unwrap_or_else(|e| {
sess.fatal(&format!("failed to add native library {}: {}", output_path.display(), e));
});
}
if let Some(trailing_metadata) = trailing_metadata {
@ -446,14 +462,21 @@ fn collate_raw_dylibs(
///
/// There's no need to include metadata in a static archive, so ensure to not link in the metadata
/// object file (and also don't prepare the archive with a metadata file).
fn link_staticlib<'a, B: ArchiveBuilder<'a>>(
fn link_staticlib<'a>(
sess: &'a Session,
archive_builder_builder: &dyn ArchiveBuilderBuilder,
codegen_results: &CodegenResults,
out_filename: &Path,
tempdir: &MaybeTempDir,
) -> Result<(), ErrorGuaranteed> {
let mut ab =
link_rlib::<B>(sess, codegen_results, RlibFlavor::StaticlibBase, out_filename, tempdir)?;
info!("preparing staticlib to {:?}", out_filename);
let mut ab = link_rlib(
sess,
archive_builder_builder,
codegen_results,
RlibFlavor::StaticlibBase,
tempdir,
)?;
let mut all_native_libs = vec![];
let res = each_linked_rlib(&codegen_results.crate_info, &mut |cnum, path| {
@ -487,26 +510,29 @@ fn link_staticlib<'a, B: ArchiveBuilder<'a>>(
// might be also an extra name suffix
let obj_start = name.as_str().to_owned();
ab.add_archive(path, move |fname: &str| {
// Ignore metadata files, no matter the name.
if fname == METADATA_FILENAME {
return true;
}
ab.add_archive(
path,
Box::new(move |fname: &str| {
// Ignore metadata files, no matter the name.
if fname == METADATA_FILENAME {
return true;
}
// Don't include Rust objects if LTO is enabled
if lto && looks_like_rust_object_file(fname) {
return true;
}
// Don't include Rust objects if LTO is enabled
if lto && looks_like_rust_object_file(fname) {
return true;
}
// Otherwise if this is *not* a rust object and we're skipping
// objects then skip this file
if skip_object_files && (!fname.starts_with(&obj_start) || !fname.ends_with(".o")) {
return true;
}
// Otherwise if this is *not* a rust object and we're skipping
// objects then skip this file
if skip_object_files && (!fname.starts_with(&obj_start) || !fname.ends_with(".o")) {
return true;
}
// ok, don't skip this
false
})
// ok, don't skip this
false
}),
)
.unwrap();
all_native_libs.extend(codegen_results.crate_info.native_libraries[&cnum].iter().cloned());
@ -515,7 +541,7 @@ fn link_staticlib<'a, B: ArchiveBuilder<'a>>(
sess.fatal(&e);
}
ab.build();
ab.build(out_filename);
if !all_native_libs.is_empty() {
if sess.opts.prints.contains(&PrintRequest::NativeStaticLibs) {
@ -645,8 +671,9 @@ fn link_dwarf_object<'a>(
///
/// This will invoke the system linker/cc to create the resulting file. This links to all upstream
/// files as well.
fn link_natively<'a, B: ArchiveBuilder<'a>>(
fn link_natively<'a>(
sess: &'a Session,
archive_builder_builder: &dyn ArchiveBuilderBuilder,
crate_type: CrateType,
out_filename: &Path,
codegen_results: &CodegenResults,
@ -654,10 +681,11 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
) -> Result<(), ErrorGuaranteed> {
info!("preparing {:?} to {:?}", crate_type, out_filename);
let (linker_path, flavor) = linker_and_flavor(sess);
let mut cmd = linker_with_args::<B>(
let mut cmd = linker_with_args(
&linker_path,
flavor,
sess,
archive_builder_builder,
crate_type,
tmpdir,
out_filename,
@ -1844,10 +1872,11 @@ fn add_rpath_args(
/// to the linking process as a whole.
/// Order-independent options may still override each other in order-dependent fashion,
/// e.g `--foo=yes --foo=no` may be equivalent to `--foo=no`.
fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
fn linker_with_args<'a>(
path: &Path,
flavor: LinkerFlavor,
sess: &'a Session,
archive_builder_builder: &dyn ArchiveBuilderBuilder,
crate_type: CrateType,
tmpdir: &Path,
out_filename: &Path,
@ -1948,7 +1977,14 @@ fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
}
// Upstream rust libraries and their non-bundled static libraries
add_upstream_rust_crates::<B>(cmd, sess, codegen_results, crate_type, tmpdir);
add_upstream_rust_crates(
cmd,
sess,
archive_builder_builder,
codegen_results,
crate_type,
tmpdir,
);
// Upstream dynamic native libraries linked with `#[link]` attributes and `-l`
// command line options.
@ -1963,7 +1999,7 @@ fn linker_with_args<'a, B: ArchiveBuilder<'a>>(
for (raw_dylib_name, raw_dylib_imports) in
collate_raw_dylibs(sess, &codegen_results.crate_info.used_libraries)?
{
cmd.add_object(&B::create_dll_import_lib(
cmd.add_object(&archive_builder_builder.create_dll_import_lib(
sess,
&raw_dylib_name,
&raw_dylib_imports,
@ -2256,9 +2292,10 @@ fn add_local_native_libraries(
///
/// Rust crates are not considered at all when creating an rlib output. All dependencies will be
/// linked when producing the final output (instead of the intermediate rlib version).
fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
fn add_upstream_rust_crates<'a>(
cmd: &mut dyn Linker,
sess: &'a Session,
archive_builder_builder: &dyn ArchiveBuilderBuilder,
codegen_results: &CodegenResults,
crate_type: CrateType,
tmpdir: &Path,
@ -2347,7 +2384,7 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
let src = &codegen_results.crate_info.used_crate_source[&cnum];
match data[cnum.as_usize() - 1] {
_ if codegen_results.crate_info.profiler_runtime == Some(cnum) => {
add_static_crate::<B>(cmd, sess, codegen_results, tmpdir, cnum);
add_static_crate(cmd, sess, archive_builder_builder, codegen_results, tmpdir, cnum);
}
// compiler-builtins are always placed last to ensure that they're
// linked correctly.
@ -2357,7 +2394,7 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
}
Linkage::NotLinked | Linkage::IncludedFromDylib => {}
Linkage::Static => {
add_static_crate::<B>(cmd, sess, codegen_results, tmpdir, cnum);
add_static_crate(cmd, sess, archive_builder_builder, codegen_results, tmpdir, cnum);
// Link static native libs with "-bundle" modifier only if the crate they originate from
// is being linked statically to the current crate. If it's linked dynamically
@ -2431,7 +2468,7 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
// was already "included" in a dylib (e.g., `libstd` when `-C prefer-dynamic`
// is used)
if let Some(cnum) = compiler_builtins {
add_static_crate::<B>(cmd, sess, codegen_results, tmpdir, cnum);
add_static_crate(cmd, sess, archive_builder_builder, codegen_results, tmpdir, cnum);
}
// Converts a library file-stem into a cc -l argument
@ -2457,9 +2494,10 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
// Note, however, that if we're not doing LTO we can just pass the rlib
// blindly to the linker (fast) because it's fine if it's not actually
// included as we're at the end of the dependency chain.
fn add_static_crate<'a, B: ArchiveBuilder<'a>>(
fn add_static_crate<'a>(
cmd: &mut dyn Linker,
sess: &'a Session,
archive_builder_builder: &dyn ArchiveBuilderBuilder,
codegen_results: &CodegenResults,
tmpdir: &Path,
cnum: CrateNum,
@ -2499,38 +2537,41 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
let is_builtins = sess.target.no_builtins
|| !codegen_results.crate_info.is_no_builtins.contains(&cnum);
let mut archive = <B as ArchiveBuilder>::new(sess, &dst);
if let Err(e) = archive.add_archive(cratepath, move |f| {
if f == METADATA_FILENAME {
return true;
}
let mut archive = archive_builder_builder.new_archive_builder(sess);
if let Err(e) = archive.add_archive(
cratepath,
Box::new(move |f| {
if f == METADATA_FILENAME {
return true;
}
let canonical = f.replace('-', "_");
let canonical = f.replace('-', "_");
let is_rust_object =
canonical.starts_with(&canonical_name) && looks_like_rust_object_file(&f);
let is_rust_object =
canonical.starts_with(&canonical_name) && looks_like_rust_object_file(&f);
// If we've been requested to skip all native object files
// (those not generated by the rust compiler) then we can skip
// this file. See above for why we may want to do this.
let skip_because_cfg_say_so = skip_native && !is_rust_object;
// If we've been requested to skip all native object files
// (those not generated by the rust compiler) then we can skip
// this file. See above for why we may want to do this.
let skip_because_cfg_say_so = skip_native && !is_rust_object;
// If we're performing LTO and this is a rust-generated object
// file, then we don't need the object file as it's part of the
// LTO module. Note that `#![no_builtins]` is excluded from LTO,
// though, so we let that object file slide.
let skip_because_lto =
upstream_rust_objects_already_included && is_rust_object && is_builtins;
// If we're performing LTO and this is a rust-generated object
// file, then we don't need the object file as it's part of the
// LTO module. Note that `#![no_builtins]` is excluded from LTO,
// though, so we let that object file slide.
let skip_because_lto =
upstream_rust_objects_already_included && is_rust_object && is_builtins;
if skip_because_cfg_say_so || skip_because_lto {
return true;
}
if skip_because_cfg_say_so || skip_because_lto {
return true;
}
false
}) {
false
}),
) {
sess.fatal(&format!("failed to build archive from rlib: {}", e));
}
if archive.build() {
if archive.build(&dst) {
link_upstream(&dst);
}
});


@ -238,7 +238,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
if let Some(local_def_id) = def_id.as_local() {
let tables = self.ecx.tcx.typeck(local_def_id);
if let Some(captured_place) =
tables.closure_min_captures_flattened(*def_id).nth(field)
tables.closure_min_captures_flattened(local_def_id).nth(field)
{
// Sometimes the index is beyond the number of upvars (seen
// for a generator).


@ -712,7 +712,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
opt_suggest_box_span,
}) => {
let then_span = self.find_block_span_from_hir_id(then_id);
let else_span = self.find_block_span_from_hir_id(then_id);
let else_span = self.find_block_span_from_hir_id(else_id);
err.span_label(then_span, "expected because of this");
if let Some(sp) = outer_span {
err.span_label(sp, "`if` and `else` have incompatible types");
@ -760,11 +760,15 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
second_ty: Ty<'tcx>,
second_span: Span,
) {
let remove_semicolon =
[(first_id, second_ty), (second_id, first_ty)].into_iter().find_map(|(id, ty)| {
let hir::Node::Block(blk) = self.tcx.hir().get(id?) else { return None };
self.could_remove_semicolon(blk, ty)
});
let remove_semicolon = [
(first_id, self.resolve_vars_if_possible(second_ty)),
(second_id, self.resolve_vars_if_possible(first_ty)),
]
.into_iter()
.find_map(|(id, ty)| {
let hir::Node::Block(blk) = self.tcx.hir().get(id?) else { return None };
self.could_remove_semicolon(blk, ty)
});
match remove_semicolon {
Some((sp, StatementAsExpression::NeedsBoxing)) => {
err.multipart_suggestion(
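
For context on the suggestion wired up by #99974, here is a hedged example of the kind of user code it targets (the function and names are made up): an `if`/`else` whose branches end in semicolons and therefore evaluate to `()` while the underlying expressions have incompatible types, where the fix is to drop the semicolons and box both expressions behind a common trait object.

```rust
use std::fmt::Display;

// Rejected: the `if` and `else` arms have incompatible types, and the trailing
// semicolons make both blocks evaluate to `()` instead of yielding a value:
//
//     fn describe(flag: bool) -> Box<dyn Display> {
//         if flag { 1; } else { "one"; }
//     }
//
// After the kind of fix the diagnostic suggests: remove the semicolons and box
// both expressions so the two arms coerce to the same `Box<dyn Display>`.
fn describe(flag: bool) -> Box<dyn Display> {
    if flag { Box::new(1) } else { Box::new("one") }
}

fn main() {
    println!("{}", describe(false));
}
```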


@ -1983,53 +1983,45 @@ impl<'tcx> Debug for Rvalue<'tcx> {
}
AggregateKind::Closure(def_id, substs) => ty::tls::with(|tcx| {
if let Some(def_id) = def_id.as_local() {
let name = if tcx.sess.opts.unstable_opts.span_free_formats {
let substs = tcx.lift(substs).unwrap();
format!(
"[closure@{}]",
tcx.def_path_str_with_substs(def_id.to_def_id(), substs),
)
} else {
let span = tcx.def_span(def_id);
format!(
"[closure@{}]",
tcx.sess.source_map().span_to_diagnostic_string(span)
)
};
let mut struct_fmt = fmt.debug_struct(&name);
// FIXME(project-rfc-2229#48): This should be a list of capture names/places
if let Some(upvars) = tcx.upvars_mentioned(def_id) {
for (&var_id, place) in iter::zip(upvars.keys(), places) {
let var_name = tcx.hir().name(var_id);
struct_fmt.field(var_name.as_str(), place);
}
}
struct_fmt.finish()
let name = if tcx.sess.opts.unstable_opts.span_free_formats {
let substs = tcx.lift(substs).unwrap();
format!(
"[closure@{}]",
tcx.def_path_str_with_substs(def_id.to_def_id(), substs),
)
} else {
write!(fmt, "[closure]")
let span = tcx.def_span(def_id);
format!(
"[closure@{}]",
tcx.sess.source_map().span_to_diagnostic_string(span)
)
};
let mut struct_fmt = fmt.debug_struct(&name);
// FIXME(project-rfc-2229#48): This should be a list of capture names/places
if let Some(upvars) = tcx.upvars_mentioned(def_id) {
for (&var_id, place) in iter::zip(upvars.keys(), places) {
let var_name = tcx.hir().name(var_id);
struct_fmt.field(var_name.as_str(), place);
}
}
struct_fmt.finish()
}),
AggregateKind::Generator(def_id, _, _) => ty::tls::with(|tcx| {
if let Some(def_id) = def_id.as_local() {
let name = format!("[generator@{:?}]", tcx.def_span(def_id));
let mut struct_fmt = fmt.debug_struct(&name);
let name = format!("[generator@{:?}]", tcx.def_span(def_id));
let mut struct_fmt = fmt.debug_struct(&name);
// FIXME(project-rfc-2229#48): This should be a list of capture names/places
if let Some(upvars) = tcx.upvars_mentioned(def_id) {
for (&var_id, place) in iter::zip(upvars.keys(), places) {
let var_name = tcx.hir().name(var_id);
struct_fmt.field(var_name.as_str(), place);
}
// FIXME(project-rfc-2229#48): This should be a list of capture names/places
if let Some(upvars) = tcx.upvars_mentioned(def_id) {
for (&var_id, place) in iter::zip(upvars.keys(), places) {
let var_name = tcx.hir().name(var_id);
struct_fmt.field(var_name.as_str(), place);
}
struct_fmt.finish()
} else {
write!(fmt, "[generator]")
}
struct_fmt.finish()
}),
}
}


@ -18,6 +18,7 @@ use rustc_hir::{self, GeneratorKind};
use rustc_target::abi::VariantIdx;
use rustc_ast::Mutability;
use rustc_span::def_id::LocalDefId;
use rustc_span::symbol::Symbol;
use rustc_span::Span;
use rustc_target::asm::InlineAsmRegOrRegClass;
@ -340,8 +341,11 @@ pub enum FakeReadCause {
/// If a closure pattern matches a Place starting with an Upvar, then we introduce a
/// FakeRead for that Place outside the closure, in such a case this option would be
/// Some(closure_def_id).
/// Otherwise, the value of the optional DefId will be None.
ForMatchedPlace(Option<DefId>),
/// Otherwise, the value of the optional LocalDefId will be None.
//
// We can use LocalDefId here since fake read statements are removed
// before codegen in the `CleanupNonCodegenStatements` pass.
ForMatchedPlace(Option<LocalDefId>),
/// A fake read of the RefWithinGuard version of a bind-by-value variable
/// in a match guard to ensure that its value hasn't change by the time
@ -365,7 +369,7 @@ pub enum FakeReadCause {
/// FakeRead for that Place outside the closure, in such a case this option would be
/// Some(closure_def_id).
/// Otherwise, the value of the optional DefId will be None.
ForLet(Option<DefId>),
ForLet(Option<LocalDefId>),
/// If we have an index expression like
///
@ -1095,8 +1099,10 @@ pub enum AggregateKind<'tcx> {
/// active field index would identify the field `c`
Adt(DefId, VariantIdx, SubstsRef<'tcx>, Option<UserTypeAnnotationIndex>, Option<usize>),
Closure(DefId, SubstsRef<'tcx>),
Generator(DefId, SubstsRef<'tcx>, hir::Movability),
// Note: We can use LocalDefId since closures and generators are deaggregated
// before codegen.
Closure(LocalDefId, SubstsRef<'tcx>),
Generator(LocalDefId, SubstsRef<'tcx>, hir::Movability),
}
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]


@ -205,9 +205,9 @@ impl<'tcx> Rvalue<'tcx> {
AggregateKind::Adt(did, _, substs, _, _) => {
tcx.bound_type_of(did).subst(tcx, substs)
}
AggregateKind::Closure(did, substs) => tcx.mk_closure(did, substs),
AggregateKind::Closure(did, substs) => tcx.mk_closure(did.to_def_id(), substs),
AggregateKind::Generator(did, substs, movability) => {
tcx.mk_generator(did, substs, movability)
tcx.mk_generator(did.to_def_id(), substs, movability)
}
},
Rvalue::ShallowInitBox(_, ty) => tcx.mk_box(ty),


@ -413,12 +413,12 @@ rustc_queries! {
}
query symbols_for_closure_captures(
key: (LocalDefId, DefId)
key: (LocalDefId, LocalDefId)
) -> Vec<rustc_span::Symbol> {
storage(ArenaCacheSelector<'tcx>)
desc {
|tcx| "symbols for captures of closure `{}` in `{}`",
tcx.def_path_str(key.1),
tcx.def_path_str(key.1.to_def_id()),
tcx.def_path_str(key.0.to_def_id())
}
}


@ -27,6 +27,7 @@ use rustc_span::{Span, Symbol, DUMMY_SP};
use rustc_target::abi::VariantIdx;
use rustc_target::asm::InlineAsmRegOrRegClass;
use rustc_span::def_id::LocalDefId;
use std::fmt;
use std::ops::Index;
@ -405,7 +406,7 @@ pub enum ExprKind<'tcx> {
},
/// A closure definition.
Closure {
closure_id: DefId,
closure_id: LocalDefId,
substs: UpvarSubsts<'tcx>,
upvars: Box<[ExprId]>,
movability: Option<hir::Movability>,


@ -59,7 +59,7 @@ pub type UpvarCaptureMap = FxHashMap<UpvarId, UpvarCapture>;
/// Given the closure DefId this map provides a map of root variables to minimum
/// set of `CapturedPlace`s that need to be tracked to support all captures of that closure.
pub type MinCaptureInformationMap<'tcx> = FxHashMap<DefId, RootVariableMinCaptureList<'tcx>>;
pub type MinCaptureInformationMap<'tcx> = FxHashMap<LocalDefId, RootVariableMinCaptureList<'tcx>>;
/// Part of `MinCaptureInformationMap`; Maps a root variable to the list of `CapturedPlace`.
/// Used to track the minimum set of `Place`s that need to be captured to support all
@ -253,7 +253,7 @@ impl<'tcx> CapturedPlace<'tcx> {
fn symbols_for_closure_captures<'tcx>(
tcx: TyCtxt<'tcx>,
def_id: (LocalDefId, DefId),
def_id: (LocalDefId, LocalDefId),
) -> Vec<Symbol> {
let typeck_results = tcx.typeck(def_id.0);
let captures = typeck_results.closure_min_captures_flattened(def_id.1);


@ -570,7 +570,7 @@ pub struct TypeckResults<'tcx> {
/// we never capture `t`. This becomes an issue when we build MIR as we require
/// information on `t` in order to create place `t.0` and `t.1`. We can solve this
/// issue by fake reading `t`.
pub closure_fake_reads: FxHashMap<DefId, Vec<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>>,
pub closure_fake_reads: FxHashMap<LocalDefId, Vec<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>>,
/// Tracks the rvalue scoping rules which defines finer scoping for rvalue expressions
/// by applying extended parameter rules.
@ -589,7 +589,7 @@ pub struct TypeckResults<'tcx> {
/// Contains the data for evaluating the effect of feature `capture_disjoint_fields`
/// on closure size.
pub closure_size_eval: FxHashMap<DefId, ClosureSizeProfileData<'tcx>>,
pub closure_size_eval: FxHashMap<LocalDefId, ClosureSizeProfileData<'tcx>>,
}
impl<'tcx> TypeckResults<'tcx> {
@ -811,7 +811,7 @@ impl<'tcx> TypeckResults<'tcx> {
/// by the closure.
pub fn closure_min_captures_flattened(
&self,
closure_def_id: DefId,
closure_def_id: LocalDefId,
) -> impl Iterator<Item = &ty::CapturedPlace<'tcx>> {
self.closure_min_captures
.get(&closure_def_id)


@ -3,7 +3,7 @@
use crate::build::expr::category::Category;
use crate::build::ForGuard::{OutsideGuard, RefWithinGuard};
use crate::build::{BlockAnd, BlockAndExtension, Builder};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::def_id::LocalDefId;
use rustc_middle::hir::place::Projection as HirProjection;
use rustc_middle::hir::place::ProjectionKind as HirProjectionKind;
use rustc_middle::middle::region;
@ -58,7 +58,7 @@ pub(crate) enum PlaceBase {
/// HirId of the upvar
var_hir_id: LocalVarId,
/// DefId of the closure
closure_def_id: DefId,
closure_def_id: LocalDefId,
/// The trait closure implements, `Fn`, `FnMut`, `FnOnce`
closure_kind: ty::ClosureKind,
},
@ -176,7 +176,7 @@ fn compute_capture_idx<'tcx>(
fn find_capture_matching_projections<'a, 'tcx>(
typeck_results: &'a ty::TypeckResults<'tcx>,
var_hir_id: LocalVarId,
closure_def_id: DefId,
closure_def_id: LocalDefId,
projections: &[PlaceElem<'tcx>],
) -> Option<(usize, &'a ty::CapturedPlace<'tcx>)> {
let closure_min_captures = typeck_results.closure_min_captures.get(&closure_def_id)?;
@ -242,7 +242,7 @@ fn to_upvars_resolved_place_builder<'a, 'tcx>(
};
// We won't be building MIR if the closure wasn't local
let closure_hir_id = tcx.hir().local_def_id_to_hir_id(closure_def_id.expect_local());
let closure_hir_id = tcx.hir().local_def_id_to_hir_id(closure_def_id);
let closure_ty = typeck_results.node_type(closure_hir_id);
let substs = match closure_ty.kind() {
@ -626,11 +626,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
fn lower_captured_upvar(
&mut self,
block: BasicBlock,
closure_expr_id: LocalDefId,
closure_def_id: LocalDefId,
var_hir_id: LocalVarId,
) -> BlockAnd<PlaceBuilder<'tcx>> {
let closure_ty =
self.typeck_results.node_type(self.tcx.hir().local_def_id_to_hir_id(closure_expr_id));
self.typeck_results.node_type(self.tcx.hir().local_def_id_to_hir_id(closure_def_id));
let closure_kind = if let ty::Closure(_, closure_substs) = closure_ty.kind() {
self.infcx.closure_kind(closure_substs).unwrap()
@ -639,11 +639,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
ty::ClosureKind::FnOnce
};
block.and(PlaceBuilder::from(PlaceBase::Upvar {
var_hir_id,
closure_def_id: closure_expr_id.to_def_id(),
closure_kind,
}))
block.and(PlaceBuilder::from(PlaceBase::Upvar { var_hir_id, closure_def_id, closure_kind }))
}
/// Lower an index expression


@ -672,7 +672,7 @@ where
Some(builder.in_scope(arg_scope_s, LintLevel::Inherited, |builder| {
builder.args_and_body(
START_BLOCK,
fn_def.did.to_def_id(),
fn_def.did,
&arguments,
arg_scope,
&thir[expr],
@ -895,7 +895,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
fn args_and_body(
&mut self,
mut block: BasicBlock,
fn_def_id: DefId,
fn_def_id: LocalDefId,
arguments: &[ArgInfo<'tcx>],
argument_scope: region::Scope,
expr: &Expr<'tcx>,


@ -408,7 +408,6 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> {
movability: _,
fake_reads: _,
} => {
let closure_id = closure_id.expect_local();
let closure_def = if let Some((did, const_param_id)) =
ty::WithOptConstParam::try_lookup(closure_id, self.tcx)
{


@ -523,6 +523,7 @@ impl<'tcx> Cx<'tcx> {
span_bug!(expr.span, "closure expr w/o closure type: {:?}", closure_ty);
}
};
let def_id = def_id.expect_local();
let upvars = self
.typeck_results


@ -129,7 +129,7 @@ impl<'tcx> Visitor<'tcx> for UnsafetyChecker<'_, 'tcx> {
}
&AggregateKind::Closure(def_id, _) | &AggregateKind::Generator(def_id, _, _) => {
let UnsafetyCheckResult { violations, used_unsafe_blocks, .. } =
self.tcx.unsafety_check_result(def_id.expect_local());
self.tcx.unsafety_check_result(def_id);
self.register_violations(
violations,
used_unsafe_blocks.iter().map(|(&h, &d)| (h, d)),


@ -17,8 +17,8 @@ pub(crate) fn dump_closure_profile<'tcx>(tcx: TyCtxt<'tcx>, closure_instance: In
return;
};
let closure_def_id = closure_instance.def_id();
let typeck_results = tcx.typeck(closure_def_id.expect_local());
let closure_def_id = closure_instance.def_id().expect_local();
let typeck_results = tcx.typeck(closure_def_id);
if typeck_results.closure_size_eval.contains_key(&closure_def_id) {
let param_env = ty::ParamEnv::reveal_all();


@ -519,7 +519,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
fn new(ir: &'a mut IrMaps<'tcx>, body_owner: LocalDefId) -> Liveness<'a, 'tcx> {
let typeck_results = ir.tcx.typeck(body_owner);
let param_env = ir.tcx.param_env(body_owner);
let closure_min_captures = typeck_results.closure_min_captures.get(&body_owner.to_def_id());
let closure_min_captures = typeck_results.closure_min_captures.get(&body_owner);
let closure_ln = ir.add_live_node(ClosureNode);
let exit_ln = ir.add_live_node(ExitNode);


@ -191,6 +191,16 @@ impl Key for (LocalDefId, DefId) {
}
}
impl Key for (LocalDefId, LocalDefId) {
#[inline(always)]
fn query_crate_is_local(&self) -> bool {
true
}
fn default_span(&self, tcx: TyCtxt<'_>) -> Span {
self.0.default_span(tcx)
}
}
impl Key for (DefId, Option<Ident>) {
#[inline(always)]
fn query_crate_is_local(&self) -> bool {


@ -5,7 +5,7 @@ use crate::type_error_struct;
use rustc_errors::{struct_span_err, Applicability, Diagnostic};
use rustc_hir as hir;
use rustc_hir::def::{self, Namespace, Res};
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_hir::def_id::DefId;
use rustc_infer::{
infer,
traits::{self, Obligation},
@ -19,11 +19,13 @@ use rustc_middle::ty::adjustment::{
};
use rustc_middle::ty::subst::{Subst, SubstsRef};
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitable};
use rustc_span::def_id::LocalDefId;
use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
use rustc_target::spec::abi;
use rustc_trait_selection::autoderef::Autoderef;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
use std::iter;
/// Checks that it is legal to call methods of the trait corresponding
@ -59,7 +61,7 @@ pub fn check_legal_trait_for_method_call(
enum CallStep<'tcx> {
Builtin(Ty<'tcx>),
DeferredClosure(DefId, ty::FnSig<'tcx>),
DeferredClosure(LocalDefId, ty::FnSig<'tcx>),
/// E.g., enum variant constructors.
Overloaded(MethodCallee<'tcx>),
}
@ -145,7 +147,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
ty::Closure(def_id, substs) => {
assert_eq!(def_id.krate, LOCAL_CRATE);
let def_id = def_id.expect_local();
// Check whether this is a call to a closure where we
// haven't yet decided on whether the closure is fn vs
@ -558,7 +560,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
call_expr: &'tcx hir::Expr<'tcx>,
arg_exprs: &'tcx [hir::Expr<'tcx>],
expected: Expectation<'tcx>,
closure_def_id: DefId,
closure_def_id: LocalDefId,
fn_sig: ty::FnSig<'tcx>,
) -> Ty<'tcx> {
// `fn_sig` is the *signature* of the closure being called. We
@ -581,7 +583,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
arg_exprs,
fn_sig.c_variadic,
TupleArgumentsFlag::TupleArguments,
Some(closure_def_id),
Some(closure_def_id.to_def_id()),
);
fn_sig.output()


@ -29,6 +29,7 @@ use rustc_middle::ty::{
ToPredicate, Ty, UserType,
};
use rustc_session::lint;
use rustc_span::def_id::LocalDefId;
use rustc_span::hygiene::DesugaringKind;
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::{Span, DUMMY_SP};
@ -114,7 +115,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub(in super::super) fn record_deferred_call_resolution(
&self,
closure_def_id: DefId,
closure_def_id: LocalDefId,
r: DeferredCallResolution<'tcx>,
) {
let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
@ -123,7 +124,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub(in super::super) fn remove_deferred_call_resolutions(
&self,
closure_def_id: DefId,
closure_def_id: LocalDefId,
) -> Vec<DeferredCallResolution<'tcx>> {
let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut();
deferred_call_resolutions.remove(&closure_def_id).unwrap_or_default()


@ -2,13 +2,14 @@ use super::callee::DeferredCallResolution;
use rustc_data_structures::fx::FxHashSet;
use rustc_hir as hir;
use rustc_hir::def_id::{DefIdMap, LocalDefId};
use rustc_hir::def_id::LocalDefId;
use rustc_hir::HirIdMap;
use rustc_infer::infer;
use rustc_infer::infer::{InferCtxt, InferOk, TyCtxtInferExt};
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::visit::TypeVisitable;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::def_id::LocalDefIdMap;
use rustc_span::{self, Span};
use rustc_trait_selection::infer::InferCtxtExt as _;
use rustc_trait_selection::traits::{self, ObligationCause, TraitEngine, TraitEngineExt};
@ -46,7 +47,7 @@ pub struct Inherited<'a, 'tcx> {
// decision. We keep these deferred resolutions grouped by the
// def-id of the closure, so that once we decide, we can easily go
// back and process them.
pub(super) deferred_call_resolutions: RefCell<DefIdMap<Vec<DeferredCallResolution<'tcx>>>>,
pub(super) deferred_call_resolutions: RefCell<LocalDefIdMap<Vec<DeferredCallResolution<'tcx>>>>,
pub(super) deferred_cast_checks: RefCell<Vec<super::cast::CastCheck<'tcx>>>,


@ -35,7 +35,6 @@ use super::FnCtxt;
use crate::expr_use_visitor as euv;
use rustc_errors::{Applicability, MultiSpan};
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::{self, Visitor};
use rustc_infer::infer::UpvarRegion;
@ -186,6 +185,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
);
}
};
let closure_def_id = closure_def_id.expect_local();
let infer_kind = if let UpvarSubsts::Closure(closure_substs) = substs {
self.closure_kind(closure_substs).is_none().then_some(closure_substs)
@ -193,20 +193,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
None
};
let local_def_id = closure_def_id.expect_local();
let body_owner_def_id = self.tcx.hir().body_owner_def_id(body.id());
assert_eq!(body_owner_def_id.to_def_id(), closure_def_id);
assert_eq!(self.tcx.hir().body_owner_def_id(body.id()), closure_def_id);
let mut delegate = InferBorrowKind {
fcx: self,
closure_def_id: local_def_id,
closure_def_id,
capture_information: Default::default(),
fake_reads: Default::default(),
};
euv::ExprUseVisitor::new(
&mut delegate,
&self.infcx,
body_owner_def_id,
closure_def_id,
self.param_env,
&self.typeck_results.borrow(),
)
@ -224,7 +221,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.compute_min_captures(closure_def_id, capture_information, span);
let closure_hir_id = self.tcx.hir().local_def_id_to_hir_id(local_def_id);
let closure_hir_id = self.tcx.hir().local_def_id_to_hir_id(closure_def_id);
if should_do_rust_2021_incompatible_closure_captures_analysis(self.tcx, closure_hir_id) {
self.perform_2229_migration_anaysis(closure_def_id, body_id, capture_clause, span);
@ -239,7 +236,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if let Some(upvars) = self.tcx.upvars_mentioned(closure_def_id) {
for var_hir_id in upvars.keys() {
let place = self.place_for_root_variable(local_def_id, *var_hir_id);
let place = self.place_for_root_variable(closure_def_id, *var_hir_id);
debug!("seed place {:?}", place);
@ -333,7 +330,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
// Returns a list of `Ty`s for each upvar.
fn final_upvar_tys(&self, closure_id: DefId) -> Vec<Ty<'tcx>> {
fn final_upvar_tys(&self, closure_id: LocalDefId) -> Vec<Ty<'tcx>> {
self.typeck_results
.borrow()
.closure_min_captures_flattened(closure_id)
@ -511,7 +508,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// ```
fn compute_min_captures(
&self,
closure_def_id: DefId,
closure_def_id: LocalDefId,
capture_information: InferredCaptureInformation<'tcx>,
closure_span: Span,
) {
@ -730,7 +727,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// `disjoint_capture_drop_reorder` if needed.
fn perform_2229_migration_anaysis(
&self,
closure_def_id: DefId,
closure_def_id: LocalDefId,
body_id: hir::BodyId,
capture_clause: hir::CaptureBy,
span: Span,
@ -746,8 +743,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let (migration_string, migrated_variables_concat) =
migration_suggestion_for_2229(self.tcx, &need_migrations);
let closure_hir_id =
self.tcx.hir().local_def_id_to_hir_id(closure_def_id.expect_local());
let closure_hir_id = self.tcx.hir().local_def_id_to_hir_id(closure_def_id);
let closure_head_span = self.tcx.def_span(closure_def_id);
self.tcx.struct_span_lint_hir(
lint::builtin::RUST_2021_INCOMPATIBLE_CLOSURE_CAPTURES,
@ -1058,7 +1054,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
#[instrument(level = "debug", skip(self))]
fn compute_2229_migrations_for_drop(
&self,
closure_def_id: DefId,
closure_def_id: LocalDefId,
closure_span: Span,
min_captures: Option<&ty::RootVariableMinCaptureList<'tcx>>,
closure_clause: hir::CaptureBy,
@ -1066,7 +1062,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) -> Option<FxHashSet<UpvarMigrationInfo>> {
let ty = self.resolve_vars_if_possible(self.node_ty(var_hir_id));
if !ty.has_significant_drop(self.tcx, self.tcx.param_env(closure_def_id.expect_local())) {
if !ty.has_significant_drop(self.tcx, self.tcx.param_env(closure_def_id)) {
debug!("does not have significant drop");
return None;
}
@ -1160,7 +1156,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
#[instrument(level = "debug", skip(self))]
fn compute_2229_migrations(
&self,
closure_def_id: DefId,
closure_def_id: LocalDefId,
closure_span: Span,
closure_clause: hir::CaptureBy,
min_captures: Option<&ty::RootVariableMinCaptureList<'tcx>>,
@ -1343,14 +1339,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// implements Drop which will be affected since `y` isn't completely captured.
fn has_significant_drop_outside_of_captures(
&self,
closure_def_id: DefId,
closure_def_id: LocalDefId,
closure_span: Span,
base_path_ty: Ty<'tcx>,
captured_by_move_projs: Vec<&[Projection<'tcx>]>,
) -> bool {
let needs_drop = |ty: Ty<'tcx>| {
ty.has_significant_drop(self.tcx, self.tcx.param_env(closure_def_id.expect_local()))
};
let needs_drop =
|ty: Ty<'tcx>| ty.has_significant_drop(self.tcx, self.tcx.param_env(closure_def_id));
let is_drop_defined_for_ty = |ty: Ty<'tcx>| {
let drop_trait = self.tcx.require_lang_item(hir::LangItem::Drop, Some(closure_span));
@ -1360,7 +1355,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
drop_trait,
ty,
ty_params,
self.tcx.param_env(closure_def_id.expect_local()),
self.tcx.param_env(closure_def_id),
)
.must_apply_modulo_regions()
};
@ -1518,13 +1513,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
fn should_log_capture_analysis(&self, closure_def_id: DefId) -> bool {
self.tcx.has_attr(closure_def_id, sym::rustc_capture_analysis)
fn should_log_capture_analysis(&self, closure_def_id: LocalDefId) -> bool {
self.tcx.has_attr(closure_def_id.to_def_id(), sym::rustc_capture_analysis)
}
fn log_capture_analysis_first_pass(
&self,
closure_def_id: rustc_hir::def_id::DefId,
closure_def_id: LocalDefId,
capture_information: &InferredCaptureInformation<'tcx>,
closure_span: Span,
) {
@ -1543,7 +1538,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
fn log_closure_min_capture_info(&self, closure_def_id: DefId, closure_span: Span) {
fn log_closure_min_capture_info(&self, closure_def_id: LocalDefId, closure_span: Span) {
if self.should_log_capture_analysis(closure_def_id) {
if let Some(min_captures) =
self.typeck_results.borrow().closure_min_captures.get(&closure_def_id)
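
Editor's note: the typeck hunks above consistently narrow closure ids from `DefId` to `LocalDefId`, dropping most `to_def_id()`/`expect_local()` conversions. As a minimal sketch (simplified stand-ins, not rustc's real definitions), the snippet below models why widening to `DefId` always succeeds while narrowing back is fallible: a closure is always defined in the crate currently being compiled.

```rust
// Simplified stand-ins for rustc's id types (illustration only; the real
// `DefId`/`LocalDefId` live in rustc_span/rustc_hir and carry richer data).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct CrateNum(u32);

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct DefId {
    krate: CrateNum,
    index: u32,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct LocalDefId {
    index: u32,
}

const LOCAL_CRATE: CrateNum = CrateNum(0);

impl LocalDefId {
    // Widening always succeeds: every local item also has a crate-global id.
    fn to_def_id(self) -> DefId {
        DefId { krate: LOCAL_CRATE, index: self.index }
    }
}

impl DefId {
    // Narrowing is fallible: only ids belonging to the local crate convert back.
    fn as_local(self) -> Option<LocalDefId> {
        (self.krate == LOCAL_CRATE).then(|| LocalDefId { index: self.index })
    }

    // Panicking variant for callers that know the id must be local.
    fn expect_local(self) -> LocalDefId {
        self.as_local().expect("DefId is not from the local crate")
    }
}

fn main() {
    let closure = LocalDefId { index: 7 };
    let global = closure.to_def_id();
    assert_eq!(global.as_local(), Some(closure));
    assert_eq!(global.expect_local(), closure);
}
```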

View File

@ -8,7 +8,6 @@ use hir::def_id::LocalDefId;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::intravisit::{self, Visitor};
use rustc_infer::infer::error_reporting::TypeAnnotationNeeded::E0282;
use rustc_infer::infer::InferCtxt;
@ -348,14 +347,13 @@ impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
fn eval_closure_size(&mut self) {
let mut res: FxHashMap<DefId, ClosureSizeProfileData<'tcx>> = Default::default();
for (closure_def_id, data) in self.fcx.typeck_results.borrow().closure_size_eval.iter() {
let closure_hir_id =
self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local());
let mut res: FxHashMap<LocalDefId, ClosureSizeProfileData<'tcx>> = Default::default();
for (&closure_def_id, data) in self.fcx.typeck_results.borrow().closure_size_eval.iter() {
let closure_hir_id = self.tcx().hir().local_def_id_to_hir_id(closure_def_id);
let data = self.resolve(*data, &closure_hir_id);
res.insert(*closure_def_id, data);
res.insert(closure_def_id, data);
}
self.typeck_results.closure_size_eval = res;
@ -365,7 +363,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
self.fcx.typeck_results.borrow().closure_min_captures.len(),
Default::default(),
);
for (closure_def_id, root_min_captures) in
for (&closure_def_id, root_min_captures) in
self.fcx.typeck_results.borrow().closure_min_captures.iter()
{
let mut root_var_map_wb = ty::RootVariableMinCaptureList::with_capacity_and_hasher(
@ -377,7 +375,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
.iter()
.map(|captured_place| {
let locatable = captured_place.info.path_expr_id.unwrap_or_else(|| {
self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local())
self.tcx().hir().local_def_id_to_hir_id(closure_def_id)
});
self.resolve(captured_place.clone(), &locatable)
@ -385,7 +383,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
.collect();
root_var_map_wb.insert(*var_hir_id, min_list_wb);
}
min_captures_wb.insert(*closure_def_id, root_var_map_wb);
min_captures_wb.insert(closure_def_id, root_var_map_wb);
}
self.typeck_results.closure_min_captures = min_captures_wb;
@ -393,21 +391,20 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
fn visit_fake_reads_map(&mut self) {
let mut resolved_closure_fake_reads: FxHashMap<
DefId,
LocalDefId,
Vec<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>,
> = Default::default();
for (closure_def_id, fake_reads) in
for (&closure_def_id, fake_reads) in
self.fcx.typeck_results.borrow().closure_fake_reads.iter()
{
let mut resolved_fake_reads = Vec::<(HirPlace<'tcx>, FakeReadCause, hir::HirId)>::new();
for (place, cause, hir_id) in fake_reads.iter() {
let locatable =
self.tcx().hir().local_def_id_to_hir_id(closure_def_id.expect_local());
let locatable = self.tcx().hir().local_def_id_to_hir_id(closure_def_id);
let resolved_fake_read = self.resolve(place.clone(), &locatable);
resolved_fake_reads.push((resolved_fake_read, *cause, *hir_id));
}
resolved_closure_fake_reads.insert(*closure_def_id, resolved_fake_reads);
resolved_closure_fake_reads.insert(closure_def_id, resolved_fake_reads);
}
self.typeck_results.closure_fake_reads = resolved_closure_fake_reads;
}

View File

@ -94,14 +94,6 @@ fn visit_implementation_of_copy(tcx: TyCtxt<'_>, impl_did: LocalDefId) {
// We'll try to suggest constraining type parameters to fulfill the requirements of
// their `Copy` implementation.
let mut generics = None;
if let ty::Adt(def, _substs) = self_type.kind() {
let self_def_id = def.did();
if let Some(local) = self_def_id.as_local() {
let self_item = tcx.hir().expect_item(local);
generics = self_item.kind.generics();
}
}
let mut errors: BTreeMap<_, Vec<_>> = Default::default();
let mut bounds = vec![];
@ -163,16 +155,14 @@ fn visit_implementation_of_copy(tcx: TyCtxt<'_>, impl_did: LocalDefId) {
&format!("the `Copy` impl for `{}` requires that `{}`", ty, error_predicate),
);
}
if let Some(generics) = generics {
suggest_constraining_type_params(
tcx,
generics,
&mut err,
bounds.iter().map(|(param, constraint, def_id)| {
(param.as_str(), constraint.as_str(), *def_id)
}),
);
}
suggest_constraining_type_params(
tcx,
tcx.hir().get_generics(impl_did).expect("impls always have generics"),
&mut err,
bounds.iter().map(|(param, constraint, def_id)| {
(param.as_str(), constraint.as_str(), *def_id)
}),
);
err.emit();
}
Err(CopyImplementationError::NotAnAdt) => {

View File

@ -468,7 +468,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
self.borrow_expr(discr, ty::ImmBorrow);
} else {
let closure_def_id = match discr_place.place.base {
PlaceBase::Upvar(upvar_id) => Some(upvar_id.closure_expr_id.to_def_id()),
PlaceBase::Upvar(upvar_id) => Some(upvar_id.closure_expr_id),
_ => None,
};
@ -642,7 +642,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
fn walk_arm(&mut self, discr_place: &PlaceWithHirId<'tcx>, arm: &hir::Arm<'_>) {
let closure_def_id = match discr_place.place.base {
PlaceBase::Upvar(upvar_id) => Some(upvar_id.closure_expr_id.to_def_id()),
PlaceBase::Upvar(upvar_id) => Some(upvar_id.closure_expr_id),
_ => None,
};
@ -666,7 +666,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
/// let binding, and *not* a match arm or nested pat.)
fn walk_irrefutable_pat(&mut self, discr_place: &PlaceWithHirId<'tcx>, pat: &hir::Pat<'_>) {
let closure_def_id = match discr_place.place.base {
PlaceBase::Upvar(upvar_id) => Some(upvar_id.closure_expr_id.to_def_id()),
PlaceBase::Upvar(upvar_id) => Some(upvar_id.closure_expr_id),
_ => None,
};
@ -763,7 +763,7 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
debug!("walk_captures({:?})", closure_expr);
let tcx = self.tcx();
let closure_def_id = tcx.hir().local_def_id(closure_expr.hir_id).to_def_id();
let closure_def_id = tcx.hir().local_def_id(closure_expr.hir_id);
let upvars = tcx.upvars_mentioned(self.body_owner);
// For purposes of this function, generator and closures are equivalent.

View File

@ -0,0 +1,70 @@
// edition:2018
fn dummy() -> i32 {
42
}
fn extra_semicolon() {
let _ = if true {
//~^ NOTE `if` and `else` have incompatible types
dummy(); //~ NOTE expected because of this
//~^ HELP consider removing this semicolon
} else {
dummy() //~ ERROR `if` and `else` have incompatible types
//~^ NOTE expected `()`, found `i32`
};
}
async fn async_dummy() {} //~ NOTE checked the `Output` of this `async fn`, found opaque type
//~| NOTE while checking the return type of the `async fn`
//~| NOTE in this expansion of desugaring of `async` block or function
//~| NOTE checked the `Output` of this `async fn`, expected opaque type
//~| NOTE while checking the return type of the `async fn`
//~| NOTE in this expansion of desugaring of `async` block or function
async fn async_dummy2() {} //~ NOTE checked the `Output` of this `async fn`, found opaque type
//~| NOTE checked the `Output` of this `async fn`, found opaque type
//~| NOTE while checking the return type of the `async fn`
//~| NOTE in this expansion of desugaring of `async` block or function
//~| NOTE while checking the return type of the `async fn`
//~| NOTE in this expansion of desugaring of `async` block or function
async fn async_extra_semicolon_same() {
let _ = if true {
//~^ NOTE `if` and `else` have incompatible types
async_dummy(); //~ NOTE expected because of this
//~^ HELP consider removing this semicolon
} else {
async_dummy() //~ ERROR `if` and `else` have incompatible types
//~^ NOTE expected `()`, found opaque type
//~| NOTE expected unit type `()`
//~| HELP consider `await`ing on the `Future`
};
}
async fn async_extra_semicolon_different() {
let _ = if true {
//~^ NOTE `if` and `else` have incompatible types
async_dummy(); //~ NOTE expected because of this
//~^ HELP consider removing this semicolon
} else {
async_dummy2() //~ ERROR `if` and `else` have incompatible types
//~^ NOTE expected `()`, found opaque type
//~| NOTE expected unit type `()`
//~| HELP consider `await`ing on the `Future`
};
}
async fn async_different_futures() {
let _ = if true {
//~^ NOTE `if` and `else` have incompatible types
async_dummy() //~ NOTE expected because of this
//~| HELP consider `await`ing on both `Future`s
} else {
async_dummy2() //~ ERROR `if` and `else` have incompatible types
//~^ NOTE expected opaque type, found a different opaque type
//~| NOTE expected opaque type `impl Future<Output = ()>`
//~| NOTE distinct uses of `impl Trait` result in different opaque types
};
}
fn main() {}

View File

@ -0,0 +1,130 @@
error[E0308]: `if` and `else` have incompatible types
--> $DIR/if-then-neeing-semi.rs:37:9
|
LL | let _ = if true {
| _____________-
LL | |
LL | | async_dummy();
| | -------------- expected because of this
LL | |
LL | | } else {
LL | | async_dummy()
| | ^^^^^^^^^^^^^ expected `()`, found opaque type
... |
LL | |
LL | | };
| |_____- `if` and `else` have incompatible types
|
note: while checking the return type of the `async fn`
--> $DIR/if-then-neeing-semi.rs:18:24
|
LL | async fn async_dummy() {}
| ^ checked the `Output` of this `async fn`, found opaque type
= note: expected unit type `()`
found opaque type `impl Future<Output = ()>`
help: consider `await`ing on the `Future`
|
LL | async_dummy().await
| ++++++
help: consider removing this semicolon
|
LL - async_dummy();
LL + async_dummy()
|
error[E0308]: `if` and `else` have incompatible types
--> $DIR/if-then-neeing-semi.rs:50:9
|
LL | let _ = if true {
| _____________-
LL | |
LL | | async_dummy();
| | -------------- expected because of this
LL | |
LL | | } else {
LL | | async_dummy2()
| | ^^^^^^^^^^^^^^ expected `()`, found opaque type
... |
LL | |
LL | | };
| |_____- `if` and `else` have incompatible types
|
note: while checking the return type of the `async fn`
--> $DIR/if-then-neeing-semi.rs:24:25
|
LL | async fn async_dummy2() {}
| ^ checked the `Output` of this `async fn`, found opaque type
= note: expected unit type `()`
found opaque type `impl Future<Output = ()>`
help: consider `await`ing on the `Future`
|
LL | async_dummy2().await
| ++++++
help: consider removing this semicolon and boxing the expressions
|
LL ~ Box::new(async_dummy())
LL |
LL | } else {
LL ~ Box::new(async_dummy2())
|
error[E0308]: `if` and `else` have incompatible types
--> $DIR/if-then-neeing-semi.rs:63:9
|
LL | let _ = if true {
| _____________-
LL | |
LL | | async_dummy()
| | ------------- expected because of this
LL | |
LL | | } else {
LL | | async_dummy2()
| | ^^^^^^^^^^^^^^ expected opaque type, found a different opaque type
... |
LL | |
LL | | };
| |_____- `if` and `else` have incompatible types
|
note: while checking the return type of the `async fn`
--> $DIR/if-then-neeing-semi.rs:18:24
|
LL | async fn async_dummy() {}
| ^ checked the `Output` of this `async fn`, expected opaque type
note: while checking the return type of the `async fn`
--> $DIR/if-then-neeing-semi.rs:24:25
|
LL | async fn async_dummy2() {}
| ^ checked the `Output` of this `async fn`, found opaque type
= note: expected opaque type `impl Future<Output = ()>` (opaque type at <$DIR/if-then-neeing-semi.rs:18:24>)
found opaque type `impl Future<Output = ()>` (opaque type at <$DIR/if-then-neeing-semi.rs:24:25>)
= note: distinct uses of `impl Trait` result in different opaque types
help: consider `await`ing on both `Future`s
|
LL ~ async_dummy().await
LL |
LL | } else {
LL ~ async_dummy2().await
|
error[E0308]: `if` and `else` have incompatible types
--> $DIR/if-then-neeing-semi.rs:13:9
|
LL | let _ = if true {
| _____________-
LL | |
LL | | dummy();
| | --------
| | | |
| | | help: consider removing this semicolon
| | expected because of this
LL | |
LL | | } else {
LL | | dummy()
| | ^^^^^^^ expected `()`, found `i32`
LL | |
LL | | };
| |_____- `if` and `else` have incompatible types
error: aborting due to 4 previous errors
For more information about this error, try `rustc --explain E0308`.
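
Editor's note: the stderr above exercises the new "consider removing this semicolon and boxing the expressions" help. The standalone snippet below is an editorial illustration of the underlying type rules, using closures in place of the `async fn` futures from the test so it compiles on its own: a trailing semicolon turns a branch's type into `()`, and boxing both branches coerces two distinct anonymous types into one trait-object type.

```rust
fn main() {
    let flag = true;

    // A trailing semicolon discards the branch value, so the `if` arm would have
    // type `()` while the `else` arm has type `i32`:
    //     let _ = if flag { 1; } else { 2 };   // error: `if` and `else` have incompatible types
    // Removing the semicolon makes both arms `i32`:
    let _ = if flag { 1 } else { 2 };

    // Two closures (like two `async fn` futures) have distinct anonymous types.
    // Boxing both arms lets them coerce to a single trait-object type:
    let f: Box<dyn Fn() -> i32> = if flag { Box::new(|| 1) } else { Box::new(|| 2) };
    assert_eq!(f(), 1);
}
```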

View File

@ -0,0 +1,19 @@
// run-rustfix
#[derive(Clone)]
struct Wrapper<T>(T);
struct OnlyCopyIfDisplay<T>(std::marker::PhantomData<T>);
impl<T: std::fmt::Display> Clone for OnlyCopyIfDisplay<T> {
fn clone(&self) -> Self {
OnlyCopyIfDisplay(std::marker::PhantomData)
}
}
impl<T: std::fmt::Display> Copy for OnlyCopyIfDisplay<T> {}
impl<S: std::fmt::Display> Copy for Wrapper<OnlyCopyIfDisplay<S>> {}
//~^ ERROR the trait `Copy` may not be implemented for this type
fn main() {}

View File

@ -0,0 +1,19 @@
// run-rustfix
#[derive(Clone)]
struct Wrapper<T>(T);
struct OnlyCopyIfDisplay<T>(std::marker::PhantomData<T>);
impl<T: std::fmt::Display> Clone for OnlyCopyIfDisplay<T> {
fn clone(&self) -> Self {
OnlyCopyIfDisplay(std::marker::PhantomData)
}
}
impl<T: std::fmt::Display> Copy for OnlyCopyIfDisplay<T> {}
impl<S> Copy for Wrapper<OnlyCopyIfDisplay<S>> {}
//~^ ERROR the trait `Copy` may not be implemented for this type
fn main() {}

View File

@ -0,0 +1,22 @@
error[E0204]: the trait `Copy` may not be implemented for this type
--> $DIR/missing-bound-in-manual-copy-impl-2.rs:16:9
|
LL | struct Wrapper<T>(T);
| - this field does not implement `Copy`
...
LL | impl<S> Copy for Wrapper<OnlyCopyIfDisplay<S>> {}
| ^^^^
|
note: the `Copy` impl for `OnlyCopyIfDisplay<S>` requires that `S: std::fmt::Display`
--> $DIR/missing-bound-in-manual-copy-impl-2.rs:4:19
|
LL | struct Wrapper<T>(T);
| ^
help: consider restricting type parameter `S`
|
LL | impl<S: std::fmt::Display> Copy for Wrapper<OnlyCopyIfDisplay<S>> {}
| +++++++++++++++++++
error: aborting due to previous error
For more information about this error, try `rustc --explain E0204`.

View File

@ -0,0 +1,9 @@
// run-rustfix
#[derive(Clone)]
struct Wrapper<T>(T);
impl<S: Copy> Copy for Wrapper<S> {}
//~^ ERROR the trait `Copy` may not be implemented for this type
fn main() {}

View File

@ -0,0 +1,9 @@
// run-rustfix
#[derive(Clone)]
struct Wrapper<T>(T);
impl<S> Copy for Wrapper<S> {}
//~^ ERROR the trait `Copy` may not be implemented for this type
fn main() {}

View File

@ -0,0 +1,17 @@
error[E0204]: the trait `Copy` may not be implemented for this type
--> $DIR/missing-bound-in-manual-copy-impl.rs:6:9
|
LL | struct Wrapper<T>(T);
| - this field does not implement `Copy`
LL |
LL | impl<S> Copy for Wrapper<S> {}
| ^^^^
|
help: consider restricting type parameter `S`
|
LL | impl<S: Copy> Copy for Wrapper<S> {}
| ++++++
error: aborting due to previous error
For more information about this error, try `rustc --explain E0204`.

View File

@ -968,7 +968,7 @@ pub fn can_move_expr_to_closure<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'
}
},
ExprKind::Closure { .. } => {
let closure_id = self.cx.tcx.hir().local_def_id(e.hir_id).to_def_id();
let closure_id = self.cx.tcx.hir().local_def_id(e.hir_id);
for capture in self.cx.typeck_results().closure_min_captures_flattened(closure_id) {
let local_id = match capture.place.base {
PlaceBase::Local(id) => id,

View File

@ -323,6 +323,7 @@ otherwise, make sure you bump the `FORMAT_VERSION` constant.
cc = [
"@CraftSpider",
"@aDotInTheVoid",
"@Enselic",
]
[mentions."src/tools/cargo"]