Use () for codegen queries.

Camille GILLOT 2021-05-11 14:39:04 +02:00
parent 4e8d4bdf4b
commit 0bde3b1f80
15 changed files with 40 additions and 56 deletions
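The diff below switches queries that only ever apply to the local crate from a CrateNum key (which every caller passed as LOCAL_CRATE and every provider asserted) to a () key. As a rough illustration of the idea only, here is a minimal, self-contained Rust sketch; CrateNum, LOCAL_CRATE, and OutputFilenames are toy stand-ins here, not rustc's real types, and output_filenames_old/output_filenames_new plus the "target/debug" path are made up for the example.

// Hypothetical, simplified sketch of the key change; none of these types
// model rustc's actual query machinery.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct CrateNum(u32);
const LOCAL_CRATE: CrateNum = CrateNum(0);

struct OutputFilenames {
    out_directory: String,
}

// Before: the key is a CrateNum, but only LOCAL_CRATE is ever valid, so the
// provider has to assert that at runtime.
fn output_filenames_old(cnum: CrateNum) -> OutputFilenames {
    assert_eq!(cnum, LOCAL_CRATE);
    OutputFilenames { out_directory: "target/debug".to_string() }
}

// After: the key is (), so "local crate only" is expressed in the signature
// and the runtime assert disappears.
fn output_filenames_new(_: ()) -> OutputFilenames {
    OutputFilenames { out_directory: "target/debug".to_string() }
}

fn main() {
    let old = output_filenames_old(LOCAL_CRATE);
    let new = output_filenames_new(());
    assert_eq!(old.out_directory, new.out_directory);
}

Encoding the invariant in the key type is what lets the asserts in the old code (assert_eq!(cnum, LOCAL_CRATE), debug_assert!(cnum == LOCAL_CRATE)) be deleted throughout the diff.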


@@ -41,7 +41,7 @@ fn emit_module(
  unwind_context.emit(&mut product);
- let tmp_file = tcx.output_filenames(LOCAL_CRATE).temp_path(OutputType::Object, Some(&name));
+ let tmp_file = tcx.output_filenames(()).temp_path(OutputType::Object, Some(&name));
  let obj = product.object.write().unwrap();
  if let Err(err) = std::fs::write(&tmp_file, obj) {
  tcx.sess.fatal(&format!("error writing object file: {}", err));
@@ -73,7 +73,7 @@ fn reuse_workproduct_for_cgu(
  let work_product = cgu.work_product(tcx);
  if let Some(saved_file) = &work_product.saved_file {
  let obj_out = tcx
- .output_filenames(LOCAL_CRATE)
+ .output_filenames(())
  .temp_path(OutputType::Object, Some(&cgu.name().as_str()));
  object = Some(obj_out.clone());
  let source_file = rustc_incremental::in_incr_comp_dir(&incr_comp_session_dir, &saved_file);
@@ -179,7 +179,7 @@ pub(crate) fn run_aot(
  let mut work_products = FxHashMap::default();
  let cgus = if tcx.sess.opts.output_types.should_codegen() {
- tcx.collect_and_partition_mono_items(LOCAL_CRATE).1
+ tcx.collect_and_partition_mono_items(()).1
  } else {
  // If only `--emit metadata` is used, we shouldn't perform any codegen.
  // Also `tcx.collect_and_partition_mono_items` may panic in that case.
@@ -265,7 +265,7 @@ pub(crate) fn run_aot(
  .to_string();
  let tmp_file = tcx
- .output_filenames(LOCAL_CRATE)
+ .output_filenames(())
  .temp_path(OutputType::Metadata, Some(&metadata_cgu_name));
  let obj = crate::backend::with_object(tcx.sess, &metadata_cgu_name, |object| {
@@ -342,7 +342,7 @@ fn codegen_global_asm(tcx: TyCtxt<'_>, cgu_name: &str, global_asm: &str) {
  .join("\n");
  let output_object_file =
- tcx.output_filenames(LOCAL_CRATE).temp_path(OutputType::Object, Some(cgu_name));
+ tcx.output_filenames(()).temp_path(OutputType::Object, Some(cgu_name));
  // Assemble `global_asm`
  let global_asm_object_file = add_file_stem_postfix(output_object_file.clone(), ".asm");


@@ -8,7 +8,6 @@ use std::os::raw::{c_char, c_int};
  use cranelift_codegen::binemit::{NullStackMapSink, NullTrapSink};
  use rustc_codegen_ssa::CrateInfo;
  use rustc_middle::mir::mono::MonoItem;
- use rustc_session::config::EntryFnType;
  use cranelift_jit::{JITBuilder, JITModule};
@@ -66,7 +65,7 @@ pub(crate) fn run_jit(tcx: TyCtxt<'_>, backend_config: BackendConfig) -> ! {
  matches!(backend_config.codegen_mode, CodegenMode::JitLazy),
  );
- let (_, cgus) = tcx.collect_and_partition_mono_items(LOCAL_CRATE);
+ let (_, cgus) = tcx.collect_and_partition_mono_items(());
  let mono_items = cgus
  .iter()
  .map(|cgu| cgu.items_in_deterministic_order(tcx).into_iter())


@@ -214,7 +214,7 @@ pub(crate) fn write_ir_file(
  return;
  }
- let clif_output_dir = tcx.output_filenames(LOCAL_CRATE).with_extension("clif");
+ let clif_output_dir = tcx.output_filenames(()).with_extension("clif");
  match std::fs::create_dir(&clif_output_dir) {
  Ok(()) => {}


@@ -20,7 +20,6 @@ use rustc_codegen_ssa::{CompiledModule, ModuleCodegen};
  use rustc_data_structures::small_c_str::SmallCStr;
  use rustc_errors::{FatalError, Handler, Level};
  use rustc_fs_util::{link_or_copy, path_to_c_string};
- use rustc_hir::def_id::LOCAL_CRATE;
  use rustc_middle::bug;
  use rustc_middle::ty::TyCtxt;
  use rustc_session::config::{self, Lto, OutputType, Passes, SwitchWithOptPath};
@@ -92,13 +91,12 @@ pub fn create_informational_target_machine(sess: &Session) -> &'static mut llvm:
  pub fn create_target_machine(tcx: TyCtxt<'_>, mod_name: &str) -> &'static mut llvm::TargetMachine {
  let split_dwarf_file = if tcx.sess.target_can_use_split_dwarf() {
- tcx.output_filenames(LOCAL_CRATE)
- .split_dwarf_path(tcx.sess.split_debuginfo(), Some(mod_name))
+ tcx.output_filenames(()).split_dwarf_path(tcx.sess.split_debuginfo(), Some(mod_name))
  } else {
  None
  };
  let config = TargetMachineFactoryConfig { split_dwarf_file };
- target_machine_factory(&tcx.sess, tcx.backend_optimization_level(LOCAL_CRATE))(config)
+ target_machine_factory(&tcx.sess, tcx.backend_optimization_level(()))(config)
  .unwrap_or_else(|err| llvm_err(tcx.sess.diagnostic(), &err).raise())
  }


@@ -6,7 +6,7 @@ use llvm::coverageinfo::CounterMappingRegion;
  use rustc_codegen_ssa::coverageinfo::map::{Counter, CounterExpression};
  use rustc_codegen_ssa::traits::{ConstMethods, CoverageInfoMethods};
  use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
- use rustc_hir::def_id::{DefId, DefIdSet, LOCAL_CRATE};
+ use rustc_hir::def_id::{DefId, DefIdSet};
  use rustc_llvm::RustString;
  use rustc_middle::mir::coverage::CodeRegion;
  use rustc_span::Symbol;
@@ -276,7 +276,7 @@ fn add_unused_functions<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) {
  })
  .collect();
- let codegenned_def_ids = tcx.codegened_and_inlined_items(LOCAL_CRATE);
+ let codegenned_def_ids = tcx.codegened_and_inlined_items(());
  let mut unused_def_ids_by_file: FxHashMap<Symbol, Vec<DefId>> = FxHashMap::default();
  for &non_codegenned_def_id in all_def_ids.difference(codegenned_def_ids) {


@@ -995,9 +995,10 @@ pub fn compile_unit_metadata(
  let name_in_debuginfo = name_in_debuginfo.to_string_lossy();
  let work_dir = tcx.sess.working_dir.0.to_string_lossy();
  let flags = "\0";
- let out_dir = &tcx.output_filenames(LOCAL_CRATE).out_directory;
+ let output_filenames = tcx.output_filenames(());
+ let out_dir = &output_filenames.out_directory;
  let split_name = if tcx.sess.target_can_use_split_dwarf() {
- tcx.output_filenames(LOCAL_CRATE)
+ output_filenames
  .split_dwarf_path(tcx.sess.split_debuginfo(), Some(codegen_unit_name))
  .map(|f| out_dir.join(f))
  } else {
@@ -1058,15 +1059,12 @@ pub fn compile_unit_metadata(
  if tcx.sess.opts.debugging_opts.profile {
  let cu_desc_metadata =
  llvm::LLVMRustMetadataAsValue(debug_context.llcontext, unit_metadata);
- let default_gcda_path = &tcx.output_filenames(LOCAL_CRATE).with_extension("gcda");
+ let default_gcda_path = &output_filenames.with_extension("gcda");
  let gcda_path =
  tcx.sess.opts.debugging_opts.profile_emit.as_ref().unwrap_or(default_gcda_path);
  let gcov_cu_info = [
- path_to_mdstring(
- debug_context.llcontext,
- &tcx.output_filenames(LOCAL_CRATE).with_extension("gcno"),
- ),
+ path_to_mdstring(debug_context.llcontext, &output_filenames.with_extension("gcno")),
  path_to_mdstring(debug_context.llcontext, &gcda_path),
  cu_desc_metadata,
  ];


@@ -230,7 +230,7 @@ fn exported_symbols_provider_local(
  // external linkage is enough for monomorphization to be linked to.
  let need_visibility = tcx.sess.target.dynamic_linking && !tcx.sess.target.only_cdylib;
- let (_, cgus) = tcx.collect_and_partition_mono_items(LOCAL_CRATE);
+ let (_, cgus) = tcx.collect_and_partition_mono_items(());
  for (mono_item, &(linkage, visibility)) in cgus.iter().flat_map(|cgu| cgu.items().iter()) {
  if linkage != Linkage::External {
@@ -275,10 +275,8 @@ fn exported_symbols_provider_local(
  fn upstream_monomorphizations_provider(
  tcx: TyCtxt<'_>,
- cnum: CrateNum,
+ (): (),
  ) -> DefIdMap<FxHashMap<SubstsRef<'_>, CrateNum>> {
- debug_assert!(cnum == LOCAL_CRATE);
  let cnums = tcx.all_crate_nums(());
  let mut instances: DefIdMap<FxHashMap<_, _>> = Default::default();
@@ -341,7 +339,7 @@ fn upstream_monomorphizations_for_provider(
  def_id: DefId,
  ) -> Option<&FxHashMap<SubstsRef<'_>, CrateNum>> {
  debug_assert!(!def_id.is_local());
- tcx.upstream_monomorphizations(LOCAL_CRATE).get(&def_id)
+ tcx.upstream_monomorphizations(()).get(&def_id)
  }
  fn upstream_drop_glue_for_provider<'tcx>(


@@ -482,7 +482,7 @@ pub fn start_async_codegen<B: ExtraBackendMethods>(
  codegen_worker_receive,
  shared_emitter_main,
  future: coordinator_thread,
- output_filenames: tcx.output_filenames(LOCAL_CRATE),
+ output_filenames: tcx.output_filenames(()),
  }
  }
@@ -1042,7 +1042,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
  // If we know that we wont be doing codegen, create target machines without optimisation.
  config::OptLevel::No
  } else {
- tcx.backend_optimization_level(LOCAL_CRATE)
+ tcx.backend_optimization_level(())
  };
  let cgcx = CodegenContext::<B> {
  backend: backend.clone(),
@@ -1061,7 +1061,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
  cgu_reuse_tracker: sess.cgu_reuse_tracker.clone(),
  coordinator_send,
  diag_emitter: shared_emitter.clone(),
- output_filenames: tcx.output_filenames(LOCAL_CRATE),
+ output_filenames: tcx.output_filenames(()),
  regular_module_config: regular_config,
  metadata_module_config: metadata_config,
  allocator_module_config: allocator_config,


@@ -485,7 +485,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
  // Run the monomorphization collector and partition the collected items into
  // codegen units.
- let codegen_units = tcx.collect_and_partition_mono_items(LOCAL_CRATE).1;
+ let codegen_units = tcx.collect_and_partition_mono_items(()).1;
  // Force all codegen_unit queries so they are already either red or green
  // when compile_codegen_unit accesses them. We are not able to re-execute


@@ -36,7 +36,7 @@ pub fn assert_module_sources(tcx: TyCtxt<'_>) {
  }
  let available_cgus = tcx
- .collect_and_partition_mono_items(LOCAL_CRATE)
+ .collect_and_partition_mono_items(())
  .1
  .iter()
  .map(|cgu| cgu.name().to_string())


@@ -1150,11 +1150,9 @@ rustc_queries! {
  /// added or removed in any upstream crate. Instead use the narrower
  /// `upstream_monomorphizations_for`, `upstream_drop_glue_for`, or, even
  /// better, `Instance::upstream_monomorphization()`.
- query upstream_monomorphizations(
- k: CrateNum
- ) -> DefIdMap<FxHashMap<SubstsRef<'tcx>, CrateNum>> {
+ query upstream_monomorphizations(_: ()) -> DefIdMap<FxHashMap<SubstsRef<'tcx>, CrateNum>> {
  storage(ArenaCacheSelector<'tcx>)
- desc { "collecting available upstream monomorphizations `{:?}`", k }
+ desc { "collecting available upstream monomorphizations" }
  }
  /// Returns the set of upstream monomorphizations available for the
@@ -1434,8 +1432,7 @@ rustc_queries! {
  desc { "exported_symbols" }
  }
- query collect_and_partition_mono_items(_: CrateNum)
- -> (&'tcx DefIdSet, &'tcx [CodegenUnit<'tcx>]) {
+ query collect_and_partition_mono_items(_: ()) -> (&'tcx DefIdSet, &'tcx [CodegenUnit<'tcx>]) {
  eval_always
  desc { "collect_and_partition_mono_items" }
  }
@@ -1444,8 +1441,7 @@ rustc_queries! {
  }
  /// All items participating in code generation together with items inlined into them.
- query codegened_and_inlined_items(_: CrateNum)
- -> &'tcx DefIdSet {
+ query codegened_and_inlined_items(_: ()) -> &'tcx DefIdSet {
  eval_always
  desc { "codegened_and_inlined_items" }
  }
@@ -1460,11 +1456,11 @@ rustc_queries! {
  tcx.def_path_str(key)
  }
  }
- query backend_optimization_level(_: CrateNum) -> OptLevel {
+ query backend_optimization_level(_: ()) -> OptLevel {
  desc { "optimization level used by backend" }
  }
- query output_filenames(_: CrateNum) -> Arc<OutputFilenames> {
+ query output_filenames(_: ()) -> Arc<OutputFilenames> {
  eval_always
  desc { "output_filenames" }
  }


@@ -2816,10 +2816,7 @@ pub fn provide(providers: &mut ty::query::Providers) {
  };
  providers.extern_mod_stmt_cnum = |tcx, id| tcx.extern_crate_map.get(&id).cloned();
  providers.all_crate_nums = |tcx, ()| tcx.arena.alloc_slice(&tcx.cstore.crates_untracked());
- providers.output_filenames = |tcx, cnum| {
- assert_eq!(cnum, LOCAL_CRATE);
- tcx.output_filenames.clone()
- };
+ providers.output_filenames = |tcx, ()| tcx.output_filenames.clone();
  providers.features_query = |tcx, ()| tcx.sess.features_untracked();
  providers.is_panic_runtime = |tcx, cnum| {
  assert_eq!(cnum, LOCAL_CRATE);
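For the provider side shown in the hunk above, a toy sketch of the same pattern follows; Ctxt, Providers, local_output_filenames, and the "libdemo.rlib" value are invented for the example and do not correspond to rustc's real Providers type or provider signatures.

// Toy illustration of why a ()-keyed provider can shrink to a single
// expression: there is no crate number left to check.
struct Ctxt {
    output_filenames: String,
}

struct Providers {
    output_filenames: fn(&Ctxt, ()) -> String,
}

// The `(): ()` parameter mirrors the style used in the hunks of this commit.
fn local_output_filenames(tcx: &Ctxt, (): ()) -> String {
    tcx.output_filenames.clone()
}

fn provide(providers: &mut Providers) {
    // Before: the provider took a CrateNum and asserted it was LOCAL_CRATE.
    // After: the () key makes that boilerplate unnecessary.
    providers.output_filenames = local_output_filenames;
}

fn main() {
    let mut providers = Providers { output_filenames: local_output_filenames };
    provide(&mut providers);
    let tcx = Ctxt { output_filenames: "libdemo.rlib".to_string() };
    println!("{}", (providers.output_filenames)(&tcx, ()));
}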


@@ -184,7 +184,7 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet};
  use rustc_data_structures::sync::{par_iter, MTLock, MTRef, ParallelIterator};
  use rustc_errors::{ErrorReported, FatalError};
  use rustc_hir as hir;
- use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId, LOCAL_CRATE};
+ use rustc_hir::def_id::{DefId, DefIdMap, LocalDefId};
  use rustc_hir::itemlikevisit::ItemLikeVisitor;
  use rustc_hir::lang_items::LangItem;
  use rustc_index::bit_set::GrowableBitSet;
@@ -452,7 +452,7 @@ fn shrunk_instance_name(
  after = &s[positions().rev().nth(after).unwrap_or(0)..],
  );
- let path = tcx.output_filenames(LOCAL_CRATE).temp_path_ext("long-type.txt", None);
+ let path = tcx.output_filenames(()).temp_path_ext("long-type.txt", None);
  let written_to_path = std::fs::write(&path, s).ok().map(|_| path);
  (shrunk, written_to_path)


@@ -97,7 +97,7 @@ mod merging;
  use rustc_data_structures::fx::{FxHashMap, FxHashSet};
  use rustc_data_structures::sync;
- use rustc_hir::def_id::{CrateNum, DefIdSet, LOCAL_CRATE};
+ use rustc_hir::def_id::DefIdSet;
  use rustc_middle::mir::mono::MonoItem;
  use rustc_middle::mir::mono::{CodegenUnit, Linkage};
  use rustc_middle::ty::print::with_no_trimmed_paths;
@@ -311,10 +311,8 @@ where
  fn collect_and_partition_mono_items<'tcx>(
  tcx: TyCtxt<'tcx>,
- cnum: CrateNum,
+ (): (),
  ) -> (&'tcx DefIdSet, &'tcx [CodegenUnit<'tcx>]) {
- assert_eq!(cnum, LOCAL_CRATE);
  let collection_mode = match tcx.sess.opts.debugging_opts.print_mono_items {
  Some(ref s) => {
  let mode_string = s.to_lowercase();
@@ -426,8 +424,8 @@ fn collect_and_partition_mono_items<'tcx>(
  (tcx.arena.alloc(mono_items), codegen_units)
  }
- fn codegened_and_inlined_items<'tcx>(tcx: TyCtxt<'tcx>, cnum: CrateNum) -> &'tcx DefIdSet {
- let (items, cgus) = tcx.collect_and_partition_mono_items(cnum);
+ fn codegened_and_inlined_items<'tcx>(tcx: TyCtxt<'tcx>, (): ()) -> &'tcx DefIdSet {
+ let (items, cgus) = tcx.collect_and_partition_mono_items(());
  let mut visited = DefIdSet::default();
  let mut result = items.clone();
@@ -455,12 +453,12 @@ pub fn provide(providers: &mut Providers) {
  providers.codegened_and_inlined_items = codegened_and_inlined_items;
  providers.is_codegened_item = |tcx, def_id| {
- let (all_mono_items, _) = tcx.collect_and_partition_mono_items(LOCAL_CRATE);
+ let (all_mono_items, _) = tcx.collect_and_partition_mono_items(());
  all_mono_items.contains(&def_id)
  };
  providers.codegen_unit = |tcx, name| {
- let (_, all) = tcx.collect_and_partition_mono_items(LOCAL_CRATE);
+ let (_, all) = tcx.collect_and_partition_mono_items(());
  all.iter()
  .find(|cgu| cgu.name() == name)
  .unwrap_or_else(|| panic!("failed to find cgu with name {:?}", name))


@@ -95,7 +95,7 @@ impl<'tcx> SaveContext<'tcx> {
  let sess = &self.tcx.sess;
  // Save-analysis is emitted per whole session, not per each crate type
  let crate_type = sess.crate_types()[0];
- let outputs = &*self.tcx.output_filenames(LOCAL_CRATE);
+ let outputs = &*self.tcx.output_filenames(());
  if outputs.outputs.contains_key(&OutputType::Metadata) {
  filename_for_metadata(sess, crate_name, outputs)