Auto merge of #107586 - SparrowLii:parallel-query, r=cjgillot

Introduce `DynSend` and `DynSync` auto traits for the parallel compiler

Part of parallel-rustc #101566.

This PR introduces the `DynSend` / `DynSync` traits and the `FromDyn` / `IntoDynSyncSend` wrapper structs in `rustc_data_structures::marker`. `FromDyn` dynamically checks that a data structure is thread-safe at the point where it is handed to a parallel environment (for example a call to `par_for_each_in`). The check only happens when `-Z threads > 1`, so it does not affect compile times in single-threaded mode.

r? `@cjgillot`
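
For orientation, here is a minimal standalone sketch of the `FromDyn` idea (an editor's illustration, not the code added by this PR): the static flag stands in for `sync::is_dyn_thread_safe()`, and the blanket `impl<T: Send> DynSend for T` is a simplification of the auto-trait machinery introduced in `rustc_data_structures::marker` below.

```rust
use std::sync::atomic::{AtomicBool, Ordering};

// Stand-in for `sync::is_dyn_thread_safe()`, set once from `-Z threads`.
static DYN_THREAD_SAFE: AtomicBool = AtomicBool::new(false);

fn is_dyn_thread_safe() -> bool {
    DYN_THREAD_SAFE.load(Ordering::Relaxed)
}

// Simplified marker: data that may cross threads *if* the session is in
// dyn-thread-safe mode. (In rustc this is an auto trait with opt-outs.)
unsafe trait DynSend {}
unsafe impl<T: Send> DynSend for T {}

// Wrapper that turns a `DynSend` value into a `Send` one. Constructing it
// asserts that dyn-thread-safe mode is on, which is what makes the manual
// `Send` impl below sound.
struct FromDyn<T>(T);

impl<T> FromDyn<T> {
    fn from(val: T) -> Self {
        assert!(is_dyn_thread_safe());
        FromDyn(val)
    }

    fn into_inner(self) -> T {
        self.0
    }
}

unsafe impl<T: DynSend> Send for FromDyn<T> {}

fn main() {
    DYN_THREAD_SAFE.store(true, Ordering::Relaxed); // pretend `-Z threads=8`
    let data = FromDyn::from(vec![1, 2, 3]);
    std::thread::spawn(move || println!("{:?}", data.into_inner()))
        .join()
        .unwrap();
}
```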
Merged by bors on 2023-05-13 13:47:53 +00:00 as commit dd8ec9c88d.
26 changed files with 557 additions and 110 deletions


@ -3306,6 +3306,7 @@ dependencies = [
"rustc-hash",
"rustc-rayon",
"rustc-rayon-core",
"rustc_arena",
"rustc_graphviz",
"rustc_index",
"rustc_macros",


@ -48,14 +48,15 @@ pub enum TokenTree {
Delimited(DelimSpan, Delimiter, TokenStream),
}
// Ensure all fields of `TokenTree` is `Send` and `Sync`.
// Ensure all fields of `TokenTree` are `DynSend` and `DynSync`.
#[cfg(parallel_compiler)]
fn _dummy()
where
Token: Send + Sync,
DelimSpan: Send + Sync,
Delimiter: Send + Sync,
TokenStream: Send + Sync,
Token: sync::DynSend + sync::DynSync,
Spacing: sync::DynSend + sync::DynSync,
DelimSpan: sync::DynSend + sync::DynSync,
Delimiter: sync::DynSend + sync::DynSync,
TokenStream: sync::DynSend + sync::DynSync,
{
}
@ -118,7 +119,7 @@ where
}
}
pub trait ToAttrTokenStream: sync::Send + sync::Sync {
pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync {
fn to_attr_token_stream(&self) -> AttrTokenStream;
}


@ -17,10 +17,7 @@ use rustc_ast::expand::allocator::AllocatorKind;
use rustc_attr as attr;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry};
use rustc_data_structures::sync::par_iter;
#[cfg(parallel_compiler)]
use rustc_data_structures::sync::ParallelIterator;
use rustc_data_structures::sync::par_map;
use rustc_hir as hir;
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_hir::lang_items::LangItem;
@ -689,7 +686,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
// This likely is a temporary measure. Once we don't have to support the
// non-parallel compiler anymore, we can compile CGUs end-to-end in
// parallel and get rid of the complicated scheduling logic.
let mut pre_compiled_cgus = if cfg!(parallel_compiler) {
let mut pre_compiled_cgus = if tcx.sess.threads() > 1 {
tcx.sess.time("compile_first_CGU_batch", || {
// Try to find one CGU to compile per thread.
let cgus: Vec<_> = cgu_reuse
@ -702,12 +699,10 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
// Compile the found CGUs in parallel.
let start_time = Instant::now();
let pre_compiled_cgus = par_iter(cgus)
.map(|(i, _)| {
let module = backend.compile_codegen_unit(tcx, codegen_units[i].name());
(i, module)
})
.collect();
let pre_compiled_cgus = par_map(cgus, |(i, _)| {
let module = backend.compile_codegen_unit(tcx, codegen_units[i].name());
(i, module)
});
total_codegen_time += start_time.elapsed();


@ -22,6 +22,7 @@ use rustc_target::spec::Target;
pub use rustc_data_structures::sync::MetadataRef;
use rustc_data_structures::sync::{DynSend, DynSync};
use std::any::Any;
pub trait BackendTypes {
@ -117,7 +118,9 @@ pub trait CodegenBackend {
) -> Result<(), ErrorGuaranteed>;
}
pub trait ExtraBackendMethods: CodegenBackend + WriteBackendMethods + Sized + Send + Sync {
pub trait ExtraBackendMethods:
CodegenBackend + WriteBackendMethods + Sized + Send + Sync + DynSend + DynSync
{
fn codegen_allocator<'tcx>(
&self,
tcx: TyCtxt<'tcx>,


@ -16,6 +16,7 @@ libc = "0.2"
measureme = "10.0.0"
rustc-rayon-core = { version = "0.5.0", optional = true }
rustc-rayon = { version = "0.5.0", optional = true }
rustc_arena = { path = "../rustc_arena" }
rustc_graphviz = { path = "../rustc_graphviz" }
rustc-hash = "1.1.0"
rustc_index = { path = "../rustc_index", package = "rustc_index" }


@ -26,6 +26,7 @@
#![feature(test)]
#![feature(thread_id_value)]
#![feature(vec_into_raw_parts)]
#![feature(allocator_api)]
#![feature(get_mut_unchecked)]
#![feature(lint_reasons)]
#![feature(unwrap_infallible)]
@ -77,6 +78,7 @@ pub mod sorted_map;
pub mod stable_hasher;
mod atomic_ref;
pub mod fingerprint;
pub mod marker;
pub mod profiling;
pub mod sharded;
pub mod stack;


@ -0,0 +1,257 @@
cfg_if!(
if #[cfg(not(parallel_compiler))] {
pub auto trait DynSend {}
pub auto trait DynSync {}
impl<T> DynSend for T {}
impl<T> DynSync for T {}
} else {
#[rustc_on_unimplemented(
message = "`{Self}` doesn't implement `DynSend`. \
Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Send`"
)]
// This is an auto trait for types which can be sent across threads if `sync::is_dyn_thread_safe()`
// is true. These types can be wrapped in a `FromDyn` to get a `Send` type. Wrapping a
// `Send` type in `IntoDynSyncSend` will create a `DynSend` type.
pub unsafe auto trait DynSend {}
#[rustc_on_unimplemented(
message = "`{Self}` doesn't implement `DynSync`. \
Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Sync`"
)]
// This is an auto trait for types which can be shared across threads if `sync::is_dyn_thread_safe()`
// is true. These types can be wrapped in a `FromDyn` to get a `Sync` type. Wrapping a
// `Sync` type in `IntoDynSyncSend` will create a `DynSync` type.
pub unsafe auto trait DynSync {}
// Same with `Sync` and `Send`.
unsafe impl<T: DynSync + ?Sized> DynSend for &T {}
macro_rules! impls_dyn_send_neg {
($([$t1: ty $(where $($generics1: tt)*)?])*) => {
$(impl$(<$($generics1)*>)? !DynSend for $t1 {})*
};
}
// Consistent with `std`
impls_dyn_send_neg!(
[std::env::Args]
[std::env::ArgsOs]
[*const T where T: ?Sized]
[*mut T where T: ?Sized]
[std::ptr::NonNull<T> where T: ?Sized]
[std::rc::Rc<T> where T: ?Sized]
[std::rc::Weak<T> where T: ?Sized]
[std::sync::MutexGuard<'_, T> where T: ?Sized]
[std::sync::RwLockReadGuard<'_, T> where T: ?Sized]
[std::sync::RwLockWriteGuard<'_, T> where T: ?Sized]
[std::io::StdoutLock<'_>]
[std::io::StderrLock<'_>]
);
cfg_if!(
// Consistent with `std`
// `os_imp::Env` is `!Send` in these platforms
if #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] {
impl !DynSend for std::env::VarsOs {}
}
);
macro_rules! already_send {
($([$ty: ty])*) => {
$(unsafe impl DynSend for $ty where $ty: Send {})*
};
}
// These structures are already `Send`.
already_send!(
[std::backtrace::Backtrace]
[std::io::Stdout]
[std::io::Stderr]
[std::io::Error]
[std::fs::File]
[rustc_arena::DroplessArena]
[crate::memmap::Mmap]
[crate::profiling::SelfProfiler]
[crate::owned_slice::OwnedSlice]
);
macro_rules! impl_dyn_send {
($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
$(unsafe impl<$($generics2)*> DynSend for $ty {})*
};
}
impl_dyn_send!(
[std::sync::atomic::AtomicPtr<T> where T]
[std::sync::Mutex<T> where T: ?Sized + DynSend]
[std::sync::mpsc::Sender<T> where T: DynSend]
[std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
[std::sync::LazyLock<T, F> where T: DynSend, F: DynSend]
[std::collections::HashSet<K, S> where K: DynSend, S: DynSend]
[std::collections::HashMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
[std::collections::BTreeMap<K, V, A> where K: DynSend, V: DynSend, A: std::alloc::Allocator + Clone + DynSend]
[Vec<T, A> where T: DynSend, A: std::alloc::Allocator + DynSend]
[Box<T, A> where T: ?Sized + DynSend, A: std::alloc::Allocator + DynSend]
[crate::sync::Lock<T> where T: DynSend]
[crate::sync::RwLock<T> where T: DynSend]
[crate::tagged_ptr::CopyTaggedPtr<P, T, CP> where P: Send + crate::tagged_ptr::Pointer, T: Send + crate::tagged_ptr::Tag, const CP: bool]
[rustc_arena::TypedArena<T> where T: DynSend]
[indexmap::IndexSet<V, S> where V: DynSend, S: DynSend]
[indexmap::IndexMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
[thin_vec::ThinVec<T> where T: DynSend]
[smallvec::SmallVec<A> where A: smallvec::Array + DynSend]
);
macro_rules! impls_dyn_sync_neg {
($([$t1: ty $(where $($generics1: tt)*)?])*) => {
$(impl$(<$($generics1)*>)? !DynSync for $t1 {})*
};
}
// Consistent with `std`
impls_dyn_sync_neg!(
[std::env::Args]
[std::env::ArgsOs]
[*const T where T: ?Sized]
[*mut T where T: ?Sized]
[std::cell::Cell<T> where T: ?Sized]
[std::cell::RefCell<T> where T: ?Sized]
[std::cell::UnsafeCell<T> where T: ?Sized]
[std::ptr::NonNull<T> where T: ?Sized]
[std::rc::Rc<T> where T: ?Sized]
[std::rc::Weak<T> where T: ?Sized]
[std::cell::OnceCell<T> where T]
[std::sync::mpsc::Receiver<T> where T]
[std::sync::mpsc::Sender<T> where T]
);
cfg_if!(
// Consistent with `std`
// `os_imp::Env` is `!Sync` in these platforms
if #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] {
impl !DynSync for std::env::VarsOs {}
}
);
macro_rules! already_sync {
($([$ty: ty])*) => {
$(unsafe impl DynSync for $ty where $ty: Sync {})*
};
}
// These structures are already `Sync`.
already_sync!(
[std::sync::atomic::AtomicBool]
[std::sync::atomic::AtomicUsize]
[std::sync::atomic::AtomicU8]
[std::sync::atomic::AtomicU32]
[std::sync::atomic::AtomicU64]
[std::backtrace::Backtrace]
[std::io::Error]
[std::fs::File]
[jobserver_crate::Client]
[crate::memmap::Mmap]
[crate::profiling::SelfProfiler]
[crate::owned_slice::OwnedSlice]
);
macro_rules! impl_dyn_sync {
($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
$(unsafe impl<$($generics2)*> DynSync for $ty {})*
};
}
impl_dyn_sync!(
[std::sync::atomic::AtomicPtr<T> where T]
[std::sync::OnceLock<T> where T: DynSend + DynSync]
[std::sync::Mutex<T> where T: ?Sized + DynSend]
[std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
[std::sync::LazyLock<T, F> where T: DynSend + DynSync, F: DynSend]
[std::collections::HashSet<K, S> where K: DynSync, S: DynSync]
[std::collections::HashMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
[std::collections::BTreeMap<K, V, A> where K: DynSync, V: DynSync, A: std::alloc::Allocator + Clone + DynSync]
[Vec<T, A> where T: DynSync, A: std::alloc::Allocator + DynSync]
[Box<T, A> where T: ?Sized + DynSync, A: std::alloc::Allocator + DynSync]
[crate::sync::Lock<T> where T: DynSend]
[crate::sync::RwLock<T> where T: DynSend + DynSync]
[crate::sync::OneThread<T> where T]
[crate::sync::WorkerLocal<T> where T: DynSend]
[crate::intern::Interned<'a, T> where 'a, T: DynSync]
[crate::tagged_ptr::CopyTaggedPtr<P, T, CP> where P: Sync + crate::tagged_ptr::Pointer, T: Sync + crate::tagged_ptr::Tag, const CP: bool]
[parking_lot::lock_api::Mutex<R, T> where R: DynSync, T: ?Sized + DynSend]
[parking_lot::lock_api::RwLock<R, T> where R: DynSync, T: ?Sized + DynSend + DynSync]
[indexmap::IndexSet<V, S> where V: DynSync, S: DynSync]
[indexmap::IndexMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
[smallvec::SmallVec<A> where A: smallvec::Array + DynSync]
[thin_vec::ThinVec<T> where T: DynSync]
);
}
);
pub fn assert_dyn_sync<T: ?Sized + DynSync>() {}
pub fn assert_dyn_send<T: ?Sized + DynSend>() {}
pub fn assert_dyn_send_val<T: ?Sized + DynSend>(_t: &T) {}
pub fn assert_dyn_send_sync_val<T: ?Sized + DynSync + DynSend>(_t: &T) {}
#[derive(Copy, Clone)]
pub struct FromDyn<T>(T);
impl<T> FromDyn<T> {
#[inline(always)]
pub fn from(val: T) -> Self {
// Check that `sync::is_dyn_thread_safe()` is true on creation so we can
// implement `Send` and `Sync` for this structure when `T`
// implements `DynSend` and `DynSync` respectively.
#[cfg(parallel_compiler)]
assert!(crate::sync::is_dyn_thread_safe());
FromDyn(val)
}
#[inline(always)]
pub fn into_inner(self) -> T {
self.0
}
}
// `FromDyn` is `Send` if `T` is `DynSend`, since it ensures that sync::is_dyn_thread_safe() is true.
#[cfg(parallel_compiler)]
unsafe impl<T: DynSend> Send for FromDyn<T> {}
// `FromDyn` is `Sync` if `T` is `DynSync`, since it ensures that sync::is_dyn_thread_safe() is true.
#[cfg(parallel_compiler)]
unsafe impl<T: DynSync> Sync for FromDyn<T> {}
impl<T> std::ops::Deref for FromDyn<T> {
type Target = T;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
// A wrapper to convert a struct that is already a `Send` or `Sync` into
// an instance of `DynSend` and `DynSync`, since the compiler cannot infer
// it automatically in some cases. (e.g. Box<dyn Send / Sync>)
#[derive(Copy, Clone)]
pub struct IntoDynSyncSend<T: ?Sized>(pub T);
#[cfg(parallel_compiler)]
unsafe impl<T: ?Sized + Send> DynSend for IntoDynSyncSend<T> {}
#[cfg(parallel_compiler)]
unsafe impl<T: ?Sized + Sync> DynSync for IntoDynSyncSend<T> {}
impl<T> std::ops::Deref for IntoDynSyncSend<T> {
type Target = T;
#[inline(always)]
fn deref(&self) -> &T {
&self.0
}
}
impl<T> std::ops::DerefMut for IntoDynSyncSend<T> {
#[inline(always)]
fn deref_mut(&mut self) -> &mut T {
&mut self.0
}
}
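
The other direction is `IntoDynSyncSend`: something that is already `Send`/`Sync` but whose type (typically a trait object) gives the auto traits nothing to recurse into. The following self-contained toy model is an editor's sketch, not part of this commit; it uses nightly features (as rustc itself does), `Emitter` and `StderrEmitter` are stand-ins rather than the real `rustc_errors` types, and the opt-out list is trimmed to a single entry.

```rust
#![feature(auto_traits, negative_impls)]
use std::rc::Rc;

// Toy model of the marker auto trait.
unsafe auto trait DynSend {}

// Opt-out, mirroring `impls_dyn_send_neg!` above (trimmed to one entry).
impl<T: ?Sized> !DynSend for Rc<T> {}

// Anything that is already `Send` can be vouched for by wrapping it.
struct IntoDynSyncSend<T: ?Sized>(pub T);
unsafe impl<T: ?Sized + Send> DynSend for IntoDynSyncSend<T> {}

// Stand-in trait object, playing the role of `dyn Emitter + Send` stored in
// `rustc_errors::HandlerInner` (see the rustc_errors hunk in this commit).
trait Emitter {
    fn emit(&mut self, msg: &str);
}

struct StderrEmitter;
impl Emitter for StderrEmitter {
    fn emit(&mut self, msg: &str) {
        eprintln!("error: {msg}");
    }
}

struct HandlerInner {
    // `dyn Emitter + Send` carries no structural information, so the auto
    // trait cannot be derived for it; the wrapper supplies the impl.
    emitter: IntoDynSyncSend<Box<dyn Emitter + Send>>,
}

fn assert_dyn_send<T: ?Sized + DynSend>() {}

fn main() {
    let emitter: Box<dyn Emitter + Send> = Box::new(StderrEmitter);
    let mut handler = HandlerInner { emitter: IntoDynSyncSend(emitter) };
    handler.emitter.0.emit("demo diagnostic");

    assert_dyn_send::<HandlerInner>(); // holds thanks to the wrapper
    // assert_dyn_send::<Rc<u8>>();    // would not compile: explicitly opted out
}
```

This is the same shape as the `HandlerInner::emitter` and `SourceMap::file_loader` changes further down.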


@ -69,6 +69,6 @@ fn drop_drops() {
#[test]
fn send_sync() {
crate::sync::assert_send::<OwnedSlice>();
crate::sync::assert_sync::<OwnedSlice>();
crate::sync::assert_dyn_send::<OwnedSlice>();
crate::sync::assert_dyn_sync::<OwnedSlice>();
}


@ -39,6 +39,7 @@
//!
//! [^2] `MTLockRef` is a typedef.
pub use crate::marker::*;
use crate::owned_slice::OwnedSlice;
use std::collections::HashMap;
use std::hash::{BuildHasher, Hash};
@ -55,6 +56,42 @@ pub use vec::{AppendOnlyIndexVec, AppendOnlyVec};
mod vec;
mod mode {
use super::Ordering;
use std::sync::atomic::AtomicU8;
const UNINITIALIZED: u8 = 0;
const DYN_NOT_THREAD_SAFE: u8 = 1;
const DYN_THREAD_SAFE: u8 = 2;
static DYN_THREAD_SAFE_MODE: AtomicU8 = AtomicU8::new(UNINITIALIZED);
// Whether thread safety is enabled (due to running under multiple threads).
#[inline]
pub fn is_dyn_thread_safe() -> bool {
match DYN_THREAD_SAFE_MODE.load(Ordering::Relaxed) {
DYN_NOT_THREAD_SAFE => false,
DYN_THREAD_SAFE => true,
_ => panic!("uninitialized dyn_thread_safe mode!"),
}
}
// Only set by the `-Z threads` compile option
pub fn set_dyn_thread_safe_mode(mode: bool) {
let set: u8 = if mode { DYN_THREAD_SAFE } else { DYN_NOT_THREAD_SAFE };
let previous = DYN_THREAD_SAFE_MODE.compare_exchange(
UNINITIALIZED,
set,
Ordering::Relaxed,
Ordering::Relaxed,
);
// Check that the mode was either uninitialized or was already set to the requested mode.
assert!(previous.is_ok() || previous == Err(set));
}
}
pub use mode::{is_dyn_thread_safe, set_dyn_thread_safe_mode};
cfg_if! {
if #[cfg(not(parallel_compiler))] {
pub unsafe auto trait Send {}
@ -149,7 +186,7 @@ cfg_if! {
#[macro_export]
macro_rules! parallel {
($($blocks:tt),*) => {
($($blocks:block),*) => {
// We catch panics here ensuring that all the blocks execute.
// This makes behavior consistent with the parallel compiler.
let mut panic = None;
@ -168,12 +205,6 @@ cfg_if! {
}
}
pub use Iterator as ParallelIterator;
pub fn par_iter<T: IntoIterator>(t: T) -> T::IntoIter {
t.into_iter()
}
pub fn par_for_each_in<T: IntoIterator>(t: T, mut for_each: impl FnMut(T::Item) + Sync + Send) {
// We catch panics here ensuring that all the loop iterations execute.
// This makes behavior consistent with the parallel compiler.
@ -190,6 +221,29 @@ cfg_if! {
}
}
pub fn par_map<T: IntoIterator, R, C: FromIterator<R>>(
t: T,
mut map: impl FnMut(<<T as IntoIterator>::IntoIter as Iterator>::Item) -> R,
) -> C {
// We catch panics here ensuring that all the loop iterations execute.
let mut panic = None;
let r = t.into_iter().filter_map(|i| {
match catch_unwind(AssertUnwindSafe(|| map(i))) {
Ok(r) => Some(r),
Err(p) => {
if panic.is_none() {
panic = Some(p);
}
None
}
}
}).collect();
if let Some(panic) = panic {
resume_unwind(panic);
}
r
}
pub type MetadataRef = OwnedSlice;
pub use std::rc::Rc as Lrc;
@ -302,46 +356,165 @@ cfg_if! {
use parking_lot::RwLock as InnerRwLock;
use std::thread;
pub use rayon::{join, scope};
#[inline]
pub fn join<A, B, RA: DynSend, RB: DynSend>(oper_a: A, oper_b: B) -> (RA, RB)
where
A: FnOnce() -> RA + DynSend,
B: FnOnce() -> RB + DynSend,
{
if mode::is_dyn_thread_safe() {
let oper_a = FromDyn::from(oper_a);
let oper_b = FromDyn::from(oper_b);
let (a, b) = rayon::join(move || FromDyn::from(oper_a.into_inner()()), move || FromDyn::from(oper_b.into_inner()()));
(a.into_inner(), b.into_inner())
} else {
(oper_a(), oper_b())
}
}
// This function only works when `mode::is_dyn_thread_safe()`.
pub fn scope<'scope, OP, R>(op: OP) -> R
where
OP: FnOnce(&rayon::Scope<'scope>) -> R + DynSend,
R: DynSend,
{
let op = FromDyn::from(op);
rayon::scope(|s| FromDyn::from(op.into_inner()(s))).into_inner()
}
/// Runs a list of blocks in parallel. The first block is executed immediately on
/// the current thread. Use that for the longest running block.
#[macro_export]
macro_rules! parallel {
(impl $fblock:tt [$($c:tt,)*] [$block:tt $(, $rest:tt)*]) => {
(impl $fblock:block [$($c:expr,)*] [$block:expr $(, $rest:expr)*]) => {
parallel!(impl $fblock [$block, $($c,)*] [$($rest),*])
};
(impl $fblock:tt [$($blocks:tt,)*] []) => {
(impl $fblock:block [$($blocks:expr,)*] []) => {
::rustc_data_structures::sync::scope(|s| {
$(let block = rustc_data_structures::sync::FromDyn::from(|| $blocks);
s.spawn(move |_| block.into_inner()());)*
(|| $fblock)();
});
};
($fblock:block, $($blocks:block),*) => {
if rustc_data_structures::sync::is_dyn_thread_safe() {
// Reverse the order of the later blocks since Rayon executes them in reverse order
// when using a single thread. This ensures the execution order matches that
// of a single threaded rustc.
parallel!(impl $fblock [] [$($blocks),*]);
} else {
// We catch panics here ensuring that all the blocks execute.
// This makes behavior consistent with the parallel compiler.
let mut panic = None;
if let Err(p) = ::std::panic::catch_unwind(
::std::panic::AssertUnwindSafe(|| $fblock)
) {
if panic.is_none() {
panic = Some(p);
}
}
$(
s.spawn(|_| $blocks);
if let Err(p) = ::std::panic::catch_unwind(
::std::panic::AssertUnwindSafe(|| $blocks)
) {
if panic.is_none() {
panic = Some(p);
}
}
)*
$fblock;
})
};
($fblock:tt, $($blocks:tt),*) => {
// Reverse the order of the later blocks since Rayon executes them in reverse order
// when using a single thread. This ensures the execution order matches that
// of a single threaded rustc
parallel!(impl $fblock [] [$($blocks),*]);
if let Some(panic) = panic {
::std::panic::resume_unwind(panic);
}
}
};
}
pub use rayon::iter::ParallelIterator;
use rayon::iter::IntoParallelIterator;
use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelIterator};
pub fn par_iter<T: IntoParallelIterator>(t: T) -> T::Iter {
t.into_par_iter()
}
pub fn par_for_each_in<T: IntoParallelIterator>(
pub fn par_for_each_in<I, T: IntoIterator<Item = I> + IntoParallelIterator<Item = I>>(
t: T,
for_each: impl Fn(T::Item) + Sync + Send,
for_each: impl Fn(I) + DynSync + DynSend
) {
let ps: Vec<_> = t.into_par_iter().map(|i| catch_unwind(AssertUnwindSafe(|| for_each(i)))).collect();
ps.into_iter().for_each(|p| if let Err(panic) = p {
resume_unwind(panic)
});
if mode::is_dyn_thread_safe() {
let for_each = FromDyn::from(for_each);
let panic: Lock<Option<_>> = Lock::new(None);
t.into_par_iter().for_each(|i| if let Err(p) = catch_unwind(AssertUnwindSafe(|| for_each(i))) {
let mut l = panic.lock();
if l.is_none() {
*l = Some(p)
}
});
if let Some(panic) = panic.into_inner() {
resume_unwind(panic);
}
} else {
// We catch panics here ensuring that all the loop iterations execute.
// This makes behavior consistent with the parallel compiler.
let mut panic = None;
t.into_iter().for_each(|i| {
if let Err(p) = catch_unwind(AssertUnwindSafe(|| for_each(i))) {
if panic.is_none() {
panic = Some(p);
}
}
});
if let Some(panic) = panic {
resume_unwind(panic);
}
}
}
pub fn par_map<
I,
T: IntoIterator<Item = I> + IntoParallelIterator<Item = I>,
R: std::marker::Send,
C: FromIterator<R> + FromParallelIterator<R>
>(
t: T,
map: impl Fn(I) -> R + DynSync + DynSend
) -> C {
if mode::is_dyn_thread_safe() {
let panic: Lock<Option<_>> = Lock::new(None);
let map = FromDyn::from(map);
// We catch panics here ensuring that all the loop iterations execute.
let r = t.into_par_iter().filter_map(|i| {
match catch_unwind(AssertUnwindSafe(|| map(i))) {
Ok(r) => Some(r),
Err(p) => {
let mut l = panic.lock();
if l.is_none() {
*l = Some(p);
}
None
},
}
}).collect();
if let Some(panic) = panic.into_inner() {
resume_unwind(panic);
}
r
} else {
// We catch panics here ensuring that all the loop iterations execute.
let mut panic = None;
let r = t.into_iter().filter_map(|i| {
match catch_unwind(AssertUnwindSafe(|| map(i))) {
Ok(r) => Some(r),
Err(p) => {
if panic.is_none() {
panic = Some(p);
}
None
}
}
}).collect();
if let Some(panic) = panic {
resume_unwind(panic);
}
r
}
}
pub type MetadataRef = OwnedSlice;
@ -352,11 +525,6 @@ cfg_if! {
}
}
pub fn assert_sync<T: ?Sized + Sync>() {}
pub fn assert_send<T: ?Sized + Send>() {}
pub fn assert_send_val<T: ?Sized + Send>(_t: &T) {}
pub fn assert_send_sync_val<T: ?Sized + Sync + Send>(_t: &T) {}
#[derive(Default)]
#[cfg_attr(parallel_compiler, repr(align(64)))]
pub struct CacheAligned<T>(pub T);
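
To make the new dual-mode helpers easier to follow, here is a hedged standalone sketch of the `par_for_each_in` shape. It is an editor's illustration, not rustc's code: it depends on the upstream `rayon` crate rather than `rustc-rayon`, uses plain `Send`/`Sync` bounds instead of `DynSend`/`DynSync`, and reads a made-up `PARALLEL` environment variable where rustc consults `sync::is_dyn_thread_safe()`.

```rust
use std::panic::{catch_unwind, resume_unwind, AssertUnwindSafe};
use std::sync::Mutex;

use rayon::iter::{IntoParallelIterator, ParallelIterator};

// Stand-in for `sync::is_dyn_thread_safe()`, i.e. `-Z threads > 1`.
fn is_dyn_thread_safe() -> bool {
    std::env::var_os("PARALLEL").is_some()
}

fn par_for_each_in<I, T>(t: T, for_each: impl Fn(I) + Sync + Send)
where
    T: IntoIterator<Item = I> + IntoParallelIterator<Item = I>,
    I: Send,
{
    if is_dyn_thread_safe() {
        // Parallel path: run on the rayon pool, remember only the first
        // panic, and resume it after every item has had a chance to run.
        let panic = Mutex::new(None);
        t.into_par_iter().for_each(|i| {
            if let Err(p) = catch_unwind(AssertUnwindSafe(|| for_each(i))) {
                let mut first = panic.lock().unwrap();
                if first.is_none() {
                    *first = Some(p);
                }
            }
        });
        if let Some(p) = panic.into_inner().unwrap() {
            resume_unwind(p);
        }
    } else {
        // Sequential path: same panic policy, no thread pool involved.
        let mut panic = None;
        for i in t {
            if let Err(p) = catch_unwind(AssertUnwindSafe(|| for_each(i))) {
                if panic.is_none() {
                    panic = Some(p);
                }
            }
        }
        if let Some(p) = panic {
            resume_unwind(p);
        }
    }
}

fn main() {
    par_for_each_in(vec![1, 2, 3, 4], |n| println!("processed {n}"));
}
```

`par_map` follows the same pattern with `filter_map`/`collect`, and `join`/`scope` wrap their closures in `FromDyn` before handing them to rayon.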


@ -256,6 +256,9 @@ fn run_compiler(
let sopts = config::build_session_options(&matches);
// Set parallel mode before thread pool creation, which will create `Lock`s.
interface::set_thread_safe_mode(&sopts.unstable_opts);
if let Some(ref code) = matches.opt_str("explain") {
handle_explain(diagnostics_registry(), code, sopts.error_format);
return Ok(());


@ -11,7 +11,7 @@ extern crate tracing;
use fluent_bundle::FluentResource;
use fluent_syntax::parser::ParserError;
use icu_provider_adapters::fallback::{LocaleFallbackProvider, LocaleFallbacker};
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::sync::{IntoDynSyncSend, Lrc};
use rustc_fluent_macro::fluent_messages;
use rustc_macros::{Decodable, Encodable};
use rustc_span::Span;
@ -37,16 +37,17 @@ pub use unic_langid::{langid, LanguageIdentifier};
fluent_messages! { "../messages.ftl" }
pub type FluentBundle = fluent_bundle::bundle::FluentBundle<FluentResource, IntlLangMemoizer>;
#[cfg(parallel_compiler)]
fn new_bundle(locales: Vec<LanguageIdentifier>) -> FluentBundle {
FluentBundle::new_concurrent(locales)
}
pub type FluentBundle =
IntoDynSyncSend<fluent_bundle::bundle::FluentBundle<FluentResource, IntlLangMemoizer>>;
#[cfg(not(parallel_compiler))]
fn new_bundle(locales: Vec<LanguageIdentifier>) -> FluentBundle {
FluentBundle::new(locales)
IntoDynSyncSend(fluent_bundle::bundle::FluentBundle::new(locales))
}
#[cfg(parallel_compiler)]
fn new_bundle(locales: Vec<LanguageIdentifier>) -> FluentBundle {
IntoDynSyncSend(fluent_bundle::bundle::FluentBundle::new_concurrent(locales))
}
#[derive(Debug)]


@ -32,7 +32,7 @@ use emitter::{is_case_difference, Emitter, EmitterWriter};
use registry::Registry;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet};
use rustc_data_structures::stable_hasher::{Hash128, StableHasher};
use rustc_data_structures::sync::{self, Lock, Lrc};
use rustc_data_structures::sync::{self, IntoDynSyncSend, Lock, Lrc};
use rustc_data_structures::AtomicRef;
pub use rustc_error_messages::{
fallback_fluent_bundle, fluent_bundle, DelayDm, DiagnosticMessage, FluentBundle,
@ -409,7 +409,7 @@ struct HandlerInner {
err_count: usize,
warn_count: usize,
deduplicated_err_count: usize,
emitter: Box<dyn Emitter + sync::Send>,
emitter: IntoDynSyncSend<Box<dyn Emitter + sync::Send>>,
delayed_span_bugs: Vec<DelayedDiagnostic>,
delayed_good_path_bugs: Vec<DelayedDiagnostic>,
/// This flag indicates that an expected diagnostic was emitted and suppressed.
@ -605,7 +605,7 @@ impl Handler {
warn_count: 0,
deduplicated_err_count: 0,
deduplicated_warn_count: 0,
emitter,
emitter: IntoDynSyncSend(emitter),
delayed_span_bugs: Vec::new(),
delayed_good_path_bugs: Vec::new(),
suppressed_expected_diag: false,


@ -2,7 +2,7 @@ use crate::error::{TranslateError, TranslateErrorKind};
use crate::fluent_bundle::*;
use crate::translation::Translate;
use crate::FluentBundle;
use rustc_data_structures::sync::Lrc;
use rustc_data_structures::sync::{IntoDynSyncSend, Lrc};
use rustc_error_messages::fluent_bundle::resolver::errors::{ReferenceKind, ResolverError};
use rustc_error_messages::langid;
use rustc_error_messages::DiagnosticMessage;
@ -27,10 +27,14 @@ fn make_dummy(ftl: &'static str) -> Dummy {
let langid_en = langid!("en-US");
#[cfg(parallel_compiler)]
let mut bundle = FluentBundle::new_concurrent(vec![langid_en]);
let mut bundle: FluentBundle =
IntoDynSyncSend(crate::fluent_bundle::bundle::FluentBundle::new_concurrent(vec![
langid_en,
]));
#[cfg(not(parallel_compiler))]
let mut bundle = FluentBundle::new(vec![langid_en]);
let mut bundle: FluentBundle =
IntoDynSyncSend(crate::fluent_bundle::bundle::FluentBundle::new(vec![langid_en]));
bundle.add_resource(resource).expect("Failed to add FTL resources to the bundle.");


@ -653,13 +653,13 @@ pub enum SyntaxExtensionKind {
/// A token-based function-like macro.
Bang(
/// An expander with signature TokenStream -> TokenStream.
Box<dyn BangProcMacro + sync::Sync + sync::Send>,
Box<dyn BangProcMacro + sync::DynSync + sync::DynSend>,
),
/// An AST-based function-like macro.
LegacyBang(
/// An expander with signature TokenStream -> AST.
Box<dyn TTMacroExpander + sync::Sync + sync::Send>,
Box<dyn TTMacroExpander + sync::DynSync + sync::DynSend>,
),
/// A token-based attribute macro.
@ -667,7 +667,7 @@ pub enum SyntaxExtensionKind {
/// An expander with signature (TokenStream, TokenStream) -> TokenStream.
/// The first TokenStream is the attribute itself, the second is the annotated item.
/// The produced TokenStream replaces the input TokenStream.
Box<dyn AttrProcMacro + sync::Sync + sync::Send>,
Box<dyn AttrProcMacro + sync::DynSync + sync::DynSend>,
),
/// An AST-based attribute macro.
@ -675,7 +675,7 @@ pub enum SyntaxExtensionKind {
/// An expander with signature (AST, AST) -> AST.
/// The first AST fragment is the attribute itself, the second is the annotated item.
/// The produced AST fragment replaces the input AST fragment.
Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
Box<dyn MultiItemModifier + sync::DynSync + sync::DynSend>,
),
/// A trivial attribute "macro" that does nothing,
@ -692,14 +692,14 @@ pub enum SyntaxExtensionKind {
/// is handled identically to `LegacyDerive`. It should be migrated to
/// a token-based representation like `Bang` and `Attr`, instead of
/// using `MultiItemModifier`.
Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
Box<dyn MultiItemModifier + sync::DynSync + sync::DynSend>,
),
/// An AST-based derive macro.
LegacyDerive(
/// An expander with signature AST -> AST.
/// The produced AST fragment is appended to the input AST fragment.
Box<dyn MultiItemModifier + sync::Sync + sync::Send>,
Box<dyn MultiItemModifier + sync::DynSync + sync::DynSend>,
),
}


@ -60,6 +60,11 @@ impl Compiler {
}
}
#[allow(rustc::bad_opt_access)]
pub fn set_thread_safe_mode(sopts: &config::UnstableOptions) {
rustc_data_structures::sync::set_dyn_thread_safe_mode(sopts.threads > 1);
}
/// Converts strings provided as `--cfg [cfgspec]` into a `crate_cfg`.
pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> FxHashSet<(String, Option<String>)> {
rustc_span::create_default_session_if_not_set_then(move |_| {


@ -49,9 +49,9 @@ use std::cell::Cell;
use std::iter;
use std::slice;
type EarlyLintPassFactory = dyn Fn() -> EarlyLintPassObject + sync::Send + sync::Sync;
type EarlyLintPassFactory = dyn Fn() -> EarlyLintPassObject + sync::DynSend + sync::DynSync;
type LateLintPassFactory =
dyn for<'tcx> Fn(TyCtxt<'tcx>) -> LateLintPassObject<'tcx> + sync::Send + sync::Sync;
dyn for<'tcx> Fn(TyCtxt<'tcx>) -> LateLintPassObject<'tcx> + sync::DynSend + sync::DynSync;
/// Information about the registered lints.
///
@ -169,7 +169,7 @@ impl LintStore {
pub fn register_early_pass(
&mut self,
pass: impl Fn() -> EarlyLintPassObject + 'static + sync::Send + sync::Sync,
pass: impl Fn() -> EarlyLintPassObject + 'static + sync::DynSend + sync::DynSync,
) {
self.early_passes.push(Box::new(pass));
}
@ -182,7 +182,7 @@ impl LintStore {
/// * See [rust-clippy#5518](https://github.com/rust-lang/rust-clippy/pull/5518)
pub fn register_pre_expansion_pass(
&mut self,
pass: impl Fn() -> EarlyLintPassObject + 'static + sync::Send + sync::Sync,
pass: impl Fn() -> EarlyLintPassObject + 'static + sync::DynSend + sync::DynSync,
) {
self.pre_expansion_passes.push(Box::new(pass));
}
@ -191,8 +191,8 @@ impl LintStore {
&mut self,
pass: impl for<'tcx> Fn(TyCtxt<'tcx>) -> LateLintPassObject<'tcx>
+ 'static
+ sync::Send
+ sync::Sync,
+ sync::DynSend
+ sync::DynSync,
) {
self.late_passes.push(Box::new(pass));
}
@ -201,8 +201,8 @@ impl LintStore {
&mut self,
pass: impl for<'tcx> Fn(TyCtxt<'tcx>) -> LateLintPassObject<'tcx>
+ 'static
+ sync::Send
+ sync::Sync,
+ sync::DynSend
+ sync::DynSync,
) {
self.late_module_passes.push(Box::new(pass));
}


@ -16,7 +16,7 @@
use crate::{passes::LateLintPassObject, LateContext, LateLintPass, LintStore};
use rustc_ast as ast;
use rustc_data_structures::sync::join;
use rustc_data_structures::sync::{join, DynSend};
use rustc_hir as hir;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit as hir_visit;
@ -429,7 +429,7 @@ fn late_lint_crate_inner<'tcx, T: LateLintPass<'tcx>>(
/// Performs lint checking on a crate.
pub fn check_crate<'tcx, T: LateLintPass<'tcx> + 'tcx>(
tcx: TyCtxt<'tcx>,
builtin_lints: impl FnOnce() -> T + Send,
builtin_lints: impl FnOnce() -> T + Send + DynSend,
) {
join(
|| {


@ -8,7 +8,7 @@ use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
use rustc_data_structures::memmap::{Mmap, MmapMut};
use rustc_data_structures::stable_hasher::{Hash128, HashStable, StableHasher};
use rustc_data_structures::sync::{join, par_iter, Lrc, ParallelIterator};
use rustc_data_structures::sync::{join, par_for_each_in, Lrc};
use rustc_data_structures::temp_dir::MaybeTempDir;
use rustc_hir as hir;
use rustc_hir::def::DefKind;
@ -2131,7 +2131,7 @@ fn prefetch_mir(tcx: TyCtxt<'_>) {
return;
}
par_iter(tcx.mir_keys(())).for_each(|&def_id| {
par_for_each_in(tcx.mir_keys(()), |&def_id| {
let (encode_const, encode_opt) = should_encode_mir(tcx, def_id);
if encode_const {


@ -5,7 +5,7 @@ use rustc_ast as ast;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::{par_for_each_in, Send, Sync};
use rustc_data_structures::sync::{par_for_each_in, DynSend, DynSync};
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, LocalDefId, CRATE_DEF_ID, LOCAL_CRATE};
use rustc_hir::definitions::{DefKey, DefPath, DefPathData, DefPathHash};
@ -150,11 +150,6 @@ impl<'hir> Map<'hir> {
self.tcx.hir_module_items(module).items()
}
#[inline]
pub fn par_for_each_item(self, f: impl Fn(ItemId) + Sync + Send) {
par_for_each_in(&self.tcx.hir_crate_items(()).items[..], |id| f(*id));
}
pub fn def_key(self, def_id: LocalDefId) -> DefKey {
// Accessing the DefKey is ok, since it is part of DefPathHash.
self.tcx.definitions_untracked().def_key(def_id)
@ -502,7 +497,7 @@ impl<'hir> Map<'hir> {
}
#[inline]
pub fn par_body_owners(self, f: impl Fn(LocalDefId) + Sync + Send) {
pub fn par_body_owners(self, f: impl Fn(LocalDefId) + DynSend + DynSync) {
par_for_each_in(&self.tcx.hir_crate_items(()).body_owners[..], |&def_id| f(def_id));
}
@ -640,7 +635,7 @@ impl<'hir> Map<'hir> {
}
#[inline]
pub fn par_for_each_module(self, f: impl Fn(LocalDefId) + Sync + Send) {
pub fn par_for_each_module(self, f: impl Fn(LocalDefId) + DynSend + DynSync) {
let crate_items = self.tcx.hir_crate_items(());
par_for_each_in(&crate_items.submodules[..], |module| f(module.def_id))
}


@ -9,7 +9,7 @@ pub mod place;
use crate::ty::query::Providers;
use crate::ty::{EarlyBinder, ImplSubject, TyCtxt};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{par_for_each_in, Send, Sync};
use rustc_data_structures::sync::{par_for_each_in, DynSend, DynSync};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::*;
use rustc_query_system::ich::StableHashingContext;
@ -77,19 +77,19 @@ impl ModuleItems {
self.owners().map(|id| id.def_id)
}
pub fn par_items(&self, f: impl Fn(ItemId) + Send + Sync) {
pub fn par_items(&self, f: impl Fn(ItemId) + DynSend + DynSync) {
par_for_each_in(&self.items[..], |&id| f(id))
}
pub fn par_trait_items(&self, f: impl Fn(TraitItemId) + Send + Sync) {
pub fn par_trait_items(&self, f: impl Fn(TraitItemId) + DynSend + DynSync) {
par_for_each_in(&self.trait_items[..], |&id| f(id))
}
pub fn par_impl_items(&self, f: impl Fn(ImplItemId) + Send + Sync) {
pub fn par_impl_items(&self, f: impl Fn(ImplItemId) + DynSend + DynSync) {
par_for_each_in(&self.impl_items[..], |&id| f(id))
}
pub fn par_foreign_items(&self, f: impl Fn(ForeignItemId) + Send + Sync) {
pub fn par_foreign_items(&self, f: impl Fn(ForeignItemId) + DynSend + DynSync) {
par_for_each_in(&self.foreign_items[..], |&id| f(id))
}
}


@ -496,7 +496,7 @@ pub struct GlobalCtxt<'tcx> {
///
/// FIXME(Centril): consider `dyn LintStoreMarker` once
/// we can upcast to `Any` for some additional type safety.
pub lint_store: Lrc<dyn Any + sync::Sync + sync::Send>,
pub lint_store: Lrc<dyn Any + sync::DynSync + sync::DynSend>,
pub dep_graph: DepGraph,
@ -648,7 +648,7 @@ impl<'tcx> TyCtxt<'tcx> {
/// reference to the context, to allow formatting values that need it.
pub fn create_global_ctxt(
s: &'tcx Session,
lint_store: Lrc<dyn Any + sync::Send + sync::Sync>,
lint_store: Lrc<dyn Any + sync::DynSend + sync::DynSync>,
arena: &'tcx WorkerLocal<Arena<'tcx>>,
hir_arena: &'tcx WorkerLocal<hir::Arena<'tcx>>,
untracked: Untracked,


@ -94,8 +94,8 @@ where
f(None)
} else {
// We could get an `ImplicitCtxt` pointer from another thread.
// Ensure that `ImplicitCtxt` is `Sync`.
sync::assert_sync::<ImplicitCtxt<'_, '_>>();
// Ensure that `ImplicitCtxt` is `DynSync`.
sync::assert_dyn_sync::<ImplicitCtxt<'_, '_>>();
unsafe { f(Some(downcast(context))) }
}


@ -199,6 +199,12 @@ impl<'a, T: Copy> IntoIterator for &'a List<T> {
unsafe impl<T: Sync> Sync for List<T> {}
// We need this since `List` uses extern type `OpaqueListContents`.
#[cfg(parallel_compiler)]
use rustc_data_structures::sync::DynSync;
#[cfg(parallel_compiler)]
unsafe impl<T: DynSync> DynSync for List<T> {}
// Safety:
// Layouts of `Equivalent<T>` and `List<T>` are the same, modulo opaque tail,
// thus aligns of `Equivalent<T>` and `List<T>` must be the same.


@ -207,7 +207,7 @@ pub trait MetadataLoader: std::fmt::Debug {
fn get_dylib_metadata(&self, target: &Target, filename: &Path) -> Result<MetadataRef, String>;
}
pub type MetadataLoaderDyn = dyn MetadataLoader + Send + Sync;
pub type MetadataLoaderDyn = dyn MetadataLoader + Send + Sync + sync::DynSend + sync::DynSync;
/// A store of Rust crates, through which their metadata can be accessed.
///
@ -252,7 +252,7 @@ pub trait CrateStore: std::fmt::Debug {
fn import_source_files(&self, sess: &Session, cnum: CrateNum);
}
pub type CrateStoreDyn = dyn CrateStore + sync::Sync + sync::Send;
pub type CrateStoreDyn = dyn CrateStore + sync::DynSync + sync::DynSend;
pub struct Untracked {
pub cstore: RwLock<Box<CrateStoreDyn>>,


@ -14,7 +14,9 @@ pub use crate::*;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{Hash128, Hash64, StableHasher};
use rustc_data_structures::sync::{AtomicU32, Lrc, MappedReadGuard, ReadGuard, RwLock};
use rustc_data_structures::sync::{
AtomicU32, IntoDynSyncSend, Lrc, MappedReadGuard, ReadGuard, RwLock,
};
use std::cmp;
use std::hash::Hash;
use std::path::{self, Path, PathBuf};
@ -176,7 +178,7 @@ pub struct SourceMap {
used_address_space: AtomicU32,
files: RwLock<SourceMapFiles>,
file_loader: Box<dyn FileLoader + Sync + Send>,
file_loader: IntoDynSyncSend<Box<dyn FileLoader + Sync + Send>>,
// This is used to apply the file path remapping as specified via
// `--remap-path-prefix` to all `SourceFile`s allocated within this `SourceMap`.
path_mapping: FilePathMapping,
@ -202,7 +204,7 @@ impl SourceMap {
SourceMap {
used_address_space: AtomicU32::new(0),
files: Default::default(),
file_loader,
file_loader: IntoDynSyncSend(file_loader),
path_mapping,
hash_kind,
}


@ -739,6 +739,9 @@ fn main_args(at_args: &[String]) -> MainResult {
}
};
// Set parallel mode before error handler creation, which will create `Lock`s.
interface::set_thread_safe_mode(&options.unstable_opts);
let diag = core::new_handler(
options.error_format,
None,