Auto merge of #61150 - Centril:rollup-wmm7qga, r=Centril

Rollup of 13 pull requests

Successful merges:

 - #61026 (Tweak macro parse errors when reaching EOF during macro call parse)
 - #61095 (Update cargo)
 - #61096 (tidy: don't short-circuit on license error)
 - #61107 (Fix a couple docs typos)
 - #61110 (Revert edition-guide toolstate override)
 - #61111 (Fixed type-alias-bounds lint doc)
 - #61113 (Deprecate `FnBox`. `Box<dyn FnOnce()>` can be called directly, since 1.35)
 - #61116 (Remove the incorrect warning from README.md)
 - #61118 (Dont ICE on an attempt to use GAT without feature gate)
 - #61121 (improve debug-printing of scalars)
 - #61125 (Updated my mailmap entry)
 - #61134 (Annotate each `reverse_bits` with `#[must_use]`)
 - #61138 (Move async/await tests to their own folder)

Failed merges:

r? @ghost
bors 2019-05-25 04:10:07 +00:00
commit 315ab95a9c
154 changed files with 465 additions and 187 deletions


@ -6,7 +6,7 @@
#
Aaron Todd <github@opprobrio.us>
Aaron Power <theaaronepower@gmail.com>
Aaron Power <theaaronepower@gmail.com> Erin Power <xampprocky@gmail.com>
Abhishek Chanda <abhishek.becs@gmail.com> Abhishek Chanda <abhishek@cloudscaling.com>
Adolfo Ochagavía <aochagavia92@gmail.com>
Adrien Tétar <adri-from-59@hotmail.fr>


@ -253,7 +253,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cargo"
version = "0.37.0"
version = "0.38.0"
dependencies = [
"atty 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"bufstream 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
@ -261,7 +261,7 @@ dependencies = [
"bytesize 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)",
"core-foundation 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)",
"crates-io 0.25.0",
"crates-io 0.26.0",
"crossbeam-utils 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
"crypto-hash 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"curl 0.4.21 (registry+https://github.com/rust-lang/crates.io-index)",
@ -278,7 +278,7 @@ dependencies = [
"hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"home 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ignore 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
"im-rc 12.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"im-rc 13.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"jobserver 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazycell 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -300,6 +300,7 @@ dependencies = [
"serde_ignored 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
"shell-escape 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"strip-ansi-escapes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"tar 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"termcolor 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
@ -537,7 +538,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "crates-io"
version = "0.25.0"
version = "0.26.0"
dependencies = [
"curl 0.4.21 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1163,10 +1164,11 @@ dependencies = [
[[package]]
name = "im-rc"
version = "12.3.0"
version = "13.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"sized-chunks 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2239,7 +2241,7 @@ dependencies = [
name = "rls"
version = "1.36.0"
dependencies = [
"cargo 0.37.0",
"cargo 0.38.0",
"cargo_metadata 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy_lints 0.0.212",
"crossbeam-channel 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
@ -3263,6 +3265,14 @@ name = "siphasher"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "sized-chunks"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "slab"
version = "0.4.2"
@ -3348,6 +3358,14 @@ name = "string_cache_shared"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "strip-ansi-escapes"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"vte 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "strsim"
version = "0.7.0"
@ -3924,6 +3942,11 @@ name = "utf8-ranges"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "utf8parse"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "vcpkg"
version = "0.2.6"
@ -3954,6 +3977,14 @@ name = "void"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "vte"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "wait-timeout"
version = "0.1.5"
@ -4158,7 +4189,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e"
"checksum if_chain 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4bac95d9aa0624e7b78187d6fb8ab012b41d9f6f54b1bcb61e61c4845f8357ec"
"checksum ignore 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "8dc57fa12805f367736a38541ac1a9fc6a52812a0ca959b1d4d4b640a89eb002"
"checksum im-rc 12.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9460397452f537fd51808056ff209f4c4c4c9d20d42ae952f517708726284972"
"checksum im-rc 13.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0a0197597d095c0d11107975d3175173f810ee572c2501ff4de64f4f3f119806"
"checksum iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe6e417e7d0975db6512b90796e8ce223145ac4e33c377e4a42882a0e88bb08"
"checksum is-match 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7e5b386aef33a1c677be65237cb9d32c3f3ef56bd035949710c4bb13083eb053"
"checksum itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)" = "f58856976b776fedd95533137617a02fb25719f40e7d9b01c7043cd65474f450"
@ -4309,6 +4340,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum shlex 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2"
"checksum signal-hook 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "1f272d1b7586bec132ed427f532dd418d8beca1ca7f2caf7df35569b1415a4b4"
"checksum siphasher 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0df90a788073e8d0235a67e50441d47db7c8ad9debd91cbf43736a2a92d36537"
"checksum sized-chunks 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a2a2eb3fe454976eefb479f78f9b394d34d661b647c6326a3a6e66f68bb12c26"
"checksum slab 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c111b5bd5695e56cffe5129854aa230b39c93a305372fdbb2668ca2394eea9f8"
"checksum smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)" = "b73ea3738b47563803ef814925e69be00799a8c07420be8b996f8e98fb2336db"
"checksum socket2 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "c4d11a52082057d87cb5caa31ad812f4504b97ab44732cd8359df2e9ff9f48e7"
@ -4316,6 +4348,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum string_cache 0.7.3 (registry+https://github.com/rust-lang/crates.io-index)" = "25d70109977172b127fe834e5449e5ab1740b9ba49fa18a2020f509174f25423"
"checksum string_cache_codegen 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1eea1eee654ef80933142157fdad9dd8bc43cf7c74e999e369263496f04ff4da"
"checksum string_cache_shared 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b1884d1bc09741d466d9b14e6d37ac89d6909cbcac41dd9ae982d4d063bbedfc"
"checksum strip-ansi-escapes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d63676e2abafa709460982ddc02a3bb586b6d15a49b75c212e06edd3933acee"
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum strum 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f6c3a2071519ab6a48f465808c4c1ffdd00dfc8e93111d02b4fc5abab177676e"
"checksum strum_macros 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8baacebd7b7c9b864d83a6ba7a246232983e277b86fa5cdec77f565715a4b136"
@ -4368,11 +4401,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea"
"checksum utf-8 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f1262dfab4c30d5cb7c07026be00ee343a6cf5027fdc0104a9160f354e5db75c"
"checksum utf8-ranges 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "796f7e48bef87609f7ade7e06495a87d5cd06c7866e6a5cbfceffc558a243737"
"checksum utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d"
"checksum vcpkg 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "def296d3eb3b12371b2c7d0e83bfe1403e4db2d7a0bba324a12b21c4ee13143d"
"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
"checksum vergen 3.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "6aba5e34f93dc7051dfad05b98a18e9156f27e7b431fe1d2398cb6061c0a1dba"
"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
"checksum vte 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4f42f536e22f7fcbb407639765c8fd78707a33109301f834a594758bedd6e8cf"
"checksum wait-timeout 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "b9f3bf741a801531993db6478b95682117471f76916f5e690dd8d45395b09349"
"checksum walkdir 2.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d9d7ed3431229a144296213105a390676cc49c9b6a72bd19f3176c98e129fa1"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"


@ -134,9 +134,8 @@ MSVC builds of Rust additionally require an installation of Visual Studio 2017
[Visual Studio Build Tools]: https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2019
At last check (cmake 3.14.3 and msvc 16.0.3) using the 2019 tools fails to
build the in-tree LLVM build with a CMake error, so use 2017 instead by
including the “MSVC v141 VS 2017 C++ x64/x86 build tools (v14.16)” component.
(If you're installing cmake yourself, be careful that “C++ CMake tools for
Windows” doesn't get included under “Individual components”.)
With these dependencies installed, you can build the compiler in a `cmd.exe`
shell with:
@ -151,7 +150,7 @@ then you may need to force rustbuild to use an older version. This can be done
by manually calling the appropriate vcvars file before running the bootstrap.
```batch
> CALL "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\bin\amd64\vcvars64.bat"
> CALL "C:\Program Files (x86)\Microsoft Visual Studio\2019\BuildTools\VC\Auxiliary\Build\vcvars64.bat"
> python x.py build
```


@ -74,9 +74,7 @@ status_check() {
check_dispatch $1 beta nomicon src/doc/nomicon
check_dispatch $1 beta reference src/doc/reference
check_dispatch $1 beta rust-by-example src/doc/rust-by-example
# Temporarily disabled until
# https://github.com/rust-lang/rust/issues/60459 is fixed.
# check_dispatch $1 beta edition-guide src/doc/edition-guide
check_dispatch $1 beta edition-guide src/doc/edition-guide
check_dispatch $1 beta rls src/tools/rls
check_dispatch $1 beta rustfmt src/tools/rustfmt
check_dispatch $1 beta clippy-driver src/tools/clippy


@ -529,18 +529,21 @@ This lint detects bounds in type aliases. These are not currently enforced.
Some example code that triggers this lint:
```rust
#[allow(dead_code)]
type SendVec<T: Send> = Vec<T>;
```
This will produce:
```text
warning: type alias is never used: `SendVec`
--> src/main.rs:1:1
warning: bounds on generic parameters are not enforced in type aliases
--> src/lib.rs:2:17
|
1 | type SendVec<T: Send> = Vec<T>;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
2 | type SendVec<T: Send> = Vec<T>;
| ^^^^
|
= note: #[warn(type_alias_bounds)] on by default
= help: the bound will not be checked when the type alias is used, and should be removed
```
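
The help note is the key point: the bound is simply ignored wherever the alias is used. As a minimal illustrative sketch (not taken from the diffed documentation), the following compiles even though `Rc<u8>` is not `Send`:

```rust
#![allow(type_alias_bounds)]

use std::rc::Rc;

// The `T: Send` bound here is not enforced...
type SendVec<T: Send> = Vec<T>;

fn main() {
    // ...so this compiles even though `Rc<u8>` is not `Send`.
    let v: SendVec<Rc<u8>> = vec![Rc::new(1u8)];
    assert_eq!(*v[0], 1);
}
```
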
## tyvar-behind-raw-pointer


@ -759,13 +759,14 @@ impl<A, F: Fn<A> + ?Sized> Fn<A> for Box<F> {
}
}
/// `FnBox` is deprecated and will be removed.
/// `Box<dyn FnOnce()>` can be called directly, since Rust 1.35.0.
///
/// `FnBox` is a version of the `FnOnce` intended for use with boxed
/// closure objects. The idea is that where one would normally store a
/// `Box<dyn FnOnce()>` in a data structure, you should use
/// closure objects. The idea was that where one would normally store a
/// `Box<dyn FnOnce()>` in a data structure, you would use
/// `Box<dyn FnBox()>`. The two traits behave essentially the same, except
/// that a `FnBox` closure can only be called if it is boxed. (Note
/// that `FnBox` may be deprecated in the future if `Box<dyn FnOnce()>`
/// closures become directly usable.)
/// that a `FnBox` closure can only be called if it is boxed.
///
/// # Examples
///
@ -777,6 +778,7 @@ impl<A, F: Fn<A> + ?Sized> Fn<A> for Box<F> {
///
/// ```
/// #![feature(fnbox)]
/// #![allow(deprecated)]
///
/// use std::boxed::FnBox;
/// use std::collections::HashMap;
@ -796,16 +798,38 @@ impl<A, F: Fn<A> + ?Sized> Fn<A> for Box<F> {
/// }
/// }
/// ```
///
/// In Rust 1.35.0 or later, use `FnOnce`, `FnMut`, or `Fn` instead:
///
/// ```
/// use std::collections::HashMap;
///
/// fn make_map() -> HashMap<i32, Box<dyn FnOnce() -> i32>> {
/// let mut map: HashMap<i32, Box<dyn FnOnce() -> i32>> = HashMap::new();
/// map.insert(1, Box::new(|| 22));
/// map.insert(2, Box::new(|| 44));
/// map
/// }
///
/// fn main() {
/// let mut map = make_map();
/// for i in &[1, 2] {
/// let f = map.remove(&i).unwrap();
/// assert_eq!(f(), i * 22);
/// }
/// }
/// ```
#[rustc_paren_sugar]
#[unstable(feature = "fnbox",
reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")]
#[unstable(feature = "fnbox", issue = "28796")]
#[rustc_deprecated(reason = "use `FnOnce`, `FnMut`, or `Fn` instead", since = "1.37.0")]
pub trait FnBox<A>: FnOnce<A> {
/// Performs the call operation.
fn call_box(self: Box<Self>, args: A) -> Self::Output;
}
#[unstable(feature = "fnbox",
reason = "will be deprecated if and when `Box<FnOnce>` becomes usable", issue = "28796")]
#[unstable(feature = "fnbox", issue = "28796")]
#[rustc_deprecated(reason = "use `FnOnce`, `FnMut`, or `Fn` instead", since = "1.37.0")]
#[allow(deprecated, deprecated_in_future)]
impl<A, F> FnBox<A> for F
where F: FnOnce<A>
{


@ -11,7 +11,7 @@
//! mutate it.
//!
//! Shareable mutable containers exist to permit mutability in a controlled manner, even in the
//! presence of aliasing. Both `Cell<T>` and `RefCell<T>` allows to do this in a single threaded
//! presence of aliasing. Both `Cell<T>` and `RefCell<T>` allow doing this in a single-threaded
//! way. However, neither `Cell<T>` nor `RefCell<T>` are thread safe (they do not implement
//! `Sync`). If you need to do aliasing and mutation between multiple threads it is possible to
//! use [`Mutex`](../../std/sync/struct.Mutex.html),
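
For readers unfamiliar with these types, a minimal single-threaded sketch of what the reworded sentence describes: both values are mutated through shared references, which is exactly the controlled aliasing-plus-mutation the module docs are about.

```rust
use std::cell::{Cell, RefCell};

fn main() {
    // Cell: copy values in and out through a shared reference.
    let counter = Cell::new(0);
    let alias = &counter;          // aliasing is fine...
    alias.set(alias.get() + 1);    // ...and so is mutation
    assert_eq!(counter.get(), 1);

    // RefCell: dynamically checked borrows of the contained value.
    let names = RefCell::new(vec![String::from("ferris")]);
    names.borrow_mut().push(String::from("corro"));
    assert_eq!(names.borrow().len(), 2);
}
```
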


@ -473,6 +473,7 @@ assert_eq!(m, ", $reversed, ");
#[unstable(feature = "reverse_bits", issue = "48763")]
#[rustc_const_unstable(feature = "const_int_conversion")]
#[inline]
#[must_use]
pub const fn reverse_bits(self) -> Self {
(self as $UnsignedT).reverse_bits() as Self
}
@ -2522,6 +2523,7 @@ assert_eq!(m, ", $reversed, ");
```"),
#[unstable(feature = "reverse_bits", issue = "48763")]
#[inline]
#[must_use]
pub const fn reverse_bits(self) -> Self {
intrinsics::bitreverse(self as $ActualT) as Self
}
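
For context on the annotation: `reverse_bits` returns a new value rather than mutating in place, so a call whose result is discarded is almost certainly a bug, which is what `#[must_use]` now warns about. A minimal sketch of the behavior (at the time of this commit the method was still gated behind `#![feature(reverse_bits)]`; it has since been stabilized):

```rust
fn main() {
    let n: u8 = 0b0000_0010;
    // Reversing the bit order of 0b0000_0010 yields 0b0100_0000.
    assert_eq!(n.reverse_bits(), 0b0100_0000);

    // With #[must_use], a bare `n.reverse_bits();` statement would now
    // trigger an unused_must_use warning, since the result is discarded.
}
```
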


@ -524,6 +524,7 @@ assert_eq!(n.trailing_zeros(), 3);
/// ```
#[unstable(feature = "reverse_bits", issue = "48763")]
#[inline]
#[must_use]
pub const fn reverse_bits(self) -> Self {
Wrapping(self.0.reverse_bits())
}


@ -658,12 +658,15 @@ impl<'tcx> ScopeTree {
// The lifetime was defined on node that doesn't own a body,
// which in practice can only mean a trait or an impl, that
// is the parent of a method, and that is enforced below.
assert_eq!(Some(param_owner_id), self.root_parent,
"free_scope: {:?} not recognized by the \
region scope tree for {:?} / {:?}",
param_owner,
self.root_parent.map(|id| tcx.hir().local_def_id_from_hir_id(id)),
self.root_body.map(|hir_id| DefId::local(hir_id.owner)));
if Some(param_owner_id) != self.root_parent {
tcx.sess.delay_span_bug(
DUMMY_SP,
&format!("free_scope: {:?} not recognized by the \
region scope tree for {:?} / {:?}",
param_owner,
self.root_parent.map(|id| tcx.hir().local_def_id_from_hir_id(id)),
self.root_body.map(|hir_id| DefId::local(hir_id.owner))));
}
// The trait/impl lifetime is in scope for the method's body.
self.root_body.unwrap().local_id


@ -1,3 +1,5 @@
use std::fmt;
use crate::mir;
use crate::ty::layout::{self, HasDataLayout, Size};
use rustc_macros::HashStable;
@ -70,7 +72,7 @@ impl<T: layout::HasDataLayout> PointerArithmetic for T {}
///
/// Pointer is also generic over the `Tag` associated with each pointer,
/// which is used to do provenance tracking during execution.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd,
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd,
RustcEncodable, RustcDecodable, Hash, HashStable)]
pub struct Pointer<Tag=(),Id=AllocId> {
pub alloc_id: Id,
@ -80,6 +82,18 @@ pub struct Pointer<Tag=(),Id=AllocId> {
static_assert_size!(Pointer, 16);
impl<Tag: fmt::Debug, Id: fmt::Debug> fmt::Debug for Pointer<Tag, Id> {
default fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}.{:#x}[{:?}]", self.alloc_id, self.offset.bytes(), self.tag)
}
}
// Specialization for no tag
impl<Id: fmt::Debug> fmt::Debug for Pointer<(), Id> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{:?}.{:#x}", self.alloc_id, self.offset.bytes())
}
}
/// Produces a `Pointer` which points to the beginning of the Allocation
impl From<AllocId> for Pointer {
#[inline(always)]


@ -93,7 +93,7 @@ impl<'tcx> ConstValue<'tcx> {
`memory::Allocation`. It is in many ways like a small chunk of an `Allocation`, up to 8 bytes in
/// size. Like a range of bytes in an `Allocation`, a `Scalar` can either represent the raw bytes
/// of a simple value or a pointer into another `Allocation`
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd,
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd,
RustcEncodable, RustcDecodable, Hash, HashStable)]
pub enum Scalar<Tag=(), Id=AllocId> {
/// The raw bytes of a simple value.
@ -113,6 +113,27 @@ pub enum Scalar<Tag=(), Id=AllocId> {
#[cfg(target_arch = "x86_64")]
static_assert_size!(Scalar, 24);
impl<Tag: fmt::Debug, Id: fmt::Debug> fmt::Debug for Scalar<Tag, Id> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Scalar::Ptr(ptr) =>
write!(f, "{:?}", ptr),
&Scalar::Bits { bits, size } => {
if size == 0 {
assert_eq!(bits, 0, "ZST value must be 0");
write!(f, "<ZST>")
} else {
assert_eq!(truncate(bits, Size::from_bytes(size as u64)), bits,
"Scalar value {:#x} exceeds size of {} bytes", bits, size);
// Format as hex number wide enough to fit any value of the given `size`.
// So bits=20, size=1 will be "0x14", but with size=4 it'll be "0x00000014".
write!(f, "0x{:>0width$x}", bits, width=(size*2) as usize)
}
}
}
}
}
impl<Tag> fmt::Display for Scalar<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
@ -412,7 +433,7 @@ impl<Tag> From<Pointer<Tag>> for Scalar<Tag> {
}
}
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub enum ScalarMaybeUndef<Tag=(), Id=AllocId> {
Scalar(Scalar<Tag, Id>),
Undef,
@ -425,6 +446,15 @@ impl<Tag> From<Scalar<Tag>> for ScalarMaybeUndef<Tag> {
}
}
impl<Tag: fmt::Debug, Id: fmt::Debug> fmt::Debug for ScalarMaybeUndef<Tag, Id> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ScalarMaybeUndef::Undef => write!(f, "Undef"),
ScalarMaybeUndef::Scalar(s) => write!(f, "{:?}", s),
}
}
}
impl<Tag> fmt::Display for ScalarMaybeUndef<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
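
The width calculation in the `Debug` impls above is the heart of the improved output: the hex field is zero-padded to two digits per byte of the scalar's size, while the `0x` prefix stays outside the padded width. A standalone sketch of that format string, using the same example values as the comment in the `Scalar` impl:

```rust
fn main() {
    // Two hex digits per byte: size = 1 gives width 2, size = 4 gives width 8.
    let bits: u128 = 20;
    assert_eq!(format!("0x{:>0width$x}", bits, width = 2), "0x14");
    assert_eq!(format!("0x{:>0width$x}", bits, width = 8), "0x00000014");
}
```
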


@ -226,12 +226,12 @@ impl<'a, 'gcx, 'tcx> OnUnimplementedFormatString {
Ok(result)
}
fn verify(&self,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
trait_def_id: DefId,
span: Span)
-> Result<(), ErrorReported>
{
fn verify(
&self,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
trait_def_id: DefId,
span: Span,
) -> Result<(), ErrorReported> {
let name = tcx.item_name(trait_def_id);
let generics = tcx.generics_of(trait_def_id);
let parser = Parser::new(&self.0, None, vec![], false);
@ -272,12 +272,12 @@ impl<'a, 'gcx, 'tcx> OnUnimplementedFormatString {
result
}
pub fn format(&self,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
trait_ref: ty::TraitRef<'tcx>,
options: &FxHashMap<String, String>)
-> String
{
pub fn format(
&self,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
trait_ref: ty::TraitRef<'tcx>,
options: &FxHashMap<String, String>,
) -> String {
let name = tcx.item_name(trait_ref.def_id);
let trait_str = tcx.def_path_str(trait_ref.def_id);
let generics = tcx.generics_of(trait_ref.def_id);


@ -479,21 +479,22 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for SubstFolder<'a, 'gcx, 'tcx> {
// the specialized routine `ty::replace_late_regions()`.
match *r {
ty::ReEarlyBound(data) => {
let r = self.substs.get(data.index as usize).map(|k| k.unpack());
match r {
let rk = self.substs.get(data.index as usize).map(|k| k.unpack());
match rk {
Some(UnpackedKind::Lifetime(lt)) => {
self.shift_region_through_binders(lt)
}
_ => {
let span = self.span.unwrap_or(DUMMY_SP);
span_bug!(
span,
let msg = format!(
"Region parameter out of range \
when substituting in region {} (root type={:?}) \
(index={})",
data.name,
self.root_ty,
data.index);
self.tcx.sess.delay_span_bug(span, &msg);
r
}
}
}


@ -465,7 +465,7 @@ pub struct JoinPathsError {
/// # }
/// ```
///
/// Using `env::join_paths` with `env::spit_paths` to append an item to the `PATH` environment
/// Using `env::join_paths` with [`env::split_paths`] to append an item to the `PATH` environment
/// variable:
///
/// ```
@ -483,6 +483,8 @@ pub struct JoinPathsError {
/// Ok(())
/// }
/// ```
///
/// [`env::split_paths`]: fn.split_paths.html
#[stable(feature = "env", since = "1.0.0")]
pub fn join_paths<I, T>(paths: I) -> Result<OsString, JoinPathsError>
where I: IntoIterator<Item=T>, T: AsRef<OsStr>
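
The doc example this hunk links to follows the usual pattern: split the current `PATH`, append an entry, and join the pieces back together. A sketch of that pattern (the `/my/new/path` entry is a placeholder):

```rust
use std::env;
use std::path::PathBuf;

fn main() -> Result<(), env::JoinPathsError> {
    if let Some(path) = env::var_os("PATH") {
        // Split the existing PATH, append one more entry, and join it back.
        let mut paths: Vec<PathBuf> = env::split_paths(&path).collect();
        paths.push(PathBuf::from("/my/new/path"));
        let new_path = env::join_paths(paths)?;
        env::set_var("PATH", &new_path);
    }
    Ok(())
}
```
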


@ -278,7 +278,14 @@ impl Attribute {
pub fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T>
where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
{
let mut parser = Parser::new(sess, self.tokens.clone(), None, false, false);
let mut parser = Parser::new(
sess,
self.tokens.clone(),
None,
false,
false,
Some("attribute"),
);
let result = f(&mut parser)?;
if parser.token != token::Eof {
parser.unexpected()?;


@ -11,7 +11,7 @@ use crate::parse::{self, parser, DirectoryOwnership};
use crate::parse::token;
use crate::ptr::P;
use crate::symbol::{kw, sym, Ident, Symbol};
use crate::ThinVec;
use crate::{ThinVec, MACRO_ARGUMENTS};
use crate::tokenstream::{self, TokenStream};
use errors::{DiagnosticBuilder, DiagnosticId};
@ -850,7 +850,7 @@ impl<'a> ExtCtxt<'a> {
}
pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect())
parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect(), MACRO_ARGUMENTS)
}
pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }


@ -658,7 +658,14 @@ pub fn parse(
recurse_into_modules: bool,
) -> NamedParseResult {
// Create a parser that can be used for the "black box" parts.
let mut parser = Parser::new(sess, tts, directory, recurse_into_modules, true);
let mut parser = Parser::new(
sess,
tts,
directory,
recurse_into_modules,
true,
crate::MACRO_ARGUMENTS,
);
// A queue of possible matcher positions. We initialize it with the matcher position in which
// the "dot" is before the first token of the first token tree in `ms`. `inner_parse_loop` then


@ -172,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
path: Cow::from(cx.current_expansion.module.directory.as_path()),
ownership: cx.current_expansion.directory_ownership,
};
let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false);
let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false, None);
p.root_module_name = cx.current_expansion.module.mod_path.last()
.map(|id| id.as_str().to_string());


@ -31,6 +31,8 @@ pub use rustc_data_structures::thin_vec::ThinVec;
use ast::AttrId;
use syntax_pos::edition::Edition;
const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");
// A variant of 'try!' that panics on an Err. This is used as a crutch on the
// way towards a non-panic!-prone parser. It should be used for fatal parsing
// errors; eventually we plan to convert all code using panictry to just use


@ -13,7 +13,7 @@ use crate::symbol::kw;
use crate::ThinVec;
use errors::{Applicability, DiagnosticBuilder};
use log::debug;
use syntax_pos::Span;
use syntax_pos::{Span, DUMMY_SP};
pub trait RecoverQPath: Sized + 'static {
const PATH_STYLE: PathStyle = PathStyle::Expr;
@ -201,7 +201,7 @@ impl<'a> Parser<'a> {
let mut path = ast::Path {
segments: Vec::new(),
span: syntax_pos::DUMMY_SP,
span: DUMMY_SP,
};
self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
path.span = ty_span.to(self.prev_span);
@ -267,6 +267,58 @@ impl<'a> Parser<'a> {
}
}
/// Create a `DiagnosticBuilder` for an unexpected token `t` and try to recover if it is a
/// closing delimiter.
pub fn unexpected_try_recover(
&mut self,
t: &token::Token,
) -> PResult<'a, bool /* recovered */> {
let token_str = pprust::token_to_string(t);
let this_token_str = self.this_token_descr();
let (prev_sp, sp) = match (&self.token, self.subparser_name) {
// Point at the end of the macro call when reaching end of macro arguments.
(token::Token::Eof, Some(_)) => {
let sp = self.sess.source_map().next_point(self.span);
(sp, sp)
}
// We don't want to point at the following span after DUMMY_SP.
// This happens when the parser finds an empty TokenStream.
_ if self.prev_span == DUMMY_SP => (self.span, self.span),
// EOF, don't want to point at the following char, but rather the last token.
(token::Token::Eof, None) => (self.prev_span, self.span),
_ => (self.sess.source_map().next_point(self.prev_span), self.span),
};
let msg = format!(
"expected `{}`, found {}",
token_str,
match (&self.token, self.subparser_name) {
(token::Token::Eof, Some(origin)) => format!("end of {}", origin),
_ => this_token_str,
},
);
let mut err = self.struct_span_err(sp, &msg);
let label_exp = format!("expected `{}`", token_str);
match self.recover_closing_delimiter(&[t.clone()], err) {
Err(e) => err = e,
Ok(recovered) => {
return Ok(recovered);
}
}
let cm = self.sess.source_map();
match (cm.lookup_line(prev_sp.lo()), cm.lookup_line(sp.lo())) {
(Ok(ref a), Ok(ref b)) if a.line == b.line => {
// When the spans are in the same line, it means that the only content
// between them is whitespace, point only at the found token.
err.span_label(sp, label_exp);
}
_ => {
err.span_label(prev_sp, label_exp);
err.span_label(sp, "unexpected token");
}
}
Err(err)
}
/// Consume alternative await syntaxes like `await <expr>`, `await? <expr>`, `await(<expr>)`
/// and `await { <expr> }`.
crate fn parse_incorrect_await_syntax(
@ -562,4 +614,23 @@ impl<'a> Parser<'a> {
}
}
crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
let (span, msg) = match (&self.token, self.subparser_name) {
(&token::Token::Eof, Some(origin)) => {
let sp = self.sess.source_map().next_point(self.span);
(sp, format!("expected expression, found end of {}", origin))
}
_ => (self.span, format!(
"expected expression, found {}",
self.this_token_descr(),
)),
};
let mut err = self.struct_span_err(span, &msg);
let sp = self.sess.source_map().start_point(self.span);
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
self.sess.expr_parentheses_needed(&mut err, *sp, None);
}
err.span_label(span, "expected expression");
err
}
}


@ -236,7 +236,7 @@ fn maybe_source_file_to_parser(
) -> Result<Parser<'_>, Vec<Diagnostic>> {
let end_pos = source_file.end_pos;
let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
let mut parser = stream_to_parser(sess, stream);
let mut parser = stream_to_parser(sess, stream, None);
parser.unclosed_delims = unclosed_delims;
if parser.token == token::Eof && parser.span.is_dummy() {
parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
@ -248,7 +248,7 @@ fn maybe_source_file_to_parser(
// must preserve old name for now, because quote! from the *existing*
// compiler expands into it
pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> {
stream_to_parser(sess, tts.into_iter().collect())
stream_to_parser(sess, tts.into_iter().collect(), crate::MACRO_ARGUMENTS)
}
@ -328,8 +328,12 @@ pub fn maybe_file_to_stream(
}
/// Given stream and the `ParseSess`, produces a parser.
pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
Parser::new(sess, stream, None, true, false)
pub fn stream_to_parser<'a>(
sess: &'a ParseSess,
stream: TokenStream,
subparser_name: Option<&'static str>,
) -> Parser<'a> {
Parser::new(sess, stream, None, true, false, subparser_name)
}
/// Given stream, the `ParseSess` and the base directory, produces a parser.
@ -343,10 +347,12 @@ pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
The main usage of this function is outside of rustc, for those who use
/// libsyntax as a library. Please do not remove this function while refactoring
/// just because it is not used in rustc codebase!
pub fn stream_to_parser_with_base_dir<'a>(sess: &'a ParseSess,
stream: TokenStream,
base_dir: Directory<'a>) -> Parser<'a> {
Parser::new(sess, stream, Some(base_dir), true, false)
pub fn stream_to_parser_with_base_dir<'a>(
sess: &'a ParseSess,
stream: TokenStream,
base_dir: Directory<'a>,
) -> Parser<'a> {
Parser::new(sess, stream, Some(base_dir), true, false, None)
}
/// A sequence separator.


@ -51,7 +51,7 @@ use crate::symbol::{kw, sym, Symbol};
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
use rustc_target::spec::abi::{self, Abi};
use syntax_pos::{
Span, MultiSpan, BytePos, FileName,
BytePos, DUMMY_SP, FileName, MultiSpan, Span,
hygiene::CompilerDesugaringKind,
};
use log::{debug, trace};
@ -233,6 +233,8 @@ pub struct Parser<'a> {
/// error.
crate unclosed_delims: Vec<UnmatchedBrace>,
last_unexpected_token_span: Option<Span>,
/// If present, this `Parser` is not parsing Rust code but rather a macro call.
crate subparser_name: Option<&'static str>,
}
impl<'a> Drop for Parser<'a> {
@ -309,7 +311,7 @@ impl TokenCursor {
self.frame = frame;
continue
} else {
return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP }
return TokenAndSpan { tok: token::Eof, sp: DUMMY_SP }
};
match self.frame.last_token {
@ -533,17 +535,19 @@ enum TokenExpectType {
}
impl<'a> Parser<'a> {
pub fn new(sess: &'a ParseSess,
tokens: TokenStream,
directory: Option<Directory<'a>>,
recurse_into_file_modules: bool,
desugar_doc_comments: bool)
-> Self {
pub fn new(
sess: &'a ParseSess,
tokens: TokenStream,
directory: Option<Directory<'a>>,
recurse_into_file_modules: bool,
desugar_doc_comments: bool,
subparser_name: Option<&'static str>,
) -> Self {
let mut parser = Parser {
sess,
token: token::Whitespace,
span: syntax_pos::DUMMY_SP,
prev_span: syntax_pos::DUMMY_SP,
span: DUMMY_SP,
prev_span: DUMMY_SP,
meta_var_span: None,
prev_token_kind: PrevTokenKind::Other,
restrictions: Restrictions::empty(),
@ -568,6 +572,7 @@ impl<'a> Parser<'a> {
max_angle_bracket_count: 0,
unclosed_delims: Vec::new(),
last_unexpected_token_span: None,
subparser_name,
};
let tok = parser.next_tok();
@ -631,44 +636,13 @@ impl<'a> Parser<'a> {
}
/// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
if self.expected_tokens.is_empty() {
if self.token == *t {
self.bump();
Ok(false)
} else {
let token_str = pprust::token_to_string(t);
let this_token_str = self.this_token_descr();
let mut err = self.fatal(&format!("expected `{}`, found {}",
token_str,
this_token_str));
let sp = if self.token == token::Token::Eof {
// EOF, don't want to point at the following char, but rather the last token
self.prev_span
} else {
self.sess.source_map().next_point(self.prev_span)
};
let label_exp = format!("expected `{}`", token_str);
match self.recover_closing_delimiter(&[t.clone()], err) {
Err(e) => err = e,
Ok(recovered) => {
return Ok(recovered);
}
}
let cm = self.sess.source_map();
match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
(Ok(ref a), Ok(ref b)) if a.line == b.line => {
// When the spans are in the same line, it means that the only content
// between them is whitespace, point only at the found token.
err.span_label(self.span, label_exp);
}
_ => {
err.span_label(sp, label_exp);
err.span_label(self.span, "unexpected token");
}
}
Err(err)
self.unexpected_try_recover(t)
}
} else {
self.expect_one_of(slice::from_ref(t), &[])
@ -812,7 +786,7 @@ impl<'a> Parser<'a> {
// | expected one of 8 possible tokens here
err.span_label(self.span, label_exp);
}
_ if self.prev_span == syntax_pos::DUMMY_SP => {
_ if self.prev_span == DUMMY_SP => {
// Account for macro context where the previous span might not be
// available to avoid incorrect output (#54841).
err.span_label(self.span, "unexpected token");
@ -2041,7 +2015,7 @@ impl<'a> Parser<'a> {
path = self.parse_path(PathStyle::Type)?;
path_span = path_lo.to(self.prev_span);
} else {
path = ast::Path { segments: Vec::new(), span: syntax_pos::DUMMY_SP };
path = ast::Path { segments: Vec::new(), span: DUMMY_SP };
path_span = self.span.to(self.span);
}
@ -2627,17 +2601,7 @@ impl<'a> Parser<'a> {
}
Err(mut err) => {
self.cancel(&mut err);
let msg = format!("expected expression, found {}",
self.this_token_descr());
let mut err = self.fatal(&msg);
let sp = self.sess.source_map().start_point(self.span);
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow()
.get(&sp)
{
self.sess.expr_parentheses_needed(&mut err, *sp, None);
}
err.span_label(self.span, "expected expression");
return Err(err);
return Err(self.expected_expression_found());
}
}
}
@ -5592,7 +5556,7 @@ impl<'a> Parser<'a> {
where_clause: WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
span: syntax_pos::DUMMY_SP,
span: DUMMY_SP,
},
span: span_lo.to(self.prev_span),
})
@ -5838,7 +5802,7 @@ impl<'a> Parser<'a> {
let mut where_clause = WhereClause {
id: ast::DUMMY_NODE_ID,
predicates: Vec::new(),
span: syntax_pos::DUMMY_SP,
span: DUMMY_SP,
};
if !self.eat_keyword(kw::Where) {
@ -7005,7 +6969,7 @@ impl<'a> Parser<'a> {
Ident::with_empty_ctxt(sym::warn_directory_ownership)),
tokens: TokenStream::empty(),
is_sugared_doc: false,
span: syntax_pos::DUMMY_SP,
span: DUMMY_SP,
};
attr::mark_known(&attr);
attrs.push(attr);
@ -7013,7 +6977,7 @@ impl<'a> Parser<'a> {
Ok((id, ItemKind::Mod(module), Some(attrs)))
} else {
let placeholder = ast::Mod {
inner: syntax_pos::DUMMY_SP,
inner: DUMMY_SP,
items: Vec::new(),
inline: false
};

View File

@ -138,7 +138,11 @@ fn parse_inline_asm<'a>(
if p2.token != token::Eof {
let mut extra_tts = p2.parse_all_token_trees()?;
extra_tts.extend(tts[first_colon..].iter().cloned());
p = parse::stream_to_parser(cx.parse_sess, extra_tts.into_iter().collect());
p = parse::stream_to_parser(
cx.parse_sess,
extra_tts.into_iter().collect(),
Some("inline assembly"),
);
}
asm = s;


@ -89,7 +89,7 @@ impl MultiItemModifier for ProcMacroDerive {
let error_count_before = ecx.parse_sess.span_diagnostic.err_count();
let msg = "proc-macro derive produced unparseable tokens";
let mut parser = parse::stream_to_parser(ecx.parse_sess, stream);
let mut parser = parse::stream_to_parser(ecx.parse_sess, stream, Some("proc-macro derive"));
let mut items = vec![];
loop {


@ -22,7 +22,6 @@
#![unstable(feature = "test", issue = "27812")]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", test(attr(deny(warnings))))]
#![feature(asm)]
#![feature(fnbox)]
#![cfg_attr(any(unix, target_os = "cloudabi"), feature(libc, rustc_private))]
#![feature(nll)]
#![feature(set_stdio)]
@ -56,7 +55,6 @@ pub use self::TestResult::*;
use std::any::Any;
use std::borrow::Cow;
use std::boxed::FnBox;
use std::cmp;
use std::collections::BTreeMap;
use std::env;
@ -174,7 +172,7 @@ pub trait TDynBenchFn: Send {
pub enum TestFn {
StaticTestFn(fn()),
StaticBenchFn(fn(&mut Bencher)),
DynTestFn(Box<dyn FnBox() + Send>),
DynTestFn(Box<dyn FnOnce() + Send>),
DynBenchFn(Box<dyn TDynBenchFn + 'static>),
}
@ -1447,7 +1445,7 @@ pub fn run_test(
desc: TestDesc,
monitor_ch: Sender<MonitorMsg>,
nocapture: bool,
testfn: Box<dyn FnBox() + Send>,
testfn: Box<dyn FnOnce() + Send>,
concurrency: Concurrent,
) {
// Buffer for capturing standard I/O


@ -1,4 +1,5 @@
#![feature(fnbox)]
#![allow(deprecated, deprecated_in_future)]
use std::boxed::FnBox;


@ -1,3 +1,5 @@
// run-pass
// edition:2018
// aux-build:arc_wake.rs


@ -1,3 +1,5 @@
// run-pass
// edition:2018
// aux-build:arc_wake.rs


@ -0,0 +1,64 @@
// edition:2018
use std::sync::Arc;
use std::task::{
Waker, RawWaker, RawWakerVTable,
};
macro_rules! waker_vtable {
($ty:ident) => {
&RawWakerVTable::new(
clone_arc_raw::<$ty>,
wake_arc_raw::<$ty>,
wake_by_ref_arc_raw::<$ty>,
drop_arc_raw::<$ty>,
)
};
}
pub trait ArcWake {
fn wake(self: Arc<Self>);
fn wake_by_ref(arc_self: &Arc<Self>) {
arc_self.clone().wake()
}
fn into_waker(wake: Arc<Self>) -> Waker where Self: Sized
{
let ptr = Arc::into_raw(wake) as *const ();
unsafe {
Waker::from_raw(RawWaker::new(ptr, waker_vtable!(Self)))
}
}
}
unsafe fn increase_refcount<T: ArcWake>(data: *const ()) {
// Retain Arc by creating a copy
let arc: Arc<T> = Arc::from_raw(data as *const T);
let arc_clone = arc.clone();
// Forget the Arcs again, so that the refcount isn't decreased
let _ = Arc::into_raw(arc);
let _ = Arc::into_raw(arc_clone);
}
unsafe fn clone_arc_raw<T: ArcWake>(data: *const ()) -> RawWaker {
increase_refcount::<T>(data);
RawWaker::new(data, waker_vtable!(T))
}
unsafe fn drop_arc_raw<T: ArcWake>(data: *const ()) {
// Drop Arc
let _: Arc<T> = Arc::from_raw(data as *const T);
}
unsafe fn wake_arc_raw<T: ArcWake>(data: *const ()) {
let arc: Arc<T> = Arc::from_raw(data as *const T);
ArcWake::wake(arc);
}
unsafe fn wake_by_ref_arc_raw<T: ArcWake>(data: *const ()) {
let arc: Arc<T> = Arc::from_raw(data as *const T);
ArcWake::wake_by_ref(&arc);
let _ = Arc::into_raw(arc);
}
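
For context on how this helper gets exercised, a hypothetical usage sketch (the `Counter` type is invented for illustration and assumes the `ArcWake` trait above is in scope, on a toolchain from around the time of this commit or newer): the vtable functions above manage the `Arc` refcount, so the `Waker` can be cloned and woken freely.

```rust
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};

struct Counter {
    wakes: AtomicUsize,
}

impl ArcWake for Counter {
    fn wake(self: Arc<Self>) {
        Self::wake_by_ref(&self)
    }
    fn wake_by_ref(arc_self: &Arc<Self>) {
        arc_self.wakes.fetch_add(1, Ordering::SeqCst);
    }
}

fn main() {
    let counter = Arc::new(Counter { wakes: AtomicUsize::new(0) });
    let waker = ArcWake::into_waker(counter.clone());
    waker.wake_by_ref();   // wakes through the vtable without consuming the waker
    waker.clone().wake();  // clones the Arc, then wakes and drops the clone
    assert_eq!(counter.wakes.load(Ordering::SeqCst), 2);
}
```
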


@ -1,7 +1,9 @@
#![feature(async_await)]
#![allow(unused_parens)]
// run-pass
// edition:2018
// pp-exact
#![feature(async_await)]
#![allow(unused_parens)]
fn main() { let _a = (async { }); }


@ -1,3 +1,5 @@
// run-pass
// compile-flags: --edition=2018
#![feature(async_await, await_macro)]


@ -1,7 +1,3 @@
#![feature(core, fnbox)]
use std::boxed::FnBox;
struct FuncContainer {
f1: fn(data: u8),
f2: extern "C" fn(data: u8),
@ -18,7 +14,7 @@ struct Obj<F> where F: FnOnce() -> u32 {
}
struct BoxedObj {
boxed_closure: Box<FnBox() -> u32>,
boxed_closure: Box<FnOnce() -> u32>,
}
struct Wrapper<F> where F: FnMut() -> u32 {
@ -29,8 +25,8 @@ fn func() -> u32 {
0
}
fn check_expression() -> Obj<Box<FnBox() -> u32>> {
Obj { closure: Box::new(|| 42_u32) as Box<FnBox() -> u32>, not_closure: 42 }
fn check_expression() -> Obj<Box<FnOnce() -> u32>> {
Obj { closure: Box::new(|| 42_u32) as Box<FnOnce() -> u32>, not_closure: 42 }
}
fn main() {
@ -48,7 +44,7 @@ fn main() {
let boxed_fn = BoxedObj { boxed_closure: Box::new(func) };
boxed_fn.boxed_closure();//~ ERROR no method named `boxed_closure` found
let boxed_closure = BoxedObj { boxed_closure: Box::new(|| 42_u32) as Box<FnBox() -> u32> };
let boxed_closure = BoxedObj { boxed_closure: Box::new(|| 42_u32) as Box<FnOnce() -> u32> };
boxed_closure.boxed_closure();//~ ERROR no method named `boxed_closure` found
// test expression writing in the notes


@ -1,5 +1,5 @@
error[E0599]: no method named `closure` found for type `Obj<[closure@$DIR/issue-2392.rs:39:36: 39:41]>` in the current scope
--> $DIR/issue-2392.rs:40:15
error[E0599]: no method named `closure` found for type `Obj<[closure@$DIR/issue-2392.rs:35:36: 35:41]>` in the current scope
--> $DIR/issue-2392.rs:36:15
|
LL | struct Obj<F> where F: FnOnce() -> u32 {
| -------------------------------------- method `closure` not found for this
@ -11,8 +11,8 @@ help: to call the function stored in `closure`, surround the field access with p
LL | (o_closure.closure)();
| ^ ^
error[E0599]: no method named `not_closure` found for type `Obj<[closure@$DIR/issue-2392.rs:39:36: 39:41]>` in the current scope
--> $DIR/issue-2392.rs:42:15
error[E0599]: no method named `not_closure` found for type `Obj<[closure@$DIR/issue-2392.rs:35:36: 35:41]>` in the current scope
--> $DIR/issue-2392.rs:38:15
|
LL | struct Obj<F> where F: FnOnce() -> u32 {
| -------------------------------------- method `not_closure` not found for this
@ -23,7 +23,7 @@ LL | o_closure.not_closure();
| field, not a method
error[E0599]: no method named `closure` found for type `Obj<fn() -> u32 {func}>` in the current scope
--> $DIR/issue-2392.rs:46:12
--> $DIR/issue-2392.rs:42:12
|
LL | struct Obj<F> where F: FnOnce() -> u32 {
| -------------------------------------- method `closure` not found for this
@ -36,7 +36,7 @@ LL | (o_func.closure)();
| ^ ^
error[E0599]: no method named `boxed_closure` found for type `BoxedObj` in the current scope
--> $DIR/issue-2392.rs:49:14
--> $DIR/issue-2392.rs:45:14
|
LL | struct BoxedObj {
| --------------- method `boxed_closure` not found for this
@ -49,7 +49,7 @@ LL | (boxed_fn.boxed_closure)();
| ^ ^
error[E0599]: no method named `boxed_closure` found for type `BoxedObj` in the current scope
--> $DIR/issue-2392.rs:52:19
--> $DIR/issue-2392.rs:48:19
|
LL | struct BoxedObj {
| --------------- method `boxed_closure` not found for this
@ -62,7 +62,7 @@ LL | (boxed_closure.boxed_closure)();
| ^ ^
error[E0599]: no method named `closure` found for type `Obj<fn() -> u32 {func}>` in the current scope
--> $DIR/issue-2392.rs:57:12
--> $DIR/issue-2392.rs:53:12
|
LL | struct Obj<F> where F: FnOnce() -> u32 {
| -------------------------------------- method `closure` not found for this
@ -75,7 +75,7 @@ LL | (w.wrap.closure)();
| ^ ^
error[E0599]: no method named `not_closure` found for type `Obj<fn() -> u32 {func}>` in the current scope
--> $DIR/issue-2392.rs:59:12
--> $DIR/issue-2392.rs:55:12
|
LL | struct Obj<F> where F: FnOnce() -> u32 {
| -------------------------------------- method `not_closure` not found for this
@ -85,8 +85,8 @@ LL | w.wrap.not_closure();
| |
| field, not a method
error[E0599]: no method named `closure` found for type `Obj<std::boxed::Box<(dyn std::boxed::FnBox<(), Output = u32> + 'static)>>` in the current scope
--> $DIR/issue-2392.rs:62:24
error[E0599]: no method named `closure` found for type `Obj<std::boxed::Box<(dyn std::ops::FnOnce() -> u32 + 'static)>>` in the current scope
--> $DIR/issue-2392.rs:58:24
|
LL | struct Obj<F> where F: FnOnce() -> u32 {
| -------------------------------------- method `closure` not found for this
@ -99,7 +99,7 @@ LL | (check_expression().closure)();
| ^ ^
error[E0599]: no method named `f1` found for type `FuncContainer` in the current scope
--> $DIR/issue-2392.rs:68:31
--> $DIR/issue-2392.rs:64:31
|
LL | struct FuncContainer {
| -------------------- method `f1` not found for this
@ -112,7 +112,7 @@ LL | ((*self.container).f1)(1);
| ^ ^
error[E0599]: no method named `f2` found for type `FuncContainer` in the current scope
--> $DIR/issue-2392.rs:69:31
--> $DIR/issue-2392.rs:65:31
|
LL | struct FuncContainer {
| -------------------- method `f2` not found for this
@ -125,7 +125,7 @@ LL | ((*self.container).f2)(1);
| ^ ^
error[E0599]: no method named `f3` found for type `FuncContainer` in the current scope
--> $DIR/issue-2392.rs:70:31
--> $DIR/issue-2392.rs:66:31
|
LL | struct FuncContainer {
| -------------------- method `f3` not found for this

Some files were not shown because too many files have changed in this diff.