Rollup merge of #75270 - matthiaskrgr:clippy_aug_1, r=Dylan-DPC

fix a couple of clippy findings
Yuki Okushi 2020-08-08 11:36:12 +09:00 committed by GitHub
commit 21bfe529c7
27 changed files with 48 additions and 53 deletions

----- changed file (path not shown) -----

@@ -4,7 +4,7 @@ use std::env;
 #[allow(deprecated)]
 pub fn get_concurrency() -> usize {
-    return match env::var("RUST_TEST_THREADS") {
+    match env::var("RUST_TEST_THREADS") {
         Ok(s) => {
             let opt_n: Option<usize> = s.parse().ok();
             match opt_n {
@@ -13,7 +13,7 @@ pub fn get_concurrency() -> usize {
             }
         }
         Err(..) => num_cpus(),
-    };
+    }
 }
 cfg_if::cfg_if! {
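
The change above looks like a fix for clippy's `needless_return` lint: the last expression of a Rust block is already its value, so a trailing `return ...;` only adds noise. A minimal sketch of the pattern, with a made-up function rather than the one from this diff:

    // Hypothetical illustration of clippy::needless_return.
    fn threads_before(var: Option<&str>) -> usize {
        // Clippy flags this trailing `return ...;`.
        return match var {
            Some(s) => s.parse().unwrap_or(1),
            None => 1,
        };
    }

    fn threads_after(var: Option<&str>) -> usize {
        // The match is the final expression, so it already is the return value.
        match var {
            Some(s) => s.parse().unwrap_or(1),
            None => 1,
        }
    }

    fn main() {
        assert_eq!(threads_before(Some("4")), threads_after(Some("4")));
    }

Several later hunks in this commit (param_type_metadata, the LeakCheck impl, decode_expn_id, optimization_applies, the chalk database, and MemCategorizationContext) apply the same rewrite.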

----- changed file (path not shown) -----

@@ -960,7 +960,7 @@ fn pointer_type_metadata(
 fn param_type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll DIType {
     debug!("param_type_metadata: {:?}", t);
     let name = format!("{:?}", t);
-    return unsafe {
+    unsafe {
         llvm::LLVMRustDIBuilderCreateBasicType(
             DIB(cx),
             name.as_ptr().cast(),
@@ -968,7 +968,7 @@ fn param_type_metadata(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll DIType {
             Size::ZERO.bits(),
             DW_ATE_unsigned,
         )
-    };
+    }
 }
 pub fn compile_unit_metadata(

----- changed file (path not shown) -----

@@ -490,7 +490,7 @@ fn copy_all_cgu_workproducts_to_incr_comp_cache_dir(
     let _timer = sess.timer("copy_all_cgu_workproducts_to_incr_comp_cache_dir");
     for module in compiled_modules.modules.iter().filter(|m| m.kind == ModuleKind::Regular) {
-        let path = module.object.as_ref().map(|path| path.clone());
+        let path = module.object.as_ref().cloned();
         if let Some((id, product)) =
             copy_cgu_workproduct_to_incr_comp_cache_dir(sess, &module.name, &path)
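
Here the closure `|path| path.clone()` only clones the value it is given, which is what clippy's `map_clone` lint flags; `Option::cloned` (and `Iterator::cloned`) says the same thing by name. A small sketch with invented values:

    // Hypothetical illustration of clippy::map_clone.
    fn main() {
        let object: Option<String> = Some("artifact.o".to_string());

        // Before: a closure that only clones the referenced value.
        let a: Option<String> = object.as_ref().map(|path| path.clone());
        // After: Option::cloned does the same thing directly.
        let b: Option<String> = object.as_ref().cloned();

        assert_eq!(a, b);
    }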

----- changed file (path not shown) -----

@@ -85,11 +85,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
             debug!("try_report_named_anon_conflict: ret ty {:?}", ty);
             if sub == &ty::ReStatic
-                && v.0
-                    .into_iter()
-                    .filter(|t| t.span.desugaring_kind().is_none())
-                    .next()
-                    .is_some()
+                && v.0.into_iter().find(|t| t.span.desugaring_kind().is_none()).is_some()
             {
                 // If the failure is due to a `'static` requirement coming from a `dyn` or
                 // `impl` Trait that *isn't* caused by `async fn` desugaring, handle this case
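
`.filter(p).next()` is equivalent to `.find(p)`, which is what clippy's `filter_next` lint suggests and what this hunk adopts. Clippy also has a `search_is_some` lint that would go one step further and turn `find(p).is_some()` into `any(p)`, but the commit stops at `find`. A sketch with invented data:

    // Hypothetical illustration of clippy::filter_next.
    fn main() {
        let spans = vec![3, 7, 10, 12];

        // Before: filter followed immediately by next.
        let first_even_a = spans.iter().filter(|n| **n % 2 == 0).next();
        // After: find expresses the same search in one call.
        let first_even_b = spans.iter().find(|n| **n % 2 == 0);

        assert_eq!(first_even_a, first_even_b);
        // search_is_some would go further still:
        assert_eq!(first_even_b.is_some(), spans.iter().any(|n| *n % 2 == 0));
    }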

----- changed file (path not shown) -----

@@ -257,7 +257,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
                 param.param_ty.to_string(),
                 Applicability::MaybeIncorrect,
             );
-        } else if let Some(_) = opaque
+        } else if opaque
             .bounds
             .iter()
             .filter_map(|arg| match arg {
@@ -269,6 +269,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
                 _ => None,
             })
             .next()
+            .is_some()
         {
         } else {
             err.span_suggestion_verbose(
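
`if let Some(_) = expr` only asks whether the value is `Some`; that matches clippy's `redundant_pattern_matching` lint, and the commit rewrites the long iterator chain to end in `.is_some()` instead. A minimal sketch with an invented condition:

    // Hypothetical illustration of clippy::redundant_pattern_matching.
    fn main() {
        let bounds = ["'static", "Send"];
        let lifetime_bound = bounds.iter().find(|b| b.starts_with('\''));

        // Before: a pattern match whose binding is never used.
        if let Some(_) = lifetime_bound {
            println!("has a lifetime bound");
        }
        // After: the boolean question is asked directly.
        if lifetime_bound.is_some() {
            println!("has a lifetime bound");
        }
    }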

----- changed file (path not shown) -----

@@ -50,7 +50,7 @@ impl TypeRelation<'tcx> for Glb<'combine, 'infcx, 'tcx> {
             ty::Invariant => self.fields.equate(self.a_is_expected).relate(a, b),
             ty::Covariant => self.relate(a, b),
             // FIXME(#41044) -- not correct, need test
-            ty::Bivariant => Ok(a.clone()),
+            ty::Bivariant => Ok(a),
             ty::Contravariant => self.fields.lub(self.a_is_expected).relate(a, b),
         }
     }
@@ -97,7 +97,7 @@ impl TypeRelation<'tcx> for Glb<'combine, 'infcx, 'tcx> {
         // very challenging, switch to invariance. This is obviously
         // overly conservative but works ok in practice.
         self.relate_with_variance(ty::Variance::Invariant, a, b)?;
-        Ok(a.clone())
+        Ok(a)
     }
 }
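
This and several later hunks (the Sub relation, another relate impl, dropck's SimpleEqRelation, Autoderef::span) replace `a.clone()` with `a`. That is clippy's `clone_on_copy` lint: the values involved (`Ty<'tcx>`, `Span`) are `Copy`, so `.clone()` is just a more verbose copy. A sketch with a made-up `Copy` struct, not rustc's real `Span`:

    // Hypothetical illustration of clippy::clone_on_copy.
    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span {
        lo: u32,
        hi: u32,
    }

    fn relate(a: Span) -> Result<Span, String> {
        // Before: Ok(a.clone()) -- the clone is just a copy.
        // After: the value is Copy, so it can be returned directly.
        Ok(a)
    }

    fn main() {
        let s = Span { lo: 0, hi: 4 };
        assert_eq!(relate(s), Ok(s));
    }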

----- changed file (path not shown) -----

@@ -719,7 +719,7 @@ where
             self.a_scopes.pop().unwrap();
         }
-        Ok(a.clone())
+        Ok(a)
     }
 }

----- changed file (path not shown) -----

@@ -288,9 +288,9 @@ impl<'me, 'tcx> LeakCheck<'me, 'tcx> {
     ) -> TypeError<'tcx> {
         debug!("error: placeholder={:?}, other_region={:?}", placeholder, other_region);
         if self.overly_polymorphic {
-            return TypeError::RegionsOverlyPolymorphic(placeholder.name, other_region);
+            TypeError::RegionsOverlyPolymorphic(placeholder.name, other_region)
         } else {
-            return TypeError::RegionsInsufficientlyPolymorphic(placeholder.name, other_region);
+            TypeError::RegionsInsufficientlyPolymorphic(placeholder.name, other_region)
         }
     }
 }

----- changed file (path not shown) -----

@@ -68,7 +68,7 @@ impl TypeRelation<'tcx> for Sub<'combine, 'infcx, 'tcx> {
         match variance {
             ty::Invariant => self.fields.equate(self.a_is_expected).relate(a, b),
             ty::Covariant => self.relate(a, b),
-            ty::Bivariant => Ok(a.clone()),
+            ty::Bivariant => Ok(a),
             ty::Contravariant => self.with_expected_switched(|this| this.relate(b, a)),
         }
     }

----- changed file (path not shown) -----

@@ -1074,7 +1074,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
             }
             // If `ty` is a `repr(transparent)` newtype, and the non-zero-sized type is a generic
             // argument, which after substitution, is `()`, then this branch can be hit.
-            FfiResult::FfiUnsafe { ty, .. } if is_return_type && ty.is_unit() => return,
+            FfiResult::FfiUnsafe { ty, .. } if is_return_type && ty.is_unit() => {}
             FfiResult::FfiUnsafe { ty, reason, help } => {
                 self.emit_ffi_unsafe_type_lint(ty, sp, &reason, help.as_deref());
             }

----- changed file (path not shown) -----

@@ -162,7 +162,7 @@ impl<'a, 'tcx> SpecializedEncoder<ExpnId> for EncodeContext<'a, 'tcx> {
     fn specialized_encode(&mut self, expn: &ExpnId) -> Result<(), Self::Error> {
         rustc_span::hygiene::raw_encode_expn_id(
             *expn,
-            &mut self.hygiene_ctxt,
+            &self.hygiene_ctxt,
             ExpnDataEncodeMode::Metadata,
             self,
         )
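
Passing `&mut self.hygiene_ctxt` where the callee only takes a shared reference is what clippy's `unnecessary_mut_passed` lint reports; a `&mut T` coerces to `&T`, so dropping the `mut` changes nothing except the borrow it holds. A sketch with invented types (not the real `raw_encode_expn_id` signature):

    // Hypothetical illustration of clippy::unnecessary_mut_passed.
    struct HygieneContext {
        entries: usize,
    }

    // The callee only needs read access.
    fn encode(ctxt: &HygieneContext) -> usize {
        ctxt.entries
    }

    fn main() {
        let mut ctxt = HygieneContext { entries: 3 };
        // Before: encode(&mut ctxt) -- a mutable borrow where a shared one suffices.
        // After: pass the shared reference the signature actually asks for.
        let n = encode(&ctxt);
        ctxt.entries += n;
        println!("{}", ctxt.entries);
    }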

----- changed file (path not shown) -----

@@ -128,7 +128,7 @@ pub struct DropckOutlivesResult<'tcx> {
 impl<'tcx> DropckOutlivesResult<'tcx> {
     pub fn report_overflows(&self, tcx: TyCtxt<'tcx>, span: Span, ty: Ty<'tcx>) {
-        if let Some(overflow_ty) = self.overflows.iter().next() {
+        if let Some(overflow_ty) = self.overflows.get(0) {
             let mut err = struct_span_err!(
                 tcx.sess,
                 span,
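
On a slice or `Vec`, `.iter().next()` is a roundabout way to look at the first element; clippy suggests indexing directly (the lint is `iter_next_slice`, if I remember the name right), and `.get(0)` or `.first()` both work. Sketch:

    // Hypothetical illustration: .iter().next() versus .get(0) on a Vec.
    fn main() {
        let overflows = vec!["Ty1", "Ty2"];

        // Before: build an iterator only to take its first item.
        let a = overflows.iter().next();
        // After: look up index 0 directly; .first() is equivalent.
        let b = overflows.get(0);

        assert_eq!(a, b);
        assert_eq!(b, overflows.first());
    }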

----- changed file (path not shown) -----

@@ -868,7 +868,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
             }
         }
     }
-    return normal_ret;
+    normal_ret
 }
 /// Finds the span of arguments of a closure (within `maybe_closure_span`)

----- changed file (path not shown) -----

@@ -361,7 +361,7 @@ fn optimization_applies<'tcx>(
     }
     trace!("SUCCESS: optimization applies!");
-    return true;
+    true
 }
 impl<'tcx> MirPass<'tcx> for SimplifyArmIdentity {

----- changed file (path not shown) -----

@@ -115,7 +115,7 @@ pub fn equal_up_to_regions(
         T: Relate<'tcx>,
     {
         self.relate(a.skip_binder(), b.skip_binder())?;
-        Ok(a.clone())
+        Ok(a)
     }
 }

----- changed file (path not shown) -----

@@ -820,7 +820,7 @@ fn find_skips_from_snippet(
     }
     let r_start = str_style.map(|r| r + 1).unwrap_or(0);
-    let r_end = str_style.map(|r| r).unwrap_or(0);
+    let r_end = str_style.unwrap_or(0);
     let s = &snippet[r_start + 1..snippet.len() - r_end - 1];
     (find_skips(s, str_style.is_some()), true)
 }
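
`str_style.map(|r| r)` maps every value to itself, so the call can simply be dropped; clippy has a lint for identity `map` calls (I believe it is named `map_identity`, though the exact lint responsible here is a guess). Sketch:

    // Hypothetical illustration: an identity .map() adds nothing.
    fn main() {
        let str_style: Option<usize> = Some(2);

        // Before: identity closure adds noise but no behavior.
        let r_end_a = str_style.map(|r| r).unwrap_or(0);
        // After: the map is dropped entirely.
        let r_end_b = str_style.unwrap_or(0);

        assert_eq!(r_end_a, r_end_b);
    }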

----- changed file (path not shown) -----

@@ -702,7 +702,7 @@ impl<'tcx> SaveContext<'tcx> {
             Res::Def(HirDefKind::ConstParam, def_id) => {
                 Some(Ref { kind: RefKind::Variable, span, ref_id: id_from_def_id(def_id) })
             }
-            Res::Def(HirDefKind::Ctor(_, ..), def_id) => {
+            Res::Def(HirDefKind::Ctor(..), def_id) => {
                 // This is a reference to a tuple struct or an enum variant where the def_id points
                 // to an invisible constructor function. That is not a very useful
                 // def, so adjust to point to the tuple struct or enum variant itself.
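
In `HirDefKind::Ctor(_, ..)` the leading `_` is already covered by `..`, which is what clippy's `unneeded_wildcard_pattern` lint points out. Sketch with an invented enum:

    // Hypothetical illustration of clippy::unneeded_wildcard_pattern.
    enum DefKind {
        Ctor(u8, u8, u8),
        Fn,
    }

    fn describe(kind: &DefKind) -> &'static str {
        match kind {
            // Before: DefKind::Ctor(_, ..) -- the `_` adds nothing over `..`.
            // After: `..` alone already ignores every field.
            DefKind::Ctor(..) => "constructor",
            DefKind::Fn => "function",
        }
    }

    fn main() {
        println!("{}", describe(&DefKind::Ctor(0, 1, 2)));
        println!("{}", describe(&DefKind::Fn));
    }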

----- changed file (path not shown) -----

@@ -98,7 +98,7 @@ impl<'a> FileSearch<'a> {
         p.push(RUST_LIB_DIR);
         p.push(&self.triple);
         p.push("bin");
-        if self_contained { vec![p.clone(), p.join("self-contained")] } else { vec![p.clone()] }
+        if self_contained { vec![p.clone(), p.join("self-contained")] } else { vec![p] }
     }
 }
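
The `else` branch was cloning `p` even though that clone is the last use of `p`, so the value can be moved into the vector instead; this is likely the pattern clippy's `redundant_clone` lint reports. The first branch still needs its clone because `p` is used again in `p.join(...)`. Sketch:

    // Hypothetical illustration of clippy::redundant_clone.
    use std::path::PathBuf;

    fn search_paths(p: PathBuf, self_contained: bool) -> Vec<PathBuf> {
        // First branch: clone needed, `p` is used twice.
        // Second branch: last use of `p`, so it can be moved.
        if self_contained { vec![p.clone(), p.join("self-contained")] } else { vec![p] }
    }

    fn main() {
        let base = PathBuf::from("rustlib/x86_64-unknown-linux-gnu/bin");
        println!("{:?}", search_paths(base.clone(), true));
        println!("{:?}", search_paths(base, false));
    }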

----- changed file (path not shown) -----

@@ -1030,7 +1030,7 @@ pub fn decode_expn_id<
         drop(expns);
         expn_id
     });
-    return Ok(expn_id);
+    Ok(expn_id)
 }
 // Decodes `SyntaxContext`, using the provided `HygieneDecodeContext`
@@ -1103,7 +1103,7 @@ pub fn decode_syntax_context<
         assert_eq!(dummy.dollar_crate_name, kw::Invalid);
     });
-    return Ok(new_ctxt);
+    Ok(new_ctxt)
 }
 pub fn num_syntax_ctxts() -> usize {

----- changed file (path not shown) -----

@@ -187,7 +187,7 @@ impl<'a, 'tcx> Autoderef<'a, 'tcx> {
     }
     pub fn span(&self) -> Span {
-        self.span.clone()
+        self.span
     }
     pub fn reached_recursion_limit(&self) -> bool {

----- changed file (path not shown) -----

@@ -495,7 +495,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
         if let Ok(src) = self.tcx.sess.source_map().span_to_snippet(span) {
             // Don't care about `&mut` because `DerefMut` is used less
             // often and user will not expect autoderef happens.
-            if src.starts_with("&") && !src.starts_with("&mut ") {
+            if src.starts_with('&') && !src.starts_with("&mut ") {
                 let derefs = "*".repeat(steps);
                 err.span_suggestion(
                     span,
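
`starts_with("&")` works, but clippy's `single_char_pattern` lint prefers the `char` form `starts_with('&')` for a one-character literal; the longer `"&mut "` pattern has to stay a string. Sketch:

    // Hypothetical illustration of clippy::single_char_pattern.
    fn main() {
        let src = "&mut value";

        // Before: a one-character string literal as the pattern.
        let a = src.starts_with("&") && !src.starts_with("&mut ");
        // After: a char pattern for the single character.
        let b = src.starts_with('&') && !src.starts_with("&mut ");

        assert_eq!(a, b);
        println!("{}", b);
    }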

----- changed file (path not shown) -----

@@ -141,7 +141,7 @@ impl<'tcx> chalk_solve::RustIrDatabase<RustInterner<'tcx>> for RustIrDatabase<'t
         let predicates = self.tcx.predicates_of(adt_def.did).predicates;
         let where_clauses: Vec<_> = predicates
-            .into_iter()
+            .iter()
             .map(|(wc, _)| wc.subst(self.tcx, bound_vars))
             .filter_map(|wc| LowerInto::<Option<chalk_ir::QuantifiedWhereClause<RustInterner<'tcx>>>>::lower_into(wc, &self.interner))
             .collect();
@@ -174,7 +174,7 @@ impl<'tcx> chalk_solve::RustIrDatabase<RustInterner<'tcx>> for RustIrDatabase<'t
                 phantom_data: adt_def.is_phantom_data(),
             },
         });
-        return struct_datum;
+        struct_datum
     }
     fn fn_def_datum(
@@ -187,7 +187,7 @@ impl<'tcx> chalk_solve::RustIrDatabase<RustInterner<'tcx>> for RustIrDatabase<'t
         let predicates = self.tcx.predicates_defined_on(def_id).predicates;
         let where_clauses: Vec<_> = predicates
-            .into_iter()
+            .iter()
             .map(|(wc, _)| wc.subst(self.tcx, &bound_vars))
             .filter_map(|wc| LowerInto::<Option<chalk_ir::QuantifiedWhereClause<RustInterner<'tcx>>>>::lower_into(wc, &self.interner)).collect();
@@ -276,7 +276,7 @@ impl<'tcx> chalk_solve::RustIrDatabase<RustInterner<'tcx>> for RustIrDatabase<'t
             parameters[0].assert_ty_ref(&self.interner).could_match(&self.interner, &lowered_ty)
         });
-        let impls = matched_impls.map(|matched_impl| chalk_ir::ImplId(matched_impl)).collect();
+        let impls = matched_impls.map(chalk_ir::ImplId).collect();
         impls
     }
@@ -379,7 +379,7 @@ impl<'tcx> chalk_solve::RustIrDatabase<RustInterner<'tcx>> for RustIrDatabase<'t
                 ty::AdtKind::Struct | ty::AdtKind::Union => None,
                 ty::AdtKind::Enum => {
                     let constraint = self.tcx.adt_sized_constraint(adt_def.did);
-                    if constraint.0.len() > 0 { unimplemented!() } else { Some(true) }
+                    if !constraint.0.is_empty() { unimplemented!() } else { Some(true) }
                 }
             },
             _ => None,
@@ -398,7 +398,7 @@ impl<'tcx> chalk_solve::RustIrDatabase<RustInterner<'tcx>> for RustIrDatabase<'t
                 ty::AdtKind::Struct | ty::AdtKind::Union => None,
                 ty::AdtKind::Enum => {
                     let constraint = self.tcx.adt_sized_constraint(adt_def.did);
-                    if constraint.0.len() > 0 { unimplemented!() } else { Some(true) }
+                    if !constraint.0.is_empty() { unimplemented!() } else { Some(true) }
                 }
             },
             _ => None,
@@ -440,7 +440,7 @@ impl<'tcx> chalk_solve::RustIrDatabase<RustInterner<'tcx>> for RustIrDatabase<'t
             FnOnce => self.tcx.lang_items().fn_once_trait(),
             Unsize => self.tcx.lang_items().unsize_trait(),
         };
-        def_id.map(|t| chalk_ir::TraitId(t))
+        def_id.map(chalk_ir::TraitId)
     }
     fn is_object_safe(&self, trait_id: chalk_ir::TraitId<RustInterner<'tcx>>) -> bool {
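
The chalk hunks bundle several more lint families: `.into_iter()` called on a shared reference iterates by reference anyway, so `.iter()` says what actually happens (clippy's `into_iter_on_ref`); closures such as `|t| chalk_ir::TraitId(t)` only forward to a constructor, so the path can be passed directly (`redundant_closure`); and `len() > 0` becomes `!is_empty()` (`len_zero`). A compact sketch with made-up types:

    // Hypothetical illustration of redundant_closure, len_zero and into_iter_on_ref.
    #[derive(Debug, PartialEq)]
    struct TraitId(u32);

    fn main() {
        let raw_ids = vec![1u32, 2, 3];

        // redundant_closure: the closure only re-wraps its argument...
        let a: Vec<TraitId> = raw_ids.iter().copied().map(|t| TraitId(t)).collect();
        // ...so the tuple-struct constructor can be passed as a plain function.
        let b: Vec<TraitId> = raw_ids.iter().copied().map(TraitId).collect();
        assert_eq!(a, b);

        // len_zero: ask the question directly instead of comparing a length.
        assert_eq!(raw_ids.len() > 0, !raw_ids.is_empty());

        // into_iter_on_ref: on &Vec<T>, into_iter() and iter() are the same iterator.
        assert!((&raw_ids).into_iter().eq(raw_ids.iter()));
    }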

----- changed file (path not shown) -----

@@ -443,7 +443,7 @@ fn opaque_type_projection_predicates(
     let bounds = tcx.predicates_of(def_id);
     let predicates =
-        util::elaborate_predicates(tcx, bounds.predicates.into_iter().map(|&(pred, _)| pred));
+        util::elaborate_predicates(tcx, bounds.predicates.iter().map(|&(pred, _)| pred));
     let filtered_predicates = predicates.filter_map(|obligation| {
         let pred = obligation.predicate;

----- changed file (path not shown) -----

@@ -368,6 +368,6 @@ impl TypeRelation<'tcx> for SimpleEqRelation<'tcx> {
         let anon_b = self.tcx.anonymize_late_bound_regions(&b);
         self.relate(anon_a.skip_binder(), anon_b.skip_binder())?;
-        Ok(a.clone())
+        Ok(a)
     }
 }

----- changed file (path not shown) -----

@@ -200,13 +200,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         // Gather up expressions we want to munge.
         let mut exprs = vec![expr];
-        loop {
-            match exprs.last().unwrap().kind {
-                hir::ExprKind::Field(ref expr, _)
-                | hir::ExprKind::Index(ref expr, _)
-                | hir::ExprKind::Unary(hir::UnOp::UnDeref, ref expr) => exprs.push(&expr),
-                _ => break,
-            }
+        while let hir::ExprKind::Field(ref expr, _)
+        | hir::ExprKind::Index(ref expr, _)
+        | hir::ExprKind::Unary(hir::UnOp::UnDeref, ref expr) = exprs.last().unwrap().kind
+        {
+            exprs.push(&expr);
         }
         debug!("convert_place_derefs_to_mutable: exprs={:?}", exprs);
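
A `loop { match ... { pattern => body, _ => break } }` whose only non-break arm binds one pattern family is exactly what `while let` expresses, and clippy's `while_let_loop` lint suggests the rewrite used above. Sketch with a plain `Vec` instead of HIR expressions:

    // Hypothetical illustration of clippy::while_let_loop.
    fn main() {
        // Before: an explicit loop/match with a break arm.
        let mut stack = vec![3, 2, 1];
        let mut drained_a = Vec::new();
        loop {
            match stack.pop() {
                Some(n) => drained_a.push(n),
                None => break,
            }
        }

        // After: while let covers the same control flow in one construct.
        let mut stack = vec![3, 2, 1];
        let mut drained_b = Vec::new();
        while let Some(n) = stack.pop() {
            drained_b.push(n);
        }

        assert_eq!(drained_a, drained_b);
    }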

----- changed file (path not shown) -----

@@ -583,7 +583,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
                 self.tcx()
                     .sess
                     .delay_span_bug(span, "struct or tuple struct pattern not applied to an ADT");
-                return Err(());
+                Err(())
             }
         }
     }
@@ -596,7 +596,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
             ty::Tuple(substs) => Ok(substs.len()),
             _ => {
                 self.tcx().sess.delay_span_bug(span, "tuple pattern not applied to a tuple");
-                return Err(());
+                Err(())
             }
         }
     }

----- changed file (path not shown) -----

@@ -69,14 +69,14 @@ impl DocFS {
             let sender = self.errors.clone().expect("can't write after closing");
             rayon::spawn(move || {
                 fs::write(&path, contents).unwrap_or_else(|e| {
-                    sender
-                        .send(format!("\"{}\": {}", path.display(), e))
-                        .expect(&format!("failed to send error on \"{}\"", path.display()));
+                    sender.send(format!("\"{}\": {}", path.display(), e)).unwrap_or_else(|_| {
+                        panic!("failed to send error on \"{}\"", path.display())
+                    })
                 });
             });
-            Ok(())
         } else {
-            Ok(try_err!(fs::write(&path, contents), path))
+            try_err!(fs::write(&path, contents), path);
         }
+        Ok(())
     }
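
`.expect(&format!(...))` builds and allocates the panic message even when the call succeeds; clippy's `expect_fun_call` lint suggests `.unwrap_or_else(|_| panic!(...))`, which only formats the message on the failure path, and the hunk above follows that shape (also hoisting the final `Ok(())` out of the `if`/`else`). Sketch with a plain standard-library channel:

    // Hypothetical illustration of clippy::expect_fun_call.
    use std::sync::mpsc::channel;

    fn main() {
        let (sender, receiver) = channel::<String>();
        let path = "index.html";

        // Before (allocates the message even when send succeeds):
        //     sender.send(path.to_string()).expect(&format!("failed to send \"{}\"", path));
        // After: the message is only formatted if send returns Err.
        sender
            .send(path.to_string())
            .unwrap_or_else(|_| panic!("failed to send \"{}\"", path));

        println!("{}", receiver.recv().unwrap());
    }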
}