Rollup merge of #118147 - Nilstrieb:no-redundant-casts, r=WaffleLapkin

Fix some unnecessary casts

Ran `x clippy compiler -Aclippy::all -Wclippy::unnecessary_cast --fix`, with some manual review to ensure every fix is correct.
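For context, this is the kind of change the lint drives: `clippy::unnecessary_cast` flags a cast of an expression to the type it already has, and `--fix` applies the suggestion that drops the cast. The sketch below is illustrative only; the function and variable names are made up and are not code from this PR.

    #![warn(clippy::unnecessary_cast)]

    // Stand-in for an API that already takes a `u64`, like the builder
    // methods touched in this PR.
    fn const_usize(value: u64) -> u64 {
        value
    }

    fn main() {
        let offset: u64 = 8;
        // Before: `offset` is already a `u64`, so `as u64` is redundant and
        // clippy warns with `unnecessary_cast`.
        let _before = const_usize(offset as u64);
        // After `--fix`, the redundant cast is simply removed.
        let _after = const_usize(offset);
    }

In the command above, `-Aclippy::all` allows the default clippy lints and `-Wclippy::unnecessary_cast` turns this one lint back on, so `--fix` should only apply fixes for redundant casts.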
Michael Goulet 2023-11-22 09:28:51 -08:00 committed by GitHub
commit 040151a4be
10 changed files with 18 additions and 30 deletions


@@ -474,27 +474,25 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                cg_base.project_index(bx, llindex)
            }
            mir::ProjectionElem::ConstantIndex { offset, from_end: false, min_length: _ } => {
-                let lloffset = bx.cx().const_usize(offset as u64);
+                let lloffset = bx.cx().const_usize(offset);
                cg_base.project_index(bx, lloffset)
            }
            mir::ProjectionElem::ConstantIndex { offset, from_end: true, min_length: _ } => {
-                let lloffset = bx.cx().const_usize(offset as u64);
+                let lloffset = bx.cx().const_usize(offset);
                let lllen = cg_base.len(bx.cx());
                let llindex = bx.sub(lllen, lloffset);
                cg_base.project_index(bx, llindex)
            }
            mir::ProjectionElem::Subslice { from, to, from_end } => {
-                let mut subslice = cg_base.project_index(bx, bx.cx().const_usize(from as u64));
+                let mut subslice = cg_base.project_index(bx, bx.cx().const_usize(from));
                let projected_ty =
                    PlaceTy::from_ty(cg_base.layout.ty).projection_ty(tcx, *elem).ty;
                subslice.layout = bx.cx().layout_of(self.monomorphize(projected_ty));
                if subslice.layout.is_unsized() {
                    assert!(from_end, "slice subslices should be `from_end`");
-                    subslice.llextra = Some(bx.sub(
-                        cg_base.llextra.unwrap(),
-                        bx.cx().const_usize((from as u64) + (to as u64)),
-                    ));
+                    subslice.llextra =
+                        Some(bx.sub(cg_base.llextra.unwrap(), bx.cx().const_usize(from + to)));
                }
                subslice


@@ -493,7 +493,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
                };
                let ptr = ecx.allocate_ptr(
-                    Size::from_bytes(size as u64),
+                    Size::from_bytes(size),
                    align,
                    interpret::MemoryKind::Machine(MemoryKind::Heap),
                )?;


@@ -1861,7 +1861,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
        // method yet. So create fresh variables here for those too,
        // if there are any.
        let generics = self.tcx.generics_of(method);
-        assert_eq!(args.len(), generics.parent_count as usize);
+        assert_eq!(args.len(), generics.parent_count);
        let xform_fn_sig = if generics.params.is_empty() {
            fn_sig.instantiate(self.tcx, args)


@@ -187,7 +187,7 @@ impl<'a, 'tcx> TypeFolder<TyCtxt<'tcx>> for InferenceFudger<'a, 'tcx> {
                if self.type_vars.0.contains(&vid) {
                    // This variable was created during the fudging.
                    // Recreate it with a fresh variable here.
-                    let idx = (vid.as_usize() - self.type_vars.0.start.as_usize()) as usize;
+                    let idx = vid.as_usize() - self.type_vars.0.start.as_usize();
                    let origin = self.type_vars.1[idx];
                    self.infcx.next_ty_var(origin)
                } else {
@@ -236,7 +236,7 @@ impl<'a, 'tcx> TypeFolder<TyCtxt<'tcx>> for InferenceFudger<'a, 'tcx> {
                if self.const_vars.0.contains(&vid) {
                    // This variable was created during the fudging.
                    // Recreate it with a fresh variable here.
-                    let idx = (vid.index() - self.const_vars.0.start.index()) as usize;
+                    let idx = vid.index() - self.const_vars.0.start.index();
                    let origin = self.const_vars.1[idx];
                    self.infcx.next_const_var(ct.ty(), origin)
                } else {


@@ -681,17 +681,13 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
        for constraint in self.data.constraints.keys() {
            match *constraint {
                Constraint::VarSubVar(a_id, b_id) => {
-                    graph.add_edge(
-                        NodeIndex(a_id.index() as usize),
-                        NodeIndex(b_id.index() as usize),
-                        *constraint,
-                    );
+                    graph.add_edge(NodeIndex(a_id.index()), NodeIndex(b_id.index()), *constraint);
                }
                Constraint::RegSubVar(_, b_id) => {
-                    graph.add_edge(dummy_source, NodeIndex(b_id.index() as usize), *constraint);
+                    graph.add_edge(dummy_source, NodeIndex(b_id.index()), *constraint);
                }
                Constraint::VarSubReg(a_id, _) => {
-                    graph.add_edge(NodeIndex(a_id.index() as usize), dummy_sink, *constraint);
+                    graph.add_edge(NodeIndex(a_id.index()), dummy_sink, *constraint);
                }
                Constraint::RegSubReg(..) => {
                    // this would be an edge from `dummy_source` to
@@ -878,7 +874,7 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
    ) {
        debug!("process_edges(source_vid={:?}, dir={:?})", source_vid, dir);
-        let source_node_index = NodeIndex(source_vid.index() as usize);
+        let source_node_index = NodeIndex(source_vid.index());
        for (_, edge) in graph.adjacent_edges(source_node_index, dir) {
            match edge.data {
                Constraint::VarSubVar(from_vid, to_vid) => {


@@ -317,7 +317,7 @@ impl<'tcx> RegionConstraintStorage<'tcx> {
            match undo_entry {
                AddVar(vid) => {
                    self.var_infos.pop().unwrap();
-                    assert_eq!(self.var_infos.len(), vid.index() as usize);
+                    assert_eq!(self.var_infos.len(), vid.index());
                }
                AddConstraint(ref constraint) => {
                    self.data.constraints.remove(constraint);


@@ -2268,11 +2268,7 @@ fn encode_metadata_impl(tcx: TyCtxt<'_>, path: &Path) {
    file.seek(std::io::SeekFrom::Start(pos_before_seek)).unwrap();

    // Record metadata size for self-profiling
-    tcx.prof.artifact_size(
-        "crate_metadata",
-        "crate_metadata",
-        file.metadata().unwrap().len() as u64,
-    );
+    tcx.prof.artifact_size("crate_metadata", "crate_metadata", file.metadata().unwrap().len());
}

pub fn provide(providers: &mut Providers) {


@@ -497,7 +497,7 @@ impl<I: Idx, const N: usize, T: FixedSizeEncoding<ByteArray = [u8; N]>> TableBui
        }
        LazyTable::from_position_and_encoded_size(
-            NonZeroUsize::new(pos as usize).unwrap(),
+            NonZeroUsize::new(pos).unwrap(),
            width,
            self.blocks.len(),
        )


@@ -95,9 +95,7 @@ impl<'tcx> PlaceTy<'tcx> {
            ProjectionElem::Subslice { from, to, from_end } => {
                PlaceTy::from_ty(match self.ty.kind() {
                    ty::Slice(..) => self.ty,
-                    ty::Array(inner, _) if !from_end => {
-                        Ty::new_array(tcx, *inner, (to - from) as u64)
-                    }
+                    ty::Array(inner, _) if !from_end => Ty::new_array(tcx, *inner, to - from),
                    ty::Array(inner, size) if from_end => {
                        let size = size.eval_target_usize(tcx, param_env);
                        let len = size - from - to;


@@ -189,7 +189,7 @@ impl<'tcx> SymbolMangler<'tcx> {
        self.push("N");
        self.out.push(ns);
        print_prefix(self)?;
-        self.push_disambiguator(disambiguator as u64);
+        self.push_disambiguator(disambiguator);
        self.push_ident(name);
        Ok(())
    }