Auto merge of #91957 - nnethercote:rm-SymbolStr, r=oli-obk

Remove `SymbolStr`

This was originally proposed in https://github.com/rust-lang/rust/pull/74554#discussion_r466203544. As well as removing the icky `SymbolStr` type, it allows the removal of a lot of `&` and `*` occurrences.
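For illustration only (not part of the patch), the call-site change looks roughly like this, assuming some `sym: Symbol` from `rustc_span`:

```rust
use rustc_span::Symbol; // compiler-internal crate; sketch only

fn demo(sym: Symbol) {
    // Before: `as_str()` returned a `SymbolStr`, so call sites needed `&*`/`&`
    // to reach the underlying `&str`:
    //     match &*sym.as_str() { "Rust" | "C" => {} _ => {} }

    // After: `as_str()` returns `&str` directly, so the sigils go away:
    match sym.as_str() {
        "Rust" | "C" => {}
        _ => {}
    }
}
```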

Best reviewed one commit at a time.

r? `@oli-obk`
bors 2021-12-19 09:31:37 +00:00
commit a41a6925ba
140 changed files with 354 additions and 415 deletions


@ -35,12 +35,12 @@ impl LitKind {
LitKind::Bool(symbol == kw::True)
}
token::Byte => {
return unescape_byte(&symbol.as_str())
return unescape_byte(symbol.as_str())
.map(LitKind::Byte)
.map_err(|_| LitError::LexerError);
}
token::Char => {
return unescape_char(&symbol.as_str())
return unescape_char(symbol.as_str())
.map(LitKind::Char)
.map_err(|_| LitError::LexerError);
}


@ -1278,7 +1278,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
pub(super) fn lower_abi(&mut self, abi: StrLit) -> abi::Abi {
abi::lookup(&abi.symbol_unescaped.as_str()).unwrap_or_else(|| {
abi::lookup(abi.symbol_unescaped.as_str()).unwrap_or_else(|| {
self.error_on_invalid_abi(abi);
abi::Abi::Rust
})


@ -580,8 +580,7 @@ impl<'a> AstValidator<'a> {
/// An item in `extern { ... }` cannot use non-ascii identifier.
fn check_foreign_item_ascii_only(&self, ident: Ident) {
let symbol_str = ident.as_str();
if !symbol_str.is_ascii() {
if !ident.as_str().is_ascii() {
let n = 83942;
self.err_handler()
.struct_span_err(


@ -61,7 +61,7 @@ impl<'a> PostExpansionVisitor<'a> {
fn check_abi(&self, abi: ast::StrLit) {
let ast::StrLit { symbol_unescaped, span, .. } = abi;
match &*symbol_unescaped.as_str() {
match symbol_unescaped.as_str() {
// Stable
"Rust" | "C" | "cdecl" | "stdcall" | "fastcall" | "aapcs" | "win64" | "sysv64"
| "system" => {}


@ -204,7 +204,7 @@ pub fn literal_to_string(lit: token::Lit) -> String {
};
if let Some(suffix) = suffix {
out.push_str(&suffix.as_str())
out.push_str(suffix.as_str())
}
out
@ -384,7 +384,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
}
fn print_symbol(&mut self, sym: Symbol, style: ast::StrStyle) {
self.print_string(&sym.as_str(), style);
self.print_string(sym.as_str(), style);
}
fn print_inner_attributes(&mut self, attrs: &[ast::Attribute]) {


@ -236,7 +236,7 @@ where
// These unwraps are safe because `get` ensures the meta item
// is a name/value pair string literal.
issue_num = match &*issue.unwrap().as_str() {
issue_num = match issue.unwrap().as_str() {
"none" => None,
issue => {
let emit_diag = |msg: &str| {
@ -301,7 +301,7 @@ where
match (feature, reason, issue) {
(Some(feature), reason, Some(_)) => {
if !rustc_lexer::is_ident(&feature.as_str()) {
if !rustc_lexer::is_ident(feature.as_str()) {
handle_errors(
&sess.parse_sess,
attr.span,
@ -535,7 +535,7 @@ pub fn eval_condition(
return false;
}
};
let min_version = match parse_version(&min_version.as_str(), false) {
let min_version = match parse_version(min_version.as_str(), false) {
Some(ver) => ver,
None => {
sess.span_diagnostic


@ -416,7 +416,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
tcx,
generics,
&mut err,
&param.name.as_str(),
param.name.as_str(),
"Copy",
None,
);


@ -206,7 +206,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
{
let local_info = &self.body.local_decls[local].local_info;
if let Some(box LocalInfo::StaticRef { def_id, .. }) = *local_info {
buf.push_str(&self.infcx.tcx.item_name(def_id).as_str());
buf.push_str(self.infcx.tcx.item_name(def_id).as_str());
} else {
unreachable!();
}
@ -318,7 +318,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let decl = &self.body.local_decls[local];
match self.local_names[local] {
Some(name) if !decl.from_compiler_desugaring() => {
buf.push_str(&name.as_str());
buf.push_str(name.as_str());
Ok(())
}
_ => Err(()),


@ -573,7 +573,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, args: AsmArgs) -> Option<ast::Inl
template_snippet.as_ref().map(|s| Symbol::intern(s)),
template_sp,
));
let template_str = &template_str.as_str();
let template_str = template_str.as_str();
if let Some(InlineAsmArch::X86 | InlineAsmArch::X86_64) = ecx.sess.asm_arch {
let find_span = |needle: &str| -> Span {


@ -21,7 +21,7 @@ pub fn expand_concat(
match e.kind {
ast::ExprKind::Lit(ref lit) => match lit.kind {
ast::LitKind::Str(ref s, _) | ast::LitKind::Float(ref s, _) => {
accumulator.push_str(&s.as_str());
accumulator.push_str(s.as_str());
}
ast::LitKind::Char(c) => {
accumulator.push(c);


@ -29,7 +29,7 @@ pub fn expand_concat_idents<'cx>(
} else {
if let TokenTree::Token(token) = e {
if let Some((ident, _)) = token.ident() {
res_str.push_str(&ident.name.as_str());
res_str.push_str(ident.name.as_str());
continue;
}
}


@ -121,7 +121,7 @@ fn report_bad_target(sess: &Session, item: &Annotatable, span: Span) -> bool {
fn report_unexpected_literal(sess: &Session, lit: &ast::Lit) {
let help_msg = match lit.token.kind {
token::Str if rustc_lexer::is_ident(&lit.token.symbol.as_str()) => {
token::Str if rustc_lexer::is_ident(lit.token.symbol.as_str()) => {
format!("try using `#[derive({})]`", lit.token.symbol)
}
_ => "for example, write `#[derive(Debug)]` for `Debug`".to_string(),


@ -80,11 +80,11 @@ pub fn expand_env<'cx>(
}
let sp = cx.with_def_site_ctxt(sp);
let value = env::var(&*var.as_str()).ok().as_deref().map(Symbol::intern);
let value = env::var(var.as_str()).ok().as_deref().map(Symbol::intern);
cx.sess.parse_sess.env_depinfo.borrow_mut().insert((var, value));
let e = match value {
None => {
cx.span_err(sp, &msg.as_str());
cx.span_err(sp, msg.as_str());
return DummyResult::any(sp);
}
Some(value) => cx.expr_str(sp, value),


@ -955,7 +955,7 @@ pub fn expand_preparsed_format_args(
ast::StrStyle::Raw(raw) => Some(raw as usize),
};
let fmt_str = &fmt_str.as_str(); // for the suggestions below
let fmt_str = fmt_str.as_str(); // for the suggestions below
let fmt_snippet = ecx.source_map().span_to_snippet(fmt_sp).ok();
let mut parser = parse::Parser::new(
fmt_str,


@ -369,7 +369,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
TodoItem::Static(def_id) => {
//println!("static {:?}", def_id);
let section_name = tcx.codegen_fn_attrs(def_id).link_section.map(|s| s.as_str());
let section_name = tcx.codegen_fn_attrs(def_id).link_section;
let alloc = tcx.eval_static_initializer(def_id).unwrap();
@ -388,6 +388,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
if let Some(section_name) = section_name {
let (segment_name, section_name) = if tcx.sess.target.is_like_osx {
let section_name = section_name.as_str();
if let Some(names) = section_name.split_once(',') {
names
} else {
@ -397,7 +398,7 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant
));
}
} else {
("", &*section_name)
("", section_name.as_str())
};
data_ctx.set_segment_section(segment_name, section_name);
}


@ -84,7 +84,7 @@ fn reuse_workproduct_for_cgu(
let work_product = cgu.work_product(tcx);
if let Some(saved_file) = &work_product.saved_file {
let obj_out =
tcx.output_filenames(()).temp_path(OutputType::Object, Some(&cgu.name().as_str()));
tcx.output_filenames(()).temp_path(OutputType::Object, Some(cgu.name().as_str()));
object = Some(obj_out.clone());
let source_file = rustc_incremental::in_incr_comp_dir_sess(&tcx.sess, &saved_file);
if let Err(err) = rustc_fs_util::link_or_copy(&source_file, &obj_out) {
@ -176,7 +176,7 @@ fn module_codegen(
)
});
codegen_global_asm(tcx, &cgu.name().as_str(), &cx.global_asm);
codegen_global_asm(tcx, cgu.name().as_str(), &cx.global_asm);
codegen_result
}
@ -207,7 +207,7 @@ pub(crate) fn run_aot(
cgus.iter()
.map(|cgu| {
let cgu_reuse = determine_cgu_reuse(tcx, cgu);
tcx.sess.cgu_reuse_tracker.set_actual_reuse(&cgu.name().as_str(), cgu_reuse);
tcx.sess.cgu_reuse_tracker.set_actual_reuse(cgu.name().as_str(), cgu_reuse);
match cgu_reuse {
_ if backend_config.disable_incr_cache => {}


@ -33,7 +33,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
return value;
}
let global = self.global_string(&*symbol.as_str());
let global = self.global_string(symbol.as_str());
self.const_cstr_cache.borrow_mut().insert(symbol, global);
global


@ -17,7 +17,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
global.set_tls_model(self.tls_model);
}
if let Some(link_section) = link_section {
global.set_link_section(&link_section.as_str());
global.set_link_section(link_section.as_str());
}
global
}
@ -53,7 +53,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
global.set_tls_model(self.tls_model);
}
if let Some(link_section) = link_section {
global.set_link_section(&link_section.as_str());
global.set_link_section(link_section.as_str());
}
let global_address = global.get_address(None);
self.globals.borrow_mut().insert(name.to_string(), global_address);


@ -88,7 +88,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
let arg_tys = sig.inputs();
let ret_ty = sig.output();
let name = tcx.item_name(def_id);
let name_str = &*name.as_str();
let name_str = name.as_str();
let llret_ty = self.layout_of(ret_ty).gcc_type(self, true);
let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);


@ -52,7 +52,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
let sig =
tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), callee_ty.fn_sig(tcx));
let arg_tys = sig.inputs();
let name_str = &*name.as_str();
let name_str = name.as_str();
// every intrinsic below takes a SIMD vector as its first argument
require_simd!(arg_tys[0], "input");


@ -326,7 +326,7 @@ pub fn from_fn_attrs<'ll, 'tcx>(
.target_features
.iter()
.flat_map(|f| {
let feature = &f.as_str();
let feature = f.as_str();
llvm_util::to_llvm_feature(cx.tcx.sess, feature)
.into_iter()
.map(|f| format!("+{}", f))
@ -351,7 +351,7 @@ pub fn from_fn_attrs<'ll, 'tcx>(
let name =
codegen_fn_attrs.link_name.unwrap_or_else(|| cx.tcx.item_name(instance.def_id()));
let name = CString::new(&name.as_str()[..]).unwrap();
let name = CString::new(name.as_str()).unwrap();
llvm::AddFunctionAttrStringValue(
llfn,
llvm::AttributePlace::Function,


@ -79,7 +79,7 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol) -> (ModuleCodegen
&[cgu_name.to_string(), cgu.size_estimate().to_string()],
);
// Instantiate monomorphizations without filling out definitions yet...
let llvm_module = ModuleLlvm::new(tcx, &cgu_name.as_str());
let llvm_module = ModuleLlvm::new(tcx, cgu_name.as_str());
{
let cx = CodegenCx::new(tcx, cgu, &llvm_module);
let mono_items = cx.codegen_unit.items_in_deterministic_order(cx.tcx);
@ -143,7 +143,7 @@ pub fn set_link_section(llval: &Value, attrs: &CodegenFnAttrs) {
None => return,
};
unsafe {
let buf = SmallCStr::new(&sect.as_str());
let buf = SmallCStr::new(sect.as_str());
llvm::LLVMSetSection(llval, buf.as_ptr());
}
}


@ -320,7 +320,7 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
let dbg_cx = if tcx.sess.opts.debuginfo != DebugInfo::None {
let dctx = debuginfo::CrateDebugContext::new(llmod);
debuginfo::metadata::compile_unit_metadata(tcx, &codegen_unit.name().as_str(), &dctx);
debuginfo::metadata::compile_unit_metadata(tcx, codegen_unit.name().as_str(), &dctx);
Some(dctx)
} else {
None


@ -1037,7 +1037,7 @@ pub fn compile_unit_metadata<'ll, 'tcx>(
) -> &'ll DIDescriptor {
let mut name_in_debuginfo = match tcx.sess.local_crate_source_file {
Some(ref path) => path.clone(),
None => PathBuf::from(&*tcx.crate_name(LOCAL_CRATE).as_str()),
None => PathBuf::from(tcx.crate_name(LOCAL_CRATE).as_str()),
};
// To avoid breaking split DWARF, we need to ensure that each codegen unit
@ -1371,7 +1371,7 @@ fn closure_saved_names_of_captured_variables(tcx: TyCtxt<'_>, def_id: DefId) ->
_ => return None,
};
let prefix = if is_ref { "_ref__" } else { "" };
Some(prefix.to_owned() + &var.name.as_str())
Some(prefix.to_owned() + var.name.as_str())
})
.collect::<Vec<_>>()
}
@ -1949,7 +1949,7 @@ enum VariantInfo<'a, 'tcx> {
impl<'tcx> VariantInfo<'_, 'tcx> {
fn map_struct_name<R>(&self, f: impl FnOnce(&str) -> R) -> R {
match self {
VariantInfo::Adt(variant) => f(&variant.ident.as_str()),
VariantInfo::Adt(variant) => f(variant.ident.as_str()),
VariantInfo::Generator { variant_index, .. } => {
f(&GeneratorSubsts::variant_name(*variant_index))
}
@ -2114,8 +2114,8 @@ fn prepare_enum_metadata<'ll, 'tcx>(
let item_name;
let discriminant_name = match enum_type.kind() {
ty::Adt(..) => {
item_name = tcx.item_name(enum_def_id).as_str();
&*item_name
item_name = tcx.item_name(enum_def_id);
item_name.as_str()
}
ty::Generator(..) => enum_name.as_str(),
_ => bug!(),
@ -2448,7 +2448,7 @@ fn compute_type_parameters<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -
cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), ty);
let actual_type_metadata =
type_metadata(cx, actual_type, rustc_span::DUMMY_SP);
let name = &name.as_str();
let name = name.as_str();
Some(unsafe {
Some(llvm::LLVMRustDIBuilderCreateTemplateTypeParameter(
DIB(cx),
@ -2590,7 +2590,8 @@ pub fn create_global_var_metadata<'ll>(cx: &CodegenCx<'ll, '_>, def_id: DefId, g
let is_local_to_unit = is_node_local_to_unit(cx, def_id);
let variable_type = Instance::mono(cx.tcx, def_id).ty(cx.tcx, ty::ParamEnv::reveal_all());
let type_metadata = type_metadata(cx, variable_type, span);
let var_name = tcx.item_name(def_id).as_str();
let var_name = tcx.item_name(def_id);
let var_name = var_name.as_str();
let linkage_name = mangled_name_of_instance(cx, Instance::mono(tcx, def_id)).name;
// When empty, linkage_name field is omitted,
// which is what we want for no_mangle statics


@ -48,7 +48,7 @@ impl Command {
}
pub fn sym_arg(&mut self, arg: Symbol) -> &mut Command {
self.arg(&*arg.as_str());
self.arg(arg.as_str());
self
}


@ -88,7 +88,7 @@ pub fn link_binary<'a, B: ArchiveBuilder<'a>>(
sess,
crate_type,
outputs,
&codegen_results.crate_info.local_crate_name.as_str(),
codegen_results.crate_info.local_crate_name.as_str(),
);
match crate_type {
CrateType::Rlib => {


@ -672,7 +672,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
}
let cgu_reuse = cgu_reuse[i];
tcx.sess.cgu_reuse_tracker.set_actual_reuse(&cgu.name().as_str(), cgu_reuse);
tcx.sess.cgu_reuse_tracker.set_actual_reuse(cgu.name().as_str(), cgu_reuse);
match cgu_reuse {
CguReuse::No => {


@ -516,7 +516,7 @@ fn push_unqualified_item_name(
) {
match disambiguated_data.data {
DefPathData::CrateRoot => {
output.push_str(&tcx.crate_name(def_id.krate).as_str());
output.push_str(tcx.crate_name(def_id.krate).as_str());
}
DefPathData::ClosureExpr if tcx.generator_kind(def_id).is_some() => {
// Generators look like closures, but we want to treat them differently
@ -529,7 +529,7 @@ fn push_unqualified_item_name(
}
_ => match disambiguated_data.data.name() {
DefPathDataName::Named(name) => {
output.push_str(&name.as_str());
output.push_str(name.as_str());
}
DefPathDataName::Anon { namespace } => {
if cpp_like_names(tcx) {


@ -68,7 +68,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let arg_tys = sig.inputs();
let ret_ty = sig.output();
let name = bx.tcx().item_name(def_id);
let name_str = &*name.as_str();
let name_str = name.as_str();
let llret_ty = bx.backend_type(bx.layout_of(ret_ty));
let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);
@ -375,7 +375,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
use crate::common::AtomicOrdering::*;
use crate::common::{AtomicRmwBinOp, SynchronizationScope};
let split: Vec<&str> = name_str.split('_').collect();
let split: Vec<_> = name_str.split('_').collect();
let is_cxchg = split[1] == "cxchg" || split[1] == "cxchgweak";
let (order, failorder) = match split.len() {


@ -82,7 +82,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
) -> MPlaceTy<'tcx, M::PointerTag> {
let loc_details = &self.tcx.sess.opts.debugging_opts.location_detail;
let file = if loc_details.file {
self.allocate_str(&filename.as_str(), MemoryKind::CallerLocation, Mutability::Not)
self.allocate_str(filename.as_str(), MemoryKind::CallerLocation, Mutability::Not)
} else {
// FIXME: This creates a new allocation each time. It might be preferable to
// perform this allocation only once, and re-use the `MPlaceTy`.


@ -88,7 +88,7 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> {
}
fn path_crate(mut self, cnum: CrateNum) -> Result<Self::Path, Self::Error> {
self.path.push_str(&self.tcx.crate_name(cnum).as_str());
self.path.push_str(self.tcx.crate_name(cnum).as_str());
Ok(self)
}


@ -171,7 +171,7 @@ fn get_features(
}
if let Some(allowed) = sess.opts.debugging_opts.allow_features.as_ref() {
if allowed.iter().all(|f| name.as_str() != *f) {
if allowed.iter().all(|f| name.as_str() != f) {
struct_span_err!(
span_handler,
mi.span(),


@ -103,10 +103,10 @@ crate fn mod_dir_path(
if let DirOwnership::Owned { relative } = &mut dir_ownership {
if let Some(ident) = relative.take() {
// Remove the relative offset.
dir_path.push(&*ident.as_str());
dir_path.push(ident.as_str());
}
}
dir_path.push(&*ident.as_str());
dir_path.push(ident.as_str());
(dir_path, dir_ownership)
}
@ -170,8 +170,8 @@ fn mod_file_path_from_attr(
) -> Option<PathBuf> {
// Extract path string from first `#[path = "path_string"]` attribute.
let first_path = attrs.iter().find(|at| at.has_name(sym::path))?;
let path_string = match first_path.value_str() {
Some(s) => s.as_str(),
let path_sym = match first_path.value_str() {
Some(s) => s,
None => {
// This check is here mainly to catch attempting to use a macro,
// such as #[path = concat!(...)]. This isn't currently supported
@ -189,14 +189,16 @@ fn mod_file_path_from_attr(
}
};
let path_str = path_sym.as_str();
// On windows, the base path might have the form
// `\\?\foo\bar` in which case it does not tolerate
// mixed `/` and `\` separators, so canonicalize
// `/` to `\`.
#[cfg(windows)]
let path_string = path_string.replace("/", "\\");
let path_str = path_str.replace("/", "\\");
Some(dir_path.join(&*path_string))
Some(dir_path.join(path_str))
}
/// Returns a path to a module.


@ -331,9 +331,9 @@ pub struct Ident {
impl Ident {
fn new(sess: &ParseSess, sym: Symbol, is_raw: bool, span: Span) -> Ident {
let sym = nfc_normalize(&sym.as_str());
let sym = nfc_normalize(sym.as_str());
let string = sym.as_str();
if !rustc_lexer::is_ident(&string) {
if !rustc_lexer::is_ident(string) {
panic!("`{:?}` is not a valid identifier", string)
}
if is_raw && !sym.can_be_raw() {


@ -173,7 +173,7 @@ impl DisambiguatedDefPathData {
if verbose && self.disambiguator != 0 {
write!(writer, "{}#{}", name, self.disambiguator)
} else {
writer.write_str(&name.as_str())
writer.write_str(name.as_str())
}
}
DefPathDataName::Anon { namespace } => {
@ -499,7 +499,7 @@ impl DefPathData {
impl fmt::Display for DefPathData {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.name() {
DefPathDataName::Named(name) => f.write_str(&name.as_str()),
DefPathDataName::Named(name) => f.write_str(name.as_str()),
// FIXME(#70334): this will generate legacy {{closure}}, {{impl}}, etc
DefPathDataName::Anon { namespace } => write!(f, "{{{{{}}}}}", namespace),
}


@ -131,7 +131,7 @@ impl<'tcx> IfThisChanged<'tcx> {
DepNode::from_def_path_hash(self.tcx, def_path_hash, DepKind::hir_owner)
}
Some(n) => {
match DepNode::from_label_string(self.tcx, &n.as_str(), def_path_hash) {
match DepNode::from_label_string(self.tcx, n.as_str(), def_path_hash) {
Ok(n) => n,
Err(()) => {
self.tcx.sess.span_fatal(
@ -147,7 +147,7 @@ impl<'tcx> IfThisChanged<'tcx> {
let dep_node_interned = self.argument(attr);
let dep_node = match dep_node_interned {
Some(n) => {
match DepNode::from_label_string(self.tcx, &n.as_str(), def_path_hash) {
match DepNode::from_label_string(self.tcx, n.as_str(), def_path_hash) {
Ok(n) => n,
Err(()) => {
self.tcx.sess.span_fatal(


@ -124,7 +124,7 @@ impl<'tcx> AssertModuleSource<'tcx> {
debug!("mapping '{}' to cgu name '{}'", self.field(attr, sym::module), cgu_name);
if !self.available_cgus.contains(&*cgu_name.as_str()) {
if !self.available_cgus.contains(cgu_name.as_str()) {
self.tcx.sess.span_err(
attr.span,
&format!(


@ -2252,8 +2252,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
.map(|p| p.name.as_str()),
);
}
let lts = lts_names.iter().map(|s| -> &str { &*s }).collect::<Vec<_>>();
possible.find(|candidate| !lts.contains(&candidate.as_str()))
possible.find(|candidate| !lts_names.contains(&&candidate[..]))
})
.unwrap_or("'lt".to_string());
let add_lt_sugg = generics


@ -324,7 +324,7 @@ pub fn configure_and_expand(
let crate_attrs = krate.attrs.clone();
let extern_mod_loaded = |ident: Ident, attrs, items, span| {
let krate = ast::Crate { attrs, items, span, is_placeholder: None };
pre_expansion_lint(sess, lint_store, &krate, &crate_attrs, &ident.name.as_str());
pre_expansion_lint(sess, lint_store, &krate, &crate_attrs, ident.name.as_str());
(krate.attrs, krate.items)
};
let mut ecx = ExtCtxt::new(sess, cfg, resolver, Some(&extern_mod_loaded));
@ -631,7 +631,7 @@ fn write_out_deps(
// (e.g. accessed in proc macros).
let file_depinfo = sess.parse_sess.file_depinfo.borrow();
let extra_tracked_files = file_depinfo.iter().map(|path_sym| {
let path = PathBuf::from(&*path_sym.as_str());
let path = PathBuf::from(path_sym.as_str());
let file = FileName::from(path);
escape_dep_filename(&file.prefer_local().to_string())
});
@ -1049,8 +1049,8 @@ fn encode_and_write_metadata(
let need_metadata_file = tcx.sess.opts.output_types.contains_key(&OutputType::Metadata);
if need_metadata_file {
let crate_name = &tcx.crate_name(LOCAL_CRATE).as_str();
let out_filename = filename_for_metadata(tcx.sess, crate_name, outputs);
let crate_name = tcx.crate_name(LOCAL_CRATE);
let out_filename = filename_for_metadata(tcx.sess, crate_name.as_str(), outputs);
// To avoid races with another rustc process scanning the output directory,
// we need to write the file somewhere else and atomically move it to its
// final destination, with an `fs::rename` call. In order for the rename to


@ -3181,7 +3181,7 @@ impl<'tcx> LateLintPass<'tcx> for NamedAsmLabels {
} = expr
{
for (template_sym, template_snippet, template_span) in template_strs.iter() {
let template_str = &template_sym.as_str();
let template_str = template_sym.as_str();
let find_label_span = |needle: &str| -> Option<Span> {
if let Some(template_snippet) = template_snippet {
let snippet = template_snippet.as_str();


@ -381,10 +381,10 @@ impl LintStore {
lint_name,
self.lint_groups.keys().collect::<Vec<_>>()
);
let lint_name_str = &*lint_name.as_str();
self.lint_groups.contains_key(&lint_name_str) || {
let lint_name_str = lint_name.as_str();
self.lint_groups.contains_key(lint_name_str) || {
let warnings_name_str = crate::WARNINGS.name_lower();
lint_name_str == &*warnings_name_str
lint_name_str == warnings_name_str
}
}


@ -127,7 +127,7 @@ impl HiddenUnicodeCodepoints {
impl EarlyLintPass for HiddenUnicodeCodepoints {
fn check_attribute(&mut self, cx: &EarlyContext<'_>, attr: &ast::Attribute) {
if let ast::AttrKind::DocComment(_, comment) = attr.kind {
if contains_text_flow_control_chars(&comment.as_str()) {
if contains_text_flow_control_chars(comment.as_str()) {
self.lint_text_direction_codepoint(cx, comment, attr.span, 0, false, "doc comment");
}
}
@ -138,7 +138,7 @@ impl EarlyLintPass for HiddenUnicodeCodepoints {
let (text, span, padding) = match &expr.kind {
ast::ExprKind::Lit(ast::Lit { token, kind, span }) => {
let text = token.symbol;
if !contains_text_flow_control_chars(&text.as_str()) {
if !contains_text_flow_control_chars(text.as_str()) {
return;
}
let padding = match kind {


@ -154,7 +154,7 @@ impl<'s> LintLevelsBuilder<'s> {
LintLevelSource::Node(_, forbid_source_span, reason) => {
diag_builder.span_label(forbid_source_span, "`forbid` level set here");
if let Some(rationale) = reason {
diag_builder.note(&rationale.as_str());
diag_builder.note(rationale.as_str());
}
}
LintLevelSource::CommandLine(_, _) => {


@ -218,8 +218,7 @@ impl EarlyLintPass for NonAsciiIdents {
cx.struct_span_lint(CONFUSABLE_IDENTS, sp, |lint| {
lint.build(&format!(
"identifier pair considered confusable between `{}` and `{}`",
existing_symbol.as_str(),
symbol.as_str()
existing_symbol, symbol
))
.span_label(
*existing_span,


@ -9,7 +9,7 @@ use rustc_middle::ty::subst::InternalSubsts;
use rustc_parse_format::{ParseMode, Parser, Piece};
use rustc_session::lint::FutureIncompatibilityReason;
use rustc_span::edition::Edition;
use rustc_span::{hygiene, sym, symbol::kw, symbol::SymbolStr, InnerSpan, Span, Symbol};
use rustc_span::{hygiene, sym, symbol::kw, InnerSpan, Span, Symbol};
use rustc_trait_selection::infer::InferCtxtExt;
declare_lint! {
@ -71,14 +71,14 @@ fn check_panic<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>, arg: &'tc
if let hir::ExprKind::Lit(lit) = &arg.kind {
if let ast::LitKind::Str(sym, _) = lit.node {
// The argument is a string literal.
check_panic_str(cx, f, arg, &sym.as_str());
check_panic_str(cx, f, arg, sym.as_str());
return;
}
}
// The argument is *not* a string literal.
let (span, panic, symbol_str) = panic_call(cx, f);
let (span, panic, symbol) = panic_call(cx, f);
if in_external_macro(cx.sess(), span) {
// Nothing that can be done about it in the current crate.
@ -103,7 +103,7 @@ fn check_panic<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>, arg: &'tc
cx.struct_span_lint(NON_FMT_PANICS, arg_span, |lint| {
let mut l = lint.build("panic message is not a string literal");
l.note(&format!("this usage of {}!() is deprecated; it will be a hard error in Rust 2021", symbol_str));
l.note(&format!("this usage of {}!() is deprecated; it will be a hard error in Rust 2021", symbol));
l.note("for more information, see <https://doc.rust-lang.org/nightly/edition-guide/rust-2021/panic-macro-consistency.html>");
if !is_arg_inside_call(arg_span, span) {
// No clue where this argument is coming from.
@ -112,7 +112,7 @@ fn check_panic<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>, arg: &'tc
}
if arg_macro.map_or(false, |id| cx.tcx.is_diagnostic_item(sym::format_macro, id)) {
// A case of `panic!(format!(..))`.
l.note(format!("the {}!() macro supports formatting, so there's no need for the format!() macro here", symbol_str).as_str());
l.note(format!("the {}!() macro supports formatting, so there's no need for the format!() macro here", symbol).as_str());
if let Some((open, close, _)) = find_delimiters(cx, arg_span) {
l.multipart_suggestion(
"remove the `format!(..)` macro call",
@ -301,7 +301,7 @@ fn find_delimiters<'tcx>(cx: &LateContext<'tcx>, span: Span) -> Option<(Span, Sp
))
}
fn panic_call<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>) -> (Span, Symbol, SymbolStr) {
fn panic_call<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>) -> (Span, Symbol, Symbol) {
let mut expn = f.span.ctxt().outer_expn_data();
let mut panic_macro = kw::Empty;
@ -328,7 +328,7 @@ fn panic_call<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>) -> (Span,
let macro_symbol =
if let hygiene::ExpnKind::Macro(_, symbol) = expn.kind { symbol } else { sym::panic };
(expn.call_site, panic_macro, macro_symbol.as_str())
(expn.call_site, panic_macro, macro_symbol)
}
fn is_arg_inside_call(arg: Span, call: Span) -> bool {


@ -133,7 +133,7 @@ fn to_camel_case(s: &str) -> String {
impl NonCamelCaseTypes {
fn check_case(&self, cx: &EarlyContext<'_>, sort: &str, ident: &Ident) {
let name = &ident.name.as_str();
let name = ident.name.as_str();
if !is_camel_case(name) {
cx.struct_span_lint(NON_CAMEL_CASE_TYPES, ident.span, |lint| {
@ -276,7 +276,7 @@ impl NonSnakeCase {
})
}
let name = &ident.name.as_str();
let name = ident.name.as_str();
if !is_snake_case(name) {
cx.struct_span_lint(NON_SNAKE_CASE, ident.span, |lint| {
@ -484,7 +484,7 @@ declare_lint_pass!(NonUpperCaseGlobals => [NON_UPPER_CASE_GLOBALS]);
impl NonUpperCaseGlobals {
fn check_upper_case(cx: &LateContext<'_>, sort: &str, ident: &Ident) {
let name = &ident.name.as_str();
let name = ident.name.as_str();
if name.chars().any(|c| c.is_lowercase()) {
cx.struct_span_lint(NON_UPPER_CASE_GLOBALS, ident.span, |lint| {
let uc = NonSnakeCase::to_snake_case(&name).to_uppercase();


@ -315,7 +315,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults {
let mut err = lint.build(&msg);
// check for #[must_use = "..."]
if let Some(note) = attr.value_str() {
err.note(&note.as_str());
err.note(note.as_str());
}
err.emit();
});


@ -292,7 +292,7 @@ impl<'a> CrateLoader<'a> {
// `source` stores paths which are normalized which may be different
// from the strings on the command line.
let source = self.cstore.get_crate_data(cnum).cdata.source();
if let Some(entry) = self.sess.opts.externs.get(&name.as_str()) {
if let Some(entry) = self.sess.opts.externs.get(name.as_str()) {
// Only use `--extern crate_name=path` here, not `--extern crate_name`.
if let Some(mut files) = entry.files() {
if files.any(|l| {
@ -381,7 +381,7 @@ impl<'a> CrateLoader<'a> {
let host_hash = host_lib.as_ref().map(|lib| lib.metadata.get_root().hash());
let private_dep =
self.sess.opts.externs.get(&name.as_str()).map_or(false, |e| e.is_private_dep);
self.sess.opts.externs.get(name.as_str()).map_or(false, |e| e.is_private_dep);
// Claim this crate number and cache it
let cnum = self.cstore.alloc_new_crate_num();
@ -997,7 +997,7 @@ impl<'a> CrateLoader<'a> {
);
let name = match orig_name {
Some(orig_name) => {
validate_crate_name(self.sess, &orig_name.as_str(), Some(item.span));
validate_crate_name(self.sess, orig_name.as_str(), Some(item.span));
orig_name
}
None => item.ident.name,


@ -315,7 +315,7 @@ impl<'a> CrateLocator<'a> {
exact_paths: if hash.is_none() {
sess.opts
.externs
.get(&crate_name.as_str())
.get(crate_name.as_str())
.into_iter()
.filter_map(|entry| entry.files())
.flatten()
@ -976,7 +976,8 @@ impl CrateError {
let candidates = libraries
.iter()
.map(|lib| {
let crate_name = &lib.metadata.get_root().name().as_str();
let crate_name = lib.metadata.get_root().name();
let crate_name = crate_name.as_str();
let mut paths = lib.source.paths();
// This `unwrap()` should be okay because there has to be at least one
@ -1174,7 +1175,7 @@ impl CrateError {
} else if crate_name
== Symbol::intern(&sess.opts.debugging_opts.profiler_runtime)
{
err.note(&"the compiler may have been built without the profiler runtime");
err.note("the compiler may have been built without the profiler runtime");
} else if crate_name.as_str().starts_with("rustc_") {
err.help(
"maybe you need to install the missing components with: \


@ -67,7 +67,7 @@ impl<'tcx> ItemLikeVisitor<'tcx> for Collector<'tcx> {
Some(name) => name,
None => continue, // skip like historical compilers
};
lib.kind = match &*kind.as_str() {
lib.kind = match kind.as_str() {
"static" => NativeLibKind::Static { bundle: None, whole_archive: None },
"static-nobundle" => {
sess.struct_span_warn(


@ -319,7 +319,7 @@ pub fn struct_lint_level<'s, 'd>(
}
LintLevelSource::Node(lint_attr_name, src, reason) => {
if let Some(rationale) = reason {
err.note(&rationale.as_str());
err.note(rationale.as_str());
}
sess.diag_span_note_once(
&mut err,


@ -21,7 +21,7 @@ pub mod lib_features {
.map(|(f, s)| (*f, Some(*s)))
.chain(self.unstable.iter().map(|f| (*f, None)))
.collect();
all_features.sort_unstable_by_key(|f| f.0.as_str());
all_features.sort_unstable_by(|a, b| a.0.as_str().partial_cmp(b.0.as_str()).unwrap());
all_features
}
}


@ -131,8 +131,7 @@ pub fn report_unstable(
/// deprecated (i.e., whether X is not greater than the current rustc version).
pub fn deprecation_in_effect(depr: &Deprecation) -> bool {
let is_since_rustc_version = depr.is_since_rustc_version;
let since = depr.since.map(Symbol::as_str);
let since = since.as_deref();
let since = depr.since.as_ref().map(Symbol::as_str);
fn parse_version(ver: &str) -> Vec<u32> {
// We ignore non-integer components of the version (e.g., "nightly").
@ -197,7 +196,7 @@ fn deprecation_message(
let message = if is_in_effect {
format!("use of deprecated {} `{}`", kind, path)
} else {
let since = since.map(Symbol::as_str);
let since = since.as_ref().map(Symbol::as_str);
if since.as_deref() == Some("TBD") {
format!("use of {} `{}` that will be deprecated in a future Rust version", kind, path)
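A side note on the two deprecation hunks above: with `Symbol::as_str` now taking `&self`, an `Option<Symbol>` is mapped through `as_ref()`, and the old `SymbolStr`-era `as_deref()` step disappears. A minimal sketch (the `since_str` helper is invented for illustration):

```rust
use rustc_span::Symbol; // compiler-internal crate; sketch only

// Option<Symbol> -> Option<&Symbol> -> Option<&str>
fn since_str(since: &Option<Symbol>) -> Option<&str> {
    since.as_ref().map(Symbol::as_str)
}
```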


@ -2444,7 +2444,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
CtorKind::Fictive => {
let mut struct_fmt = fmt.debug_struct(&name);
for (field, place) in iter::zip(&variant_def.fields, places) {
struct_fmt.field(&field.ident.as_str(), place);
struct_fmt.field(field.ident.as_str(), place);
}
struct_fmt.finish()
}
@ -2473,7 +2473,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
if let Some(upvars) = tcx.upvars_mentioned(def_id) {
for (&var_id, place) in iter::zip(upvars.keys(), places) {
let var_name = tcx.hir().name(var_id);
struct_fmt.field(&var_name.as_str(), place);
struct_fmt.field(var_name.as_str(), place);
}
}
@ -2493,7 +2493,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
if let Some(upvars) = tcx.upvars_mentioned(def_id) {
for (&var_id, place) in iter::zip(upvars.keys(), places) {
let var_name = tcx.hir().name(var_id);
struct_fmt.field(&var_name.as_str(), place);
struct_fmt.field(var_name.as_str(), place);
}
}


@ -338,7 +338,7 @@ impl<'tcx> CodegenUnit<'tcx> {
}
pub fn work_product_id(&self) -> WorkProductId {
WorkProductId::from_cgu_name(&self.name().as_str())
WorkProductId::from_cgu_name(self.name().as_str())
}
pub fn work_product(&self, tcx: TyCtxt<'_>) -> WorkProduct {
@ -470,7 +470,7 @@ impl CodegenUnitNameBuilder<'tcx> {
if self.tcx.sess.opts.debugging_opts.human_readable_cgu_names {
cgu_name
} else {
Symbol::intern(&CodegenUnit::mangle_name(&cgu_name.as_str()))
Symbol::intern(&CodegenUnit::mangle_name(cgu_name.as_str()))
}
}


@ -1216,8 +1216,8 @@ impl<'tcx> TyCtxt<'tcx> {
}
pub fn consider_optimizing<T: Fn() -> String>(self, msg: T) -> bool {
let cname = self.crate_name(LOCAL_CRATE).as_str();
self.sess.consider_optimizing(&cname, msg)
let cname = self.crate_name(LOCAL_CRATE);
self.sess.consider_optimizing(cname.as_str(), msg)
}
/// Obtain all lang items of this crate and all dependencies (recursively)


@ -303,7 +303,7 @@ pub trait PrettyPrinter<'tcx>:
match self.tcx().trimmed_def_paths(()).get(&def_id) {
None => Ok((self, false)),
Some(symbol) => {
self.write_str(&symbol.as_str())?;
self.write_str(symbol.as_str())?;
Ok((self, true))
}
}


@ -3,7 +3,7 @@ use std::cmp;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::def_id::LOCAL_CRATE;
use rustc_middle::mir::mono::{CodegenUnit, CodegenUnitNameBuilder};
use rustc_span::symbol::{Symbol, SymbolStr};
use rustc_span::symbol::Symbol;
use super::PartitioningCx;
use crate::partitioning::PreInliningPartitioning;
@ -24,11 +24,11 @@ pub fn merge_codegen_units<'tcx>(
// smallest into each other) we're sure to start off with a deterministic
// order (sorted by name). This'll mean that if two cgus have the same size
// the stable sort below will keep everything nice and deterministic.
codegen_units.sort_by_cached_key(|cgu| cgu.name().as_str());
codegen_units.sort_by(|a, b| a.name().as_str().partial_cmp(b.name().as_str()).unwrap());
// This map keeps track of what got merged into what.
let mut cgu_contents: FxHashMap<Symbol, Vec<SymbolStr>> =
codegen_units.iter().map(|cgu| (cgu.name(), vec![cgu.name().as_str()])).collect();
let mut cgu_contents: FxHashMap<Symbol, Vec<Symbol>> =
codegen_units.iter().map(|cgu| (cgu.name(), vec![cgu.name()])).collect();
// Merge the two smallest codegen units until the target size is reached.
while codegen_units.len() > cx.target_cgu_count {
@ -69,7 +69,7 @@ pub fn merge_codegen_units<'tcx>(
// were actually modified by merging.
.filter(|(_, cgu_contents)| cgu_contents.len() > 1)
.map(|(current_cgu_name, cgu_contents)| {
let mut cgu_contents: Vec<&str> = cgu_contents.iter().map(|s| &s[..]).collect();
let mut cgu_contents: Vec<&str> = cgu_contents.iter().map(|s| s.as_str()).collect();
// Sort the names, so things are deterministic and easy to
// predict.
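A recurring pattern in this PR, visible in the sort calls above and in similar hunks elsewhere: the new `as_str()` borrows from the `Symbol` it is called on, so a key closure like `|cgu| cgu.name().as_str()` would return a reference into a temporary, and the key-based sorts are rewritten as in-place comparisons. A rough stand-in with ordinary types (`Cgu` and its `name()` method are invented here, not the compiler's):

```rust
struct Cgu {
    name: String,
}

impl Cgu {
    // Stands in for `cgu.name()`, which returns a `Symbol` by value.
    fn name(&self) -> String {
        self.name.clone()
    }
}

fn sort_cgus(cgus: &mut [Cgu]) {
    // With the old owned `SymbolStr` key this shape worked; with a borrowed
    // `&str` key it would reference a temporary and no longer compiles:
    //     cgus.sort_by_cached_key(|cgu| cgu.name().as_str());

    // So the PR compares in place instead:
    cgus.sort_by(|a, b| a.name().as_str().cmp(b.name().as_str()));
}
```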


@ -208,7 +208,7 @@ pub fn partition<'tcx>(
internalization_candidates: _,
} = post_inlining;
result.sort_by_cached_key(|cgu| cgu.name().as_str());
result.sort_by(|a, b| a.name().as_str().partial_cmp(b.name().as_str()).unwrap());
result
}
@ -366,7 +366,7 @@ fn collect_and_partition_mono_items<'tcx>(
for cgu in codegen_units {
tcx.prof.artifact_size(
"codegen_unit_size_estimate",
&cgu.name().as_str()[..],
cgu.name().as_str(),
cgu.size_estimate() as u64,
);
}
@ -401,7 +401,7 @@ fn collect_and_partition_mono_items<'tcx>(
cgus.dedup();
for &(ref cgu_name, (linkage, _)) in cgus.iter() {
output.push(' ');
output.push_str(&cgu_name.as_str());
output.push_str(cgu_name.as_str());
let linkage_abbrev = match linkage {
Linkage::External => "External",


@ -1639,7 +1639,7 @@ impl<'a> Parser<'a> {
next_token.kind
{
if self.token.span.hi() == next_token.span.lo() {
let s = String::from("0.") + &symbol.as_str();
let s = String::from("0.") + symbol.as_str();
let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
return Some(Token::new(kind, self.token.span.to(next_token.span)));
}
@ -1710,7 +1710,8 @@ impl<'a> Parser<'a> {
);
}
LitError::InvalidIntSuffix => {
let suf = suffix.expect("suffix error with no suffix").as_str();
let suf = suffix.expect("suffix error with no suffix");
let suf = suf.as_str();
if looks_like_width_suffix(&['i', 'u'], &suf) {
// If it looks like a width, try to be helpful.
let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
@ -1726,8 +1727,9 @@ impl<'a> Parser<'a> {
}
}
LitError::InvalidFloatSuffix => {
let suf = suffix.expect("suffix error with no suffix").as_str();
if looks_like_width_suffix(&['f'], &suf) {
let suf = suffix.expect("suffix error with no suffix");
let suf = suf.as_str();
if looks_like_width_suffix(&['f'], suf) {
// If it looks like a width, try to be helpful.
let msg = format!("invalid width `{}` for float literal", &suf[1..]);
self.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit();


@ -607,7 +607,7 @@ impl CheckAttrVisitor<'_> {
return err_fn(meta.span(), &format!("isn't allowed on {}", err));
}
let item_name = self.tcx.hir().name(hir_id);
if &*item_name.as_str() == doc_alias {
if item_name.as_str() == doc_alias {
return err_fn(meta.span(), "is the same as the item's name");
}
let span = meta.span();
@ -636,7 +636,7 @@ impl CheckAttrVisitor<'_> {
LitKind::Str(s, _) => {
if !self.check_doc_alias_value(
v,
&s.as_str(),
s.as_str(),
hir_id,
target,
true,


@ -1464,7 +1464,7 @@ impl<'tcx> Liveness<'_, 'tcx> {
if name == kw::Empty {
return None;
}
let name: &str = &name.as_str();
let name = name.as_str();
if name.as_bytes()[0] == b'_' {
return None;
}


@ -61,8 +61,8 @@ impl<'p, 'c, 'tcx> QueryKeyStringBuilder<'p, 'c, 'tcx> {
match def_key.disambiguated_data.data {
DefPathData::CrateRoot => {
crate_name = self.tcx.crate_name(def_id.krate).as_str();
name = &*crate_name;
crate_name = self.tcx.crate_name(def_id.krate);
name = crate_name.as_str();
dis = "";
end_index = 3;
}


@ -1,4 +1,3 @@
use std::cmp::Reverse;
use std::ptr;
use rustc_ast::{self as ast, Path};
@ -784,7 +783,7 @@ impl<'a> Resolver<'a> {
});
// Make sure error reporting is deterministic.
suggestions.sort_by_cached_key(|suggestion| suggestion.candidate.as_str());
suggestions.sort_by(|a, b| a.candidate.as_str().partial_cmp(b.candidate.as_str()).unwrap());
match find_best_match_for_name(
&suggestions.iter().map(|suggestion| suggestion.candidate).collect::<Vec<Symbol>>(),
@ -1186,7 +1185,7 @@ impl<'a> Resolver<'a> {
("", " from prelude")
} else if b.is_extern_crate()
&& !b.is_import()
&& self.session.opts.externs.get(&ident.as_str()).is_some()
&& self.session.opts.externs.get(ident.as_str()).is_some()
{
("", " passed with `--extern`")
} else if add_built_in {
@ -1481,12 +1480,12 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
return None;
}
// Sort extern crate names in reverse order to get
// Sort extern crate names in *reverse* order to get
// 1) some consistent ordering for emitted diagnostics, and
// 2) `std` suggestions before `core` suggestions.
let mut extern_crate_names =
self.r.extern_prelude.iter().map(|(ident, _)| ident.name).collect::<Vec<_>>();
extern_crate_names.sort_by_key(|name| Reverse(name.as_str()));
extern_crate_names.sort_by(|a, b| b.as_str().partial_cmp(a.as_str()).unwrap());
for name in extern_crate_names.into_iter() {
// Replace first ident with a crate name and check if that is valid.


@ -231,7 +231,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
let is_assoc_fn = self.self_type_is_available(span);
// Emit help message for fake-self from other languages (e.g., `this` in Javascript).
if ["this", "my"].contains(&&*item_str.as_str()) && is_assoc_fn {
if ["this", "my"].contains(&item_str.as_str()) && is_assoc_fn {
err.span_suggestion_short(
span,
"you might have meant to use `self` here instead",
@ -1358,7 +1358,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
let name = path[path.len() - 1].ident.name;
// Make sure error reporting is deterministic.
names.sort_by_cached_key(|suggestion| suggestion.candidate.as_str());
names.sort_by(|a, b| a.candidate.as_str().partial_cmp(b.candidate.as_str()).unwrap());
match find_best_match_for_name(
&names.iter().map(|suggestion| suggestion.candidate).collect::<Vec<Symbol>>(),
@ -1377,7 +1377,7 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
fn likely_rust_type(path: &[Segment]) -> Option<Symbol> {
let name = path[path.len() - 1].ident.as_str();
// Common Java types
Some(match &*name {
Some(match name {
"byte" => sym::u8, // In Java, bytes are signed, but in practice one almost always wants unsigned bytes.
"short" => sym::i16,
"boolean" => sym::bool,
@ -2345,7 +2345,7 @@ impl<'tcx> LifetimeContext<'_, 'tcx> {
_ => None,
});
}
suggest_existing(err, &name.as_str()[..], suggs);
suggest_existing(err, name.as_str(), suggs);
}
[] => {
let mut suggs = Vec::new();


@ -689,11 +689,11 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
hir_id: hir::HirId,
) {
let name = match fk {
intravisit::FnKind::ItemFn(id, _, _, _) => id.as_str(),
intravisit::FnKind::Method(id, _, _) => id.as_str(),
intravisit::FnKind::Closure => Symbol::intern("closure").as_str(),
intravisit::FnKind::ItemFn(id, _, _, _) => id.name,
intravisit::FnKind::Method(id, _, _) => id.name,
intravisit::FnKind::Closure => sym::closure,
};
let name: &str = &name;
let name = name.as_str();
let span = span!(Level::DEBUG, "visit_fn", name);
let _enter = span.enter();
match fk {


@ -3481,7 +3481,7 @@ fn names_to_string(names: &[Symbol]) -> String {
if Ident::with_dummy_span(*name).is_raw_guess() {
result.push_str("r#");
}
result.push_str(&name.as_str());
result.push_str(name.as_str());
}
result
}

View File

@ -105,7 +105,7 @@ fn fast_print_path(path: &ast::Path) -> Symbol {
path_str.push_str("::");
}
if segment.ident.name != kw::PathRoot {
path_str.push_str(&segment.ident.as_str())
path_str.push_str(segment.ident.as_str())
}
}
Symbol::intern(&path_str)


@ -825,7 +825,7 @@ impl<'tcx> SaveContext<'tcx> {
for attr in attrs {
if let Some(val) = attr.doc_str() {
// FIXME: Should save-analysis beautify doc strings itself or leave it to users?
result.push_str(&beautify_doc_string(val).as_str());
result.push_str(beautify_doc_string(val).as_str());
result.push('\n');
}
}


@ -616,7 +616,7 @@ impl<'hir> Sig for hir::Generics<'hir> {
if let hir::GenericParamKind::Const { .. } = param.kind {
param_text.push_str("const ");
}
param_text.push_str(&param.name.ident().as_str());
param_text.push_str(param.name.ident().as_str());
defs.push(SigElement {
id: id_from_hir_id(param.hir_id, scx),
start: offset + text.len(),


@ -60,7 +60,7 @@ pub fn find_crate_name(sess: &Session, attrs: &[ast::Attribute], input: &Input)
if let Some(ref s) = sess.opts.crate_name {
if let Some((attr, name)) = attr_crate_name {
if name.as_str() != *s {
if name.as_str() != s {
let msg = format!(
"`--crate-name` and `#[crate_name]` are \
required to match, but `{}` != `{}`",


@ -55,7 +55,7 @@ pub fn find_best_match_for_name(
lookup: Symbol,
dist: Option<usize>,
) -> Option<Symbol> {
let lookup = &lookup.as_str();
let lookup = lookup.as_str();
let max_dist = dist.unwrap_or_else(|| cmp::max(lookup.len(), 3) / 3);
// Priority of matches:
@ -70,7 +70,7 @@ pub fn find_best_match_for_name(
let levenshtein_match = name_vec
.iter()
.filter_map(|&name| {
let dist = lev_distance(lookup, &name.as_str());
let dist = lev_distance(lookup, name.as_str());
if dist <= max_dist { Some((name, dist)) } else { None }
})
// Here we are collecting the next structure:
@ -88,7 +88,7 @@ pub fn find_best_match_for_name(
fn find_match_by_sorted_words(iter_names: &[Symbol], lookup: &str) -> Option<Symbol> {
iter_names.iter().fold(None, |result, candidate| {
if sort_by_words(&candidate.as_str()) == sort_by_words(lookup) {
if sort_by_words(candidate.as_str()) == sort_by_words(lookup) {
Some(*candidate)
} else {
result


@ -1512,9 +1512,12 @@ impl Ident {
Ident::new(self.name, self.span.normalize_to_macro_rules())
}
/// Convert the name to a `SymbolStr`. This is a slowish operation because
/// it requires locking the symbol interner.
pub fn as_str(self) -> SymbolStr {
/// Access the underlying string. This is a slowish operation because it
/// requires locking the symbol interner.
///
/// Note that the lifetime of the return value is a lie. See
/// `Symbol::as_str()` for details.
pub fn as_str(&self) -> &str {
self.name.as_str()
}
}
@ -1650,12 +1653,17 @@ impl Symbol {
with_session_globals(|session_globals| session_globals.symbol_interner.intern(string))
}
/// Convert to a `SymbolStr`. This is a slowish operation because it
/// Access the underlying string. This is a slowish operation because it
/// requires locking the symbol interner.
pub fn as_str(self) -> SymbolStr {
with_session_globals(|session_globals| {
let symbol_str = session_globals.symbol_interner.get(self);
unsafe { SymbolStr { string: std::mem::transmute::<&str, &str>(symbol_str) } }
///
/// Note that the lifetime of the return value is a lie. It's not the same
/// as `&self`, but actually tied to the lifetime of the underlying
/// interner. Interners are long-lived, and there are very few of them, and
/// this function is typically used for short-lived things, so in practice
/// it works out ok.
pub fn as_str(&self) -> &str {
with_session_globals(|session_globals| unsafe {
std::mem::transmute::<&str, &str>(session_globals.symbol_interner.get(*self))
})
}
@ -1678,19 +1686,19 @@ impl Symbol {
impl fmt::Debug for Symbol {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.as_str(), f)
fmt::Debug::fmt(self.as_str(), f)
}
}
impl fmt::Display for Symbol {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.as_str(), f)
fmt::Display::fmt(self.as_str(), f)
}
}
impl<S: Encoder> Encodable<S> for Symbol {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(&self.as_str())
s.emit_str(self.as_str())
}
}
@ -1709,11 +1717,10 @@ impl<CTX> HashStable<CTX> for Symbol {
}
impl<CTX> ToStableHashKey<CTX> for Symbol {
type KeyType = SymbolStr;
type KeyType = String;
#[inline]
fn to_stable_hash_key(&self, _: &CTX) -> SymbolStr {
self.as_str()
fn to_stable_hash_key(&self, _: &CTX) -> String {
self.as_str().to_string()
}
}
@ -1905,70 +1912,3 @@ impl Ident {
self.name.can_be_raw() && self.is_reserved()
}
}
/// An alternative to [`Symbol`], useful when the chars within the symbol need to
/// be accessed. It deliberately has limited functionality and should only be
/// used for temporary values.
///
/// Because the interner outlives any thread which uses this type, we can
/// safely treat `string` which points to interner data, as an immortal string,
/// as long as this type never crosses between threads.
//
// FIXME: ensure that the interner outlives any thread which uses `SymbolStr`,
// by creating a new thread right after constructing the interner.
#[derive(Clone, Eq, PartialOrd, Ord)]
pub struct SymbolStr {
string: &'static str,
}
// This impl allows a `SymbolStr` to be directly equated with a `String` or
// `&str`.
impl<T: std::ops::Deref<Target = str>> std::cmp::PartialEq<T> for SymbolStr {
fn eq(&self, other: &T) -> bool {
self.string == other.deref()
}
}
impl !Send for SymbolStr {}
impl !Sync for SymbolStr {}
/// This impl means that if `ss` is a `SymbolStr`:
/// - `*ss` is a `str`;
/// - `&*ss` is a `&str` (and `match &*ss { ... }` is a common pattern).
/// - `&ss as &str` is a `&str`, which means that `&ss` can be passed to a
/// function expecting a `&str`.
impl std::ops::Deref for SymbolStr {
type Target = str;
#[inline]
fn deref(&self) -> &str {
self.string
}
}
impl fmt::Debug for SymbolStr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(self.string, f)
}
}
impl fmt::Display for SymbolStr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self.string, f)
}
}
impl<CTX> HashStable<CTX> for SymbolStr {
#[inline]
fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
self.string.hash_stable(hcx, hasher)
}
}
impl<CTX> ToStableHashKey<CTX> for SymbolStr {
type KeyType = SymbolStr;
#[inline]
fn to_stable_hash_key(&self, _: &CTX) -> SymbolStr {
self.clone()
}
}
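The hunks above are the heart of the change: `Symbol::as_str` and `Ident::as_str` now hand out a plain `&str` whose lifetime is formally tied to `&self` even though the bytes live in the long-lived interner, and the `SymbolStr` wrapper goes away. One practical consequence, visible in earlier hunks such as `let suf = suffix.expect(...); let suf = suf.as_str();`: a `Symbol` returned by value has to be bound to a local before `as_str()` is called, because borrowing from a temporary is rejected. A small sketch (the `item_name` helper is invented):

```rust
use rustc_span::Symbol; // compiler-internal crate; sketch only

fn item_name() -> Symbol {
    Symbol::intern("example")
}

fn demo() {
    // Rejected (E0716): `as_str()` borrows the temporary `Symbol` returned by
    // `item_name()`, and that temporary is dropped at the end of the statement:
    //     let name = item_name().as_str();
    //     println!("{}", name);

    // The pattern used throughout the PR: bind the `Symbol` first, then borrow.
    let name_sym = item_name();
    let name = name_sym.as_str();
    println!("{}", name);
}
```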


@ -255,7 +255,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolPrinter<'tcx> {
}
fn path_crate(self, cnum: CrateNum) -> Result<Self::Path, Self::Error> {
self.write_str(&self.tcx.crate_name(cnum).as_str())?;
self.write_str(self.tcx.crate_name(cnum).as_str())?;
Ok(self)
}
fn path_qualified(


@ -9,6 +9,7 @@ use rustc_middle::ty::layout::IntegerExt;
use rustc_middle::ty::print::{Print, Printer};
use rustc_middle::ty::subst::{GenericArg, GenericArgKind, Subst};
use rustc_middle::ty::{self, FloatTy, Instance, IntTy, Ty, TyCtxt, TypeFoldable, UintTy};
use rustc_span::symbol::kw;
use rustc_target::abi::call::FnAbi;
use rustc_target::abi::Integer;
use rustc_target::spec::abi::Abi;
@ -559,7 +560,7 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
ty::ExistentialPredicate::Projection(projection) => {
let name = cx.tcx.associated_item(projection.item_def_id).ident;
cx.push("p");
cx.push_ident(&name.as_str());
cx.push_ident(name.as_str());
cx = projection.ty.print(cx)?;
}
ty::ExistentialPredicate::AutoTrait(def_id) => {
@ -702,12 +703,11 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
// just to be able to handle disambiguators.
let disambiguated_field =
self.tcx.def_key(field_def.did).disambiguated_data;
let field_name =
disambiguated_field.data.get_opt_name().map(|s| s.as_str());
let field_name = disambiguated_field.data.get_opt_name();
self.push_disambiguator(
disambiguated_field.disambiguator as u64,
);
self.push_ident(&field_name.as_ref().map_or("", |s| &s[..]));
self.push_ident(field_name.unwrap_or(kw::Empty).as_str());
self = field.print(self)?;
}
@ -736,8 +736,8 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
self.push("C");
let stable_crate_id = self.tcx.def_path_hash(cnum.as_def_id()).stable_crate_id();
self.push_disambiguator(stable_crate_id.to_u64());
let name = self.tcx.crate_name(cnum).as_str();
self.push_ident(&name);
let name = self.tcx.crate_name(cnum);
self.push_ident(name.as_str());
Ok(self)
}
@ -793,13 +793,13 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
}
};
let name = disambiguated_data.data.get_opt_name().map(|s| s.as_str());
let name = disambiguated_data.data.get_opt_name();
self.path_append_ns(
print_prefix,
ns,
disambiguated_data.disambiguator as u64,
name.as_ref().map_or("", |s| &s[..]),
name.unwrap_or(kw::Empty).as_str(),
)
}


@ -298,43 +298,43 @@ impl InlineAsmReg {
let name = name.as_str();
Ok(match arch {
InlineAsmArch::X86 | InlineAsmArch::X86_64 => {
Self::X86(X86InlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::X86(X86InlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Arm => {
Self::Arm(ArmInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Arm(ArmInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::AArch64 => {
Self::AArch64(AArch64InlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::AArch64(AArch64InlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::RiscV32 | InlineAsmArch::RiscV64 => {
Self::RiscV(RiscVInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::RiscV(RiscVInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Nvptx64 => {
Self::Nvptx(NvptxInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Nvptx(NvptxInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::PowerPC | InlineAsmArch::PowerPC64 => {
Self::PowerPC(PowerPCInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::PowerPC(PowerPCInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Hexagon => {
Self::Hexagon(HexagonInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Hexagon(HexagonInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Mips | InlineAsmArch::Mips64 => {
Self::Mips(MipsInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Mips(MipsInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::S390x => {
Self::S390x(S390xInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::S390x(S390xInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::SpirV => {
Self::SpirV(SpirVInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::SpirV(SpirVInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Wasm32 | InlineAsmArch::Wasm64 => {
Self::Wasm(WasmInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Wasm(WasmInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Bpf => {
Self::Bpf(BpfInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Bpf(BpfInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Avr => {
Self::Avr(AvrInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Avr(AvrInlineAsmReg::parse(arch, has_feature, target, name)?)
}
})
}
@ -798,7 +798,7 @@ impl InlineAsmClobberAbi {
target: &Target,
name: Symbol,
) -> Result<Self, &'static [&'static str]> {
let name = &*name.as_str();
let name = name.as_str();
match arch {
InlineAsmArch::X86 => match name {
"C" | "system" | "efiapi" | "cdecl" | "stdcall" | "fastcall" => {


@ -269,7 +269,7 @@ impl<'tcx> OnUnimplementedFormatString {
let name = tcx.item_name(trait_def_id);
let generics = tcx.generics_of(trait_def_id);
let s = self.0.as_str();
let parser = Parser::new(&s, None, None, false, ParseMode::Format);
let parser = Parser::new(s, None, None, false, ParseMode::Format);
let mut result = Ok(());
for token in parser {
match token {
@ -347,7 +347,7 @@ impl<'tcx> OnUnimplementedFormatString {
let empty_string = String::new();
let s = self.0.as_str();
let parser = Parser::new(&s, None, None, false, ParseMode::Format);
let parser = Parser::new(s, None, None, false, ParseMode::Format);
let item_context = (options.get(&sym::ItemContext)).unwrap_or(&empty_string);
parser
.map(|p| match p {


@ -609,7 +609,7 @@ fn check_must_not_suspend_def(
// Add optional reason note
if let Some(note) = attr.value_str() {
// FIXME(guswynn): consider formatting this better
err.span_note(data.source_span, &note.as_str());
err.span_note(data.source_span, note.as_str());
}
// Add some quick suggestions on what to do


@ -1038,7 +1038,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
.collect();
// Sort them by the name so we have a stable result.
names.sort_by_cached_key(|n| n.as_str());
names.sort_by(|a, b| a.as_str().partial_cmp(b.as_str()).unwrap());
names
}
@ -1908,7 +1908,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
.associated_items(def_id)
.in_definition_order()
.filter(|x| {
let dist = lev_distance(&*name.as_str(), &x.ident.as_str());
let dist = lev_distance(name.as_str(), x.ident.as_str());
x.kind.namespace() == Namespace::ValueNS && dist > 0 && dist <= max_dist
})
.copied()


@ -2849,7 +2849,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
);
} else if attr.has_name(sym::linkage) {
if let Some(val) = attr.value_str() {
codegen_fn_attrs.linkage = Some(linkage_by_name(tcx, id, &val.as_str()));
codegen_fn_attrs.linkage = Some(linkage_by_name(tcx, id, val.as_str()));
}
} else if attr.has_name(sym::link_section) {
if let Some(val) = attr.value_str() {


@ -466,7 +466,7 @@ impl<'a> fmt::Display for Display<'a> {
(sym::unix, None) => "Unix",
(sym::windows, None) => "Windows",
(sym::debug_assertions, None) => "debug-assertions enabled",
(sym::target_os, Some(os)) => match &*os.as_str() {
(sym::target_os, Some(os)) => match os.as_str() {
"android" => "Android",
"dragonfly" => "DragonFly BSD",
"emscripten" => "Emscripten",
@ -487,7 +487,7 @@ impl<'a> fmt::Display for Display<'a> {
"windows" => "Windows",
_ => "",
},
(sym::target_arch, Some(arch)) => match &*arch.as_str() {
(sym::target_arch, Some(arch)) => match arch.as_str() {
"aarch64" => "AArch64",
"arm" => "ARM",
"asmjs" => "JavaScript",
@ -504,14 +504,14 @@ impl<'a> fmt::Display for Display<'a> {
"x86_64" => "x86-64",
_ => "",
},
(sym::target_vendor, Some(vendor)) => match &*vendor.as_str() {
(sym::target_vendor, Some(vendor)) => match vendor.as_str() {
"apple" => "Apple",
"pc" => "PC",
"sun" => "Sun",
"fortanix" => "Fortanix",
_ => "",
},
(sym::target_env, Some(env)) => match &*env.as_str() {
(sym::target_env, Some(env)) => match env.as_str() {
"gnu" => "GNU",
"msvc" => "MSVC",
"musl" => "musl",
@ -545,14 +545,14 @@ impl<'a> fmt::Display for Display<'a> {
write!(
fmt,
r#"<code>{}="{}"</code>"#,
Escape(&name.as_str()),
Escape(&v.as_str())
Escape(name.as_str()),
Escape(v.as_str())
)
} else {
write!(fmt, r#"`{}="{}"`"#, name, v)
}
} else if self.1.is_html() {
write!(fmt, "<code>{}</code>", Escape(&name.as_str()))
write!(fmt, "<code>{}</code>", Escape(name.as_str()))
} else {
write!(fmt, "`{}`", name)
}


@ -25,7 +25,7 @@ use rustc_middle::ty::{self, TyCtxt};
use rustc_session::Session;
use rustc_span::hygiene::MacroKind;
use rustc_span::source_map::DUMMY_SP;
use rustc_span::symbol::{kw, sym, Ident, Symbol, SymbolStr};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{self, FileName, Loc};
use rustc_target::abi::VariantIdx;
use rustc_target::spec::abi::Abi;
@ -200,7 +200,7 @@ impl ExternalCrate {
// See if there's documentation generated into the local directory
// WARNING: since rustdoc creates these directories as it generates documentation, this check is only accurate before rendering starts.
// Make sure to call `location()` by that time.
let local_location = dst.join(&*self.name(tcx).as_str());
let local_location = dst.join(self.name(tcx).as_str());
if local_location.is_dir() {
return Local;
}
@ -2009,10 +2009,6 @@ impl Path {
self.segments.last().expect("segments were empty").name
}
crate fn last_name(&self) -> SymbolStr {
self.segments.last().expect("segments were empty").name.as_str()
}
crate fn whole_name(&self) -> String {
self.segments
.iter()


@ -162,7 +162,7 @@ crate fn qpath_to_string(p: &hir::QPath<'_>) -> String {
s.push_str("::");
}
if seg.ident.name != kw::PathRoot {
s.push_str(&seg.ident.as_str());
s.push_str(seg.ident.as_str());
}
}
s


@ -150,8 +150,7 @@ impl Cache {
let name = e.name(tcx);
let render_options = &cx.render_options;
let extern_url =
render_options.extern_html_root_urls.get(&*name.as_str()).map(|u| &**u);
let extern_url = render_options.extern_html_root_urls.get(name.as_str()).map(|u| &**u);
let extern_url_takes_precedence = render_options.extern_html_root_takes_precedence;
let dst = &render_options.output;
let location = e.location(extern_url, extern_url_takes_precedence, dst, tcx);


@ -90,7 +90,7 @@ crate fn run_format<'tcx, T: FormatRenderer<'tcx>>(
// FIXME: checking `item.name.is_some()` is very implicit and leads to lots of special
// cases. Use an explicit match instead.
} else if item.name.is_some() && !item.is_extern_crate() {
prof.generic_activity_with_arg("render_item", &*item.name.unwrap_or(unknown).as_str())
prof.generic_activity_with_arg("render_item", item.name.unwrap_or(unknown).as_str())
.run(|| cx.item(item))?;
}
}


@ -175,7 +175,7 @@ impl clean::GenericParamDef {
Ok(())
}
clean::GenericParamDefKind::Type { bounds, default, .. } => {
f.write_str(&*self.name.as_str())?;
f.write_str(self.name.as_str())?;
if !bounds.is_empty() {
if f.alternate() {
@ -638,7 +638,7 @@ fn resolved_path<'cx>(
last.name.to_string()
}
} else {
anchor(did, &*last.name.as_str(), cx).to_string()
anchor(did, last.name.as_str(), cx).to_string()
};
write!(w, "{}{}", path, last.args.print(cx))?;
}
@ -667,20 +667,18 @@ fn primitive_link(
needs_termination = true;
}
Some(&def_id) => {
let cname_str;
let cname_sym;
let loc = match m.extern_locations[&def_id.krate] {
ExternalLocation::Remote(ref s) => {
cname_str =
ExternalCrate { crate_num: def_id.krate }.name(cx.tcx()).as_str();
Some(vec![s.trim_end_matches('/'), &cname_str[..]])
cname_sym = ExternalCrate { crate_num: def_id.krate }.name(cx.tcx());
Some(vec![s.trim_end_matches('/'), cname_sym.as_str()])
}
ExternalLocation::Local => {
cname_str =
ExternalCrate { crate_num: def_id.krate }.name(cx.tcx()).as_str();
Some(if cx.current.first().map(|x| &x[..]) == Some(&cname_str[..]) {
cname_sym = ExternalCrate { crate_num: def_id.krate }.name(cx.tcx());
Some(if cx.current.first().map(|x| &x[..]) == Some(cname_sym.as_str()) {
iter::repeat("..").take(cx.current.len() - 1).collect()
} else {
let cname = iter::once(&cname_str[..]);
let cname = iter::once(cname_sym.as_str());
iter::repeat("..").take(cx.current.len()).chain(cname).collect()
})
}
@ -775,7 +773,7 @@ fn fmt_type<'cx>(
clean::Primitive(clean::PrimitiveType::Never) => {
primitive_link(f, PrimitiveType::Never, "!", cx)
}
clean::Primitive(prim) => primitive_link(f, prim, &*prim.as_sym().as_str(), cx),
clean::Primitive(prim) => primitive_link(f, prim, prim.as_sym().as_str(), cx),
clean::BareFunction(ref decl) => {
if f.alternate() {
write!(
@ -1271,7 +1269,7 @@ impl clean::Visibility {
debug!("path={:?}", path);
// modified from `resolved_path()` to work with `DefPathData`
let last_name = path.data.last().unwrap().data.get_opt_name().unwrap();
let anchor = anchor(vis_did, &last_name.as_str(), cx).to_string();
let anchor = anchor(vis_did, last_name.as_str(), cx).to_string();
let mut s = "pub(in ".to_owned();
for seg in &path.data[..path.data.len() - 1] {
@ -1402,9 +1400,9 @@ impl clean::ImportSource {
for seg in &self.path.segments[..self.path.segments.len() - 1] {
write!(f, "{}::", seg.name)?;
}
let name = self.path.last_name();
let name = self.path.last();
if let hir::def::Res::PrimTy(p) = self.path.res {
primitive_link(f, PrimitiveType::from(p), &*name, cx)?;
primitive_link(f, PrimitiveType::from(p), name.as_str(), cx)?;
} else {
write!(f, "{}", name)?;
}
@ -1420,7 +1418,7 @@ impl clean::TypeBinding {
cx: &'a Context<'tcx>,
) -> impl fmt::Display + 'a + Captures<'tcx> {
display_fn(move |f| {
f.write_str(&*self.name.as_str())?;
f.write_str(self.name.as_str())?;
match self.kind {
clean::TypeBindingKind::Equality { ref ty } => {
if f.alternate() {


@ -180,7 +180,7 @@ impl<'tcx> Context<'tcx> {
fn render_item(&self, it: &clean::Item, is_module: bool) -> String {
let mut title = String::new();
if !is_module {
title.push_str(&it.name.unwrap().as_str());
title.push_str(it.name.unwrap().as_str());
}
if !it.is_primitive() && !it.is_keyword() {
if !is_module {
@ -315,7 +315,7 @@ impl<'tcx> Context<'tcx> {
};
let file = &file;
let symbol;
let krate_sym;
let (krate, path) = if cnum == LOCAL_CRATE {
if let Some(path) = self.shared.local_sources.get(file) {
(self.shared.layout.krate.as_str(), path)
@ -343,8 +343,8 @@ impl<'tcx> Context<'tcx> {
let mut fname = file.file_name().expect("source has no filename").to_os_string();
fname.push(".html");
path.push_str(&fname.to_string_lossy());
symbol = krate.as_str();
(&*symbol, &path)
krate_sym = krate;
(krate_sym.as_str(), &path)
};
let anchor = if with_lines {
@ -549,7 +549,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
fn after_krate(&mut self) -> Result<(), Error> {
let crate_name = self.tcx().crate_name(LOCAL_CRATE);
let final_file = self.dst.join(&*crate_name.as_str()).join("all.html");
let final_file = self.dst.join(crate_name.as_str()).join("all.html");
let settings_file = self.dst.join("settings.html");
let mut root_path = self.dst.to_str().expect("invalid path").to_owned();
@ -619,9 +619,9 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
if let Some(ref redirections) = self.shared.redirections {
if !redirections.borrow().is_empty() {
let redirect_map_path =
self.dst.join(&*crate_name.as_str()).join("redirect-map.json");
self.dst.join(crate_name.as_str()).join("redirect-map.json");
let paths = serde_json::to_string(&*redirections.borrow()).unwrap();
self.shared.ensure_dir(&self.dst.join(&*crate_name.as_str()))?;
self.shared.ensure_dir(&self.dst.join(crate_name.as_str()))?;
self.shared.fs.write(redirect_map_path, paths)?;
}
}
@ -703,7 +703,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
if !buf.is_empty() {
let name = item.name.as_ref().unwrap();
let item_type = item.type_();
let file_name = &item_path(item_type, &name.as_str());
let file_name = &item_path(item_type, name.as_str());
self.shared.ensure_dir(&self.dst)?;
let joint_dst = self.dst.join(file_name);
self.shared.fs.write(joint_dst, buf)?;


@ -640,9 +640,9 @@ fn short_item_info(
// We display deprecation messages for #[deprecated] and #[rustc_deprecated]
// but only display the future-deprecation messages for #[rustc_deprecated].
let mut message = if let Some(since) = since {
let since = &since.as_str();
let since = since.as_str();
if !stability::deprecation_in_effect(&depr) {
if *since == "TBD" {
if since == "TBD" {
String::from("Deprecating in a future Rust version")
} else {
format!("Deprecating in {}", Escape(since))
@ -658,7 +658,7 @@ fn short_item_info(
let note = note.as_str();
let mut ids = cx.id_map.borrow_mut();
let html = MarkdownHtml(
&note,
note,
&mut ids,
error_codes,
cx.shared.edition(),
@ -683,7 +683,7 @@ fn short_item_info(
let mut message =
"<span class=\"emoji\">🔬</span> This is a nightly-only experimental API.".to_owned();
let mut feature = format!("<code>{}</code>", Escape(&feature.as_str()));
let mut feature = format!("<code>{}</code>", Escape(feature.as_str()));
if let (Some(url), Some(issue)) = (&cx.shared.issue_tracker_base_url, issue) {
feature.push_str(&format!(
"&nbsp;<a href=\"{url}{issue}\">#{issue}</a>",
@ -1414,7 +1414,7 @@ fn render_impl(
let source_id = trait_
.and_then(|trait_| {
trait_.items.iter().find(|item| {
item.name.map(|n| n.as_str().eq(&name.as_str())).unwrap_or(false)
item.name.map(|n| n.as_str().eq(name.as_str())).unwrap_or(false)
})
})
.map(|item| format!("{}.{}", item.type_(), name));


@ -136,7 +136,7 @@ pub(super) fn print_item(
page: page,
static_root_path: page.get_static_root_path(),
typ: typ,
name: &item.name.as_ref().unwrap().as_str(),
name: item.name.as_ref().unwrap().as_str(),
item_type: &item.type_().to_string(),
path_components: path_components,
stability_since_raw: &stability_since_raw,
@ -239,9 +239,9 @@ fn item_module(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, items: &[cl
(true, false) => return Ordering::Greater,
}
}
let lhs = i1.name.unwrap_or(kw::Empty).as_str();
let rhs = i2.name.unwrap_or(kw::Empty).as_str();
compare_names(&lhs, &rhs)
let lhs = i1.name.unwrap_or(kw::Empty);
let rhs = i2.name.unwrap_or(kw::Empty);
compare_names(lhs.as_str(), rhs.as_str())
}
if cx.shared.sort_modules_alphabetically {
@ -315,7 +315,7 @@ fn item_module(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, items: &[cl
w,
"<div class=\"item-left\"><code>{}extern crate {} as {};",
myitem.visibility.print_with_space(myitem.def_id, cx),
anchor(myitem.def_id.expect_def_id(), &*src.as_str(), cx),
anchor(myitem.def_id.expect_def_id(), src.as_str(), cx),
myitem.name.as_ref().unwrap(),
),
None => write!(
@ -324,7 +324,7 @@ fn item_module(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, items: &[cl
myitem.visibility.print_with_space(myitem.def_id, cx),
anchor(
myitem.def_id.expect_def_id(),
&*myitem.name.as_ref().unwrap().as_str(),
myitem.name.as_ref().unwrap().as_str(),
cx
),
),
@ -405,7 +405,7 @@ fn item_module(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, items: &[cl
add = add,
stab = stab.unwrap_or_default(),
unsafety_flag = unsafety_flag,
href = item_path(myitem.type_(), &myitem.name.unwrap().as_str()),
href = item_path(myitem.type_(), myitem.name.unwrap().as_str()),
title = [full_path(cx, myitem), myitem.type_().to_string()]
.iter()
.filter_map(|s| if !s.is_empty() { Some(s.as_str()) } else { None })
@ -1308,7 +1308,7 @@ fn item_struct(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, s: &clean::St
document_non_exhaustive(w, it);
for (index, (field, ty)) in fields.enumerate() {
let field_name =
field.name.map_or_else(|| index.to_string(), |sym| (*sym.as_str()).to_string());
field.name.map_or_else(|| index.to_string(), |sym| sym.as_str().to_string());
let id = cx.derive_id(format!("{}.{}", ItemType::StructField, field_name));
write!(
w,
@ -1410,7 +1410,7 @@ crate fn compare_names(mut lhs: &str, mut rhs: &str) -> Ordering {
pub(super) fn full_path(cx: &Context<'_>, item: &clean::Item) -> String {
let mut s = cx.current.join("::");
s.push_str("::");
s.push_str(&item.name.unwrap().as_str());
s.push_str(item.name.unwrap().as_str());
s
}


@ -418,7 +418,7 @@ pub(super) fn write_shared(
let dst = cx.dst.join(&format!("source-files{}.js", cx.shared.resource_suffix));
let make_sources = || {
let (mut all_sources, _krates) =
try_err!(collect(&dst, &krate.name(cx.tcx()).as_str(), "sourcesIndex"), &dst);
try_err!(collect(&dst, krate.name(cx.tcx()).as_str(), "sourcesIndex"), &dst);
all_sources.push(format!(
"sourcesIndex[\"{}\"] = {};",
&krate.name(cx.tcx()),
@ -437,7 +437,7 @@ pub(super) fn write_shared(
// Update the search index and crate list.
let dst = cx.dst.join(&format!("search-index{}.js", cx.shared.resource_suffix));
let (mut all_indexes, mut krates) =
try_err!(collect_json(&dst, &krate.name(cx.tcx()).as_str()), &dst);
try_err!(collect_json(&dst, krate.name(cx.tcx()).as_str()), &dst);
all_indexes.push(search_index);
krates.push(krate.name(cx.tcx()).to_string());
krates.sort();
@ -575,7 +575,7 @@ pub(super) fn write_shared(
mydst.push(&format!("{}.{}.js", remote_item_type, remote_path[remote_path.len() - 1]));
let (mut all_implementors, _) =
try_err!(collect(&mydst, &krate.name(cx.tcx()).as_str(), "implementors"), &mydst);
try_err!(collect(&mydst, krate.name(cx.tcx()).as_str(), "implementors"), &mydst);
all_implementors.push(implementors);
// Sort the implementors by crate so the file will be generated
// identically even with rustdoc running in parallel.


@ -19,7 +19,7 @@ use std::path::{Component, Path, PathBuf};
crate fn render(cx: &mut Context<'_>, krate: &clean::Crate) -> Result<(), Error> {
info!("emitting source files");
let dst = cx.dst.join("src").join(&*krate.name(cx.tcx()).as_str());
let dst = cx.dst.join("src").join(krate.name(cx.tcx()).as_str());
cx.shared.ensure_dir(&dst)?;
let mut collector = SourceCollector { dst, cx, emitted_local_sources: FxHashSet::default() };


@ -609,7 +609,7 @@ impl FromWithTcx<clean::Import> for Import {
},
Glob => Import {
source: import.source.path.whole_name(),
name: import.source.path.last_name().to_string(),
name: import.source.path.last().to_string(),
id: import.source.did.map(ItemId::from).map(from_item_id),
glob: true,
},


@ -2153,8 +2153,8 @@ fn privacy_error(cx: &DocContext<'_>, diag_info: &DiagnosticInfo<'_>, path_str:
let sym;
let item_name = match diag_info.item.name {
Some(name) => {
sym = name.as_str();
&*sym
sym = name;
sym.as_str()
}
None => "<unknown>",
};


@ -21,7 +21,7 @@ declare_lint_pass!(Pass => [TEST_LINT, PLEASE_LINT]);
impl<'tcx> LateLintPass<'tcx> for Pass {
fn check_item(&mut self, cx: &LateContext, it: &rustc_hir::Item) {
match &*it.ident.as_str() {
match it.ident.as_str() {
"lintme" => cx.lint(TEST_LINT, |lint| {
lint.build("item is named 'lintme'").set_span(it.span).emit()
}),


@ -17,7 +17,7 @@ use rustc_semver::RustcVersion;
use rustc_session::{declare_lint_pass, declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::Span;
use rustc_span::sym;
use rustc_span::symbol::{Symbol, SymbolStr};
use rustc_span::symbol::Symbol;
use semver::Version;
static UNIX_SYSTEMS: &[&str] = &[
@ -310,8 +310,8 @@ impl<'tcx> LateLintPass<'tcx> for Attributes {
|| is_word(lint, sym::deprecated)
|| is_word(lint, sym!(unreachable_pub))
|| is_word(lint, sym!(unused))
|| extract_clippy_lint(lint).map_or(false, |s| s == "wildcard_imports")
|| extract_clippy_lint(lint).map_or(false, |s| s == "enum_glob_use")
|| extract_clippy_lint(lint).map_or(false, |s| s.as_str() == "wildcard_imports")
|| extract_clippy_lint(lint).map_or(false, |s| s.as_str() == "enum_glob_use")
{
return;
}
@ -370,7 +370,7 @@ impl<'tcx> LateLintPass<'tcx> for Attributes {
}
/// Returns the lint name if it is clippy lint.
fn extract_clippy_lint(lint: &NestedMetaItem) -> Option<SymbolStr> {
fn extract_clippy_lint(lint: &NestedMetaItem) -> Option<Symbol> {
if_chain! {
if let Some(meta_item) = lint.meta_item();
if meta_item.path.segments.len() > 1;
@ -378,7 +378,7 @@ fn extract_clippy_lint(lint: &NestedMetaItem) -> Option<SymbolStr> {
if tool_name.name == sym::clippy;
then {
let lint_name = meta_item.path.segments.last().unwrap().ident.name;
return Some(lint_name.as_str());
return Some(lint_name);
}
}
None
@ -387,7 +387,7 @@ fn extract_clippy_lint(lint: &NestedMetaItem) -> Option<SymbolStr> {
fn check_clippy_lint_names(cx: &LateContext<'_>, name: Symbol, items: &[NestedMetaItem]) {
for lint in items {
if let Some(lint_name) = extract_clippy_lint(lint) {
if lint_name == "restriction" && name != sym::allow {
if lint_name.as_str() == "restriction" && name != sym::allow {
span_lint_and_help(
cx,
BLANKET_CLIPPY_RESTRICTION_LINTS,
@ -486,7 +486,7 @@ fn check_attrs(cx: &LateContext<'_>, span: Span, name: Symbol, attrs: &[Attribut
fn check_semver(cx: &LateContext<'_>, span: Span, lit: &Lit) {
if let LitKind::Str(is, _) = lit.kind {
if Version::parse(&is.as_str()).is_ok() {
if Version::parse(is.as_str()).is_ok() {
return;
}
}
@ -619,7 +619,7 @@ fn check_mismatched_target_os(cx: &EarlyContext<'_>, attr: &Attribute) {
MetaItemKind::Word => {
if_chain! {
if let Some(ident) = meta.ident();
if let Some(os) = find_os(&*ident.name.as_str());
if let Some(os) = find_os(ident.name.as_str());
then {
mismatched.push((os, ident.span));
}


@ -272,7 +272,7 @@ fn simplify_not(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<String> {
.copied()
.flat_map(|(a, b)| vec![(a, b), (b, a)])
.find(|&(a, _)| {
let path: &str = &path.ident.name.as_str();
let path: &str = path.ident.name.as_str();
a == path
})
.and_then(|(_, neg_method)| Some(format!("{}.{}()", snippet_opt(cx, args[0].span)?, neg_method)))


@ -321,8 +321,8 @@ fn get_implementing_type<'a>(path: &QPath<'_>, candidates: &'a [&str], function:
if let TyKind::Path(QPath::Resolved(None, tp)) = &ty.kind;
if let [int] = &*tp.segments;
then {
let name = &int.ident.name.as_str();
candidates.iter().find(|c| name == *c).copied()
let name = int.ident.name.as_str();
candidates.iter().find(|c| &name == *c).copied()
} else {
None
}
@ -335,8 +335,8 @@ fn int_ty_to_sym<'tcx>(path: &QPath<'_>) -> Option<&'tcx str> {
if let QPath::Resolved(_, path) = *path;
if let [ty] = &*path.segments;
then {
let name = &ty.ident.name.as_str();
INTS.iter().find(|c| name == *c).copied()
let name = ty.ident.name.as_str();
INTS.iter().find(|c| &name == *c).copied()
} else {
None
}


@ -437,7 +437,7 @@ fn check_attrs<'a>(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, attrs
for attr in attrs {
if let AttrKind::DocComment(comment_kind, comment) = attr.kind {
let (comment, current_spans) = strip_doc_comment_decoration(&comment.as_str(), comment_kind, attr.span);
let (comment, current_spans) = strip_doc_comment_decoration(comment.as_str(), comment_kind, attr.span);
spans.extend_from_slice(&current_spans);
doc.push_str(&comment);
} else if attr.has_name(sym::doc) {


@ -49,7 +49,7 @@ impl<'tcx> LateLintPass<'tcx> for DurationSubsec {
if match_type(cx, cx.typeck_results().expr_ty(&args[0]).peel_refs(), &paths::DURATION);
if let Some((Constant::Int(divisor), _)) = constant(cx, cx.typeck_results(), right);
then {
let suggested_fn = match (method_path.ident.as_str().as_ref(), divisor) {
let suggested_fn = match (method_path.ident.as_str(), divisor) {
("subsec_micros", 1_000) | ("subsec_nanos", 1_000_000) => "subsec_millis",
("subsec_nanos", 1_000) => "subsec_micros",
_ => return,


@ -153,7 +153,7 @@ fn check_variant(
);
}
}
let first = &def.variants[0].ident.name.as_str();
let first = def.variants[0].ident.name.as_str();
let mut pre = &first[..str_utils::camel_case_until(&*first).byte_index];
let mut post = &first[str_utils::camel_case_start(&*first).byte_index..];
for var in def.variants {

Some files were not shown because too many files have changed in this diff.