Auto merge of #103841 - Dylan-DPC:rollup-rff2x1l, r=Dylan-DPC

Rollup of 5 pull requests

Successful merges:

 - #84022 (Make PROC_MACRO_DERIVE_RESOLUTION_FALLBACK a hard error)
 - #103760 (resolve: Turn the binding from `#[macro_export]` into a proper `Import`)
 - #103813 (rustdoc: remove unnecessary CSS `.search-results { clear: both }`)
 - #103817 (rustdoc: rename syntax highlighting CSS class `attribute` to `attr`)
 - #103833 (⬆️ rust-analyzer)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
Committed by bors on 2022-11-01 18:15:32 +00:00 (commit ab5a2bc731).
61 changed files with 1135 additions and 833 deletions
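
The first entry (#84022) turns the long-standing `proc_macro_derive_resolution_fallback` lint into a hard error. Below is a minimal sketch of the affected pattern, with illustrative names only (the trait, struct, and module are not taken from the PR): a module emitted by a derive must now import parent-module names explicitly instead of relying on the removed fallback.

```rust
#![allow(dead_code)]

// Sketch only: pretend `generated` is what a proc-macro derive expands to.
trait SomeTrait {}
struct Something;

mod generated {
    // Without this `use`, `Something` and `SomeTrait` no longer resolve here;
    // previously this only triggered the deny-by-default fallback lint.
    use super::{Something, SomeTrait};

    impl SomeTrait for Something {}
}

fn main() {}
```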

View File

@ -1982,73 +1982,6 @@ declare_lint! {
};
}
declare_lint! {
/// The `proc_macro_derive_resolution_fallback` lint detects proc macro
/// derives using inaccessible names from parent modules.
///
/// ### Example
///
/// ```rust,ignore (proc-macro)
/// // foo.rs
/// #![crate_type = "proc-macro"]
///
/// extern crate proc_macro;
///
/// use proc_macro::*;
///
/// #[proc_macro_derive(Foo)]
/// pub fn foo1(a: TokenStream) -> TokenStream {
/// drop(a);
/// "mod __bar { static mut BAR: Option<Something> = None; }".parse().unwrap()
/// }
/// ```
///
/// ```rust,ignore (needs-dependency)
/// // bar.rs
/// #[macro_use]
/// extern crate foo;
///
/// struct Something;
///
/// #[derive(Foo)]
/// struct Another;
///
/// fn main() {}
/// ```
///
/// This will produce:
///
/// ```text
/// warning: cannot find type `Something` in this scope
/// --> src/main.rs:8:10
/// |
/// 8 | #[derive(Foo)]
/// | ^^^ names from parent modules are not accessible without an explicit import
/// |
/// = note: `#[warn(proc_macro_derive_resolution_fallback)]` on by default
/// = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
/// = note: for more information, see issue #50504 <https://github.com/rust-lang/rust/issues/50504>
/// ```
///
/// ### Explanation
///
/// If a proc-macro generates a module, the compiler unintentionally
/// allowed items in that module to refer to items in the crate root
/// without importing them. This is a [future-incompatible] lint to
/// transition this to a hard error in the future. See [issue #50504] for
/// more details.
///
/// [issue #50504]: https://github.com/rust-lang/rust/issues/50504
/// [future-incompatible]: ../index.md#future-incompatible-lints
pub PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
Deny,
"detects proc macro derives using inaccessible names from parent modules",
@future_incompatible = FutureIncompatibleInfo {
reference: "issue #83583 <https://github.com/rust-lang/rust/issues/83583>",
reason: FutureIncompatibilityReason::FutureReleaseErrorReportNow,
};
}
declare_lint! {
/// The `macro_use_extern_crate` lint detects the use of the
/// [`macro_use` attribute].
@ -3287,7 +3220,6 @@ declare_lint_pass! {
UNSTABLE_NAME_COLLISIONS,
IRREFUTABLE_LET_PATTERNS,
WHERE_CLAUSES_OBJECT_SAFETY,
PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
MACRO_USE_EXTERN_CRATE,
MACRO_EXPANDED_MACRO_EXPORTS_ACCESSED_BY_ABSOLUTE_PATHS,
ILL_FORMED_ATTRIBUTE_INPUT,

View File

@ -56,21 +56,7 @@ impl<'a, Id: Into<DefId>> ToNameBinding<'a>
impl<'a, Id: Into<DefId>> ToNameBinding<'a> for (Res, ty::Visibility<Id>, Span, LocalExpnId) {
fn to_name_binding(self, arenas: &'a ResolverArenas<'a>) -> &'a NameBinding<'a> {
arenas.alloc_name_binding(NameBinding {
kind: NameBindingKind::Res(self.0, false),
ambiguity: None,
vis: self.1.to_def_id(),
span: self.2,
expansion: self.3,
})
}
}
struct IsMacroExport;
impl<'a> ToNameBinding<'a> for (Res, ty::Visibility, Span, LocalExpnId, IsMacroExport) {
fn to_name_binding(self, arenas: &'a ResolverArenas<'a>) -> &'a NameBinding<'a> {
arenas.alloc_name_binding(NameBinding {
kind: NameBindingKind::Res(self.0, true),
kind: NameBindingKind::Res(self.0),
ambiguity: None,
vis: self.1.to_def_id(),
span: self.2,
@ -364,7 +350,6 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
module_path: Vec<Segment>,
kind: ImportKind<'a>,
span: Span,
id: NodeId,
item: &ast::Item,
root_span: Span,
root_id: NodeId,
@ -377,7 +362,6 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
module_path,
imported_module: Cell::new(None),
span,
id,
use_span: item.span,
use_span_with_attributes: item.span_with_attributes(),
has_attributes: !item.attrs.is_empty(),
@ -574,27 +558,20 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
},
type_ns_only,
nested,
id,
additional_ids: (id1, id2),
};
self.add_import(
module_path,
kind,
use_tree.span,
id,
item,
root_span,
item.id,
vis,
);
self.add_import(module_path, kind, use_tree.span, item, root_span, item.id, vis);
}
ast::UseTreeKind::Glob => {
let kind = ImportKind::Glob {
is_prelude: self.r.session.contains_name(&item.attrs, sym::prelude_import),
max_vis: Cell::new(None),
id,
};
self.r.visibilities.insert(self.r.local_def_id(id), vis);
self.add_import(prefix, kind, use_tree.span, id, item, root_span, item.id, vis);
self.add_import(prefix, kind, use_tree.span, item, root_span, item.id, vis);
}
ast::UseTreeKind::Nested(ref items) => {
// Ensure there is at most one `self` in the list
@ -881,9 +858,8 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
})
.unwrap_or((true, None, self.r.dummy_binding));
let import = self.r.arenas.alloc_import(Import {
kind: ImportKind::ExternCrate { source: orig_name, target: ident },
kind: ImportKind::ExternCrate { source: orig_name, target: ident, id: item.id },
root_id: item.id,
id: item.id,
parent_scope: self.parent_scope,
imported_module: Cell::new(module),
has_attributes: !item.attrs.is_empty(),
@ -1118,7 +1094,6 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
this.r.arenas.alloc_import(Import {
kind: ImportKind::MacroUse,
root_id: item.id,
id: item.id,
parent_scope: this.parent_scope,
imported_module: Cell::new(Some(ModuleOrUniformRoot::Module(module))),
use_span_with_attributes: item.span_with_attributes(),
@ -1278,8 +1253,22 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
let binding = (res, vis, span, expansion).to_name_binding(self.r.arenas);
self.r.set_binding_parent_module(binding, parent_scope.module);
if is_macro_export {
let module = self.r.graph_root;
self.r.define(module, ident, MacroNS, (res, vis, span, expansion, IsMacroExport));
let import = self.r.arenas.alloc_import(Import {
kind: ImportKind::MacroExport,
root_id: item.id,
parent_scope: self.parent_scope,
imported_module: Cell::new(None),
has_attributes: false,
use_span_with_attributes: span,
use_span: span,
root_span: span,
span: span,
module_path: Vec::new(),
vis: Cell::new(Some(vis)),
used: Cell::new(true),
});
let import_binding = self.r.import(binding, import);
self.r.define(self.r.graph_root, ident, MacroNS, import_binding);
} else {
self.r.check_reserved_macro_name(ident, res);
self.insert_unused_macro(ident, def_id, item.id, &rule_spans);

View File

@ -234,7 +234,7 @@ impl Resolver<'_> {
if !import.span.is_dummy() {
self.lint_buffer.buffer_lint(
MACRO_USE_EXTERN_CRATE,
import.id,
import.root_id,
import.span,
"deprecated `#[macro_use]` attribute used to \
import macros should be replaced at use sites \
@ -244,13 +244,13 @@ impl Resolver<'_> {
}
}
}
ImportKind::ExternCrate { .. } => {
let def_id = self.local_def_id(import.id);
ImportKind::ExternCrate { id, .. } => {
let def_id = self.local_def_id(id);
self.maybe_unused_extern_crates.push((def_id, import.span));
}
ImportKind::MacroUse => {
let msg = "unused `#[macro_use]` import";
self.lint_buffer.buffer_lint(UNUSED_IMPORTS, import.id, import.span, msg);
self.lint_buffer.buffer_lint(UNUSED_IMPORTS, import.root_id, import.span, msg);
}
_ => {}
}

View File

@ -190,12 +190,12 @@ impl<'a> Resolver<'a> {
ModuleKind::Block => "block",
};
let old_noun = match old_binding.is_import() {
let old_noun = match old_binding.is_import_user_facing() {
true => "import",
false => "definition",
};
let new_participle = match new_binding.is_import() {
let new_participle = match new_binding.is_import_user_facing() {
true => "imported",
false => "defined",
};
@ -226,7 +226,7 @@ impl<'a> Resolver<'a> {
true => struct_span_err!(self.session, span, E0254, "{}", msg),
false => struct_span_err!(self.session, span, E0260, "{}", msg),
},
_ => match (old_binding.is_import(), new_binding.is_import()) {
_ => match (old_binding.is_import_user_facing(), new_binding.is_import_user_facing()) {
(false, false) => struct_span_err!(self.session, span, E0428, "{}", msg),
(true, true) => struct_span_err!(self.session, span, E0252, "{}", msg),
_ => struct_span_err!(self.session, span, E0255, "{}", msg),
@ -248,14 +248,18 @@ impl<'a> Resolver<'a> {
// See https://github.com/rust-lang/rust/issues/32354
use NameBindingKind::Import;
let can_suggest = |binding: &NameBinding<'_>, import: &self::Import<'_>| {
!binding.span.is_dummy()
&& !matches!(import.kind, ImportKind::MacroUse | ImportKind::MacroExport)
};
let import = match (&new_binding.kind, &old_binding.kind) {
// If there are two imports where one or both have attributes then prefer removing the
// import without attributes.
(Import { import: new, .. }, Import { import: old, .. })
if {
!new_binding.span.is_dummy()
&& !old_binding.span.is_dummy()
&& (new.has_attributes || old.has_attributes)
(new.has_attributes || old.has_attributes)
&& can_suggest(old_binding, old)
&& can_suggest(new_binding, new)
} =>
{
if old.has_attributes {
@ -265,10 +269,10 @@ impl<'a> Resolver<'a> {
}
}
// Otherwise prioritize the new binding.
(Import { import, .. }, other) if !new_binding.span.is_dummy() => {
(Import { import, .. }, other) if can_suggest(new_binding, import) => {
Some((import, new_binding.span, other.is_import()))
}
(other, Import { import, .. }) if !old_binding.span.is_dummy() => {
(other, Import { import, .. }) if can_suggest(old_binding, import) => {
Some((import, old_binding.span, other.is_import()))
}
_ => None,
@ -353,7 +357,7 @@ impl<'a> Resolver<'a> {
}
}
}
ImportKind::ExternCrate { source, target } => {
ImportKind::ExternCrate { source, target, .. } => {
suggestion = Some(format!(
"extern crate {} as {};",
source.unwrap_or(target.name),
@ -1202,7 +1206,7 @@ impl<'a> Resolver<'a> {
let root_module = this.resolve_crate_root(root_ident);
this.add_module_candidates(root_module, &mut suggestions, filter_fn, None);
}
Scope::Module(module, _) => {
Scope::Module(module) => {
this.add_module_candidates(module, &mut suggestions, filter_fn, None);
}
Scope::MacroUsePrelude => {
@ -1683,7 +1687,7 @@ impl<'a> Resolver<'a> {
let a = if built_in.is_empty() { res.article() } else { "a" };
format!("{a}{built_in} {thing}{from}", thing = res.descr())
} else {
let introduced = if b.is_import() { "imported" } else { "defined" };
let introduced = if b.is_import_user_facing() { "imported" } else { "defined" };
format!("the {thing} {introduced} here", thing = res.descr())
}
}
@ -1742,10 +1746,10 @@ impl<'a> Resolver<'a> {
/// If the binding refers to a tuple struct constructor with fields,
/// returns the span of its fields.
fn ctor_fields_span(&self, binding: &NameBinding<'_>) -> Option<Span> {
if let NameBindingKind::Res(
Res::Def(DefKind::Ctor(CtorOf::Struct, CtorKind::Fn), ctor_def_id),
_,
) = binding.kind
if let NameBindingKind::Res(Res::Def(
DefKind::Ctor(CtorOf::Struct, CtorKind::Fn),
ctor_def_id,
)) = binding.kind
{
let def_id = self.parent(ctor_def_id);
let fields = self.field_names.get(&def_id)?;
@ -1789,7 +1793,9 @@ impl<'a> Resolver<'a> {
next_ident = source;
Some(binding)
}
ImportKind::Glob { .. } | ImportKind::MacroUse => Some(binding),
ImportKind::Glob { .. } | ImportKind::MacroUse | ImportKind::MacroExport => {
Some(binding)
}
ImportKind::ExternCrate { .. } => None,
},
_ => None,

View File

@ -57,26 +57,45 @@ impl<'r, 'a> EffectiveVisibilitiesVisitor<'r, 'a> {
while let NameBindingKind::Import { binding: nested_binding, import, .. } =
binding.kind
{
let mut update = |node_id| self.update(
self.r.local_def_id(node_id),
binding.vis.expect_local(),
prev_parent_id,
level,
);
// In theory all the import IDs have individual visibilities and effective
// visibilities, but in practice these IDs go straight to HIR where all
// their few uses assume that their (effective) visibility applies to the
// whole syntactic `use` item. So we update them all to the maximum value
// among the potential individual effective visibilities. Maybe HIR for
// imports shouldn't use three IDs at all.
update(import.id);
if let ImportKind::Single { additional_ids, .. } = import.kind {
update(additional_ids.0);
update(additional_ids.1);
let mut update = |node_id| {
self.update(
self.r.local_def_id(node_id),
binding.vis.expect_local(),
prev_parent_id,
level,
)
};
match import.kind {
ImportKind::Single { id, additional_ids, .. } => {
// In theory all the import IDs have individual visibilities and
// effective visibilities, but in practice these IDs go straight to
// HIR where all their few uses assume that their (effective)
// visibility applies to the whole syntactic `use` item. So we
// update them all to the maximum value among the potential
// individual effective visibilities. Maybe HIR for imports
// shouldn't use three IDs at all.
update(id);
update(additional_ids.0);
update(additional_ids.1);
prev_parent_id = self.r.local_def_id(id);
}
ImportKind::Glob { id, .. } | ImportKind::ExternCrate { id, .. } => {
update(id);
prev_parent_id = self.r.local_def_id(id);
}
ImportKind::MacroUse => {
// In theory we should reset the parent id to something private
// here, but `macro_use` imports always refer to external items,
// so it doesn't matter and we can just do nothing.
}
ImportKind::MacroExport => {
// In theory we should reset the parent id to something public
// here, but it has the same effect as leaving the previous parent,
// so we can just do nothing.
}
}
level = Level::Reexported;
prev_parent_id = self.r.local_def_id(import.id);
binding = nested_binding;
}
}
@ -138,13 +157,6 @@ impl<'r, 'ast> Visitor<'ast> for EffectiveVisibilitiesVisitor<'ast, 'r> {
self.update(def_id, Visibility::Public, parent_id, Level::Direct);
}
// Only exported `macro_rules!` items are public, but they always are
ast::ItemKind::MacroDef(ref macro_def) if macro_def.macro_rules => {
let parent_id = self.r.local_parent(def_id);
let vis = self.r.visibilities[&def_id];
self.update(def_id, vis, parent_id, Level::Direct);
}
ast::ItemKind::Mod(..) => {
self.set_bindings_effective_visibilities(def_id);
visit::walk_item(self, item);

View File

@ -1,11 +1,9 @@
use rustc_ast::{self as ast, NodeId};
use rustc_ast as ast;
use rustc_feature::is_builtin_attr_name;
use rustc_hir::def::{DefKind, Namespace, NonMacroAttrKind, PartialRes, PerNS};
use rustc_hir::PrimTy;
use rustc_middle::bug;
use rustc_middle::ty;
use rustc_session::lint::builtin::PROC_MACRO_DERIVE_RESOLUTION_FALLBACK;
use rustc_session::lint::BuiltinLintDiagnostics;
use rustc_span::def_id::LocalDefId;
use rustc_span::edition::Edition;
use rustc_span::hygiene::{ExpnId, ExpnKind, LocalExpnId, MacroKind, SyntaxContext};
@ -19,7 +17,7 @@ use crate::late::{
};
use crate::macros::{sub_namespace_match, MacroRulesScope};
use crate::{AmbiguityError, AmbiguityErrorMisc, AmbiguityKind, Determinacy, Finalize};
use crate::{ImportKind, LexicalScopeBinding, Module, ModuleKind, ModuleOrUniformRoot};
use crate::{Import, ImportKind, LexicalScopeBinding, Module, ModuleKind, ModuleOrUniformRoot};
use crate::{NameBinding, NameBindingKind, ParentScope, PathResult, PrivacyError, Res};
use crate::{ResolutionError, Resolver, Scope, ScopeSet, Segment, ToNameBinding, Weak};
@ -101,7 +99,7 @@ impl<'a> Resolver<'a> {
};
let mut scope = match ns {
_ if is_absolute_path => Scope::CrateRoot,
TypeNS | ValueNS => Scope::Module(module, None),
TypeNS | ValueNS => Scope::Module(module),
MacroNS => Scope::DeriveHelpers(parent_scope.expansion),
};
let mut ctxt = ctxt.normalize_to_macros_2_0();
@ -165,7 +163,7 @@ impl<'a> Resolver<'a> {
MacroRulesScope::Invocation(invoc_id) => {
Scope::MacroRules(self.invocation_parent_scopes[&invoc_id].macro_rules)
}
MacroRulesScope::Empty => Scope::Module(module, None),
MacroRulesScope::Empty => Scope::Module(module),
},
Scope::CrateRoot => match ns {
TypeNS => {
@ -174,16 +172,10 @@ impl<'a> Resolver<'a> {
}
ValueNS | MacroNS => break,
},
Scope::Module(module, prev_lint_id) => {
Scope::Module(module) => {
use_prelude = !module.no_implicit_prelude;
let derive_fallback_lint_id = match scope_set {
ScopeSet::Late(.., lint_id) => lint_id,
_ => None,
};
match self.hygienic_lexical_parent(module, &mut ctxt, derive_fallback_lint_id) {
Some((parent_module, lint_id)) => {
Scope::Module(parent_module, lint_id.or(prev_lint_id))
}
match self.hygienic_lexical_parent(module, &mut ctxt) {
Some(parent_module) => Scope::Module(parent_module),
None => {
ctxt.adjust(ExpnId::root());
match ns {
@ -215,45 +207,13 @@ impl<'a> Resolver<'a> {
&mut self,
module: Module<'a>,
ctxt: &mut SyntaxContext,
derive_fallback_lint_id: Option<NodeId>,
) -> Option<(Module<'a>, Option<NodeId>)> {
) -> Option<Module<'a>> {
if !module.expansion.outer_expn_is_descendant_of(*ctxt) {
return Some((self.expn_def_scope(ctxt.remove_mark()), None));
return Some(self.expn_def_scope(ctxt.remove_mark()));
}
if let ModuleKind::Block = module.kind {
return Some((module.parent.unwrap().nearest_item_scope(), None));
}
// We need to support the next case under a deprecation warning
// ```
// struct MyStruct;
// ---- begin: this comes from a proc macro derive
// mod implementation_details {
// // Note that `MyStruct` is not in scope here.
// impl SomeTrait for MyStruct { ... }
// }
// ---- end
// ```
// So we have to fall back to the module's parent during lexical resolution in this case.
if derive_fallback_lint_id.is_some() {
if let Some(parent) = module.parent {
// Inner module is inside the macro, parent module is outside of the macro.
if module.expansion != parent.expansion
&& module.expansion.is_descendant_of(parent.expansion)
{
// The macro is a proc macro derive
if let Some(def_id) = module.expansion.expn_data().macro_def_id {
let ext = self.get_macro_by_def_id(def_id).ext;
if ext.builtin_name.is_none()
&& ext.macro_kind() == MacroKind::Derive
&& parent.expansion.outer_expn_is_descendant_of(*ctxt)
{
return Some((parent, derive_fallback_lint_id));
}
}
}
}
return Some(module.parent.unwrap().nearest_item_scope());
}
None
@ -510,7 +470,7 @@ impl<'a> Resolver<'a> {
Err((Determinacy::Determined, _)) => Err(Determinacy::Determined),
}
}
Scope::Module(module, derive_fallback_lint_id) => {
Scope::Module(module) => {
let adjusted_parent_scope = &ParentScope { module, ..*parent_scope };
let binding = this.resolve_ident_in_module_unadjusted_ext(
ModuleOrUniformRoot::Module(module),
@ -523,21 +483,6 @@ impl<'a> Resolver<'a> {
);
match binding {
Ok(binding) => {
if let Some(lint_id) = derive_fallback_lint_id {
this.lint_buffer.buffer_lint_with_diagnostic(
PROC_MACRO_DERIVE_RESOLUTION_FALLBACK,
lint_id,
orig_ident.span,
&format!(
"cannot find {} `{}` in this scope",
ns.descr(),
ident
),
BuiltinLintDiagnostics::ProcMacroDeriveResolutionFallback(
orig_ident.span,
),
);
}
let misc_flags = if ptr::eq(module, this.graph_root) {
Flags::MISC_SUGGEST_CRATE
} else if module.is_normal() {
@ -915,7 +860,11 @@ impl<'a> Resolver<'a> {
}
if !restricted_shadowing && binding.expansion != LocalExpnId::ROOT {
if let NameBindingKind::Res(_, true) = binding.kind {
if let NameBindingKind::Import {
import: Import { kind: ImportKind::MacroExport, .. },
..
} = binding.kind
{
self.macro_expanded_macro_export_errors.insert((path_span, binding.span));
}
}

View File

@ -44,20 +44,36 @@ pub enum ImportKind<'a> {
type_ns_only: bool,
/// Did this import result from a nested import? i.e., `use foo::{bar, baz};`
nested: bool,
/// The ID of the `UseTree` that imported this `Import`.
///
/// In the case where the `Import` was expanded from a "nested" use tree,
/// this id is the ID of the leaf tree. For example:
///
/// ```ignore (pacify the merciless tidy)
/// use foo::bar::{a, b}
/// ```
///
/// If this is the import for `foo::bar::a`, we would have the ID of the `UseTree`
/// for `a` in this field.
id: NodeId,
/// Additional `NodeId`s allocated to an `ast::UseTree` for automatically generated `use` statements
/// (e.g. implicit struct constructors)
additional_ids: (NodeId, NodeId),
},
Glob {
is_prelude: bool,
max_vis: Cell<Option<ty::Visibility>>, // The visibility of the greatest re-export.
// n.b. `max_vis` is only used in `finalize_import` to check for re-export errors.
// The visibility of the greatest re-export.
// n.b. `max_vis` is only used in `finalize_import` to check for re-export errors.
max_vis: Cell<Option<ty::Visibility>>,
id: NodeId,
},
ExternCrate {
source: Option<Symbol>,
target: Ident,
id: NodeId,
},
MacroUse,
MacroExport,
}
/// Manually implement `Debug` for `ImportKind` because the `source/target_bindings`
@ -71,6 +87,7 @@ impl<'a> std::fmt::Debug for ImportKind<'a> {
ref target,
ref type_ns_only,
ref nested,
ref id,
ref additional_ids,
// Ignore the following to avoid an infinite loop while printing.
source_bindings: _,
@ -81,19 +98,23 @@ impl<'a> std::fmt::Debug for ImportKind<'a> {
.field("target", target)
.field("type_ns_only", type_ns_only)
.field("nested", nested)
.field("id", id)
.field("additional_ids", additional_ids)
.finish_non_exhaustive(),
Glob { ref is_prelude, ref max_vis } => f
Glob { ref is_prelude, ref max_vis, ref id } => f
.debug_struct("Glob")
.field("is_prelude", is_prelude)
.field("max_vis", max_vis)
.field("id", id)
.finish(),
ExternCrate { ref source, ref target } => f
ExternCrate { ref source, ref target, ref id } => f
.debug_struct("ExternCrate")
.field("source", source)
.field("target", target)
.field("id", id)
.finish(),
MacroUse => f.debug_struct("MacroUse").finish(),
MacroExport => f.debug_struct("MacroExport").finish(),
}
}
}
@ -103,24 +124,15 @@ impl<'a> std::fmt::Debug for ImportKind<'a> {
pub(crate) struct Import<'a> {
pub kind: ImportKind<'a>,
/// The ID of the `extern crate`, `UseTree` etc that imported this `Import`.
///
/// In the case where the `Import` was expanded from a "nested" use tree,
/// this id is the ID of the leaf tree. For example:
///
/// ```ignore (pacify the merciless tidy)
/// Node ID of the "root" use item -- this is always the same as `ImportKind`'s `id`
/// (if it exists) except in the case of "nested" use trees, in which case
/// it will be the ID of the root use tree. e.g., in the example
/// ```ignore (incomplete code)
/// use foo::bar::{a, b}
/// ```
///
/// If this is the import for `foo::bar::a`, we would have the ID of the `UseTree`
/// for `a` in this field.
pub id: NodeId,
/// The `id` of the "root" use-kind -- this is always the same as
/// `id` except in the case of "nested" use trees, in which case
/// it will be the `id` of the root use tree. e.g., in the example
/// from `id`, this would be the ID of the `use foo::bar`
/// `UseTree` node.
/// this would be the ID of the `use foo::bar` `UseTree` node.
/// In case of imports without their own node ID it's the closest node that can be used,
/// for example, for reporting lints.
pub root_id: NodeId,
/// Span of the entire use statement.
@ -161,6 +173,15 @@ impl<'a> Import<'a> {
pub(crate) fn expect_vis(&self) -> ty::Visibility {
self.vis.get().expect("encountered cleared import visibility")
}
pub(crate) fn id(&self) -> Option<NodeId> {
match self.kind {
ImportKind::Single { id, .. }
| ImportKind::Glob { id, .. }
| ImportKind::ExternCrate { id, .. } => Some(id),
ImportKind::MacroUse | ImportKind::MacroExport => None,
}
}
}
/// Records information about the resolution of a name in a namespace of a module.
@ -368,7 +389,9 @@ impl<'a> Resolver<'a> {
self.record_use(target, dummy_binding, false);
} else if import.imported_module.get().is_none() {
import.used.set(true);
self.used_imports.insert(import.id);
if let Some(id) = import.id() {
self.used_imports.insert(id);
}
}
}
}
@ -718,47 +741,51 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
PathResult::Indeterminate => unreachable!(),
};
let (ident, target, source_bindings, target_bindings, type_ns_only) = match import.kind {
ImportKind::Single {
source,
target,
ref source_bindings,
ref target_bindings,
type_ns_only,
..
} => (source, target, source_bindings, target_bindings, type_ns_only),
ImportKind::Glob { is_prelude, ref max_vis } => {
if import.module_path.len() <= 1 {
// HACK(eddyb) `lint_if_path_starts_with_module` needs at least
// 2 segments, so the `resolve_path` above won't trigger it.
let mut full_path = import.module_path.clone();
full_path.push(Segment::from_ident(Ident::empty()));
self.r.lint_if_path_starts_with_module(Some(finalize), &full_path, None);
}
if let ModuleOrUniformRoot::Module(module) = module {
if ptr::eq(module, import.parent_scope.module) {
// Importing a module into itself is not allowed.
return Some(UnresolvedImportError {
span: import.span,
label: Some(String::from("cannot glob-import a module into itself")),
note: None,
suggestion: None,
candidate: None,
});
let (ident, target, source_bindings, target_bindings, type_ns_only, import_id) =
match import.kind {
ImportKind::Single {
source,
target,
ref source_bindings,
ref target_bindings,
type_ns_only,
id,
..
} => (source, target, source_bindings, target_bindings, type_ns_only, id),
ImportKind::Glob { is_prelude, ref max_vis, id } => {
if import.module_path.len() <= 1 {
// HACK(eddyb) `lint_if_path_starts_with_module` needs at least
// 2 segments, so the `resolve_path` above won't trigger it.
let mut full_path = import.module_path.clone();
full_path.push(Segment::from_ident(Ident::empty()));
self.r.lint_if_path_starts_with_module(Some(finalize), &full_path, None);
}
}
if !is_prelude
if let ModuleOrUniformRoot::Module(module) = module {
if ptr::eq(module, import.parent_scope.module) {
// Importing a module into itself is not allowed.
return Some(UnresolvedImportError {
span: import.span,
label: Some(String::from(
"cannot glob-import a module into itself",
)),
note: None,
suggestion: None,
candidate: None,
});
}
}
if !is_prelude
&& let Some(max_vis) = max_vis.get()
&& !max_vis.is_at_least(import.expect_vis(), &*self.r)
{
let msg = "glob import doesn't reexport anything because no candidate is public enough";
self.r.lint_buffer.buffer_lint(UNUSED_IMPORTS, import.id, import.span, msg);
self.r.lint_buffer.buffer_lint(UNUSED_IMPORTS, id, import.span, msg);
}
return None;
}
_ => unreachable!(),
};
return None;
}
_ => unreachable!(),
};
let mut all_ns_err = true;
self.r.per_ns(|this, ns| {
@ -858,7 +885,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
match binding.kind {
// Never suggest the name that has binding error
// i.e., the name that cannot be previously resolved
NameBindingKind::Res(Res::Err, _) => None,
NameBindingKind::Res(Res::Err) => None,
_ => Some(i.name),
}
}
@ -960,7 +987,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
);
self.r.lint_buffer.buffer_lint(
PUB_USE_OF_PRIVATE_EXTERN_CRATE,
import.id,
import_id,
import.span,
&msg,
);
@ -989,7 +1016,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
let mut err =
struct_span_err!(self.r.session, import.span, E0364, "{error_msg}");
match binding.kind {
NameBindingKind::Res(Res::Def(DefKind::Macro(_), def_id), _)
NameBindingKind::Res(Res::Def(DefKind::Macro(_), def_id))
// exclude decl_macro
if self.r.get_macro_by_def_id(def_id).macro_rules =>
{
@ -1029,7 +1056,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
// purposes it's good enough to just favor one over the other.
self.r.per_ns(|this, ns| {
if let Ok(binding) = source_bindings[ns].get() {
this.import_res_map.entry(import.id).or_default()[ns] = Some(binding.res());
this.import_res_map.entry(import_id).or_default()[ns] = Some(binding.res());
}
});
@ -1047,6 +1074,9 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
target_bindings: &PerNS<Cell<Option<&'b NameBinding<'b>>>>,
target: Ident,
) {
// This function is only called for single imports.
let ImportKind::Single { id, .. } = import.kind else { unreachable!() };
// Skip if the import was produced by a macro.
if import.parent_scope.expansion != LocalExpnId::ROOT {
return;
@ -1094,7 +1124,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
redundant_spans.dedup();
self.r.lint_buffer.buffer_lint_with_diagnostic(
UNUSED_IMPORTS,
import.id,
id,
import.span,
&format!("the item `{}` is imported redundantly", ident),
BuiltinLintDiagnostics::RedundantImport(redundant_spans, ident),
@ -1103,6 +1133,9 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
}
fn resolve_glob_import(&mut self, import: &'b Import<'b>) {
// This function is only called for glob imports.
let ImportKind::Glob { id, is_prelude, .. } = import.kind else { unreachable!() };
let ModuleOrUniformRoot::Module(module) = import.imported_module.get().unwrap() else {
self.r.session.span_err(import.span, "cannot glob-import all possible crates");
return;
@ -1113,7 +1146,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
return;
} else if ptr::eq(module, import.parent_scope.module) {
return;
} else if let ImportKind::Glob { is_prelude: true, .. } = import.kind {
} else if is_prelude {
self.r.prelude = Some(module);
return;
}
@ -1145,7 +1178,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
}
// Record the destination of this import
self.r.record_partial_res(import.id, PartialRes::new(module.res().unwrap()));
self.r.record_partial_res(id, PartialRes::new(module.res().unwrap()));
}
// Miscellaneous post-processing, including recording re-exports,
@ -1204,5 +1237,6 @@ fn import_kind_to_string(import_kind: &ImportKind<'_>) -> String {
ImportKind::Glob { .. } => "*".to_string(),
ImportKind::ExternCrate { .. } => "<extern crate>".to_string(),
ImportKind::MacroUse => "#[macro_use]".to_string(),
ImportKind::MacroExport => "#[macro_export]".to_string(),
}
}
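
For orientation, here is a standalone analogue of the reshaped `ImportKind`/`Import::id()` API above, using simplified stand-in types (this is not the actual `rustc_resolve` code): node IDs now live on the variants that correspond to real syntax, while the synthetic `MacroUse`/`MacroExport` imports report `None`.

```rust
// Standalone sketch only; `NodeId` and the variants are simplified stand-ins.
#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
struct NodeId(u32);

#[allow(dead_code)]
enum ImportKind {
    Single { id: NodeId },
    Glob { id: NodeId },
    ExternCrate { id: NodeId },
    MacroUse,
    MacroExport,
}

impl ImportKind {
    // Mirrors the new `Import::id`: synthetic imports have no node ID of their own.
    fn id(&self) -> Option<NodeId> {
        match *self {
            ImportKind::Single { id }
            | ImportKind::Glob { id }
            | ImportKind::ExternCrate { id } => Some(id),
            ImportKind::MacroUse | ImportKind::MacroExport => None,
        }
    }
}

fn main() {
    assert!(ImportKind::MacroExport.id().is_none());
    assert!(ImportKind::Glob { id: NodeId(7) }.id().is_some());
}
```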

View File

@ -104,9 +104,7 @@ enum Scope<'a> {
DeriveHelpersCompat,
MacroRules(MacroRulesScopeRef<'a>),
CrateRoot,
// The node ID is for reporting the `PROC_MACRO_DERIVE_RESOLUTION_FALLBACK`
// lint if it should be reported.
Module(Module<'a>, Option<NodeId>),
Module(Module<'a>),
MacroUsePrelude,
BuiltinAttrs,
ExternPrelude,
@ -646,7 +644,7 @@ impl<'a> ToNameBinding<'a> for &'a NameBinding<'a> {
#[derive(Clone, Debug)]
enum NameBindingKind<'a> {
Res(Res, /* is_macro_export */ bool),
Res(Res),
Module(Module<'a>),
Import { binding: &'a NameBinding<'a>, import: &'a Import<'a>, used: Cell<bool> },
}
@ -745,7 +743,7 @@ impl<'a> NameBinding<'a> {
fn res(&self) -> Res {
match self.kind {
NameBindingKind::Res(res, _) => res,
NameBindingKind::Res(res) => res,
NameBindingKind::Module(module) => module.res().unwrap(),
NameBindingKind::Import { binding, .. } => binding.res(),
}
@ -762,10 +760,10 @@ impl<'a> NameBinding<'a> {
fn is_possibly_imported_variant(&self) -> bool {
match self.kind {
NameBindingKind::Import { binding, .. } => binding.is_possibly_imported_variant(),
NameBindingKind::Res(
Res::Def(DefKind::Variant | DefKind::Ctor(CtorOf::Variant, ..), _),
NameBindingKind::Res(Res::Def(
DefKind::Variant | DefKind::Ctor(CtorOf::Variant, ..),
_,
) => true,
)) => true,
NameBindingKind::Res(..) | NameBindingKind::Module(..) => false,
}
}
@ -788,6 +786,13 @@ impl<'a> NameBinding<'a> {
matches!(self.kind, NameBindingKind::Import { .. })
}
/// The binding introduced by `#[macro_export] macro_rules` is a public import, but it might
/// not be perceived as such by users, so treat it as a non-import in some diagnostics.
fn is_import_user_facing(&self) -> bool {
matches!(self.kind, NameBindingKind::Import { import, .. }
if !matches!(import.kind, ImportKind::MacroExport))
}
fn is_glob_import(&self) -> bool {
match self.kind {
NameBindingKind::Import { import, .. } => import.is_glob(),
@ -1283,7 +1288,7 @@ impl<'a> Resolver<'a> {
arenas,
dummy_binding: arenas.alloc_name_binding(NameBinding {
kind: NameBindingKind::Res(Res::Err, false),
kind: NameBindingKind::Res(Res::Err),
ambiguity: None,
expansion: LocalExpnId::ROOT,
span: DUMMY_SP,
@ -1551,7 +1556,7 @@ impl<'a> Resolver<'a> {
self.visit_scopes(ScopeSet::All(TypeNS, false), parent_scope, ctxt, |this, scope, _, _| {
match scope {
Scope::Module(module, _) => {
Scope::Module(module) => {
this.traits_in_module(module, assoc_item, &mut found_traits);
}
Scope::StdLibPrelude => {
@ -1613,10 +1618,12 @@ impl<'a> Resolver<'a> {
) -> SmallVec<[LocalDefId; 1]> {
let mut import_ids = smallvec![];
while let NameBindingKind::Import { import, binding, .. } = kind {
let id = self.local_def_id(import.id);
self.maybe_unused_trait_imports.insert(id);
if let Some(node_id) = import.id() {
let def_id = self.local_def_id(node_id);
self.maybe_unused_trait_imports.insert(def_id);
import_ids.push(def_id);
}
self.add_to_glob_map(&import, trait_name);
import_ids.push(id);
kind = &binding.kind;
}
import_ids
@ -1683,7 +1690,9 @@ impl<'a> Resolver<'a> {
}
used.set(true);
import.used.set(true);
self.used_imports.insert(import.id);
if let Some(id) = import.id() {
self.used_imports.insert(id);
}
self.add_to_glob_map(&import, ident);
self.record_use(ident, binding, false);
}
@ -1691,8 +1700,8 @@ impl<'a> Resolver<'a> {
#[inline]
fn add_to_glob_map(&mut self, import: &Import<'_>, ident: Ident) {
if import.is_glob() {
let def_id = self.local_def_id(import.id);
if let ImportKind::Glob { id, .. } = import.kind {
let def_id = self.local_def_id(id);
self.glob_map.entry(def_id).or_default().insert(ident.name);
}
}
@ -1994,11 +2003,7 @@ impl<'a> Resolver<'a> {
// Items that go to reexport table encoded to metadata and visible through it to other crates.
fn is_reexport(&self, binding: &NameBinding<'a>) -> Option<def::Res<!>> {
// FIXME: Consider changing the binding inserted by `#[macro_export] macro_rules`
// into the crate root to actual `NameBindingKind::Import`.
if binding.is_import()
|| matches!(binding.kind, NameBindingKind::Res(_, _is_macro_export @ true))
{
if binding.is_import() {
let res = binding.res().expect_non_local();
// Ambiguous imports are treated as errors at this point and are
// not exposed to other crates (see #36837 for more details).
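
The surface behaviour being remodeled here is sketched below (plain `macro_rules!`, nothing resolver-internal): `#[macro_export]` also binds the macro at the crate root, and that crate-root binding is now represented as a `MacroExport` import instead of a flag on `NameBindingKind::Res`.

```rust
// Minimal sketch: the macro is defined inside `inner`, but `#[macro_export]`
// additionally binds it at the crate root, which is the binding now modelled
// as an `ImportKind::MacroExport` import.
mod inner {
    #[macro_export]
    macro_rules! answer {
        () => {
            42
        };
    }
}

fn main() {
    // Resolves through the crate-root binding created by `#[macro_export]`.
    let _n = crate::answer!();
}
```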

View File

@ -362,7 +362,7 @@ impl Class {
match self {
Class::Comment => "comment",
Class::DocComment => "doccomment",
Class::Attribute => "attribute",
Class::Attribute => "attr",
Class::KeyWord => "kw",
Class::RefKeyWord => "kw-2",
Class::Self_(_) => "self",

View File

@ -3,16 +3,16 @@
.kw { color: #8959A8; }
.kw-2, .prelude-ty { color: #4271AE; }
.number, .string { color: #718C00; }
.self, .bool-val, .prelude-val, .attribute, .attribute .ident { color: #C82829; }
.self, .bool-val, .prelude-val, .attr, .attr .ident { color: #C82829; }
.macro, .macro-nonterminal { color: #3E999F; }
.lifetime { color: #B76514; }
.question-mark { color: #ff9011; }
</style>
<pre><code><span class="attribute">#![crate_type = <span class="string">&quot;lib&quot;</span>]
<pre><code><span class="attr">#![crate_type = <span class="string">&quot;lib&quot;</span>]
</span><span class="kw">use </span>std::path::{Path, PathBuf};
<span class="attribute">#[cfg(target_os = <span class="string">&quot;linux&quot;</span>)]
<span class="attr">#[cfg(target_os = <span class="string">&quot;linux&quot;</span>)]
#[cfg(target_os = <span class="string">&quot;windows&quot;</span>)]
</span><span class="kw">fn </span>main() -&gt; () {
<span class="kw">let </span>foo = <span class="bool-val">true </span>&amp;&amp; <span class="bool-val">false </span>|| <span class="bool-val">true</span>;
@ -23,7 +23,7 @@
<span class="macro">mac!</span>(foo, <span class="kw-2">&amp;mut </span>bar);
<span class="macro">assert!</span>(<span class="self">self</span>.length &lt; N &amp;&amp; index &lt;= <span class="self">self</span>.length);
::std::env::var(<span class="string">&quot;gateau&quot;</span>).is_ok();
<span class="attribute">#[rustfmt::skip]
<span class="attr">#[rustfmt::skip]
</span><span class="kw">let </span>s:std::path::PathBuf = std::path::PathBuf::new();
<span class="kw">let </span><span class="kw-2">mut </span>s = String::new();

View File

@ -9,7 +9,7 @@ const STYLE: &str = r#"
.kw { color: #8959A8; }
.kw-2, .prelude-ty { color: #4271AE; }
.number, .string { color: #718C00; }
.self, .bool-val, .prelude-val, .attribute, .attribute .ident { color: #C82829; }
.self, .bool-val, .prelude-val, .attr, .attr .ident { color: #C82829; }
.macro, .macro-nonterminal { color: #3E999F; }
.lifetime { color: #B76514; }
.question-mark { color: #ff9011; }

View File

@ -880,8 +880,6 @@ so that we can apply CSS-filters to change the arrow color in themes */
.search-results.active {
display: block;
/* prevent overhanging tabs from moving the first result */
clear: both;
}
.search-results .desc > span {
@ -1092,7 +1090,7 @@ pre.rust .bool-val {
pre.rust .self {
color: var(--code-highlight-self-color);
}
pre.rust .attribute {
pre.rust .attr {
color: var(--code-highlight-attribute-color);
}
pre.rust .macro,

View File

@ -15,7 +15,7 @@ define-function: (
string,
bool_val,
self,
attribute,
attr,
macro,
question_mark,
comment,
@ -33,7 +33,7 @@ define-function: (
("assert-css", ("pre.rust .string", {"color": |string|}, ALL)),
("assert-css", ("pre.rust .bool-val", {"color": |bool_val|}, ALL)),
("assert-css", ("pre.rust .self", {"color": |self|}, ALL)),
("assert-css", ("pre.rust .attribute", {"color": |attribute|}, ALL)),
("assert-css", ("pre.rust .attr", {"color": |attr|}, ALL)),
("assert-css", ("pre.rust .macro", {"color": |macro|}, ALL)),
("assert-css", ("pre.rust .question-mark", {"color": |question_mark|}, ALL)),
("assert-css", ("pre.rust .comment", {"color": |comment|}, ALL)),
@ -52,7 +52,7 @@ call-function: ("check-colors", {
"string": "rgb(184, 204, 82)",
"bool_val": "rgb(255, 119, 51)",
"self": "rgb(54, 163, 217)",
"attribute": "rgb(230, 225, 207)",
"attr": "rgb(230, 225, 207)",
"macro": "rgb(163, 122, 204)",
"question_mark": "rgb(255, 144, 17)",
"comment": "rgb(120, 135, 151)",
@ -69,7 +69,7 @@ call-function: ("check-colors", {
"string": "rgb(131, 163, 0)",
"bool_val": "rgb(238, 104, 104)",
"self": "rgb(238, 104, 104)",
"attribute": "rgb(238, 104, 104)",
"attr": "rgb(238, 104, 104)",
"macro": "rgb(62, 153, 159)",
"question_mark": "rgb(255, 144, 17)",
"comment": "rgb(141, 141, 139)",
@ -86,7 +86,7 @@ call-function: ("check-colors", {
"string": "rgb(113, 140, 0)",
"bool_val": "rgb(200, 40, 41)",
"self": "rgb(200, 40, 41)",
"attribute": "rgb(200, 40, 41)",
"attr": "rgb(200, 40, 41)",
"macro": "rgb(62, 153, 159)",
"question_mark": "rgb(255, 144, 17)",
"comment": "rgb(142, 144, 140)",

View File

@ -1,5 +1,5 @@
<code># single
## double
### triple
<span class="attribute">#[outer]
<span class="attr">#[outer]
#![inner]</span></code>

View File

@ -1,10 +1,10 @@
// @has issue_41783/struct.Foo.html
// @!hasraw - 'space'
// @!hasraw - 'comment'
// @hasraw - '<span class="attribute">#[outer]'
// @!hasraw - '<span class="attribute">#[outer]</span>'
// @hasraw - '<span class="attr">#[outer]'
// @!hasraw - '<span class="attr">#[outer]</span>'
// @hasraw - '#![inner]</span>'
// @!hasraw - '<span class="attribute">#![inner]</span>'
// @!hasraw - '<span class="attr">#![inner]</span>'
// @snapshot 'codeblock' - '//*[@class="rustdoc-toggle top-doc"]/*[@class="docblock"]//pre/code'
/// ```no_run

View File

@ -1,7 +1,17 @@
#[macro_export]
macro_rules! foo { ($i:ident) => {} }
macro_rules! foo { () => {} }
#[macro_export]
macro_rules! foo { () => {} } //~ ERROR the name `foo` is defined multiple times
mod inner1 {
#[macro_export]
macro_rules! bar { () => {} }
}
mod inner2 {
#[macro_export]
macro_rules! bar { () => {} } //~ ERROR the name `bar` is defined multiple times
}
fn main() {}

View File

@ -1,7 +1,7 @@
error[E0428]: the name `foo` is defined multiple times
--> $DIR/issue-38715.rs:5:1
|
LL | macro_rules! foo { ($i:ident) => {} }
LL | macro_rules! foo { () => {} }
| ---------------- previous definition of the macro `foo` here
...
LL | macro_rules! foo { () => {} }
@ -9,6 +9,17 @@ LL | macro_rules! foo { () => {} }
|
= note: `foo` must be defined only once in the macro namespace of this module
error: aborting due to previous error
error[E0428]: the name `bar` is defined multiple times
--> $DIR/issue-38715.rs:14:5
|
LL | macro_rules! bar { () => {} }
| ---------------- previous definition of the macro `bar` here
...
LL | macro_rules! bar { () => {} }
| ^^^^^^^^^^^^^^^^ `bar` redefined here
|
= note: `bar` must be defined only once in the macro namespace of this module
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0428`.

View File

@ -38,13 +38,13 @@ mod outer { //~ ERROR Direct: pub(crate), Reexported: pub(crate), Reachable: pub
}
#[rustc_effective_visibility]
macro_rules! none_macro { //~ Direct: pub(crate), Reexported: pub(crate), Reachable: pub(crate), ReachableThroughImplTrait: pub(crate)
macro_rules! none_macro { //~ ERROR not in the table
() => {};
}
#[macro_export]
#[rustc_effective_visibility]
macro_rules! public_macro { //~ Direct: pub, Reexported: pub, Reachable: pub, ReachableThroughImplTrait: pub
macro_rules! public_macro { //~ ERROR Direct: pub(self), Reexported: pub, Reachable: pub, ReachableThroughImplTrait: pub
() => {};
}

View File

@ -64,13 +64,13 @@ error: Direct: pub(crate), Reexported: pub, Reachable: pub, ReachableThroughImpl
LL | PubUnion,
| ^^^^^^^^
error: Direct: pub(crate), Reexported: pub(crate), Reachable: pub(crate), ReachableThroughImplTrait: pub(crate)
error: not in the table
--> $DIR/effective_visibilities.rs:41:5
|
LL | macro_rules! none_macro {
| ^^^^^^^^^^^^^^^^^^^^^^^
error: Direct: pub, Reexported: pub, Reachable: pub, ReachableThroughImplTrait: pub
error: Direct: pub(self), Reexported: pub, Reachable: pub, ReachableThroughImplTrait: pub
--> $DIR/effective_visibilities.rs:47:5
|
LL | macro_rules! public_macro {

View File

@ -15,19 +15,16 @@ struct S;
#[derive(generate_mod::CheckDerive)] //~ ERROR cannot find type `FromOutside` in this scope
//~| ERROR cannot find type `OuterDerive` in this scope
//~| WARN this was previously accepted
//~| WARN this was previously accepted
struct Z;
fn inner_block() {
#[derive(generate_mod::CheckDerive)] //~ ERROR cannot find type `FromOutside` in this scope
//~| ERROR cannot find type `OuterDerive` in this scope
//~| WARN this was previously accepted
//~| WARN this was previously accepted
struct InnerZ;
}
#[derive(generate_mod::CheckDeriveLint)] // OK, lint is suppressed
#[derive(generate_mod::CheckDeriveLint)] //~ ERROR cannot find type `OuterDeriveLint` in this scope
//~| ERROR cannot find type `FromOutside` in this scope
struct W;
fn main() {}

View File

@ -38,127 +38,66 @@ LL | #[generate_mod::check_attr]
OuterAttr
= note: this error originates in the attribute macro `generate_mod::check_attr` (in Nightly builds, run with -Z macro-backtrace for more info)
error: cannot find type `FromOutside` in this scope
error[E0412]: cannot find type `FromOutside` in this scope
--> $DIR/generate-mod.rs:16:10
|
LL | #[derive(generate_mod::CheckDerive)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
| ^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
= note: `#[deny(proc_macro_derive_resolution_fallback)]` on by default
= note: consider importing this struct:
FromOutside
= note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
error: cannot find type `OuterDerive` in this scope
error[E0412]: cannot find type `OuterDerive` in this scope
--> $DIR/generate-mod.rs:16:10
|
LL | #[derive(generate_mod::CheckDerive)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
| ^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
= note: consider importing this struct:
OuterDerive
= note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
error: cannot find type `FromOutside` in this scope
--> $DIR/generate-mod.rs:23:14
error[E0412]: cannot find type `FromOutside` in this scope
--> $DIR/generate-mod.rs:21:14
|
LL | #[derive(generate_mod::CheckDerive)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
| ^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
= note: consider importing this struct:
FromOutside
= note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
error: cannot find type `OuterDerive` in this scope
--> $DIR/generate-mod.rs:23:14
error[E0412]: cannot find type `OuterDerive` in this scope
--> $DIR/generate-mod.rs:21:14
|
LL | #[derive(generate_mod::CheckDerive)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
| ^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
= note: consider importing this struct:
OuterDerive
= note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 8 previous errors
error[E0412]: cannot find type `FromOutside` in this scope
--> $DIR/generate-mod.rs:26:10
|
LL | #[derive(generate_mod::CheckDeriveLint)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
|
= note: consider importing this struct:
FromOutside
= note: this error originates in the derive macro `generate_mod::CheckDeriveLint` (in Nightly builds, run with -Z macro-backtrace for more info)
error[E0412]: cannot find type `OuterDeriveLint` in this scope
--> $DIR/generate-mod.rs:26:10
|
LL | #[derive(generate_mod::CheckDeriveLint)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not found in this scope
|
= note: consider importing this struct:
OuterDeriveLint
= note: this error originates in the derive macro `generate_mod::CheckDeriveLint` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 10 previous errors
For more information about this error, try `rustc --explain E0412`.
Future incompatibility report: Future breakage diagnostic:
error: cannot find type `FromOutside` in this scope
--> $DIR/generate-mod.rs:16:10
|
LL | #[derive(generate_mod::CheckDerive)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
= note: `#[deny(proc_macro_derive_resolution_fallback)]` on by default
= note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
Future breakage diagnostic:
error: cannot find type `OuterDerive` in this scope
--> $DIR/generate-mod.rs:16:10
|
LL | #[derive(generate_mod::CheckDerive)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
= note: `#[deny(proc_macro_derive_resolution_fallback)]` on by default
= note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
Future breakage diagnostic:
error: cannot find type `FromOutside` in this scope
--> $DIR/generate-mod.rs:23:14
|
LL | #[derive(generate_mod::CheckDerive)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
= note: `#[deny(proc_macro_derive_resolution_fallback)]` on by default
= note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
Future breakage diagnostic:
error: cannot find type `OuterDerive` in this scope
--> $DIR/generate-mod.rs:23:14
|
LL | #[derive(generate_mod::CheckDerive)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
= note: `#[deny(proc_macro_derive_resolution_fallback)]` on by default
= note: this error originates in the derive macro `generate_mod::CheckDerive` (in Nightly builds, run with -Z macro-backtrace for more info)
Future breakage diagnostic:
warning: cannot find type `FromOutside` in this scope
--> $DIR/generate-mod.rs:30:10
|
LL | #[derive(generate_mod::CheckDeriveLint)] // OK, lint is suppressed
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
note: the lint level is defined here
--> $DIR/generate-mod.rs:30:10
|
LL | #[derive(generate_mod::CheckDeriveLint)] // OK, lint is suppressed
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: this warning originates in the derive macro `generate_mod::CheckDeriveLint` (in Nightly builds, run with -Z macro-backtrace for more info)
Future breakage diagnostic:
warning: cannot find type `OuterDeriveLint` in this scope
--> $DIR/generate-mod.rs:30:10
|
LL | #[derive(generate_mod::CheckDeriveLint)] // OK, lint is suppressed
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ names from parent modules are not accessible without an explicit import
|
= warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
= note: for more information, see issue #83583 <https://github.com/rust-lang/rust/issues/83583>
note: the lint level is defined here
--> $DIR/generate-mod.rs:30:10
|
LL | #[derive(generate_mod::CheckDeriveLint)] // OK, lint is suppressed
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: this warning originates in the derive macro `generate_mod::CheckDeriveLint` (in Nightly builds, run with -Z macro-backtrace for more info)

View File

@ -872,9 +872,9 @@ dependencies = [
[[package]]
name = "lsp-types"
version = "0.93.1"
version = "0.93.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3bcfee315dde785ba887edb540b08765fd7df75a7d948844be6bf5712246734"
checksum = "9be6e9c7e2d18f651974370d7aff703f9513e0df6e464fd795660edc77e6ca51"
dependencies = [
"bitflags",
"serde",

View File

@ -295,7 +295,9 @@ impl FlycheckActor {
} => {
let mut cmd = Command::new(toolchain::cargo());
cmd.arg(command);
cmd.args(&["--workspace", "--message-format=json"]);
cmd.current_dir(&self.root);
cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
.arg(self.root.join("Cargo.toml").as_os_str());
if let Some(target) = target_triple {
cmd.args(&["--target", target.as_str()]);

View File

@ -662,8 +662,12 @@ fn desugar_future_path(orig: TypeRef) -> Path {
let mut generic_args: Vec<_> =
std::iter::repeat(None).take(path.segments().len() - 1).collect();
let mut last = GenericArgs::empty();
let binding =
AssociatedTypeBinding { name: name![Output], type_ref: Some(orig), bounds: Vec::new() };
let binding = AssociatedTypeBinding {
name: name![Output],
args: None,
type_ref: Some(orig),
bounds: Vec::new(),
};
last.bindings.push(binding);
generic_args.push(Some(Interned::new(last)));

View File

@ -68,6 +68,9 @@ pub struct GenericArgs {
pub struct AssociatedTypeBinding {
/// The name of the associated type.
pub name: Name,
/// The generic arguments to the associated type. e.g. For `Trait<Assoc<'a, T> = &'a T>`, this
/// would be `['a, T]`.
pub args: Option<Interned<GenericArgs>>,
/// The type bound to this associated type (in `Item = T`, this would be the
/// `T`). This can be `None` if there are bounds instead.
pub type_ref: Option<TypeRef>,

View File

@ -163,6 +163,10 @@ pub(super) fn lower_generic_args(
ast::GenericArg::AssocTypeArg(assoc_type_arg) => {
if let Some(name_ref) = assoc_type_arg.name_ref() {
let name = name_ref.as_name();
let args = assoc_type_arg
.generic_arg_list()
.and_then(|args| lower_generic_args(lower_ctx, args))
.map(Interned::new);
let type_ref = assoc_type_arg.ty().map(|it| TypeRef::from_ast(lower_ctx, it));
let bounds = if let Some(l) = assoc_type_arg.type_bound_list() {
l.bounds()
@ -171,7 +175,7 @@ pub(super) fn lower_generic_args(
} else {
Vec::new()
};
bindings.push(AssociatedTypeBinding { name, type_ref, bounds });
bindings.push(AssociatedTypeBinding { name, args, type_ref, bounds });
}
}
ast::GenericArg::LifetimeArg(lifetime_arg) => {
@ -214,6 +218,7 @@ fn lower_generic_args_from_fn_path(
let type_ref = TypeRef::from_ast_opt(ctx, ret_type.ty());
bindings.push(AssociatedTypeBinding {
name: name![Output],
args: None,
type_ref: Some(type_ref),
bounds: Vec::new(),
});
@ -222,6 +227,7 @@ fn lower_generic_args_from_fn_path(
let type_ref = TypeRef::Tuple(Vec::new());
bindings.push(AssociatedTypeBinding {
name: name![Output],
args: None,
type_ref: Some(type_ref),
bounds: Vec::new(),
});
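
The new `args` field records generic arguments on associated type bindings, which became expressible with generic associated types. A hedged sketch of the surface syntax being lowered, using an illustrative trait and types not taken from the diff:

```rust
// Sketch only: `Member<u8> = Vec<u8>` is an associated type binding that carries
// its own generic argument list (`[u8]`), which is what the new
// `AssociatedTypeBinding::args` field records.
trait Collection {
    type Member<T>;
}

struct VecFamily;

impl Collection for VecFamily {
    type Member<T> = Vec<T>;
}

fn take<C>(members: C::Member<u8>) -> C::Member<u8>
where
    C: Collection<Member<u8> = Vec<u8>>,
{
    members
}

fn main() {
    let v = take::<VecFamily>(vec![1u8, 2, 3]);
    assert_eq!(v, vec![1u8, 2, 3]);
}
```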

View File

@ -11,9 +11,9 @@ use syntax::SmolStr;
use crate::{
db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id,
from_placeholder_idx, to_chalk_trait_id, AdtId, AliasEq, AliasTy, Binders, CallableDefId,
CallableSig, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy, QuantifiedWhereClause,
Substitution, TraitRef, Ty, TyBuilder, TyKind, WhereClause,
from_placeholder_idx, to_chalk_trait_id, utils::generics, AdtId, AliasEq, AliasTy, Binders,
CallableDefId, CallableSig, FnPointer, ImplTraitId, Interner, Lifetime, ProjectionTy,
QuantifiedWhereClause, Substitution, TraitRef, Ty, TyBuilder, TyKind, WhereClause,
};
pub trait TyExt {
@ -338,10 +338,13 @@ pub trait ProjectionTyExt {
impl ProjectionTyExt for ProjectionTy {
fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef {
TraitRef {
trait_id: to_chalk_trait_id(self.trait_(db)),
substitution: self.substitution.clone(),
}
// FIXME: something like `Split` trait from chalk-solve might be nice.
let generics = generics(db.upcast(), from_assoc_type_id(self.associated_ty_id).into());
let substitution = Substitution::from_iter(
Interner,
self.substitution.iter(Interner).skip(generics.len_self()),
);
TraitRef { trait_id: to_chalk_trait_id(self.trait_(db)), substitution }
}
fn trait_(&self, db: &dyn HirDatabase) -> TraitId {

View File

@ -289,16 +289,18 @@ impl HirDisplay for ProjectionTy {
return write!(f, "{}", TYPE_HINT_TRUNCATION);
}
let trait_ = f.db.trait_data(self.trait_(f.db));
let trait_ref = self.trait_ref(f.db);
write!(f, "<")?;
self.self_type_parameter(f.db).hir_fmt(f)?;
write!(f, " as {}", trait_.name)?;
if self.substitution.len(Interner) > 1 {
fmt_trait_ref(&trait_ref, f, true)?;
write!(f, ">::{}", f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id)).name)?;
let proj_params_count =
self.substitution.len(Interner) - trait_ref.substitution.len(Interner);
let proj_params = &self.substitution.as_slice(Interner)[..proj_params_count];
if !proj_params.is_empty() {
write!(f, "<")?;
f.write_joined(&self.substitution.as_slice(Interner)[1..], ", ")?;
f.write_joined(proj_params, ", ")?;
write!(f, ">")?;
}
write!(f, ">::{}", f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id)).name)?;
Ok(())
}
}
@ -641,9 +643,12 @@ impl HirDisplay for Ty {
// Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types)
if f.display_target.is_test() {
write!(f, "{}::{}", trait_.name, type_alias_data.name)?;
// Note that the generic args for the associated type come before those for the
// trait (including the self type).
// FIXME: reconsider the generic args order upon formatting?
if parameters.len(Interner) > 0 {
write!(f, "<")?;
f.write_joined(&*parameters.as_slice(Interner), ", ")?;
f.write_joined(parameters.as_slice(Interner), ", ")?;
write!(f, ">")?;
}
} else {
@ -972,9 +977,20 @@ fn write_bounds_like_dyn_trait(
angle_open = true;
}
if let AliasTy::Projection(proj) = alias {
let type_alias =
f.db.type_alias_data(from_assoc_type_id(proj.associated_ty_id));
write!(f, "{} = ", type_alias.name)?;
let assoc_ty_id = from_assoc_type_id(proj.associated_ty_id);
let type_alias = f.db.type_alias_data(assoc_ty_id);
write!(f, "{}", type_alias.name)?;
let proj_arg_count = generics(f.db.upcast(), assoc_ty_id.into()).len_self();
if proj_arg_count > 0 {
write!(f, "<")?;
f.write_joined(
&proj.substitution.as_slice(Interner)[..proj_arg_count],
", ",
)?;
write!(f, ">")?;
}
write!(f, " = ")?;
}
ty.hir_fmt(f)?;
}

View File

@ -157,7 +157,7 @@ impl<'a> InferenceContext<'a> {
remaining_segments_for_ty,
true,
);
if let TyKind::Error = ty.kind(Interner) {
if ty.is_unknown() {
return None;
}

View File

@ -340,8 +340,8 @@ impl<'a> InferenceTable<'a> {
self.resolve_with_fallback(t, &|_, _, d, _| d)
}
/// Unify two types and register new trait goals that arise from that.
pub(crate) fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
/// Unify two relatable values (e.g. `Ty`) and register new trait goals that arise from that.
pub(crate) fn unify<T: ?Sized + Zip<Interner>>(&mut self, ty1: &T, ty2: &T) -> bool {
let result = match self.try_unify(ty1, ty2) {
Ok(r) => r,
Err(_) => return false,
@ -350,9 +350,13 @@ impl<'a> InferenceTable<'a> {
true
}
/// Unify two types and return new trait goals arising from it, so the
/// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the
/// caller needs to deal with them.
pub(crate) fn try_unify<T: Zip<Interner>>(&mut self, t1: &T, t2: &T) -> InferResult<()> {
pub(crate) fn try_unify<T: ?Sized + Zip<Interner>>(
&mut self,
t1: &T,
t2: &T,
) -> InferResult<()> {
match self.var_unification_table.relate(
Interner,
&self.db,

View File

@ -81,7 +81,20 @@ pub type PlaceholderIndex = chalk_ir::PlaceholderIndex;
pub type VariableKind = chalk_ir::VariableKind<Interner>;
pub type VariableKinds = chalk_ir::VariableKinds<Interner>;
pub type CanonicalVarKinds = chalk_ir::CanonicalVarKinds<Interner>;
/// Represents generic parameters and an item bound by them. When the item has a parent, the binders
/// also contain the generic parameters for its parent. See chalk's documentation for details.
///
/// One thing to keep in mind when working with `Binders` (and `Substitution`s, which represent
/// generic arguments) in rust-analyzer is that the ordering within *is* significant - the generic
/// parameters/arguments for an item MUST come before those for its parent. This is to facilitate
/// the integration with chalk-solve, which places mild constraints on this ordering. See #13335
/// for the detailed motivation.
pub type Binders<T> = chalk_ir::Binders<T>;
/// Interned list of generic arguments for an item. When an item has a parent, the `Substitution` for
/// it contains generic arguments for both its parent and itself. See chalk's documentation for
/// details.
///
/// See `Binders` for the constraint on the ordering.
pub type Substitution = chalk_ir::Substitution<Interner>;
pub type GenericArg = chalk_ir::GenericArg<Interner>;
pub type GenericArgData = chalk_ir::GenericArgData<Interner>;
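To make the ordering described above concrete, here is a minimal, self-contained sketch. It is not rust-analyzer code: plain `Vec`s and a made-up helper name stand in for chalk's `Substitution`, but the splicing mirrors the `ProjectionTyExt::trait_ref` change earlier in this diff.

// Hypothetical illustration of the ordering rule: for `<T as Foo<i32>>::Assoc<usize>`,
// the associated type's own arguments come first, then the trait's (self type included).
fn splice(own_args: &[&str], parent_args: &[&str]) -> Vec<String> {
    own_args.iter().chain(parent_args.iter()).map(|s| s.to_string()).collect()
}

fn main() {
    let subst = splice(&["usize"], &["T", "i32"]);
    assert_eq!(subst, ["usize", "T", "i32"]);
    // Recovering the trait's arguments later just skips the item's own arguments.
    assert_eq!(subst[1..], ["T", "i32"]);
}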
@ -124,14 +137,6 @@ pub type ConstrainedSubst = chalk_ir::ConstrainedSubst<Interner>;
pub type Guidance = chalk_solve::Guidance<Interner>;
pub type WhereClause = chalk_ir::WhereClause<Interner>;
// FIXME: get rid of this
pub fn subst_prefix(s: &Substitution, n: usize) -> Substitution {
Substitution::from_iter(
Interner,
s.as_slice(Interner)[..std::cmp::min(s.len(Interner), n)].iter().cloned(),
)
}
/// Returns the index of a parameter in the generic type parameter list by its id.
pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
generics(db.upcast(), id.parent).param_idx(id)
@ -382,7 +387,6 @@ pub(crate) fn fold_tys_and_consts<T: HasInterner<Interner = Interner> + TypeFold
pub fn replace_errors_with_variables<T>(t: &T) -> Canonical<T>
where
T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + Clone,
T: HasInterner<Interner = Interner>,
{
use chalk_ir::{
fold::{FallibleTypeFolder, TypeSuperFoldable},

View File

@ -447,12 +447,31 @@ impl<'a> TyLoweringContext<'a> {
.db
.trait_data(trait_ref.hir_trait_id())
.associated_type_by_name(segment.name);
match found {
Some(associated_ty) => {
// FIXME handle type parameters on the segment
// FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
// generic params. It's inefficient to splice the `Substitution`s, so we may want
// that method to optionally take parent `Substitution` as we already know them at
// this point (`trait_ref.substitution`).
let substitution = self.substs_from_path_segment(
segment,
Some(associated_ty.into()),
false,
None,
);
let len_self =
generics(self.db.upcast(), associated_ty.into()).len_self();
let substitution = Substitution::from_iter(
Interner,
substitution
.iter(Interner)
.take(len_self)
.chain(trait_ref.substitution.iter(Interner)),
);
TyKind::Alias(AliasTy::Projection(ProjectionTy {
associated_ty_id: to_assoc_type_id(associated_ty),
substitution: trait_ref.substitution,
substitution,
}))
.intern(Interner)
}
@ -590,36 +609,48 @@ impl<'a> TyLoweringContext<'a> {
res,
Some(segment.name.clone()),
move |name, t, associated_ty| {
if name == segment.name {
let substs = match self.type_param_mode {
ParamLoweringMode::Placeholder => {
// if we're lowering to placeholders, we have to put
// them in now
let generics = generics(
self.db.upcast(),
self.resolver
.generic_def()
.expect("there should be generics if there's a generic param"),
);
let s = generics.placeholder_subst(self.db);
s.apply(t.substitution.clone(), Interner)
}
ParamLoweringMode::Variable => t.substitution.clone(),
};
// We need to shift in the bound vars, since
// associated_type_shorthand_candidates does not do that
let substs = substs.shifted_in_from(Interner, self.in_binders);
// FIXME handle type parameters on the segment
Some(
TyKind::Alias(AliasTy::Projection(ProjectionTy {
associated_ty_id: to_assoc_type_id(associated_ty),
substitution: substs,
}))
.intern(Interner),
)
} else {
None
if name != segment.name {
return None;
}
// FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
// generic params. It's inefficient to splice the `Substitution`s, so we may want
// that method to optionally take parent `Substitution` as we already know them at
// this point (`t.substitution`).
let substs = self.substs_from_path_segment(
segment.clone(),
Some(associated_ty.into()),
false,
None,
);
let len_self = generics(self.db.upcast(), associated_ty.into()).len_self();
let substs = Substitution::from_iter(
Interner,
substs.iter(Interner).take(len_self).chain(t.substitution.iter(Interner)),
);
let substs = match self.type_param_mode {
ParamLoweringMode::Placeholder => {
// if we're lowering to placeholders, we have to put
// them in now
let generics = generics(self.db.upcast(), def);
let s = generics.placeholder_subst(self.db);
s.apply(substs, Interner)
}
ParamLoweringMode::Variable => substs,
};
// We need to shift in the bound vars, since
// associated_type_shorthand_candidates does not do that
let substs = substs.shifted_in_from(Interner, self.in_binders);
Some(
TyKind::Alias(AliasTy::Projection(ProjectionTy {
associated_ty_id: to_assoc_type_id(associated_ty),
substitution: substs,
}))
.intern(Interner),
)
},
);
@ -777,7 +808,15 @@ impl<'a> TyLoweringContext<'a> {
// handle defaults. In expression or pattern path segments without
// explicitly specified type arguments, missing type arguments are inferred
// (i.e. defaults aren't used).
if !infer_args || had_explicit_args {
// Generic parameters for associated types are not supposed to have defaults, so we just
// ignore them.
let is_assoc_ty = if let GenericDefId::TypeAliasId(id) = def {
let container = id.lookup(self.db.upcast()).container;
matches!(container, ItemContainerId::TraitId(_))
} else {
false
};
if !is_assoc_ty && (!infer_args || had_explicit_args) {
let defaults = self.db.generic_defaults(def);
assert_eq!(total_len, defaults.len());
let parent_from = item_len - substs.len();
@ -966,9 +1005,28 @@ impl<'a> TyLoweringContext<'a> {
None => return SmallVec::new(),
Some(t) => t,
};
// FIXME: `substs_from_path_segment()` pushes `TyKind::Error` for every parent
// generic params. It's inefficient to splice the `Substitution`s, so we may want
// that method to optionally take parent `Substitution` as we already know them at
// this point (`super_trait_ref.substitution`).
let substitution = self.substs_from_path_segment(
// FIXME: This is a hack. We shouldn't really build `PathSegment` directly.
PathSegment { name: &binding.name, args_and_bindings: binding.args.as_deref() },
Some(associated_ty.into()),
false, // this is not relevant
Some(super_trait_ref.self_type_parameter(Interner)),
);
let self_params = generics(self.db.upcast(), associated_ty.into()).len_self();
let substitution = Substitution::from_iter(
Interner,
substitution
.iter(Interner)
.take(self_params)
.chain(super_trait_ref.substitution.iter(Interner)),
);
let projection_ty = ProjectionTy {
associated_ty_id: to_assoc_type_id(associated_ty),
substitution: super_trait_ref.substitution,
substitution,
};
let mut preds: SmallVec<[_; 1]> = SmallVec::with_capacity(
binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),

View File

@ -22,10 +22,10 @@ use crate::{
from_foreign_def_id,
infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast},
primitive::{FloatTy, IntTy, UintTy},
static_lifetime,
static_lifetime, to_chalk_trait_id,
utils::all_super_traits,
AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner,
Scalar, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
};
/// This is used as a key for indexing impls.
@ -624,52 +624,76 @@ pub(crate) fn iterate_method_candidates<T>(
slot
}
/// Looks up the impl method that actually runs for the trait method `func`.
///
/// Returns `func` if it's not a method defined in a trait or the lookup failed.
pub fn lookup_impl_method(
self_ty: &Ty,
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
trait_: TraitId,
func: FunctionId,
fn_subst: Substitution,
) -> FunctionId {
let trait_id = match func.lookup(db.upcast()).container {
ItemContainerId::TraitId(id) => id,
_ => return func,
};
let trait_params = db.generic_params(trait_id.into()).type_or_consts.len();
let fn_params = fn_subst.len(Interner) - trait_params;
let trait_ref = TraitRef {
trait_id: to_chalk_trait_id(trait_id),
substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).skip(fn_params)),
};
let name = &db.function_data(func).name;
lookup_impl_method_for_trait_ref(trait_ref, db, env, name).unwrap_or(func)
}
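Since `fn_subst` follows the same ordering rule (the method's own arguments first, then its parent trait's), the trait reference is recovered from the tail of the list. Below is a standalone sketch of that split, with plain string slices and a hypothetical helper in place of the chalk types; it is an illustration, not code from this change.

// Hypothetical sketch: split the flat generic-argument list of a trait method into
// the method's own arguments and the arguments that form the trait reference
// (the trait's come last, per the ordering documented on `Binders`/`Substitution`).
fn split_fn_subst<'a>(fn_subst: &'a [&'a str], trait_params: usize) -> (&'a [&'a str], &'a [&'a str]) {
    let fn_params = fn_subst.len() - trait_params;
    (&fn_subst[..fn_params], &fn_subst[fn_params..])
}

fn main() {
    // e.g. a call like `<Vec<u8> as Extend<u8>>::extend::<I>`: `I` belongs to the
    // method, while `Vec<u8>` (Self) and `u8` belong to the trait reference.
    let (method_args, trait_ref_args) = split_fn_subst(&["I", "Vec<u8>", "u8"], 2);
    assert_eq!(method_args, ["I"]);
    assert_eq!(trait_ref_args, ["Vec<u8>", "u8"]);
}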
fn lookup_impl_method_for_trait_ref(
trait_ref: TraitRef,
db: &dyn HirDatabase,
env: Arc<TraitEnvironment>,
name: &Name,
) -> Option<FunctionId> {
let self_ty_fp = TyFingerprint::for_trait_impl(self_ty)?;
let trait_impls = db.trait_impls_in_deps(env.krate);
let impls = trait_impls.for_trait_and_self_ty(trait_, self_ty_fp);
let mut table = InferenceTable::new(db, env.clone());
find_matching_impl(impls, &mut table, &self_ty).and_then(|data| {
data.items.iter().find_map(|it| match it {
AssocItemId::FunctionId(f) => (db.function_data(*f).name == *name).then(|| *f),
_ => None,
})
let self_ty = trait_ref.self_type_parameter(Interner);
let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?;
let impls = db.trait_impls_in_deps(env.krate);
let impls = impls.for_trait_and_self_ty(trait_ref.hir_trait_id(), self_ty_fp);
let table = InferenceTable::new(db, env);
let impl_data = find_matching_impl(impls, table, trait_ref)?;
impl_data.items.iter().find_map(|it| match it {
AssocItemId::FunctionId(f) => (db.function_data(*f).name == *name).then(|| *f),
_ => None,
})
}
fn find_matching_impl(
mut impls: impl Iterator<Item = ImplId>,
table: &mut InferenceTable<'_>,
self_ty: &Ty,
mut table: InferenceTable<'_>,
actual_trait_ref: TraitRef,
) -> Option<Arc<ImplData>> {
let db = table.db;
loop {
let impl_ = impls.next()?;
let r = table.run_in_snapshot(|table| {
let impl_data = db.impl_data(impl_);
let substs =
let impl_substs =
TyBuilder::subst_for_def(db, impl_, None).fill_with_inference_vars(table).build();
let impl_ty = db.impl_self_ty(impl_).substitute(Interner, &substs);
let trait_ref = db
.impl_trait(impl_)
.expect("non-trait method in find_matching_impl")
.substitute(Interner, &impl_substs);
table
.unify(self_ty, &impl_ty)
.then(|| {
let wh_goals =
crate::chalk_db::convert_where_clauses(db, impl_.into(), &substs)
.into_iter()
.map(|b| b.cast(Interner));
if !table.unify(&trait_ref, &actual_trait_ref) {
return None;
}
let goal = crate::Goal::all(Interner, wh_goals);
table.try_obligation(goal).map(|_| impl_data)
})
.flatten()
let wcs = crate::chalk_db::convert_where_clauses(db, impl_.into(), &impl_substs)
.into_iter()
.map(|b| b.cast(Interner));
let goal = crate::Goal::all(Interner, wcs);
table.try_obligation(goal).map(|_| impl_data)
});
if r.is_some() {
break r;
@ -1214,7 +1238,7 @@ fn is_valid_fn_candidate(
let expected_receiver =
sig.map(|s| s.params()[0].clone()).substitute(Interner, &fn_subst);
check_that!(table.unify(&receiver_ty, &expected_receiver));
check_that!(table.unify(receiver_ty, &expected_receiver));
}
if let ItemContainerId::ImplId(impl_id) = container {

View File

@ -196,3 +196,34 @@ fn test(
"#,
);
}
#[test]
fn projection_type_correct_arguments_order() {
check_types_source_code(
r#"
trait Foo<T> {
type Assoc<U>;
}
fn f<T: Foo<i32>>(a: T::Assoc<usize>) {
a;
//^ <T as Foo<i32>>::Assoc<usize>
}
"#,
);
}
#[test]
fn generic_associated_type_binding_in_impl_trait() {
check_types_source_code(
r#"
//- minicore: sized
trait Foo<T> {
type Assoc<U>;
}
fn f(a: impl Foo<i8, Assoc<i16> = i32>) {
a;
//^ impl Foo<i8, Assoc<i16> = i32>
}
"#,
);
}

View File

@ -3963,3 +3963,124 @@ fn g(t: &(dyn T + Send)) {
"#,
);
}
#[test]
fn gats_in_path() {
check_types(
r#"
//- minicore: deref
use core::ops::Deref;
trait PointerFamily {
type Pointer<T>: Deref<Target = T>;
}
fn f<P: PointerFamily>(p: P::Pointer<i32>) {
let a = *p;
//^ i32
}
fn g<P: PointerFamily>(p: <P as PointerFamily>::Pointer<i32>) {
let a = *p;
//^ i32
}
"#,
);
}
#[test]
fn gats_with_impl_trait() {
// FIXME: the last function (`fn i()`) is not valid Rust as of this writing because you cannot
// specify the same associated type multiple times even if their arguments are different (c.f.
// `fn h()`, which is valid). Reconsider how to treat these invalid types.
check_types(
r#"
//- minicore: deref
use core::ops::Deref;
trait Trait {
type Assoc<T>: Deref<Target = T>;
fn get<U>(&self) -> Self::Assoc<U>;
}
fn f<T>(v: impl Trait) {
let a = v.get::<i32>().deref();
//^ &i32
let a = v.get::<T>().deref();
//^ &T
}
fn g<'a, T: 'a>(v: impl Trait<Assoc<T> = &'a T>) {
let a = v.get::<T>();
//^ &T
let a = v.get::<()>();
//^ Trait::Assoc<(), impl Trait<Assoc<T> = &T>>
}
fn h<'a>(v: impl Trait<Assoc<i32> = &'a i32> + Trait<Assoc<i64> = &'a i64>) {
let a = v.get::<i32>();
//^ &i32
let a = v.get::<i64>();
//^ &i64
}
fn i<'a>(v: impl Trait<Assoc<i32> = &'a i32, Assoc<i64> = &'a i64>) {
let a = v.get::<i32>();
//^ &i32
let a = v.get::<i64>();
//^ &i64
}
"#,
);
}
#[test]
fn gats_with_dyn() {
// This test is here to keep track of how we infer things despite traits with GATs not being
// object-safe currently.
// FIXME: reconsider how to treat these invalid types.
check_infer_with_mismatches(
r#"
//- minicore: deref
use core::ops::Deref;
trait Trait {
type Assoc<T>: Deref<Target = T>;
fn get<U>(&self) -> Self::Assoc<U>;
}
fn f<'a>(v: &dyn Trait<Assoc<i32> = &'a i32>) {
v.get::<i32>().deref();
}
"#,
expect![[r#"
90..94 'self': &Self
127..128 'v': &(dyn Trait<Assoc<i32> = &i32>)
164..195 '{ ...f(); }': ()
170..171 'v': &(dyn Trait<Assoc<i32> = &i32>)
170..184 'v.get::<i32>()': &i32
170..192 'v.get:...eref()': &i32
"#]],
);
}
#[test]
fn gats_in_associated_type_binding() {
check_types(
r#"
trait Trait {
type Assoc<T>;
fn get<U>(&self) -> Self::Assoc<U>;
}
fn f<T>(t: T)
where
T: Trait<Assoc<i32> = u32>,
T: Trait<Assoc<isize> = usize>,
{
let a = t.get::<i32>();
//^ u32
let a = t.get::<isize>();
//^ usize
let a = t.get::<()>();
//^ Trait::Assoc<(), T>
}
"#,
);
}

View File

@ -5,7 +5,7 @@ use itertools::Itertools;
use crate::{
chalk_db, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, mapping::from_chalk,
CallableDefId, Interner,
CallableDefId, Interner, ProjectionTyExt,
};
use hir_def::{AdtId, ItemContainerId, Lookup, TypeAliasId};
@ -63,17 +63,31 @@ impl DebugContext<'_> {
ItemContainerId::TraitId(t) => t,
_ => panic!("associated type not in trait"),
};
let trait_data = self.0.trait_data(trait_);
let params = projection_ty.substitution.as_slice(Interner);
write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?;
if params.len() > 1 {
let trait_name = &self.0.trait_data(trait_).name;
let trait_ref = projection_ty.trait_ref(self.0);
let trait_params = trait_ref.substitution.as_slice(Interner);
let self_ty = trait_ref.self_type_parameter(Interner);
write!(fmt, "<{:?} as {}", self_ty, trait_name)?;
if trait_params.len() > 1 {
write!(
fmt,
"<{}>",
&params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
)?;
}
write!(fmt, ">::{}", type_alias_data.name)
write!(fmt, ">::{}", type_alias_data.name)?;
let proj_params_count = projection_ty.substitution.len(Interner) - trait_params.len();
let proj_params = &projection_ty.substitution.as_slice(Interner)[..proj_params_count];
if !proj_params.is_empty() {
write!(
fmt,
"<{}>",
proj_params.iter().format_with(", ", |x, f| f(&format_args!("{:?}", x))),
)?;
}
Ok(())
}
pub(crate) fn debug_fn_def_id(

View File

@ -270,7 +270,7 @@ impl SourceAnalyzer {
let expr_id = self.expr_id(db, &call.clone().into())?;
let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, &substs))
Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs))
}
pub(crate) fn resolve_await_to_poll(
@ -311,7 +311,7 @@ impl SourceAnalyzer {
// HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself
// doesn't have any generic parameters, so we skip building another subst for `poll()`.
let substs = hir_ty::TyBuilder::subst_for_def(db, future_trait, None).push(ty).build();
Some(self.resolve_impl_method_or_trait_def(db, poll_fn, &substs))
Some(self.resolve_impl_method_or_trait_def(db, poll_fn, substs))
}
pub(crate) fn resolve_prefix_expr(
@ -331,7 +331,7 @@ impl SourceAnalyzer {
// don't have any generic parameters, so we skip building another subst for the methods.
let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None).push(ty.clone()).build();
Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
}
pub(crate) fn resolve_index_expr(
@ -351,7 +351,7 @@ impl SourceAnalyzer {
.push(base_ty.clone())
.push(index_ty.clone())
.build();
Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
}
pub(crate) fn resolve_bin_expr(
@ -372,7 +372,7 @@ impl SourceAnalyzer {
.push(rhs.clone())
.build();
Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
}
pub(crate) fn resolve_try_expr(
@ -392,7 +392,7 @@ impl SourceAnalyzer {
// doesn't have any generic parameters, so we skip building another subst for `branch()`.
let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None).push(ty.clone()).build();
Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
}
pub(crate) fn resolve_field(
@ -487,9 +487,9 @@ impl SourceAnalyzer {
let mut prefer_value_ns = false;
let resolved = (|| {
let infer = self.infer.as_deref()?;
if let Some(path_expr) = parent().and_then(ast::PathExpr::cast) {
let expr_id = self.expr_id(db, &path_expr.into())?;
let infer = self.infer.as_ref()?;
if let Some(assoc) = infer.assoc_resolutions_for_expr(expr_id) {
let assoc = match assoc {
AssocItemId::FunctionId(f_in_trait) => {
@ -497,9 +497,12 @@ impl SourceAnalyzer {
None => assoc,
Some(func_ty) => {
if let TyKind::FnDef(_fn_def, subs) = func_ty.kind(Interner) {
self.resolve_impl_method(db, f_in_trait, subs)
.map(AssocItemId::FunctionId)
.unwrap_or(assoc)
self.resolve_impl_method_or_trait_def(
db,
f_in_trait,
subs.clone(),
)
.into()
} else {
assoc
}
@ -520,18 +523,18 @@ impl SourceAnalyzer {
prefer_value_ns = true;
} else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) {
let pat_id = self.pat_id(&path_pat.into())?;
if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
if let Some(assoc) = infer.assoc_resolutions_for_pat(pat_id) {
return Some(PathResolution::Def(AssocItem::from(assoc).into()));
}
if let Some(VariantId::EnumVariantId(variant)) =
self.infer.as_ref()?.variant_resolution_for_pat(pat_id)
infer.variant_resolution_for_pat(pat_id)
{
return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
}
} else if let Some(rec_lit) = parent().and_then(ast::RecordExpr::cast) {
let expr_id = self.expr_id(db, &rec_lit.into())?;
if let Some(VariantId::EnumVariantId(variant)) =
self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
infer.variant_resolution_for_expr(expr_id)
{
return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
}
@ -541,8 +544,7 @@ impl SourceAnalyzer {
|| parent().and_then(ast::TupleStructPat::cast).map(ast::Pat::from);
if let Some(pat) = record_pat.or_else(tuple_struct_pat) {
let pat_id = self.pat_id(&pat)?;
let variant_res_for_pat =
self.infer.as_ref()?.variant_resolution_for_pat(pat_id);
let variant_res_for_pat = infer.variant_resolution_for_pat(pat_id);
if let Some(VariantId::EnumVariantId(variant)) = variant_res_for_pat {
return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
}
@ -780,37 +782,22 @@ impl SourceAnalyzer {
false
}
fn resolve_impl_method(
&self,
db: &dyn HirDatabase,
func: FunctionId,
substs: &Substitution,
) -> Option<FunctionId> {
let impled_trait = match func.lookup(db.upcast()).container {
ItemContainerId::TraitId(trait_id) => trait_id,
_ => return None,
};
if substs.is_empty(Interner) {
return None;
}
let self_ty = substs.at(Interner, 0).ty(Interner)?;
let krate = self.resolver.krate();
let trait_env = self.resolver.body_owner()?.as_generic_def_id().map_or_else(
|| Arc::new(hir_ty::TraitEnvironment::empty(krate)),
|d| db.trait_environment(d),
);
let fun_data = db.function_data(func);
method_resolution::lookup_impl_method(self_ty, db, trait_env, impled_trait, &fun_data.name)
}
fn resolve_impl_method_or_trait_def(
&self,
db: &dyn HirDatabase,
func: FunctionId,
substs: &Substitution,
substs: Substitution,
) -> FunctionId {
self.resolve_impl_method(db, func, substs).unwrap_or(func)
let krate = self.resolver.krate();
let owner = match self.resolver.body_owner() {
Some(it) => it,
None => return func,
};
let env = owner.as_generic_def_id().map_or_else(
|| Arc::new(hir_ty::TraitEnvironment::empty(krate)),
|d| db.trait_environment(d),
);
method_resolution::lookup_impl_method(db, env, func, substs)
}
fn lang_trait_fn(

View File

@ -1834,4 +1834,86 @@ fn f() {
"#,
);
}
#[test]
fn goto_bin_op_multiple_impl() {
check(
r#"
//- minicore: add
struct S;
impl core::ops::Add for S {
fn add(
//^^^
) {}
}
impl core::ops::Add<usize> for S {
fn add(
) {}
}
fn f() {
S +$0 S
}
"#,
);
check(
r#"
//- minicore: add
struct S;
impl core::ops::Add for S {
fn add(
) {}
}
impl core::ops::Add<usize> for S {
fn add(
//^^^
) {}
}
fn f() {
S +$0 0usize
}
"#,
);
}
#[test]
fn path_call_multiple_trait_impl() {
check(
r#"
trait Trait<T> {
fn f(_: T);
}
impl Trait<i32> for usize {
fn f(_: i32) {}
//^
}
impl Trait<i64> for usize {
fn f(_: i64) {}
}
fn main() {
usize::f$0(0i32);
}
"#,
);
check(
r#"
trait Trait<T> {
fn f(_: T);
}
impl Trait<i32> for usize {
fn f(_: i32) {}
}
impl Trait<i64> for usize {
fn f(_: i64) {}
//^
}
fn main() {
usize::f$0(0i64);
}
"#,
)
}
}

View File

@ -23,7 +23,7 @@ crossbeam-channel = "0.5.5"
dissimilar = "1.0.4"
itertools = "0.10.5"
scip = "0.1.1"
lsp-types = { version = "0.93.1", features = ["proposed"] }
lsp-types = { version = "=0.93.2", features = ["proposed"] }
parking_lot = "0.12.1"
xflags = "0.3.0"
oorandom = "11.1.3"

View File

@ -11,7 +11,7 @@ use std::{env, fs, path::Path, process};
use lsp_server::Connection;
use project_model::ProjectManifest;
use rust_analyzer::{cli::flags, config::Config, from_json, lsp_ext::supports_utf8, Result};
use rust_analyzer::{cli::flags, config::Config, from_json, Result};
use vfs::AbsPathBuf;
#[cfg(all(feature = "mimalloc"))]
@ -191,11 +191,7 @@ fn run_server() -> Result<()> {
name: String::from("rust-analyzer"),
version: Some(rust_analyzer::version().to_string()),
}),
offset_encoding: if supports_utf8(config.caps()) {
Some("utf-8".to_string())
} else {
None
},
offset_encoding: None,
};
let initialize_result = serde_json::to_value(initialize_result).unwrap();

View File

@ -6,19 +6,25 @@ use lsp_types::{
FileOperationFilter, FileOperationPattern, FileOperationPatternKind,
FileOperationRegistrationOptions, FoldingRangeProviderCapability, HoverProviderCapability,
ImplementationProviderCapability, InlayHintOptions, InlayHintServerCapabilities, OneOf,
RenameOptions, SaveOptions, SelectionRangeProviderCapability, SemanticTokensFullOptions,
SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions,
TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
TypeDefinitionProviderCapability, WorkDoneProgressOptions,
PositionEncodingKind, RenameOptions, SaveOptions, SelectionRangeProviderCapability,
SemanticTokensFullOptions, SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities,
SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
WorkspaceFileOperationsServerCapabilities, WorkspaceServerCapabilities,
};
use serde_json::json;
use crate::config::{Config, RustfmtConfig};
use crate::lsp_ext::supports_utf8;
use crate::semantic_tokens;
pub fn server_capabilities(config: &Config) -> ServerCapabilities {
ServerCapabilities {
position_encoding: if supports_utf8(config.caps()) {
Some(PositionEncodingKind::UTF8)
} else {
None
},
text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
open_close: Some(true),
change: Some(TextDocumentSyncKind::INCREMENTAL),

View File

@ -20,7 +20,7 @@ use crate::cli::{
load_cargo::{load_workspace, LoadCargoConfig},
Result,
};
use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
use crate::line_index::{LineEndings, LineIndex, PositionEncoding};
use crate::to_proto;
use crate::version::version;
@ -126,7 +126,7 @@ impl LsifManager<'_> {
let line_index = self.db.line_index(file_id);
let line_index = LineIndex {
index: line_index,
encoding: OffsetEncoding::Utf16,
encoding: PositionEncoding::Utf16,
endings: LineEndings::Unix,
};
let range_id = self.add_vertex(lsif::Vertex::Range {
@ -248,7 +248,7 @@ impl LsifManager<'_> {
let line_index = self.db.line_index(file_id);
let line_index = LineIndex {
index: line_index,
encoding: OffsetEncoding::Utf16,
encoding: PositionEncoding::Utf16,
endings: LineEndings::Unix,
};
let result = folds

View File

@ -5,7 +5,7 @@ use std::{
time::Instant,
};
use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
use crate::line_index::{LineEndings, LineIndex, PositionEncoding};
use hir::Name;
use ide::{
LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId,
@ -91,7 +91,7 @@ impl flags::Scip {
let line_index = LineIndex {
index: db.line_index(file_id),
encoding: OffsetEncoding::Utf8,
encoding: PositionEncoding::Utf8,
endings: LineEndings::Unix,
};

View File

@ -32,7 +32,7 @@ use vfs::AbsPathBuf;
use crate::{
caps::completion_item_edit_resolve,
diagnostics::DiagnosticsMapConfig,
line_index::OffsetEncoding,
line_index::PositionEncoding,
lsp_ext::{self, supports_utf8, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope},
};
@ -948,11 +948,11 @@ impl Config {
.is_some()
}
pub fn offset_encoding(&self) -> OffsetEncoding {
pub fn position_encoding(&self) -> PositionEncoding {
if supports_utf8(&self.caps) {
OffsetEncoding::Utf8
PositionEncoding::Utf8
} else {
OffsetEncoding::Utf16
PositionEncoding::Utf16
}
}

View File

@ -8,7 +8,7 @@ use stdx::format_to;
use vfs::{AbsPath, AbsPathBuf};
use crate::{
global_state::GlobalStateSnapshot, line_index::OffsetEncoding, lsp_ext,
global_state::GlobalStateSnapshot, line_index::PositionEncoding, lsp_ext,
to_proto::url_from_abs_path,
};
@ -66,17 +66,17 @@ fn location(
let uri = url_from_abs_path(&file_name);
let range = {
let offset_encoding = snap.config.offset_encoding();
let position_encoding = snap.config.position_encoding();
lsp_types::Range::new(
position(&offset_encoding, span, span.line_start, span.column_start),
position(&offset_encoding, span, span.line_end, span.column_end),
position(&position_encoding, span, span.line_start, span.column_start),
position(&position_encoding, span, span.line_end, span.column_end),
)
};
lsp_types::Location::new(uri, range)
}
fn position(
offset_encoding: &OffsetEncoding,
position_encoding: &PositionEncoding,
span: &DiagnosticSpan,
line_offset: usize,
column_offset: usize,
@ -93,9 +93,9 @@ fn position(
};
}
let mut char_offset = 0;
let len_func = match offset_encoding {
OffsetEncoding::Utf8 => char::len_utf8,
OffsetEncoding::Utf16 => char::len_utf16,
let len_func = match position_encoding {
PositionEncoding::Utf8 => char::len_utf8,
PositionEncoding::Utf16 => char::len_utf16,
};
for c in line.text.chars() {
char_offset += 1;

View File

@ -8,7 +8,7 @@ use vfs::AbsPathBuf;
use crate::{
from_json,
global_state::GlobalStateSnapshot,
line_index::{LineIndex, OffsetEncoding},
line_index::{LineIndex, PositionEncoding},
lsp_ext,
lsp_utils::invalid_params_error,
Result,
@ -25,10 +25,10 @@ pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {
pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result<TextSize> {
let line_col = match line_index.encoding {
OffsetEncoding::Utf8 => {
PositionEncoding::Utf8 => {
LineCol { line: position.line as u32, col: position.character as u32 }
}
OffsetEncoding::Utf16 => {
PositionEncoding::Utf16 => {
let line_col =
LineColUtf16 { line: position.line as u32, col: position.character as u32 };
line_index.index.to_utf8(line_col)

View File

@ -383,7 +383,7 @@ impl GlobalStateSnapshot {
pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex> {
let endings = self.vfs.read().1[&file_id];
let index = self.analysis.file_line_index(file_id)?;
let res = LineIndex { index, endings, encoding: self.config.offset_encoding() };
let res = LineIndex { index, endings, encoding: self.config.position_encoding() };
Ok(res)
}

View File

@ -7,7 +7,7 @@
use std::sync::Arc;
pub enum OffsetEncoding {
pub enum PositionEncoding {
Utf8,
Utf16,
}
@ -15,7 +15,7 @@ pub enum OffsetEncoding {
pub(crate) struct LineIndex {
pub(crate) index: Arc<ide::LineIndex>,
pub(crate) endings: LineEndings,
pub(crate) encoding: OffsetEncoding,
pub(crate) encoding: PositionEncoding,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]

View File

@ -3,6 +3,7 @@
use std::{collections::HashMap, path::PathBuf};
use lsp_types::request::Request;
use lsp_types::PositionEncodingKind;
use lsp_types::{
notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams,
PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
@ -455,7 +456,15 @@ pub(crate) enum CodeLensResolveData {
}
pub fn supports_utf8(caps: &lsp_types::ClientCapabilities) -> bool {
caps.offset_encoding.as_deref().unwrap_or_default().iter().any(|it| it == "utf-8")
match &caps.general {
Some(general) => general
.position_encodings
.as_deref()
.unwrap_or_default()
.iter()
.any(|it| it == &PositionEncodingKind::UTF8),
_ => false,
}
}
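For context, a hedged sketch of how a client would now advertise UTF-8 support via LSP 3.17's `general.positionEncodings` capability. This assumes lsp-types 0.93.2's `GeneralClientCapabilities` and the usual `Default` impls on these structs; it is an illustration, not code from this change.

use lsp_types::{ClientCapabilities, GeneralClientCapabilities, PositionEncodingKind};

// A client that can handle UTF-8 offsets lists it under `general.position_encodings`;
// `supports_utf8` above checks exactly this field.
fn utf8_capable_caps() -> ClientCapabilities {
    ClientCapabilities {
        general: Some(GeneralClientCapabilities {
            position_encodings: Some(vec![PositionEncodingKind::UTF8]),
            ..Default::default()
        }),
        ..Default::default()
    }
}

fn main() {
    let caps = utf8_capable_caps();
    let utf8 = caps
        .general
        .and_then(|g| g.position_encodings)
        .map_or(false, |encs| encs.iter().any(|it| it == &PositionEncodingKind::UTF8));
    assert!(utf8);
}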
pub enum MoveItem {}

View File

@ -6,7 +6,7 @@ use lsp_server::Notification;
use crate::{
from_proto,
global_state::GlobalState,
line_index::{LineEndings, LineIndex, OffsetEncoding},
line_index::{LineEndings, LineIndex, PositionEncoding},
LspError,
};
@ -140,7 +140,7 @@ pub(crate) fn apply_document_changes(
index: Arc::new(ide::LineIndex::new(old_text)),
// We don't care about line endings or offset encoding here.
endings: LineEndings::Unix,
encoding: OffsetEncoding::Utf16,
encoding: PositionEncoding::Utf16,
};
// The changes we got must be applied sequentially, but can cross lines so we

View File

@ -607,30 +607,34 @@ impl GlobalState {
/// Handles a request.
fn on_request(&mut self, req: Request) {
if self.shutdown_requested {
self.respond(lsp_server::Response::new_err(
req.id,
lsp_server::ErrorCode::InvalidRequest as i32,
"Shutdown already requested.".to_owned(),
));
return;
let mut dispatcher = RequestDispatcher { req: Some(req), global_state: self };
dispatcher.on_sync_mut::<lsp_types::request::Shutdown>(|s, ()| {
s.shutdown_requested = true;
Ok(())
});
if let RequestDispatcher { req: Some(req), global_state: this } = &mut dispatcher {
if this.shutdown_requested {
this.respond(lsp_server::Response::new_err(
req.id.clone(),
lsp_server::ErrorCode::InvalidRequest as i32,
"Shutdown already requested.".to_owned(),
));
return;
}
// Avoid flashing a bunch of unresolved references during initial load.
if this.workspaces.is_empty() && !this.is_quiescent() {
this.respond(lsp_server::Response::new_err(
req.id.clone(),
lsp_server::ErrorCode::ContentModified as i32,
"waiting for cargo metadata or cargo check".to_owned(),
));
return;
}
}
// Avoid flashing a bunch of unresolved references during initial load.
if self.workspaces.is_empty() && !self.is_quiescent() {
self.respond(lsp_server::Response::new_err(
req.id,
lsp_server::ErrorCode::ContentModified as i32,
"waiting for cargo metadata or cargo check".to_owned(),
));
return;
}
RequestDispatcher { req: Some(req), global_state: self }
.on_sync_mut::<lsp_types::request::Shutdown>(|s, ()| {
s.shutdown_requested = true;
Ok(())
})
dispatcher
.on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
.on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
.on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)

View File

@ -21,7 +21,7 @@ use crate::{
cargo_target_spec::CargoTargetSpec,
config::{CallInfoConfig, Config},
global_state::GlobalStateSnapshot,
line_index::{LineEndings, LineIndex, OffsetEncoding},
line_index::{LineEndings, LineIndex, PositionEncoding},
lsp_ext,
lsp_utils::invalid_params_error,
semantic_tokens, Result,
@ -30,8 +30,8 @@ use crate::{
pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
let line_col = line_index.index.line_col(offset);
match line_index.encoding {
OffsetEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
OffsetEncoding::Utf16 => {
PositionEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
PositionEncoding::Utf16 => {
let line_col = line_index.index.to_utf16(line_col);
lsp_types::Position::new(line_col.line, line_col.col)
}
@ -1394,7 +1394,7 @@ fn main() {
let line_index = LineIndex {
index: Arc::new(ide::LineIndex::new(text)),
endings: LineEndings::Unix,
encoding: OffsetEncoding::Utf16,
encoding: PositionEncoding::Utf16,
};
let converted: Vec<lsp_types::FoldingRange> =
folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect();

View File

@ -51,7 +51,7 @@ TypeArg =
Type
AssocTypeArg =
NameRef GenericParamList? (':' TypeBoundList | ('=' Type | ConstArg))
NameRef GenericArgList? (':' TypeBoundList | ('=' Type | ConstArg))
LifetimeArg =
Lifetime
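For reference, a small example of the source syntax the updated `AssocTypeArg` rule admits: the associated type in an equality bound now carries a `GenericArgList` rather than a `GenericParamList`. This assumes Rust 1.65+ with GATs stabilized; the names are made up.

trait Trait {
    type Assoc<T>;
}

// `Assoc<i16>` in the bound is parsed as a NameRef followed by a GenericArgList.
fn demo(_: impl Trait<Assoc<i16> = i32>) {}

fn main() {}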

View File

@ -120,7 +120,7 @@ pub struct AssocTypeArg {
impl ast::HasTypeBounds for AssocTypeArg {}
impl AssocTypeArg {
pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
pub fn const_arg(&self) -> Option<ConstArg> { support::child(&self.syntax) }
@ -142,16 +142,6 @@ impl ConstArg {
pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericParamList {
pub(crate) syntax: SyntaxNode,
}
impl GenericParamList {
pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
pub fn generic_params(&self) -> AstChildren<GenericParam> { support::children(&self.syntax) }
pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TypeBoundList {
pub(crate) syntax: SyntaxNode,
@ -527,6 +517,16 @@ impl Abi {
pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct GenericParamList {
pub(crate) syntax: SyntaxNode,
}
impl GenericParamList {
pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
pub fn generic_params(&self) -> AstChildren<GenericParam> { support::children(&self.syntax) }
pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct WhereClause {
pub(crate) syntax: SyntaxNode,
@ -1834,17 +1834,6 @@ impl AstNode for ConstArg {
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl AstNode for GenericParamList {
fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_PARAM_LIST }
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) {
Some(Self { syntax })
} else {
None
}
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl AstNode for TypeBoundList {
fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND_LIST }
fn cast(syntax: SyntaxNode) -> Option<Self> {
@ -2153,6 +2142,17 @@ impl AstNode for Abi {
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl AstNode for GenericParamList {
fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_PARAM_LIST }
fn cast(syntax: SyntaxNode) -> Option<Self> {
if Self::can_cast(syntax.kind()) {
Some(Self { syntax })
} else {
None
}
}
fn syntax(&self) -> &SyntaxNode { &self.syntax }
}
impl AstNode for WhereClause {
fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_CLAUSE }
fn cast(syntax: SyntaxNode) -> Option<Self> {
@ -4263,11 +4263,6 @@ impl std::fmt::Display for ConstArg {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for GenericParamList {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for TypeBoundList {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
@ -4408,6 +4403,11 @@ impl std::fmt::Display for Abi {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for GenericParamList {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)
}
}
impl std::fmt::Display for WhereClause {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self.syntax(), f)

View File

@ -479,7 +479,9 @@ It is not cheap enough to enable in prod, and this is a bug which should be fixe
### Configurability
rust-analyzer strives to be as configurable as possible while offering reasonable defaults where no configuration exists yet.
The rule of thumb is to enable most features by default unless they are buggy or degrade performance too much.
There will always be features that some people find more annoying than helpful, so giving the users the ability to tweak or disable these is a big part of offering a good user experience.
Enabling them by default is a matter of discoverability, as many users end up not knowing about some features even though they are documented in the manual.
Mind the code--architecture gap: at the moment, we are using fewer feature flags than we really should.
### Serialization

View File

@ -1,5 +1,5 @@
<!---
lsp_ext.rs hash: 7b710095d773b978
lsp_ext.rs hash: 62068e53ac202dc8
If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:
@ -19,12 +19,6 @@ Requests, which are likely to always remain specific to `rust-analyzer` are unde
If you want to be notified about the changes to this document, subscribe to [#4604](https://github.com/rust-lang/rust-analyzer/issues/4604).
## UTF-8 offsets
rust-analyzer supports clangd's extension for opting into UTF-8 as the coordinate space for offsets (by default, LSP uses UTF-16 offsets).
https://clangd.llvm.org/extensions.html#utf-8-offsets
## Configuration in `initializationOptions`
**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/567

View File

@ -3,7 +3,7 @@ import * as lc from "vscode-languageclient";
import * as ra from "./lsp_ext";
import * as path from "path";
import { Ctx, Cmd } from "./ctx";
import { Ctx, Cmd, CtxInit } from "./ctx";
import { applySnippetWorkspaceEdit, applySnippetTextEdits } from "./snippets";
import { spawnSync } from "child_process";
import { RunnableQuickPick, selectRunnable, createTask, createArgs } from "./run";
@ -16,14 +16,14 @@ import { LINKED_COMMANDS } from "./client";
export * from "./ast_inspector";
export * from "./run";
export function analyzerStatus(ctx: Ctx): Cmd {
export function analyzerStatus(ctx: CtxInit): Cmd {
const tdcp = new (class implements vscode.TextDocumentContentProvider {
readonly uri = vscode.Uri.parse("rust-analyzer-status://status");
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
async provideTextDocumentContent(_uri: vscode.Uri): Promise<string> {
if (!vscode.window.activeTextEditor) return "";
const client = await ctx.getClient();
const client = ctx.client;
const params: ra.AnalyzerStatusParams = {};
const doc = ctx.activeRustEditor?.document;
@ -52,7 +52,7 @@ export function analyzerStatus(ctx: Ctx): Cmd {
};
}
export function memoryUsage(ctx: Ctx): Cmd {
export function memoryUsage(ctx: CtxInit): Cmd {
const tdcp = new (class implements vscode.TextDocumentContentProvider {
readonly uri = vscode.Uri.parse("rust-analyzer-memory://memory");
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
@ -60,14 +60,9 @@ export function memoryUsage(ctx: Ctx): Cmd {
provideTextDocumentContent(_uri: vscode.Uri): vscode.ProviderResult<string> {
if (!vscode.window.activeTextEditor) return "";
return ctx
.getClient()
.then((it) => it.sendRequest(ra.memoryUsage))
.then((mem: any) => {
return (
"Per-query memory usage:\n" + mem + "\n(note: database has been cleared)"
);
});
return ctx.client.sendRequest(ra.memoryUsage).then((mem: any) => {
return "Per-query memory usage:\n" + mem + "\n(note: database has been cleared)";
});
}
get onDidChange(): vscode.Event<vscode.Uri> {
@ -86,18 +81,18 @@ export function memoryUsage(ctx: Ctx): Cmd {
};
}
export function shuffleCrateGraph(ctx: Ctx): Cmd {
export function shuffleCrateGraph(ctx: CtxInit): Cmd {
return async () => {
return ctx.getClient().then((it) => it.sendRequest(ra.shuffleCrateGraph));
return ctx.client.sendRequest(ra.shuffleCrateGraph);
};
}
export function matchingBrace(ctx: Ctx): Cmd {
export function matchingBrace(ctx: CtxInit): Cmd {
return async () => {
const editor = ctx.activeRustEditor;
if (!editor) return;
const client = await ctx.getClient();
const client = ctx.client;
const response = await client.sendRequest(ra.matchingBrace, {
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
@ -114,12 +109,12 @@ export function matchingBrace(ctx: Ctx): Cmd {
};
}
export function joinLines(ctx: Ctx): Cmd {
export function joinLines(ctx: CtxInit): Cmd {
return async () => {
const editor = ctx.activeRustEditor;
if (!editor) return;
const client = await ctx.getClient();
const client = ctx.client;
const items: lc.TextEdit[] = await client.sendRequest(ra.joinLines, {
ranges: editor.selections.map((it) => client.code2ProtocolConverter.asRange(it)),
@ -134,19 +129,19 @@ export function joinLines(ctx: Ctx): Cmd {
};
}
export function moveItemUp(ctx: Ctx): Cmd {
export function moveItemUp(ctx: CtxInit): Cmd {
return moveItem(ctx, ra.Direction.Up);
}
export function moveItemDown(ctx: Ctx): Cmd {
export function moveItemDown(ctx: CtxInit): Cmd {
return moveItem(ctx, ra.Direction.Down);
}
export function moveItem(ctx: Ctx, direction: ra.Direction): Cmd {
export function moveItem(ctx: CtxInit, direction: ra.Direction): Cmd {
return async () => {
const editor = ctx.activeRustEditor;
if (!editor) return;
const client = await ctx.getClient();
const client = ctx.client;
const lcEdits = await client.sendRequest(ra.moveItem, {
range: client.code2ProtocolConverter.asRange(editor.selection),
@ -161,13 +156,13 @@ export function moveItem(ctx: Ctx, direction: ra.Direction): Cmd {
};
}
export function onEnter(ctx: Ctx): Cmd {
export function onEnter(ctx: CtxInit): Cmd {
async function handleKeypress() {
const editor = ctx.activeRustEditor;
if (!editor) return false;
const client = await ctx.getClient();
const client = ctx.client;
const lcEdits = await client
.sendRequest(ra.onEnter, {
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
@ -193,13 +188,13 @@ export function onEnter(ctx: Ctx): Cmd {
};
}
export function parentModule(ctx: Ctx): Cmd {
export function parentModule(ctx: CtxInit): Cmd {
return async () => {
const editor = vscode.window.activeTextEditor;
if (!editor) return;
if (!(isRustDocument(editor.document) || isCargoTomlDocument(editor.document))) return;
const client = await ctx.getClient();
const client = ctx.client;
const locations = await client.sendRequest(ra.parentModule, {
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
@ -230,12 +225,12 @@ export function parentModule(ctx: Ctx): Cmd {
};
}
export function openCargoToml(ctx: Ctx): Cmd {
export function openCargoToml(ctx: CtxInit): Cmd {
return async () => {
const editor = ctx.activeRustEditor;
if (!editor) return;
const client = await ctx.getClient();
const client = ctx.client;
const response = await client.sendRequest(ra.openCargoToml, {
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(editor.document),
});
@ -251,12 +246,12 @@ export function openCargoToml(ctx: Ctx): Cmd {
};
}
export function ssr(ctx: Ctx): Cmd {
export function ssr(ctx: CtxInit): Cmd {
return async () => {
const editor = vscode.window.activeTextEditor;
if (!editor) return;
const client = await ctx.getClient();
const client = ctx.client;
const position = editor.selection.active;
const selections = editor.selections;
@ -308,7 +303,7 @@ export function ssr(ctx: Ctx): Cmd {
};
}
export function serverVersion(ctx: Ctx): Cmd {
export function serverVersion(ctx: CtxInit): Cmd {
return async () => {
if (!ctx.serverPath) {
void vscode.window.showWarningMessage(`rust-analyzer server is not running`);
@ -324,7 +319,7 @@ export function serverVersion(ctx: Ctx): Cmd {
// Opens the virtual file that will show the syntax tree
//
// The contents of the file come from the `TextDocumentContentProvider`
export function syntaxTree(ctx: Ctx): Cmd {
export function syntaxTree(ctx: CtxInit): Cmd {
const tdcp = new (class implements vscode.TextDocumentContentProvider {
readonly uri = vscode.Uri.parse("rust-analyzer-syntax-tree://syntaxtree/tree.rast");
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
@ -360,7 +355,7 @@ export function syntaxTree(ctx: Ctx): Cmd {
): Promise<string> {
const rustEditor = ctx.activeRustEditor;
if (!rustEditor) return "";
const client = await ctx.getClient();
const client = ctx.client;
// When the range based query is enabled we take the range of the selection
const range =
@ -407,7 +402,7 @@ export function syntaxTree(ctx: Ctx): Cmd {
// Opens the virtual file that will show the HIR of the function containing the cursor position
//
// The contents of the file come from the `TextDocumentContentProvider`
export function viewHir(ctx: Ctx): Cmd {
export function viewHir(ctx: CtxInit): Cmd {
const tdcp = new (class implements vscode.TextDocumentContentProvider {
readonly uri = vscode.Uri.parse("rust-analyzer-hir://viewHir/hir.rs");
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
@ -444,7 +439,7 @@ export function viewHir(ctx: Ctx): Cmd {
const rustEditor = ctx.activeRustEditor;
if (!rustEditor) return "";
const client = await ctx.getClient();
const client = ctx.client;
const params = {
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
rustEditor.document
@ -473,7 +468,7 @@ export function viewHir(ctx: Ctx): Cmd {
};
}
export function viewFileText(ctx: Ctx): Cmd {
export function viewFileText(ctx: CtxInit): Cmd {
const tdcp = new (class implements vscode.TextDocumentContentProvider {
readonly uri = vscode.Uri.parse("rust-analyzer-file-text://viewFileText/file.rs");
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
@ -509,7 +504,7 @@ export function viewFileText(ctx: Ctx): Cmd {
): Promise<string> {
const rustEditor = ctx.activeRustEditor;
if (!rustEditor) return "";
const client = await ctx.getClient();
const client = ctx.client;
const params = client.code2ProtocolConverter.asTextDocumentIdentifier(
rustEditor.document
@ -536,7 +531,7 @@ export function viewFileText(ctx: Ctx): Cmd {
};
}
export function viewItemTree(ctx: Ctx): Cmd {
export function viewItemTree(ctx: CtxInit): Cmd {
const tdcp = new (class implements vscode.TextDocumentContentProvider {
readonly uri = vscode.Uri.parse("rust-analyzer-item-tree://viewItemTree/itemtree.rs");
readonly eventEmitter = new vscode.EventEmitter<vscode.Uri>();
@ -572,7 +567,7 @@ export function viewItemTree(ctx: Ctx): Cmd {
): Promise<string> {
const rustEditor = ctx.activeRustEditor;
if (!rustEditor) return "";
const client = await ctx.getClient();
const client = ctx.client;
const params = {
textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(
@ -601,7 +596,7 @@ export function viewItemTree(ctx: Ctx): Cmd {
};
}
function crateGraph(ctx: Ctx, full: boolean): Cmd {
function crateGraph(ctx: CtxInit, full: boolean): Cmd {
return async () => {
const nodeModulesPath = vscode.Uri.file(path.join(ctx.extensionPath, "node_modules"));
@ -618,7 +613,7 @@ function crateGraph(ctx: Ctx, full: boolean): Cmd {
const params = {
full: full,
};
const client = await ctx.getClient();
const client = ctx.client;
const dot = await client.sendRequest(ra.viewCrateGraph, params);
const uri = panel.webview.asWebviewUri(nodeModulesPath);
@ -664,18 +659,18 @@ function crateGraph(ctx: Ctx, full: boolean): Cmd {
};
}
export function viewCrateGraph(ctx: Ctx): Cmd {
export function viewCrateGraph(ctx: CtxInit): Cmd {
return crateGraph(ctx, false);
}
export function viewFullCrateGraph(ctx: Ctx): Cmd {
export function viewFullCrateGraph(ctx: CtxInit): Cmd {
return crateGraph(ctx, true);
}
// Opens the virtual file that will show the syntax tree
//
// The contents of the file come from the `TextDocumentContentProvider`
export function expandMacro(ctx: Ctx): Cmd {
export function expandMacro(ctx: CtxInit): Cmd {
function codeFormat(expanded: ra.ExpandedMacro): string {
let result = `// Recursive expansion of ${expanded.name}! macro\n`;
result += "// " + "=".repeat(result.length - 3);
@ -691,7 +686,7 @@ export function expandMacro(ctx: Ctx): Cmd {
async provideTextDocumentContent(_uri: vscode.Uri): Promise<string> {
const editor = vscode.window.activeTextEditor;
if (!editor) return "";
const client = await ctx.getClient();
const client = ctx.client;
const position = editor.selection.active;
@ -723,8 +718,8 @@ export function expandMacro(ctx: Ctx): Cmd {
};
}
export function reloadWorkspace(ctx: Ctx): Cmd {
return async () => (await ctx.getClient()).sendRequest(ra.reloadWorkspace);
export function reloadWorkspace(ctx: CtxInit): Cmd {
return async () => ctx.client.sendRequest(ra.reloadWorkspace);
}
async function showReferencesImpl(
@ -743,13 +738,13 @@ async function showReferencesImpl(
}
}
export function showReferences(ctx: Ctx): Cmd {
export function showReferences(ctx: CtxInit): Cmd {
return async (uri: string, position: lc.Position, locations: lc.Location[]) => {
await showReferencesImpl(await ctx.getClient(), uri, position, locations);
await showReferencesImpl(ctx.client, uri, position, locations);
};
}
export function applyActionGroup(_ctx: Ctx): Cmd {
export function applyActionGroup(_ctx: CtxInit): Cmd {
return async (actions: { label: string; arguments: lc.CodeAction }[]) => {
const selectedAction = await vscode.window.showQuickPick(actions);
if (!selectedAction) return;
@ -760,9 +755,9 @@ export function applyActionGroup(_ctx: Ctx): Cmd {
};
}
export function gotoLocation(ctx: Ctx): Cmd {
export function gotoLocation(ctx: CtxInit): Cmd {
return async (locationLink: lc.LocationLink) => {
const client = await ctx.getClient();
const client = ctx.client;
const uri = client.protocol2CodeConverter.asUri(locationLink.targetUri);
let range = client.protocol2CodeConverter.asRange(locationLink.targetSelectionRange);
// collapse the range to a cursor position
@ -772,13 +767,13 @@ export function gotoLocation(ctx: Ctx): Cmd {
};
}
export function openDocs(ctx: Ctx): Cmd {
export function openDocs(ctx: CtxInit): Cmd {
return async () => {
const editor = vscode.window.activeTextEditor;
if (!editor) {
return;
}
const client = await ctx.getClient();
const client = ctx.client;
const position = editor.selection.active;
const textDocument = { uri: editor.document.uri.toString() };
@ -791,16 +786,16 @@ export function openDocs(ctx: Ctx): Cmd {
};
}
export function cancelFlycheck(ctx: Ctx): Cmd {
export function cancelFlycheck(ctx: CtxInit): Cmd {
return async () => {
const client = await ctx.getClient();
const client = ctx.client;
await client.sendRequest(ra.cancelFlycheck);
};
}
export function resolveCodeAction(ctx: Ctx): Cmd {
export function resolveCodeAction(ctx: CtxInit): Cmd {
return async (params: lc.CodeAction) => {
const client = await ctx.getClient();
const client = ctx.client;
params.command = undefined;
const item = await client?.sendRequest(lc.CodeActionResolveRequest.type, params);
if (!item?.edit) {
@ -825,13 +820,13 @@ export function resolveCodeAction(ctx: Ctx): Cmd {
};
}
export function applySnippetWorkspaceEditCommand(_ctx: Ctx): Cmd {
export function applySnippetWorkspaceEditCommand(_ctx: CtxInit): Cmd {
return async (edit: vscode.WorkspaceEdit) => {
await applySnippetWorkspaceEdit(edit);
};
}
export function run(ctx: Ctx): Cmd {
export function run(ctx: CtxInit): Cmd {
let prevRunnable: RunnableQuickPick | undefined;
return async () => {
@ -845,11 +840,11 @@ export function run(ctx: Ctx): Cmd {
};
}
export function peekTests(ctx: Ctx): Cmd {
export function peekTests(ctx: CtxInit): Cmd {
return async () => {
const editor = ctx.activeRustEditor;
if (!editor) return;
const client = await ctx.getClient();
const client = ctx.client;
await vscode.window.withProgress(
{
@ -878,7 +873,7 @@ export function peekTests(ctx: Ctx): Cmd {
};
}
export function runSingle(ctx: Ctx): Cmd {
export function runSingle(ctx: CtxInit): Cmd {
return async (runnable: ra.Runnable) => {
const editor = ctx.activeRustEditor;
if (!editor) return;
@ -895,7 +890,7 @@ export function runSingle(ctx: Ctx): Cmd {
};
}
export function copyRunCommandLine(ctx: Ctx) {
export function copyRunCommandLine(ctx: CtxInit) {
let prevRunnable: RunnableQuickPick | undefined;
return async () => {
const item = await selectRunnable(ctx, prevRunnable);
@ -907,7 +902,7 @@ export function copyRunCommandLine(ctx: Ctx) {
};
}
export function debug(ctx: Ctx): Cmd {
export function debug(ctx: CtxInit): Cmd {
let prevDebuggee: RunnableQuickPick | undefined;
return async () => {
@ -920,13 +915,13 @@ export function debug(ctx: Ctx): Cmd {
};
}
export function debugSingle(ctx: Ctx): Cmd {
export function debugSingle(ctx: CtxInit): Cmd {
return async (config: ra.Runnable) => {
await startDebugSession(ctx, config);
};
}
export function newDebugConfig(ctx: Ctx): Cmd {
export function newDebugConfig(ctx: CtxInit): Cmd {
return async () => {
const item = await selectRunnable(ctx, undefined, true, false);
if (!item) return;

View File

@ -4,12 +4,17 @@ import * as ra from "./lsp_ext";
import { Config, substituteVariablesInEnv, substituteVSCodeVariables } from "./config";
import { createClient } from "./client";
import { isRustEditor, log, RustEditor } from "./util";
import { isRustDocument, isRustEditor, log, RustEditor } from "./util";
import { ServerStatusParams } from "./lsp_ext";
import { PersistentState } from "./persistent_state";
import { bootstrap } from "./bootstrap";
// We only support local folders, not e.g. Live Share (`vlsl:` scheme), so don't activate if
// only those are in use. We use "Empty" to represent these scenarios.
// (r-a still somewhat works with Live Share, because commands are tunneled to the host.)
export type Workspace =
| { kind: "Empty" }
| {
kind: "Workspace Folder";
}
@ -18,16 +23,39 @@ export type Workspace =
files: vscode.TextDocument[];
};
export function fetchWorkspace(): Workspace {
    const folders = (vscode.workspace.workspaceFolders || []).filter(
        (folder) => folder.uri.scheme === "file"
    );
    const rustDocuments = vscode.workspace.textDocuments.filter((document) =>
        isRustDocument(document)
    );
    return folders.length === 0
        ? rustDocuments.length === 0
            ? { kind: "Empty" }
            : {
                  kind: "Detached Files",
                  files: rustDocuments,
              }
        : { kind: "Workspace Folder" };
}
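// Illustrative sketch (not part of this change): the `Workspace` value returned by
// `fetchWorkspace()` is a discriminated union, so callers can branch on `kind` and only
// see the `files` payload in the "Detached Files" arm. The function name below is hypothetical.
function describeWorkspace(ws: Workspace): string {
    if (ws.kind === "Empty") return "no local folders and no Rust files open";
    if (ws.kind === "Detached Files") {
        return `${ws.files.length} Rust file(s) open outside a workspace folder`;
    }
    return "at least one local workspace folder";
}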
export type CommandFactory = {
enabled: (ctx: Ctx) => Cmd;
enabled: (ctx: CtxInit) => Cmd;
disabled?: (ctx: Ctx) => Cmd;
};
export type CtxInit = Ctx & {
readonly client: lc.LanguageClient;
};
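// Minimal, self-contained sketch (assumed names, not part of this diff) of the pattern
// behind `CtxInit`: intersecting a class with `{ readonly client: T }` strips `undefined`
// from an optional getter, and a user-defined type guard performs the narrowing at
// runtime, as `updateCommands` does further below.
interface ExampleClient {
    sendRequest(method: string): Promise<unknown>;
}
class ExampleCtx {
    private _client: ExampleClient | undefined;
    get client(): ExampleClient | undefined {
        return this._client;
    }
}
type ExampleCtxInit = ExampleCtx & { readonly client: ExampleClient };
function isInitialized(ctx: ExampleCtx): ctx is ExampleCtxInit {
    return ctx.client !== undefined;
}
async function example(ctx: ExampleCtx) {
    if (isInitialized(ctx)) {
        // Inside the guard, `ctx.client` is `ExampleClient`, not `ExampleClient | undefined`.
        await ctx.client.sendRequest("example/request");
    }
}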
export class Ctx {
readonly statusBar: vscode.StatusBarItem;
readonly config: Config;
readonly workspace: Workspace;
private client: lc.LanguageClient | undefined;
private _client: lc.LanguageClient | undefined;
private _serverPath: string | undefined;
private traceOutputChannel: vscode.OutputChannel | undefined;
private outputChannel: vscode.OutputChannel | undefined;
@ -36,18 +64,17 @@ export class Ctx {
private commandFactories: Record<string, CommandFactory>;
private commandDisposables: Disposable[];
workspace: Workspace;
get client() {
return this._client;
}
constructor(
readonly extCtx: vscode.ExtensionContext,
workspace: Workspace,
commandFactories: Record<string, CommandFactory>
commandFactories: Record<string, CommandFactory>,
workspace: Workspace
) {
extCtx.subscriptions.push(this);
this.statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left);
this.statusBar.text = "rust-analyzer";
this.statusBar.tooltip = "ready";
this.statusBar.command = "rust-analyzer.analyzerStatus";
this.statusBar.show();
this.workspace = workspace;
this.clientSubscriptions = [];
@ -57,7 +84,10 @@ export class Ctx {
this.state = new PersistentState(extCtx.globalState);
this.config = new Config(extCtx);
this.updateCommands();
this.updateCommands("disable");
this.setServerStatus({
health: "stopped",
});
}
dispose() {
@ -67,16 +97,36 @@ export class Ctx {
this.commandDisposables.forEach((disposable) => disposable.dispose());
}
clientFetcher() {
const self = this;
return {
get client(): lc.LanguageClient | undefined {
return self.client;
},
};
async onWorkspaceFolderChanges() {
const workspace = fetchWorkspace();
if (workspace.kind === "Detached Files" && this.workspace.kind === "Detached Files") {
if (workspace.files !== this.workspace.files) {
if (this.client?.isRunning()) {
// Ideally we wouldn't need to tear down the server here, but detached files are
// currently only specified at server start, so changing them requires a restart.
await this.stopAndDispose();
await this.start();
}
return;
}
}
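// Still in a folder workspace before and after the change: the running server keeps working.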
if (workspace.kind === "Workspace Folder" && this.workspace.kind === "Workspace Folder") {
return;
}
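// Nothing local is left to analyze (no folders, no detached Rust files): shut the server down.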
if (workspace.kind === "Empty") {
await this.stopAndDispose();
return;
}
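// In all other cases, restart the server if it is currently running.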
if (this.client?.isRunning()) {
await this.restart();
}
}
async getClient() {
private async getOrCreateClient() {
if (this.workspace.kind === "Empty") {
return;
}
if (!this.traceOutputChannel) {
this.traceOutputChannel = vscode.window.createOutputChannel(
"Rust Analyzer Language Server Trace"
@ -88,7 +138,7 @@ export class Ctx {
this.pushExtCleanup(this.outputChannel);
}
if (!this.client) {
if (!this._client) {
this._serverPath = await bootstrap(this.extCtx, this.config, this.state).catch(
(err) => {
let message = "bootstrap error. ";
@ -125,47 +175,61 @@ export class Ctx {
const initializationOptions = substituteVSCodeVariables(rawInitializationOptions);
this.client = await createClient(
this._client = await createClient(
this.traceOutputChannel,
this.outputChannel,
initializationOptions,
serverOptions
);
this.pushClientCleanup(
this.client.onNotification(ra.serverStatus, (params) =>
this._client.onNotification(ra.serverStatus, (params) =>
this.setServerStatus(params)
)
);
}
return this.client;
return this._client;
}
async activate() {
log.info("Activating language client");
const client = await this.getClient();
async start() {
log.info("Starting language client");
const client = await this.getOrCreateClient();
if (!client) {
return;
}
await client.start();
this.updateCommands();
return client;
}
async deactivate() {
log.info("Deactivating language client");
await this.client?.stop();
this.updateCommands();
async restart() {
// FIXME: We should reuse the client (only stop it instead of disposing and recreating it)
// if none of the configs have changed.
await this.stopAndDispose();
await this.start();
}
async stop() {
if (!this._client) {
return;
}
log.info("Stopping language client");
this.updateCommands("disable");
await this._client.stop();
}
async stopAndDispose() {
if (!this._client) {
return;
}
log.info("Disposing language client");
this.updateCommands("disable");
await this.disposeClient();
this.updateCommands();
}
private async disposeClient() {
this.clientSubscriptions?.forEach((disposable) => disposable.dispose());
this.clientSubscriptions = [];
await this.client?.dispose();
await this._client?.dispose();
this._serverPath = undefined;
this.client = undefined;
this._client = undefined;
}
get activeRustEditor(): RustEditor | undefined {
@ -185,32 +249,41 @@ export class Ctx {
return this._serverPath;
}
private updateCommands() {
private updateCommands(forceDisable?: "disable") {
this.commandDisposables.forEach((disposable) => disposable.dispose());
this.commandDisposables = [];
const fetchFactory = (factory: CommandFactory, fullName: string) => {
return this.client && this.client.isRunning()
? factory.enabled
: factory.disabled ||
((_) => () =>
vscode.window.showErrorMessage(
`command ${fullName} failed: rust-analyzer server is not running`
));
const clientRunning = (!forceDisable && this._client?.isRunning()) ?? false;
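// A type guard that ignores its argument: whether `this` may be treated as `CtxInit`
// is decided solely by the `clientRunning` flag captured above.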
const isClientRunning = function (_ctx: Ctx): _ctx is CtxInit {
return clientRunning;
};
for (const [name, factory] of Object.entries(this.commandFactories)) {
const fullName = `rust-analyzer.${name}`;
const callback = fetchFactory(factory, fullName)(this);
let callback;
if (isClientRunning(this)) {
// The `isClientRunning` type guard narrowed `this` to `CtxInit`, so `client` is defined.
callback = factory.enabled(this);
} else if (factory.disabled) {
callback = factory.disabled(this);
} else {
callback = () =>
vscode.window.showErrorMessage(
`command ${fullName} failed: rust-analyzer server is not running`
);
}
this.commandDisposables.push(vscode.commands.registerCommand(fullName, callback));
}
}
setServerStatus(status: ServerStatusParams) {
setServerStatus(status: ServerStatusParams | { health: "stopped" }) {
let icon = "";
const statusBar = this.statusBar;
switch (status.health) {
case "ok":
statusBar.tooltip = status.message ?? "Ready";
statusBar.command = undefined;
statusBar.tooltip = (status.message ?? "Ready") + "\nClick to stop server.";
statusBar.command = "rust-analyzer.stopServer";
statusBar.color = undefined;
statusBar.backgroundColor = undefined;
break;
@ -234,6 +307,13 @@ export class Ctx {
statusBar.backgroundColor = new vscode.ThemeColor("statusBarItem.errorBackground");
icon = "$(error) ";
break;
case "stopped":
statusBar.tooltip = "Server is stopped.\nClick to start.";
statusBar.command = "rust-analyzer.startServer";
statusBar.color = undefined;
statusBar.backgroundColor = undefined;
statusBar.text = `$(stop-circle) rust-analyzer`;
return;
}
if (!status.quiescent) icon = "$(sync~spin) ";
statusBar.text = `${icon}rust-analyzer`;

View File

@ -2,15 +2,13 @@ import * as vscode from "vscode";
import * as lc from "vscode-languageclient/node";
import * as commands from "./commands";
import { CommandFactory, Ctx, Workspace } from "./ctx";
import { isRustDocument } from "./util";
import { CommandFactory, Ctx, fetchWorkspace } from "./ctx";
import { activateTaskProvider } from "./tasks";
import { setContextValue } from "./util";
const RUST_PROJECT_CONTEXT_NAME = "inRustProject";
export interface RustAnalyzerExtensionApi {
// FIXME: this should be non-optional
readonly client?: lc.LanguageClient;
}
@ -32,32 +30,7 @@ export async function activate(
.then(() => {}, console.error);
}
// We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if
// only those are in use.
// (r-a still somewhat works with Live Share, because commands are tunneled to the host)
const folders = (vscode.workspace.workspaceFolders || []).filter(
(folder) => folder.uri.scheme === "file"
);
const rustDocuments = vscode.workspace.textDocuments.filter((document) =>
isRustDocument(document)
);
if (folders.length === 0 && rustDocuments.length === 0) {
// FIXME: Ideally we would choose not to activate at all (and avoid registering
// non-functional editor commands), but VS Code doesn't seem to have a good way of doing
// that
return {};
}
const workspace: Workspace =
folders.length === 0
? {
kind: "Detached Files",
files: rustDocuments,
}
: { kind: "Workspace Folder" };
const ctx = new Ctx(context, workspace, createCommands());
const ctx = new Ctx(context, createCommands(), fetchWorkspace());
// VS Code doesn't show a notification when an extension fails to activate,
// so we do it ourselves.
const api = await activateServer(ctx).catch((err) => {
@ -75,18 +48,23 @@ async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> {
ctx.pushExtCleanup(activateTaskProvider(ctx.config));
}
vscode.workspace.onDidChangeWorkspaceFolders(
async (_) => ctx.onWorkspaceFolderChanges(),
null,
ctx.subscriptions
);
vscode.workspace.onDidChangeConfiguration(
async (_) => {
await ctx
.clientFetcher()
.client?.sendNotification("workspace/didChangeConfiguration", { settings: "" });
await ctx.client?.sendNotification("workspace/didChangeConfiguration", {
settings: "",
});
},
null,
ctx.subscriptions
);
await ctx.activate();
return ctx.clientFetcher();
await ctx.start();
return ctx;
}
function createCommands(): Record<string, CommandFactory> {
@ -98,33 +76,30 @@ function createCommands(): Record<string, CommandFactory> {
reload: {
enabled: (ctx) => async () => {
void vscode.window.showInformationMessage("Reloading rust-analyzer...");
// FIXME: We should re-use the client, that is ctx.deactivate() if none of the configs have changed
await ctx.stop();
await ctx.activate();
await ctx.restart();
},
disabled: (ctx) => async () => {
void vscode.window.showInformationMessage("Reloading rust-analyzer...");
await ctx.activate();
await ctx.start();
},
},
startServer: {
enabled: (ctx) => async () => {
await ctx.activate();
await ctx.start();
},
disabled: (ctx) => async () => {
await ctx.activate();
await ctx.start();
},
},
stopServer: {
enabled: (ctx) => async () => {
// FIXME: We should re-use the client, that is ctx.deactivate() if none of the configs have changed
await ctx.stop();
await ctx.stopAndDispose();
ctx.setServerStatus({
health: "ok",
quiescent: true,
message: "server is not running",
health: "stopped",
});
},
disabled: (_) => async () => {},
},
analyzerStatus: { enabled: commands.analyzerStatus },

View File

@ -3,7 +3,7 @@ import * as lc from "vscode-languageclient";
import * as ra from "./lsp_ext";
import * as tasks from "./tasks";
import { Ctx } from "./ctx";
import { CtxInit } from "./ctx";
import { makeDebugConfig } from "./debug";
import { Config, RunnableEnvCfg } from "./config";
@ -12,7 +12,7 @@ const quickPickButtons = [
];
export async function selectRunnable(
ctx: Ctx,
ctx: CtxInit,
prevRunnable?: RunnableQuickPick,
debuggeeOnly = false,
showButtons: boolean = true
@ -20,7 +20,7 @@ export async function selectRunnable(
const editor = ctx.activeRustEditor;
if (!editor) return;
const client = await ctx.getClient();
const client = ctx.client;
const textDocument: lc.TextDocumentIdentifier = {
uri: editor.document.uri.toString(),
};

View File

@ -13,4 +13,4 @@ serde = { version = "1.0.144", features = ["derive"] }
crossbeam-channel = "0.5.6"
[dev-dependencies]
lsp-types = "0.93.1"
lsp-types = "=0.93.2"