[clangd] NFC: Migrate to LLVM STLExtras API where possible
This patch improves readability by migrating iterator-pair algorithm calls of the form `std::algorithm(ForwardIt start, ForwardIt end, ...)` to LLVM's range-based STLExtras equivalents `llvm::algorithm(RangeT &&Range, ...)`. Similar change in Clang: D52576. Reviewed By: sammccall Differential Revision: https://reviews.llvm.org/D52650 llvm-svn: 343937
This commit is contained in:
parent
01daf62a0d
commit
4a5ff88fdb
|
@@ -549,8 +549,7 @@ void ClangdLSPServer::onDiagnosticsReady(PathRef File,
|
|||
DiagnosticsJSON.push_back(std::move(LSPDiag));
|
||||
|
||||
auto &FixItsForDiagnostic = LocalFixIts[Diag];
|
||||
std::copy(Fixes.begin(), Fixes.end(),
|
||||
std::back_inserter(FixItsForDiagnostic));
|
||||
llvm::copy(Fixes, std::back_inserter(FixItsForDiagnostic));
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
@@ -361,17 +361,15 @@ llvm::Optional<Path> ClangdServer::switchSourceHeader(PathRef Path) {
|
|||
|
||||
// Lookup in a list of known extensions.
|
||||
auto SourceIter =
|
||||
std::find_if(std::begin(SourceExtensions), std::end(SourceExtensions),
|
||||
[&PathExt](PathRef SourceExt) {
|
||||
return SourceExt.equals_lower(PathExt);
|
||||
});
|
||||
llvm::find_if(SourceExtensions, [&PathExt](PathRef SourceExt) {
|
||||
return SourceExt.equals_lower(PathExt);
|
||||
});
|
||||
bool IsSource = SourceIter != std::end(SourceExtensions);
|
||||
|
||||
auto HeaderIter =
|
||||
std::find_if(std::begin(HeaderExtensions), std::end(HeaderExtensions),
|
||||
[&PathExt](PathRef HeaderExt) {
|
||||
return HeaderExt.equals_lower(PathExt);
|
||||
});
|
||||
llvm::find_if(HeaderExtensions, [&PathExt](PathRef HeaderExt) {
|
||||
return HeaderExt.equals_lower(PathExt);
|
||||
});
|
||||
|
||||
bool IsHeader = HeaderIter != std::end(HeaderExtensions);
|
||||
|
||||
|
|
|
@@ -306,11 +306,10 @@ struct CodeCompletionBuilder {
|
|||
Completion.FixIts.push_back(
|
||||
toTextEdit(FixIt, ASTCtx.getSourceManager(), ASTCtx.getLangOpts()));
|
||||
}
|
||||
std::sort(Completion.FixIts.begin(), Completion.FixIts.end(),
|
||||
[](const TextEdit &X, const TextEdit &Y) {
|
||||
return std::tie(X.range.start.line, X.range.start.character) <
|
||||
std::tie(Y.range.start.line, Y.range.start.character);
|
||||
});
|
||||
llvm::sort(Completion.FixIts, [](const TextEdit &X, const TextEdit &Y) {
|
||||
return std::tie(X.range.start.line, X.range.start.character) <
|
||||
std::tie(Y.range.start.line, Y.range.start.character);
|
||||
});
|
||||
Completion.Deprecated |=
|
||||
(C.SemaResult->Availability == CXAvailability_Deprecated);
|
||||
}
|
||||
|
@@ -861,8 +860,8 @@ public:
|
|||
IndexRequest.IDs.size(), FetchedDocs.size());
|
||||
}
|
||||
|
||||
std::sort(
|
||||
ScoredSignatures.begin(), ScoredSignatures.end(),
|
||||
llvm::sort(
|
||||
ScoredSignatures,
|
||||
[](const ScoredSignature &L, const ScoredSignature &R) {
|
||||
// Ordering follows:
|
||||
// - Less number of parameters is better.
|
||||
|
@@ -1164,13 +1163,12 @@ llvm::SmallVector<StringRef, 1>
|
|||
getRankedIncludes(const Symbol &Sym) {
|
||||
auto Includes = Sym.IncludeHeaders;
|
||||
// Sort in descending order by reference count and header length.
|
||||
std::sort(Includes.begin(), Includes.end(),
|
||||
[](const Symbol::IncludeHeaderWithReferences &LHS,
|
||||
const Symbol::IncludeHeaderWithReferences &RHS) {
|
||||
if (LHS.References == RHS.References)
|
||||
return LHS.IncludeHeader.size() < RHS.IncludeHeader.size();
|
||||
return LHS.References > RHS.References;
|
||||
});
|
||||
llvm::sort(Includes, [](const Symbol::IncludeHeaderWithReferences &LHS,
|
||||
const Symbol::IncludeHeaderWithReferences &RHS) {
|
||||
if (LHS.References == RHS.References)
|
||||
return LHS.IncludeHeader.size() < RHS.IncludeHeader.size();
|
||||
return LHS.References > RHS.References;
|
||||
});
|
||||
llvm::SmallVector<StringRef, 1> Headers;
|
||||
for (const auto &Include : Includes)
|
||||
Headers.push_back(Include.IncludeHeader);
|
||||
|
|
|
@@ -128,8 +128,7 @@ private:
|
|||
using KVPair = std::pair<Key, std::unique_ptr<ParsedAST>>;
|
||||
|
||||
std::vector<KVPair>::iterator findByKey(Key K) {
|
||||
return std::find_if(LRU.begin(), LRU.end(),
|
||||
[K](const KVPair &P) { return P.first == K; });
|
||||
return llvm::find_if(LRU, [K](const KVPair &P) { return P.first == K; });
|
||||
}
|
||||
|
||||
std::mutex Mut;
|
||||
|
|
|
@@ -104,21 +104,19 @@ public:
|
|||
}
|
||||
|
||||
// Sort results. Declarations being referenced explicitly come first.
|
||||
std::sort(Result.begin(), Result.end(),
|
||||
[](const DeclInfo &L, const DeclInfo &R) {
|
||||
if (L.IsReferencedExplicitly != R.IsReferencedExplicitly)
|
||||
return L.IsReferencedExplicitly > R.IsReferencedExplicitly;
|
||||
return L.D->getBeginLoc() < R.D->getBeginLoc();
|
||||
});
|
||||
llvm::sort(Result, [](const DeclInfo &L, const DeclInfo &R) {
|
||||
if (L.IsReferencedExplicitly != R.IsReferencedExplicitly)
|
||||
return L.IsReferencedExplicitly > R.IsReferencedExplicitly;
|
||||
return L.D->getBeginLoc() < R.D->getBeginLoc();
|
||||
});
|
||||
return Result;
|
||||
}
|
||||
|
||||
std::vector<MacroDecl> takeMacroInfos() {
|
||||
// Don't keep the same Macro info multiple times.
|
||||
std::sort(MacroInfos.begin(), MacroInfos.end(),
|
||||
[](const MacroDecl &Left, const MacroDecl &Right) {
|
||||
return Left.Info < Right.Info;
|
||||
});
|
||||
llvm::sort(MacroInfos, [](const MacroDecl &Left, const MacroDecl &Right) {
|
||||
return Left.Info < Right.Info;
|
||||
});
|
||||
|
||||
auto Last = std::unique(MacroInfos.begin(), MacroInfos.end(),
|
||||
[](const MacroDecl &Left, const MacroDecl &Right) {
|
||||
|
|
|
@@ -46,12 +46,11 @@ CanonicalIncludes::mapHeader(llvm::ArrayRef<std::string> Headers,
|
|||
return SE->second;
|
||||
// Find the first header such that the extension is not '.inc', and isn't a
|
||||
// recognized non-header file
|
||||
auto I =
|
||||
std::find_if(Headers.begin(), Headers.end(), [](llvm::StringRef Include) {
|
||||
// Skip .inc file whose including header file should
|
||||
// be #included instead.
|
||||
return !Include.endswith(".inc");
|
||||
});
|
||||
auto I = llvm::find_if(Headers, [](llvm::StringRef Include) {
|
||||
// Skip .inc file whose including header file should
|
||||
// be #included instead.
|
||||
return !Include.endswith(".inc");
|
||||
});
|
||||
if (I == Headers.end())
|
||||
return Headers[0]; // Fallback to the declaring header.
|
||||
StringRef Header = *I;
|
||||
|
|
|
@@ -128,8 +128,8 @@ std::unique_ptr<SymbolIndex> FileSymbols::buildMemIndex() {
|
|||
for (auto &Sym : MergedRefs) {
|
||||
auto &SymRefs = Sym.second;
|
||||
// Sorting isn't required, but yields more stable results over rebuilds.
|
||||
std::sort(SymRefs.begin(), SymRefs.end());
|
||||
std::copy(SymRefs.begin(), SymRefs.end(), back_inserter(RefsStorage));
|
||||
llvm::sort(SymRefs);
|
||||
llvm::copy(SymRefs, back_inserter(RefsStorage));
|
||||
AllRefs.try_emplace(
|
||||
Sym.first,
|
||||
ArrayRef<Ref>(&RefsStorage[RefsStorage.size() - SymRefs.size()],
|
||||
|
|
|
@@ -84,10 +84,8 @@ float quality(const Symbol &S) {
|
|||
}
|
||||
|
||||
SymbolSlab::const_iterator SymbolSlab::find(const SymbolID &ID) const {
|
||||
auto It = std::lower_bound(Symbols.begin(), Symbols.end(), ID,
|
||||
[](const Symbol &S, const SymbolID &I) {
|
||||
return S.ID < I;
|
||||
});
|
||||
auto It = llvm::lower_bound(
|
||||
Symbols, ID, [](const Symbol &S, const SymbolID &I) { return S.ID < I; });
|
||||
if (It != Symbols.end() && It->ID == ID)
|
||||
return It;
|
||||
return Symbols.end();
|
||||
|
@@ -112,8 +110,8 @@ void SymbolSlab::Builder::insert(const Symbol &S) {
|
|||
SymbolSlab SymbolSlab::Builder::build() && {
|
||||
Symbols = {Symbols.begin(), Symbols.end()}; // Force shrink-to-fit.
|
||||
// Sort symbols so the slab can binary search over them.
|
||||
std::sort(Symbols.begin(), Symbols.end(),
|
||||
[](const Symbol &L, const Symbol &R) { return L.ID < R.ID; });
|
||||
llvm::sort(Symbols,
|
||||
[](const Symbol &L, const Symbol &R) { return L.ID < R.ID; });
|
||||
// We may have unused strings from overwritten symbols. Build a new arena.
|
||||
BumpPtrAllocator NewArena;
|
||||
llvm::UniqueStringSaver Strings(NewArena);
|
||||
|
@@ -155,8 +153,8 @@ RefSlab RefSlab::Builder::build() && {
|
|||
Result.reserve(Refs.size());
|
||||
for (auto &Sym : Refs) {
|
||||
auto &SymRefs = Sym.second;
|
||||
std::sort(SymRefs.begin(), SymRefs.end());
|
||||
// TODO: do we really need to dedup?
|
||||
llvm::sort(SymRefs);
|
||||
// FIXME: do we really need to dedup?
|
||||
SymRefs.erase(std::unique(SymRefs.begin(), SymRefs.end()), SymRefs.end());
|
||||
|
||||
auto *Array = Arena.Allocate<Ref>(SymRefs.size());
|
||||
|
|
|
@@ -158,7 +158,7 @@ public:
|
|||
// Finalize the table and write it to OS. No more strings may be added.
|
||||
void finalize(raw_ostream &OS) {
|
||||
Sorted = {Unique.begin(), Unique.end()};
|
||||
std::sort(Sorted.begin(), Sorted.end());
|
||||
llvm::sort(Sorted);
|
||||
for (unsigned I = 0; I < Sorted.size(); ++I)
|
||||
Index.try_emplace({Sorted[I].data(), Sorted[I].size()}, I);
|
||||
|
||||
|
|
|
@@ -129,8 +129,7 @@ bool isPrivateProtoDecl(const NamedDecl &ND) {
|
|||
// will include OUTER_INNER and exclude some_enum_constant.
|
||||
// FIXME: the heuristic relies on naming style (i.e. no underscore in
|
||||
// user-defined names) and can be improved.
|
||||
return (ND.getKind() != Decl::EnumConstant) ||
|
||||
std::any_of(Name.begin(), Name.end(), islower);
|
||||
return (ND.getKind() != Decl::EnumConstant) || llvm::any_of(Name, islower);
|
||||
}
|
||||
|
||||
// We only collect #include paths for symbols that are suitable for global code
|
||||
|
|
|
@@ -123,8 +123,7 @@ void Dex::buildIndex() {
|
|||
|
||||
// Symbols are sorted by symbol qualities so that items in the posting lists
|
||||
// are stored in the descending order of symbol quality.
|
||||
std::sort(begin(ScoredSymbols), end(ScoredSymbols),
|
||||
std::greater<std::pair<float, const Symbol *>>());
|
||||
llvm::sort(ScoredSymbols, std::greater<std::pair<float, const Symbol *>>());
|
||||
|
||||
// SymbolQuality was empty up until now.
|
||||
SymbolQuality.resize(Symbols.size());
|
||||
|
|
|
@@ -40,11 +40,10 @@ public:
|
|||
// highest element starting from the front. When child iterators in the
|
||||
// beginning have smaller estimated size, the sync() will have less restarts
|
||||
// and become more effective.
|
||||
std::sort(begin(Children), end(Children),
|
||||
[](const std::unique_ptr<Iterator> &LHS,
|
||||
const std::unique_ptr<Iterator> &RHS) {
|
||||
return LHS->estimateSize() < RHS->estimateSize();
|
||||
});
|
||||
llvm::sort(Children, [](const std::unique_ptr<Iterator> &LHS,
|
||||
const std::unique_ptr<Iterator> &RHS) {
|
||||
return LHS->estimateSize() < RHS->estimateSize();
|
||||
});
|
||||
}
|
||||
|
||||
bool reachedEnd() const override { return ReachedEnd; }
|
||||
|
|
|
@@ -1139,6 +1139,12 @@ auto lower_bound(R &&Range, ForwardIt I) -> decltype(adl_begin(Range)) {
|
|||
return std::lower_bound(adl_begin(Range), adl_end(Range), I);
|
||||
}
|
||||
|
||||
template <typename R, typename ForwardIt, typename Compare>
|
||||
auto lower_bound(R &&Range, ForwardIt I, Compare C)
|
||||
-> decltype(adl_begin(Range)) {
|
||||
return std::lower_bound(adl_begin(Range), adl_end(Range), I, C);
|
||||
}
|
||||
|
||||
/// Provide wrappers to std::upper_bound which take ranges instead of having to
|
||||
/// pass begin/end explicitly.
|
||||
template <typename R, typename ForwardIt>
|
||||
|
|
Loading…
Reference in New Issue