Mirror of https://github.com/MaskRay/ccls.git (synced 2024-11-22 07:35:08 +00:00)
Reformat
parent af3c09d00d
commit db7e25c542
@@ -2,9 +2,9 @@

#include "clang_index.h"
#include "clang_translation_unit.h"
#include "lru_cache.h"
#include "lsp_completion.h"
#include "lsp_diagnostic.h"
#include "lru_cache.h"
#include "project.h"
#include "threaded_queue.h"
#include "working_files.h"
@@ -1811,8 +1811,8 @@ void OnIndexDeclaration(CXClientData client_data, const CXIdxDeclInfo* decl) {
// TODO: For type section, verify if this ever runs for non definitions?
// if (!decl->isRedeclaration) {

SetTypeName(type, cursor, decl->semanticContainer,
decl->entityInfo->name, param);
SetTypeName(type, cursor, decl->semanticContainer, decl->entityInfo->name,
param);
type->def.kind = GetSymbolKind(decl->entityInfo->kind);
if (param->config->index.comments)
type->def.comments = cursor.get_comments();
@@ -8,9 +8,9 @@
#include "import_pipeline.h"
#include "include_complete.h"
#include "indexer.h"
#include "lsp_diagnostic.h"
#include "lex_utils.h"
#include "lru_cache.h"
#include "lsp_diagnostic.h"
#include "match.h"
#include "message_handler.h"
#include "options.h"
@@ -196,7 +196,8 @@ void RunQueryDbThread(const std::string& bin_name,
Out_Error out;
out.id = id;
out.error.code = lsErrorCodes::InternalError;
out.error.message = "Dropping completion request; a newer request "
out.error.message =
"Dropping completion request; a newer request "
"has come in that will be serviced instead.";
QueueManager::WriteStdout(IpcId::Unknown, out);
}
@@ -238,9 +238,7 @@ MAKE_REFLECT_STRUCT(Config::Diagnostics,
frequencyMs,
onParse,
whitelist)
MAKE_REFLECT_STRUCT(Config::Highlight,
blacklist,
whitelist)
MAKE_REFLECT_STRUCT(Config::Highlight, blacklist, whitelist)
MAKE_REFLECT_STRUCT(Config::Index,
attributeMakeCallsToCtor,
blacklist,
@@ -19,8 +19,10 @@ void DiagnosticsEngine::Publish(WorkingFiles* working_files,
working_file->diagnostics_ = diagnostics;
});

int64_t now = std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::high_resolution_clock::now().time_since_epoch()).count();
int64_t now =
std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::high_resolution_clock::now().time_since_epoch())
.count();
if (frequencyMs_ >= 0 && (nextPublish_ <= now || diagnostics.empty()) &&
match_->IsMatch(path)) {
nextPublish_ = now + frequencyMs_;
@@ -43,7 +43,7 @@ void CalculateRoles(std::string_view s, int roles[], int* class_set) {
}
roles[s.size() - 1] = fn();
}
}
} // namespace

int FuzzyMatcher::MissScore(int j, bool last) {
int s = last ? -20 : 0;
@@ -152,7 +152,8 @@ TEST_SUITE("fuzzy_match") {
Ranks("ab", {"ab", "aoo_boo", "acb"});
Ranks("CC", {"CamelCase", "camelCase", "camelcase"});
Ranks("cC", {"camelCase", "CamelCase", "camelcase"});
Ranks("c c", {"camel case", "camelCase", "CamelCase", "camelcase", "camel ace"});
Ranks("c c",
{"camel case", "camelCase", "CamelCase", "camelcase", "camel ace"});
Ranks("Da.Te", {"Data.Text", "Data.Text.Lazy", "Data.Aeson.Encoding.text"});
// prefix
Ranks("is", {"isIEEE", "inSuf"});
@@ -140,7 +140,8 @@ void IncludeComplete::InsertCompletionItem(const std::string& absolute_path,
auto it = absolute_path_to_completion_item.find(absolute_path);
if (it == absolute_path_to_completion_item.end() ||
completion_items[it->second].detail.length() > item.detail.length()) {
absolute_path_to_completion_item[absolute_path] = completion_items.size() - 1;
absolute_path_to_completion_item[absolute_path] =
completion_items.size() - 1;
}
} else {
lsCompletionItem& inserted_item =
@@ -180,10 +180,9 @@ struct TypeDefDefinitionData {

bool operator==(const TypeDefDefinitionData& o) const {
return detailed_name == o.detailed_name && spell == o.spell &&
extent == o.extent && alias_of == o.alias_of &&
bases == o.bases && types == o.types && funcs == o.funcs &&
vars == o.vars && kind == o.kind && hover == o.hover &&
comments == o.comments;
extent == o.extent && alias_of == o.alias_of && bases == o.bases &&
types == o.types && funcs == o.funcs && vars == o.vars &&
kind == o.kind && hover == o.hover && comments == o.comments;
}
bool operator!=(const TypeDefDefinitionData& o) const {
return !(*this == o);
@@ -194,9 +193,7 @@ struct TypeDefDefinitionData {
short_name_size);
}
// Used by cquery_inheritance_hierarchy.cc:Expand generic lambda
std::string_view DetailedName(bool) const {
return detailed_name;
}
std::string_view DetailedName(bool) const { return detailed_name; }
};
template <typename TVisitor, typename Family>
void Reflect(TVisitor& visitor, TypeDefDefinitionData<Family>& value) {
@@ -11,7 +11,9 @@ const char* IpcIdToString(IpcId id) {
case IpcId::Exit:
return "exit";

#define CASE(name, method) case IpcId::name: return method;
#define CASE(name, method) \
case IpcId::name: \
return method;
#include "methods.inc"
#undef CASE

@@ -355,7 +355,8 @@ TEST_SUITE("LexWordAroundPos") {
std::string content = " file:ns::_my_t5ype7 ";
REQUIRE(LexIdentifierAroundPos(CharPos(content, 'f'), content) == "file");
REQUIRE(LexIdentifierAroundPos(CharPos(content, 's'), content) == "ns");
REQUIRE(LexIdentifierAroundPos(CharPos(content, 'y'), content) == "ns::_my_t5ype7");
REQUIRE(LexIdentifierAroundPos(CharPos(content, 'y'), content) ==
"ns::_my_t5ype7");
}

TEST_CASE("dot, dash, colon are skipped") {
@@ -4,8 +4,8 @@
#include "serializers/json.h"

#include <doctest/doctest.h>
#include <loguru.hpp>
#include <rapidjson/writer.h>
#include <loguru.hpp>

#include <stdio.h>
#include <iostream>
@@ -27,14 +27,14 @@ struct CqueryBaseHandler : BaseMessageHandler<Ipc_CqueryBase> {
FindSymbolsAtLocation(working_file, file, request->params.position)) {
if (sym.kind == SymbolKind::Type) {
if (const auto* def = db->GetType(sym).AnyDef())
out.result =
GetLsLocationExs(db, working_files, GetDeclarations(db, def->bases),
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, def->bases),
config->xref.container, config->xref.maxNum);
break;
} else if (sym.kind == SymbolKind::Func) {
if (const auto* def = db->GetFunc(sym).AnyDef())
out.result =
GetLsLocationExs(db, working_files, GetDeclarations(db, def->bases),
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, def->bases),
config->xref.container, config->xref.maxNum);
break;
}
@@ -5,7 +5,12 @@
#include <loguru.hpp>

namespace {
enum class CallType : uint8_t { Direct = 0, Base = 1, Derived = 2, All = 1 | 2 };
enum class CallType : uint8_t {
Direct = 0,
Base = 1,
Derived = 2,
All = 1 | 2
};
MAKE_REFLECT_TYPE_PROXY(CallType);

bool operator&(CallType lhs, CallType rhs) {
@@ -96,7 +101,8 @@ bool Expand(MessageHandler* m,
if (const auto* def = func.AnyDef())
for (SymbolRef ref : def->callees)
if (ref.kind == SymbolKind::Func)
handle(Use(ref.range, ref.id, ref.kind, ref.role, def->file), call_type);
handle(Use(ref.range, ref.id, ref.kind, ref.role, def->file),
call_type);
} else {
for (Use use : func.uses)
if (use.kind == SymbolKind::Func)
@@ -27,14 +27,14 @@ struct CqueryDerivedHandler : BaseMessageHandler<Ipc_CqueryDerived> {
FindSymbolsAtLocation(working_file, file, request->params.position)) {
if (sym.kind == SymbolKind::Type) {
QueryType& type = db->GetType(sym);
out.result =
GetLsLocationExs(db, working_files, GetDeclarations(db, type.derived),
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, type.derived),
config->xref.container, config->xref.maxNum);
break;
} else if (sym.kind == SymbolKind::Func) {
QueryFunc& func = db->GetFunc(sym);
out.result =
GetLsLocationExs(db, working_files, GetDeclarations(db, func.derived),
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, func.derived),
config->xref.container, config->xref.maxNum);
break;
}
@@ -7,8 +7,8 @@ struct Ipc_CqueryInheritanceHierarchy
: public RequestMessage<Ipc_CqueryInheritanceHierarchy> {
const static IpcId kIpcId = IpcId::CqueryInheritanceHierarchy;
struct Params {
// If id+kind are specified, expand a node; otherwise textDocument+position should
// be specified for building the root and |levels| of nodes below.
// If id+kind are specified, expand a node; otherwise textDocument+position
// should be specified for building the root and |levels| of nodes below.
lsTextDocumentIdentifier textDocument;
lsPosition position;

@@ -161,8 +161,8 @@ struct CqueryInheritanceHierarchyHandler
WorkingFile* working_file =
working_files->GetFileByFilename(file->def->path);

for (SymbolRef sym :
FindSymbolsAtLocation(working_file, file, request->params.position)) {
for (SymbolRef sym : FindSymbolsAtLocation(working_file, file,
request->params.position)) {
if (sym.kind == SymbolKind::Func || sym.kind == SymbolKind::Type) {
out.result = BuildInitial(sym, params.derived, params.detailedName,
params.levels);
@@ -235,8 +235,8 @@ struct CqueryMemberHierarchyHandler
case SymbolKind::Var: {
const QueryVar::Def* def = db->GetVar(sym).AnyDef();
if (def && def->type)
out.result = BuildInitial(QueryTypeId(*def->type), params.detailedName,
params.levels);
out.result = BuildInitial(QueryTypeId(*def->type),
params.detailedName, params.levels);
break;
}
default:
@@ -441,8 +441,8 @@ struct TextDocumentCodeActionHandler
// For error diagnostics, provide an action to resolve an include.
// TODO: find a way to index diagnostic contents so line numbers
// don't get mismatched when actively editing a file.
std::string_view include_query =
LexIdentifierAroundPos(diag.range.start, working_file->buffer_content);
std::string_view include_query = LexIdentifierAroundPos(
diag.range.start, working_file->buffer_content);
if (diag.severity == lsDiagnosticSeverity::Error &&
!include_query.empty()) {
const size_t kMaxResults = 20;
@@ -118,9 +118,11 @@ struct TextDocumentCodeLensHandler
AddCodeLens("ref", "refs", &common, OffsetStartColumn(use, 0),
type.uses, true /*force_display*/);
AddCodeLens("derived", "derived", &common, OffsetStartColumn(use, 1),
GetDeclarations(db, type.derived), false /*force_display*/);
GetDeclarations(db, type.derived),
false /*force_display*/);
AddCodeLens("var", "vars", &common, OffsetStartColumn(use, 2),
GetDeclarations(db, type.instances), false /*force_display*/);
GetDeclarations(db, type.instances),
false /*force_display*/);
break;
}
case SymbolKind::Func: {
@@ -164,8 +166,8 @@ struct TextDocumentCodeLensHandler
false /*force_display*/);
}

AddCodeLens("derived", "derived", &common,
OffsetStartColumn(use, offset++),
AddCodeLens(
"derived", "derived", &common, OffsetStartColumn(use, offset++),
GetDeclarations(db, func.derived), false /*force_display*/);

// "Base"
@@ -193,7 +195,8 @@ struct TextDocumentCodeLensHandler
}
} else {
AddCodeLens("base", "base", &common, OffsetStartColumn(use, 1),
GetDeclarations(db, def->bases), false /*force_display*/);
GetDeclarations(db, def->bases),
false /*force_display*/);
}

break;
@@ -156,8 +156,7 @@ struct TextDocumentDefinitionHandler
}
}
if (best_i != -1) {
Maybe<Use> use =
GetDefinitionSpell(db, db->symbols[best_i]);
Maybe<Use> use = GetDefinitionSpell(db, db->symbols[best_i]);
assert(use);
if (auto ls_loc = GetLsLocationEx(db, working_files, *use,
config->xref.container))
@@ -45,7 +45,8 @@ struct TextDocumentDocumentSymbolHandler
if (sym.kind == SymbolKind::Var) {
QueryVar& var = db->GetVar(sym);
auto* def = var.AnyDef();
if (!def || !def->spell) continue;
if (!def || !def->spell)
continue;
// Ignore local variables.
if (def->spell->kind == SymbolKind::Func &&
def->storage != StorageClass::Static &&
@@ -107,7 +107,8 @@ struct WorkspaceSymbolHandler : BaseMessageHandler<Ipc_WorkspaceSymbol> {

for (int i = 0; i < (int)db->symbols.size(); ++i) {
std::string_view detailed_name = db->GetSymbolDetailedName(i);
if (CaseFoldingSubsequenceMatch(query_without_space, detailed_name).first) {
if (CaseFoldingSubsequenceMatch(query_without_space, detailed_name)
.first) {
// Do not show the same entry twice.
if (!inserted_results.insert(std::string(detailed_name)).second)
continue;
@@ -89,8 +89,7 @@ std::vector<std::string> kPathArgs = {
"-I", "-iquote", "-isystem", "--sysroot=",
"-isysroot", "-gcc-toolchain", "-include-pch", "-iframework",
"-F", "-imacros", "-include", "/I",
"-idirafter"
};
"-idirafter"};

// Arguments which always require an absolute path, ie, clang -working-directory
// does not work as expected. Argument processing assumes that this is a subset
@@ -38,10 +38,10 @@ struct Project {
// compile_commands.json in it, otherwise they are retrieved in
// |root_directory|.
// For .cquery, recursive directory listing is used and files with known
// suffixes are indexed. .cquery files can exist in subdirectories and they will affect
// flags in their subtrees (relative paths are relative to the project root,
// not subdirectories).
// For compile_commands.json, its entries are indexed.
// suffixes are indexed. .cquery files can exist in subdirectories and they
// will affect flags in their subtrees (relative paths are relative to the
// project root, not subdirectories). For compile_commands.json, its entries
// are indexed.
void Load(Config* config, const std::string& root_directory);

// Lookup the CompilationEntry for |filename|. If no entry was found this
@@ -100,8 +100,7 @@ std::vector<Use> GetDeclarations(QueryDatabase* db,
return GetDeclarations(db->vars, ids);
}

std::vector<Use> GetNonDefDeclarations(QueryDatabase* db,
SymbolIdx sym) {
std::vector<Use> GetNonDefDeclarations(QueryDatabase* db, SymbolIdx sym) {
switch (sym.kind) {
case SymbolKind::Func:
return db->GetFunc(sym).declarations;
@@ -10,10 +10,14 @@ Maybe<Use> GetDefinitionExtent(QueryDatabase* db, SymbolIdx sym);
Maybe<QueryFileId> GetDeclarationFileForSymbol(QueryDatabase* db,
SymbolIdx sym);

// Get defining declaration (if exists) or an arbitrary declaration (otherwise) for each id.
std::vector<Use> GetDeclarations(QueryDatabase* db, const std::vector<QueryFuncId>& ids);
std::vector<Use> GetDeclarations(QueryDatabase* db, const std::vector<QueryTypeId>& ids);
std::vector<Use> GetDeclarations(QueryDatabase* db, const std::vector<QueryVarId>& ids);
// Get defining declaration (if exists) or an arbitrary declaration (otherwise)
// for each id.
std::vector<Use> GetDeclarations(QueryDatabase* db,
const std::vector<QueryFuncId>& ids);
std::vector<Use> GetDeclarations(QueryDatabase* db,
const std::vector<QueryTypeId>& ids);
std::vector<Use> GetDeclarations(QueryDatabase* db,
const std::vector<QueryVarId>& ids);

// Get non-defining declarations.
std::vector<Use> GetNonDefDeclarations(QueryDatabase* db, SymbolIdx sym);
@@ -78,7 +82,10 @@ void EachEntityDef(QueryDatabase* db, SymbolIdx sym, Fn&& fn) {
}

template <typename Fn>
void EachOccurrence(QueryDatabase* db, SymbolIdx sym, bool include_decl, Fn&& fn) {
void EachOccurrence(QueryDatabase* db,
SymbolIdx sym,
bool include_decl,
Fn&& fn) {
WithEntity(db, sym, [&](const auto& entity) {
for (Use use : entity.uses)
fn(use);
@@ -6,11 +6,11 @@
#include "utils.h"

#include <doctest/doctest.h>
#include <loguru/loguru.hpp>
#include <rapidjson/document.h>
#include <rapidjson/prettywriter.h>
#include <rapidjson/stringbuffer.h>
#include <rapidjson/writer.h>
#include <loguru/loguru.hpp>

#include <stdio.h>
#include <stdlib.h>
@@ -32,12 +32,8 @@ template <typename... Queue>
struct MultiQueueLock {
MultiQueueLock(Queue... lockable) : tuple_{lockable...} { lock(); }
~MultiQueueLock() { unlock(); }
void lock() {
lock_impl(typename std::index_sequence_for<Queue...>{});
}
void unlock() {
unlock_impl(typename std::index_sequence_for<Queue...>{});
}
void lock() { lock_impl(typename std::index_sequence_for<Queue...>{}); }
void unlock() { unlock_impl(typename std::index_sequence_for<Queue...>{}); }

private:
template <size_t... Is>