Mirror of https://github.com/MaskRay/ccls.git (synced 2024-11-22 07:35:08 +00:00)

Commit db7e25c542 ("Reformat")
Parent: af3c09d00d
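The hunks below re-wrap long lines to an 80-column limit and adjust indentation, which is what a clang-format pass over the tree produces. As a rough sketch only (the style options and the file list here are assumptions for illustration, not taken from this commit or from the repository's actual .clang-format), such a reformat is typically driven by a config file at the repository root plus a bulk in-place invocation:

    # .clang-format (assumed options; the real file may differ)
    BasedOnStyle: Chromium   # 2-space indent
    ColumnLimit: 80          # wrap lines at 80 columns

    # re-format every tracked C++ source in place (assumed file patterns)
    git ls-files '*.cc' '*.h' | xargs clang-format -i
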
@@ -94,7 +94,7 @@ lsCompletionItemKind GetCompletionKind(CXCursorKind cursor_kind) {
return lsCompletionItemKind::Method;

case CXCursor_FunctionTemplate:
return lsCompletionItemKind::Function;
return lsCompletionItemKind::Function;

case CXCursor_Constructor:
case CXCursor_Destructor:

@@ -186,7 +186,7 @@ void BuildCompletionItemTexts(std::vector<lsCompletionItem>& out,
case CXCompletionChunk_Equal: text = '='; break;
case CXCompletionChunk_HorizontalSpace: text = ' '; break;
case CXCompletionChunk_VerticalSpace: text = ' '; break;
// clang-format on
// clang-format on

case CXCompletionChunk_ResultType:
result_type =

@@ -2,9 +2,9 @@

#include "clang_index.h"
#include "clang_translation_unit.h"
#include "lru_cache.h"
#include "lsp_completion.h"
#include "lsp_diagnostic.h"
#include "lru_cache.h"
#include "project.h"
#include "threaded_queue.h"
#include "working_files.h"

@@ -29,7 +29,7 @@ class ClangType {
// NOTE: This will return false for pointed types. Should we call
// strip_qualifiers for the user?
return cx_type.kind >= CXType_FirstBuiltin &&
cx_type.kind <= CXType_LastBuiltin;
cx_type.kind <= CXType_LastBuiltin;
}

ClangCursor get_declaration() const;

@@ -493,16 +493,16 @@ Use SetUse(IndexFile* db, Range range, ClangCursor parent, Role role) {

const char* GetAnonName(CXCursorKind kind) {
switch (kind) {
case CXCursor_ClassDecl:
return "(anon class)";
case CXCursor_EnumDecl:
return "(anon enum)";
case CXCursor_StructDecl:
return "(anon struct)";
case CXCursor_UnionDecl:
return "(anon union)";
default:
return "(anon)";
case CXCursor_ClassDecl:
return "(anon class)";
case CXCursor_EnumDecl:
return "(anon enum)";
case CXCursor_StructDecl:
return "(anon struct)";
case CXCursor_UnionDecl:
return "(anon union)";
default:
return "(anon)";
}
}

@@ -779,7 +779,7 @@ void Uniquify(std::vector<Use>& uses) {
}

// FIXME Reference: set id in call sites and remove this
//void AddUse(std::vector<Use>& values, Range value) {
// void AddUse(std::vector<Use>& values, Range value) {
//   values.push_back(
//       Use(value, Id<void>(), SymbolKind::File, Role::Reference, {}));
//}

@@ -1811,8 +1811,8 @@ void OnIndexDeclaration(CXClientData client_data, const CXIdxDeclInfo* decl) {
// TODO: For type section, verify if this ever runs for non definitions?
// if (!decl->isRedeclaration) {

SetTypeName(type, cursor, decl->semanticContainer,
decl->entityInfo->name, param);
SetTypeName(type, cursor, decl->semanticContainer, decl->entityInfo->name,
param);
type->def.kind = GetSymbolKind(decl->entityInfo->kind);
if (param->config->index.comments)
type->def.comments = cursor.get_comments();

@@ -177,7 +177,7 @@ const char* ClangBuiltinTypeName(CXTypeKind kind) {
#endif
case CXType_NullPtr: return "nullptr";
default: return "";
// clang-format on
// clang-format on
}
}

@@ -8,9 +8,9 @@
#include "import_pipeline.h"
#include "include_complete.h"
#include "indexer.h"
#include "lsp_diagnostic.h"
#include "lex_utils.h"
#include "lru_cache.h"
#include "lsp_diagnostic.h"
#include "match.h"
#include "message_handler.h"
#include "options.h"

@@ -196,7 +196,8 @@ void RunQueryDbThread(const std::string& bin_name,
Out_Error out;
out.id = id;
out.error.code = lsErrorCodes::InternalError;
out.error.message = "Dropping completion request; a newer request "
out.error.message =
"Dropping completion request; a newer request "
"has come in that will be serviced instead.";
QueueManager::WriteStdout(IpcId::Unknown, out);
}

@@ -238,9 +238,7 @@ MAKE_REFLECT_STRUCT(Config::Diagnostics,
frequencyMs,
onParse,
whitelist)
MAKE_REFLECT_STRUCT(Config::Highlight,
blacklist,
whitelist)
MAKE_REFLECT_STRUCT(Config::Highlight, blacklist, whitelist)
MAKE_REFLECT_STRUCT(Config::Index,
attributeMakeCallsToCtor,
blacklist,

@@ -7,7 +7,7 @@
void DiagnosticsEngine::Init(Config* config) {
frequencyMs_ = config->diagnostics.frequencyMs;
match_ = std::make_unique<GroupMatch>(config->diagnostics.whitelist,
config->diagnostics.blacklist);
config->diagnostics.blacklist);
}

void DiagnosticsEngine::Publish(WorkingFiles* working_files,

@@ -15,12 +15,14 @@ void DiagnosticsEngine::Publish(WorkingFiles* working_files,
std::vector<lsDiagnostic> diagnostics) {
// Cache diagnostics so we can show fixits.
working_files->DoActionOnFile(path, [&](WorkingFile* working_file) {
if (working_file)
working_file->diagnostics_ = diagnostics;
});
if (working_file)
working_file->diagnostics_ = diagnostics;
});

int64_t now = std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::high_resolution_clock::now().time_since_epoch()).count();
int64_t now =
std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::high_resolution_clock::now().time_since_epoch())
.count();
if (frequencyMs_ >= 0 && (nextPublish_ <= now || diagnostics.empty()) &&
match_->IsMatch(path)) {
nextPublish_ = now + frequencyMs_;

@@ -43,7 +43,7 @@ void CalculateRoles(std::string_view s, int roles[], int* class_set) {
}
roles[s.size() - 1] = fn();
}
}
} // namespace

int FuzzyMatcher::MissScore(int j, bool last) {
int s = last ? -20 : 0;

@@ -152,7 +152,8 @@ TEST_SUITE("fuzzy_match") {
Ranks("ab", {"ab", "aoo_boo", "acb"});
Ranks("CC", {"CamelCase", "camelCase", "camelcase"});
Ranks("cC", {"camelCase", "CamelCase", "camelcase"});
Ranks("c c", {"camel case", "camelCase", "CamelCase", "camelcase", "camel ace"});
Ranks("c c",
{"camel case", "camelCase", "CamelCase", "camelcase", "camel ace"});
Ranks("Da.Te", {"Data.Text", "Data.Text.Lazy", "Data.Aeson.Encoding.text"});
// prefix
Ranks("is", {"isIEEE", "inSuf"});

@@ -6,7 +6,7 @@
#include <string>

class FuzzyMatcher {
public:
public:
constexpr static int kMaxPat = 100;
constexpr static int kMaxText = 200;
// Negative but far from INT_MIN so that intermediate results are hard to

@@ -16,7 +16,7 @@ public:
FuzzyMatcher(std::string_view pattern);
int Match(std::string_view text);

private:
private:
std::string pat;
std::string_view text;
int pat_set, text_set;

@@ -679,7 +679,7 @@ void QueryDb_DoIdMap(QueueManager* queue,

auto id_map = std::make_unique<IdMap>(db, file->id_cache);
return std::make_unique<Index_OnIdMapped::File>(std::move(file),
std::move(id_map));
std::move(id_map));
};
response.current = make_map(std::move(request->current));
response.previous = make_map(std::move(request->previous));

@@ -113,7 +113,7 @@ void IncludeComplete::Rescan() {
if (!match_ && (!config_->completion.includeWhitelist.empty() ||
!config_->completion.includeBlacklist.empty()))
match_ = std::make_unique<GroupMatch>(config_->completion.includeWhitelist,
config_->completion.includeBlacklist);
config_->completion.includeBlacklist);

is_scanning = true;
WorkThread::StartThread("scan_includes", [this]() {

@@ -140,7 +140,8 @@ void IncludeComplete::InsertCompletionItem(const std::string& absolute_path,
auto it = absolute_path_to_completion_item.find(absolute_path);
if (it == absolute_path_to_completion_item.end() ||
completion_items[it->second].detail.length() > item.detail.length()) {
absolute_path_to_completion_item[absolute_path] = completion_items.size() - 1;
absolute_path_to_completion_item[absolute_path] =
completion_items.size() - 1;
}
} else {
lsCompletionItem& inserted_item =

@@ -180,10 +180,9 @@ struct TypeDefDefinitionData {

bool operator==(const TypeDefDefinitionData& o) const {
return detailed_name == o.detailed_name && spell == o.spell &&
extent == o.extent && alias_of == o.alias_of &&
bases == o.bases && types == o.types && funcs == o.funcs &&
vars == o.vars && kind == o.kind && hover == o.hover &&
comments == o.comments;
extent == o.extent && alias_of == o.alias_of && bases == o.bases &&
types == o.types && funcs == o.funcs && vars == o.vars &&
kind == o.kind && hover == o.hover && comments == o.comments;
}
bool operator!=(const TypeDefDefinitionData& o) const {
return !(*this == o);

@@ -194,9 +193,7 @@ struct TypeDefDefinitionData {
short_name_size);
}
// Used by cquery_inheritance_hierarchy.cc:Expand generic lambda
std::string_view DetailedName(bool) const {
return detailed_name;
}
std::string_view DetailedName(bool) const { return detailed_name; }
};
template <typename TVisitor, typename Family>
void Reflect(TVisitor& visitor, TypeDefDefinitionData<Family>& value) {

@@ -11,8 +11,10 @@ const char* IpcIdToString(IpcId id) {
case IpcId::Exit:
return "exit";

#define CASE(name, method) case IpcId::name: return method;
#include "methods.inc"
#define CASE(name, method) \
case IpcId::name:          \
return method;
#include "methods.inc"
#undef CASE

case IpcId::Unknown:

@@ -14,7 +14,7 @@ enum class IpcId : int {
Exit,

#define CASE(x, _) x,
#include "methods.inc"
#include "methods.inc"
#undef CASE

// Internal implementation detail.

@@ -355,7 +355,8 @@ TEST_SUITE("LexWordAroundPos") {
std::string content = " file:ns::_my_t5ype7 ";
REQUIRE(LexIdentifierAroundPos(CharPos(content, 'f'), content) == "file");
REQUIRE(LexIdentifierAroundPos(CharPos(content, 's'), content) == "ns");
REQUIRE(LexIdentifierAroundPos(CharPos(content, 'y'), content) == "ns::_my_t5ype7");
REQUIRE(LexIdentifierAroundPos(CharPos(content, 'y'), content) ==
"ns::_my_t5ype7");
}

TEST_CASE("dot, dash, colon are skipped") {

@@ -4,8 +4,8 @@
#include "serializers/json.h"

#include <doctest/doctest.h>
#include <loguru.hpp>
#include <rapidjson/writer.h>
#include <loguru.hpp>

#include <stdio.h>
#include <iostream>

@@ -145,7 +145,7 @@ void EmitSemanticHighlighting(QueryDatabase* db,
if (def->spell)
parent_kind = GetSymbolKind(db, *def->spell);
if (parent_kind == lsSymbolKind::Unknown) {
for (Use use: func.declarations) {
for (Use use : func.declarations) {
parent_kind = GetSymbolKind(db, use);
break;
}

@@ -27,15 +27,15 @@ struct CqueryBaseHandler : BaseMessageHandler<Ipc_CqueryBase> {
FindSymbolsAtLocation(working_file, file, request->params.position)) {
if (sym.kind == SymbolKind::Type) {
if (const auto* def = db->GetType(sym).AnyDef())
out.result =
GetLsLocationExs(db, working_files, GetDeclarations(db, def->bases),
config->xref.container, config->xref.maxNum);
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, def->bases),
config->xref.container, config->xref.maxNum);
break;
} else if (sym.kind == SymbolKind::Func) {
if (const auto* def = db->GetFunc(sym).AnyDef())
out.result =
GetLsLocationExs(db, working_files, GetDeclarations(db, def->bases),
config->xref.container, config->xref.maxNum);
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, def->bases),
config->xref.container, config->xref.maxNum);
break;
}
}

@@ -5,7 +5,12 @@
#include <loguru.hpp>

namespace {
enum class CallType : uint8_t { Direct = 0, Base = 1, Derived = 2, All = 1 | 2 };
enum class CallType : uint8_t {
Direct = 0,
Base = 1,
Derived = 2,
All = 1 | 2
};
MAKE_REFLECT_TYPE_PROXY(CallType);

bool operator&(CallType lhs, CallType rhs) {

@@ -96,7 +101,8 @@ bool Expand(MessageHandler* m,
if (const auto* def = func.AnyDef())
for (SymbolRef ref : def->callees)
if (ref.kind == SymbolKind::Func)
handle(Use(ref.range, ref.id, ref.kind, ref.role, def->file), call_type);
handle(Use(ref.range, ref.id, ref.kind, ref.role, def->file),
call_type);
} else {
for (Use use : func.uses)
if (use.kind == SymbolKind::Func)

|
||||
entry.callType = CallType::Direct;
|
||||
if (def->spell) {
|
||||
if (optional<lsLocation> loc =
|
||||
GetLsLocation(db, working_files, *def->spell))
|
||||
GetLsLocation(db, working_files, *def->spell))
|
||||
entry.location = *loc;
|
||||
}
|
||||
Expand(this, &entry, callee, call_type, detailed_name, levels);
|
||||
@ -193,11 +199,11 @@ struct CqueryCallHierarchyHandler
|
||||
WorkingFile* working_file =
|
||||
working_files->GetFileByFilename(file->def->path);
|
||||
for (SymbolRef sym :
|
||||
FindSymbolsAtLocation(working_file, file, params.position)) {
|
||||
FindSymbolsAtLocation(working_file, file, params.position)) {
|
||||
if (sym.kind == SymbolKind::Func) {
|
||||
out.result =
|
||||
BuildInitial(QueryFuncId(sym.id), params.callee, params.callType,
|
||||
params.detailedName, params.levels);
|
||||
params.detailedName, params.levels);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@@ -27,15 +27,15 @@ struct CqueryDerivedHandler : BaseMessageHandler<Ipc_CqueryDerived> {
FindSymbolsAtLocation(working_file, file, request->params.position)) {
if (sym.kind == SymbolKind::Type) {
QueryType& type = db->GetType(sym);
out.result =
GetLsLocationExs(db, working_files, GetDeclarations(db, type.derived),
config->xref.container, config->xref.maxNum);
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, type.derived),
config->xref.container, config->xref.maxNum);
break;
} else if (sym.kind == SymbolKind::Func) {
QueryFunc& func = db->GetFunc(sym);
out.result =
GetLsLocationExs(db, working_files, GetDeclarations(db, func.derived),
config->xref.container, config->xref.maxNum);
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, func.derived),
config->xref.container, config->xref.maxNum);
break;
}
}

@@ -7,8 +7,8 @@ struct Ipc_CqueryInheritanceHierarchy
: public RequestMessage<Ipc_CqueryInheritanceHierarchy> {
const static IpcId kIpcId = IpcId::CqueryInheritanceHierarchy;
struct Params {
// If id+kind are specified, expand a node; otherwise textDocument+position should
// be specified for building the root and |levels| of nodes below.
// If id+kind are specified, expand a node; otherwise textDocument+position
// should be specified for building the root and |levels| of nodes below.
lsTextDocumentIdentifier textDocument;
lsPosition position;

@@ -161,8 +161,8 @@ struct CqueryInheritanceHierarchyHandler
WorkingFile* working_file =
working_files->GetFileByFilename(file->def->path);

for (SymbolRef sym :
FindSymbolsAtLocation(working_file, file, request->params.position)) {
for (SymbolRef sym : FindSymbolsAtLocation(working_file, file,
request->params.position)) {
if (sym.kind == SymbolKind::Func || sym.kind == SymbolKind::Type) {
out.result = BuildInitial(sym, params.derived, params.detailedName,
params.levels);

@@ -75,7 +75,7 @@ void DoField(MessageHandler* m,
entry1.fieldName = std::string(def1->ShortName());
if (def1->spell) {
if (optional<lsLocation> loc =
GetLsLocation(m->db, m->working_files, *def1->spell))
GetLsLocation(m->db, m->working_files, *def1->spell))
entry1.location = *loc;
}
if (def1->type) {

@@ -128,7 +128,7 @@ bool Expand(MessageHandler* m,
if (def1 && def1->spell) {
// The declaration of target type.
if (optional<lsLocation> loc =
GetLsLocation(m->db, m->working_files, *def1->spell))
GetLsLocation(m->db, m->working_files, *def1->spell))
entry1.location = *loc;
} else if (def->spell) {
// Builtin types have no declaration but the typedef declaration

@@ -175,7 +175,7 @@ struct CqueryMemberHierarchyHandler
entry.name = std::string(def->ShortName());
if (def->spell) {
if (optional<lsLocation> loc =
GetLsLocation(db, working_files, *def->spell))
GetLsLocation(db, working_files, *def->spell))
entry.location = *loc;
}
EachDefinedEntity(db->vars, def->vars, [&](QueryVar& var) {

@@ -195,7 +195,7 @@ struct CqueryMemberHierarchyHandler
entry.id = root_id;
if (def->spell) {
if (optional<lsLocation> loc =
GetLsLocation(db, working_files, *def->spell))
GetLsLocation(db, working_files, *def->spell))
entry.location = *loc;
}
Expand(this, &entry, detailed_name, levels);

@@ -220,27 +220,27 @@ struct CqueryMemberHierarchyHandler
params.textDocument.uri.GetPath(), &file))
return;
WorkingFile* working_file =
working_files->GetFileByFilename(file->def->path);
working_files->GetFileByFilename(file->def->path);
for (SymbolRef sym :
FindSymbolsAtLocation(working_file, file, params.position)) {
FindSymbolsAtLocation(working_file, file, params.position)) {
switch (sym.kind) {
case SymbolKind::Func:
out.result = BuildInitial(QueryFuncId(sym.id), params.detailedName,
params.levels);
break;
case SymbolKind::Type:
out.result = BuildInitial(QueryTypeId(sym.id), params.detailedName,
params.levels);
break;
case SymbolKind::Var: {
const QueryVar::Def* def = db->GetVar(sym).AnyDef();
if (def && def->type)
out.result = BuildInitial(QueryTypeId(*def->type), params.detailedName,
case SymbolKind::Func:
out.result = BuildInitial(QueryFuncId(sym.id), params.detailedName,
params.levels);
break;
}
default:
continue;
break;
case SymbolKind::Type:
out.result = BuildInitial(QueryTypeId(sym.id), params.detailedName,
params.levels);
break;
case SymbolKind::Var: {
const QueryVar::Def* def = db->GetVar(sym).AnyDef();
if (def && def->type)
out.result = BuildInitial(QueryTypeId(*def->type),
params.detailedName, params.levels);
break;
}
default:
continue;
}
break;
}

@@ -441,8 +441,8 @@ struct TextDocumentCodeActionHandler
// For error diagnostics, provide an action to resolve an include.
// TODO: find a way to index diagnostic contents so line numbers
// don't get mismatched when actively editing a file.
std::string_view include_query =
LexIdentifierAroundPos(diag.range.start, working_file->buffer_content);
std::string_view include_query = LexIdentifierAroundPos(
diag.range.start, working_file->buffer_content);
if (diag.severity == lsDiagnosticSeverity::Error &&
!include_query.empty()) {
const size_t kMaxResults = 20;

@@ -118,9 +118,11 @@ struct TextDocumentCodeLensHandler
AddCodeLens("ref", "refs", &common, OffsetStartColumn(use, 0),
type.uses, true /*force_display*/);
AddCodeLens("derived", "derived", &common, OffsetStartColumn(use, 1),
GetDeclarations(db, type.derived), false /*force_display*/);
GetDeclarations(db, type.derived),
false /*force_display*/);
AddCodeLens("var", "vars", &common, OffsetStartColumn(use, 2),
GetDeclarations(db, type.instances), false /*force_display*/);
GetDeclarations(db, type.instances),
false /*force_display*/);
break;
}
case SymbolKind::Func: {

@@ -164,9 +166,9 @@ struct TextDocumentCodeLensHandler
false /*force_display*/);
}

AddCodeLens("derived", "derived", &common,
OffsetStartColumn(use, offset++),
GetDeclarations(db, func.derived), false /*force_display*/);
AddCodeLens(
"derived", "derived", &common, OffsetStartColumn(use, offset++),
GetDeclarations(db, func.derived), false /*force_display*/);

// "Base"
if (def->bases.size() == 1) {

@@ -193,7 +195,8 @@ struct TextDocumentCodeLensHandler
}
} else {
AddCodeLens("base", "base", &common, OffsetStartColumn(use, 1),
GetDeclarations(db, def->bases), false /*force_display*/);
GetDeclarations(db, def->bases),
false /*force_display*/);
}

break;

@@ -197,10 +197,10 @@ void FilterAndSortCompletionResponse(
: FuzzyMatcher::kMinScore;
}
items.erase(std::remove_if(items.begin(), items.end(),
[](const lsCompletionItem& item) {
return item.score_ <= FuzzyMatcher::kMinScore;
}),
items.end());
[](const lsCompletionItem& item) {
return item.score_ <= FuzzyMatcher::kMinScore;
}),
items.end());
std::sort(items.begin(), items.end(),
[](const lsCompletionItem& lhs, const lsCompletionItem& rhs) {
if (lhs.score_ != rhs.score_)

@@ -300,7 +300,7 @@ struct TextDocumentCompletionHandler : MessageHandler {

{
std::unique_lock<std::mutex> lock(
include_complete->completion_items_mutex, std::defer_lock);
include_complete->completion_items_mutex, std::defer_lock);
if (include_complete->is_scanning)
lock.lock();
std::string quote = result.match[5];

@@ -314,7 +314,7 @@ struct TextDocumentCompletionHandler : MessageHandler {
item.filterText = item.label;

FilterAndSortCompletionResponse(&out, result.pattern,
config->completion.filterAndSort);
config->completion.filterAndSort);
DecorateIncludePaths(result.match, &out.result.items);

for (lsCompletionItem& item : out.result.items) {

@@ -156,8 +156,7 @@ struct TextDocumentDefinitionHandler
}
}
if (best_i != -1) {
Maybe<Use> use =
GetDefinitionSpell(db, db->symbols[best_i]);
Maybe<Use> use = GetDefinitionSpell(db, db->symbols[best_i]);
assert(use);
if (auto ls_loc = GetLsLocationEx(db, working_files, *use,
config->xref.container))

@@ -45,7 +45,8 @@ struct TextDocumentDocumentSymbolHandler
if (sym.kind == SymbolKind::Var) {
QueryVar& var = db->GetVar(sym);
auto* def = var.AnyDef();
if (!def || !def->spell) continue;
if (!def || !def->spell)
continue;
// Ignore local variables.
if (def->spell->kind == SymbolKind::Func &&
def->storage != StorageClass::Static &&

@@ -22,21 +22,21 @@ MAKE_REFLECT_STRUCT(Out_TextDocumentTypeDefinition, jsonrpc, id, result);
struct TextDocumentTypeDefinitionHandler
: BaseMessageHandler<Ipc_TextDocumentTypeDefinition> {
void Run(Ipc_TextDocumentTypeDefinition* request) override {
QueryFile* file;
if (!FindFileOrFail(db, project, request->id,
request->params.textDocument.uri.GetPath(), &file,
nullptr)) {
return;
}
WorkingFile* working_file =
working_files->GetFileByFilename(file->def->path);
QueryFile* file;
if (!FindFileOrFail(db, project, request->id,
request->params.textDocument.uri.GetPath(), &file,
nullptr)) {
return;
}
WorkingFile* working_file =
working_files->GetFileByFilename(file->def->path);

Out_TextDocumentTypeDefinition out;
out.id = request->id;
for (SymbolRef sym :
FindSymbolsAtLocation(working_file, file, request->params.position)) {
Id<void> id = sym.id;
switch (sym.kind) {
Out_TextDocumentTypeDefinition out;
out.id = request->id;
for (SymbolRef sym :
FindSymbolsAtLocation(working_file, file, request->params.position)) {
Id<void> id = sym.id;
switch (sym.kind) {
case SymbolKind::Var: {
const QueryVar::Def* def = db->GetVar(sym).AnyDef();
if (!def || !def->type)

@@ -56,10 +56,10 @@ struct TextDocumentTypeDefinitionHandler
}
default:
break;
}
}
}

QueueManager::WriteStdout(IpcId::TextDocumentTypeDefinition, out);
QueueManager::WriteStdout(IpcId::TextDocumentTypeDefinition, out);
}
};
REGISTER_MESSAGE_HANDLER(TextDocumentTypeDefinitionHandler);

@@ -25,11 +25,11 @@ struct WorkspaceDidChangeConfigurationHandler
Timer time;
project->Load(config, config->projectRoot);
time.ResetAndPrint("[perf] Loaded compilation entries (" +
std::to_string(project->entries.size()) + " files)");
std::to_string(project->entries.size()) + " files)");

time.Reset();
project->Index(config, QueueManager::instance(), working_files,
std::monostate());
std::monostate());
time.ResetAndPrint(
"[perf] Dispatched workspace/didChangeConfiguration index requests");
}

@@ -107,7 +107,8 @@ struct WorkspaceSymbolHandler : BaseMessageHandler<Ipc_WorkspaceSymbol> {

for (int i = 0; i < (int)db->symbols.size(); ++i) {
std::string_view detailed_name = db->GetSymbolDetailedName(i);
if (CaseFoldingSubsequenceMatch(query_without_space, detailed_name).first) {
if (CaseFoldingSubsequenceMatch(query_without_space, detailed_name)
.first) {
// Do not show the same entry twice.
if (!inserted_results.insert(std::string(detailed_name)).second)
continue;

@@ -89,8 +89,7 @@ std::vector<std::string> kPathArgs = {
"-I", "-iquote", "-isystem", "--sysroot=",
"-isysroot", "-gcc-toolchain", "-include-pch", "-iframework",
"-F", "-imacros", "-include", "/I",
"-idirafter"
};
"-idirafter"};

// Arguments which always require an absolute path, ie, clang -working-directory
// does not work as expected. Argument processing assumes that this is a subset

|
||||
comp_db_dir.c_str(), &cx_db_load_error);
|
||||
if (!config->compilationDatabaseCommand.empty()) {
|
||||
#ifdef _WIN32
|
||||
// TODO
|
||||
// TODO
|
||||
#else
|
||||
unlink((comp_db_dir + "/compile_commands.json").c_str());
|
||||
rmdir(comp_db_dir.c_str());
|
||||
|
@@ -38,10 +38,10 @@ struct Project {
// compile_commands.json in it, otherwise they are retrieved in
// |root_directory|.
// For .cquery, recursive directory listing is used and files with known
// suffixes are indexed. .cquery files can exist in subdirectories and they will affect
// flags in their subtrees (relative paths are relative to the project root,
// not subdirectories).
// For compile_commands.json, its entries are indexed.
// suffixes are indexed. .cquery files can exist in subdirectories and they
// will affect flags in their subtrees (relative paths are relative to the
// project root, not subdirectories). For compile_commands.json, its entries
// are indexed.
void Load(Config* config, const std::string& root_directory);

// Lookup the CompilationEntry for |filename|. If no entry was found this

@@ -100,8 +100,7 @@ std::vector<Use> GetDeclarations(QueryDatabase* db,
return GetDeclarations(db->vars, ids);
}

std::vector<Use> GetNonDefDeclarations(QueryDatabase* db,
SymbolIdx sym) {
std::vector<Use> GetNonDefDeclarations(QueryDatabase* db, SymbolIdx sym) {
switch (sym.kind) {
case SymbolKind::Func:
return db->GetFunc(sym).declarations;

@@ -10,10 +10,14 @@ Maybe<Use> GetDefinitionExtent(QueryDatabase* db, SymbolIdx sym);
Maybe<QueryFileId> GetDeclarationFileForSymbol(QueryDatabase* db,
SymbolIdx sym);

// Get defining declaration (if exists) or an arbitrary declaration (otherwise) for each id.
std::vector<Use> GetDeclarations(QueryDatabase* db, const std::vector<QueryFuncId>& ids);
std::vector<Use> GetDeclarations(QueryDatabase* db, const std::vector<QueryTypeId>& ids);
std::vector<Use> GetDeclarations(QueryDatabase* db, const std::vector<QueryVarId>& ids);
// Get defining declaration (if exists) or an arbitrary declaration (otherwise)
// for each id.
std::vector<Use> GetDeclarations(QueryDatabase* db,
const std::vector<QueryFuncId>& ids);
std::vector<Use> GetDeclarations(QueryDatabase* db,
const std::vector<QueryTypeId>& ids);
std::vector<Use> GetDeclarations(QueryDatabase* db,
const std::vector<QueryVarId>& ids);

// Get non-defining declarations.
std::vector<Use> GetNonDefDeclarations(QueryDatabase* db, SymbolIdx sym);

@@ -78,7 +82,10 @@ void EachEntityDef(QueryDatabase* db, SymbolIdx sym, Fn&& fn) {
}

template <typename Fn>
void EachOccurrence(QueryDatabase* db, SymbolIdx sym, bool include_decl, Fn&& fn) {
void EachOccurrence(QueryDatabase* db,
SymbolIdx sym,
bool include_decl,
Fn&& fn) {
WithEntity(db, sym, [&](const auto& entity) {
for (Use use : entity.uses)
fn(use);

@@ -81,7 +81,7 @@ struct Index_OnIndexed {
class QueueManager {
static std::unique_ptr<QueueManager> instance_;

public:
public:
static QueueManager* instance() { return instance_.get(); }
static void Init(MultiQueueWaiter* querydb_waiter,
MultiQueueWaiter* indexer_waiter,

@@ -62,7 +62,7 @@ SemanticHighlightSymbolCache::SemanticHighlightSymbolCache()

void SemanticHighlightSymbolCache::Init(Config* config) {
match_ = std::make_unique<GroupMatch>(config->highlight.whitelist,
config->highlight.blacklist);
config->highlight.blacklist);
}

std::shared_ptr<SemanticHighlightSymbolCache::Entry>

@@ -6,11 +6,11 @@
#include "utils.h"

#include <doctest/doctest.h>
#include <loguru/loguru.hpp>
#include <rapidjson/document.h>
#include <rapidjson/prettywriter.h>
#include <rapidjson/stringbuffer.h>
#include <rapidjson/writer.h>
#include <loguru/loguru.hpp>

#include <stdio.h>
#include <stdlib.h>

@@ -32,12 +32,8 @@ template <typename... Queue>
struct MultiQueueLock {
MultiQueueLock(Queue... lockable) : tuple_{lockable...} { lock(); }
~MultiQueueLock() { unlock(); }
void lock() {
lock_impl(typename std::index_sequence_for<Queue...>{});
}
void unlock() {
unlock_impl(typename std::index_sequence_for<Queue...>{});
}
void lock() { lock_impl(typename std::index_sequence_for<Queue...>{}); }
void unlock() { unlock_impl(typename std::index_sequence_for<Queue...>{}); }

private:
template <size_t... Is>