Use global config.

Fangrui Song 2018-04-03 23:05:41 -07:00
parent 062b1ad0fd
commit fdb562bb42
43 changed files with 211 additions and 299 deletions
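Every hunk in this commit follows the same pattern: the `Config* config`/`config_` members and parameters threaded through managers, handlers and the indexer are removed, and callers read a single global instead. The global itself changes from a value (`Config g_config;`) to `std::unique_ptr<Config>`, so accesses change from `g_config.field` to `g_config->field`. A minimal standalone sketch of the before/after shape (the trimmed `Config` and the `CacheRootFor` helper are illustrative, not ccls declarations):

```cpp
#include <memory>
#include <string>

// Trimmed stand-in for ccls's Config; only the fields used in the cache hunk.
struct Config {
  std::string projectRoot;
  std::string cacheDirectory;
};

// Before this commit: `extern Config g_config;` (a value with static lifetime).
// After: a pointer that stays null until the initialize request constructs it.
extern std::unique_ptr<Config> g_config;
std::unique_ptr<Config> g_config;

// Call sites change from g_config.member to g_config->member; CacheRootFor is
// an illustrative helper echoing RealCacheManager::GetCachePath.
std::string CacheRootFor(const std::string& source_file) {
  // The real code asserts !g_config->cacheDirectory.empty() first.
  return g_config->cacheDirectory + source_file;
}

int main() {
  g_config = std::make_unique<Config>();  // must happen before any dereference
  g_config->cacheDirectory = "/tmp/ccls-cache/";
  return CacheRootFor("main.cc").empty() ? 1 : 0;
}
```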

View File

@ -21,7 +21,7 @@ struct RealCacheManager : ICacheManager {
std::string cache_path = GetCachePath(file.path);
WriteToFile(cache_path, file.file_contents);
std::string indexed_content = Serialize(g_config.cacheFormat, file);
std::string indexed_content = Serialize(g_config->cacheFormat, file);
WriteToFile(AppendSerializationFormat(cache_path), indexed_content);
}
@ -38,34 +38,32 @@ struct RealCacheManager : ICacheManager {
if (!file_content || !serialized_indexed_content)
return nullptr;
return Deserialize(g_config.cacheFormat, path, *serialized_indexed_content,
return Deserialize(g_config->cacheFormat, path, *serialized_indexed_content,
*file_content, IndexFile::kMajorVersion);
}
std::string GetCachePath(const std::string& source_file) {
assert(!g_config.cacheDirectory.empty());
assert(!g_config->cacheDirectory.empty());
std::string cache_file;
size_t len = g_config.projectRoot.size();
if (StartsWith(source_file, g_config.projectRoot)) {
cache_file = EscapeFileName(g_config.projectRoot) +
size_t len = g_config->projectRoot.size();
if (StartsWith(source_file, g_config->projectRoot)) {
cache_file = EscapeFileName(g_config->projectRoot) +
EscapeFileName(source_file.substr(len));
} else {
cache_file = '@' + EscapeFileName(g_config.projectRoot) +
cache_file = '@' + EscapeFileName(g_config->projectRoot) +
EscapeFileName(source_file);
}
return g_config.cacheDirectory + cache_file;
return g_config->cacheDirectory + cache_file;
}
std::string AppendSerializationFormat(const std::string& base) {
switch (g_config.cacheFormat) {
switch (g_config->cacheFormat) {
case SerializeFormat::Json:
return base + ".json";
case SerializeFormat::MessagePack:
return base + ".mpack";
}
assert(false);
return ".json";
}
};

View File

@ -383,7 +383,7 @@ void TryEnsureDocumentParsed(ClangCompleteManager* manager,
unsaved, Flags());
// Build diagnostics.
if (manager->config_->diagnostics.onParse && *tu) {
if (g_config->diagnostics.onParse && *tu) {
// If we're emitting diagnostics, do an immediate reparse, otherwise we will
// emit stale/bad diagnostics.
*tu = ClangTranslationUnit::Reparse(std::move(*tu), unsaved);
@ -450,7 +450,7 @@ void CompletionQueryMain(ClangCompleteManager* completion_manager) {
completion_manager->completion_request_.Dequeue();
// Drop older requests if we're not buffering.
while (completion_manager->config_->completion.dropOldRequests &&
while (g_config->completion.dropOldRequests &&
!completion_manager->completion_request_.IsEmpty()) {
completion_manager->on_dropped_(request->id);
request = completion_manager->completion_request_.Dequeue();
@ -528,7 +528,7 @@ void CompletionQueryMain(ClangCompleteManager* completion_manager) {
clang_getCompletionBriefComment(result.CompletionString));
// label/detail/filterText/insertText/priority
if (completion_manager->config_->completion.detailedLabel) {
if (g_config->completion.detailedLabel) {
ls_completion_item.detail = ToString(
clang_getCompletionParent(result.CompletionString, nullptr));
@ -538,10 +538,10 @@ void CompletionQueryMain(ClangCompleteManager* completion_manager) {
// label/filterText/insertText
BuildCompletionItemTexts(
ls_result, result.CompletionString,
completion_manager->config_->client.snippetSupport);
g_config->client.snippetSupport);
for (auto i = first_idx; i < ls_result.size(); ++i) {
if (completion_manager->config_->client.snippetSupport &&
if (g_config->client.snippetSupport &&
ls_result[i].insertTextFormat ==
lsInsertTextFormat::Snippet) {
ls_result[i].insertText += "$0";
@ -558,8 +558,8 @@ void CompletionQueryMain(ClangCompleteManager* completion_manager) {
ls_completion_item.detail, ls_completion_item.insertText,
do_insert, ls_completion_item.insertTextFormat,
&ls_completion_item.parameters_,
completion_manager->config_->client.snippetSupport);
if (completion_manager->config_->client.snippetSupport &&
g_config->client.snippetSupport);
if (g_config->client.snippetSupport &&
ls_completion_item.insertTextFormat ==
lsInsertTextFormat::Snippet) {
ls_completion_item.insertText += "$0";
@ -658,14 +658,12 @@ ClangCompleteManager::CompletionRequest::CompletionRequest(
on_complete(on_complete),
emit_diagnostics(emit_diagnostics) {}
ClangCompleteManager::ClangCompleteManager(Config* config,
Project* project,
ClangCompleteManager::ClangCompleteManager(Project* project,
WorkingFiles* working_files,
OnDiagnostic on_diagnostic,
OnIndex on_index,
OnDropped on_dropped)
: config_(config),
project_(project),
: project_(project),
working_files_(working_files),
on_diagnostic_(on_diagnostic),
on_index_(on_index),
@ -683,8 +681,6 @@ ClangCompleteManager::ClangCompleteManager(Config* config,
});
}
ClangCompleteManager::~ClangCompleteManager() {}
void ClangCompleteManager::CodeComplete(
const lsRequestId& id,
const lsTextDocumentPositionParams& completion_location,

View File

@ -72,13 +72,11 @@ struct ClangCompleteManager {
bool emit_diagnostics = false;
};
ClangCompleteManager(Config* config,
Project* project,
ClangCompleteManager(Project* project,
WorkingFiles* working_files,
OnDiagnostic on_diagnostic,
OnIndex on_index,
OnDropped on_dropped);
~ClangCompleteManager();
// Start a code completion at the given location. |on_complete| will run when
// completion results are available. |on_complete| may run on any thread.
@ -120,7 +118,6 @@ struct ClangCompleteManager {
const int kMaxCompletionSessions = 5;
// Global state.
Config* config_;
Project* project_;
WorkingFiles* working_files_;
OnDiagnostic on_diagnostic_;

View File

@ -13,7 +13,6 @@
#include <algorithm>
#include <cassert>
#include <chrono>
#include <iostream>
// TODO: See if we can use clang_indexLoc_getFileLocation to get a type ref on
// |Foobar| in DISALLOW_COPY(Foobar)
@ -279,8 +278,6 @@ struct ConstructorCache {
};
struct IndexParam {
Config* config = nullptr;
std::unordered_set<CXFile> seen_cx_files;
std::vector<std::string> seen_files;
FileContentsMap file_contents;
@ -300,10 +297,9 @@ struct IndexParam {
NamespaceHelper ns;
ConstructorCache ctors;
IndexParam(Config* config,
ClangTranslationUnit* tu,
IndexParam(ClangTranslationUnit* tu,
FileConsumer* file_consumer)
: config(config), tu(tu), file_consumer(file_consumer) {}
: tu(tu), file_consumer(file_consumer) {}
#if CINDEX_HAVE_PRETTY
CXPrintingPolicy print_policy = nullptr;
@ -585,7 +581,7 @@ void SetVarDetail(IndexVar* var,
// string. Shorten it to just "lambda".
if (type_name.find("(lambda at") != std::string::npos)
type_name = "lambda";
if (param->config->index.comments)
if (g_config->index.comments)
def.comments = cursor.get_comments();
def.storage = GetStorageClass(clang_Cursor_getStorageClass(cursor.cx_cursor));
@ -870,10 +866,8 @@ CXIdxClientFile OnIndexIncludedFile(CXClientData client_data,
ClangCursor::VisitResult DumpVisitor(ClangCursor cursor,
ClangCursor parent,
int* level) {
for (int i = 0; i < *level; ++i)
std::cerr << " ";
std::cerr << ToString(cursor.get_kind()) << " " << cursor.get_spell_name()
<< std::endl;
fprintf(stderr, "%*s%s %s\n", *level * 2, "",
ToString(cursor.get_kind()).c_str(), cursor.get_spell_name().c_str());
*level += 1;
cursor.VisitChildren(&DumpVisitor, level);
@ -1248,7 +1242,7 @@ ClangCursor::VisitResult VisitMacroDefinitionAndExpansions(ClangCursor cursor,
var_def->def.hover =
"#define " + GetDocumentContentInRange(param->tu->cx_tu, cx_extent);
var_def->def.kind = lsSymbolKind::Macro;
if (param->config->index.comments)
if (g_config->index.comments)
var_def->def.comments = cursor.get_comments();
var_def->def.spell =
SetUse(db, decl_loc_spelling, parent, Role::Definition);
@ -1615,7 +1609,7 @@ void OnIndexDeclaration(CXClientData client_data, const CXIdxDeclInfo* decl) {
IndexFuncId func_id = db->ToFuncId(decl_cursor_resolved.cx_cursor);
IndexFunc* func = db->Resolve(func_id);
if (param->config->index.comments)
if (g_config->index.comments)
func->def.comments = cursor.get_comments();
func->def.kind = GetSymbolKind(decl->entityInfo->kind);
func->def.storage =
@ -1763,7 +1757,7 @@ void OnIndexDeclaration(CXClientData client_data, const CXIdxDeclInfo* decl) {
SetTypeName(type, decl_cursor, decl->semanticContainer,
decl->entityInfo->name, param);
type->def.kind = GetSymbolKind(decl->entityInfo->kind);
if (param->config->index.comments)
if (g_config->index.comments)
type->def.comments = decl_cursor.get_comments();
// For Typedef/CXXTypeAlias spanning a few lines, display the declaration
@ -1808,7 +1802,7 @@ void OnIndexDeclaration(CXClientData client_data, const CXIdxDeclInfo* decl) {
SetTypeName(type, cursor, decl->semanticContainer, decl->entityInfo->name,
param);
type->def.kind = GetSymbolKind(decl->entityInfo->kind);
if (param->config->index.comments)
if (g_config->index.comments)
type->def.comments = cursor.get_comments();
// }
@ -2099,7 +2093,7 @@ void OnIndexReference(CXClientData client_data, const CXIdxEntityRefInfo* ref) {
bool is_template = ref->referencedEntity->templateKind !=
CXIdxEntityCXXTemplateKind::CXIdxEntity_NonTemplate;
if (param->config->index.attributeMakeCallsToCtor && is_template &&
if (g_config->index.attributeMakeCallsToCtor && is_template &&
str_begin("make", ref->referencedEntity->name)) {
// Try to find the return type of called function. That type will have
// the constructor function we add a usage to.
@ -2154,7 +2148,6 @@ void OnIndexReference(CXClientData client_data, const CXIdxEntityRefInfo* ref) {
}
std::vector<std::unique_ptr<IndexFile>> Parse(
Config* config,
FileConsumerSharedState* file_consumer_shared,
std::string file,
const std::vector<std::string>& args,
@ -2162,7 +2155,7 @@ std::vector<std::unique_ptr<IndexFile>> Parse(
PerformanceImportFile* perf,
ClangIndex* index,
bool dump_ast) {
if (!config->index.enabled)
if (!g_config->index.enabled)
return {};
file = NormalizePath(file);
@ -2190,12 +2183,11 @@ std::vector<std::unique_ptr<IndexFile>> Parse(
if (dump_ast)
Dump(clang_getTranslationUnitCursor(tu->cx_tu));
return ParseWithTu(config, file_consumer_shared, perf, tu.get(), index, file,
return ParseWithTu(file_consumer_shared, perf, tu.get(), index, file,
args, unsaved_files);
}
std::vector<std::unique_ptr<IndexFile>> ParseWithTu(
Config* config,
FileConsumerSharedState* file_consumer_shared,
PerformanceImportFile* perf,
ClangTranslationUnit* tu,
@ -2218,7 +2210,7 @@ std::vector<std::unique_ptr<IndexFile>> ParseWithTu(
callback.indexEntityReference = &OnIndexReference;
FileConsumer file_consumer(file_consumer_shared, file);
IndexParam param(config, tu, &file_consumer);
IndexParam param(tu, &file_consumer);
for (const CXUnsavedFile& contents : file_contents) {
param.file_contents[contents.Filename] = FileContents(
contents.Filename, std::string(contents.Contents, contents.Length));

View File

@ -109,7 +109,7 @@ std::string FileName(CXFile file) {
CXString cx_name = clang_getFileName(file);
std::string ret = NormalizePath(ToString(cx_name));
// Resolve /usr/include/c++/7.3.0 symlink.
if (!StartsWith(ret, g_config.projectRoot))
if (!StartsWith(ret, g_config->projectRoot))
ret = fs::canonical(ret);
return ret;
}

View File

@ -110,8 +110,7 @@ See more on https://github.com/MaskRay/ccls/wiki
// QUERYDB MAIN ////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////
bool QueryDbMainLoop(Config* config,
QueryDatabase* db,
bool QueryDbMainLoop(QueryDatabase* db,
MultiQueueWaiter* waiter,
Project* project,
FileConsumerSharedState* file_consumer_shared,
@ -147,7 +146,7 @@ bool QueryDbMainLoop(Config* config,
// TODO: consider rate-limiting and checking for IPC messages so we don't
// block requests / we can serve partial requests.
if (QueryDb_ImportMain(config, db, import_manager, status, semantic_cache,
if (QueryDb_ImportMain(db, import_manager, status, semantic_cache,
working_files)) {
did_work = true;
}
@ -156,7 +155,6 @@ bool QueryDbMainLoop(Config* config,
}
void RunQueryDbThread(const std::string& bin_name,
Config* config,
MultiQueueWaiter* querydb_waiter,
MultiQueueWaiter* indexer_waiter) {
Project project;
@ -166,14 +164,14 @@ void RunQueryDbThread(const std::string& bin_name,
DiagnosticsEngine diag_engine;
ClangCompleteManager clang_complete(
config, &project, &working_files,
&project, &working_files,
[&](std::string path, std::vector<lsDiagnostic> diagnostics) {
diag_engine.Publish(&working_files, path, diagnostics);
},
[&](ClangTranslationUnit* tu, const std::vector<CXUnsavedFile>& unsaved,
const std::string& path, const std::vector<std::string>& args) {
IndexWithTuFromCodeCompletion(config, &file_consumer_shared, tu,
unsaved, path, args);
IndexWithTuFromCodeCompletion(&file_consumer_shared, tu, unsaved, path,
args);
},
[](lsRequestId id) {
if (!std::holds_alternative<std::monostate>(id)) {
@ -187,7 +185,7 @@ void RunQueryDbThread(const std::string& bin_name,
}
});
IncludeComplete include_complete(config, &project);
IncludeComplete include_complete(&project);
auto global_code_complete_cache = std::make_unique<CodeCompleteCache>();
auto non_global_code_complete_cache = std::make_unique<CodeCompleteCache>();
auto signature_cache = std::make_unique<CodeCompleteCache>();
@ -198,7 +196,6 @@ void RunQueryDbThread(const std::string& bin_name,
// Setup shared references.
for (MessageHandler* handler : *MessageHandler::message_handlers) {
handler->config = config;
handler->db = &db;
handler->waiter = indexer_waiter;
handler->project = &project;
@ -221,7 +218,7 @@ void RunQueryDbThread(const std::string& bin_name,
SetCurrentThreadName("querydb");
while (true) {
bool did_work = QueryDbMainLoop(
config, &db, querydb_waiter, &project, &file_consumer_shared,
&db, querydb_waiter, &project, &file_consumer_shared,
&import_manager, &import_pipeline_status, &timestamp_manager,
&semantic_cache, &working_files, &clang_complete, &include_complete,
global_code_complete_cache.get(), non_global_code_complete_cache.get(),
@ -247,8 +244,7 @@ void RunQueryDbThread(const std::string& bin_name,
// blocks.
//
// |ipc| is connected to a server.
void LaunchStdinLoop(Config* config,
std::unordered_map<MethodType, Timer>* request_times) {
void LaunchStdinLoop(std::unordered_map<MethodType, Timer>* request_times) {
// If flushing cin requires flushing cout there could be deadlocks in some
// clients.
std::cin.tie(nullptr);
@ -317,13 +313,12 @@ void LaunchStdoutThread(std::unordered_map<MethodType, Timer>* request_times,
}
void LanguageServerMain(const std::string& bin_name,
Config* config,
MultiQueueWaiter* querydb_waiter,
MultiQueueWaiter* indexer_waiter,
MultiQueueWaiter* stdout_waiter) {
std::unordered_map<MethodType, Timer> request_times;
LaunchStdinLoop(config, &request_times);
LaunchStdinLoop(&request_times);
// We run a dedicated thread for writing to stdout because there can be an
// unknown number of delays when output information.
@ -331,7 +326,7 @@ void LanguageServerMain(const std::string& bin_name,
// Start querydb which takes over this thread. The querydb will launch
// indexer threads as needed.
RunQueryDbThread(bin_name, config, querydb_waiter, indexer_waiter);
RunQueryDbThread(bin_name, querydb_waiter, indexer_waiter);
}
int main(int argc, char** argv) {
@ -410,9 +405,7 @@ int main(int argc, char** argv) {
}
}
// std::cerr << "Running language server" << std::endl;
auto config = std::make_unique<Config>();
LanguageServerMain(argv[0], config.get(), &querydb_waiter, &indexer_waiter,
LanguageServerMain(argv[0], &querydb_waiter, &indexer_waiter,
&stdout_waiter);
}

View File

@ -1,3 +1,3 @@
#include "config.h"
Config g_config;
std::unique_ptr<Config> g_config;

View File

@ -2,6 +2,7 @@
#include "serializer.h"
#include <memory>
#include <string>
/*
@ -197,6 +198,10 @@ struct Config {
// be logged.
bool logSkippedPaths = false;
// Allow indexing on textDocument/didChange.
// May be too slow for big projects, so it is off by default.
bool onDidChange = false;
// Number of indexer threads. If 0, 80% of cores are used.
int threads = 0;
@ -212,10 +217,6 @@ struct Config {
bool sort = true;
} workspaceSymbol;
// Allow indexing on textDocument/didChange.
// May be too slow for big projects, so it is off by default.
bool enableIndexOnDidChange = false;
struct Xref {
// If true, |Location[]| response will include lexical container.
bool container = false;
@ -249,6 +250,7 @@ MAKE_REFLECT_STRUCT(Config::Index,
comments,
enabled,
logSkippedPaths,
onDidChange,
threads,
whitelist);
MAKE_REFLECT_STRUCT(Config::WorkspaceSymbol, maxNum, sort);
@ -276,9 +278,7 @@ MAKE_REFLECT_STRUCT(Config,
index,
workspaceSymbol,
xref,
enableIndexOnDidChange,
dumpAST);
extern Config g_config;
extern std::unique_ptr<Config> g_config;
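Besides switching `g_config` to a `unique_ptr`, this header also moves the didChange toggle from the top-level `enableIndexOnDidChange` into `Config::Index::onDidChange`; the `textDocument/didChange` and `didSave` handlers later in the diff read it as `g_config->index.onDidChange`. A hedged sketch of that nesting (only the two fields touched here; the wrapper function name is invented for illustration):

```cpp
#include <memory>

// Sketch of the slice of Config touched here: the didChange toggle moves from
// a top-level bool into the nested Index struct.
struct Config {
  struct Index {
    bool onDidChange = false;  // formerly the top-level enableIndexOnDidChange
    int threads = 0;
  } index;
};

std::unique_ptr<Config> g_config;

// Illustrative wrapper; the real check sits in Handler_TextDocumentDidChange.
bool ShouldIndexOnDidChange() { return g_config->index.onDidChange; }

int main() {
  g_config = std::make_unique<Config>();
  return ShouldIndexOnDidChange() ? 1 : 0;  // stays off by default
}
```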

View File

@ -4,10 +4,10 @@
#include <chrono>
void DiagnosticsEngine::Init(Config* config) {
frequencyMs_ = config->diagnostics.frequencyMs;
match_ = std::make_unique<GroupMatch>(config->diagnostics.whitelist,
config->diagnostics.blacklist);
void DiagnosticsEngine::Init() {
frequencyMs_ = g_config->diagnostics.frequencyMs;
match_ = std::make_unique<GroupMatch>(g_config->diagnostics.whitelist,
g_config->diagnostics.blacklist);
}
void DiagnosticsEngine::Publish(WorkingFiles* working_files,

View File

@ -9,7 +9,7 @@ class DiagnosticsEngine {
int frequencyMs_;
public:
void Init(Config*);
void Init();
void Publish(WorkingFiles* working_files,
std::string path,
std::vector<lsDiagnostic> diagnostics);

View File

@ -160,7 +160,7 @@ TEST_SUITE("fuzzy_match") {
CHECK(Ranks("ab", {"ab", "aoo_boo", "acb"}));
CHECK(Ranks("CC", {"CamelCase", "camelCase", "camelcase"}));
CHECK(Ranks("cC", {"camelCase", "CamelCase", "camelcase"}));
CHECK(Ranks("c c", {"camel case", "camelCase", "CamelCase", "camelcase",
CHECK(Ranks("c c", {"camelCase", "camel case", "CamelCase", "camelcase",
"camel ace"}));
CHECK(Ranks("Da.Te",
{"Data.Text", "Data.Text.Lazy", "Data.Aeson.Encoding.text"}));

View File

@ -7,19 +7,18 @@ struct ClangIndexer : IIndexer {
~ClangIndexer() override = default;
std::vector<std::unique_ptr<IndexFile>> Index(
Config* config,
FileConsumerSharedState* file_consumer_shared,
std::string file,
const std::vector<std::string>& args,
const std::vector<FileContents>& file_contents,
PerformanceImportFile* perf) override {
bool dump_ast = false;
for (const std::string& pattern : config->dumpAST)
for (const std::string& pattern : g_config->dumpAST)
if (file.find(pattern) != std::string::npos) {
dump_ast = true;
break;
}
return Parse(config, file_consumer_shared, file, args, file_contents, perf,
return Parse(file_consumer_shared, file, args, file_contents, perf,
&index, dump_ast);
}
@ -51,7 +50,6 @@ struct TestIndexer : IIndexer {
~TestIndexer() override = default;
std::vector<std::unique_ptr<IndexFile>> Index(
Config* config,
FileConsumerSharedState* file_consumer_shared,
std::string file,
const std::vector<std::string>& args,

View File

@ -12,7 +12,6 @@
// like IndexFile
// - rename this file to indexer.h
struct Config;
struct IndexFile;
struct FileContents;
struct FileConsumerSharedState;
@ -35,7 +34,6 @@ struct IIndexer {
virtual ~IIndexer() = default;
virtual std::vector<std::unique_ptr<IndexFile>> Index(
Config* config,
FileConsumerSharedState* file_consumer_shared,
std::string file,
const std::vector<std::string>& args,

View File

@ -95,15 +95,15 @@ long long GetCurrentTimeInMilliseconds() {
}
struct ActiveThread {
ActiveThread(Config* config, ImportPipelineStatus* status)
: config_(config), status_(status) {
if (config_->progressReportFrequencyMs < 0)
ActiveThread(ImportPipelineStatus* status)
: status_(status) {
if (g_config->progressReportFrequencyMs < 0)
return;
++status_->num_active_threads;
}
~ActiveThread() {
if (config_->progressReportFrequencyMs < 0)
if (g_config->progressReportFrequencyMs < 0)
return;
--status_->num_active_threads;
@ -122,7 +122,7 @@ struct ActiveThread {
out.params.activeThreads = status_->num_active_threads;
// Ignore this progress update if the last update was too recent.
if (config_->progressReportFrequencyMs != 0) {
if (g_config->progressReportFrequencyMs != 0) {
// Make sure we output a status update if queue lengths are zero.
bool all_zero =
out.params.indexRequestCount == 0 && out.params.doIdMapCount == 0 &&
@ -133,13 +133,12 @@ struct ActiveThread {
GetCurrentTimeInMilliseconds() < status_->next_progress_output)
return;
status_->next_progress_output =
GetCurrentTimeInMilliseconds() + config_->progressReportFrequencyMs;
GetCurrentTimeInMilliseconds() + g_config->progressReportFrequencyMs;
}
QueueManager::WriteStdout(kMethodType_Unknown, out);
}
Config* config_;
ImportPipelineStatus* status_;
};
@ -356,8 +355,7 @@ std::vector<FileContents> PreloadFileContents(
return file_contents;
}
void ParseFile(Config* config,
DiagnosticsEngine* diag_engine,
void ParseFile(DiagnosticsEngine* diag_engine,
WorkingFiles* working_files,
FileConsumerSharedState* file_consumer_shared,
TimestampManager* timestamp_manager,
@ -390,11 +388,11 @@ void ParseFile(Config* config,
std::vector<Index_DoIdMap> result;
PerformanceImportFile perf;
auto indexes = indexer->Index(config, file_consumer_shared, path_to_index,
entry.args, file_contents, &perf);
auto indexes = indexer->Index(file_consumer_shared, path_to_index, entry.args,
file_contents, &perf);
if (indexes.empty()) {
if (config->index.enabled &&
if (g_config->index.enabled &&
!std::holds_alternative<std::monostate>(request.id)) {
Out_Error out;
out.id = request.id;
@ -428,7 +426,6 @@ void ParseFile(Config* config,
}
bool IndexMain_DoParse(
Config* config,
DiagnosticsEngine* diag_engine,
WorkingFiles* working_files,
FileConsumerSharedState* file_consumer_shared,
@ -444,7 +441,7 @@ bool IndexMain_DoParse(
Project::Entry entry;
entry.filename = request->path;
entry.args = request->args;
ParseFile(config, diag_engine, working_files, file_consumer_shared,
ParseFile(diag_engine, working_files, file_consumer_shared,
timestamp_manager, modification_timestamp_fetcher, import_manager,
indexer, request.value(), entry);
return true;
@ -544,7 +541,6 @@ ImportPipelineStatus::ImportPipelineStatus()
// real-time indexing.
// TODO: add option to disable this.
void IndexWithTuFromCodeCompletion(
Config* config,
FileConsumerSharedState* file_consumer_shared,
ClangTranslationUnit* tu,
const std::vector<CXUnsavedFile>& file_contents,
@ -554,10 +550,10 @@ void IndexWithTuFromCodeCompletion(
PerformanceImportFile perf;
ClangIndex index;
auto indexes = ParseWithTu(config, file_consumer_shared, &perf, tu, &index,
path, args, file_contents);
auto indexes = ParseWithTu(file_consumer_shared, &perf, tu, &index, path,
args, file_contents);
if (indexes.empty())
return;
return;
std::vector<Index_DoIdMap> result;
for (std::unique_ptr<IndexFile>& new_index : indexes) {
@ -579,8 +575,7 @@ void IndexWithTuFromCodeCompletion(
QueueManager::instance()->do_id_map.EnqueueAll(std::move(result));
}
void Indexer_Main(Config* config,
DiagnosticsEngine* diag_engine,
void Indexer_Main(DiagnosticsEngine* diag_engine,
FileConsumerSharedState* file_consumer_shared,
TimestampManager* timestamp_manager,
ImportManager* import_manager,
@ -597,7 +592,7 @@ void Indexer_Main(Config* config,
bool did_work = false;
{
ActiveThread active_thread(config, status);
ActiveThread active_thread(status);
// TODO: process all off IndexMain_DoIndex before calling
// IndexMain_DoCreateIndexUpdate for better icache behavior. We need to
@ -608,7 +603,7 @@ void Indexer_Main(Config* config,
// IndexMain_DoCreateIndexUpdate so we don't starve querydb from doing any
// work. Running both also lets the user query the partially constructed
// index.
did_work = IndexMain_DoParse(config, diag_engine, working_files,
did_work = IndexMain_DoParse(diag_engine, working_files,
file_consumer_shared, timestamp_manager,
&modification_timestamp_fetcher,
import_manager, indexer.get()) ||
@ -723,15 +718,14 @@ void QueryDb_OnIndexed(QueueManager* queue,
} // namespace
bool QueryDb_ImportMain(Config* config,
QueryDatabase* db,
bool QueryDb_ImportMain(QueryDatabase* db,
ImportManager* import_manager,
ImportPipelineStatus* status,
SemanticHighlightSymbolCache* semantic_cache,
WorkingFiles* working_files) {
auto* queue = QueueManager::instance();
ActiveThread active_thread(config, status);
ActiveThread active_thread(status);
bool did_work = false;
@ -760,16 +754,17 @@ bool QueryDb_ImportMain(Config* config,
TEST_SUITE("ImportPipeline") {
struct Fixture {
Fixture() {
g_config = std::make_unique<Config>();
QueueManager::Init(&querydb_waiter, &indexer_waiter, &stdout_waiter);
queue = QueueManager::instance();
cache_manager = ICacheManager::MakeFake({});
indexer = IIndexer::MakeTestIndexer({});
diag_engine.Init(&config);
diag_engine.Init();
}
bool PumpOnce() {
return IndexMain_DoParse(&config, &diag_engine, &working_files,
return IndexMain_DoParse(&diag_engine, &working_files,
&file_consumer_shared, &timestamp_manager,
&modification_timestamp_fetcher, &import_manager,
indexer.get());
@ -788,7 +783,6 @@ TEST_SUITE("ImportPipeline") {
MultiQueueWaiter stdout_waiter;
QueueManager* queue = nullptr;
Config config;
DiagnosticsEngine diag_engine;
WorkingFiles working_files;
FileConsumerSharedState file_consumer_shared;
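Because `g_config` is now a pointer, code paths that never see an `initialize` request have to create it themselves; the ImportPipeline fixture above, the Project tests and the index tests all gain a `g_config = std::make_unique<Config>();` line for that reason. A minimal sketch of the fixture pattern (a plain `main` standing in for the doctest fixture):

```cpp
#include <memory>

struct Config {};                  // fields elided
std::unique_ptr<Config> g_config;  // the global defined in config.cc

// Fixture pattern from this commit: construct the global in the test fixture
// (or at the top of the test) so the code under test can dereference it.
struct Fixture {
  Fixture() { g_config = std::make_unique<Config>(); }
};

int main() {
  Fixture f;  // stands in for the doctest TEST_SUITE fixture
  return g_config ? 0 : 1;
}
```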

View File

@ -9,7 +9,6 @@
#include <vector>
struct ClangTranslationUnit;
struct Config;
class DiagnosticsEngine;
struct FileConsumerSharedState;
struct ImportManager;
@ -28,15 +27,13 @@ struct ImportPipelineStatus {
};
void IndexWithTuFromCodeCompletion(
Config* config,
FileConsumerSharedState* file_consumer_shared,
ClangTranslationUnit* tu,
const std::vector<CXUnsavedFile>& file_contents,
const std::string& path,
const std::vector<std::string>& args);
void Indexer_Main(Config* config,
DiagnosticsEngine* diag_engine,
void Indexer_Main(DiagnosticsEngine* diag_engine,
FileConsumerSharedState* file_consumer_shared,
TimestampManager* timestamp_manager,
ImportManager* import_manager,
@ -45,8 +42,7 @@ void Indexer_Main(Config* config,
WorkingFiles* working_files,
MultiQueueWaiter* waiter);
bool QueryDb_ImportMain(Config* config,
QueryDatabase* db,
bool QueryDb_ImportMain(QueryDatabase* db,
ImportManager* import_manager,
ImportPipelineStatus* status,
SemanticHighlightSymbolCache* semantic_cache,

View File

@ -13,14 +13,14 @@ struct CompletionCandidate {
lsCompletionItem completion_item;
};
std::string ElideLongPath(Config* config, const std::string& path) {
if (config->completion.includeMaxPathSize <= 0)
std::string ElideLongPath(const std::string& path) {
if (g_config->completion.includeMaxPathSize <= 0)
return path;
if ((int)path.size() <= config->completion.includeMaxPathSize)
if ((int)path.size() <= g_config->completion.includeMaxPathSize)
return path;
size_t start = path.size() - config->completion.includeMaxPathSize;
size_t start = path.size() - g_config->completion.includeMaxPathSize;
return ".." + path.substr(start + 2);
}
@ -73,12 +73,11 @@ bool TrimPath(Project* project,
return angle;
}
lsCompletionItem BuildCompletionItem(Config* config,
const std::string& path,
lsCompletionItem BuildCompletionItem(const std::string& path,
bool use_angle_brackets,
bool is_stl) {
lsCompletionItem item;
item.label = ElideLongPath(config, path);
item.label = ElideLongPath(path);
item.detail = path; // the include path, used in de-duplicating
item.textEdit = lsTextEdit();
item.textEdit->newText = path;
@ -96,8 +95,8 @@ lsCompletionItem BuildCompletionItem(Config* config,
} // namespace
IncludeComplete::IncludeComplete(Config* config, Project* project)
: is_scanning(false), config_(config), project_(project) {}
IncludeComplete::IncludeComplete(Project* project)
: is_scanning(false), project_(project) {}
void IncludeComplete::Rescan() {
if (is_scanning)
@ -107,17 +106,17 @@ void IncludeComplete::Rescan() {
absolute_path_to_completion_item.clear();
inserted_paths.clear();
if (!match_ && (!config_->completion.includeWhitelist.empty() ||
!config_->completion.includeBlacklist.empty()))
match_ = std::make_unique<GroupMatch>(config_->completion.includeWhitelist,
config_->completion.includeBlacklist);
if (!match_ && (g_config->completion.includeWhitelist.size() ||
g_config->completion.includeBlacklist.size()))
match_ = std::make_unique<GroupMatch>(g_config->completion.includeWhitelist,
g_config->completion.includeBlacklist);
is_scanning = true;
StartThread("scan_includes", [this]() {
Timer timer;
InsertStlIncludes();
InsertIncludesFromDirectory(config_->projectRoot,
InsertIncludesFromDirectory(g_config->projectRoot,
false /*use_angle_brackets*/);
for (const std::string& dir : project_->quote_include_directories)
InsertIncludesFromDirectory(dir, false /*use_angle_brackets*/);
@ -150,16 +149,16 @@ void IncludeComplete::InsertCompletionItem(const std::string& absolute_path,
}
void IncludeComplete::AddFile(const std::string& absolute_path) {
if (!EndsWithAny(absolute_path, config_->completion.includeSuffixWhitelist))
if (!EndsWithAny(absolute_path, g_config->completion.includeSuffixWhitelist))
return;
if (match_ && !match_->IsMatch(absolute_path))
return;
std::string trimmed_path = absolute_path;
bool use_angle_brackets =
TrimPath(project_, config_->projectRoot, &trimmed_path);
lsCompletionItem item = BuildCompletionItem(
config_, trimmed_path, use_angle_brackets, false /*is_stl*/);
TrimPath(project_, g_config->projectRoot, &trimmed_path);
lsCompletionItem item =
BuildCompletionItem(trimmed_path, use_angle_brackets, false /*is_stl*/);
std::unique_lock<std::mutex> lock(completion_items_mutex, std::defer_lock);
if (is_scanning)
@ -180,15 +179,15 @@ void IncludeComplete::InsertIncludesFromDirectory(std::string directory,
GetFilesInFolder(
directory, true /*recursive*/, false /*add_folder_to_path*/,
[&](const std::string& path) {
if (!EndsWithAny(path, config_->completion.includeSuffixWhitelist))
if (!EndsWithAny(path, g_config->completion.includeSuffixWhitelist))
return;
if (match_ && !match_->IsMatch(directory + path))
return;
CompletionCandidate candidate;
candidate.absolute_path = directory + path;
candidate.completion_item = BuildCompletionItem(
config_, path, use_angle_brackets, false /*is_stl*/);
candidate.completion_item =
BuildCompletionItem(path, use_angle_brackets, false /*is_stl*/);
results.push_back(candidate);
});
@ -202,7 +201,7 @@ void IncludeComplete::InsertStlIncludes() {
std::lock_guard<std::mutex> lock(completion_items_mutex);
for (const char* stl_header : kStandardLibraryIncludes) {
completion_items.push_back(BuildCompletionItem(
config_, stl_header, true /*use_angle_brackets*/, true /*is_stl*/));
stl_header, true /*use_angle_brackets*/, true /*is_stl*/));
}
}
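Without +/- markers the interleaved old/new lines above are hard to read; assembled, the post-commit `ElideLongPath` looks like the following reconstruction (the surrounding `Config` slice and its default value are illustrative; only the function body comes from the hunk):

```cpp
#include <cstddef>
#include <memory>
#include <string>

struct Config {
  struct Completion {
    int includeMaxPathSize = 30;  // illustrative default
  } completion;
};
std::unique_ptr<Config> g_config;

// Post-commit ElideLongPath, reassembled from the hunk: the Config* parameter
// is gone and the size limit comes from the global.
std::string ElideLongPath(const std::string& path) {
  if (g_config->completion.includeMaxPathSize <= 0)
    return path;
  if ((int)path.size() <= g_config->completion.includeMaxPathSize)
    return path;
  size_t start = path.size() - g_config->completion.includeMaxPathSize;
  return ".." + path.substr(start + 2);
}

int main() {
  g_config = std::make_unique<Config>();
  return ElideLongPath("/usr/include/some/deeply/nested/header/file.h").empty();
}
```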

View File

@ -10,7 +10,7 @@ struct GroupMatch;
struct Project;
struct IncludeComplete {
IncludeComplete(Config* config, Project* project);
IncludeComplete(Project* project);
// Starts scanning directories. Clears existing cache.
void Rescan();
@ -45,7 +45,6 @@ struct IncludeComplete {
std::unordered_map<std::string, int> inserted_paths;
// Cached references
Config* config_;
Project* project_;
std::unique_ptr<GroupMatch> match_;
};
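The header change above shows the other half of the recurring refactor: classes such as `IncludeComplete`, `ClangCompleteManager` and `MessageHandler` stop caching a `Config*` member and drop it from their constructors, reading the global at the point of use instead. A hedged miniature of that shape (`IncludeCompleteLike` and its suffix check are stand-ins, not ccls code; the whitelist defaults are illustrative):

```cpp
#include <memory>
#include <string>
#include <vector>

struct Config {
  struct Completion {
    std::vector<std::string> includeSuffixWhitelist{".h", ".hpp"};  // illustrative
  } completion;
};
std::unique_ptr<Config> g_config;

// Before: the class stored `Config* config_` and took it in the constructor.
// After: no Config parameter or member; the global is read at the point of use.
struct IncludeCompleteLike {
  bool WantsFile(const std::string& path) const {
    for (const std::string& suffix : g_config->completion.includeSuffixWhitelist)
      if (path.size() >= suffix.size() &&
          path.compare(path.size() - suffix.size(), suffix.size(), suffix) == 0)
        return true;
    return false;
  }
};

int main() {
  g_config = std::make_unique<Config>();
  return IncludeCompleteLike{}.WantsFile("foo.h") ? 0 : 1;
}
```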

View File

@ -518,7 +518,6 @@ struct NamespaceHelper {
// |dependencies| are the existing dependencies of |import_file| if this is a
// reparse.
std::vector<std::unique_ptr<IndexFile>> Parse(
Config* config,
FileConsumerSharedState* file_consumer_shared,
std::string file,
const std::vector<std::string>& args,
@ -527,7 +526,6 @@ std::vector<std::unique_ptr<IndexFile>> Parse(
ClangIndex* index,
bool dump_ast = false);
std::vector<std::unique_ptr<IndexFile>> ParseWithTu(
Config* config,
FileConsumerSharedState* file_consumer_shared,
PerformanceImportFile* perf,
ClangTranslationUnit* tu,

View File

@ -3,7 +3,6 @@
#include <doctest/doctest.h>
#include <algorithm>
#include <iostream>
// VSCode (UTF-16) disagrees with Emacs lsp-mode (UTF-8) on how to represent
// text documents.

View File

@ -7,7 +7,6 @@
#include <loguru.hpp>
#include <stdio.h>
#include <iostream>
MessageRegistry* MessageRegistry::instance_ = nullptr;

View File

@ -101,9 +101,9 @@ SemanticHighlightSymbolCache::Entry::GetMapForSymbol_(SymbolKind kind) {
SemanticHighlightSymbolCache::SemanticHighlightSymbolCache()
: cache_(kCacheSize) {}
void SemanticHighlightSymbolCache::Init(Config* config) {
match_ = std::make_unique<GroupMatch>(config->highlight.whitelist,
config->highlight.blacklist);
void SemanticHighlightSymbolCache::Init() {
match_ = std::make_unique<GroupMatch>(g_config->highlight.whitelist,
g_config->highlight.blacklist);
}
std::shared_ptr<SemanticHighlightSymbolCache::Entry>

View File

@ -55,7 +55,7 @@ struct SemanticHighlightSymbolCache {
std::unique_ptr<GroupMatch> match_;
SemanticHighlightSymbolCache();
void Init(Config*);
void Init();
std::shared_ptr<Entry> GetCacheForFile(const std::string& path);
};
@ -103,7 +103,6 @@ MAKE_REFLECT_STRUCT(Out_CclsPublishSemanticHighlighting,
static type type##message_handler_instance_;
struct MessageHandler {
Config* config = nullptr;
QueryDatabase* db = nullptr;
MultiQueueWaiter* waiter = nullptr;
Project* project = nullptr;

View File

@ -33,15 +33,13 @@ struct Handler_CclsBase : BaseMessageHandler<In_CclsBase> {
FindSymbolsAtLocation(working_file, file, request->params.position)) {
if (sym.kind == SymbolKind::Type) {
if (const auto* def = db->GetType(sym).AnyDef())
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, def->bases),
config->xref.container, config->xref.maxNum);
out.result = GetLsLocationExs(db, working_files,
GetDeclarations(db, def->bases));
break;
} else if (sym.kind == SymbolKind::Func) {
if (const auto* def = db->GetFunc(sym).AnyDef())
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, def->bases),
config->xref.container, config->xref.maxNum);
out.result = GetLsLocationExs(db, working_files,
GetDeclarations(db, def->bases));
break;
}
}

View File

@ -35,9 +35,7 @@ struct Handler_CclsCallers : BaseMessageHandler<In_CclsCallers> {
uses.push_back(func_ref);
for (Use func_ref : GetUsesForAllDerived(db, func))
uses.push_back(func_ref);
out.result =
GetLsLocationExs(db, working_files, uses, config->xref.container,
config->xref.maxNum);
out.result = GetLsLocationExs(db, working_files, uses);
break;
}
}

View File

@ -30,15 +30,13 @@ struct Handler_CclsDerived : BaseMessageHandler<In_CclsDerived> {
FindSymbolsAtLocation(working_file, file, request->params.position)) {
if (sym.kind == SymbolKind::Type) {
QueryType& type = db->GetType(sym);
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, type.derived),
config->xref.container, config->xref.maxNum);
out.result = GetLsLocationExs(db, working_files,
GetDeclarations(db, type.derived));
break;
} else if (sym.kind == SymbolKind::Func) {
QueryFunc& func = db->GetFunc(sym);
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, func.derived),
config->xref.container, config->xref.maxNum);
out.result = GetLsLocationExs(db, working_files,
GetDeclarations(db, func.derived));
break;
}
}

View File

@ -87,8 +87,7 @@ struct Handler_CclsFreshenIndex : BaseMessageHandler<In_CclsFreshenIndex> {
Timer time;
// Send index requests for every file.
project->Index(config, QueueManager::instance(), working_files,
std::monostate());
project->Index(QueueManager::instance(), working_files, std::monostate());
time.ResetAndPrint("[perf] Dispatched $ccls/freshenIndex index requests");
}
};

View File

@ -132,12 +132,10 @@ struct Handler_CclsRandom : BaseMessageHandler<In_CclsRandom> {
for (int i = 0; i < n; i++) {
sum += x[i];
if (sum >= roulette) {
Maybe<Use> use = GetDefinitionExtent(db, syms[i]);
if (!use)
continue;
if (auto ls_loc = GetLsLocationEx(db, working_files, *use,
config->xref.container))
out.result.push_back(*ls_loc);
if (Maybe<Use> use = GetDefinitionExtent(db, syms[i]))
if (auto ls_loc = GetLsLocationEx(db, working_files, *use,
g_config->xref.container))
out.result.push_back(*ls_loc);
break;
}
}

View File

@ -43,9 +43,8 @@ struct Handler_CclsVars : BaseMessageHandler<In_CclsVars> {
// fallthrough
case SymbolKind::Type: {
QueryType& type = db->types[id.id];
out.result = GetLsLocationExs(
db, working_files, GetDeclarations(db, type.instances),
config->xref.container, config->xref.maxNum);
out.result = GetLsLocationExs(db, working_files,
GetDeclarations(db, type.instances));
break;
}
}

View File

@ -428,6 +428,7 @@ struct Handler_Initialize : BaseMessageHandler<In_InitializeRequest> {
JsonWriter json_writer(&writer);
Reflect(json_writer, request->params.initializationOptions);
LOG_S(INFO) << "Init parameters: " << output.GetString();
std::unique_ptr<Config> config;
if (request->params.rootUri) {
std::string project_path =
@ -437,9 +438,9 @@ struct Handler_Initialize : BaseMessageHandler<In_InitializeRequest> {
{
if (request->params.initializationOptions)
*config = *request->params.initializationOptions;
config = std::make_unique<Config>(*request->params.initializationOptions);
else
*config = Config();
config = std::make_unique<Config>();
rapidjson::Document reader;
reader.Parse(g_init_options.c_str());
if (!reader.HasParseError()) {
@ -508,32 +509,29 @@ struct Handler_Initialize : BaseMessageHandler<In_InitializeRequest> {
MakeDirectoryRecursive(config->cacheDirectory + '@' +
EscapeFileName(config->projectRoot));
g_config = *config;
g_config = std::move(config);
Timer time;
diag_engine->Init(config);
semantic_cache->Init(config);
diag_engine->Init();
semantic_cache->Init();
// Open up / load the project.
project->Load(config, project_path);
project->Load(project_path);
time.ResetAndPrint("[perf] Loaded compilation entries (" +
std::to_string(project->entries.size()) + " files)");
// Start indexer threads. Start this after loading the project, as that
// may take a long time. Indexer threads will emit status/progress
// reports.
if (config->index.threads == 0) {
if (g_config->index.threads == 0) {
// If the user has not specified how many indexers to run, try to
// guess an appropriate value. Default to 80% utilization.
const float kDefaultTargetUtilization = 0.8f;
config->index.threads = (int)(std::thread::hardware_concurrency() *
kDefaultTargetUtilization);
if (config->index.threads <= 0)
config->index.threads = 1;
g_config->index.threads =
std::max(int(std::thread::hardware_concurrency() * 0.8), 1);
}
LOG_S(INFO) << "Starting " << config->index.threads << " indexers";
for (int i = 0; i < config->index.threads; ++i) {
LOG_S(INFO) << "Starting " << g_config->index.threads << " indexers";
for (int i = 0; i < g_config->index.threads; ++i) {
StartThread("indexer" + std::to_string(i), [=]() {
Indexer_Main(config, diag_engine, file_consumer_shared,
Indexer_Main(diag_engine, file_consumer_shared,
timestamp_manager, import_manager,
import_pipeline_status, project, working_files, waiter);
});
@ -544,8 +542,7 @@ struct Handler_Initialize : BaseMessageHandler<In_InitializeRequest> {
include_complete->Rescan();
time.Reset();
project->Index(config, QueueManager::instance(), working_files,
request->id);
project->Index(QueueManager::instance(), working_files, request->id);
// We need to support multiple concurrent index processes.
time.ResetAndPrint("[perf] Dispatched initial index requests");
}
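The `initialize` handler is now the one place that still constructs a `Config`: it parses `initializationOptions` into a local `std::unique_ptr<Config>`, moves it into `g_config`, and only then calls the parameterless `Init()` helpers and fills in the indexer thread count (at least one thread, roughly 80% of the hardware threads otherwise). A condensed sketch of that sequence, with the RapidJSON/reflection parsing and the `g_init_options` override replaced by a stub:

```cpp
#include <algorithm>
#include <memory>
#include <thread>

struct Config {
  struct Index { int threads = 0; } index;
};
std::unique_ptr<Config> g_config;

// Stub for the reflection code that fills Config from initializationOptions.
std::unique_ptr<Config> ParseInitializationOptions() {
  return std::make_unique<Config>();
}

// Condensed flow of Handler_Initialize::Run after this commit.
void OnInitialize() {
  std::unique_ptr<Config> config = ParseInitializationOptions();
  g_config = std::move(config);  // hand ownership to the global

  // diag_engine->Init(); semantic_cache->Init();  // both read g_config now

  if (g_config->index.threads == 0) {
    // Default to ~80% of the hardware threads, but never fewer than one.
    g_config->index.threads =
        std::max(int(std::thread::hardware_concurrency() * 0.8), 1);
  }
}

int main() {
  OnInitialize();
  return g_config->index.threads > 0 ? 0 : 1;
}
```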

View File

@ -206,7 +206,7 @@ struct Handler_TextDocumentCodeLens
case SymbolKind::Var: {
QueryVar& var = db->GetVar(sym);
const QueryVar::Def* def = var.AnyDef();
if (!def || (def->is_local() && !config->codeLens.localVariables))
if (!def || (def->is_local() && !g_config->codeLens.localVariables))
continue;
bool force_display = true;

View File

@ -160,9 +160,8 @@ char* tofixedbase64(T input, char* out) {
// when given 1000+ completion items.
void FilterAndSortCompletionResponse(
Out_TextDocumentComplete* complete_response,
const std::string& complete_text,
bool enable) {
if (!enable)
const std::string& complete_text) {
if (!g_config->completion.filterAndSort)
return;
ScopedPerfTimer timer{"FilterAndSortCompletionResponse"};
@ -327,8 +326,7 @@ struct Handler_TextDocumentCompletion : MessageHandler {
return k == result.keyword;
})) {
out.result.items = preprocessorKeywordCompletionItems(result.match);
FilterAndSortCompletionResponse(&out, result.keyword,
config->completion.filterAndSort);
FilterAndSortCompletionResponse(&out, result.keyword);
}
} else if (result.keyword.compare("include") == 0) {
{
@ -342,8 +340,7 @@ struct Handler_TextDocumentCompletion : MessageHandler {
if (quote.empty() || quote == (item.use_angle_brackets_ ? "<" : "\""))
out.result.items.push_back(item);
}
FilterAndSortCompletionResponse(&out, result.pattern,
config->completion.filterAndSort);
FilterAndSortCompletionResponse(&out, result.pattern);
DecorateIncludePaths(result.match, &out.result.items);
}
@ -365,8 +362,7 @@ struct Handler_TextDocumentCompletion : MessageHandler {
out.result.items = results;
// Emit completion results.
FilterAndSortCompletionResponse(&out, existing_completion,
config->completion.filterAndSort);
FilterAndSortCompletionResponse(&out, existing_completion);
QueueManager::WriteStdout(kMethodType, out);
// Cache completion results.

View File

@ -105,9 +105,7 @@ struct Handler_TextDocumentDefinition
if (uses.empty() && on_def)
uses.push_back(*on_def);
}
AddRange(&out.result,
GetLsLocationExs(db, working_files, uses, config->xref.container,
config->xref.maxNum));
AddRange(&out.result, GetLsLocationExs(db, working_files, uses));
if (!out.result.empty())
break;
}
@ -172,7 +170,7 @@ struct Handler_TextDocumentDefinition
Maybe<Use> use = GetDefinitionSpell(db, db->symbols[best_i]);
assert(use);
if (auto ls_loc = GetLsLocationEx(db, working_files, *use,
config->xref.container))
g_config->xref.container))
out.result.push_back(*ls_loc);
}
}

View File

@ -25,7 +25,7 @@ struct Handler_TextDocumentDidChange
void Run(In_TextDocumentDidChange* request) override {
std::string path = request->params.textDocument.uri.GetPath();
working_files->OnChange(request->params);
if (config->enableIndexOnDidChange) {
if (g_config->index.onDidChange) {
std::optional<std::string> content = ReadContent(path);
if (!content) {
LOG_S(ERROR) << "Unable to read file content after saving " << path;

View File

@ -47,7 +47,7 @@ struct Handler_TextDocumentDidSave
// mutex and check to see if we should skip the current request.
// if so, ignore that index response.
// TODO: send as priority request
if (!config->enableIndexOnDidChange) {
if (!g_config->index.onDidChange) {
std::optional<std::string> content = ReadContent(path);
if (!content) {
LOG_S(ERROR) << "Unable to read file content after saving " << path;

View File

@ -56,7 +56,7 @@ struct Handler_TextDocumentReferences
Out_TextDocumentReferences out;
out.id = request->id;
bool container = config->xref.container;
bool container = g_config->xref.container;
for (const SymbolRef& sym :
FindSymbolsAtLocation(working_file, file, request->params.position)) {
@ -94,8 +94,8 @@ struct Handler_TextDocumentReferences
break;
}
if ((int)out.result.size() >= config->xref.maxNum)
out.result.resize(config->xref.maxNum);
if ((int)out.result.size() >= g_config->xref.maxNum)
out.result.resize(g_config->xref.maxNum);
QueueManager::WriteStdout(kMethodType, out);
}
};

View File

@ -50,7 +50,7 @@ struct Handler_TextDocumentTypeDefinition
for (const auto& def : type.def)
if (def.spell) {
if (auto ls_loc = GetLsLocationEx(db, working_files, *def.spell,
config->xref.container))
g_config->xref.container))
out.result.push_back(*ls_loc);
}
break;

View File

@ -28,13 +28,12 @@ struct Handler_WorkspaceDidChangeConfiguration
MethodType GetMethodType() const override { return kMethodType; }
void Run(In_WorkspaceDidChangeConfiguration* request) override {
Timer time;
project->Load(config, config->projectRoot);
project->Load(g_config->projectRoot);
time.ResetAndPrint("[perf] Loaded compilation entries (" +
std::to_string(project->entries.size()) + " files)");
time.Reset();
project->Index(config, QueueManager::instance(), working_files,
std::monostate());
project->Index(QueueManager::instance(), working_files, std::monostate());
time.ResetAndPrint(
"[perf] Dispatched workspace/didChangeConfiguration index requests");

View File

@ -77,8 +77,8 @@ struct Handler_WorkspaceSymbol : BaseMessageHandler<In_WorkspaceSymbol> {
// db->detailed_names indices of each lsSymbolInformation in out.result
std::vector<int> result_indices;
std::vector<lsSymbolInformation> unsorted_results;
inserted_results.reserve(config->workspaceSymbol.maxNum);
result_indices.reserve(config->workspaceSymbol.maxNum);
inserted_results.reserve(g_config->workspaceSymbol.maxNum);
result_indices.reserve(g_config->workspaceSymbol.maxNum);
// We use detailed_names without parameters for matching.
@ -93,14 +93,14 @@ struct Handler_WorkspaceSymbol : BaseMessageHandler<In_WorkspaceSymbol> {
if (InsertSymbolIntoResult(db, working_files, db->symbols[i],
&unsorted_results)) {
result_indices.push_back(i);
if (unsorted_results.size() >= config->workspaceSymbol.maxNum)
if (unsorted_results.size() >= g_config->workspaceSymbol.maxNum)
break;
}
}
}
// Find subsequence matches.
if (unsorted_results.size() < config->workspaceSymbol.maxNum) {
if (unsorted_results.size() < g_config->workspaceSymbol.maxNum) {
std::string query_without_space;
query_without_space.reserve(query.size());
for (char c : query)
@ -118,14 +118,14 @@ struct Handler_WorkspaceSymbol : BaseMessageHandler<In_WorkspaceSymbol> {
if (InsertSymbolIntoResult(db, working_files, db->symbols[i],
&unsorted_results)) {
result_indices.push_back(i);
if (unsorted_results.size() >= config->workspaceSymbol.maxNum)
if (unsorted_results.size() >= g_config->workspaceSymbol.maxNum)
break;
}
}
}
}
if (config->workspaceSymbol.sort && query.size() <= FuzzyMatcher::kMaxPat) {
if (g_config->workspaceSymbol.sort && query.size() <= FuzzyMatcher::kMaxPat) {
// Sort results with a fuzzy matching algorithm.
int longest = 0;
for (int i : result_indices)

View File

@ -21,10 +21,7 @@
#include <unistd.h>
#endif
#include <fstream>
#include <iostream>
#include <limits>
#include <sstream>
#include <unordered_set>
#include <vector>
@ -61,7 +58,7 @@ struct ProjectConfig {
// of clang for the same platform are incompatible). Note that libclang always generate it's own pch
// internally. For details, see https://github.com/Valloric/ycmd/issues/892 .
std::vector<std::string> kBlacklistMulti = {
"-MF", "-MT", "-MQ", "-o", "--serialize-diagnostics", "-Xclang", "-include", "-include-pch"};
"-MF", "-MT", "-MQ", "-o", "--serialize-diagnostics", "-Xclang"};
// Blacklisted flags which are always removed from the command line.
std::vector<std::string> kBlacklist = {
@ -106,7 +103,6 @@ LanguageId SourceFileLanguage(const std::string& path) {
}
Project::Entry GetCompilationEntryFromCompileCommandEntry(
Config* init_opts,
ProjectConfig* config,
const CompileCommandsEntry& entry) {
Project::Entry result;
@ -240,7 +236,7 @@ Project::Entry GetCompilationEntryFromCompileCommandEntry(
// Add -resource-dir so clang can correctly resolve system includes like
// <cstddef>
if (!AnyStartsWith(result.args, "-resource-dir"))
result.args.push_back("-resource-dir=" + init_opts->resourceDirectory);
result.args.push_back("-resource-dir=" + g_config->resourceDirectory);
// There could be a clang version mismatch between what the project uses and
// what ccls uses. Make sure we do not emit warnings for mismatched options.
@ -249,7 +245,7 @@ Project::Entry GetCompilationEntryFromCompileCommandEntry(
// Using -fparse-all-comments enables documentation in the indexer and in
// code completion.
if (init_opts->index.comments > 1 &&
if (g_config->index.comments > 1 &&
!AnyStartsWith(result.args, "-fparse-all-comments")) {
result.args.push_back("-fparse-all-comments");
}
@ -269,8 +265,7 @@ std::vector<std::string> ReadCompilerArgumentsFromFile(
return args;
}
std::vector<Project::Entry> LoadFromDirectoryListing(Config* init_opts,
ProjectConfig* config) {
std::vector<Project::Entry> LoadFromDirectoryListing(ProjectConfig* config) {
std::vector<Project::Entry> result;
config->mode = ProjectMode::DotCcls;
LOG_IF_S(WARNING, !FileExists(config->project_dir + "/.ccls") &&
@ -322,24 +317,22 @@ std::vector<Project::Entry> LoadFromDirectoryListing(Config* init_opts,
if (e.args.empty())
e.args.push_back("%clang"); // Add a Dummy.
e.args.push_back(e.file);
result.push_back(
GetCompilationEntryFromCompileCommandEntry(init_opts, config, e));
result.push_back(GetCompilationEntryFromCompileCommandEntry(config, e));
}
return result;
}
std::vector<Project::Entry> LoadCompilationEntriesFromDirectory(
Config* config,
ProjectConfig* project,
const std::string& opt_compilation_db_dir) {
// If there is a .ccls file always load using directory listing.
if (FileExists(project->project_dir + ".ccls"))
return LoadFromDirectoryListing(config, project);
return LoadFromDirectoryListing(project);
// If |compilationDatabaseCommand| is specified, execute it to get the compdb.
std::string comp_db_dir;
if (config->compilationDatabaseCommand.empty()) {
fs::path comp_db_dir;
if (g_config->compilationDatabaseCommand.empty()) {
project->mode = ProjectMode::CompileCommandsJson;
// Try to load compile_commands.json, but fallback to a project listing.
comp_db_dir = opt_compilation_db_dir.empty() ? project->project_dir
@ -356,32 +349,35 @@ std::vector<Project::Entry> LoadCompilationEntriesFromDirectory(
rapidjson::StringBuffer input;
rapidjson::Writer<rapidjson::StringBuffer> writer(input);
JsonWriter json_writer(&writer);
Reflect(json_writer, *config);
Reflect(json_writer, *g_config);
std::string contents = GetExternalCommandOutput(
std::vector<std::string>{config->compilationDatabaseCommand,
std::vector<std::string>{g_config->compilationDatabaseCommand,
project->project_dir},
input.GetString());
std::ofstream(comp_db_dir + "/compile_commands.json") << contents;
FILE* fout = fopen((comp_db_dir / "compile_commands.json").c_str(), "wb");
fwrite(contents.c_str(), contents.size(), 1, fout);
fclose(fout);
#endif
}
LOG_S(INFO) << "Trying to load compile_commands.json";
fs::path comp_db_path = comp_db_dir / "compile_commands.json";
LOG_S(INFO) << "Trying to load " << comp_db_path.string();
CXCompilationDatabase_Error cx_db_load_error;
CXCompilationDatabase cx_db = clang_CompilationDatabase_fromDirectory(
comp_db_dir.c_str(), &cx_db_load_error);
if (!config->compilationDatabaseCommand.empty()) {
if (!g_config->compilationDatabaseCommand.empty()) {
#ifdef _WIN32
// TODO
#else
unlink((comp_db_dir + "/compile_commands.json").c_str());
unlink(comp_db_path.c_str());
rmdir(comp_db_dir.c_str());
#endif
}
if (cx_db_load_error == CXCompilationDatabase_CanNotLoadDatabase) {
LOG_S(INFO) << "Unable to load compile_commands.json located at \""
<< comp_db_dir << "\"; using directory listing instead.";
return LoadFromDirectoryListing(config, project);
LOG_S(INFO) << "Unable to load " << comp_db_path.string()
<< "; using directory listing instead.";
return LoadFromDirectoryListing(project);
}
Timer clang_time;
@ -421,7 +417,7 @@ std::vector<Project::Entry> LoadCompilationEntriesFromDirectory(
entry.file = entry.ResolveIfRelative(relative_filename);
result.push_back(
GetCompilationEntryFromCompileCommandEntry(config, project, entry));
GetCompilationEntryFromCompileCommandEntry(project, entry));
our_time.Pause();
}
@ -452,13 +448,13 @@ int ComputeGuessScore(std::string_view a, std::string_view b) {
} // namespace
void Project::Load(Config* config, const std::string& root_directory) {
void Project::Load(const std::string& root_directory) {
// Load data.
ProjectConfig project;
project.extra_flags = config->extraClangArguments;
project.extra_flags = g_config->extraClangArguments;
project.project_dir = root_directory;
entries = LoadCompilationEntriesFromDirectory(
config, &project, config->compilationDatabaseDirectory);
&project, g_config->compilationDatabaseDirectory);
// Cleanup / postprocess include directories.
quote_include_directories.assign(project.quote_dirs.begin(),
@ -539,28 +535,24 @@ Project::Entry Project::FindCompilationEntryForFile(
}
void Project::ForAllFilteredFiles(
Config* config,
std::function<void(int i, const Entry& entry)> action) {
GroupMatch matcher(config->index.whitelist, config->index.blacklist);
GroupMatch matcher(g_config->index.whitelist, g_config->index.blacklist);
for (int i = 0; i < entries.size(); ++i) {
const Project::Entry& entry = entries[i];
std::string failure_reason;
if (matcher.IsMatch(entry.filename, &failure_reason))
action(i, entries[i]);
else {
if (config->index.logSkippedPaths) {
LOG_S(INFO) << "[" << i + 1 << "/" << entries.size() << "]: Failed "
<< failure_reason << "; skipping " << entry.filename;
}
else if (g_config->index.logSkippedPaths) {
LOG_S(INFO) << "[" << i + 1 << "/" << entries.size() << "]: Failed "
<< failure_reason << "; skipping " << entry.filename;
}
}
}
void Project::Index(Config* config,
QueueManager* queue,
void Project::Index(QueueManager* queue,
WorkingFiles* wfiles,
lsRequestId id) {
ForAllFilteredFiles(config, [&](int i, const Project::Entry& entry) {
ForAllFilteredFiles([&](int i, const Project::Entry& entry) {
std::optional<std::string> content = ReadContent(entry.filename);
if (!content) {
LOG_S(ERROR) << "When loading project, canont read file "
@ -578,30 +570,22 @@ TEST_SUITE("Project") {
void CheckFlags(const std::string& directory, const std::string& file,
std::vector<std::string> raw,
std::vector<std::string> expected) {
Config config;
g_config = std::make_unique<Config>();
g_config->resourceDirectory = "/w/resource_dir/";
ProjectConfig project;
project.project_dir = "/w/c/s/";
config.resourceDirectory = "/w/resource_dir/";
CompileCommandsEntry entry;
entry.directory = directory;
entry.args = raw;
entry.file = file;
Project::Entry result =
GetCompilationEntryFromCompileCommandEntry(&config, &project, entry);
GetCompilationEntryFromCompileCommandEntry(&project, entry);
if (result.args != expected) {
std::cout << "Raw: " << StringJoin(raw) << std::endl;
std::cout << "Expected: " << StringJoin(expected) << std::endl;
std::cout << "Actual: " << StringJoin(result.args) << std::endl;
}
for (int i = 0; i < std::min(result.args.size(), expected.size()); ++i) {
if (result.args[i] != expected[i]) {
std::cout << std::endl;
std::cout << "mismatch at " << i << std::endl;
std::cout << " expected: " << expected[i] << std::endl;
std::cout << " actual: " << result.args[i] << std::endl;
}
fprintf(stderr, "Raw: %s\n", StringJoin(raw).c_str());
fprintf(stderr, "Expected: %s\n", StringJoin(expected).c_str());
fprintf(stderr, "Actual: %s\n", StringJoin(result.args).c_str());
}
REQUIRE(result.args == expected);
}
@ -671,7 +655,7 @@ TEST_SUITE("Project") {
}
TEST_CASE("Directory extraction") {
Config init_opts;
g_config = std::make_unique<Config>();
ProjectConfig config;
config.project_dir = "/w/c/s/";
@ -701,7 +685,7 @@ TEST_SUITE("Project") {
"foo.cc"};
entry.file = "foo.cc";
Project::Entry result =
GetCompilationEntryFromCompileCommandEntry(&init_opts, &config, entry);
GetCompilationEntryFromCompileCommandEntry(&config, entry);
std::unordered_set<std::string> angle_expected{
"/a_absolute1", "/a_absolute2", "/base/a_relative1",

View File

@ -40,7 +40,7 @@ struct Project {
// will affect flags in their subtrees (relative paths are relative to the
// project root, not subdirectories). For compile_commands.json, its entries
// are indexed.
void Load(Config* config, const std::string& root_directory);
void Load(const std::string& root_directory);
// Lookup the CompilationEntry for |filename|. If no entry was found this
// will infer one based on existing project structure.
@ -55,11 +55,7 @@ struct Project {
// Run |action| on every file in the project.
void ForAllFilteredFiles(
Config* config,
std::function<void(int i, const Entry& entry)> action);
void Index(Config* config,
QueueManager* queue,
WorkingFiles* wfiles,
lsRequestId id);
void Index(QueueManager* queue, WorkingFiles* wfiles, lsRequestId id);
};

View File

@ -251,17 +251,16 @@ std::optional<lsLocationEx> GetLsLocationEx(QueryDatabase* db,
std::vector<lsLocationEx> GetLsLocationExs(QueryDatabase* db,
WorkingFiles* working_files,
const std::vector<Use>& uses,
bool container,
int limit) {
const std::vector<Use>& uses) {
std::vector<lsLocationEx> ret;
for (Use use : uses)
if (auto loc = GetLsLocationEx(db, working_files, use, container))
if (auto loc =
GetLsLocationEx(db, working_files, use, g_config->xref.container))
ret.push_back(*loc);
std::sort(ret.begin(), ret.end());
ret.erase(std::unique(ret.begin(), ret.end()), ret.end());
if (ret.size() > limit)
ret.resize(limit);
if (ret.size() > g_config->xref.maxNum)
ret.resize(g_config->xref.maxNum);
return ret;
}
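The `container` and `limit` arguments of `GetLsLocationExs` were only ever fed from the config, so the helper now reads `g_config->xref.container` and `g_config->xref.maxNum` itself and every handler call above shrinks to three arguments. A standalone sketch of the new shape (`Location`, `Resolve` and `GetLocations` are placeholders for the ccls types and functions; the `maxNum` default is illustrative):

```cpp
#include <algorithm>
#include <memory>
#include <vector>

struct Config {
  struct Xref {
    bool container = false;
    int maxNum = 2000;  // illustrative default
  } xref;
};
std::unique_ptr<Config> g_config;

struct Location {
  int line = 0;
  bool operator<(const Location& o) const { return line < o.line; }
  bool operator==(const Location& o) const { return line == o.line; }
};

// Placeholder for GetLsLocationEx: the container flag is still passed through.
Location Resolve(int use, bool /*container*/) { return Location{use}; }

// Shape of GetLsLocationExs after the commit: the flag and the cap come from
// the global instead of being threaded through every message handler.
std::vector<Location> GetLocations(const std::vector<int>& uses) {
  std::vector<Location> ret;
  for (int use : uses)
    ret.push_back(Resolve(use, g_config->xref.container));
  std::sort(ret.begin(), ret.end());
  ret.erase(std::unique(ret.begin(), ret.end()), ret.end());
  if ((int)ret.size() > g_config->xref.maxNum)
    ret.resize(g_config->xref.maxNum);
  return ret;
}

int main() {
  g_config = std::make_unique<Config>();
  return GetLocations({3, 1, 1, 2}).size() == 3 ? 0 : 1;
}
```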

View File

@ -41,9 +41,7 @@ std::optional<lsLocationEx> GetLsLocationEx(QueryDatabase* db,
bool container);
std::vector<lsLocationEx> GetLsLocationExs(QueryDatabase* db,
WorkingFiles* working_files,
const std::vector<Use>& refs,
bool container,
int limit);
const std::vector<Use>& refs);
// Returns a symbol. The symbol will have *NOT* have a location assigned.
std::optional<lsSymbolInformation> GetSymbolInfo(QueryDatabase* db,
WorkingFiles* working_files,

View File

@ -262,11 +262,11 @@ bool RunIndexTests(const std::string& filter_path, bool enable_update) {
flags.push_back(path);
// Run test.
Config config;
g_config = std::make_unique<Config>();
FileConsumerSharedState file_consumer_shared;
PerformanceImportFile perf;
auto dbs = Parse(&config, &file_consumer_shared, path, flags, {}, &perf,
&index, false /*dump_ast*/);
auto dbs = Parse(&file_consumer_shared, path, flags, {}, &perf, &index,
false /*dump_ast*/);
for (const auto& entry : all_expected_output) {
const std::string& expected_path = entry.first;